repo_name
stringlengths
6
100
path
stringlengths
4
294
copies
stringlengths
1
5
size
stringlengths
4
6
content
stringlengths
606
896k
license
stringclasses
15 values
zhangg/trove
trove/tests/scenario/runners/guest_log_runners.py
2
34388
# Copyright 2015 Tesora Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from swiftclient.client import ClientException import tempfile from troveclient.compat import exceptions from trove.common import cfg from trove.guestagent.common import operating_system from trove.guestagent import guest_log from trove.tests.config import CONFIG from trove.tests.scenario.helpers.test_helper import DataType from trove.tests.scenario import runners from trove.tests.scenario.runners.test_runners import SkipKnownBug from trove.tests.scenario.runners.test_runners import TestRunner CONF = cfg.CONF class GuestLogRunner(TestRunner): def __init__(self): super(GuestLogRunner, self).__init__() self.container = CONF.guest_log_container_name self.prefix_pattern = '%(instance_id)s/%(datastore)s-%(log)s/' self.stopped_log_details = None self._last_log_published = {} self._last_log_contents = {} def _get_last_log_published(self, log_name): return self._last_log_published.get(log_name, None) def _set_last_log_published(self, log_name, published): self._last_log_published[log_name] = published def _get_last_log_contents(self, log_name): return self._last_log_contents.get(log_name, []) def _set_last_log_contents(self, log_name, published): self._last_log_contents[log_name] = published def _get_exposed_user_log_names(self): """Returns the full list of exposed user logs.""" return self.test_helper.get_exposed_user_log_names() def _get_exposed_user_log_name(self): """Return the first exposed user log name.""" 
return self.test_helper.get_exposed_user_log_names()[0] def _get_unexposed_sys_log_name(self): """Return the first unexposed sys log name.""" return self.test_helper.get_unexposed_sys_log_names()[0] def run_test_log_list(self): self.assert_log_list(self.auth_client, self.test_helper.get_exposed_log_list()) def assert_log_list(self, client, expected_list): log_list = list(client.instances.log_list(self.instance_info.id)) log_names = list(ll.name for ll in log_list) self.assert_list_elements_equal(expected_list, log_names) self.register_debug_inst_ids(self.instance_info.id) def run_test_admin_log_list(self): self.assert_log_list(self.admin_client, self.test_helper.get_full_log_list()) def run_test_log_show(self): log_pending = self._set_zero_or_none() log_name = self._get_exposed_user_log_name() self.assert_log_show(self.auth_client, log_name, expected_published=0, expected_pending=log_pending) def _set_zero_or_none(self): """This attempts to handle the case where an existing instance is used. Values that would normally be '0' are not, and must be ignored. """ value = 0 if self.is_using_existing_instance: value = None return value def assert_log_show(self, client, log_name, expected_http_code=200, expected_type=guest_log.LogType.USER.name, expected_status=guest_log.LogStatus.Disabled.name, expected_published=None, expected_pending=None): self.report.log("Executing log_show for log '%s'" % log_name) log_details = client.instances.log_show( self.instance_info.id, log_name) self.assert_client_code(client, expected_http_code) self.assert_log_details( log_details, log_name, expected_type=expected_type, expected_status=expected_status, expected_published=expected_published, expected_pending=expected_pending) def assert_log_details(self, log_details, expected_log_name, expected_type=guest_log.LogType.USER.name, expected_status=guest_log.LogStatus.Disabled.name, expected_published=None, expected_pending=None): """Check that the action generates the proper response data. 
For log_published and log_pending, setting the value to 'None' will skip that check (useful when using an existing instance, as there may be pending things in user logs right from the get-go) and setting it to a value other than '0' will verify that the actual value is '>=value' (since it's impossible to know what the actual value will be at any given time). '0' will still match exclusively. """ self.report.log("Validating log details for log '%s'" % expected_log_name) self._set_last_log_published(expected_log_name, log_details.published) self.assert_equal(expected_log_name, log_details.name, "Wrong log name for '%s' log" % expected_log_name) self.assert_equal(expected_type, log_details.type, "Wrong log type for '%s' log" % expected_log_name) current_status = log_details.status.replace(' ', '_') if not isinstance(expected_status, list): expected_status = [expected_status] self.assert_is_sublist([current_status], expected_status, "Wrong log status for '%s' log" % expected_log_name) if expected_published is None: pass elif expected_published == 0: self.assert_equal(0, log_details.published, "Wrong log published for '%s' log" % expected_log_name) else: self.assert_true(log_details.published >= expected_published, "Missing log published for '%s' log: " "expected %d, got %d" % (expected_log_name, expected_published, log_details.published)) if expected_pending is None: pass elif expected_pending == 0: self.assert_equal(0, log_details.pending, "Wrong log pending for '%s' log" % expected_log_name) else: self.assert_true(log_details.pending >= expected_pending, "Missing log pending for '%s' log: " "expected %d, got %d" % (expected_log_name, expected_pending, log_details.pending)) container = self.container prefix = self.prefix_pattern % { 'instance_id': self.instance_info.id, 'datastore': CONFIG.dbaas_datastore, 'log': expected_log_name} metafile = prefix.rstrip('/') + '_metafile' if expected_published == 0: self.assert_storage_gone(container, prefix, metafile) container = 
'None' prefix = 'None' else: self.assert_storage_exists(container, prefix, metafile) self.assert_equal(container, log_details.container, "Wrong log container for '%s' log" % expected_log_name) self.assert_equal(prefix, log_details.prefix, "Wrong log prefix for '%s' log" % expected_log_name) self.assert_equal(metafile, log_details.metafile, "Wrong log metafile for '%s' log" % expected_log_name) def assert_log_enable(self, client, log_name, expected_http_code=200, expected_type=guest_log.LogType.USER.name, expected_status=guest_log.LogStatus.Disabled.name, expected_published=None, expected_pending=None): self.report.log("Executing log_enable for log '%s'" % log_name) log_details = client.instances.log_enable( self.instance_info.id, log_name) self.assert_client_code(client, expected_http_code) self.assert_log_details( log_details, log_name, expected_type=expected_type, expected_status=expected_status, expected_published=expected_published, expected_pending=expected_pending) def assert_log_disable(self, client, log_name, discard=None, expected_http_code=200, expected_type=guest_log.LogType.USER.name, expected_status=guest_log.LogStatus.Disabled.name, expected_published=None, expected_pending=None): self.report.log("Executing log_disable for log '%s' (discard: %s)" % (log_name, discard)) log_details = client.instances.log_disable( self.instance_info.id, log_name, discard=discard) self.assert_client_code(client, expected_http_code) self.assert_log_details( log_details, log_name, expected_type=expected_type, expected_status=expected_status, expected_published=expected_published, expected_pending=expected_pending) def assert_log_publish(self, client, log_name, disable=None, discard=None, expected_http_code=200, expected_type=guest_log.LogType.USER.name, expected_status=guest_log.LogStatus.Disabled.name, expected_published=None, expected_pending=None): self.report.log("Executing log_publish for log '%s' (disable: %s " "discard: %s)" % (log_name, disable, discard)) 
log_details = client.instances.log_publish( self.instance_info.id, log_name, disable=disable, discard=discard) self.assert_client_code(client, expected_http_code) self.assert_log_details( log_details, log_name, expected_type=expected_type, expected_status=expected_status, expected_published=expected_published, expected_pending=expected_pending) def assert_log_discard(self, client, log_name, expected_http_code=200, expected_type=guest_log.LogType.USER.name, expected_status=guest_log.LogStatus.Disabled.name, expected_published=None, expected_pending=None): self.report.log("Executing log_discard for log '%s'" % log_name) log_details = client.instances.log_discard( self.instance_info.id, log_name) self.assert_client_code(client, expected_http_code) self.assert_log_details( log_details, log_name, expected_type=expected_type, expected_status=expected_status, expected_published=expected_published, expected_pending=expected_pending) def assert_storage_gone(self, container, prefix, metafile): try: headers, container_files = self.swift_client.get_container( container, prefix=prefix) self.assert_equal(0, len(container_files), "Found files in %s/%s: %s" % (container, prefix, container_files)) except ClientException as ex: if ex.http_status == 404: self.report.log("Container '%s' does not exist" % container) pass else: raise try: self.swift_client.get_object(container, metafile) self.fail("Found metafile after discard: %s" % metafile) except ClientException as ex: if ex.http_status == 404: self.report.log("Metafile '%s' gone as expected" % metafile) pass else: raise def assert_storage_exists(self, container, prefix, metafile): try: headers, container_files = self.swift_client.get_container( container, prefix=prefix) self.assert_true(len(container_files) > 0, "No files found in %s/%s" % (container, prefix)) except ClientException as ex: if ex.http_status == 404: self.fail("Container '%s' does not exist" % container) else: raise try: self.swift_client.get_object(container, 
metafile) except ClientException as ex: if ex.http_status == 404: self.fail("Missing metafile: %s" % metafile) else: raise def run_test_log_enable_sys(self, expected_exception=exceptions.BadRequest, expected_http_code=400): log_name = self._get_unexposed_sys_log_name() self.assert_log_enable_fails( self.admin_client, expected_exception, expected_http_code, log_name) def assert_log_enable_fails(self, client, expected_exception, expected_http_code, log_name): self.assert_raises(expected_exception, expected_http_code, client, client.instances.log_enable, self.instance_info.id, log_name) def run_test_log_disable_sys(self, expected_exception=exceptions.BadRequest, expected_http_code=400): log_name = self._get_unexposed_sys_log_name() self.assert_log_disable_fails( self.admin_client, expected_exception, expected_http_code, log_name) def assert_log_disable_fails(self, client, expected_exception, expected_http_code, log_name, discard=None): self.assert_raises(expected_exception, expected_http_code, client, client.instances.log_disable, self.instance_info.id, log_name, discard=discard) def run_test_log_show_unauth_user(self, expected_exception=exceptions.NotFound, expected_http_code=404): log_name = self._get_exposed_user_log_name() self.assert_log_show_fails( self.unauth_client, expected_exception, expected_http_code, log_name) def assert_log_show_fails(self, client, expected_exception, expected_http_code, log_name): self.assert_raises(expected_exception, expected_http_code, client, client.instances.log_show, self.instance_info.id, log_name) def run_test_log_list_unauth_user(self, expected_exception=exceptions.NotFound, expected_http_code=404): client = self.unauth_client self.assert_raises(expected_exception, expected_http_code, client, client.instances.log_list, self.instance_info.id) def run_test_log_generator_unauth_user( self, expected_exception=exceptions.NotFound, expected_http_code=404): log_name = self._get_exposed_user_log_name() 
self.assert_log_generator_unauth_user( self.unauth_client, log_name, expected_exception, expected_http_code) def assert_log_generator_unauth_user(self, client, log_name, expected_exception, expected_http_code, publish=None): raise SkipKnownBug(runners.BUG_UNAUTH_TEST_WRONG) # self.assert_raises(expected_exception, expected_http_code, # client, client.instances.log_generator, # self.instance_info.id, log_name, publish=publish) def run_test_log_generator_publish_unauth_user( self, expected_exception=exceptions.NotFound, expected_http_code=404): log_name = self._get_exposed_user_log_name() self.assert_log_generator_unauth_user( self.unauth_client, log_name, expected_exception, expected_http_code, publish=True) def run_test_log_show_unexposed_user( self, expected_exception=exceptions.BadRequest, expected_http_code=400): log_name = self._get_unexposed_sys_log_name() self.assert_log_show_fails( self.auth_client, expected_exception, expected_http_code, log_name) def run_test_log_enable_unexposed_user( self, expected_exception=exceptions.BadRequest, expected_http_code=400): log_name = self._get_unexposed_sys_log_name() self.assert_log_enable_fails( self.auth_client, expected_exception, expected_http_code, log_name) def run_test_log_disable_unexposed_user( self, expected_exception=exceptions.BadRequest, expected_http_code=400): log_name = self._get_unexposed_sys_log_name() self.assert_log_disable_fails( self.auth_client, expected_exception, expected_http_code, log_name) def run_test_log_publish_unexposed_user( self, expected_exception=exceptions.BadRequest, expected_http_code=400): log_name = self._get_unexposed_sys_log_name() self.assert_log_publish_fails( self.auth_client, expected_exception, expected_http_code, log_name) def assert_log_publish_fails(self, client, expected_exception, expected_http_code, log_name, disable=None, discard=None): self.assert_raises(expected_exception, expected_http_code, client, client.instances.log_publish, self.instance_info.id, log_name, 
disable=disable, discard=discard) def run_test_log_discard_unexposed_user( self, expected_exception=exceptions.BadRequest, expected_http_code=400): log_name = self._get_unexposed_sys_log_name() self.assert_log_discard_fails( self.auth_client, expected_exception, expected_http_code, log_name) def assert_log_discard_fails(self, client, expected_exception, expected_http_code, log_name): self.assert_raises(expected_exception, expected_http_code, client, client.instances.log_discard, self.instance_info.id, log_name) def run_test_log_enable_user(self): expected_status = guest_log.LogStatus.Ready.name expected_pending = 1 if self.test_helper.log_enable_requires_restart(): expected_status = guest_log.LogStatus.Restart_Required.name # if using an existing instance, there may already be something expected_pending = self._set_zero_or_none() for log_name in self._get_exposed_user_log_names(): self.assert_log_enable( self.auth_client, log_name, expected_status=expected_status, expected_published=0, expected_pending=expected_pending) def run_test_log_enable_flip_user(self): # for restart required datastores, test that flipping them # back to disabled returns the status to 'Disabled' # from 'Restart_Required' if self.test_helper.log_enable_requires_restart(): # if using an existing instance, there may already be something expected_pending = self._set_zero_or_none() for log_name in self._get_exposed_user_log_names(): self.assert_log_disable( self.auth_client, log_name, expected_status=guest_log.LogStatus.Disabled.name, expected_published=0, expected_pending=expected_pending) self.assert_log_enable( self.auth_client, log_name, expected_status=guest_log.LogStatus.Restart_Required.name, expected_published=0, expected_pending=expected_pending) def run_test_restart_datastore(self, expected_http_code=202): if self.test_helper.log_enable_requires_restart(): instance_id = self.instance_info.id # we need to wait until the heartbeat flips the instance # back into 'ACTIVE' before we issue 
the restart command expected_states = ['RESTART_REQUIRED', 'ACTIVE'] self.assert_instance_action(instance_id, expected_states) client = self.auth_client client.instances.restart(instance_id) self.assert_client_code(client, expected_http_code) def run_test_wait_for_restart(self, expected_states=['REBOOT', 'ACTIVE']): if self.test_helper.log_enable_requires_restart(): self.assert_instance_action(self.instance_info.id, expected_states) def run_test_log_publish_user(self): for log_name in self._get_exposed_user_log_names(): self.assert_log_publish( self.auth_client, log_name, expected_status=[guest_log.LogStatus.Published.name, guest_log.LogStatus.Partial.name], expected_published=1, expected_pending=None) def run_test_add_data(self): self.test_helper.add_data(DataType.micro, self.get_instance_host()) def run_test_verify_data(self): self.test_helper.verify_data(DataType.micro, self.get_instance_host()) def run_test_log_publish_again_user(self): for log_name in self._get_exposed_user_log_names(): self.assert_log_publish( self.admin_client, log_name, expected_status=[guest_log.LogStatus.Published.name, guest_log.LogStatus.Partial.name], expected_published=self._get_last_log_published(log_name), expected_pending=None) def run_test_log_generator_user(self): for log_name in self._get_exposed_user_log_names(): self.assert_log_generator( self.auth_client, log_name, lines=2, expected_lines=2) def assert_log_generator(self, client, log_name, publish=False, lines=4, expected_lines=None, swift_client=None): self.report.log("Executing log_generator for log '%s' (publish: %s)" % (log_name, publish)) log_gen = client.instances.log_generator( self.instance_info.id, log_name, publish=publish, lines=lines, swift=swift_client) log_contents = "".join([chunk for chunk in log_gen()]) self.report.log("Returned %d lines for log '%s': %s" % ( len(log_contents.splitlines()), log_name, log_contents)) self._set_last_log_contents(log_name, log_contents) if expected_lines: 
self.assert_equal(expected_lines, len(log_contents.splitlines()), "Wrong line count for '%s' log" % log_name) else: self.assert_true(len(log_contents.splitlines()) <= lines, "More than %d lines found for '%s' log" % (lines, log_name)) def run_test_log_generator_publish_user(self): for log_name in self._get_exposed_user_log_names(): self.assert_log_generator( self.auth_client, log_name, publish=True, lines=3, expected_lines=3) def run_test_log_generator_swift_client_user(self): swift_client = self.swift_client for log_name in self._get_exposed_user_log_names(): self.assert_log_generator( self.auth_client, log_name, publish=True, lines=3, expected_lines=3, swift_client=swift_client) def run_test_add_data_again(self): # Add some more data so we have at least 3 log data files self.test_helper.add_data(DataType.micro2, self.get_instance_host()) def run_test_verify_data_again(self): self.test_helper.verify_data(DataType.micro2, self.get_instance_host()) def run_test_log_generator_user_by_row(self): log_name = self._get_exposed_user_log_name() self.assert_log_publish( self.auth_client, log_name, expected_status=[guest_log.LogStatus.Published.name, guest_log.LogStatus.Partial.name], expected_published=self._get_last_log_published(log_name), expected_pending=None) # Now get the full contents of the log self.assert_log_generator(self.auth_client, log_name, lines=100000) log_lines = len(self._get_last_log_contents(log_name).splitlines()) # cap at 100, so the test can't run away if something goes wrong log_lines = min(log_lines, 100) # Make sure we get the right number of log lines back each time for lines in range(1, log_lines): self.assert_log_generator( self.auth_client, log_name, lines=lines, expected_lines=lines) def run_test_log_save_user(self): for log_name in self._get_exposed_user_log_names(): self.assert_test_log_save(self.auth_client, log_name) def run_test_log_save_publish_user(self): for log_name in self._get_exposed_user_log_names(): 
self.assert_test_log_save(self.auth_client, log_name, publish=True) def assert_test_log_save(self, client, log_name, publish=False): # generate the file self.report.log("Executing log_save for log '%s' (publish: %s)" % (log_name, publish)) with tempfile.NamedTemporaryFile() as temp_file: client.instances.log_save(self.instance_info.id, log_name=log_name, publish=publish, filename=temp_file.name) file_contents = operating_system.read_file(temp_file.name) # now grab the contents ourselves self.assert_log_generator(client, log_name, lines=100000) # and compare them self.assert_equal(self._get_last_log_contents(log_name), file_contents) def run_test_log_discard_user(self): for log_name in self._get_exposed_user_log_names(): self.assert_log_discard( self.auth_client, log_name, expected_status=guest_log.LogStatus.Ready.name, expected_published=0, expected_pending=1) def run_test_log_disable_user(self): expected_status = guest_log.LogStatus.Disabled.name if self.test_helper.log_enable_requires_restart(): expected_status = guest_log.LogStatus.Restart_Required.name for log_name in self._get_exposed_user_log_names(): self.assert_log_disable( self.auth_client, log_name, expected_status=expected_status, expected_published=0, expected_pending=1) def run_test_log_show_after_stop_details(self): log_name = self._get_exposed_user_log_name() self.stopped_log_details = self.auth_client.instances.log_show( self.instance_info.id, log_name) self.assert_is_not_none(self.stopped_log_details) def run_test_add_data_again_after_stop(self): # Add some more data to make sure logging has stopped self.test_helper.add_data(DataType.micro3, self.get_instance_host()) def run_test_verify_data_again_after_stop(self): self.test_helper.verify_data(DataType.micro3, self.get_instance_host()) def run_test_log_show_after_stop(self): log_name = self._get_exposed_user_log_name() self.assert_log_show( self.auth_client, log_name, expected_published=self.stopped_log_details.published, 
expected_pending=self.stopped_log_details.pending) def run_test_log_enable_user_after_stop(self): expected_status = guest_log.LogStatus.Ready.name expected_pending = 1 if self.test_helper.log_enable_requires_restart(): expected_status = guest_log.LogStatus.Restart_Required.name log_name = self._get_exposed_user_log_name() self.assert_log_enable( self.auth_client, log_name, expected_status=expected_status, expected_published=0, expected_pending=expected_pending) def run_test_add_data_again_after_stop_start(self): # Add some more data to make sure logging has started again self.test_helper.add_data(DataType.micro4, self.get_instance_host()) def run_test_verify_data_again_after_stop_start(self): self.test_helper.verify_data(DataType.micro4, self.get_instance_host()) def run_test_log_publish_after_stop_start(self): log_name = self._get_exposed_user_log_name() self.assert_log_publish( self.auth_client, log_name, expected_status=[guest_log.LogStatus.Published.name, guest_log.LogStatus.Partial.name], expected_published=self._get_last_log_published(log_name) + 1, expected_pending=None) def run_test_log_disable_user_after_stop_start(self): expected_status = guest_log.LogStatus.Disabled.name if self.test_helper.log_enable_requires_restart(): expected_status = guest_log.LogStatus.Restart_Required.name log_name = self._get_exposed_user_log_name() self.assert_log_disable( self.auth_client, log_name, discard=True, expected_status=expected_status, expected_published=0, expected_pending=1) def run_test_log_show_sys(self): log_name = self._get_unexposed_sys_log_name() self.assert_log_show( self.admin_client, log_name, expected_type=guest_log.LogType.SYS.name, expected_status=[guest_log.LogStatus.Ready.name, guest_log.LogStatus.Partial.name], expected_published=0, expected_pending=1) def run_test_log_publish_sys(self): log_name = self._get_unexposed_sys_log_name() self.assert_log_publish( self.admin_client, log_name, expected_type=guest_log.LogType.SYS.name, 
expected_status=guest_log.LogStatus.Partial.name, expected_published=1, expected_pending=1) def run_test_log_publish_again_sys(self): log_name = self._get_unexposed_sys_log_name() self.assert_log_publish( self.admin_client, log_name, expected_type=guest_log.LogType.SYS.name, expected_status=guest_log.LogStatus.Partial.name, expected_published=self._get_last_log_published(log_name) + 1, expected_pending=1) def run_test_log_generator_sys(self): log_name = self._get_unexposed_sys_log_name() self.assert_log_generator( self.admin_client, log_name, lines=4, expected_lines=4) def run_test_log_generator_publish_sys(self): log_name = self._get_unexposed_sys_log_name() self.assert_log_generator( self.admin_client, log_name, publish=True, lines=4, expected_lines=4) def run_test_log_generator_swift_client_sys(self): log_name = self._get_unexposed_sys_log_name() self.assert_log_generator( self.admin_client, log_name, publish=True, lines=4, expected_lines=4, swift_client=self.swift_client) def run_test_log_save_sys(self): log_name = self._get_unexposed_sys_log_name() self.assert_test_log_save( self.admin_client, log_name) def run_test_log_save_publish_sys(self): log_name = self._get_unexposed_sys_log_name() self.assert_test_log_save( self.admin_client, log_name, publish=True) def run_test_log_discard_sys(self): log_name = self._get_unexposed_sys_log_name() self.assert_log_discard( self.admin_client, log_name, expected_type=guest_log.LogType.SYS.name, expected_status=guest_log.LogStatus.Ready.name, expected_published=0, expected_pending=1) class CassandraGuestLogRunner(GuestLogRunner): def run_test_log_show(self): log_name = self._get_exposed_user_log_name() self.assert_log_show(self.auth_client, log_name, expected_published=0, expected_pending=None)
apache-2.0
igemsoftware/SYSU-Software2013
project/Python27_32/Lib/htmlentitydefs.py
390
18054
"""HTML character entity references.""" # maps the HTML entity name to the Unicode codepoint name2codepoint = { 'AElig': 0x00c6, # latin capital letter AE = latin capital ligature AE, U+00C6 ISOlat1 'Aacute': 0x00c1, # latin capital letter A with acute, U+00C1 ISOlat1 'Acirc': 0x00c2, # latin capital letter A with circumflex, U+00C2 ISOlat1 'Agrave': 0x00c0, # latin capital letter A with grave = latin capital letter A grave, U+00C0 ISOlat1 'Alpha': 0x0391, # greek capital letter alpha, U+0391 'Aring': 0x00c5, # latin capital letter A with ring above = latin capital letter A ring, U+00C5 ISOlat1 'Atilde': 0x00c3, # latin capital letter A with tilde, U+00C3 ISOlat1 'Auml': 0x00c4, # latin capital letter A with diaeresis, U+00C4 ISOlat1 'Beta': 0x0392, # greek capital letter beta, U+0392 'Ccedil': 0x00c7, # latin capital letter C with cedilla, U+00C7 ISOlat1 'Chi': 0x03a7, # greek capital letter chi, U+03A7 'Dagger': 0x2021, # double dagger, U+2021 ISOpub 'Delta': 0x0394, # greek capital letter delta, U+0394 ISOgrk3 'ETH': 0x00d0, # latin capital letter ETH, U+00D0 ISOlat1 'Eacute': 0x00c9, # latin capital letter E with acute, U+00C9 ISOlat1 'Ecirc': 0x00ca, # latin capital letter E with circumflex, U+00CA ISOlat1 'Egrave': 0x00c8, # latin capital letter E with grave, U+00C8 ISOlat1 'Epsilon': 0x0395, # greek capital letter epsilon, U+0395 'Eta': 0x0397, # greek capital letter eta, U+0397 'Euml': 0x00cb, # latin capital letter E with diaeresis, U+00CB ISOlat1 'Gamma': 0x0393, # greek capital letter gamma, U+0393 ISOgrk3 'Iacute': 0x00cd, # latin capital letter I with acute, U+00CD ISOlat1 'Icirc': 0x00ce, # latin capital letter I with circumflex, U+00CE ISOlat1 'Igrave': 0x00cc, # latin capital letter I with grave, U+00CC ISOlat1 'Iota': 0x0399, # greek capital letter iota, U+0399 'Iuml': 0x00cf, # latin capital letter I with diaeresis, U+00CF ISOlat1 'Kappa': 0x039a, # greek capital letter kappa, U+039A 'Lambda': 0x039b, # greek capital letter lambda, U+039B ISOgrk3 
'Mu': 0x039c, # greek capital letter mu, U+039C 'Ntilde': 0x00d1, # latin capital letter N with tilde, U+00D1 ISOlat1 'Nu': 0x039d, # greek capital letter nu, U+039D 'OElig': 0x0152, # latin capital ligature OE, U+0152 ISOlat2 'Oacute': 0x00d3, # latin capital letter O with acute, U+00D3 ISOlat1 'Ocirc': 0x00d4, # latin capital letter O with circumflex, U+00D4 ISOlat1 'Ograve': 0x00d2, # latin capital letter O with grave, U+00D2 ISOlat1 'Omega': 0x03a9, # greek capital letter omega, U+03A9 ISOgrk3 'Omicron': 0x039f, # greek capital letter omicron, U+039F 'Oslash': 0x00d8, # latin capital letter O with stroke = latin capital letter O slash, U+00D8 ISOlat1 'Otilde': 0x00d5, # latin capital letter O with tilde, U+00D5 ISOlat1 'Ouml': 0x00d6, # latin capital letter O with diaeresis, U+00D6 ISOlat1 'Phi': 0x03a6, # greek capital letter phi, U+03A6 ISOgrk3 'Pi': 0x03a0, # greek capital letter pi, U+03A0 ISOgrk3 'Prime': 0x2033, # double prime = seconds = inches, U+2033 ISOtech 'Psi': 0x03a8, # greek capital letter psi, U+03A8 ISOgrk3 'Rho': 0x03a1, # greek capital letter rho, U+03A1 'Scaron': 0x0160, # latin capital letter S with caron, U+0160 ISOlat2 'Sigma': 0x03a3, # greek capital letter sigma, U+03A3 ISOgrk3 'THORN': 0x00de, # latin capital letter THORN, U+00DE ISOlat1 'Tau': 0x03a4, # greek capital letter tau, U+03A4 'Theta': 0x0398, # greek capital letter theta, U+0398 ISOgrk3 'Uacute': 0x00da, # latin capital letter U with acute, U+00DA ISOlat1 'Ucirc': 0x00db, # latin capital letter U with circumflex, U+00DB ISOlat1 'Ugrave': 0x00d9, # latin capital letter U with grave, U+00D9 ISOlat1 'Upsilon': 0x03a5, # greek capital letter upsilon, U+03A5 ISOgrk3 'Uuml': 0x00dc, # latin capital letter U with diaeresis, U+00DC ISOlat1 'Xi': 0x039e, # greek capital letter xi, U+039E ISOgrk3 'Yacute': 0x00dd, # latin capital letter Y with acute, U+00DD ISOlat1 'Yuml': 0x0178, # latin capital letter Y with diaeresis, U+0178 ISOlat2 'Zeta': 0x0396, # greek capital letter zeta, 
U+0396 'aacute': 0x00e1, # latin small letter a with acute, U+00E1 ISOlat1 'acirc': 0x00e2, # latin small letter a with circumflex, U+00E2 ISOlat1 'acute': 0x00b4, # acute accent = spacing acute, U+00B4 ISOdia 'aelig': 0x00e6, # latin small letter ae = latin small ligature ae, U+00E6 ISOlat1 'agrave': 0x00e0, # latin small letter a with grave = latin small letter a grave, U+00E0 ISOlat1 'alefsym': 0x2135, # alef symbol = first transfinite cardinal, U+2135 NEW 'alpha': 0x03b1, # greek small letter alpha, U+03B1 ISOgrk3 'amp': 0x0026, # ampersand, U+0026 ISOnum 'and': 0x2227, # logical and = wedge, U+2227 ISOtech 'ang': 0x2220, # angle, U+2220 ISOamso 'aring': 0x00e5, # latin small letter a with ring above = latin small letter a ring, U+00E5 ISOlat1 'asymp': 0x2248, # almost equal to = asymptotic to, U+2248 ISOamsr 'atilde': 0x00e3, # latin small letter a with tilde, U+00E3 ISOlat1 'auml': 0x00e4, # latin small letter a with diaeresis, U+00E4 ISOlat1 'bdquo': 0x201e, # double low-9 quotation mark, U+201E NEW 'beta': 0x03b2, # greek small letter beta, U+03B2 ISOgrk3 'brvbar': 0x00a6, # broken bar = broken vertical bar, U+00A6 ISOnum 'bull': 0x2022, # bullet = black small circle, U+2022 ISOpub 'cap': 0x2229, # intersection = cap, U+2229 ISOtech 'ccedil': 0x00e7, # latin small letter c with cedilla, U+00E7 ISOlat1 'cedil': 0x00b8, # cedilla = spacing cedilla, U+00B8 ISOdia 'cent': 0x00a2, # cent sign, U+00A2 ISOnum 'chi': 0x03c7, # greek small letter chi, U+03C7 ISOgrk3 'circ': 0x02c6, # modifier letter circumflex accent, U+02C6 ISOpub 'clubs': 0x2663, # black club suit = shamrock, U+2663 ISOpub 'cong': 0x2245, # approximately equal to, U+2245 ISOtech 'copy': 0x00a9, # copyright sign, U+00A9 ISOnum 'crarr': 0x21b5, # downwards arrow with corner leftwards = carriage return, U+21B5 NEW 'cup': 0x222a, # union = cup, U+222A ISOtech 'curren': 0x00a4, # currency sign, U+00A4 ISOnum 'dArr': 0x21d3, # downwards double arrow, U+21D3 ISOamsa 'dagger': 0x2020, # dagger, U+2020 
ISOpub 'darr': 0x2193, # downwards arrow, U+2193 ISOnum 'deg': 0x00b0, # degree sign, U+00B0 ISOnum 'delta': 0x03b4, # greek small letter delta, U+03B4 ISOgrk3 'diams': 0x2666, # black diamond suit, U+2666 ISOpub 'divide': 0x00f7, # division sign, U+00F7 ISOnum 'eacute': 0x00e9, # latin small letter e with acute, U+00E9 ISOlat1 'ecirc': 0x00ea, # latin small letter e with circumflex, U+00EA ISOlat1 'egrave': 0x00e8, # latin small letter e with grave, U+00E8 ISOlat1 'empty': 0x2205, # empty set = null set = diameter, U+2205 ISOamso 'emsp': 0x2003, # em space, U+2003 ISOpub 'ensp': 0x2002, # en space, U+2002 ISOpub 'epsilon': 0x03b5, # greek small letter epsilon, U+03B5 ISOgrk3 'equiv': 0x2261, # identical to, U+2261 ISOtech 'eta': 0x03b7, # greek small letter eta, U+03B7 ISOgrk3 'eth': 0x00f0, # latin small letter eth, U+00F0 ISOlat1 'euml': 0x00eb, # latin small letter e with diaeresis, U+00EB ISOlat1 'euro': 0x20ac, # euro sign, U+20AC NEW 'exist': 0x2203, # there exists, U+2203 ISOtech 'fnof': 0x0192, # latin small f with hook = function = florin, U+0192 ISOtech 'forall': 0x2200, # for all, U+2200 ISOtech 'frac12': 0x00bd, # vulgar fraction one half = fraction one half, U+00BD ISOnum 'frac14': 0x00bc, # vulgar fraction one quarter = fraction one quarter, U+00BC ISOnum 'frac34': 0x00be, # vulgar fraction three quarters = fraction three quarters, U+00BE ISOnum 'frasl': 0x2044, # fraction slash, U+2044 NEW 'gamma': 0x03b3, # greek small letter gamma, U+03B3 ISOgrk3 'ge': 0x2265, # greater-than or equal to, U+2265 ISOtech 'gt': 0x003e, # greater-than sign, U+003E ISOnum 'hArr': 0x21d4, # left right double arrow, U+21D4 ISOamsa 'harr': 0x2194, # left right arrow, U+2194 ISOamsa 'hearts': 0x2665, # black heart suit = valentine, U+2665 ISOpub 'hellip': 0x2026, # horizontal ellipsis = three dot leader, U+2026 ISOpub 'iacute': 0x00ed, # latin small letter i with acute, U+00ED ISOlat1 'icirc': 0x00ee, # latin small letter i with circumflex, U+00EE ISOlat1 'iexcl': 0x00a1, 
# inverted exclamation mark, U+00A1 ISOnum 'igrave': 0x00ec, # latin small letter i with grave, U+00EC ISOlat1 'image': 0x2111, # blackletter capital I = imaginary part, U+2111 ISOamso 'infin': 0x221e, # infinity, U+221E ISOtech 'int': 0x222b, # integral, U+222B ISOtech 'iota': 0x03b9, # greek small letter iota, U+03B9 ISOgrk3 'iquest': 0x00bf, # inverted question mark = turned question mark, U+00BF ISOnum 'isin': 0x2208, # element of, U+2208 ISOtech 'iuml': 0x00ef, # latin small letter i with diaeresis, U+00EF ISOlat1 'kappa': 0x03ba, # greek small letter kappa, U+03BA ISOgrk3 'lArr': 0x21d0, # leftwards double arrow, U+21D0 ISOtech 'lambda': 0x03bb, # greek small letter lambda, U+03BB ISOgrk3 'lang': 0x2329, # left-pointing angle bracket = bra, U+2329 ISOtech 'laquo': 0x00ab, # left-pointing double angle quotation mark = left pointing guillemet, U+00AB ISOnum 'larr': 0x2190, # leftwards arrow, U+2190 ISOnum 'lceil': 0x2308, # left ceiling = apl upstile, U+2308 ISOamsc 'ldquo': 0x201c, # left double quotation mark, U+201C ISOnum 'le': 0x2264, # less-than or equal to, U+2264 ISOtech 'lfloor': 0x230a, # left floor = apl downstile, U+230A ISOamsc 'lowast': 0x2217, # asterisk operator, U+2217 ISOtech 'loz': 0x25ca, # lozenge, U+25CA ISOpub 'lrm': 0x200e, # left-to-right mark, U+200E NEW RFC 2070 'lsaquo': 0x2039, # single left-pointing angle quotation mark, U+2039 ISO proposed 'lsquo': 0x2018, # left single quotation mark, U+2018 ISOnum 'lt': 0x003c, # less-than sign, U+003C ISOnum 'macr': 0x00af, # macron = spacing macron = overline = APL overbar, U+00AF ISOdia 'mdash': 0x2014, # em dash, U+2014 ISOpub 'micro': 0x00b5, # micro sign, U+00B5 ISOnum 'middot': 0x00b7, # middle dot = Georgian comma = Greek middle dot, U+00B7 ISOnum 'minus': 0x2212, # minus sign, U+2212 ISOtech 'mu': 0x03bc, # greek small letter mu, U+03BC ISOgrk3 'nabla': 0x2207, # nabla = backward difference, U+2207 ISOtech 'nbsp': 0x00a0, # no-break space = non-breaking space, U+00A0 ISOnum 'ndash': 
0x2013, # en dash, U+2013 ISOpub 'ne': 0x2260, # not equal to, U+2260 ISOtech 'ni': 0x220b, # contains as member, U+220B ISOtech 'not': 0x00ac, # not sign, U+00AC ISOnum 'notin': 0x2209, # not an element of, U+2209 ISOtech 'nsub': 0x2284, # not a subset of, U+2284 ISOamsn 'ntilde': 0x00f1, # latin small letter n with tilde, U+00F1 ISOlat1 'nu': 0x03bd, # greek small letter nu, U+03BD ISOgrk3 'oacute': 0x00f3, # latin small letter o with acute, U+00F3 ISOlat1 'ocirc': 0x00f4, # latin small letter o with circumflex, U+00F4 ISOlat1 'oelig': 0x0153, # latin small ligature oe, U+0153 ISOlat2 'ograve': 0x00f2, # latin small letter o with grave, U+00F2 ISOlat1 'oline': 0x203e, # overline = spacing overscore, U+203E NEW 'omega': 0x03c9, # greek small letter omega, U+03C9 ISOgrk3 'omicron': 0x03bf, # greek small letter omicron, U+03BF NEW 'oplus': 0x2295, # circled plus = direct sum, U+2295 ISOamsb 'or': 0x2228, # logical or = vee, U+2228 ISOtech 'ordf': 0x00aa, # feminine ordinal indicator, U+00AA ISOnum 'ordm': 0x00ba, # masculine ordinal indicator, U+00BA ISOnum 'oslash': 0x00f8, # latin small letter o with stroke, = latin small letter o slash, U+00F8 ISOlat1 'otilde': 0x00f5, # latin small letter o with tilde, U+00F5 ISOlat1 'otimes': 0x2297, # circled times = vector product, U+2297 ISOamsb 'ouml': 0x00f6, # latin small letter o with diaeresis, U+00F6 ISOlat1 'para': 0x00b6, # pilcrow sign = paragraph sign, U+00B6 ISOnum 'part': 0x2202, # partial differential, U+2202 ISOtech 'permil': 0x2030, # per mille sign, U+2030 ISOtech 'perp': 0x22a5, # up tack = orthogonal to = perpendicular, U+22A5 ISOtech 'phi': 0x03c6, # greek small letter phi, U+03C6 ISOgrk3 'pi': 0x03c0, # greek small letter pi, U+03C0 ISOgrk3 'piv': 0x03d6, # greek pi symbol, U+03D6 ISOgrk3 'plusmn': 0x00b1, # plus-minus sign = plus-or-minus sign, U+00B1 ISOnum 'pound': 0x00a3, # pound sign, U+00A3 ISOnum 'prime': 0x2032, # prime = minutes = feet, U+2032 ISOtech 'prod': 0x220f, # n-ary product = product 
sign, U+220F ISOamsb 'prop': 0x221d, # proportional to, U+221D ISOtech 'psi': 0x03c8, # greek small letter psi, U+03C8 ISOgrk3 'quot': 0x0022, # quotation mark = APL quote, U+0022 ISOnum 'rArr': 0x21d2, # rightwards double arrow, U+21D2 ISOtech 'radic': 0x221a, # square root = radical sign, U+221A ISOtech 'rang': 0x232a, # right-pointing angle bracket = ket, U+232A ISOtech 'raquo': 0x00bb, # right-pointing double angle quotation mark = right pointing guillemet, U+00BB ISOnum 'rarr': 0x2192, # rightwards arrow, U+2192 ISOnum 'rceil': 0x2309, # right ceiling, U+2309 ISOamsc 'rdquo': 0x201d, # right double quotation mark, U+201D ISOnum 'real': 0x211c, # blackletter capital R = real part symbol, U+211C ISOamso 'reg': 0x00ae, # registered sign = registered trade mark sign, U+00AE ISOnum 'rfloor': 0x230b, # right floor, U+230B ISOamsc 'rho': 0x03c1, # greek small letter rho, U+03C1 ISOgrk3 'rlm': 0x200f, # right-to-left mark, U+200F NEW RFC 2070 'rsaquo': 0x203a, # single right-pointing angle quotation mark, U+203A ISO proposed 'rsquo': 0x2019, # right single quotation mark, U+2019 ISOnum 'sbquo': 0x201a, # single low-9 quotation mark, U+201A NEW 'scaron': 0x0161, # latin small letter s with caron, U+0161 ISOlat2 'sdot': 0x22c5, # dot operator, U+22C5 ISOamsb 'sect': 0x00a7, # section sign, U+00A7 ISOnum 'shy': 0x00ad, # soft hyphen = discretionary hyphen, U+00AD ISOnum 'sigma': 0x03c3, # greek small letter sigma, U+03C3 ISOgrk3 'sigmaf': 0x03c2, # greek small letter final sigma, U+03C2 ISOgrk3 'sim': 0x223c, # tilde operator = varies with = similar to, U+223C ISOtech 'spades': 0x2660, # black spade suit, U+2660 ISOpub 'sub': 0x2282, # subset of, U+2282 ISOtech 'sube': 0x2286, # subset of or equal to, U+2286 ISOtech 'sum': 0x2211, # n-ary sumation, U+2211 ISOamsb 'sup': 0x2283, # superset of, U+2283 ISOtech 'sup1': 0x00b9, # superscript one = superscript digit one, U+00B9 ISOnum 'sup2': 0x00b2, # superscript two = superscript digit two = squared, U+00B2 ISOnum 'sup3': 
0x00b3, # superscript three = superscript digit three = cubed, U+00B3 ISOnum 'supe': 0x2287, # superset of or equal to, U+2287 ISOtech 'szlig': 0x00df, # latin small letter sharp s = ess-zed, U+00DF ISOlat1 'tau': 0x03c4, # greek small letter tau, U+03C4 ISOgrk3 'there4': 0x2234, # therefore, U+2234 ISOtech 'theta': 0x03b8, # greek small letter theta, U+03B8 ISOgrk3 'thetasym': 0x03d1, # greek small letter theta symbol, U+03D1 NEW 'thinsp': 0x2009, # thin space, U+2009 ISOpub 'thorn': 0x00fe, # latin small letter thorn with, U+00FE ISOlat1 'tilde': 0x02dc, # small tilde, U+02DC ISOdia 'times': 0x00d7, # multiplication sign, U+00D7 ISOnum 'trade': 0x2122, # trade mark sign, U+2122 ISOnum 'uArr': 0x21d1, # upwards double arrow, U+21D1 ISOamsa 'uacute': 0x00fa, # latin small letter u with acute, U+00FA ISOlat1 'uarr': 0x2191, # upwards arrow, U+2191 ISOnum 'ucirc': 0x00fb, # latin small letter u with circumflex, U+00FB ISOlat1 'ugrave': 0x00f9, # latin small letter u with grave, U+00F9 ISOlat1 'uml': 0x00a8, # diaeresis = spacing diaeresis, U+00A8 ISOdia 'upsih': 0x03d2, # greek upsilon with hook symbol, U+03D2 NEW 'upsilon': 0x03c5, # greek small letter upsilon, U+03C5 ISOgrk3 'uuml': 0x00fc, # latin small letter u with diaeresis, U+00FC ISOlat1 'weierp': 0x2118, # script capital P = power set = Weierstrass p, U+2118 ISOamso 'xi': 0x03be, # greek small letter xi, U+03BE ISOgrk3 'yacute': 0x00fd, # latin small letter y with acute, U+00FD ISOlat1 'yen': 0x00a5, # yen sign = yuan sign, U+00A5 ISOnum 'yuml': 0x00ff, # latin small letter y with diaeresis, U+00FF ISOlat1 'zeta': 0x03b6, # greek small letter zeta, U+03B6 ISOgrk3 'zwj': 0x200d, # zero width joiner, U+200D NEW RFC 2070 'zwnj': 0x200c, # zero width non-joiner, U+200C NEW RFC 2070 } # maps the Unicode codepoint to the HTML entity name codepoint2name = {} # maps the HTML entity name to the character # (or a character reference if the character is outside the Latin-1 range) entitydefs = {} for (name, codepoint) 
in name2codepoint.iteritems(): codepoint2name[codepoint] = name if codepoint <= 0xff: entitydefs[name] = chr(codepoint) else: entitydefs[name] = '&#%d;' % codepoint del name, codepoint
mit
matsumoto-r/synciga
src/tools/gyp/pylib/gyp/ninja_syntax.py
217
5286
# This file comes from
#   https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
# Do not edit! Edit the upstream one instead.

"""Python module for generating .ninja files.

Note that this is emphatically not a required piece of Ninja; it's
just a helpful utility for build-file-generation systems that already
use Python.
"""

import textwrap
import re


def escape_path(word):
    """Escape a path for use in a build line.

    '$ ' must be doubled first so a pre-escaped space is not re-escaped
    by the following plain-space replacement.
    """
    return word.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:')


class Writer(object):
    """Emits ninja syntax to ``output``, word-wrapping at ``width`` columns."""

    def __init__(self, output, width=78):
        self.output = output
        self.width = width

    def newline(self):
        self.output.write('\n')

    def comment(self, text):
        # Wrap at width-2 to leave room for the leading '# '.
        for line in textwrap.wrap(text, self.width - 2):
            self.output.write('# ' + line + '\n')

    def variable(self, key, value, indent=0):
        """Write ``key = value``; a list value is space-joined, None is skipped."""
        if value is None:
            return
        if isinstance(value, list):
            value = ' '.join(filter(None, value))  # Filter out empty strings.
        self._line('%s = %s' % (key, value), indent)

    def rule(self, name, command, description=None, depfile=None,
             generator=False, restat=False, rspfile=None,
             rspfile_content=None):
        """Write a ``rule`` block with its indented variable bindings."""
        self._line('rule %s' % name)
        self.variable('command', command, indent=1)
        if description:
            self.variable('description', description, indent=1)
        if depfile:
            self.variable('depfile', depfile, indent=1)
        if generator:
            self.variable('generator', '1', indent=1)
        if restat:
            self.variable('restat', '1', indent=1)
        if rspfile:
            self.variable('rspfile', rspfile, indent=1)
        if rspfile_content:
            self.variable('rspfile_content', rspfile_content, indent=1)

    def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
              variables=None):
        """Write a ``build`` line; returns the (un-escaped) output list."""
        outputs = self._as_list(outputs)
        all_inputs = self._as_list(inputs)[:]
        out_outputs = list(map(escape_path, outputs))
        all_inputs = list(map(escape_path, all_inputs))

        if implicit:
            implicit = map(escape_path, self._as_list(implicit))
            all_inputs.append('|')
            all_inputs.extend(implicit)
        if order_only:
            order_only = map(escape_path, self._as_list(order_only))
            all_inputs.append('||')
            all_inputs.extend(order_only)

        self._line('build %s: %s %s' % (' '.join(out_outputs),
                                        rule,
                                        ' '.join(all_inputs)))

        if variables:
            if isinstance(variables, dict):
                # FIX: dict.iteritems() is Python-2-only and raises
                # AttributeError on Python 3; items() iterates identically
                # on both interpreters.
                iterator = iter(variables.items())
            else:
                iterator = iter(variables)

            for key, val in iterator:
                self.variable(key, val, indent=1)

        return outputs

    def include(self, path):
        self._line('include %s' % path)

    def subninja(self, path):
        self._line('subninja %s' % path)

    def default(self, paths):
        self._line('default %s' % ' '.join(self._as_list(paths)))

    def _count_dollars_before_index(self, s, i):
        """Returns the number of '$' characters right in front of s[i]."""
        dollar_count = 0
        dollar_index = i - 1
        while dollar_index > 0 and s[dollar_index] == '$':
            dollar_count += 1
            dollar_index -= 1
        return dollar_count

    def _line(self, text, indent=0):
        """Write 'text' word-wrapped at self.width characters."""
        leading_space = '  ' * indent
        while len(leading_space) + len(text) > self.width:
            # The text is too wide; wrap if possible.

            # Find the rightmost space that would obey our width constraint
            # and that's not an escaped space.
            available_space = self.width - len(leading_space) - len(' $')
            space = available_space
            while True:
                space = text.rfind(' ', 0, space)
                if space < 0 or \
                   self._count_dollars_before_index(text, space) % 2 == 0:
                    break

            if space < 0:
                # No such space; just use the first unescaped space we can
                # find.
                space = available_space - 1
                while True:
                    space = text.find(' ', space + 1)
                    if space < 0 or \
                       self._count_dollars_before_index(text, space) % 2 == 0:
                        break
            if space < 0:
                # Give up on breaking.
                break

            self.output.write(leading_space + text[0:space] + ' $\n')
            text = text[space + 1:]

            # Subsequent lines are continuations, so indent them.
            leading_space = '  ' * (indent + 2)

        self.output.write(leading_space + text + '\n')

    def _as_list(self, input):
        # Normalize None/scalar/list to a list.
        if input is None:
            return []
        if isinstance(input, list):
            return input
        return [input]


def escape(string):
    """Escape a string such that it can be embedded into a Ninja file
    without further interpretation."""
    assert '\n' not in string, 'Ninja syntax does not allow newlines'
    # We only have one special metacharacter: '$'.
    return string.replace('$', '$$')
bsd-3-clause
Jon-ICS/upm
examples/python/ads1115.py
7
3049
# Author: Mihai Tudor Panu <mihai.tudor.panu@intel.com>
# Copyright (c) 2016 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

# This example demonstrates how to use one of the ADS1115 ADCs on the
# DFRobot Joule Shield with devices that output a small differential
# voltage (e.g. geophones, piezoelectric bands or pads, thermocouples).
from __future__ import print_function

from threading import Timer
from time import sleep

from upm import pyupm_ads1x15 as upm


def stop():
    # Timer callback: flip the module-level flag that ends the sampling loop.
    global running
    running = False


def main():
    global running
    running = True
    fileName = './ads1115.data'  # Output filename
    id = 0                       # Sample number

    # Initialize and configure the ADS1115 for the SM-24 Geophone
    # There are two ADS1115 chips on the DFRobot Joule Shield on the same
    # I2C bus:
    # - 0x48 gives access to pins A0 - A3
    # - 0x49 gives access to pins A4 - A7
    ads1115 = upm.ADS1115(0, 0x48)

    # Put the ADC into differential mode for pins A0 and A1,
    # the SM-24 Geophone is connected to these pins
    ads1115.getSample(upm.ADS1X15.DIFF_0_1)

    # Set the gain based on expected VIN range to -/+ 2.048 V
    # Can be adjusted based on application to as low as -/+ 0.256 V, see API
    # documentation for details
    ads1115.setGain(upm.ADS1X15.GAIN_TWO)

    # Set the sample rate to 860 samples per second (max) and turn on
    # continuous sampling
    ads1115.setSPS(upm.ADS1115.SPS_860)
    ads1115.setContinuous(True)

    # Open the output file
    try:
        f = open(fileName, 'w')
    except OSError as e:
        print('Cannot open output file:', e)
        return

    # Setup a timer to stop logging after 10 seconds
    t = Timer(10, stop)
    t.start()

    # Read sensor and write to file every 1 ms
    while running:
        f.write(str(id) + ' %.7f' % ads1115.getLastSample() + '\n')
        id += 1
        sleep(0.001)

    # Close and exit
    f.close()
    print('Wrote', id, 'samples to file:', fileName)
    return


if __name__ == '__main__':
    main()
mit
drufat/vispy
vispy/app/_default_app.py
21
2422
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.

from .application import Application

# Initialize default app
# Only for use within *this* module.
# One should always call use_app() to obtain the default app.
default_app = None


def use_app(backend_name=None, call_reuse=True):
    """ Get/create the default Application object

    It is safe to call this function multiple times, as long as
    backend_name is None or matches the already selected backend.

    Parameters
    ----------
    backend_name : str | None
        The name of the backend application to use. If not specified,
        Vispy tries to select a backend automatically. See ``vispy.use()``
        for details.
    call_reuse : bool
        Whether to call the backend's `reuse()` function (True by default).
        Not implemented by default, but some backends need it. For example,
        the notebook backends need to inject some JavaScript in a notebook as
        soon as `use_app()` is called.
    """
    global default_app

    # If we already have a default_app, raise error or return
    if default_app is not None:
        names = default_app.backend_name.lower().replace('(', ' ').strip(') ')
        names = [name for name in names.split(' ') if name]
        if backend_name and backend_name.lower() not in names:
            raise RuntimeError('Can only select a backend once, already using '
                               '%s.' % names)
        else:
            if call_reuse:
                default_app.reuse()
            return default_app  # Current backend matches backend_name

    # Create default app
    default_app = Application(backend_name)
    return default_app


def create():
    """Create the native application.
    """
    use_app(call_reuse=False)
    return default_app.create()


def run():
    """Enter the native GUI event loop.
    """
    use_app(call_reuse=False)
    return default_app.run()


def quit():
    """Quit the native GUI event loop.
    """
    use_app(call_reuse=False)
    return default_app.quit()


def process_events():
    """Process all pending GUI events

    If the mainloop is not running, this should be done regularly to
    keep the visualization interactive and to keep the event system going.
    """
    use_app(call_reuse=False)
    return default_app.process_events()
bsd-3-clause
ihsanudin/odoo
openerp/addons/test_access_rights/tests/test_ir_rules.py
299
1220
import openerp.exceptions
from openerp.tests.common import TransactionCase


class TestRules(TransactionCase):
    """Check that a global ir.rule domain filters records per-user."""

    def setUp(self):
        super(TestRules, self).setUp()

        self.id1 = self.env['test_access_right.some_obj']\
            .create({'val': 1}).id
        self.id2 = self.env['test_access_right.some_obj']\
            .create({'val': -1}).id
        # create a global rule forbidding access to records with a negative
        # (or zero) val
        self.env['ir.rule'].create({
            'name': 'Forbid negatives',
            'model_id': self.browse_ref(
                'test_access_rights.model_test_access_right_some_obj').id,
            'domain_force': "[('val', '>', 0)]",
        })

    def test_basic_access(self):
        env = self.env(user=self.browse_ref('base.public_user'))

        # put forbidden record in cache
        browse2 = env['test_access_right.some_obj'].browse(self.id2)
        # this is the one we want
        browse1 = env['test_access_right.some_obj'].browse(self.id1)

        # this should not blow up
        self.assertEqual(browse1.val, 1)

        # but this should
        with self.assertRaises(openerp.exceptions.AccessError):
            self.assertEqual(browse2.val, -1)
agpl-3.0
reeshupatel/demo
keystone/openstack/common/lockutils.py
1
12121
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import contextlib
import errno
import functools
import os
import shutil
import subprocess
import sys
import tempfile
import threading
import time
import weakref

from oslo.config import cfg

from keystone.openstack.common import fileutils
from keystone.openstack.common.gettextutils import _, _LE, _LI
from keystone.openstack.common import log as logging


LOG = logging.getLogger(__name__)


util_opts = [
    cfg.BoolOpt('disable_process_locking', default=False,
                help='Enables or disables inter-process locks.'),
    cfg.StrOpt('lock_path',
               default=os.environ.get("KEYSTONE_LOCK_PATH"),
               help='Directory to use for lock files.')
]


CONF = cfg.CONF
CONF.register_opts(util_opts)


def set_defaults(lock_path):
    cfg.set_defaults(util_opts, lock_path=lock_path)


class _FileLock(object):
    """Lock implementation which allows multiple locks, working around
    issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does
    not require any cleanup. Since the lock is always held on a file
    descriptor rather than outside of the process, the lock gets dropped
    automatically if the process crashes, even if __exit__ is not executed.

    There are no guarantees regarding usage by multiple green threads in a
    single process here. This lock works only between processes. Exclusive
    access between local threads should be achieved using the semaphores
    in the @synchronized decorator.

    Note these locks are released when the descriptor is closed, so it's
    not safe to close the file descriptor while another green thread holds
    the lock. Just opening and closing the lock file can break
    synchronisation, so lock files must be accessed only using this
    abstraction.
    """

    def __init__(self, name):
        self.lockfile = None
        self.fname = name

    def acquire(self):
        basedir = os.path.dirname(self.fname)

        if not os.path.exists(basedir):
            fileutils.ensure_tree(basedir)
            LOG.info(_LI('Created lock path: %s'), basedir)

        self.lockfile = open(self.fname, 'w')

        while True:
            try:
                # Using non-blocking locks since green threads are not
                # patched to deal with blocking locking calls.
                # Also upon reading the MSDN docs for locking(), it seems
                # to have a laughable 10 attempts "blocking" mechanism.
                self.trylock()
                LOG.debug('Got file lock "%s"', self.fname)
                return True
            except IOError as e:
                if e.errno in (errno.EACCES, errno.EAGAIN):
                    # external locks synchronise things like iptables
                    # updates - give it some time to prevent busy spinning
                    time.sleep(0.01)
                else:
                    raise threading.ThreadError(_("Unable to acquire lock on"
                                                  " `%(filename)s` due to"
                                                  " %(exception)s") %
                                                {
                                                    'filename': self.fname,
                                                    'exception': e,
                                                })

    def __enter__(self):
        self.acquire()
        return self

    def release(self):
        try:
            self.unlock()
            self.lockfile.close()
            LOG.debug('Released file lock "%s"', self.fname)
        except IOError:
            LOG.exception(_LE("Could not release the acquired lock `%s`"),
                          self.fname)

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.release()

    def exists(self):
        return os.path.exists(self.fname)

    def trylock(self):
        raise NotImplementedError()

    def unlock(self):
        raise NotImplementedError()


class _WindowsLock(_FileLock):
    def trylock(self):
        msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)

    def unlock(self):
        msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)


class _FcntlLock(_FileLock):
    def trylock(self):
        fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)

    def unlock(self):
        fcntl.lockf(self.lockfile, fcntl.LOCK_UN)


class _PosixLock(object):
    def __init__(self, name):
        # Hash the name because it's not valid to have POSIX semaphore
        # names with things like / in them. Then use base64 to encode
        # the digest() instead taking the hexdigest() because the
        # result is shorter and most systems can't have shm sempahore
        # names longer than 31 characters.
        h = hashlib.sha1()
        h.update(name.encode('ascii'))
        self.name = str((b'/' + base64.urlsafe_b64encode(
            h.digest())).decode('ascii'))

    def acquire(self, timeout=None):
        self.semaphore = posix_ipc.Semaphore(self.name,
                                             flags=posix_ipc.O_CREAT,
                                             initial_value=1)
        self.semaphore.acquire(timeout)
        return self

    def __enter__(self):
        self.acquire()
        return self

    def release(self):
        self.semaphore.release()
        self.semaphore.close()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.release()

    def exists(self):
        try:
            semaphore = posix_ipc.Semaphore(self.name)
        except posix_ipc.ExistentialError:
            return False
        else:
            semaphore.close()
        return True


if os.name == 'nt':
    import msvcrt
    InterProcessLock = _WindowsLock
    FileLock = _WindowsLock
else:
    import base64
    import fcntl
    import hashlib

    import posix_ipc
    InterProcessLock = _PosixLock
    FileLock = _FcntlLock

_semaphores = weakref.WeakValueDictionary()
_semaphores_lock = threading.Lock()


def _get_lock_path(name, lock_file_prefix, lock_path=None):
    # NOTE(mikal): the lock name cannot contain directory
    # separators
    name = name.replace(os.sep, '_')
    if lock_file_prefix:
        sep = '' if lock_file_prefix.endswith('-') else '-'
        name = '%s%s%s' % (lock_file_prefix, sep, name)

    local_lock_path = lock_path or CONF.lock_path

    if not local_lock_path:
        # NOTE(bnemec): Create a fake lock path for posix locks so we don't
        # unnecessarily raise the RequiredOptError below.
        if InterProcessLock is not _PosixLock:
            raise cfg.RequiredOptError('lock_path')
        local_lock_path = 'posixlock:/'

    return os.path.join(local_lock_path, name)


def external_lock(name, lock_file_prefix=None, lock_path=None):
    LOG.debug('Attempting to grab external lock "%(lock)s"',
              {'lock': name})

    lock_file_path = _get_lock_path(name, lock_file_prefix, lock_path)

    # NOTE(bnemec): If an explicit lock_path was passed to us then it
    # means the caller is relying on file-based locking behavior, so
    # we can't use posix locks for those calls.
    if lock_path:
        return FileLock(lock_file_path)
    return InterProcessLock(lock_file_path)


def remove_external_lock_file(name, lock_file_prefix=None):
    """Remove an external lock file when it's not used anymore

    This will be helpful when we have a lot of lock files
    """
    with internal_lock(name):
        lock_file_path = _get_lock_path(name, lock_file_prefix)
        try:
            os.remove(lock_file_path)
        except OSError:
            LOG.info(_LI('Failed to remove file %(file)s'),
                     {'file': lock_file_path})


def internal_lock(name):
    with _semaphores_lock:
        try:
            sem = _semaphores[name]
        except KeyError:
            sem = threading.Semaphore()
            _semaphores[name] = sem

    LOG.debug('Got semaphore "%(lock)s"', {'lock': name})
    return sem


@contextlib.contextmanager
def lock(name, lock_file_prefix=None, external=False, lock_path=None):
    """Context based lock

    This function yields a `threading.Semaphore` instance (if we don't use
    eventlet.monkey_patch(), else `semaphore.Semaphore`) unless external is
    True, in which case, it'll yield an InterProcessLock instance.

    :param lock_file_prefix: The lock_file_prefix argument is used to
        provide lock files on disk with a meaningful prefix.

    :param external: The external keyword argument denotes whether this lock
        should work across multiple processes. This means that if two
        different workers both run a method decorated with
        @synchronized('mylock', external=True), only one of them will
        execute at a time.
    """
    int_lock = internal_lock(name)
    with int_lock:
        if external and not CONF.disable_process_locking:
            ext_lock = external_lock(name, lock_file_prefix, lock_path)
            with ext_lock:
                yield ext_lock
        else:
            yield int_lock
    LOG.debug('Released semaphore "%(lock)s"', {'lock': name})


def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):
    """Synchronization decorator.

    Decorating a method like so::

        @synchronized('mylock')
        def foo(self, *args):
           ...

    ensures that only one thread will execute the foo method at a time.

    Different methods can share the same lock::

        @synchronized('mylock')
        def foo(self, *args):
           ...

        @synchronized('mylock')
        def bar(self, *args):
           ...

    This way only one of either foo or bar can be executing at a time.
    """

    def wrap(f):
        @functools.wraps(f)
        def inner(*args, **kwargs):
            try:
                with lock(name, lock_file_prefix, external, lock_path):
                    LOG.debug('Got semaphore / lock "%(function)s"',
                              {'function': f.__name__})
                    return f(*args, **kwargs)
            finally:
                LOG.debug('Semaphore / lock released "%(function)s"',
                          {'function': f.__name__})
        return inner
    return wrap


def synchronized_with_prefix(lock_file_prefix):
    """Partial object generator for the synchronization decorator.

    Redefine @synchronized in each project like so::

        (in nova/utils.py)
        from nova.openstack.common import lockutils

        synchronized = lockutils.synchronized_with_prefix('nova-')

        (in nova/foo.py)
        from nova import utils

        @utils.synchronized('mylock')
        def bar(self, *args):
           ...

    The lock_file_prefix argument is used to provide lock files on disk with
    a meaningful prefix.
    """
    return functools.partial(synchronized, lock_file_prefix=lock_file_prefix)


def main(argv):
    """Create a dir for locks and pass it to command from arguments

    If you run this:
        python -m openstack.common.lockutils python setup.py testr <etc>

    a temporary directory will be created for all your locks and passed to
    all your tests in an environment variable. The temporary dir will be
    deleted afterwards and the return value will be preserved.
    """
    lock_dir = tempfile.mkdtemp()
    os.environ["KEYSTONE_LOCK_PATH"] = lock_dir
    try:
        ret_val = subprocess.call(argv[1:])
    finally:
        shutil.rmtree(lock_dir, ignore_errors=True)
    return ret_val


if __name__ == '__main__':
    sys.exit(main(sys.argv))
apache-2.0
shubhdev/edxOnBaadal
common/djangoapps/third_party_auth/settings.py
76
3717
"""Settings for the third-party auth module.

The flow for settings registration is:

The base settings file contains a boolean, ENABLE_THIRD_PARTY_AUTH,
indicating whether this module is enabled. startup.py probes the
ENABLE_THIRD_PARTY_AUTH. If true, it:

    a) loads this module.
    b) calls apply_settings(), passing in the Django settings
"""

# Whitelisted query params kept in the pipeline session (see apply_settings).
_FIELDS_STORED_IN_SESSION = ['auth_entry', 'next']
_MIDDLEWARE_CLASSES = (
    'third_party_auth.middleware.ExceptionMiddleware',
)
_SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/dashboard'


def apply_settings(django_settings):
    """Set provider-independent settings."""

    # Whitelisted URL query parameters retrained in the pipeline session.
    # Params not in this whitelist will be silently dropped.
    django_settings.FIELDS_STORED_IN_SESSION = _FIELDS_STORED_IN_SESSION

    # Register and configure python-social-auth with Django.
    django_settings.INSTALLED_APPS += (
        'social.apps.django_app.default',
        'third_party_auth',
    )

    # Inject exception middleware to make redirects fire.
    django_settings.MIDDLEWARE_CLASSES += _MIDDLEWARE_CLASSES

    # Where to send the user if there's an error during social authentication
    # and we cannot send them to a more specific URL
    # (see middleware.ExceptionMiddleware).
    django_settings.SOCIAL_AUTH_LOGIN_ERROR_URL = '/'

    # Where to send the user once social authentication is successful.
    django_settings.SOCIAL_AUTH_LOGIN_REDIRECT_URL = \
        _SOCIAL_AUTH_LOGIN_REDIRECT_URL

    # Inject our customized auth pipeline. All auth backends must work with
    # this pipeline.
    django_settings.SOCIAL_AUTH_PIPELINE = (
        'third_party_auth.pipeline.parse_query_params',
        'social.pipeline.social_auth.social_details',
        'social.pipeline.social_auth.social_uid',
        'social.pipeline.social_auth.auth_allowed',
        'social.pipeline.social_auth.social_user',
        'third_party_auth.pipeline.associate_by_email_if_login_api',
        'social.pipeline.user.get_username',
        'third_party_auth.pipeline.set_pipeline_timeout',
        'third_party_auth.pipeline.ensure_user_information',
        'social.pipeline.user.create_user',
        'social.pipeline.social_auth.associate_user',
        'social.pipeline.social_auth.load_extra_data',
        'social.pipeline.user.user_details',
        'third_party_auth.pipeline.set_logged_in_cookies',
        'third_party_auth.pipeline.login_analytics',
    )

    # Required so that we can use unmodified PSA OAuth2 backends:
    django_settings.SOCIAL_AUTH_STRATEGY = \
        'third_party_auth.strategy.ConfigurationModelStrategy'

    # We let the user specify their email address during signup.
    django_settings.SOCIAL_AUTH_PROTECTED_USER_FIELDS = ['email']

    # Disable exceptions by default for prod so you get redirect behavior
    # instead of a Django error page. During development you may want to
    # enable this when you want to get stack traces rather than redirections.
    django_settings.SOCIAL_AUTH_RAISE_EXCEPTIONS = False

    # Allow users to login using social auth even if their account is not
    # verified yet. The 'ensure_user_information' step controls this and only
    # allows brand new users to login without verification. Repeat logins are
    # not permitted until the account gets verified.
    django_settings.INACTIVE_USER_LOGIN = True
    django_settings.INACTIVE_USER_URL = '/auth/inactive'

    django_settings.SOCIAL_AUTH_UUID_LENGTH = 4

    # Context processors required under Django.
    django_settings.TEMPLATE_CONTEXT_PROCESSORS += (
        'social.apps.django_app.context_processors.backends',
        'social.apps.django_app.context_processors.login_redirect',
    )
agpl-3.0
madscatt/zazmol
src/python/extensions/dcdio/test_dcdio.py
3
2739
''' SASSIE Copyright (C) 2011 Joseph E. Curtis This program comes with ABSOLUTELY NO WARRANTY; This is free software, and you are welcome to redistribute it under certain conditions; see http://www.gnu.org/licenses/gpl-3.0.html for details. ''' import sys,numpy,time sys.path.append('./') import dcdio import sasmol A=sasmol.SasMol(0) A.read_pdb('min3.pdb') natoms = A.natoms() x=A.coor()[:,0] y=A.coor()[:,1] z=A.coor()[:,2] #x=[1.0,2.0,3.0] ; y=[1.0,2.0,3.0] ; z=[1.0,2.0,3.0] x=numpy.array(x,numpy.float32) y=numpy.array(y,numpy.float32) z=numpy.array(z,numpy.float32) filename='c7.dcd' fp=dcdio.open_dcd_write(filename) nset=200; istart=1 ; nsavc=1 ; delta=1.0 headerresult=dcdio.write_dcdheader(fp,filename,natoms,nset,istart,nsavc,delta) print 'writing '+str(nset)+' to disk' start_time=time.time() for blah in range(nset): print ".", sys.stdout.flush() x=x+5.0 y=y+5.0 z=z+5.0 stepresult=dcdio.write_dcdstep(fp,x,y,z,blah) end_time=time.time() dt=end_time-start_time print '\ntotal time = ',dt,' time per structure = ',dt/nset dcdio.close_dcd_write(fp) filename='200c.dcd' filename='c7.dcd' ifp=dcdio.open_dcd_read(filename) nnatoms=0 ; nset=0 ; istart=0 ; nsavc=0 ; delta=0.0 namnf=0 ; freeindexes=[] ; reverseEndian=0 ; charmm=0 print 'nnatoms = ',nnatoms print 'nset = ',nset print 'freeindexes = ',freeindexes readheaderresult,nnatoms,nset,istart,nsavc,delta,namnf,reverseEndian,charmm=dcdio.read_dcdheader(ifp) print 'read header result = ',readheaderresult print 'nnatoms = ',nnatoms print 'nset = ',nset print 'istart = ',istart print 'nsavc = ',nsavc print 'delta = ',delta print 'namnf = ',namnf print 'reverseEndian = ',reverseEndian print 'charmm = ',charmm x=numpy.zeros((nset,nnatoms),dtype=numpy.float32) y=numpy.zeros((nset,nnatoms),dtype=numpy.float32) z=numpy.zeros((nset,nnatoms),dtype=numpy.float32) num_fixed=0 ; first=1 result=1 i=0 #try: print 'reading dcd file' start_time=time.time() sum=0.0 for i in xrange(nset): print '.', sys.stdout.flush() 
read_start_time=time.time() tx=numpy.zeros(nnatoms,dtype=numpy.float32) ty=numpy.zeros(nnatoms,dtype=numpy.float32) tz=numpy.zeros(nnatoms,dtype=numpy.float32) result=dcdio.read_dcdstep(ifp,tx,ty,tz,num_fixed,i,reverseEndian,charmm) read_end_time=time.time() sum+=read_end_time-read_start_time x[i][:]=tx ; y[i][:]=ty ; z[i][:]=tz end_time=time.time() dt=end_time-start_time print '\nread total_time = ',sum,' time per structure = ',sum/nset print 'total_time = ',dt,' time per structure = ',dt/nset print 'ratio(total time) = ',dt/sum print 'ratio(per structure) = ',(dt/nset)/(sum/nset) #except: # # print " I failed :( " dcdio.close_dcd_read(ifp)
gpl-3.0
SurfasJones/djcmsrc3
venv/lib/python2.7/site-packages/pip/_vendor/distlib/metadata.py
164
36356
# -*- coding: utf-8 -*- # # Copyright (C) 2012 The Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # """Implementation of the Metadata for Python packages PEPs. Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental). """ from __future__ import unicode_literals import codecs from email import message_from_file import json import logging import re from . import DistlibException, __version__ from .compat import StringIO, string_types, text_type from .markers import interpret from .util import extract_by_key, get_extras from .version import get_scheme, PEP426_VERSION_RE logger = logging.getLogger(__name__) class MetadataMissingError(DistlibException): """A required metadata is missing""" class MetadataConflictError(DistlibException): """Attempt to read or write metadata fields that are conflictual.""" class MetadataUnrecognizedVersionError(DistlibException): """Unknown metadata version number.""" class MetadataInvalidError(DistlibException): """A metadata value is invalid""" # public API of this module __all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] # Encoding used for the PKG-INFO files PKG_INFO_ENCODING = 'utf-8' # preferred version. 
Hopefully will be changed # to 1.2 once PEP 345 is supported everywhere PKG_INFO_PREFERRED_VERSION = '1.1' _LINE_PREFIX = re.compile('\n \|') _241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Summary', 'Description', 'Keywords', 'Home-page', 'Author', 'Author-email', 'License') _314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description', 'Keywords', 'Home-page', 'Author', 'Author-email', 'License', 'Classifier', 'Download-URL', 'Obsoletes', 'Provides', 'Requires') _314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', 'Download-URL') _345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description', 'Keywords', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', 'Classifier', 'Download-URL', 'Obsoletes-Dist', 'Project-URL', 'Provides-Dist', 'Requires-Dist', 'Requires-Python', 'Requires-External') _345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', 'Obsoletes-Dist', 'Requires-External', 'Maintainer', 'Maintainer-email', 'Project-URL') _426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description', 'Keywords', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', 'Classifier', 'Download-URL', 'Obsoletes-Dist', 'Project-URL', 'Provides-Dist', 'Requires-Dist', 'Requires-Python', 'Requires-External', 'Private-Version', 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', 'Provides-Extra') _426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension') _ALL_FIELDS = set() _ALL_FIELDS.update(_241_FIELDS) _ALL_FIELDS.update(_314_FIELDS) _ALL_FIELDS.update(_345_FIELDS) _ALL_FIELDS.update(_426_FIELDS) EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') def _version2fieldlist(version): if version == '1.0': return _241_FIELDS elif version == '1.1': return _314_FIELDS elif 
version == '1.2': return _345_FIELDS elif version == '2.0': return _426_FIELDS raise MetadataUnrecognizedVersionError(version) def _best_version(fields): """Detect the best version depending on the fields used.""" def _has_marker(keys, markers): for marker in markers: if marker in keys: return True return False keys = [] for key, value in fields.items(): if value in ([], 'UNKNOWN', None): continue keys.append(key) possible_versions = ['1.0', '1.1', '1.2', '2.0'] # first let's try to see if a field is not part of one of the version for key in keys: if key not in _241_FIELDS and '1.0' in possible_versions: possible_versions.remove('1.0') if key not in _314_FIELDS and '1.1' in possible_versions: possible_versions.remove('1.1') if key not in _345_FIELDS and '1.2' in possible_versions: possible_versions.remove('1.2') if key not in _426_FIELDS and '2.0' in possible_versions: possible_versions.remove('2.0') # possible_version contains qualified versions if len(possible_versions) == 1: return possible_versions[0] # found ! 
elif len(possible_versions) == 0: raise MetadataConflictError('Unknown metadata set') # let's see if one unique marker is found is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) if int(is_1_1) + int(is_1_2) + int(is_2_0) > 1: raise MetadataConflictError('You used incompatible 1.1/1.2/2.0 fields') # we have the choice, 1.0, or 1.2, or 2.0 # - 1.0 has a broken Summary field but works with all tools # - 1.1 is to avoid # - 1.2 fixes Summary but has little adoption # - 2.0 adds more features and is very new if not is_1_1 and not is_1_2 and not is_2_0: # we couldn't find any specific marker if PKG_INFO_PREFERRED_VERSION in possible_versions: return PKG_INFO_PREFERRED_VERSION if is_1_1: return '1.1' if is_1_2: return '1.2' return '2.0' _ATTR2FIELD = { 'metadata_version': 'Metadata-Version', 'name': 'Name', 'version': 'Version', 'platform': 'Platform', 'supported_platform': 'Supported-Platform', 'summary': 'Summary', 'description': 'Description', 'keywords': 'Keywords', 'home_page': 'Home-page', 'author': 'Author', 'author_email': 'Author-email', 'maintainer': 'Maintainer', 'maintainer_email': 'Maintainer-email', 'license': 'License', 'classifier': 'Classifier', 'download_url': 'Download-URL', 'obsoletes_dist': 'Obsoletes-Dist', 'provides_dist': 'Provides-Dist', 'requires_dist': 'Requires-Dist', 'setup_requires_dist': 'Setup-Requires-Dist', 'requires_python': 'Requires-Python', 'requires_external': 'Requires-External', 'requires': 'Requires', 'provides': 'Provides', 'obsoletes': 'Obsoletes', 'project_url': 'Project-URL', 'private_version': 'Private-Version', 'obsoleted_by': 'Obsoleted-By', 'extension': 'Extension', 'provides_extra': 'Provides-Extra', } _PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') _VERSIONS_FIELDS = ('Requires-Python',) _VERSION_FIELDS = ('Version',) _LISTFIELDS = 
('Platform', 'Classifier', 'Obsoletes', 'Requires', 'Provides', 'Obsoletes-Dist', 'Provides-Dist', 'Requires-Dist', 'Requires-External', 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', 'Provides-Extra', 'Extension') _LISTTUPLEFIELDS = ('Project-URL',) _ELEMENTSFIELD = ('Keywords',) _UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') _MISSING = object() _FILESAFE = re.compile('[^A-Za-z0-9.]+') def _get_name_and_version(name, version, for_filename=False): """Return the distribution name with version. If for_filename is true, return a filename-escaped form.""" if for_filename: # For both name and version any runs of non-alphanumeric or '.' # characters are replaced with a single '-'. Additionally any # spaces in the version string become '.' name = _FILESAFE.sub('-', name) version = _FILESAFE.sub('-', version.replace(' ', '.')) return '%s-%s' % (name, version) class LegacyMetadata(object): """The legacy metadata of a release. Supports versions 1.0, 1.1 and 1.2 (auto-detected). 
You can instantiate the class with one of these arguments (or none): - *path*, the path to a metadata file - *fileobj* give a file-like object with metadata as content - *mapping* is a dict-like object - *scheme* is a version scheme name """ # TODO document the mapping API and UNKNOWN default key def __init__(self, path=None, fileobj=None, mapping=None, scheme='default'): if [path, fileobj, mapping].count(None) < 2: raise TypeError('path, fileobj and mapping are exclusive') self._fields = {} self.requires_files = [] self._dependencies = None self.scheme = scheme if path is not None: self.read(path) elif fileobj is not None: self.read_file(fileobj) elif mapping is not None: self.update(mapping) self.set_metadata_version() def set_metadata_version(self): self._fields['Metadata-Version'] = _best_version(self._fields) def _write_field(self, fileobj, name, value): fileobj.write('%s: %s\n' % (name, value)) def __getitem__(self, name): return self.get(name) def __setitem__(self, name, value): return self.set(name, value) def __delitem__(self, name): field_name = self._convert_name(name) try: del self._fields[field_name] except KeyError: raise KeyError(name) def __contains__(self, name): return (name in self._fields or self._convert_name(name) in self._fields) def _convert_name(self, name): if name in _ALL_FIELDS: return name name = name.replace('-', '_').lower() return _ATTR2FIELD.get(name, name) def _default_value(self, name): if name in _LISTFIELDS or name in _ELEMENTSFIELD: return [] return 'UNKNOWN' def _remove_line_prefix(self, value): return _LINE_PREFIX.sub('\n', value) def __getattr__(self, name): if name in _ATTR2FIELD: return self[name] raise AttributeError(name) # # Public API # # dependencies = property(_get_dependencies, _set_dependencies) def get_fullname(self, filesafe=False): """Return the distribution name with version. 
If filesafe is true, return a filename-escaped form.""" return _get_name_and_version(self['Name'], self['Version'], filesafe) def is_field(self, name): """return True if name is a valid metadata key""" name = self._convert_name(name) return name in _ALL_FIELDS def is_multi_field(self, name): name = self._convert_name(name) return name in _LISTFIELDS def read(self, filepath): """Read the metadata values from a file path.""" fp = codecs.open(filepath, 'r', encoding='utf-8') try: self.read_file(fp) finally: fp.close() def read_file(self, fileob): """Read the metadata values from a file object.""" msg = message_from_file(fileob) self._fields['Metadata-Version'] = msg['metadata-version'] # When reading, get all the fields we can for field in _ALL_FIELDS: if field not in msg: continue if field in _LISTFIELDS: # we can have multiple lines values = msg.get_all(field) if field in _LISTTUPLEFIELDS and values is not None: values = [tuple(value.split(',')) for value in values] self.set(field, values) else: # single line value = msg[field] if value is not None and value != 'UNKNOWN': self.set(field, value) self.set_metadata_version() def write(self, filepath, skip_unknown=False): """Write the metadata fields to filepath.""" fp = codecs.open(filepath, 'w', encoding='utf-8') try: self.write_file(fp, skip_unknown) finally: fp.close() def write_file(self, fileobject, skip_unknown=False): """Write the PKG-INFO format data to a file object.""" self.set_metadata_version() for field in _version2fieldlist(self['Metadata-Version']): values = self.get(field) if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): continue if field in _ELEMENTSFIELD: self._write_field(fileobject, field, ','.join(values)) continue if field not in _LISTFIELDS: if field == 'Description': values = values.replace('\n', '\n |') values = [values] if field in _LISTTUPLEFIELDS: values = [','.join(value) for value in values] for value in values: self._write_field(fileobject, field, value) def update(self, 
other=None, **kwargs): """Set metadata values from the given iterable `other` and kwargs. Behavior is like `dict.update`: If `other` has a ``keys`` method, they are looped over and ``self[key]`` is assigned ``other[key]``. Else, ``other`` is an iterable of ``(key, value)`` iterables. Keys that don't match a metadata field or that have an empty value are dropped. """ def _set(key, value): if key in _ATTR2FIELD and value: self.set(self._convert_name(key), value) if not other: # other is None or empty container pass elif hasattr(other, 'keys'): for k in other.keys(): _set(k, other[k]) else: for k, v in other: _set(k, v) if kwargs: for k, v in kwargs.items(): _set(k, v) def set(self, name, value): """Control then set a metadata field.""" name = self._convert_name(name) if ((name in _ELEMENTSFIELD or name == 'Platform') and not isinstance(value, (list, tuple))): if isinstance(value, string_types): value = [v.strip() for v in value.split(',')] else: value = [] elif (name in _LISTFIELDS and not isinstance(value, (list, tuple))): if isinstance(value, string_types): value = [value] else: value = [] if logger.isEnabledFor(logging.WARNING): project_name = self['Name'] scheme = get_scheme(self.scheme) if name in _PREDICATE_FIELDS and value is not None: for v in value: # check that the values are valid if not scheme.is_valid_matcher(v.split(';')[0]): logger.warning( '%r: %r is not valid (field %r)', project_name, v, name) # FIXME this rejects UNKNOWN, is that right? 
elif name in _VERSIONS_FIELDS and value is not None: if not scheme.is_valid_constraint_list(value): logger.warning('%r: %r is not a valid version (field %r)', project_name, value, name) elif name in _VERSION_FIELDS and value is not None: if not scheme.is_valid_version(value): logger.warning('%r: %r is not a valid version (field %r)', project_name, value, name) if name in _UNICODEFIELDS: if name == 'Description': value = self._remove_line_prefix(value) self._fields[name] = value def get(self, name, default=_MISSING): """Get a metadata field.""" name = self._convert_name(name) if name not in self._fields: if default is _MISSING: default = self._default_value(name) return default if name in _UNICODEFIELDS: value = self._fields[name] return value elif name in _LISTFIELDS: value = self._fields[name] if value is None: return [] res = [] for val in value: if name not in _LISTTUPLEFIELDS: res.append(val) else: # That's for Project-URL res.append((val[0], val[1])) return res elif name in _ELEMENTSFIELD: value = self._fields[name] if isinstance(value, string_types): return value.split(',') return self._fields[name] def check(self, strict=False): """Check if the metadata is compliant. 
If strict is True then raise if no Name or Version are provided""" self.set_metadata_version() # XXX should check the versions (if the file was loaded) missing, warnings = [], [] for attr in ('Name', 'Version'): # required by PEP 345 if attr not in self: missing.append(attr) if strict and missing != []: msg = 'missing required metadata: %s' % ', '.join(missing) raise MetadataMissingError(msg) for attr in ('Home-page', 'Author'): if attr not in self: missing.append(attr) # checking metadata 1.2 (XXX needs to check 1.1, 1.0) if self['Metadata-Version'] != '1.2': return missing, warnings scheme = get_scheme(self.scheme) def are_valid_constraints(value): for v in value: if not scheme.is_valid_matcher(v.split(';')[0]): return False return True for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), (_VERSIONS_FIELDS, scheme.is_valid_constraint_list), (_VERSION_FIELDS, scheme.is_valid_version)): for field in fields: value = self.get(field, None) if value is not None and not controller(value): warnings.append('Wrong value for %r: %s' % (field, value)) return missing, warnings def todict(self, skip_missing=False): """Return fields as a dict. Field names will be converted to use the underscore-lowercase style instead of hyphen-mixed case (i.e. home_page instead of Home-page). 
""" self.set_metadata_version() mapping_1_0 = ( ('metadata_version', 'Metadata-Version'), ('name', 'Name'), ('version', 'Version'), ('summary', 'Summary'), ('home_page', 'Home-page'), ('author', 'Author'), ('author_email', 'Author-email'), ('license', 'License'), ('description', 'Description'), ('keywords', 'Keywords'), ('platform', 'Platform'), ('classifier', 'Classifier'), ('download_url', 'Download-URL'), ) data = {} for key, field_name in mapping_1_0: if not skip_missing or field_name in self._fields: data[key] = self[field_name] if self['Metadata-Version'] == '1.2': mapping_1_2 = ( ('requires_dist', 'Requires-Dist'), ('requires_python', 'Requires-Python'), ('requires_external', 'Requires-External'), ('provides_dist', 'Provides-Dist'), ('obsoletes_dist', 'Obsoletes-Dist'), ('project_url', 'Project-URL'), ('maintainer', 'Maintainer'), ('maintainer_email', 'Maintainer-email'), ) for key, field_name in mapping_1_2: if not skip_missing or field_name in self._fields: if key != 'project_url': data[key] = self[field_name] else: data[key] = [','.join(u) for u in self[field_name]] elif self['Metadata-Version'] == '1.1': mapping_1_1 = ( ('provides', 'Provides'), ('requires', 'Requires'), ('obsoletes', 'Obsoletes'), ) for key, field_name in mapping_1_1: if not skip_missing or field_name in self._fields: data[key] = self[field_name] return data def add_requirements(self, requirements): if self['Metadata-Version'] == '1.1': # we can't have 1.1 metadata *and* Setuptools requires for field in ('Obsoletes', 'Requires', 'Provides'): if field in self: del self[field] self['Requires-Dist'] += requirements # Mapping API # TODO could add iter* variants def keys(self): return list(_version2fieldlist(self['Metadata-Version'])) def __iter__(self): for key in self.keys(): yield key def values(self): return [self[key] for key in self.keys()] def items(self): return [(key, self[key]) for key in self.keys()] def __repr__(self): return '<%s %s %s>' % (self.__class__.__name__, self.name, 
self.version) METADATA_FILENAME = 'pydist.json' class Metadata(object): """ The metadata of a release. This implementation uses 2.0 (JSON) metadata where possible. If not possible, it wraps a LegacyMetadata instance which handles the key-value metadata format. """ METADATA_VERSION_MATCHER = re.compile('^\d+(\.\d+)*$') NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) VERSION_MATCHER = PEP426_VERSION_RE SUMMARY_MATCHER = re.compile('.{1,2047}') METADATA_VERSION = '2.0' GENERATOR = 'distlib (%s)' % __version__ MANDATORY_KEYS = { 'name': (), 'version': (), 'summary': ('legacy',), } INDEX_KEYS = ('name version license summary description author ' 'author_email keywords platform home_page classifiers ' 'download_url') DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' 'dev_requires provides meta_requires obsoleted_by ' 'supports_environments') SYNTAX_VALIDATORS = { 'metadata_version': (METADATA_VERSION_MATCHER, ()), 'name': (NAME_MATCHER, ('legacy',)), 'version': (VERSION_MATCHER, ('legacy',)), 'summary': (SUMMARY_MATCHER, ('legacy',)), } __slots__ = ('_legacy', '_data', 'scheme') def __init__(self, path=None, fileobj=None, mapping=None, scheme='default'): if [path, fileobj, mapping].count(None) < 2: raise TypeError('path, fileobj and mapping are exclusive') self._legacy = None self._data = None self.scheme = scheme #import pdb; pdb.set_trace() if mapping is not None: try: self._validate_mapping(mapping, scheme) self._data = mapping except MetadataUnrecognizedVersionError: self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) self.validate() else: data = None if path: with open(path, 'rb') as f: data = f.read() elif fileobj: data = fileobj.read() if data is None: # Initialised with no args - to be added self._data = { 'metadata_version': self.METADATA_VERSION, 'generator': self.GENERATOR, } else: if not isinstance(data, text_type): data = data.decode('utf-8') try: self._data = json.loads(data) 
self._validate_mapping(self._data, scheme) except ValueError: # Note: MetadataUnrecognizedVersionError does not # inherit from ValueError (it's a DistlibException, # which should not inherit from ValueError). # The ValueError comes from the json.load - if that # succeeds and we get a validation error, we want # that to propagate self._legacy = LegacyMetadata(fileobj=StringIO(data), scheme=scheme) self.validate() common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) none_list = (None, list) none_dict = (None, dict) mapped_keys = { 'run_requires': ('Requires-Dist', list), 'build_requires': ('Setup-Requires-Dist', list), 'dev_requires': none_list, 'test_requires': none_list, 'meta_requires': none_list, 'extras': ('Provides-Extra', list), 'modules': none_list, 'namespaces': none_list, 'exports': none_dict, 'commands': none_dict, 'classifiers': ('Classifier', list), 'source_url': ('Download-URL', None), 'metadata_version': ('Metadata-Version', None), } del none_list, none_dict def __getattribute__(self, key): common = object.__getattribute__(self, 'common_keys') mapped = object.__getattribute__(self, 'mapped_keys') if key in mapped: lk, maker = mapped[key] if self._legacy: if lk is None: result = None if maker is None else maker() else: result = self._legacy.get(lk) else: value = None if maker is None else maker() result = self._data.get(key, value) elif key not in common: result = object.__getattribute__(self, key) elif self._legacy: result = self._legacy.get(key) else: result = self._data.get(key) return result def _validate_value(self, key, value, scheme=None): if key in self.SYNTAX_VALIDATORS: pattern, exclusions = self.SYNTAX_VALIDATORS[key] if (scheme or self.scheme) not in exclusions: m = pattern.match(value) if not m: raise MetadataInvalidError('%r is an invalid value for ' 'the %r property' % (value, key)) def __setattr__(self, key, value): self._validate_value(key, value) common = object.__getattribute__(self, 'common_keys') mapped = 
object.__getattribute__(self, 'mapped_keys') if key in mapped: lk, _ = mapped[key] if self._legacy: if lk is None: raise NotImplementedError self._legacy[lk] = value else: self._data[key] = value elif key not in common: object.__setattr__(self, key, value) else: if key == 'keywords': if isinstance(value, string_types): value = value.strip() if value: value = value.split() else: value = [] if self._legacy: self._legacy[key] = value else: self._data[key] = value @property def name_and_version(self): return _get_name_and_version(self.name, self.version, True) @property def provides(self): if self._legacy: result = self._legacy['Provides-Dist'] else: result = self._data.setdefault('provides', []) s = '%s (%s)' % (self.name, self.version) if s not in result: result.append(s) return result @provides.setter def provides(self, value): if self._legacy: self._legacy['Provides-Dist'] = value else: self._data['provides'] = value def get_requirements(self, reqts, extras=None, env=None): """ Base method to get dependencies, given a set of extras to satisfy and an optional environment context. :param reqts: A list of sometimes-wanted dependencies, perhaps dependent on extras and environment. :param extras: A list of optional components being requested. :param env: An optional environment for marker evaluation. 
""" if self._legacy: result = reqts else: result = [] extras = get_extras(extras or [], self.extras) for d in reqts: if 'extra' not in d and 'environment' not in d: # unconditional include = True else: if 'extra' not in d: # Not extra-dependent - only environment-dependent include = True else: include = d.get('extra') in extras if include: # Not excluded because of extras, check environment marker = d.get('environment') if marker: include = interpret(marker, env) if include: result.extend(d['requires']) for key in ('build', 'dev', 'test'): e = ':%s:' % key if e in extras: extras.remove(e) # A recursive call, but it should terminate since 'test' # has been removed from the extras reqts = self._data.get('%s_requires' % key, []) result.extend(self.get_requirements(reqts, extras=extras, env=env)) return result @property def dictionary(self): if self._legacy: return self._from_legacy() return self._data @property def dependencies(self): if self._legacy: raise NotImplementedError else: return extract_by_key(self._data, self.DEPENDENCY_KEYS) @dependencies.setter def dependencies(self, value): if self._legacy: raise NotImplementedError else: self._data.update(value) def _validate_mapping(self, mapping, scheme): if mapping.get('metadata_version') != self.METADATA_VERSION: raise MetadataUnrecognizedVersionError() missing = [] for key, exclusions in self.MANDATORY_KEYS.items(): if key not in mapping: if scheme not in exclusions: missing.append(key) if missing: msg = 'Missing metadata items: %s' % ', '.join(missing) raise MetadataMissingError(msg) for k, v in mapping.items(): self._validate_value(k, v, scheme) def validate(self): if self._legacy: missing, warnings = self._legacy.check(True) if missing or warnings: logger.warning('Metadata: missing: %s, warnings: %s', missing, warnings) else: self._validate_mapping(self._data, self.scheme) def todict(self): if self._legacy: return self._legacy.todict(True) else: result = extract_by_key(self._data, self.INDEX_KEYS) return result 
def _from_legacy(self): assert self._legacy and not self._data result = { 'metadata_version': self.METADATA_VERSION, 'generator': self.GENERATOR, } lmd = self._legacy.todict(True) # skip missing ones for k in ('name', 'version', 'license', 'summary', 'description', 'classifier'): if k in lmd: if k == 'classifier': nk = 'classifiers' else: nk = k result[nk] = lmd[k] kw = lmd.get('Keywords', []) if kw == ['']: kw = [] result['keywords'] = kw keys = (('requires_dist', 'run_requires'), ('setup_requires_dist', 'build_requires')) for ok, nk in keys: if ok in lmd and lmd[ok]: result[nk] = [{'requires': lmd[ok]}] result['provides'] = self.provides author = {} maintainer = {} return result LEGACY_MAPPING = { 'name': 'Name', 'version': 'Version', 'license': 'License', 'summary': 'Summary', 'description': 'Description', 'classifiers': 'Classifier', } def _to_legacy(self): def process_entries(entries): reqts = set() for e in entries: extra = e.get('extra') env = e.get('environment') rlist = e['requires'] for r in rlist: if not env and not extra: reqts.add(r) else: marker = '' if extra: marker = 'extra == "%s"' % extra if env: if marker: marker = '(%s) and %s' % (env, marker) else: marker = env reqts.add(';'.join((r, marker))) return reqts assert self._data and not self._legacy result = LegacyMetadata() nmd = self._data for nk, ok in self.LEGACY_MAPPING.items(): if nk in nmd: result[ok] = nmd[nk] r1 = process_entries(self.run_requires + self.meta_requires) r2 = process_entries(self.build_requires + self.dev_requires) if self.extras: result['Provides-Extra'] = sorted(self.extras) result['Requires-Dist'] = sorted(r1) result['Setup-Requires-Dist'] = sorted(r2) # TODO: other fields such as contacts return result def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): if [path, fileobj].count(None) != 1: raise ValueError('Exactly one of path and fileobj is needed') self.validate() if legacy: if self._legacy: legacy_md = self._legacy else: legacy_md = 
self._to_legacy() if path: legacy_md.write(path, skip_unknown=skip_unknown) else: legacy_md.write_file(fileobj, skip_unknown=skip_unknown) else: if self._legacy: d = self._from_legacy() else: d = self._data if fileobj: json.dump(d, fileobj, ensure_ascii=True, indent=2, sort_keys=True) else: with codecs.open(path, 'w', 'utf-8') as f: json.dump(d, f, ensure_ascii=True, indent=2, sort_keys=True) def add_requirements(self, requirements): if self._legacy: self._legacy.add_requirements(requirements) else: self._data.setdefault('run_requires', []).extend(requirements) def __repr__(self): name = self.name or '(no name)' version = self.version or 'no version' return '<%s %s %s (%s)>' % (self.__class__.__name__, self.metadata_version, name, version)
mit
codeaudit/beast-mcmc
plugins_SDK/generate_stub.py
13
4995
#!/usr/bin/env python import sys, os, shutil, re from optparse import OptionParser PROG_NAME = os.path.basename(os.path.abspath(sys.argv[0])) VERBOSE = False def debug(msg): if VERBOSE: sys.stderr.write('%s: %s\n' % (PROG_NAME, msg)) parser = OptionParser() parser.add_option('-p', '--package', dest='package', help='The package that contains the Plugin impl') parser.add_option('-c', '--class', dest='classname', help='The name of the of class that implements Plugin') parser.add_option('-x', '--xmlelement', dest='xml', default='', type='str', help='The name of the xml element associated with the parser') parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true', help='verbose mode') (options, args) = parser.parse_args() if options.verbose: VERBOSE = True if not options.package: sys.exit('The package options must be specified') if not options.classname: sys.exit('The class options must be specified') dir = os.path.dirname(os.path.abspath(sys.argv[0])) template_par = os.path.join(dir, 'templates') if not os.path.exists(template_par): sys.exit('%s does not exist' % template_par) sp_package = options.package.split('.') def copy_with_subst(s, d, rep_dict): try: source = open(s, 'rU') except: sys.exit('Could not open %s ' % s) try: dest = open(d, 'w') except: sys.exit('Could not open %s ' % d) pat_sub_list = [(re.compile(r'@%s@' % k), v) for k, v in rep_dict.iteritems()] for line in source: for p_el in pat_sub_list: line = p_el[1].join(p_el[0].split(line)) dest.write(line) dest.close() source.close() replace_dict = {} replace_dict['PLUGIN_CLASS'] = options.classname replace_dict['PLUGIN_PACKAGE'] = sp_package[0] replace_dict['PLUGIN_SUBPACKAGES'] = '.'.join([''] + sp_package[1:] + ['']) replace_dict['PLUGIN_FULL_PACKAGE'] = options.package replace_dict['PLUGIN_SRC_PATH'] = ''' <include name="${plugin-package}/**/*.java"/>''' replace_dict['PLUGIN_CLASS_FILE_PATH'] = ''' <include name="${plugin-package}/**/*.class"/>''' 
replace_dict['EXAMPLE_XML_FILE'] = 'test%s.xml' % options.classname if options.xml: xml_el = options.xml elif options.classname.lower().endswith('plugin'): xml_el = options.classname[:len('plugin') - 2] else: xml_el = options.classname replace_dict['PLUGIN_XML_ELEMENT'] = xml_el replace_dict['PLUGIN_IMPORTS'] = '''import dr.evomodel.substmodel.NucModelType; import dr.evomodel.substmodel.FrequencyModel; import dr.evomodel.substmodel.HKY; import dr.inference.model.Parameter; import dr.inference.model.Variable;''' replace_dict['PLUGIN_XML_SYNTAX_RULES'] = '''new ElementRule(FrequencyModel.FREQUENCIES, new XMLSyntaxRule[]{new ElementRule(FrequencyModel.class)}), new ElementRule("kappa", new XMLSyntaxRule[]{new ElementRule(Variable.class)})''' replace_dict['PLUGIN_XML_PARSER_STUB'] = '''Variable kappaParam = (Variable) xo.getElementFirstChild("kappa"); FrequencyModel freqModel = (FrequencyModel) xo.getElementFirstChild(FrequencyModel.FREQUENCIES); Logger.getLogger("dr.evomodel").info("Creating THMM substitution model. 
Initial kappa = " + kappaParam.getValue(0)); return new HKY(kappaParam, freqModel);''' replace_dict['PLUGIN_PARSER_RETURN_TYPE'] = 'HKY.class' parser_name = 'DummyModelParser' debug('\n '.join(['%s = %s' % (k, v) for k, v in replace_dict.iteritems()])) #sys.exit(str(replace_dict)) dest_dir = options.classname if os.path.exists(dest_dir): sys.exit('%s already exsists' % dest_dir) dir_list = [dest_dir, 'src'] + sp_package dest_src_dir = os.path.join(*dir_list) try: os.makedirs(dest_src_dir) except: sys.exit('Could not create %s' % dest_src_dir) dest_example_dir = os.path.join(dest_dir, 'example') try: os.makedirs(dest_example_dir) except: sys.exit('Could not create %s' % dest_example_dir) plugin_src = os.path.join(template_par, 'DummyPlugin.java') dest_src = os.path.join(dest_src_dir, '%s.java' % options.classname) copy_with_subst(plugin_src, dest_src, replace_dict) plugin_src = os.path.join(template_par, '%s.java' % parser_name) dest_src = os.path.join(dest_src_dir, '%sParser.java' % options.classname) copy_with_subst(plugin_src, dest_src, replace_dict) plugin_src = os.path.join(template_par, 'build.xml') dest_src = os.path.join(dest_dir, 'build.xml') copy_with_subst(plugin_src, dest_src, replace_dict) plugin_src = os.path.join(template_par, 'beast_sdk.properties.in') dest_src = os.path.join(dest_dir, 'beast_sdk.properties.in') copy_with_subst(plugin_src, dest_src, replace_dict) plugin_src = os.path.join(template_par, 'Dummy.xml') dest_src = os.path.join(dest_example_dir, 'test%s.xml' % options.classname) copy_with_subst(plugin_src, dest_src, replace_dict)
lgpl-2.1
FlaPer87/django-nonrel
django/db/models/sql/compiler.py
3
42993
from django.core.exceptions import FieldError from django.db import connections from django.db.backends.util import truncate_name from django.db.models.sql.constants import * from django.db.models.sql.datastructures import EmptyResultSet from django.db.models.sql.expressions import SQLEvaluator from django.db.models.sql.query import get_proxied_model, get_order_dir, \ select_related_descend, Query class SQLCompiler(object): def __init__(self, query, connection, using): self.query = query self.connection = connection self.using = using self.quote_cache = {} def pre_sql_setup(self): """ Does any necessary class setup immediately prior to producing SQL. This is for things that can't necessarily be done in __init__ because we might not have all the pieces in place at that time. """ if not self.query.tables: self.query.join((None, self.query.model._meta.db_table, None, None)) if (not self.query.select and self.query.default_cols and not self.query.included_inherited_models): self.query.setup_inherited_models() if self.query.select_related and not self.query.related_select_cols: self.fill_related_selections() def quote_name_unless_alias(self, name): """ A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. This avoids problems with some SQL dialects that treat quoted strings specially (e.g. PostgreSQL). """ if name in self.quote_cache: return self.quote_cache[name] if ((name in self.query.alias_map and name not in self.query.table_map) or name in self.query.extra_select): self.quote_cache[name] = name return name r = self.connection.ops.quote_name(name) self.quote_cache[name] = r return r def as_sql(self, with_limits=True, with_col_aliases=False): """ Creates the SQL for this query. Returns the SQL string and list of parameters. If 'with_limits' is False, any limit/offset information is not included in the query. 
""" self.pre_sql_setup() out_cols = self.get_columns(with_col_aliases) ordering, ordering_group_by = self.get_ordering() # This must come after 'select' and 'ordering' -- see docstring of # get_from_clause() for details. from_, f_params = self.get_from_clause() qn = self.quote_name_unless_alias where, w_params = self.query.where.as_sql(qn=qn, connection=self.connection) having, h_params = self.query.having.as_sql(qn=qn, connection=self.connection) params = [] for val in self.query.extra_select.itervalues(): params.extend(val[1]) result = ['SELECT'] if self.query.distinct: result.append('DISTINCT') result.append(', '.join(out_cols + self.query.ordering_aliases)) result.append('FROM') result.extend(from_) params.extend(f_params) if where: result.append('WHERE %s' % where) params.extend(w_params) grouping, gb_params = self.get_grouping() if grouping: if ordering: # If the backend can't group by PK (i.e., any database # other than MySQL), then any fields mentioned in the # ordering clause needs to be in the group by clause. if not self.connection.features.allows_group_by_pk: for col, col_params in ordering_group_by: if col not in grouping: grouping.append(str(col)) gb_params.extend(col_params) else: ordering = self.connection.ops.force_no_ordering() result.append('GROUP BY %s' % ', '.join(grouping)) params.extend(gb_params) if having: result.append('HAVING %s' % having) params.extend(h_params) if ordering: result.append('ORDER BY %s' % ', '.join(ordering)) if with_limits: if self.query.high_mark is not None: result.append('LIMIT %d' % (self.query.high_mark - self.query.low_mark)) if self.query.low_mark: if self.query.high_mark is None: val = self.connection.ops.no_limit_value() if val: result.append('LIMIT %d' % val) result.append('OFFSET %d' % self.query.low_mark) return ' '.join(result), tuple(params) def as_nested_sql(self): """ Perform the same functionality as the as_sql() method, returning an SQL string and parameters. 
However, the alias prefixes are bumped beforehand (in a copy -- the current query isn't changed), and any ordering is removed if the query is unsliced. Used when nesting this query inside another. """ obj = self.query.clone() if obj.low_mark == 0 and obj.high_mark is None: # If there is no slicing in use, then we can safely drop all ordering obj.clear_ordering(True) obj.bump_prefix() return obj.get_compiler(connection=self.connection).as_sql() def get_columns(self, with_aliases=False): """ Returns the list of columns to use in the select statement. If no columns have been specified, returns all columns relating to fields in the model. If 'with_aliases' is true, any column names that are duplicated (without the table names) are given unique aliases. This is needed in some cases to avoid ambiguity with nested queries. """ qn = self.quote_name_unless_alias qn2 = self.connection.ops.quote_name result = ['(%s) AS %s' % (col[0], qn2(alias)) for alias, col in self.query.extra_select.iteritems()] aliases = set(self.query.extra_select.keys()) if with_aliases: col_aliases = aliases.copy() else: col_aliases = set() if self.query.select: only_load = self.deferred_to_columns() for col in self.query.select: if isinstance(col, (list, tuple)): alias, column = col table = self.query.alias_map[alias][TABLE_NAME] if table in only_load and col not in only_load[table]: continue r = '%s.%s' % (qn(alias), qn(column)) if with_aliases: if col[1] in col_aliases: c_alias = 'Col%d' % len(col_aliases) result.append('%s AS %s' % (r, c_alias)) aliases.add(c_alias) col_aliases.add(c_alias) else: result.append('%s AS %s' % (r, qn2(col[1]))) aliases.add(r) col_aliases.add(col[1]) else: result.append(r) aliases.add(r) col_aliases.add(col[1]) else: result.append(col.as_sql(qn, self.connection)) if hasattr(col, 'alias'): aliases.add(col.alias) col_aliases.add(col.alias) elif self.query.default_cols: cols, new_aliases = self.get_default_columns(with_aliases, col_aliases) result.extend(cols) 
aliases.update(new_aliases) max_name_length = self.connection.ops.max_name_length() result.extend([ '%s%s' % ( aggregate.as_sql(qn, self.connection), alias is not None and ' AS %s' % qn(truncate_name(alias, max_name_length)) or '' ) for alias, aggregate in self.query.aggregate_select.items() ]) for table, col in self.query.related_select_cols: r = '%s.%s' % (qn(table), qn(col)) if with_aliases and col in col_aliases: c_alias = 'Col%d' % len(col_aliases) result.append('%s AS %s' % (r, c_alias)) aliases.add(c_alias) col_aliases.add(c_alias) else: result.append(r) aliases.add(r) col_aliases.add(col) self._select_aliases = aliases return result def get_default_columns(self, with_aliases=False, col_aliases=None, start_alias=None, opts=None, as_pairs=False, local_only=False): """ Computes the default columns for selecting every field in the base model. Will sometimes be called to pull in related models (e.g. via select_related), in which case "opts" and "start_alias" will be given to provide a starting point for the traversal. Returns a list of strings, quoted appropriately for use in SQL directly, as well as a set of aliases used in the select statement (if 'as_pairs' is True, returns a list of (alias, col_name) pairs instead of strings as the first component and None as the second component). 
""" result = [] if opts is None: opts = self.query.model._meta qn = self.quote_name_unless_alias qn2 = self.connection.ops.quote_name aliases = set() only_load = self.deferred_to_columns() # Skip all proxy to the root proxied model proxied_model = get_proxied_model(opts) if start_alias: seen = {None: start_alias} for field, model in opts.get_fields_with_model(): if local_only and model is not None: continue if start_alias: try: alias = seen[model] except KeyError: if model is proxied_model: alias = start_alias else: link_field = opts.get_ancestor_link(model) alias = self.query.join((start_alias, model._meta.db_table, link_field.column, model._meta.pk.column)) seen[model] = alias else: # If we're starting from the base model of the queryset, the # aliases will have already been set up in pre_sql_setup(), so # we can save time here. alias = self.query.included_inherited_models[model] table = self.query.alias_map[alias][TABLE_NAME] if table in only_load and field.column not in only_load[table]: continue if as_pairs: result.append((alias, field.column)) aliases.add(alias) continue if with_aliases and field.column in col_aliases: c_alias = 'Col%d' % len(col_aliases) result.append('%s.%s AS %s' % (qn(alias), qn2(field.column), c_alias)) col_aliases.add(c_alias) aliases.add(c_alias) else: r = '%s.%s' % (qn(alias), qn2(field.column)) result.append(r) aliases.add(r) if with_aliases: col_aliases.add(field.column) return result, aliases def get_ordering(self): """ Returns a tuple containing a list representing the SQL elements in the "order by" clause, and the list of SQL elements that need to be added to the GROUP BY clause as a result of the ordering. Also sets the ordering_aliases attribute on this instance to a list of extra aliases needed in the select. Determining the ordering SQL can change the tables we need to include, so this should be run *before* get_from_clause(). 
""" if self.query.extra_order_by: ordering = self.query.extra_order_by elif not self.query.default_ordering: ordering = self.query.order_by else: ordering = self.query.order_by or self.query.model._meta.ordering qn = self.quote_name_unless_alias qn2 = self.connection.ops.quote_name distinct = self.query.distinct select_aliases = self._select_aliases result = [] group_by = [] ordering_aliases = [] if self.query.standard_ordering: asc, desc = ORDER_DIR['ASC'] else: asc, desc = ORDER_DIR['DESC'] # It's possible, due to model inheritance, that normal usage might try # to include the same field more than once in the ordering. We track # the table/column pairs we use and discard any after the first use. processed_pairs = set() for field in ordering: if field == '?': result.append(self.connection.ops.random_function_sql()) continue if isinstance(field, int): if field < 0: order = desc field = -field else: order = asc result.append('%s %s' % (field, order)) group_by.append((field, [])) continue col, order = get_order_dir(field, asc) if col in self.query.aggregate_select: result.append('%s %s' % (col, order)) continue if '.' in field: # This came in through an extra(order_by=...) addition. Pass it # on verbatim. table, col = col.split('.', 1) if (table, col) not in processed_pairs: elt = '%s.%s' % (qn(table), col) processed_pairs.add((table, col)) if not distinct or elt in select_aliases: result.append('%s %s' % (elt, order)) group_by.append((elt, [])) elif get_order_dir(field)[0] not in self.query.extra_select: # 'col' is of the form 'field' or 'field1__field2' or # '-field1__field2__field', etc. 
for table, col, order in self.find_ordering_name(field, self.query.model._meta, default_order=asc): if (table, col) not in processed_pairs: elt = '%s.%s' % (qn(table), qn2(col)) processed_pairs.add((table, col)) if distinct and elt not in select_aliases: ordering_aliases.append(elt) result.append('%s %s' % (elt, order)) group_by.append((elt, [])) else: elt = qn2(col) if distinct and col not in select_aliases: ordering_aliases.append(elt) result.append('%s %s' % (elt, order)) group_by.append(self.query.extra_select[col]) self.query.ordering_aliases = ordering_aliases return result, group_by def find_ordering_name(self, name, opts, alias=None, default_order='ASC', already_seen=None): """ Returns the table alias (the name might be ambiguous, the alias will not be) and column name for ordering by the given 'name' parameter. The 'name' is of the form 'field1__field2__...__fieldN'. """ name, order = get_order_dir(name, default_order) pieces = name.split(LOOKUP_SEP) if not alias: alias = self.query.get_initial_alias() field, target, opts, joins, last, extra = self.query.setup_joins(pieces, opts, alias, False) alias = joins[-1] col = target.column if not field.rel: # To avoid inadvertent trimming of a necessary alias, use the # refcount to show that we are referencing a non-relation field on # the model. self.query.ref_alias(alias) # Must use left outer joins for nullable fields and their relations. self.query.promote_alias_chain(joins, self.query.alias_map[joins[0]][JOIN_TYPE] == self.query.LOUTER) # If we get to this point and the field is a relation to another model, # append the default ordering for that model. if field.rel and len(joins) > 1 and opts.ordering: # Firstly, avoid infinite loops. 
if not already_seen: already_seen = set() join_tuple = tuple([self.query.alias_map[j][TABLE_NAME] for j in joins]) if join_tuple in already_seen: raise FieldError('Infinite loop caused by ordering.') already_seen.add(join_tuple) results = [] for item in opts.ordering: results.extend(self.find_ordering_name(item, opts, alias, order, already_seen)) return results if alias: # We have to do the same "final join" optimisation as in # add_filter, since the final column might not otherwise be part of # the select set (so we can't order on it). while 1: join = self.query.alias_map[alias] if col != join[RHS_JOIN_COL]: break self.query.unref_alias(alias) alias = join[LHS_ALIAS] col = join[LHS_JOIN_COL] return [(alias, col, order)] def get_from_clause(self): """ Returns a list of strings that are joined together to go after the "FROM" part of the query, as well as a list any extra parameters that need to be included. Sub-classes, can override this to create a from-clause via a "select". This should only be called after any SQL construction methods that might change the tables we need. This means the select columns and ordering must be done first. """ result = [] qn = self.quote_name_unless_alias qn2 = self.connection.ops.quote_name first = True for alias in self.query.tables: if not self.query.alias_refcount[alias]: continue try: name, alias, join_type, lhs, lhs_col, col, nullable = self.query.alias_map[alias] except KeyError: # Extra tables can end up in self.tables, but not in the # alias_map if they aren't in a join. That's OK. We skip them. 
continue alias_str = (alias != name and ' %s' % alias or '') if join_type and not first: result.append('%s %s%s ON (%s.%s = %s.%s)' % (join_type, qn(name), alias_str, qn(lhs), qn2(lhs_col), qn(alias), qn2(col))) else: connector = not first and ', ' or '' result.append('%s%s%s' % (connector, qn(name), alias_str)) first = False for t in self.query.extra_tables: alias, unused = self.query.table_alias(t) # Only add the alias if it's not already present (the table_alias() # calls increments the refcount, so an alias refcount of one means # this is the only reference. if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1: connector = not first and ', ' or '' result.append('%s%s' % (connector, qn(alias))) first = False return result, [] def get_grouping(self): """ Returns a tuple representing the SQL elements in the "group by" clause. """ qn = self.quote_name_unless_alias result, params = [], [] if self.query.group_by is not None: if len(self.query.model._meta.fields) == len(self.query.select) and \ self.connection.features.allows_group_by_pk: self.query.group_by = [(self.query.model._meta.db_table, self.query.model._meta.pk.column)] group_by = self.query.group_by or [] extra_selects = [] for extra_select, extra_params in self.query.extra_select.itervalues(): extra_selects.append(extra_select) params.extend(extra_params) for col in group_by + self.query.related_select_cols + extra_selects: if isinstance(col, (list, tuple)): result.append('%s.%s' % (qn(col[0]), qn(col[1]))) elif hasattr(col, 'as_sql'): result.append(col.as_sql(qn)) else: result.append('(%s)' % str(col)) return result, params def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1, used=None, requested=None, restricted=None, nullable=None, dupe_set=None, avoid_set=None): """ Fill in the information needed for a select_related query. 
The current depth is measured as the number of connections away from the root model (for example, cur_depth=1 means we are looking at models with direct connections to the root model). """ if not restricted and self.query.max_depth and cur_depth > self.query.max_depth: # We've recursed far enough; bail out. return if not opts: opts = self.query.get_meta() root_alias = self.query.get_initial_alias() self.query.related_select_cols = [] self.query.related_select_fields = [] if not used: used = set() if dupe_set is None: dupe_set = set() if avoid_set is None: avoid_set = set() orig_dupe_set = dupe_set # Setup for the case when only particular related fields should be # included in the related selection. if requested is None: if isinstance(self.query.select_related, dict): requested = self.query.select_related restricted = True else: restricted = False for f, model in opts.get_fields_with_model(): if not select_related_descend(f, restricted, requested): continue # The "avoid" set is aliases we want to avoid just for this # particular branch of the recursion. They aren't permanently # forbidden from reuse in the related selection tables (which is # what "used" specifies). 
avoid = avoid_set.copy() dupe_set = orig_dupe_set.copy() table = f.rel.to._meta.db_table promote = nullable or f.null if model: int_opts = opts alias = root_alias alias_chain = [] for int_model in opts.get_base_chain(model): # Proxy model have elements in base chain # with no parents, assign the new options # object and skip to the next base in that # case if not int_opts.parents[int_model]: int_opts = int_model._meta continue lhs_col = int_opts.parents[int_model].column dedupe = lhs_col in opts.duplicate_targets if dedupe: avoid.update(self.query.dupe_avoidance.get((id(opts), lhs_col), ())) dupe_set.add((opts, lhs_col)) int_opts = int_model._meta alias = self.query.join((alias, int_opts.db_table, lhs_col, int_opts.pk.column), exclusions=used, promote=promote) alias_chain.append(alias) for (dupe_opts, dupe_col) in dupe_set: self.query.update_dupe_avoidance(dupe_opts, dupe_col, alias) if self.query.alias_map[root_alias][JOIN_TYPE] == self.query.LOUTER: self.query.promote_alias_chain(alias_chain, True) else: alias = root_alias dedupe = f.column in opts.duplicate_targets if dupe_set or dedupe: avoid.update(self.query.dupe_avoidance.get((id(opts), f.column), ())) if dedupe: dupe_set.add((opts, f.column)) alias = self.query.join((alias, table, f.column, f.rel.get_related_field().column), exclusions=used.union(avoid), promote=promote) used.add(alias) columns, aliases = self.get_default_columns(start_alias=alias, opts=f.rel.to._meta, as_pairs=True) self.query.related_select_cols.extend(columns) if self.query.alias_map[alias][JOIN_TYPE] == self.query.LOUTER: self.query.promote_alias_chain(aliases, True) self.query.related_select_fields.extend(f.rel.to._meta.fields) if restricted: next = requested.get(f.name, {}) else: next = False new_nullable = f.null or promote for dupe_opts, dupe_col in dupe_set: self.query.update_dupe_avoidance(dupe_opts, dupe_col, alias) self.fill_related_selections(f.rel.to._meta, alias, cur_depth + 1, used, next, restricted, new_nullable, dupe_set, 
avoid) if restricted: related_fields = [ (o.field, o.model) for o in opts.get_all_related_objects() if o.field.unique ] for f, model in related_fields: if not select_related_descend(f, restricted, requested, reverse=True): continue # The "avoid" set is aliases we want to avoid just for this # particular branch of the recursion. They aren't permanently # forbidden from reuse in the related selection tables (which is # what "used" specifies). avoid = avoid_set.copy() dupe_set = orig_dupe_set.copy() table = model._meta.db_table int_opts = opts alias = root_alias alias_chain = [] chain = opts.get_base_chain(f.rel.to) if chain is not None: for int_model in chain: # Proxy model have elements in base chain # with no parents, assign the new options # object and skip to the next base in that # case if not int_opts.parents[int_model]: int_opts = int_model._meta continue lhs_col = int_opts.parents[int_model].column dedupe = lhs_col in opts.duplicate_targets if dedupe: avoid.update((self.query.dupe_avoidance.get(id(opts), lhs_col), ())) dupe_set.add((opts, lhs_col)) int_opts = int_model._meta alias = self.query.join( (alias, int_opts.db_table, lhs_col, int_opts.pk.column), exclusions=used, promote=True, reuse=used ) alias_chain.append(alias) for dupe_opts, dupe_col in dupe_set: self.query.update_dupe_avoidance(dupe_opts, dupe_col, alias) dedupe = f.column in opts.duplicate_targets if dupe_set or dedupe: avoid.update(self.query.dupe_avoidance.get((id(opts), f.column), ())) if dedupe: dupe_set.add((opts, f.column)) alias = self.query.join( (alias, table, f.rel.get_related_field().column, f.column), exclusions=used.union(avoid), promote=True ) used.add(alias) columns, aliases = self.get_default_columns(start_alias=alias, opts=model._meta, as_pairs=True, local_only=True) self.query.related_select_cols.extend(columns) self.query.related_select_fields.extend(model._meta.fields) next = requested.get(f.related_query_name(), {}) new_nullable = f.null or None 
self.fill_related_selections(model._meta, table, cur_depth+1, used, next, restricted, new_nullable) def deferred_to_columns(self): """ Converts the self.deferred_loading data structure to mapping of table names to sets of column names which are to be loaded. Returns the dictionary. """ columns = {} self.query.deferred_to_data(columns, self.query.deferred_to_columns_cb) return columns def results_iter(self): """ Returns an iterator over the results from executing this query. """ resolve_columns = hasattr(self, 'resolve_columns') fields = None for rows in self.execute_sql(MULTI): for row in rows: if resolve_columns: if fields is None: # We only set this up here because # related_select_fields isn't populated until # execute_sql() has been called. if self.query.select_fields: fields = self.query.select_fields + self.query.related_select_fields else: fields = self.query.model._meta.fields # If the field was deferred, exclude it from being passed # into `resolve_columns` because it wasn't selected. 
only_load = self.deferred_to_columns() if only_load: db_table = self.query.model._meta.db_table fields = [f for f in fields if db_table in only_load and f.column in only_load[db_table]] row = self.resolve_columns(row, fields) if self.query.aggregate_select: aggregate_start = len(self.query.extra_select.keys()) + len(self.query.select) aggregate_end = aggregate_start + len(self.query.aggregate_select) row = tuple(row[:aggregate_start]) + tuple([ self.query.resolve_aggregate(value, aggregate, self.connection) for (alias, aggregate), value in zip(self.query.aggregate_select.items(), row[aggregate_start:aggregate_end]) ]) + tuple(row[aggregate_end:]) yield row def has_results(self): # This is always executed on a query clone, so we can modify self.query self.query.add_extra({'a': 1}, None, None, None, None, None) self.query.set_extra_mask(('a',)) return bool(self.execute_sql(SINGLE)) def execute_sql(self, result_type=MULTI): """ Run the query against the database and returns the result(s). The return value is a single data item if result_type is SINGLE, or an iterator over the results if the result_type is MULTI. result_type is either MULTI (use fetchmany() to retrieve all rows), SINGLE (only retrieve a single row), or None. In this last case, the cursor is returned if any query is executed, since it's used by subclasses such as InsertQuery). It's possible, however, that no query is needed, as the filters describe an empty set. In that case, None is returned, to avoid any unnecessary database interaction. """ try: sql, params = self.as_sql() if not sql: raise EmptyResultSet except EmptyResultSet: if result_type == MULTI: return empty_iter() else: return cursor = self.connection.cursor() cursor.execute(sql, params) if not result_type: return cursor if result_type == SINGLE: if self.query.ordering_aliases: return cursor.fetchone()[:-len(self.query.ordering_aliases)] return cursor.fetchone() # The MULTI case. 
if self.query.ordering_aliases: result = order_modified_iter(cursor, len(self.query.ordering_aliases), self.connection.features.empty_fetchmany_value) else: result = iter((lambda: cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)), self.connection.features.empty_fetchmany_value) if not self.connection.features.can_use_chunked_reads: # If we are using non-chunked reads, we return the same data # structure as normally, but ensure it is all read into memory # before going any further. return list(result) return result class SQLInsertCompiler(SQLCompiler): def placeholder(self, field, val): if field is None: # A field value of None means the value is raw. return val elif hasattr(field, 'get_placeholder'): # Some fields (e.g. geo fields) need special munging before # they can be inserted. return field.get_placeholder(val, self.connection) else: # Return the common case for the placeholder return '%s' def as_sql(self): # We don't need quote_name_unless_alias() here, since these are all # going to be column names (so we can avoid the extra overhead). 
qn = self.connection.ops.quote_name opts = self.query.model._meta result = ['INSERT INTO %s' % qn(opts.db_table)] result.append('(%s)' % ', '.join([qn(c) for c in self.query.columns])) values = [self.placeholder(*v) for v in self.query.values] result.append('VALUES (%s)' % ', '.join(values)) params = self.query.params if self.return_id and self.connection.features.can_return_id_from_insert: col = "%s.%s" % (qn(opts.db_table), qn(opts.pk.column)) r_fmt, r_params = self.connection.ops.return_insert_id() result.append(r_fmt % col) params = params + r_params return ' '.join(result), params def execute_sql(self, return_id=False): self.return_id = return_id cursor = super(SQLInsertCompiler, self).execute_sql(None) if not (return_id and cursor): return if self.connection.features.can_return_id_from_insert: return self.connection.ops.fetch_returned_insert_id(cursor) return self.connection.ops.last_insert_id(cursor, self.query.model._meta.db_table, self.query.model._meta.pk.column) class SQLDeleteCompiler(SQLCompiler): def as_sql(self): """ Creates the SQL for this query. Returns the SQL string and list of parameters. """ assert len(self.query.tables) == 1, \ "Can only delete from one table at a time." qn = self.quote_name_unless_alias result = ['DELETE FROM %s' % qn(self.query.tables[0])] where, params = self.query.where.as_sql(qn=qn, connection=self.connection) result.append('WHERE %s' % where) return ' '.join(result), tuple(params) class SQLUpdateCompiler(SQLCompiler): def as_sql(self): """ Creates the SQL for this query. Returns the SQL string and list of parameters. 
""" from django.db.models.base import Model self.pre_sql_setup() if not self.query.values: return '', () table = self.query.tables[0] qn = self.quote_name_unless_alias result = ['UPDATE %s' % qn(table)] result.append('SET') values, update_params = [], [] for field, model, val in self.query.values: if hasattr(val, 'prepare_database_save'): val = val.prepare_database_save(field) else: val = field.get_db_prep_save(val, connection=self.connection) # Getting the placeholder for the field. if hasattr(field, 'get_placeholder'): placeholder = field.get_placeholder(val, self.connection) else: placeholder = '%s' if hasattr(val, 'evaluate'): val = SQLEvaluator(val, self.query, allow_joins=False) name = field.column if hasattr(val, 'as_sql'): sql, params = val.as_sql(qn, self.connection) values.append('%s = %s' % (qn(name), sql)) update_params.extend(params) elif val is not None: values.append('%s = %s' % (qn(name), placeholder)) update_params.append(val) else: values.append('%s = NULL' % qn(name)) if not values: return '', () result.append(', '.join(values)) where, params = self.query.where.as_sql(qn=qn, connection=self.connection) if where: result.append('WHERE %s' % where) return ' '.join(result), tuple(update_params + params) def execute_sql(self, result_type): """ Execute the specified update. Returns the number of rows affected by the primary update query. The "primary update query" is the first non-empty query that is executed. Row counts for any subsequent, related queries are not available. 
""" cursor = super(SQLUpdateCompiler, self).execute_sql(result_type) rows = cursor and cursor.rowcount or 0 is_empty = cursor is None del cursor for query in self.query.get_related_updates(): aux_rows = query.get_compiler(self.using).execute_sql(result_type) if is_empty: rows = aux_rows is_empty = False return rows def pre_sql_setup(self): """ If the update depends on results from other tables, we need to do some munging of the "where" conditions to match the format required for (portable) SQL updates. That is done here. Further, if we are going to be running multiple updates, we pull out the id values to update at this point so that they don't change as a result of the progressive updates. """ self.query.select_related = False self.query.clear_ordering(True) super(SQLUpdateCompiler, self).pre_sql_setup() count = self.query.count_active_tables() if not self.query.related_updates and count == 1: return # We need to use a sub-select in the where clause to filter on things # from other tables. query = self.query.clone(klass=Query) query.bump_prefix() query.extra = {} query.select = [] query.add_fields([query.model._meta.pk.name]) must_pre_select = count > 1 and not self.connection.features.update_can_self_select # Now we adjust the current query: reset the where clause and get rid # of all the tables we don't need (since they're in the sub-select). self.query.where = self.query.where_class() if self.query.related_updates or must_pre_select: # Either we're using the idents in multiple update queries (so # don't want them to change), or the db backend doesn't support # selecting from the updating table (e.g. MySQL). idents = [] for rows in query.get_compiler(self.using).execute_sql(MULTI): idents.extend([r[0] for r in rows]) self.query.add_filter(('pk__in', idents)) self.query.related_ids = idents else: # The fast path. Filters and updates in one query. 
self.query.add_filter(('pk__in', query)) for alias in self.query.tables[1:]: self.query.alias_refcount[alias] = 0 class SQLAggregateCompiler(SQLCompiler): def as_sql(self, qn=None): """ Creates the SQL for this query. Returns the SQL string and list of parameters. """ if qn is None: qn = self.quote_name_unless_alias sql = ('SELECT %s FROM (%s) subquery' % ( ', '.join([ aggregate.as_sql(qn, self.connection) for aggregate in self.query.aggregate_select.values() ]), self.query.subquery) ) params = self.query.sub_params return (sql, params) class SQLDateCompiler(SQLCompiler): def results_iter(self): """ Returns an iterator over the results from executing this query. """ resolve_columns = hasattr(self, 'resolve_columns') if resolve_columns: from django.db.models.fields import DateTimeField fields = [DateTimeField()] else: from django.db.backends.util import typecast_timestamp needs_string_cast = self.connection.features.needs_datetime_string_cast offset = len(self.query.extra_select) for rows in self.execute_sql(MULTI): for row in rows: date = row[offset] if resolve_columns: date = self.resolve_columns(row, fields)[offset] elif needs_string_cast: date = typecast_timestamp(str(date)) yield date def empty_iter(): """ Returns an iterator containing no results. """ yield iter([]).next() def order_modified_iter(cursor, trim, sentinel): """ Yields blocks of rows from a cursor. We use this iterator in the special case when extra output columns have been added to support ordering requirements. We must trim those extra columns before anything else can use the results, since they're only needed to make the SQL valid. """ for rows in iter((lambda: cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)), sentinel): yield [r[:-trim] for r in rows]
bsd-3-clause
skurochkin/selenium
py/selenium/webdriver/firefox/firefox_binary.py
32
8169
# Licensed to the Software Freedom Conservancy (SFC) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The SFC licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import os import platform from subprocess import Popen, STDOUT from selenium.common.exceptions import WebDriverException from selenium.webdriver.common import utils import time class FirefoxBinary(object): NO_FOCUS_LIBRARY_NAME = "x_ignore_nofocus.so" def __init__(self, firefox_path=None, log_file=None): """ Creates a new instance of Firefox binary. :Args: - firefox_path - Path to the Firefox executable. By default, it will be detected from the standard locations. - log_file - A file object to redirect the firefox process output to. It can be sys.stdout. Please note that with parallel run the output won't be synchronous. By default, it will be redirected to /dev/null. """ self._start_cmd = firefox_path # We used to default to subprocess.PIPE instead of /dev/null, but after # a while the pipe would fill up and Firefox would freeze. self._log_file = log_file or open(os.devnull, "wb") self.command_line = None if self._start_cmd is None: self._start_cmd = self._get_firefox_start_cmd() if not self._start_cmd.strip(): raise Exception("Failed to find firefox binary. 
You can set it by specifying the path to 'firefox_binary':\n\nfrom selenium.webdriver.firefox.firefox_binary import FirefoxBinary\n\n" + "binary = FirefoxBinary('/path/to/binary')\ndriver = webdriver.Firefox(firefox_binary=binary)") # Rather than modifying the environment of the calling Python process # copy it and modify as needed. self._firefox_env = os.environ.copy() self._firefox_env["MOZ_CRASHREPORTER_DISABLE"] = "1" self._firefox_env["MOZ_NO_REMOTE"] = "1" self._firefox_env["NO_EM_RESTART"] = "1" def add_command_line_options(self, *args): self.command_line = args def launch_browser(self, profile): """Launches the browser for the given profile name. It is assumed the profile already exists. """ self.profile = profile self._start_from_profile_path(self.profile.path) self._wait_until_connectable() def kill(self): """Kill the browser. This is useful when the browser is stuck. """ if self.process: self.process.kill() self.process.wait() def _start_from_profile_path(self, path): self._firefox_env["XRE_PROFILE_PATH"] = path if platform.system().lower() == 'linux': self._modify_link_library_path() command = [self._start_cmd, "-foreground"] if self.command_line is not None: for cli in self.command_line: command.append(cli) self.process = Popen( command, stdout=self._log_file, stderr=STDOUT, env=self._firefox_env) def _wait_until_connectable(self): """Blocks until the extension is connectable in the firefox.""" count = 0 while not utils.is_connectable(self.profile.port): if self.process.poll() is not None: # Browser has exited raise WebDriverException("The browser appears to have exited " "before we could connect. If you specified a log_file in " "the FirefoxBinary constructor, check it for details.") if count == 30: self.kill() raise WebDriverException("Can't load the profile. 
Profile " "Dir: %s If you specified a log_file in the " "FirefoxBinary constructor, check it for details.") count += 1 time.sleep(1) return True def _find_exe_in_registry(self): try: from _winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE, HKEY_CURRENT_USER except ImportError: from winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE, HKEY_CURRENT_USER import shlex keys = ( r"SOFTWARE\Classes\FirefoxHTML\shell\open\command", r"SOFTWARE\Classes\Applications\firefox.exe\shell\open\command" ) command = "" for path in keys: try: key = OpenKey(HKEY_LOCAL_MACHINE, path) command = QueryValue(key, "") break except OSError: try: key = OpenKey(HKEY_CURRENT_USER, path) command = QueryValue(key, "") break except OSError: pass else: return "" if not command: return "" return shlex.split(command)[0] def _get_firefox_start_cmd(self): """Return the command to start firefox.""" start_cmd = "" if platform.system() == "Darwin": start_cmd = ("/Applications/Firefox.app/Contents/MacOS/firefox-bin") elif platform.system() == "Windows": start_cmd = (self._find_exe_in_registry() or self._default_windows_location()) elif platform.system() == 'Java' and os._name == 'nt': start_cmd = self._default_windows_location() else: for ffname in ["firefox", "iceweasel"]: start_cmd = self.which(ffname) if start_cmd is not None: break else: # couldn't find firefox on the system path raise RuntimeError("Could not find firefox in your system PATH." 
+ " Please specify the firefox binary location or install firefox") return start_cmd def _default_windows_location(self): program_files = [os.getenv("PROGRAMFILES", r"C:\Program Files"), os.getenv("PROGRAMFILES(X86)", r"C:\Program Files (x86)")] for path in program_files: binary_path = os.path.join(path, r"Mozilla Firefox\firefox.exe") if os.access(binary_path, os.X_OK): return binary_path return "" def _modify_link_library_path(self): existing_ld_lib_path = os.environ.get('LD_LIBRARY_PATH', '') new_ld_lib_path = self._extract_and_check( self.profile, self.NO_FOCUS_LIBRARY_NAME, "x86", "amd64") new_ld_lib_path += existing_ld_lib_path self._firefox_env["LD_LIBRARY_PATH"] = new_ld_lib_path self._firefox_env['LD_PRELOAD'] = self.NO_FOCUS_LIBRARY_NAME def _extract_and_check(self, profile, no_focus_so_name, x86, amd64): paths = [x86, amd64] built_path = "" for path in paths: library_path = os.path.join(profile.path, path) os.makedirs(library_path) import shutil shutil.copy(os.path.join(os.path.dirname(__file__), path, self.NO_FOCUS_LIBRARY_NAME), library_path) built_path += library_path + ":" return built_path def which(self, fname): """Returns the fully qualified path by searching Path of the given name""" for pe in os.environ['PATH'].split(os.pathsep): checkname = os.path.join(pe, fname) if os.access(checkname, os.X_OK) and not os.path.isdir(checkname): return checkname return None
apache-2.0
lexus24/w16b_test
static/Brython3.1.3-20150514-095342/Lib/textwrap.py
745
16488
"""Text wrapping and filling. """ # Copyright (C) 1999-2001 Gregory P. Ward. # Copyright (C) 2002, 2003 Python Software Foundation. # Written by Greg Ward <gward@python.net> import re __all__ = ['TextWrapper', 'wrap', 'fill', 'dedent', 'indent'] # Hardcode the recognized whitespace characters to the US-ASCII # whitespace characters. The main reason for doing this is that in # ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales # that character winds up in string.whitespace. Respecting # string.whitespace in those cases would 1) make textwrap treat 0xa0 the # same as any other whitespace char, which is clearly wrong (it's a # *non-breaking* space), 2) possibly cause problems with Unicode, # since 0xa0 is not in range(128). _whitespace = '\t\n\x0b\x0c\r ' class TextWrapper: """ Object for wrapping/filling text. The public interface consists of the wrap() and fill() methods; the other methods are just there for subclasses to override in order to tweak the default behaviour. If you want to completely replace the main wrapping algorithm, you'll probably have to override _wrap_chunks(). Several instance attributes control various aspects of wrapping: width (default: 70) the maximum width of wrapped lines (unless break_long_words is false) initial_indent (default: "") string that will be prepended to the first line of wrapped output. Counts towards the line's width. subsequent_indent (default: "") string that will be prepended to all lines save the first of wrapped output; also counts towards each line's width. expand_tabs (default: true) Expand tabs in input text to spaces before further processing. Each tab will become 0 .. 'tabsize' spaces, depending on its position in its line. If false, each tab is treated as a single character. tabsize (default: 8) Expand tabs in input text to 0 .. 'tabsize' spaces, unless 'expand_tabs' is false. replace_whitespace (default: true) Replace all whitespace characters in the input text by spaces after tab expansion. 
Note that if expand_tabs is false and replace_whitespace is true, every tab will be converted to a single space! fix_sentence_endings (default: false) Ensure that sentence-ending punctuation is always followed by two spaces. Off by default because the algorithm is (unavoidably) imperfect. break_long_words (default: true) Break words longer than 'width'. If false, those words will not be broken, and some lines might be longer than 'width'. break_on_hyphens (default: true) Allow breaking hyphenated words. If true, wrapping will occur preferably on whitespaces and right after hyphens part of compound words. drop_whitespace (default: true) Drop leading and trailing whitespace from lines. """ unicode_whitespace_trans = {} uspace = ord(' ') for x in _whitespace: unicode_whitespace_trans[ord(x)] = uspace # This funky little regex is just the trick for splitting # text up into word-wrappable chunks. E.g. # "Hello there -- you goof-ball, use the -b option!" # splits into # Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option! # (after stripping out empty strings). wordsep_re = re.compile( r'(\s+|' # any whitespace r'[^\s\w]*\w+[^0-9\W]-(?=\w+[^0-9\W])|' # hyphenated words r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash # This less funky little regex just split on recognized spaces. E.g. # "Hello there -- you goof-ball, use the -b option!" # splits into # Hello/ /there/ /--/ /you/ /goof-ball,/ /use/ /the/ /-b/ /option!/ wordsep_simple_re = re.compile(r'(\s+)') # XXX this is not locale- or charset-aware -- string.lowercase # is US-ASCII only (and therefore English-only) sentence_end_re = re.compile(r'[a-z]' # lowercase letter r'[\.\!\?]' # sentence-ending punct. r'[\"\']?' 
# optional end-of-quote r'\Z') # end of chunk def __init__(self, width=70, initial_indent="", subsequent_indent="", expand_tabs=True, replace_whitespace=True, fix_sentence_endings=False, break_long_words=True, drop_whitespace=True, break_on_hyphens=True, tabsize=8): self.width = width self.initial_indent = initial_indent self.subsequent_indent = subsequent_indent self.expand_tabs = expand_tabs self.replace_whitespace = replace_whitespace self.fix_sentence_endings = fix_sentence_endings self.break_long_words = break_long_words self.drop_whitespace = drop_whitespace self.break_on_hyphens = break_on_hyphens self.tabsize = tabsize # -- Private methods ----------------------------------------------- # (possibly useful for subclasses to override) def _munge_whitespace(self, text): """_munge_whitespace(text : string) -> string Munge whitespace in text: expand tabs and convert all other whitespace characters to spaces. Eg. " foo\tbar\n\nbaz" becomes " foo bar baz". """ if self.expand_tabs: text = text.expandtabs(self.tabsize) if self.replace_whitespace: text = text.translate(self.unicode_whitespace_trans) return text def _split(self, text): """_split(text : string) -> [string] Split the text to wrap into indivisible chunks. Chunks are not quite the same as words; see _wrap_chunks() for full details. As an example, the text Look, goof-ball -- use the -b option! breaks into the following chunks: 'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ', 'use', ' ', 'the', ' ', '-b', ' ', 'option!' if break_on_hyphens is True, or in: 'Look,', ' ', 'goof-ball', ' ', '--', ' ', 'use', ' ', 'the', ' ', '-b', ' ', option!' otherwise. """ if self.break_on_hyphens is True: chunks = self.wordsep_re.split(text) else: chunks = self.wordsep_simple_re.split(text) chunks = [c for c in chunks if c] return chunks def _fix_sentence_endings(self, chunks): """_fix_sentence_endings(chunks : [string]) Correct for sentence endings buried in 'chunks'. Eg. when the original text contains "... 
foo.\nBar ...", munge_whitespace() and split() will convert that to [..., "foo.", " ", "Bar", ...] which has one too few spaces; this method simply changes the one space to two. """ i = 0 patsearch = self.sentence_end_re.search while i < len(chunks)-1: if chunks[i+1] == " " and patsearch(chunks[i]): chunks[i+1] = " " i += 2 else: i += 1 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): """_handle_long_word(chunks : [string], cur_line : [string], cur_len : int, width : int) Handle a chunk of text (most likely a word, not whitespace) that is too long to fit in any line. """ # Figure out when indent is larger than the specified width, and make # sure at least one character is stripped off on every pass if width < 1: space_left = 1 else: space_left = width - cur_len # If we're allowed to break long words, then do so: put as much # of the next chunk onto the current line as will fit. if self.break_long_words: cur_line.append(reversed_chunks[-1][:space_left]) reversed_chunks[-1] = reversed_chunks[-1][space_left:] # Otherwise, we have to preserve the long word intact. Only add # it to the current line if there's nothing already there -- # that minimizes how much we violate the width constraint. elif not cur_line: cur_line.append(reversed_chunks.pop()) # If we're not allowed to break long words, and there's already # text on the current line, do nothing. Next time through the # main loop of _wrap_chunks(), we'll wind up here again, but # cur_len will be zero, so the next line will be entirely # devoted to the long word that we can't handle right now. def _wrap_chunks(self, chunks): """_wrap_chunks(chunks : [string]) -> [string] Wrap a sequence of text chunks and return a list of lines of length 'self.width' or less. (If 'break_long_words' is false, some lines may be longer than this.) 
Chunks correspond roughly to words and the whitespace between them: each chunk is indivisible (modulo 'break_long_words'), but a line break can come between any two chunks. Chunks should not have internal whitespace; ie. a chunk is either all whitespace or a "word". Whitespace chunks will be removed from the beginning and end of lines, but apart from that whitespace is preserved. """ lines = [] if self.width <= 0: raise ValueError("invalid width %r (must be > 0)" % self.width) # Arrange in reverse order so items can be efficiently popped # from a stack of chucks. chunks.reverse() while chunks: # Start the list of chunks that will make up the current line. # cur_len is just the length of all the chunks in cur_line. cur_line = [] cur_len = 0 # Figure out which static string will prefix this line. if lines: indent = self.subsequent_indent else: indent = self.initial_indent # Maximum width for this line. width = self.width - len(indent) # First chunk on line is whitespace -- drop it, unless this # is the very beginning of the text (ie. no lines started yet). if self.drop_whitespace and chunks[-1].strip() == '' and lines: del chunks[-1] while chunks: l = len(chunks[-1]) # Can at least squeeze this chunk onto the current line. if cur_len + l <= width: cur_line.append(chunks.pop()) cur_len += l # Nope, this line is full. else: break # The current line is full, and the next chunk is too big to # fit on *any* line (not just this one). if chunks and len(chunks[-1]) > width: self._handle_long_word(chunks, cur_line, cur_len, width) # If the last chunk on this line is all whitespace, drop it. if self.drop_whitespace and cur_line and cur_line[-1].strip() == '': del cur_line[-1] # Convert current line back to a string and store it in list # of all lines (return value). 
if cur_line: lines.append(indent + ''.join(cur_line)) return lines # -- Public interface ---------------------------------------------- def wrap(self, text): """wrap(text : string) -> [string] Reformat the single paragraph in 'text' so it fits in lines of no more than 'self.width' columns, and return a list of wrapped lines. Tabs in 'text' are expanded with string.expandtabs(), and all other whitespace characters (including newline) are converted to space. """ text = self._munge_whitespace(text) chunks = self._split(text) if self.fix_sentence_endings: self._fix_sentence_endings(chunks) return self._wrap_chunks(chunks) def fill(self, text): """fill(text : string) -> string Reformat the single paragraph in 'text' to fit in lines of no more than 'self.width' columns, and return a new string containing the entire wrapped paragraph. """ return "\n".join(self.wrap(text)) # -- Convenience interface --------------------------------------------- def wrap(text, width=70, **kwargs): """Wrap a single paragraph of text, returning a list of wrapped lines. Reformat the single paragraph in 'text' so it fits in lines of no more than 'width' columns, and return a list of wrapped lines. By default, tabs in 'text' are expanded with string.expandtabs(), and all other whitespace characters (including newline) are converted to space. See TextWrapper class for available keyword args to customize wrapping behaviour. """ w = TextWrapper(width=width, **kwargs) return w.wrap(text) def fill(text, width=70, **kwargs): """Fill a single paragraph of text, returning a new string. Reformat the single paragraph in 'text' to fit in lines of no more than 'width' columns, and return a new string containing the entire wrapped paragraph. As with wrap(), tabs are expanded and other whitespace characters converted to space. See TextWrapper class for available keyword args to customize wrapping behaviour. 
""" w = TextWrapper(width=width, **kwargs) return w.fill(text) # -- Loosely related functionality ------------------------------------- _whitespace_only_re = re.compile('^[ \t]+$', re.MULTILINE) _leading_whitespace_re = re.compile('(^[ \t]*)(?:[^ \t\n])', re.MULTILINE) def dedent(text): """Remove any common leading whitespace from every line in `text`. This can be used to make triple-quoted strings line up with the left edge of the display, while still presenting them in the source code in indented form. Note that tabs and spaces are both treated as whitespace, but they are not equal: the lines " hello" and "\thello" are considered to have no common leading whitespace. (This behaviour is new in Python 2.5; older versions of this module incorrectly expanded tabs before searching for common leading whitespace.) """ # Look for the longest leading string of spaces and tabs common to # all lines. margin = None text = _whitespace_only_re.sub('', text) indents = _leading_whitespace_re.findall(text) for indent in indents: if margin is None: margin = indent # Current line more deeply indented than previous winner: # no change (previous winner is still on top). elif indent.startswith(margin): pass # Current line consistent with and no deeper than previous winner: # it's the new winner. elif margin.startswith(indent): margin = indent # Current line and previous winner have no common whitespace: # there is no margin. else: margin = "" break # sanity check (testing/debugging only) if 0 and margin: for line in text.split("\n"): assert not line or line.startswith(margin), \ "line = %r, margin = %r" % (line, margin) if margin: text = re.sub(r'(?m)^' + margin, '', text) return text def indent(text, prefix, predicate=None): """Adds 'prefix' to the beginning of selected lines in 'text'. If 'predicate' is provided, 'prefix' will only be added to the lines where 'predicate(line)' is True. 
If 'predicate' is not provided, it will default to adding 'prefix' to all non-empty lines that do not consist solely of whitespace characters. """ if predicate is None: def predicate(line): return line.strip() def prefixed_lines(): for line in text.splitlines(True): yield (prefix + line if predicate(line) else line) return ''.join(prefixed_lines()) if __name__ == "__main__": #print dedent("\tfoo\n\tbar") #print dedent(" \thello there\n \t how are you?") print(dedent("Hello there.\n This is indented."))
agpl-3.0
foodszhang/kbengine
kbe/src/lib/python/Lib/ctypes/test/test_internals.py
113
2631
# This tests the internal _objects attribute import unittest from ctypes import * from sys import getrefcount as grc # XXX This test must be reviewed for correctness!!! # ctypes' types are container types. # # They have an internal memory block, which only consists of some bytes, # but it has to keep references to other objects as well. This is not # really needed for trivial C types like int or char, but it is important # for aggregate types like strings or pointers in particular. # # What about pointers? class ObjectsTestCase(unittest.TestCase): def assertSame(self, a, b): self.assertEqual(id(a), id(b)) def test_ints(self): i = 42000123 refcnt = grc(i) ci = c_int(i) self.assertEqual(refcnt, grc(i)) self.assertEqual(ci._objects, None) def test_c_char_p(self): s = b"Hello, World" refcnt = grc(s) cs = c_char_p(s) self.assertEqual(refcnt + 1, grc(s)) self.assertSame(cs._objects, s) def test_simple_struct(self): class X(Structure): _fields_ = [("a", c_int), ("b", c_int)] a = 421234 b = 421235 x = X() self.assertEqual(x._objects, None) x.a = a x.b = b self.assertEqual(x._objects, None) def test_embedded_structs(self): class X(Structure): _fields_ = [("a", c_int), ("b", c_int)] class Y(Structure): _fields_ = [("x", X), ("y", X)] y = Y() self.assertEqual(y._objects, None) x1, x2 = X(), X() y.x, y.y = x1, x2 self.assertEqual(y._objects, {"0": {}, "1": {}}) x1.a, x2.b = 42, 93 self.assertEqual(y._objects, {"0": {}, "1": {}}) def test_xxx(self): class X(Structure): _fields_ = [("a", c_char_p), ("b", c_char_p)] class Y(Structure): _fields_ = [("x", X), ("y", X)] s1 = b"Hello, World" s2 = b"Hallo, Welt" x = X() x.a = s1 x.b = s2 self.assertEqual(x._objects, {"0": s1, "1": s2}) y = Y() y.x = x self.assertEqual(y._objects, {"0": {"0": s1, "1": s2}}) ## x = y.x ## del y ## print x._b_base_._objects def test_ptr_struct(self): class X(Structure): _fields_ = [("data", POINTER(c_int))] A = c_int*4 a = A(11, 22, 33, 44) self.assertEqual(a._objects, None) x = X() x.data = a ##XXX 
print x._objects ##XXX print x.data[0] ##XXX print x.data._objects if __name__ == '__main__': unittest.main()
lgpl-3.0
ericlink/adms-server
playframework-dist/play-1.1/python/Lib/unittest.py
2
30209
#!/usr/bin/env python ''' Python unit testing framework, based on Erich Gamma's JUnit and Kent Beck's Smalltalk testing framework. This module contains the core framework classes that form the basis of specific test cases and suites (TestCase, TestSuite etc.), and also a text-based utility class for running the tests and reporting the results (TextTestRunner). Simple usage: import unittest class IntegerArithmenticTestCase(unittest.TestCase): def testAdd(self): ## test method names begin 'test*' self.assertEquals((1 + 2), 3) self.assertEquals(0 + 1, 1) def testMultiply(self): self.assertEquals((0 * 10), 0) self.assertEquals((5 * 8), 40) if __name__ == '__main__': unittest.main() Further information is available in the bundled documentation, and from http://pyunit.sourceforge.net/ Copyright (c) 1999-2003 Steve Purcell This module is free software, and you may redistribute it and/or modify it under the same terms as Python itself, so long as this copyright message and disclaimer are retained in their original form. IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
''' __author__ = "Steve Purcell" __email__ = "stephen_purcell at yahoo dot com" __version__ = "#Revision: 1.63 $"[11:-2] import time import sys import traceback import os import types ############################################################################## # Exported classes and functions ############################################################################## __all__ = ['TestResult', 'TestCase', 'TestSuite', 'TextTestRunner', 'TestLoader', 'FunctionTestCase', 'main', 'defaultTestLoader'] # Expose obsolete functions for backwards compatibility __all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases']) ############################################################################## # Backward compatibility ############################################################################## if sys.version_info[:2] < (2, 2): False, True = 0, 1 def isinstance(obj, clsinfo): import __builtin__ if type(clsinfo) in (tuple, list): for cls in clsinfo: if cls is type: cls = types.ClassType if __builtin__.isinstance(obj, cls): return 1 return 0 else: return __builtin__.isinstance(obj, clsinfo) ############################################################################## # Test framework core ############################################################################## # All classes defined herein are 'new-style' classes, allowing use of 'super()' __metaclass__ = type def _strclass(cls): return "%s.%s" % (cls.__module__, cls.__name__) __unittest = 1 class TestResult: """Holder for test result information. Test results are automatically managed by the TestCase and TestSuite classes, and do not need to be explicitly manipulated by writers of tests. Each instance holds the total number of tests run, and collections of failures and errors that occurred among those test runs. The collections contain tuples of (testcase, exceptioninfo), where exceptioninfo is the formatted traceback of the error that occurred. 
""" def __init__(self): self.failures = [] self.errors = [] self.testsRun = 0 self.shouldStop = 0 def startTest(self, test): "Called when the given test is about to be run" self.testsRun = self.testsRun + 1 def stopTest(self, test): "Called when the given test has been run" pass def addError(self, test, err): """Called when an error has occurred. 'err' is a tuple of values as returned by sys.exc_info(). """ self.errors.append((test, self._exc_info_to_string(err, test))) def addFailure(self, test, err): """Called when an error has occurred. 'err' is a tuple of values as returned by sys.exc_info().""" self.failures.append((test, self._exc_info_to_string(err, test))) def addSuccess(self, test): "Called when a test has completed successfully" pass def wasSuccessful(self): "Tells whether or not this result was a success" return len(self.failures) == len(self.errors) == 0 def stop(self): "Indicates that the tests should be aborted" self.shouldStop = True def _exc_info_to_string(self, err, test): """Converts a sys.exc_info()-style tuple of values into a string.""" exctype, value, tb = err # Skip test runner traceback levels while tb and self._is_relevant_tb_level(tb): tb = tb.tb_next if exctype is test.failureException: # Skip assert*() traceback levels length = self._count_relevant_tb_levels(tb) return ''.join(traceback.format_exception(exctype, value, tb, length)) return ''.join(traceback.format_exception(exctype, value, tb)) def _is_relevant_tb_level(self, tb): return tb.tb_frame.f_globals.has_key('__unittest') def _count_relevant_tb_levels(self, tb): length = 0 while tb and not self._is_relevant_tb_level(tb): length += 1 tb = tb.tb_next return length def __repr__(self): return "<%s run=%i errors=%i failures=%i>" % \ (_strclass(self.__class__), self.testsRun, len(self.errors), len(self.failures)) class TestCase: """A class whose instances are single test cases. By default, the test code itself should be placed in a method named 'runTest'. 
If the fixture may be used for many test cases, create as many test methods as are needed. When instantiating such a TestCase subclass, specify in the constructor arguments the name of the test method that the instance is to execute. Test authors should subclass TestCase for their own tests. Construction and deconstruction of the test's environment ('fixture') can be implemented by overriding the 'setUp' and 'tearDown' methods respectively. If it is necessary to override the __init__ method, the base class __init__ method must always be called. It is important that subclasses should not change the signature of their __init__ method, since instances of the classes are instantiated automatically by parts of the framework in order to be run. """ # This attribute determines which exception will be raised when # the instance's assertion methods fail; test methods raising this # exception will be deemed to have 'failed' rather than 'errored' failureException = AssertionError def __init__(self, methodName='runTest'): """Create an instance of the class that will use the named test method when executed. Raises a ValueError if the instance does not have a method with the specified name. """ try: self._testMethodName = methodName testMethod = getattr(self, methodName) self._testMethodDoc = testMethod.__doc__ except AttributeError: raise ValueError, "no such test method in %s: %s" % \ (self.__class__, methodName) def setUp(self): "Hook method for setting up the test fixture before exercising it." pass def tearDown(self): "Hook method for deconstructing the test fixture after testing it." pass def countTestCases(self): return 1 def defaultTestResult(self): return TestResult() def shortDescription(self): """Returns a one-line description of the test, or None if no description has been provided. The default implementation of this method returns the first line of the specified test method's docstring. 
""" doc = self._testMethodDoc return doc and doc.split("\n")[0].strip() or None def id(self): return "%s.%s" % (_strclass(self.__class__), self._testMethodName) def __str__(self): return "%s (%s)" % (self._testMethodName, _strclass(self.__class__)) def __repr__(self): return "<%s testMethod=%s>" % \ (_strclass(self.__class__), self._testMethodName) def run(self, result=None): if result is None: result = self.defaultTestResult() result.startTest(self) testMethod = getattr(self, self._testMethodName) try: try: self.setUp() except KeyboardInterrupt: raise except: result.addError(self, self._exc_info()) return ok = False try: testMethod() ok = True except self.failureException: result.addFailure(self, self._exc_info()) except KeyboardInterrupt: raise except: result.addError(self, self._exc_info()) try: self.tearDown() except KeyboardInterrupt: raise except: result.addError(self, self._exc_info()) ok = False if ok: result.addSuccess(self) finally: result.stopTest(self) def __call__(self, *args, **kwds): return self.run(*args, **kwds) def debug(self): """Run the test without collecting errors in a TestResult""" self.setUp() getattr(self, self._testMethodName)() self.tearDown() def _exc_info(self): """Return a version of sys.exc_info() with the traceback frame minimised; usually the top level of the traceback frame is not needed. """ exctype, excvalue, tb = sys.exc_info() if sys.platform[:4] == 'java': ## tracebacks look different in Jython return (exctype, excvalue, tb) return (exctype, excvalue, tb) def fail(self, msg=None): """Fail immediately, with the given message.""" raise self.failureException, msg def failIf(self, expr, msg=None): "Fail the test if the expression is true." 
if expr: raise self.failureException, msg def failUnless(self, expr, msg=None): """Fail the test unless the expression is true.""" if not expr: raise self.failureException, msg def failUnlessRaises(self, excClass, callableObj, *args, **kwargs): """Fail unless an exception of class excClass is thrown by callableObj when invoked with arguments args and keyword arguments kwargs. If a different type of exception is thrown, it will not be caught, and the test case will be deemed to have suffered an error, exactly as for an unexpected exception. """ try: callableObj(*args, **kwargs) except excClass: return else: if hasattr(excClass,'__name__'): excName = excClass.__name__ else: excName = str(excClass) raise self.failureException, "%s not raised" % excName def failUnlessEqual(self, first, second, msg=None): """Fail if the two objects are unequal as determined by the '==' operator. """ if not first == second: raise self.failureException, \ (msg or '%r != %r' % (first, second)) def failIfEqual(self, first, second, msg=None): """Fail if the two objects are equal as determined by the '==' operator. """ if first == second: raise self.failureException, \ (msg or '%r == %r' % (first, second)) def failUnlessAlmostEqual(self, first, second, places=7, msg=None): """Fail if the two objects are unequal as determined by their difference rounded to the given number of decimal places (default 7) and comparing to zero. Note that decimal places (from zero) are usually not the same as significant digits (measured from the most signficant digit). """ if round(second-first, places) != 0: raise self.failureException, \ (msg or '%r != %r within %r places' % (first, second, places)) def failIfAlmostEqual(self, first, second, places=7, msg=None): """Fail if the two objects are equal as determined by their difference rounded to the given number of decimal places (default 7) and comparing to zero. 
Note that decimal places (from zero) are usually not the same as significant digits (measured from the most signficant digit). """ if round(second-first, places) == 0: raise self.failureException, \ (msg or '%r == %r within %r places' % (first, second, places)) # Synonyms for assertion methods assertEqual = assertEquals = failUnlessEqual assertNotEqual = assertNotEquals = failIfEqual assertAlmostEqual = assertAlmostEquals = failUnlessAlmostEqual assertNotAlmostEqual = assertNotAlmostEquals = failIfAlmostEqual assertRaises = failUnlessRaises assert_ = assertTrue = failUnless assertFalse = failIf class TestSuite: """A test suite is a composite test consisting of a number of TestCases. For use, create an instance of TestSuite, then add test case instances. When all tests have been added, the suite can be passed to a test runner, such as TextTestRunner. It will run the individual test cases in the order in which they were added, aggregating the results. When subclassing, do not forget to call the base class constructor. 
""" def __init__(self, tests=()): self._tests = [] self.addTests(tests) def __repr__(self): return "<%s tests=%s>" % (_strclass(self.__class__), self._tests) __str__ = __repr__ def __iter__(self): return iter(self._tests) def countTestCases(self): cases = 0 for test in self._tests: cases += test.countTestCases() return cases def addTest(self, test): # sanity checks if not callable(test): raise TypeError("the test to add must be callable") if (isinstance(test, (type, types.ClassType)) and issubclass(test, (TestCase, TestSuite))): raise TypeError("TestCases and TestSuites must be instantiated " "before passing them to addTest()") self._tests.append(test) def addTests(self, tests): if isinstance(tests, basestring): raise TypeError("tests must be an iterable of tests, not a string") for test in tests: self.addTest(test) def run(self, result): for test in self._tests: if result.shouldStop: break test(result) return result def __call__(self, *args, **kwds): return self.run(*args, **kwds) def debug(self): """Run the tests without collecting errors in a TestResult""" for test in self._tests: test.debug() class FunctionTestCase(TestCase): """A test case that wraps a test function. This is useful for slipping pre-existing test functions into the PyUnit framework. Optionally, set-up and tidy-up functions can be supplied. As with TestCase, the tidy-up ('tearDown') function will always be called if the set-up ('setUp') function ran successfully. 
""" def __init__(self, testFunc, setUp=None, tearDown=None, description=None): TestCase.__init__(self) self.__setUpFunc = setUp self.__tearDownFunc = tearDown self.__testFunc = testFunc self.__description = description def setUp(self): if self.__setUpFunc is not None: self.__setUpFunc() def tearDown(self): if self.__tearDownFunc is not None: self.__tearDownFunc() def runTest(self): self.__testFunc() def id(self): return self.__testFunc.__name__ def __str__(self): return "%s (%s)" % (_strclass(self.__class__), self.__testFunc.__name__) def __repr__(self): return "<%s testFunc=%s>" % (_strclass(self.__class__), self.__testFunc) def shortDescription(self): if self.__description is not None: return self.__description doc = self.__testFunc.__doc__ return doc and doc.split("\n")[0].strip() or None ############################################################################## # Locating and loading tests ############################################################################## class TestLoader: """This class is responsible for loading tests according to various criteria and returning them wrapped in a Test """ testMethodPrefix = 'test' sortTestMethodsUsing = cmp suiteClass = TestSuite def loadTestsFromTestCase(self, testCaseClass): """Return a suite of all tests cases contained in testCaseClass""" if issubclass(testCaseClass, TestSuite): raise TypeError("Test cases should not be derived from TestSuite. 
Maybe you meant to derive from TestCase?") testCaseNames = self.getTestCaseNames(testCaseClass) if not testCaseNames and hasattr(testCaseClass, 'runTest'): testCaseNames = ['runTest'] return self.suiteClass(map(testCaseClass, testCaseNames)) def loadTestsFromModule(self, module): """Return a suite of all tests cases contained in the given module""" tests = [] for name in dir(module): obj = getattr(module, name) if (isinstance(obj, (type, types.ClassType)) and issubclass(obj, TestCase)): tests.append(self.loadTestsFromTestCase(obj)) return self.suiteClass(tests) def loadTestsFromName(self, name, module=None): """Return a suite of all tests cases given a string specifier. The name may resolve either to a module, a test case class, a test method within a test case class, or a callable object which returns a TestCase or TestSuite instance. The method optionally resolves the names relative to a given module. """ parts = name.split('.') if module is None: parts_copy = parts[:] while parts_copy: try: module = __import__('.'.join(parts_copy)) break except ImportError: del parts_copy[-1] if not parts_copy: raise parts = parts[1:] obj = module for part in parts: parent, obj = obj, getattr(obj, part) if type(obj) == types.ModuleType: return self.loadTestsFromModule(obj) elif (isinstance(obj, (type, types.ClassType)) and issubclass(obj, TestCase)): return self.loadTestsFromTestCase(obj) elif type(obj) == types.UnboundMethodType: return parent(obj.__name__) elif isinstance(obj, TestSuite): return obj elif callable(obj): test = obj() if not isinstance(test, (TestCase, TestSuite)): raise ValueError, \ "calling %s returned %s, not a test" % (obj,test) return test else: raise ValueError, "don't know how to make test from: %s" % obj def loadTestsFromNames(self, names, module=None): """Return a suite of all tests cases found using the given sequence of string specifiers. See 'loadTestsFromName()'. 
""" suites = [self.loadTestsFromName(name, module) for name in names] return self.suiteClass(suites) def getTestCaseNames(self, testCaseClass): """Return a sorted sequence of method names found within testCaseClass """ def isTestMethod(attrname, testCaseClass=testCaseClass, prefix=self.testMethodPrefix): return attrname.startswith(prefix) and callable(getattr(testCaseClass, attrname)) testFnNames = filter(isTestMethod, dir(testCaseClass)) for baseclass in testCaseClass.__bases__: for testFnName in self.getTestCaseNames(baseclass): if testFnName not in testFnNames: # handle overridden methods testFnNames.append(testFnName) if self.sortTestMethodsUsing: testFnNames.sort(self.sortTestMethodsUsing) return testFnNames defaultTestLoader = TestLoader() ############################################################################## # Patches for old functions: these functions should be considered obsolete ############################################################################## def _makeLoader(prefix, sortUsing, suiteClass=None): loader = TestLoader() loader.sortTestMethodsUsing = sortUsing loader.testMethodPrefix = prefix if suiteClass: loader.suiteClass = suiteClass return loader def getTestCaseNames(testCaseClass, prefix, sortUsing=cmp): return _makeLoader(prefix, sortUsing).getTestCaseNames(testCaseClass) def makeSuite(testCaseClass, prefix='test', sortUsing=cmp, suiteClass=TestSuite): return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromTestCase(testCaseClass) def findTestCases(module, prefix='test', sortUsing=cmp, suiteClass=TestSuite): return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromModule(module) ############################################################################## # Text UI ############################################################################## class _WritelnDecorator: """Used to decorate file-like objects with a handy 'writeln' method""" def __init__(self,stream): self.stream = stream def __getattr__(self, attr): 
return getattr(self.stream,attr) def writeln(self, arg=None): if arg: self.write(arg) self.write('\n') # text-mode streams translate to \r\n if needed class _TextTestResult(TestResult): """A test result class that can print formatted text results to a stream. Used by TextTestRunner. """ separator1 = '=' * 70 separator2 = '-' * 70 def __init__(self, stream, descriptions, verbosity): TestResult.__init__(self) self.stream = stream self.showAll = verbosity > 1 self.dots = verbosity == 1 self.descriptions = descriptions def getDescription(self, test): if self.descriptions: return test.shortDescription() or str(test) else: return str(test) def startTest(self, test): TestResult.startTest(self, test) if self.showAll: self.stream.write(self.getDescription(test)) self.stream.write(" ... ") def addSuccess(self, test): TestResult.addSuccess(self, test) if self.showAll: self.stream.writeln("ok") elif self.dots: self.stream.write('.') def addError(self, test, err): TestResult.addError(self, test, err) if self.showAll: self.stream.writeln("ERROR") elif self.dots: self.stream.write('E') def addFailure(self, test, err): TestResult.addFailure(self, test, err) if self.showAll: self.stream.writeln("FAIL") elif self.dots: self.stream.write('F') def printErrors(self): if self.dots or self.showAll: self.stream.writeln() self.printErrorList('ERROR', self.errors) self.printErrorList('FAIL', self.failures) def printErrorList(self, flavour, errors): for test, err in errors: self.stream.writeln(self.separator1) self.stream.writeln("%s: %s" % (flavour,self.getDescription(test))) self.stream.writeln(self.separator2) self.stream.writeln("%s" % err) class TextTestRunner: """A test runner class that displays results in textual form. It prints out the names of tests as they are run, errors as they occur, and a summary of the results at the end of the test run. 
""" def __init__(self, stream=sys.stderr, descriptions=1, verbosity=1): self.stream = _WritelnDecorator(stream) self.descriptions = descriptions self.verbosity = verbosity def _makeResult(self): return _TextTestResult(self.stream, self.descriptions, self.verbosity) def run(self, test): "Run the given test case or test suite." result = self._makeResult() startTime = time.time() test(result) stopTime = time.time() timeTaken = stopTime - startTime result.printErrors() self.stream.writeln(result.separator2) run = result.testsRun self.stream.writeln("Ran %d test%s in %.3fs" % (run, run != 1 and "s" or "", timeTaken)) self.stream.writeln() if not result.wasSuccessful(): self.stream.write("FAILED (") failed, errored = map(len, (result.failures, result.errors)) if failed: self.stream.write("failures=%d" % failed) if errored: if failed: self.stream.write(", ") self.stream.write("errors=%d" % errored) self.stream.writeln(")") else: self.stream.writeln("OK") return result ############################################################################## # Facilities for running tests from the command line ############################################################################## class TestProgram: """A command-line program that runs a set of tests; this is primarily for making test modules conveniently executable. """ USAGE = """\ Usage: %(progName)s [options] [test] [...] 
Options: -h, --help Show this message -v, --verbose Verbose output -q, --quiet Minimal output Examples: %(progName)s - run default set of tests %(progName)s MyTestSuite - run suite 'MyTestSuite' %(progName)s MyTestCase.testSomething - run MyTestCase.testSomething %(progName)s MyTestCase - run all 'test*' test methods in MyTestCase """ def __init__(self, module='__main__', defaultTest=None, argv=None, testRunner=None, testLoader=defaultTestLoader): if type(module) == type(''): self.module = __import__(module) for part in module.split('.')[1:]: self.module = getattr(self.module, part) else: self.module = module if argv is None: argv = sys.argv self.verbosity = 1 self.defaultTest = defaultTest self.testRunner = testRunner self.testLoader = testLoader self.progName = os.path.basename(argv[0]) self.parseArgs(argv) self.runTests() def usageExit(self, msg=None): if msg: print msg print self.USAGE % self.__dict__ sys.exit(2) def parseArgs(self, argv): import getopt try: options, args = getopt.getopt(argv[1:], 'hHvq', ['help','verbose','quiet']) for opt, value in options: if opt in ('-h','-H','--help'): self.usageExit() if opt in ('-q','--quiet'): self.verbosity = 0 if opt in ('-v','--verbose'): self.verbosity = 2 if len(args) == 0 and self.defaultTest is None: self.test = self.testLoader.loadTestsFromModule(self.module) return if len(args) > 0: self.testNames = args else: self.testNames = (self.defaultTest,) self.createTests() except getopt.error, msg: self.usageExit(msg) def createTests(self): self.test = self.testLoader.loadTestsFromNames(self.testNames, self.module) def runTests(self): if self.testRunner is None: self.testRunner = TextTestRunner(verbosity=self.verbosity) result = self.testRunner.run(self.test) sys.exit(not result.wasSuccessful()) main = TestProgram ############################################################################## # Executing this module from the command line ############################################################################## if 
__name__ == "__main__": main(module=None)
mit
magsilva/scriptLattes
scriptLattes/internacionalizacao/depuradorDOIhtml.py
3
2090
#!/usr/bin/python
# encoding: utf-8
# filename: depuradorDOIhtml.py
#
# scriptLattes V8
# Copyright 2005-2013: Jesús P. Mena-Chalco e Roberto M. Cesar-Jr.
# http://scriptlattes.sourceforge.net/
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation (FSF); either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.  You should have received a
# copy of the GNU General Public License along with this program; if not,
# write to the Free Software Foundation Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.

import re

try:  # Python 2
    from HTMLParser import HTMLParser
    from htmlentitydefs import name2codepoint
except ImportError:  # Python 3: both modules were renamed
    from html.parser import HTMLParser
    from html.entities import name2codepoint
    unichr = chr  # unichr() was merged into chr() in Python 3


class DepuradorDOIhtml(HTMLParser):
    """Scrub the HTML returned by a DOI lookup down to plain text.

    The raw HTML is fed through the parser at construction time; text
    nodes are whitespace-collapsed and block-level tags (<p>, <br>,
    <li>, <div>) are turned into newlines, so the publication data
    stays line-oriented for later parsing.
    """

    dadosDaPublicacao = None  # accumulated plain-text publication data

    def __init__(self, rawDOIhtml):
        HTMLParser.__init__(self)
        self.dadosDaPublicacao = ""
        self.feed(rawDOIhtml)

    def handle_data(self, dado):
        # Collapse internal runs of whitespace and append with a single
        # trailing space so adjacent text nodes stay separated.
        texto = dado.strip()
        if len(texto) > 0:
            texto = re.sub(r'[ \t\r\n]+', ' ', texto)
            self.dadosDaPublicacao += texto + ' '

    def handle_starttag(self, tag, attributes):
        # Block-level elements start a new line of output.
        if tag in ('p', 'br', 'li', 'div'):
            self.dadosDaPublicacao += '\n'

    def handle_startendtag(self, tag, attrs):
        # Self-closing <br/> also breaks the line.
        if tag == 'br':
            self.dadosDaPublicacao += '\n'

    def obterDadosDaPublicacao(self):
        """Return the scrubbed publication text without edge whitespace."""
        return self.dadosDaPublicacao.strip()


# ---------------------------------------------------------------------------- #
def stripBlanks(s):
    """Collapse every whitespace run in *s* to a single space and trim."""
    return re.sub(r'\s+', ' ', s).strip()


def htmlentitydecode(s):
    """Replace named HTML entities (e.g. '&amp;') with their characters."""
    return re.sub('&(%s);' % '|'.join(name2codepoint),
                  lambda m: unichr(name2codepoint[m.group(1)]), s)
gpl-2.0
hojel/youtube-dl
youtube_dl/extractor/stitcher.py
30
2934
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..utils import (
    determine_ext,
    int_or_none,
    js_to_json,
    unescapeHTML,
)


class StitcherIE(InfoExtractor):
    """Extractor for Stitcher podcast episode pages.

    Episode URLs carry a numeric id and, optionally, a display-id slug
    immediately before it; both are captured by _VALID_URL.
    """
    _VALID_URL = r'https?://(?:www\.)?stitcher\.com/podcast/(?:[^/]+/)+e/(?:(?P<display_id>[^/#?&]+?)-)?(?P<id>\d+)(?:[/#?&]|$)'
    _TESTS = [{
        'url': 'http://www.stitcher.com/podcast/the-talking-machines/e/40789481?autoplay=true',
        'md5': '391dd4e021e6edeb7b8e68fbf2e9e940',
        'info_dict': {
            'id': '40789481',
            'ext': 'mp3',
            'title': 'Machine Learning Mastery and Cancer Clusters',
            'description': 'md5:55163197a44e915a14a1ac3a1de0f2d3',
            'duration': 1604,
            'thumbnail': 're:^https?://.*\.jpg',
        },
    }, {
        'url': 'http://www.stitcher.com/podcast/panoply/vulture-tv/e/the-rare-hourlong-comedy-plus-40846275?autoplay=true',
        'info_dict': {
            'id': '40846275',
            'display_id': 'the-rare-hourlong-comedy-plus',
            'ext': 'mp3',
            'title': "The CW's 'Crazy Ex-Girlfriend'",
            'description': 'md5:04f1e2f98eb3f5cbb094cea0f9e19b17',
            'duration': 2235,
            'thumbnail': 're:^https?://.*\.jpg',
        },
        'params': {
            'skip_download': True,
        },
    }, {
        # escaped title
        'url': 'http://www.stitcher.com/podcast/marketplace-on-stitcher/e/40910226?autoplay=true',
        'only_matching': True,
    }, {
        'url': 'http://www.stitcher.com/podcast/panoply/getting-in/e/episode-2a-how-many-extracurriculars-should-i-have-40876278?autoplay=true',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        audio_id = mobj.group('id')
        # Fall back to the numeric id when the URL has no slug.
        display_id = mobj.group('display_id') or audio_id

        webpage = self._download_webpage(url, display_id)

        # The episode metadata lives in an inline `var stitcher = {...};`
        # JS object; convert it to JSON before parsing.
        episode = self._parse_json(
            js_to_json(self._search_regex(
                r'(?s)var\s+stitcher\s*=\s*({.+?});\n', webpage,
                'episode config')),
            display_id)['config']['episode']

        # Title may be HTML-escaped in the config (see the third _TESTS entry).
        title = unescapeHTML(episode['title'])

        # Audio-only stream(s); keys listed in the tuple are tried in order
        # and skipped when absent from the config.
        formats = [{
            'url': episode[episode_key],
            'ext': determine_ext(episode[episode_key]) or 'mp3',
            'vcodec': 'none',
        } for episode_key in ('episodeURL',) if episode.get(episode_key)]
        # Description is scraped from the page markup, not the JS config;
        # non-fatal because the section may be missing.
        description = self._search_regex(
            r'Episode Info:\s*</span>([^<]+)<', webpage, 'description',
            fatal=False)
        duration = int_or_none(episode.get('duration'))
        thumbnail = episode.get('episodeImage')

        return {
            'id': audio_id,
            'display_id': display_id,
            'title': title,
            'description': description,
            'duration': duration,
            'thumbnail': thumbnail,
            'formats': formats,
        }
unlicense
firebitsbr/raspberry_pwn
src/pexpect-2.3/setup.py
11
1452
'''
$Revision: 485 $
$Date: 2007-07-12 15:23:15 -0700 (Thu, 12 Jul 2007) $
'''
# Distutils build script for pexpect, a pure-Python Expect clone used to
# drive interactive applications from scripts.
from distutils.core import setup
setup (name='pexpect',
    version='2.3',
    py_modules=['pexpect', 'pxssh', 'fdpexpect', 'FSM', 'screen', 'ANSI'],
    description='Pexpect is a pure Python Expect. It allows easy control of other applications.',
    author='Noah Spurrier',
    author_email='noah@noah.org',
    url='http://pexpect.sourceforge.net/',
    license='MIT license',
    platforms='UNIX',
)
# Trove classifiers kept for reference but not passed to setup():
#    classifiers = [
#        'Development Status :: 4 - Beta',
#        'Environment :: Console',
#        'Environment :: Console (Text Based)',
#        'Intended Audience :: Developers',
#        'Intended Audience :: System Administrators',
#        'Intended Audience :: Quality Engineers',
#        'License :: OSI Approved :: Python Software Foundation License',
#        'Operating System :: POSIX',
#        'Operating System :: MacOS :: MacOS X',
#        'Programming Language :: Python',
#        'Topic :: Software Development',
#        'Topic :: Software Development :: Libraries :: Python Modules',
#        'Topic :: Software Development :: Quality Assurance',
#        'Topic :: Software Development :: Testing',
#        'Topic :: System, System :: Archiving :: Packaging, System :: Installation/Setup',
#        'Topic :: System :: Shells',
#        'Topic :: System :: Software Distribution',
#        'Topic :: Terminals, Utilities',
#    ],
gpl-3.0
kenshay/ImageScripter
Script_Runner/PYTHON/Lib/site-packages/asn1crypto/csr.py
14
2142
# coding: utf-8 """ ASN.1 type classes for certificate signing requests (CSR). Exports the following items: - CertificatationRequest() Other type classes are defined that help compose the types listed above. """ from __future__ import unicode_literals, division, absolute_import, print_function from .algos import SignedDigestAlgorithm from .core import ( Any, Integer, ObjectIdentifier, OctetBitString, Sequence, SetOf, ) from .keys import PublicKeyInfo from .x509 import DirectoryString, Extensions, Name # The structures in this file are taken from https://tools.ietf.org/html/rfc2986 # and https://tools.ietf.org/html/rfc2985 class Version(Integer): _map = { 0: 'v1', } class CSRAttributeType(ObjectIdentifier): _map = { '1.2.840.113549.1.9.7': 'challenge_password', '1.2.840.113549.1.9.9': 'extended_certificate_attributes', '1.2.840.113549.1.9.14': 'extension_request', } class SetOfDirectoryString(SetOf): _child_spec = DirectoryString class Attribute(Sequence): _fields = [ ('type', ObjectIdentifier), ('values', SetOf, {'spec': Any}), ] class SetOfAttributes(SetOf): _child_spec = Attribute class SetOfExtensions(SetOf): _child_spec = Extensions class CRIAttribute(Sequence): _fields = [ ('type', CSRAttributeType), ('values', Any), ] _oid_pair = ('type', 'values') _oid_specs = { 'challenge_password': SetOfDirectoryString, 'extended_certificate_attributes': SetOfAttributes, 'extension_request': SetOfExtensions, } class CRIAttributes(SetOf): _child_spec = CRIAttribute class CertificationRequestInfo(Sequence): _fields = [ ('version', Version), ('subject', Name), ('subject_pk_info', PublicKeyInfo), ('attributes', CRIAttributes, {'implicit': 0, 'optional': True}), ] class CertificationRequest(Sequence): _fields = [ ('certification_request_info', CertificationRequestInfo), ('signature_algorithm', SignedDigestAlgorithm), ('signature', OctetBitString), ]
gpl-3.0
litchfield/django
tests/datatypes/tests.py
305
4210
from __future__ import unicode_literals

import datetime

from django.test import TestCase, skipIfDBFeature
from django.utils import six
from django.utils.timezone import utc

from .models import Donut, RumBaba


class DataTypesTestCase(TestCase):
    """Round-trip tests for model field data types (boolean, date, time,
    datetime and text) against the configured database backend."""

    def test_boolean_type(self):
        d = Donut(name='Apple Fritter')
        # Unset booleans: is_frosted defaults to False, has_sprinkles
        # (nullable) to None.
        self.assertFalse(d.is_frosted)
        self.assertIsNone(d.has_sprinkles)
        d.has_sprinkles = True
        self.assertTrue(d.has_sprinkles)
        d.save()
        d2 = Donut.objects.get(name='Apple Fritter')
        self.assertFalse(d2.is_frosted)
        self.assertTrue(d2.has_sprinkles)

    def test_date_type(self):
        # date, time and datetime values must round-trip unchanged.
        d = Donut(name='Apple Fritter')
        d.baked_date = datetime.date(year=1938, month=6, day=4)
        d.baked_time = datetime.time(hour=5, minute=30)
        d.consumed_at = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59)
        d.save()
        d2 = Donut.objects.get(name='Apple Fritter')
        self.assertEqual(d2.baked_date, datetime.date(1938, 6, 4))
        self.assertEqual(d2.baked_time, datetime.time(5, 30))
        self.assertEqual(d2.consumed_at, datetime.datetime(2007, 4, 20, 16, 19, 59))

    def test_time_field(self):
        # Test for ticket #12059: TimeField wrongly handling datetime.datetime object.
        d = Donut(name='Apple Fritter')
        d.baked_time = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59)
        d.save()
        d2 = Donut.objects.get(name='Apple Fritter')
        # Only the time portion of the datetime must be stored.
        self.assertEqual(d2.baked_time, datetime.time(16, 19, 59))

    def test_year_boundaries(self):
        """Year boundary tests (ticket #3689)"""
        # Rows at the very last and very first instants of a year must be
        # matched by __year lookups for that year only.
        Donut.objects.create(name='Date Test 2007',
            baked_date=datetime.datetime(year=2007, month=12, day=31),
            consumed_at=datetime.datetime(year=2007, month=12, day=31, hour=23, minute=59, second=59))
        Donut.objects.create(name='Date Test 2006',
            baked_date=datetime.datetime(year=2006, month=1, day=1),
            consumed_at=datetime.datetime(year=2006, month=1, day=1))
        self.assertEqual("Date Test 2007",
            Donut.objects.filter(baked_date__year=2007)[0].name)
        self.assertEqual("Date Test 2006",
            Donut.objects.filter(baked_date__year=2006)[0].name)

        Donut.objects.create(name='Apple Fritter',
            consumed_at=datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59))

        self.assertEqual(['Apple Fritter', 'Date Test 2007'],
            list(Donut.objects.filter(consumed_at__year=2007).order_by('name').values_list('name', flat=True)))
        # Adjacent years must return nothing.
        self.assertEqual(0, Donut.objects.filter(consumed_at__year=2005).count())
        self.assertEqual(0, Donut.objects.filter(consumed_at__year=2008).count())

    def test_textfields_unicode(self):
        """Regression test for #10238: TextField values returned from
        the database should be unicode."""
        d = Donut.objects.create(name='Jelly Donut', review='Outstanding')
        newd = Donut.objects.get(id=d.id)
        self.assertIsInstance(newd.review, six.text_type)

    @skipIfDBFeature('supports_timezones')
    def test_error_on_timezone(self):
        """Regression test for #8354: the MySQL and Oracle backends should
        raise an error if given a timezone-aware datetime object."""
        dt = datetime.datetime(2008, 8, 31, 16, 20, tzinfo=utc)
        d = Donut(name='Bear claw', consumed_at=dt)
        self.assertRaises(ValueError, d.save)
        # ValueError: MySQL backend does not support timezone-aware datetimes.

    def test_datefield_auto_now_add(self):
        """Regression test for #10970, auto_now_add for DateField should store
        a Python datetime.date, not a datetime.datetime"""
        b = RumBaba.objects.create()
        # Verify we didn't break DateTimeField behavior
        self.assertIsInstance(b.baked_timestamp, datetime.datetime)
        # We need to test this way because datetime.datetime inherits
        # from datetime.date:
        self.assertIsInstance(b.baked_date, datetime.date)
        self.assertNotIsInstance(b.baked_date, datetime.datetime)
bsd-3-clause
vpelletier/neoppod
neo/tests/testHandler.py
1
2979
#
# Copyright (C) 2009-2016  Nexedi SA
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import unittest
from mock import Mock
from . import NeoUnitTestBase
from neo.lib.handler import EventHandler
from neo.lib.protocol import PacketMalformedError, UnexpectedPacketError, \
        BrokenNodeDisallowedError, NotReadyError, ProtocolError


class HandlerTests(NeoUnitTestBase):
    """Unit tests for EventHandler.dispatch error handling.

    A fake packet routes dispatch() to ``fake_method``, which is swapped
    per scenario so each protocol error class can be raised and the
    resulting connection state (error packet sent / aborted / closed)
    checked.
    """

    def setUp(self):
        NeoUnitTestBase.setUp(self)
        app = Mock()
        self.handler = EventHandler(app)

    def setFakeMethod(self, method):
        # Install *method* as the handler invoked for the fake packet.
        self.handler.fake_method = method

    def getFakePacket(self):
        # Minimal packet mock; handler_method_name steers dispatch() to
        # fake_method above.
        p = Mock({
            'decode': (),
            '__repr__': 'Fake Packet',
        })
        p.handler_method_name = 'fake_method'
        return p

    def test_dispatch(self):
        conn = self.getFakeConnection()
        packet = self.getFakePacket()
        # all is ok
        self.setFakeMethod(lambda c: None)
        self.handler.dispatch(conn, packet)
        # raise UnexpectedPacketError: error packet sent, connection aborted
        conn.mockCalledMethods = {}
        def fake(c):
            raise UnexpectedPacketError('fake packet')
        self.setFakeMethod(fake)
        self.handler.dispatch(conn, packet)
        self.checkErrorPacket(conn)
        self.checkAborted(conn)
        # raise PacketMalformedError: connection closed outright
        conn.mockCalledMethods = {}
        def fake(c):
            raise PacketMalformedError('message')
        self.setFakeMethod(fake)
        self.handler.dispatch(conn, packet)
        self.checkClosed(conn)
        # raise BrokenNodeDisallowedError: error packet sent, aborted
        conn.mockCalledMethods = {}
        def fake(c):
            raise BrokenNodeDisallowedError
        self.setFakeMethod(fake)
        self.handler.dispatch(conn, packet)
        self.checkErrorPacket(conn)
        self.checkAborted(conn)
        # raise NotReadyError: error packet sent, aborted
        conn.mockCalledMethods = {}
        def fake(c):
            raise NotReadyError
        self.setFakeMethod(fake)
        self.handler.dispatch(conn, packet)
        self.checkErrorPacket(conn)
        self.checkAborted(conn)
        # raise ProtocolError: error packet sent, aborted
        conn.mockCalledMethods = {}
        def fake(c):
            raise ProtocolError
        self.setFakeMethod(fake)
        self.handler.dispatch(conn, packet)
        self.checkErrorPacket(conn)
        self.checkAborted(conn)


if __name__ == '__main__':
    unittest.main()
gpl-2.0
morgenst/PyAnalysisTools
tests/unit/TestUtilities.py
1
2542
"""Unit tests for PyAnalysisTools.base.Utilities helpers (dictionary
merging, required-argument checking, dict flattening and the Cleaner
class).  Tests that need a fake filesystem are skipped; the pyfakefs
scaffolding is kept commented out for later reactivation."""
import os
import unittest

from PyAnalysisTools.base import Utilities, InvalidInputError
# from pyfakefs.fake_filesystem_unittest import TestCase

cwd = os.path.dirname(__file__)


class TestUtilities(unittest.TestCase):
    def setUp(self):
        pass
        # self.setUpPyfakefs()

    def tearDown(self):
        pass
        # self.tearDownPyfakefs()

    def test_merge_dicts(self):
        d1 = {'foo': 1}
        d2 = {'bar': 2}
        self.assertEqual({'foo': 1, 'bar': 2}, Utilities.merge_dictionaries(d1, d2))

    def test_merge_dicts_single(self):
        # A single dict merges to itself.
        d1 = {'foo': 1}
        self.assertEqual(d1, Utilities.merge_dictionaries(d1))

    def test_merge_dicts_fail(self):
        # Non-dict arguments are ignored rather than raising.
        d1 = {'foo': 1}
        d2 = ['bar', 2]
        self.assertEqual({'foo': 1}, Utilities.merge_dictionaries(d1, d2))

    def test_check_required_args_found(self):
        # Returns None when the required kwarg is present.
        self.assertIsNone(Utilities.check_required_args('arg', arg=1))

    def test_check_required_args_missing(self):
        # Returns the name of the missing required kwarg.
        self.assertEqual('arg', Utilities.check_required_args('arg', foo=1))

    @unittest.skip("Requires fake fs")
    def test_cleaner_check_lifetime(self):
        self.fs.create_file('/foo/bar.txt')
        self.assertTrue(Utilities.Cleaner.check_lifetime(100, 'foo', ['bar.txt']))

    def test_flatten_single_element(self):
        self.assertEqual(['foo/bar/1'], Utilities.flatten({'foo': {'bar': ["1"]}}))

    def test_flatten_more_elements(self):
        self.assertEqual(['foo/bar/1', 'foo/bar/2'], Utilities.flatten({'foo': {'bar': ["1", "2"]}}))

    @unittest.skip("Requires fake fs")
    def test_cleaner_default_ctor(self):
        cleaner = Utilities.Cleaner(base_path='foo')
        self.assertTrue(cleaner.safe)
        self.assertEqual('/foo', cleaner.base_path)
        self.assertEqual([".git", ".keep", ".svn", "InstallArea", "RootCoreBin", "WorkArea"],
                         cleaner.keep_pattern)
        self.assertEqual([], cleaner.deletion_list)
        self.assertEqual(14., cleaner.touch_threshold_days)
        self.assertEqual(None, cleaner.trash_path)

    @unittest.skip("Requires fake fs")
    def test_cleaner_default_ctor_trash(self):
        cleaner = Utilities.Cleaner(base_path='foo', trash_path='bar')
        self.assertEqual('bar', cleaner.trash_path)

    def test_cleaner_default_ctor_missing_arg(self):
        # Cleaner requires base_path; omitting it must raise.
        self.assertRaises(InvalidInputError, Utilities.Cleaner)

    def test_cleaner_default_setup_trash(self):
        cleaner = Utilities.Cleaner(base_path='foo', safe=False)
        self.assertIsNone(cleaner.setup_temporary_trash())
mit
dledford/linux
scripts/gdb/linux/lists.py
509
3631
#
# gdb helper commands and functions for Linux kernel debugging
#
#  list tools
#
# Copyright (c) Thiebaud Weksteen, 2015
#
# Authors:
#  Thiebaud Weksteen <thiebaud@weksteen.fr>
#
# This work is licensed under the terms of the GNU GPL version 2.
#

import gdb

from linux import utils

list_head = utils.CachedType("struct list_head")


def list_for_each(head):
    """Yield the address of each node on the kernel list rooted at *head*.

    *head* may be a ``struct list_head`` value or a pointer to one; the
    head itself is not yielded.
    """
    if head.type == list_head.get_type().pointer():
        head = head.dereference()
    elif head.type != list_head.get_type():
        raise gdb.GdbError("Must be struct list_head not {}"
                           .format(head.type))

    node = head['next'].dereference()
    while node.address != head.address:
        yield node.address
        node = node['next'].dereference()


def list_for_each_entry(head, gdbtype, member):
    """Yield each containing entry of type *gdbtype* on the list at *head*,
    where *member* names the embedded list_head field."""
    for node in list_for_each(head):
        if node.type != list_head.get_type().pointer():
            raise TypeError("Type {} found. Expected struct list_head *."
                            .format(node.type))
        yield utils.container_of(node, gdbtype, member)


def list_check(head):
    """Walk the list at *head*, reporting the first broken or unreadable
    link (prev.next / next.prev mismatch) or the node count if consistent."""
    nb = 0
    if (head.type == list_head.get_type().pointer()):
        head = head.dereference()
    elif (head.type != list_head.get_type()):
        raise gdb.GdbError('argument must be of type (struct list_head [*])')
    c = head
    try:
        gdb.write("Starting with: {}\n".format(c))
    except gdb.MemoryError:
        gdb.write('head is not accessible\n')
        return
    while True:
        p = c['prev'].dereference()
        n = c['next'].dereference()
        # Check the backward link of the current node.
        try:
            if p['next'] != c.address:
                gdb.write('prev.next != current: '
                          'current@{current_addr}={current} '
                          'prev@{p_addr}={p}\n'.format(
                              current_addr=c.address,
                              current=c,
                              p_addr=p.address,
                              p=p,
                          ))
                return
        except gdb.MemoryError:
            gdb.write('prev is not accessible: '
                      'current@{current_addr}={current}\n'.format(
                          current_addr=c.address,
                          current=c
                      ))
            return
        # Check the forward link of the current node.
        try:
            if n['prev'] != c.address:
                gdb.write('next.prev != current: '
                          'current@{current_addr}={current} '
                          'next@{n_addr}={n}\n'.format(
                              current_addr=c.address,
                              current=c,
                              n_addr=n.address,
                              n=n,
                          ))
                return
        except gdb.MemoryError:
            gdb.write('next is not accessible: '
                      'current@{current_addr}={current}\n'.format(
                          current_addr=c.address,
                          current=c
                      ))
            return
        c = n
        nb += 1
        if c == head:
            gdb.write("list is consistent: {} node(s)\n".format(nb))
            return


class LxListChk(gdb.Command):
    """Verify a list consistency"""

    def __init__(self):
        super(LxListChk, self).__init__("lx-list-check", gdb.COMMAND_DATA,
                                        gdb.COMPLETE_EXPRESSION)

    def invoke(self, arg, from_tty):
        argv = gdb.string_to_argv(arg)
        if len(argv) != 1:
            raise gdb.GdbError("lx-list-check takes one argument")
        list_check(gdb.parse_and_eval(argv[0]))

# Register the command with gdb at import time.
LxListChk()
gpl-2.0
semonte/intellij-community
python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_apply.py
315
1904
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Fixer for apply(). This converts apply(func, v, k) into (func)(*v, **k).""" # Local imports from .. import pytree from ..pgen2 import token from .. import fixer_base from ..fixer_util import Call, Comma, parenthesize class FixApply(fixer_base.BaseFix): BM_compatible = True PATTERN = """ power< 'apply' trailer< '(' arglist< (not argument<NAME '=' any>) func=any ',' (not argument<NAME '=' any>) args=any [',' (not argument<NAME '=' any>) kwds=any] [','] > ')' > > """ def transform(self, node, results): syms = self.syms assert results func = results["func"] args = results["args"] kwds = results.get("kwds") prefix = node.prefix func = func.clone() if (func.type not in (token.NAME, syms.atom) and (func.type != syms.power or func.children[-2].type == token.DOUBLESTAR)): # Need to parenthesize func = parenthesize(func) func.prefix = "" args = args.clone() args.prefix = "" if kwds is not None: kwds = kwds.clone() kwds.prefix = "" l_newargs = [pytree.Leaf(token.STAR, u"*"), args] if kwds is not None: l_newargs.extend([Comma(), pytree.Leaf(token.DOUBLESTAR, u"**"), kwds]) l_newargs[-2].prefix = u" " # that's the ** token # XXX Sometimes we could be cleverer, e.g. apply(f, (x, y) + t) # can be translated into f(x, y, *t) instead of f(*(x, y) + t) #new = pytree.Node(syms.power, (func, ArgList(l_newargs))) return Call(func, l_newargs, prefix=prefix)
apache-2.0
mmaruska/pybluez
setup.py
3
4200
#!/usr/bin/env python from setuptools import setup, Extension import sys import platform import os mods = list() def find_MS_SDK(): candidate_roots = (os.getenv('ProgramFiles'), os.getenv('ProgramW6432'), os.getenv('ProgramFiles(x86)')) if sys.version < '3.3': MS_SDK = r'Microsoft SDKs\Windows\v6.0A' # Visual Studio 9 else: MS_SDK = r'Microsoft SDKs\Windows\v7.0A' # Visual Studio 10 candidate_paths = (MS_SDK, 'Microsoft Platform SDK for Windows XP', 'Microsoft Platform SDK') for candidate_root in candidate_roots: for candidate_path in candidate_paths: candidate_sdk = os.path.join(candidate_root, candidate_path) if os.path.exists(candidate_sdk): return candidate_sdk if sys.platform == 'win32': PSDK_PATH = find_MS_SDK() if PSDK_PATH is None: raise SystemExit("Could not find the Windows Platform SDK") lib_path = os.path.join(PSDK_PATH, 'Lib') if '64' in platform.architecture()[0]: lib_path = os.path.join(lib_path, 'x64') mods.append(Extension('bluetooth._msbt', include_dirs=["%s\\Include" % PSDK_PATH, ".\\port3"], library_dirs=[lib_path], libraries=["WS2_32", "Irprops"], sources=['msbt\\_msbt.c'])) # widcomm WC_BASE = os.path.join(os.getenv('ProgramFiles'), r"Widcomm\BTW DK\SDK") if os.path.exists(WC_BASE): mods.append(Extension('bluetooth._widcomm', include_dirs=["%s\\Inc" % WC_BASE, ".\\port3"], define_macros=[('_BTWLIB', None)], library_dirs=["%s\\Release" % WC_BASE, "%s\\Lib" % PSDK_PATH], libraries=["WidcommSdklib", "ws2_32", "version", "user32", "Advapi32", "Winspool", "ole32", "oleaut32"], sources=["widcomm\\_widcomm.cpp", "widcomm\\inquirer.cpp", "widcomm\\rfcommport.cpp", "widcomm\\rfcommif.cpp", "widcomm\\l2capconn.cpp", "widcomm\\l2capif.cpp", "widcomm\\sdpservice.cpp", "widcomm\\util.cpp"])) elif sys.platform.startswith('linux'): mods.append(Extension('bluetooth._bluetooth', include_dirs=["./port3"], libraries=['bluetooth'], #extra_compile_args=['-O0'], sources=['bluez/btmodule.c', 'bluez/btsdp.c'])) elif sys.platform == 'darwin': 
mods.append(Extension('bluetooth._osxbt', include_dirs=["/System/Library/Frameworks/IOBluetooth.framework/Headers", "/System/Library/Frameworks/CoreFoundation.framework/Headers"], extra_link_args=['-framework IOBluetooth -framework CoreFoundation'], sources=['osx/_osxbt.c'])) else: raise Exception("This platform (%s) is currently not supported by pybluez." % sys.platform) setup(name='PyBluez', version='0.22', description='Bluetooth Python extension module', author="Albert Huang", author_email="ashuang@alum.mit.edu", url="http://karulis.github.io/pybluez/", ext_modules=mods, packages=["bluetooth"], # for the python cheese shop classifiers=['Development Status :: 4 - Beta', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Communications'], download_url='https://github.com/karulis/pybluez', long_description='Bluetooth Python extension module to allow Python "\ "developers to use system Bluetooth resources. PyBluez works "\ "with GNU/Linux and Windows XP.', maintainer='Piotr Karulis', license='GPL', extras_require={'ble': ['gattlib==0.20150805']})
gpl-2.0
jetskijoe/SickGear
lib/subliminal/tasks.py
170
2328
# -*- coding: utf-8 -*- # Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com> # # This file is part of subliminal. # # subliminal is free software; you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # subliminal is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with subliminal. If not, see <http://www.gnu.org/licenses/>. __all__ = ['Task', 'ListTask', 'DownloadTask', 'StopTask'] class Task(object): """Base class for tasks to use in subliminal""" pass class ListTask(Task): """List task used by the worker to search for subtitles :param video: video to search subtitles for :type video: :class:`~subliminal.videos.Video` :param list languages: languages to search for :param string service: name of the service to use :param config: configuration for the service :type config: :class:`~subliminal.services.ServiceConfig` """ def __init__(self, video, languages, service, config): super(ListTask, self).__init__() self.video = video self.service = service self.languages = languages self.config = config def __repr__(self): return 'ListTask(%r, %r, %s, %r)' % (self.video, self.languages, self.service, self.config) class DownloadTask(Task): """Download task used by the worker to download subtitles :param video: video to download subtitles for :type video: :class:`~subliminal.videos.Video` :param subtitles: subtitles to download in order of preference :type subtitles: list of :class:`~subliminal.subtitles.Subtitle` """ def __init__(self, video, subtitles): super(DownloadTask, self).__init__() self.video = video self.subtitles = subtitles def 
__repr__(self): return 'DownloadTask(%r, %r)' % (self.video, self.subtitles) class StopTask(Task): """Stop task that will stop the worker""" pass
gpl-3.0
srajag/nova
nova/tests/api/openstack/compute/contrib/test_extended_volumes.py
12
4779
# Copyright 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from lxml import etree import webob from nova.api.openstack.compute.contrib import extended_volumes from nova import compute from nova import db from nova import objects from nova.objects import instance as instance_obj from nova.openstack.common import jsonutils from nova import test from nova.tests.api.openstack import fakes from nova.tests import fake_block_device from nova.tests import fake_instance UUID1 = '00000000-0000-0000-0000-000000000001' UUID2 = '00000000-0000-0000-0000-000000000002' UUID3 = '00000000-0000-0000-0000-000000000003' def fake_compute_get(*args, **kwargs): inst = fakes.stub_instance(1, uuid=UUID1) return fake_instance.fake_instance_obj(args[1], **inst) def fake_compute_get_all(*args, **kwargs): db_list = [fakes.stub_instance(1), fakes.stub_instance(2)] fields = instance_obj.INSTANCE_DEFAULT_FIELDS return instance_obj._make_instance_list(args[1], objects.InstanceList(), db_list, fields) def fake_bdms_get_all_by_instance(*args, **kwargs): return [fake_block_device.FakeDbBlockDeviceDict( {'volume_id': UUID1, 'source_type': 'volume', 'destination_type': 'volume', 'id': 1}), fake_block_device.FakeDbBlockDeviceDict( {'volume_id': UUID2, 'source_type': 'volume', 'destination_type': 'volume', 'id': 2})] class ExtendedVolumesTest(test.TestCase): content_type = 'application/json' prefix = 'os-extended-volumes:' def setUp(self): super(ExtendedVolumesTest, 
self).setUp() fakes.stub_out_nw_api(self.stubs) self.stubs.Set(compute.api.API, 'get', fake_compute_get) self.stubs.Set(compute.api.API, 'get_all', fake_compute_get_all) self.stubs.Set(db, 'block_device_mapping_get_all_by_instance', fake_bdms_get_all_by_instance) self.flags( osapi_compute_extension=[ 'nova.api.openstack.compute.contrib.select_extensions'], osapi_compute_ext_list=['Extended_volumes']) return_server = fakes.fake_instance_get() self.stubs.Set(db, 'instance_get_by_uuid', return_server) def _make_request(self, url): req = webob.Request.blank(url) req.headers['Accept'] = self.content_type res = req.get_response(fakes.wsgi_app(init_only=('servers',))) return res def _get_server(self, body): return jsonutils.loads(body).get('server') def _get_servers(self, body): return jsonutils.loads(body).get('servers') def test_show(self): url = '/v2/fake/servers/%s' % UUID1 res = self._make_request(url) self.assertEqual(res.status_int, 200) server = self._get_server(res.body) exp_volumes = [{'id': UUID1}, {'id': UUID2}] if self.content_type == 'application/json': actual = server.get('%svolumes_attached' % self.prefix) elif self.content_type == 'application/xml': actual = [dict(elem.items()) for elem in server.findall('%svolume_attached' % self.prefix)] self.assertEqual(exp_volumes, actual) def test_detail(self): url = '/v2/fake/servers/detail' res = self._make_request(url) self.assertEqual(res.status_int, 200) exp_volumes = [{'id': UUID1}, {'id': UUID2}] for i, server in enumerate(self._get_servers(res.body)): if self.content_type == 'application/json': actual = server.get('%svolumes_attached' % self.prefix) elif self.content_type == 'application/xml': actual = [dict(elem.items()) for elem in server.findall('%svolume_attached' % self.prefix)] self.assertEqual(exp_volumes, actual) class ExtendedVolumesXmlTest(ExtendedVolumesTest): content_type = 'application/xml' prefix = '{%s}' % extended_volumes.Extended_volumes.namespace def _get_server(self, body): return 
etree.XML(body) def _get_servers(self, body): return etree.XML(body).getchildren()
apache-2.0
nexiles/odoo
addons/product_email_template/models/invoice.py
321
1969
# -*- coding: utf-8 -*- from openerp.osv import osv class account_invoice(osv.Model): _inherit = 'account.invoice' def invoice_validate_send_email(self, cr, uid, ids, context=None): Composer = self.pool['mail.compose.message'] for invoice in self.browse(cr, uid, ids, context=context): # send template only on customer invoice if invoice.type != 'out_invoice': continue # subscribe the partner to the invoice if invoice.partner_id not in invoice.message_follower_ids: self.message_subscribe(cr, uid, [invoice.id], [invoice.partner_id.id], context=context) for line in invoice.invoice_line: if line.product_id.email_template_id: # CLEANME: should define and use a clean API: message_post with a template composer_id = Composer.create(cr, uid, { 'model': 'account.invoice', 'res_id': invoice.id, 'template_id': line.product_id.email_template_id.id, 'composition_mode': 'comment', }, context=context) template_values = Composer.onchange_template_id( cr, uid, composer_id, line.product_id.email_template_id.id, 'comment', 'account.invoice', invoice.id )['value'] template_values['attachment_ids'] = [(4, id) for id in template_values.get('attachment_ids', [])] Composer.write(cr, uid, [composer_id], template_values, context=context) Composer.send_mail(cr, uid, [composer_id], context=context) return True def invoice_validate(self, cr, uid, ids, context=None): res = super(account_invoice, self).invoice_validate(cr, uid, ids, context=context) self.invoice_validate_send_email(cr, uid, ids, context=context) return res
agpl-3.0
mavenlin/tensorflow
tensorflow/contrib/keras/api/keras/wrappers/scikit_learn/__init__.py
57
1066
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Keras scikit-learn API wrapper.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib.keras.python.keras.wrappers.scikit_learn import KerasClassifier from tensorflow.contrib.keras.python.keras.wrappers.scikit_learn import KerasRegressor del absolute_import del division del print_function
apache-2.0
andreparames/odoo
addons/account/wizard/account_report_common_partner.py
385
1999
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv class account_common_partner_report(osv.osv_memory): _name = 'account.common.partner.report' _description = 'Account Common Partner Report' _inherit = "account.common.report" _columns = { 'result_selection': fields.selection([('customer','Receivable Accounts'), ('supplier','Payable Accounts'), ('customer_supplier','Receivable and Payable Accounts')], "Partner's", required=True), } _defaults = { 'result_selection': 'customer', } def pre_print_report(self, cr, uid, ids, data, context=None): if context is None: context = {} data['form'].update(self.read(cr, uid, ids, ['result_selection'], context=context)[0]) return data #vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
gam-phon/taiga-back
taiga/feedback/serializers.py
3
1088
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz> # Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com> # Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com> # Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net> # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from taiga.base.api import serializers from . import models class FeedbackEntrySerializer(serializers.ModelSerializer): class Meta: model = models.FeedbackEntry
agpl-3.0
bow/bioconda-recipes
recipes/phylip/phylip.py
48
8900
#!/usr/bin/env python # # Wrapper script for phylip program when installed from # bioconda. Adapted from shell scripts provided in the biolbuilds # conda recipe by Cheng H. Lee. import sys import os import subprocess def main(): print("running main") print(sys.argv) bindir = get_script_path(sys.argv[0]) sharedir= get_script_path(bindir+"/dnapars") print(sharedir) if len(sys.argv) == 1: print("Usage: {prog} <program>".format(prog=sys.argv[0])) print("Existing programs are: {progs}".format(progs=os.listdir(sharedir))) sys.exit(1) progname = sys.argv[1] program = bindir+"/"+progname if progname == "test": # hidden test of conda phylip installation test(bindir) elif(os.path.isfile(program)): subprocess.check_call(program) else: print("{prog} does not exist in Phylip".format(prog=progname)) usage() sys.exit(1) def usage(): print("Usage: {prog} <program>".format(prog=sys.argv[0])) print("Existing programs are: {progs}".format(progs=os.listdir(bindir))) def get_script_path(script): return os.path.dirname(os.path.realpath(script)) # Main function for testing the conda installation of phylip # This simply tests that phylip can process infiles without without error code def test(bindir): params = "0\ny\n" out = open("infile", "wt") out.write(infiles["testdna"]) out.close() for prog in ["dnapars","dnaml","dnadist","dnapenny","dnacomp","dnamlk"]: #,"dnainvar" testprog(prog, bindir,params) out = open("infile", "wt") out.write(infiles["testprot"]) out.close() for prog in ["protpars","protdist","proml","promlk"]: testprog(prog, bindir, params) out = open("infile", "wt") out.write(infiles["testdisc"]) out.close() for prog in ["pars","penny","dollop","dolpenny","clique","mix"]: testprog(prog, bindir,params) out = open("infile", "wt") out.write(infiles["testrest"]) out.close() for prog in ["restml","restdist"]: testprog(prog, bindir, params) out = open("infile", "wt") out.write(infiles["testdist"]) out.close() for prog in ["fitch","kitsch","neighbor"]: testprog(prog, bindir,params) 
out = open("intree", "wt") out.write(infiles["testtree"]) out.close() for prog in ["drawtree", "drawgram"]: params = "0\nl\nm\ny\n" testprog(prog, bindir,params) # testing the java gui versions require user interaction # Not good for automatic istallations -- comment out for now, # but keep for debug? ''' for prog in ["drawtree_gui", "drawgram_gui"]: print("testing " + prog) program = bindir+"/"+prog outfile = open(prog+".out",'wt') try: subprocess.run(program, universal_newlines=True,input=params,stdout=outfile, stderr=subprocess.PIPE, check=True) except subprocess.CalledProcessError as e: print(e) subprocess.call(["cat", prog+".out"], shell=True) raise print("passed; cleaning up") subprocess.call(["rm", "-f", "infile","plotfile.ps"])''' # Help function for testing the conda installation of phylip def testprog(prog, bindir, params): print("testing " + prog + "...",) program = bindir+"/"+prog outfile = open(prog+".out",'wt') try: process = subprocess.Popen(program, stdin=subprocess.PIPE, stdout=outfile, stderr=subprocess.STDOUT, universal_newlines=True) process.communicate(input=params) except subprocess.CalledProcessError as e: print(e) subprocess.call(["cat", prog+".out"]) raise print("passed; cleaning up") subprocess.call(["rm", "-f", "outtree", "outfile", "plotfile"]) # Content of test files for testing the conda installation of phylip infiles = { "testdna" : """ 7 232 Bovine CCAAACCTGT CCCCACCATC TAACACCAAC CCACATATAC AAGCTAAACC AAAAATACCA Mouse CCAAAAAAAC ATCCAAACAC CAACCCCAGC CCTTACGCAA TAGCCATACA AAGAATATTA Gibbon CTATACCCAC CCAACTCGAC CTACACCAAT CCCCACATAG CACACAGACC AACAACCTCC Orang CCCCACCCGT CTACACCAGC CAACACCAAC CCCCACCTAC TATACCAACC AATAACCTCT Gorilla CCCCATTTAT CCATAAAAAC CAACACCAAC CCCCATCTAA CACACAAACT AATGACCCCC Chimp CCCCATCCAC CCATACAAAC CAACATTACC CTCCATCCAA TATACAAACT AACAACCTCC Human CCCCACTCAC CCATACAAAC CAACACCACT CTCCACCTAA TATACAAATT AATAACCTCC TACTACTAAA AACTCAAATT AACTCTTTAA TCTTTATACA ACATTCCACC AACCTATCCA TACAACCATA AATAAGACTA 
ATCTATTAAA ATAACCCATT ACGATACAAA ATCCCTTTCG CACCTTCCAT ACCAAGCCCC GACTTTACCG CCAACGCACC TCATCAAAAC ATACCTACAA CAACCCCTAA ACCAAACACT ATCCCCAAAA CCAACACACT CTACCAAAAT ACACCCCCAA CACCCTCAAA GCCAAACACC AACCCTATAA TCAATACGCC TTATCAAAAC ACACCCCCAA CACTCTTCAG ACCGAACACC AATCTCACAA CCAACACGCC CCGTCAAAAC ACCCCTTCAG CACCTTCAGA ACTGAACGCC AATCTCATAA CCAACACACC CCATCAAAGC ACCCCTCCAA CACAAAAAAA CTCATATTTA TCTAAATACG AACTTCACAC AACCTTAACA CATAAACATA TCTAGATACA AACCACAACA CACAATTAAT ACACACCACA ATTACAATAC TAAACTCCCA CACAAACAAA TGCCCCCCCA CCCTCCTTCT TCAAGCCCAC TAGACCATCC TACCTTCCTA TTCACATCCG CACACCCCCA CCCCCCCTGC CCACGTCCAT CCCATCACCC TCTCCTCCCA CATAAACCCA CGCACCCCCA CCCCTTCCGC CCATGCTCAC CACATCATCT CTCCCCTTCA CACAAATTCA TACACCCCTA CCTTTCCTAC CCACGTTCAC CACATCATCC CCCCCTCTCA CACAAACCCG CACACCTCCA CCCCCCTCGT CTACGCTTAC CACGTCATCC CTCCCTCTCA CCCCAGCCCA ACACCCTTCC ACAAATCCTT AATATACGCA CCATAAATAA CA TCCCACCAAA TCACCCTCCA TCAAATCCAC AAATTACACA ACCATTAACC CA GCACGCCAAG CTCTCTACCA TCAAACGCAC AACTTACACA TACAGAACCA CA ACACCCTAAG CCACCTTCCT CAAAATCCAA AACCCACACA ACCGAAACAA CA ACACCTCAAT CCACCTCCCC CCAAATACAC AATTCACACA AACAATACCA CA ACATCTTGAC TCGCCTCTCT CCAAACACAC AATTCACGCA AACAACGCCA CA ACACCTTAAC TCACCTTCTC CCAAACGCAC AATTCGCACA CACAACGCCA CA """, "testprot" : """ 3 474 CAM ---TTETIQS NANLAPLPPH VPEHLVFDFD MYNPSN--LS AGVQEAWAVL TERP ----MDARAT IPEHIARTVI LPQGYADDEV IYPAFK--WL RDEQPLAMAH BM3 TIKEMPQPKT FGELKNLPLL NTDKPVQALM KIADELGEIF KFEAPGRVTR QESNVPDLVW TRCNGG---H WIATRGQLIR EAY-EDYRHF SSECPFIPRE IEGYDPMWIA TKHADV---M QIGKQPGLFS NAEGSEILYD QNNEAFMRSI YLS-SQRLIK EACDESRFDK NLSQALKFVR DFAGDGLFTS WTHEKNWKKA AGEAYDFIP- -TSMDPPEQR QFRALANQVV GMPVVDKLEN RIQELACSLI SGGCPHVIDS LTSMDPPTHT AYRGLTLNWF QPASIRKLEE NIRRIAQASV HNILLPSFS- -QQAMKGYHA MMVDIAVQLV QKWERLNADE HIEVPEDMTR ESLR-PQGQC NFTEDYAEPF PIRIFMLLAG LPEEDIPHLK YLTDQMT--- QRLLDFDGEC DFMTDCALYY PLHVVMTALG VPEDDEPLML KLTQDFFGVH LTLD-TIGLC GFNYRFNSFY RDQPHPFITS MVRALDEAMN KLQRANP--D RPD------- ------GSMT FAEAKEALYD YLIPIIEQRR QKP--GTDAI 
EPDEQAVAAP RQSADEAARR FHETIATFYD YFNGFTVDRR SCP--KDDVM DPAYD----- -----ENKRQ FQEDIKVMND LVDKIIADRK ASGEQSDDLL SIVANGQVN- -GRPITSDEA KRMCGLLLVG GLDTVVNFLS FSMEFLAKSP SLLANSKLD- -GNYIDDKYI NAYYVAIATA GHDTTSSSSG GAIIGLSRNP THMLNGKDPE TGEPLDDENI RYQIITFLIA GHETTSGLLS FALYFLVKNP EHRQELIERP E--------- --------RI PAACEELLRR FS-LVADGRI EQLALAKSDP A--------- --------LI PRLVDEAVRW TAPVKSFMRT HVLQKAAEEA ARVLVDPVPS YKQVKQLKYV GMVLNEALRL WPTAPAFSLY LTSDYEFHGV Q-LKKGDQIL LPQMLSGLDE REN-ACPMHV DFSRQK---- ALADTEVRGQ N-IKRGDRIM LSYPSANRDE EVF-SNPDEF DITRFP---- AKEDTVLGGE YPLEKGDELM VLIPQLHRDK TIWGDDVEEF RPERFENPSA ---VSHTTFG HGSHLCLGQH LARREIIVTL KEWLTRIPDF SIAPGAQIQH ---NRHLGFG WGAHMCLGQH LAKLEMKIFF EELLPKLKSV ELS-GPPRLV IPQHAFKPFG NGQRACIGQQ FALHEATLVL GMMLKHFDFE DHT-NYELDI KSGIVSGVQA LPLVWDPATT KAV- ATNFVGGPKN VPIRFTKA-- ---- KETLTLKPEG FVVKAKSKKI PLGG """, "testdisc" : """ 3 10 CAM 0000000000 TERP 0000011111 BM3 0001111111 """, "testrest" : """ 5 13 2 Alpha ++-+-++--+++- Beta ++++--+--+++- Gamma -+--+-++-+-++ Delta ++-+----++--- Epsilon ++++----++--- """, "testdist" : """ 7 Bovine 0.0000 1.2385 1.3472 1.2070 1.0857 1.2832 1.2402 Mouse 1.2385 0.0000 1.1231 1.0966 1.1470 1.2157 1.1530 Gibbon 1.3472 1.1231 0.0000 0.5924 0.5077 0.5466 0.5001 Orang 1.2070 1.0966 0.5924 0.0000 0.3857 0.4405 0.4092 Gorilla 1.0857 1.1470 0.5077 0.3857 0.0000 0.3170 0.2817 Chimp 1.2832 1.2157 0.5466 0.4405 0.3170 0.0000 0.2570 Human 1.2402 1.1530 0.5001 0.4092 0.2817 0.2570 0.0000 """, "testtree" : "((BM3,TERP),CAM);" } if __name__ == "__main__": print("Starting main") main() else: print("fuck")
mit
zedoude/wfrog
wfcommon/generic/include.py
5
2511
## Copyright 2009 Laurent Bovet <laurent.bovet@windmaster.ch> ## Jordi Puigsegur <jordi.puigsegur@gmail.com> ## ## This file is part of wfrog ## ## wfrog is free software: you can redistribute it and/or modify ## it under the terms of the GNU General Public License as published by ## the Free Software Foundation, either version 3 of the License, or ## (at your option) any later version. ## ## This program is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## GNU General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with this program. If not, see <http://www.gnu.org/licenses/>. import yaml import logging import wrapper import copy from os import path from Cheetah.Template import Template class IncludeElement(wrapper.ElementWrapper): """ Includes another yaml configuration file. The included file must define only one root element. [ Properties ] path [string]: A path to the file to include. Relative to the main config file. 
""" path = None target = None variables = None abs_path = None logger = logging.getLogger("generic.include") def _init(self, context=None): if not self.target: if context: config_file = context['_yaml_config_file'] else: raise Exception('Context not passed to !include element') dir_name = path.dirname(config_file) self.abs_path=path.join(dir_name, self.path) if not self.variables: self.variables={} if context: self.variables['settings']=context conf_str = str(Template(file=file(self.abs_path, "r"), searchList=[self.variables])) config = yaml.load(conf_str) self.target = config.values()[0] return self.target def _call(self, attr, *args, **keywords): if keywords.has_key('context'): self._init(keywords['context']) context = copy.copy(keywords['context']) context['_yaml_config_file'] = self.abs_path keywords['context'] = context else: self._init() self.logger.debug('Calling '+attr+' on ' + str(self.target)) return self.target.__getattribute__(attr).__call__(*args, **keywords)
gpl-3.0
402231466/cda-0512
static/Brython3.1.1-20150328-091302/Lib/browser/local_storage.py
617
2786
# local storage in browser import sys from javascript import JSObject class __UnProvided(): pass class LocalStorage(): storage_type = "local_storage" def __init__(self): if not sys.has_local_storage: raise EnvironmentError("LocalStorage not available") self.store = JSObject(__BRYTHON__.local_storage) def __delitem__(self, key): if (not isinstance(key, str)): raise TypeError("key must be string") if key not in self: raise KeyError(key) self.store.removeItem(key) def __getitem__(self, key): if (not isinstance(key, str)): raise TypeError("key must be string") res = __BRYTHON__.JSObject(self.store.getItem(key)) if res: return res raise KeyError(key) def __setitem__(self, key, value): if (not isinstance(key, str)): raise TypeError("key must be string") if (not isinstance(value, str)): raise TypeError("value must be string") self.store.setItem(key, value) # implement "in" functionality def __contains__(self, key): if (not isinstance(key, str)): raise TypeError("key must be string") res = __BRYTHON__.JSObject(self.store.getItem(key)) if res is None: return False return True def __iter__(self): keys = self.keys() return keys.__iter__() def get(self, key, default=None): if (not isinstance(key, str)): raise TypeError("key must be string") return __BRYTHON__.JSObject(self.store.getItem(key)) or default def pop(self, key, default=__UnProvided()): if (not isinstance(key, str)): raise TypeError("key must be string") if type(default) is __UnProvided: ret = self.get(key) del self[key] # will throw key error if doesn't exist return ret else: if key in self: ret = self.get(key) del self[key] return ret else: return default # while a real dict provides a view, returning a generator would less helpful than simply returning a list # and creating a custom iterator is overkill and would likely result in slower performance def keys(self): return [__BRYTHON__.JSObject(self.store.key(i)) for i in range(self.store.length)] def values(self): return [__BRYTHON__.JSObject(self.__getitem__(k)) 
for k in self.keys()] def items(self): return list(zip(self.keys(), self.values())) def clear(self): self.store.clear() def __len__(self): return self.store.length if sys.has_local_storage: storage = LocalStorage()
agpl-3.0
jeshaitan/Scicademy
node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
1558
4945
# Copyright (c) 2011 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import re import os def XmlToString(content, encoding='utf-8', pretty=False): """ Writes the XML content to disk, touching the file only if it has changed. Visual Studio files have a lot of pre-defined structures. This function makes it easy to represent these structures as Python data structures, instead of having to create a lot of function calls. Each XML element of the content is represented as a list composed of: 1. The name of the element, a string, 2. The attributes of the element, a dictionary (optional), and 3+. The content of the element, if any. Strings are simple text nodes and lists are child elements. Example 1: <test/> becomes ['test'] Example 2: <myelement a='value1' b='value2'> <childtype>This is</childtype> <childtype>it!</childtype> </myelement> becomes ['myelement', {'a':'value1', 'b':'value2'}, ['childtype', 'This is'], ['childtype', 'it!'], ] Args: content: The structured content to be converted. encoding: The encoding to report on the first XML line. pretty: True if we want pretty printing with indents and new lines. Returns: The XML content as a string. """ # We create a huge list of all the elements of the file. xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding] if pretty: xml_parts.append('\n') _ConstructContentList(xml_parts, content, pretty) # Convert it to a string return ''.join(xml_parts) def _ConstructContentList(xml_parts, specification, pretty, level=0): """ Appends the XML parts corresponding to the specification. Args: xml_parts: A list of XML parts to be appended to. specification: The specification of the element. See EasyXml docs. pretty: True if we want pretty printing with indents and new lines. level: Indentation level. """ # The first item in a specification is the name of the element. 
if pretty: indentation = ' ' * level new_line = '\n' else: indentation = '' new_line = '' name = specification[0] if not isinstance(name, str): raise Exception('The first item of an EasyXml specification should be ' 'a string. Specification was ' + str(specification)) xml_parts.append(indentation + '<' + name) # Optionally in second position is a dictionary of the attributes. rest = specification[1:] if rest and isinstance(rest[0], dict): for at, val in sorted(rest[0].iteritems()): xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True))) rest = rest[1:] if rest: xml_parts.append('>') all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True) multi_line = not all_strings if multi_line and new_line: xml_parts.append(new_line) for child_spec in rest: # If it's a string, append a text node. # Otherwise recurse over that child definition if isinstance(child_spec, str): xml_parts.append(_XmlEscape(child_spec)) else: _ConstructContentList(xml_parts, child_spec, pretty, level + 1) if multi_line and indentation: xml_parts.append(indentation) xml_parts.append('</%s>%s' % (name, new_line)) else: xml_parts.append('/>%s' % new_line) def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False, win32=False): """ Writes the XML content to disk, touching the file only if it has changed. Args: content: The structured content to be written. path: Location of the file. encoding: The encoding to report on the first line of the XML file. pretty: True if we want pretty printing with indents and new lines. 
""" xml_string = XmlToString(content, encoding, pretty) if win32 and os.linesep != '\r\n': xml_string = xml_string.replace('\n', '\r\n') try: xml_string = xml_string.encode(encoding) except Exception: xml_string = unicode(xml_string, 'latin-1').encode(encoding) # Get the old content try: f = open(path, 'r') existing = f.read() f.close() except: existing = None # It has changed, write it if existing != xml_string: f = open(path, 'w') f.write(xml_string) f.close() _xml_escape_map = { '"': '&quot;', "'": '&apos;', '<': '&lt;', '>': '&gt;', '&': '&amp;', '\n': '&#xA;', '\r': '&#xD;', } _xml_escape_re = re.compile( "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys()))) def _XmlEscape(value, attr=False): """ Escape a string for inclusion in XML.""" def replace(match): m = match.string[match.start() : match.end()] # don't replace single quotes in attrs if attr and m == "'": return m return _xml_escape_map[m] return _xml_escape_re.sub(replace, value)
gpl-3.0
sanctuaryaddon/sanctuary
script.module.liveresolver/lib/liveresolver/resolvers/youtube.py
2
2372
# -*- coding: utf-8 -*- ''' Genesis Add-on Copyright (C) 2015 lambda This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import re,urllib from liveresolver.modules import client def resolve(url): try: id = url.split("?v=")[-1].split("/")[-1].split("?")[0].split("&")[0] result = client.request('http://www.youtube.com/watch?v=%s' % id) message = client.parseDOM(result, 'div', attrs = {'id': 'unavailable-submessage'}) message = ''.join(message) alert = client.parseDOM(result, 'div', attrs = {'id': 'watch7-notification-area'}) if re.search('LIVE_WATCHING_NOW', result): url = live(result, id) if not url == None: return url if len(alert) > 0: raise Exception() if re.search('[a-zA-Z]', message): raise Exception() url = 'plugin://plugin.video.youtube/play/?video_id=%s' % id return url except: return def live(result, id): try: hls = re.compile('"hlsvp" *: *"(.+?)"').findall(result) if len(hls) == 0: url = 'https://www.youtube.com/watch?v=%s' % id url = 'http://translate.googleusercontent.com/translate_c?anno=2&hl=en&sl=mt&tl=en&u=%s' % url hls = client.request(url) hls = re.compile('"hlsvp" *: *"(.+?)"').findall(hls) url = urllib.unquote(hls[0]).replace('\\/', '/') result = client.request(url) result = result.replace('\n','') url = re.compile('RESOLUTION *= *(\d*)x\d{1}.+?(http.+?\.m3u8)').findall(result) url = [(int(i[0]), i[1]) for i in url] url.sort() url = url[-1][1] return url except: return
gpl-2.0
berrange/nova
nova/objects/virtual_interface.py
18
3724
# Copyright (C) 2014, Red Hat, Inc.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from nova import db
from nova import exception
from nova import objects
from nova.objects import base
from nova.objects import fields


class VirtualInterface(base.NovaPersistentObject, base.NovaObject):
    """Versioned object wrapping a virtual_interfaces DB row."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'id': fields.IntegerField(),
        'address': fields.StringField(nullable=True),
        'network_id': fields.IntegerField(),
        'instance_uuid': fields.UUIDField(),
        'uuid': fields.UUIDField(),
    }

    @staticmethod
    def _from_db_object(context, vif, db_vif):
        # Copy every declared field straight from the DB row, then clear
        # the change tracking so the object is considered "clean".
        for field in vif.fields:
            vif[field] = db_vif[field]
        vif._context = context
        vif.obj_reset_changes()
        return vif

    @base.remotable_classmethod
    def get_by_id(cls, context, vif_id):
        # Returns None (implicitly) when no row matches vif_id.
        db_vif = db.virtual_interface_get(context, vif_id)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)

    @base.remotable_classmethod
    def get_by_uuid(cls, context, vif_uuid):
        db_vif = db.virtual_interface_get_by_uuid(context, vif_uuid)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)

    @base.remotable_classmethod
    def get_by_address(cls, context, address):
        db_vif = db.virtual_interface_get_by_address(context, address)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)

    @base.remotable_classmethod
    def get_by_instance_and_network(cls, context, instance_uuid, network_id):
        db_vif = db.virtual_interface_get_by_instance_and_network(context,
                instance_uuid, network_id)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)

    @base.remotable
    def create(self, context):
        # Guard against double-create: a populated 'id' means the row
        # already exists in the database.
        if self.obj_attr_is_set('id'):
            raise exception.ObjectActionError(action='create',
                                              reason='already created')
        updates = self.obj_get_changes()
        db_vif = db.virtual_interface_create(context, updates)
        self._from_db_object(context, self, db_vif)

    @base.remotable_classmethod
    def delete_by_instance_uuid(cls, context, instance_uuid):
        # Bulk delete of every VIF owned by the instance.
        db.virtual_interface_delete_by_instance(context, instance_uuid)


class VirtualInterfaceList(base.ObjectListBase, base.NovaObject):
    """List object holding VirtualInterface items."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'objects': fields.ListOfObjectsField('VirtualInterface'),
    }
    child_versions = {
        '1.0': '1.0',
    }

    @base.remotable_classmethod
    def get_all(cls, context):
        db_vifs = db.virtual_interface_get_all(context)
        return base.obj_make_list(context, cls(context),
                                  objects.VirtualInterface, db_vifs)

    @base.remotable_classmethod
    def get_by_instance_uuid(cls, context, instance_uuid, use_slave=False):
        # use_slave lets callers read from a DB replica when staleness
        # is acceptable.
        db_vifs = db.virtual_interface_get_by_instance(context, instance_uuid,
                                                       use_slave=use_slave)
        return base.obj_make_list(context, cls(context),
                                  objects.VirtualInterface, db_vifs)
apache-2.0
junmin-zhu/chromium-rivertrail
tools/python/google/platform_utils_mac.py
183
5676
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Platform-specific utility methods shared by several scripts."""

import os
import subprocess

import google.path_utils


class PlatformUtility(object):

  def __init__(self, base_dir):
    """Args:
         base_dir: the base dir for running tests.
    """
    self._base_dir = base_dir
    self._httpd_cmd_string = None  # used for starting/stopping httpd
    self._bash = "/bin/bash"

  def _UnixRoot(self):
    """Returns the path to root."""
    return "/"

  def GetFilesystemRoot(self):
    """Returns the root directory of the file system."""
    return self._UnixRoot()

  def GetTempDirectory(self):
    """Returns the file system temp directory

    Note that this does not use a random subdirectory, so it's not
    intrinsically secure.  If you need a secure subdir, use the tempfile
    package.
    """
    return os.getenv("TMPDIR", "/tmp")

  def FilenameToUri(self, path, use_http=False, use_ssl=False, port=8000):
    """Convert a filesystem path to a URI.

    Args:
      path: For an http URI, the path relative to the httpd server's
          DocumentRoot; for a file URI, the full path to the file.
      use_http: if True, returns a URI of the form http://127.0.0.1:8000/.
          If False, returns a file:/// URI.
      use_ssl: if True, returns HTTPS URL (https://127.0.0.1:8000/).
          This parameter is ignored if use_http=False.
      port: The port number to append when returning an HTTP URI
    """
    if use_http:
      protocol = 'http'
      if use_ssl:
        protocol = 'https'
      return "%s://127.0.0.1:%d/%s" % (protocol, port, path)
    return "file://" + path

  def GetStartHttpdCommand(self, output_dir,
                           httpd_conf_path, mime_types_path,
                           document_root=None, apache2=False):
    """Prepares the config file and output directory to start an httpd server.
    Returns a list of strings containing the server's command line+args.

    Args:
      output_dir: the path to the server's output directory, for log files.
        It will be created if necessary.
      httpd_conf_path: full path to the httpd.conf file to be used.
      mime_types_path: full path to the mime.types file to be used.
      document_root: full path to the DocumentRoot.  If None, the
        DocumentRoot from the httpd.conf file will be used.  Note that the
        httpd.conf file alongside this script does not specify any
        DocumentRoot, so if you're using that one, be sure to specify a
        document_root here.
      apache2: boolean if true will cause this function to return start
        command for Apache 2.x as opposed to Apache 1.3.x. This flag
        is ignored on Mac (but preserved here for compatibility in
        function signature with win), where httpd2 is used always
    """
    exe_name = "httpd"
    cert_file = google.path_utils.FindUpward(self._base_dir, 'tools',
                                             'python', 'google',
                                             'httpd_config', 'httpd2.pem')
    ssl_enabled = os.path.exists('/etc/apache2/mods-enabled/ssl.conf')

    # Substitution values for the -c/-C directive strings built below.
    httpd_vars = {
      "httpd_executable_path":
          os.path.join(self._UnixRoot(), "usr", "sbin", exe_name),
      "httpd_conf_path": httpd_conf_path,
      "ssl_certificate_file": cert_file,
      "document_root" : document_root,
      "server_root": os.path.join(self._UnixRoot(), "usr"),
      "mime_types_path": mime_types_path,
      "output_dir": output_dir,
      "ssl_mutex": "file:"+os.path.join(output_dir, "ssl_mutex"),
      "user": os.environ.get("USER", "#%d" % os.geteuid()),
      "lock_file": os.path.join(output_dir, "accept.lock"),
    }

    google.path_utils.MaybeMakeDirectory(output_dir)

    # We have to wrap the command in bash
    # -C: process directive before reading config files
    # -c: process directive after reading config files
    # Apache wouldn't run CGIs with permissions==700 unless we add
    # -c User "<username>"
    httpd_cmd_string = (
      '%(httpd_executable_path)s'
      ' -f %(httpd_conf_path)s'
      ' -c \'TypesConfig "%(mime_types_path)s"\''
      ' -c \'CustomLog "%(output_dir)s/access_log.txt" common\''
      ' -c \'ErrorLog "%(output_dir)s/error_log.txt"\''
      ' -c \'PidFile "%(output_dir)s/httpd.pid"\''
      ' -C \'User "%(user)s"\''
      ' -C \'ServerRoot "%(server_root)s"\''
      ' -c \'LockFile "%(lock_file)s"\''
    )

    if document_root:
      httpd_cmd_string += ' -C \'DocumentRoot "%(document_root)s"\''
    if ssl_enabled:
      httpd_cmd_string += (
        ' -c \'SSLCertificateFile "%(ssl_certificate_file)s"\''
        ' -c \'SSLMutex "%(ssl_mutex)s"\''
      )

    # Save a copy of httpd_cmd_string to use for stopping httpd
    self._httpd_cmd_string = httpd_cmd_string % httpd_vars

    httpd_cmd = [self._bash, "-c", self._httpd_cmd_string]
    return httpd_cmd

  def GetStopHttpdCommand(self):
    """Returns a list of strings that contains the command line+args needed to
    stop the http server used in the http tests.

    This tries to fetch the pid of httpd (if available) and returns the
    command to kill it. If pid is not available, kill all httpd processes
    """
    if not self._httpd_cmd_string:
      return ["true"]   # Haven't been asked for the start cmd yet. Just pass.
    # Add a sleep after the shutdown because sometimes it takes some time for
    # the port to be available again.
    return [self._bash, "-c", self._httpd_cmd_string + ' -k stop && sleep 5']
bsd-3-clause
Namax0r/resistor-calculator
resistor_calculator.py
1
9566
#!/usr/bin/env python
"""Resistor color-code calculator with a small tkinter GUI.

The user picks up to three band colors, a multiplier color and a
tolerance color; the app shows the nominal, minimum and maximum
resistance, suffixed with kΩ / MΩ where appropriate.
"""

# Basic version handling
try:
    # Python2
    import Tkinter as tk
except ImportError:
    # Python3
    import tkinter as tk
# NOTE(review): the imports below are Python3-only, so the Python2
# fallback above can never actually work — the module effectively
# requires Python3.
from tkinter.ttk import Combobox
from tkinter import messagebox


# Small utility that adds dot notation access to dictionary attributes
class dotdict(dict):
    __getattr__ = dict.get
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__


# Main view window
root = tk.Tk()

# Store width and height in variable for ease of change
window_width = 300
window_height = 380

# Set min and max size of a GUI window (fixed-size window)
root.minsize(window_width, window_height)
root.maxsize(window_width, window_height)

# StringVars that back the three result fields in the GUI
var_result = tk.StringVar()
var_max = tk.StringVar()
var_min = tk.StringVar()

# Color-code lookup tables for bands, multiplier and tolerance.
# Band values are stored as strings so adjacent digits can be
# concatenated ("4" + "7" -> "47") before conversion to int.
d = {
    'band': {
        'black': "0",
        'brown': "1",
        'red': "2",
        'orange': "3",
        'yellow': "4",
        'green': "5",
        'blue': "6",
        'violet': "7",
        'gray': "8",
        'white': "9"
    },
    'multiplier': {
        'black': 1,
        'brown': 10,
        'red': 100,
        'orange': 1000,
        'yellow': 10000,
        'green': 100000,
        'blue': 1000000,
        'violet': 10000000
    },
    'tolerance': {
        'brown': 0.01,
        'red': 0.02,
        'green': 0.005,
        'blue': 0.025,
        'violet': 0.010,
        'gray': 0.005,
        'gold': 0.05,
        'silver': 0.10
    }
}

# Enable dot notation on the dictionary
d = dotdict(d)


class ResistorCalculator:
    """Builds the GUI and computes resistance from the selected colors."""

    def __init__(self, parent, title):
        self.parent = parent
        self.parent.title(title)
        self.parent.protocol("WM_DELETE_WINDOW", self.close_program)

        # Current combobox selections (color names as strings)
        self.band1_var_result = 0
        self.band2_var_result = 0
        self.band3_var_result = 0
        self.multiplier_var_result = 0
        self.tolerance_var_result = 0

        self.build_window()

    def close_program(self, event=None):
        """Destroy the window when [X] is pressed."""
        self.parent.destroy()

    def combobox_handler(self, event):
        """Snapshot all combobox values on any '<<ComboboxSelected>>'."""
        self.band1_var_result = self.band1_var.get()
        self.band2_var_result = self.band2_var.get()
        self.band3_var_result = self.band3_var.get()
        self.multiplier_var_result = self.multiplier_var.get()
        self.tolerance_var_result = self.tolerance_var.get()

    def error_not_enough_args(self):
        """Tell the user that too few colors are selected to calculate."""
        tk.messagebox.showinfo("Error", "Not enough arguments to calculate. Please select more values.")

    def add_mark(self, val, mark):
        """Return *val* with the unit suffix *mark* appended, e.g. '4.7 kΩ'.

        BUG FIX: the original returned the tuple ``(val, mark)``, so the
        GUI displayed e.g. ``(4.7, 'kΩ')`` instead of ``4.7 kΩ``.
        """
        return '%s %s' % (val, mark)

    def calculate_resistor(self):
        """Compute nominal/min/max resistance and publish to the GUI vars.

        Raises no exceptions to the caller: a missing selection surfaces
        as a KeyError internally and is reported via a message box.
        """
        try:
            # Band 3 is optional: while it still holds its initial " "
            # placeholder, only the first two bands are concatenated.
            if self.band3_var_result == " ":
                bands = d.band[self.band1_var_result] + d.band[self.band2_var_result]
            else:
                bands = d.band[self.band1_var_result] + d.band[self.band2_var_result] + d.band[self.band3_var_result]

            # Convert string into int so we can do mathematical operations on it
            int_bands = int(bands)

            # Set multiplier and tolerance
            multiplier = d.multiplier[self.multiplier_var_result]
            tolerance = d.tolerance[self.tolerance_var_result]

            # Calculate the resistance based on the formula
            formula = (int_bands * multiplier)
            max_resistance = formula + (formula * tolerance)
            min_resistance = formula - (formula * tolerance)

            result_max = max_resistance / multiplier
            result_min = min_resistance / multiplier
            result_normal = formula / multiplier

            if formula < 1000:
                # Below 1 kΩ: show the raw ohm values, no suffix.
                result_max = max_resistance
                result_min = min_resistance
                result_normal = formula
            # BUG FIX: the original condition was
            # ``formula > 1000 and formula < 1000000``, which sent a value
            # of exactly 1000 to the MΩ branch.  ``formula < 1000000``
            # (reached only when formula >= 1000) labels it kΩ correctly.
            elif formula < 1000000:
                result_max = self.add_mark(result_max, "kΩ")
                result_min = self.add_mark(result_min, "kΩ")
                result_normal = self.add_mark(result_normal, "kΩ")
            else:
                result_max = self.add_mark(result_max, "MΩ")
                result_min = self.add_mark(result_min, "MΩ")
                result_normal = self.add_mark(result_normal, "MΩ")

            # Set the variables that display result in the GUI
            var_result.set(result_normal)
            var_max.set(result_max)
            var_min.set(result_min)

        # KeyError exception when there are not enough values to calculate
        except KeyError:
            self.error_not_enough_args()

    def build_window(self):
        """Create and lay out all widgets of the calculator window."""
        # Band 1
        band1_label = tk.Label(self.parent, text="Band 1")
        band1_label.grid(row=0, column=0, ipadx=30, pady=5)
        self.band1_var = tk.StringVar()
        band1_combo = Combobox(self.parent, state='readonly', height='10',
                               justify='center', textvariable=self.band1_var)
        band1_combo['values'] = ('black', 'brown', 'red', 'orange', 'yellow',
                                 'green', 'blue', 'violet', 'gray', 'white')
        band1_combo.bind('<<ComboboxSelected>>', self.combobox_handler)
        band1_combo.grid(row=0, column=1, padx=10)

        # Band 2
        band2_label = tk.Label(self.parent, text="Band 2")
        band2_label.grid(row=2, column=0, pady=5)
        self.band2_var = tk.StringVar()
        band2_combo = Combobox(self.parent, state='readonly', height='10',
                               justify='center', textvariable=self.band2_var)
        band2_combo['values'] = ('black', 'brown', 'red', 'orange', 'yellow',
                                 'green', 'blue', 'violet', 'gray', 'white')
        band2_combo.bind('<<ComboboxSelected>>', self.combobox_handler)
        band2_combo.grid(row=2, column=1)

        # Band 3
        band3_label = tk.Label(self.parent, text="Band 3")
        band3_label.grid(row=4, column=0, pady=5)
        self.band3_var = tk.StringVar()
        # Setting band3 to " " helps with modification of calculation
        # formula based on this value (see calculate_resistor).
        self.band3_var.set(" ")
        band3_combo = Combobox(self.parent, state='readonly', height='10',
                               justify='center', textvariable=self.band3_var)
        band3_combo['values'] = ('black', 'brown', 'red', 'orange', 'yellow',
                                 'green', 'blue', 'violet', 'gray', 'white')
        band3_combo.bind('<<ComboboxSelected>>', self.combobox_handler)
        band3_combo.grid(row=4, column=1)

        # Multiplier
        multiplier_label = tk.Label(self.parent, text="Multiplier")
        multiplier_label.grid(row=6, column=0, pady=5)
        self.multiplier_var = tk.StringVar()
        multiplier_combo = Combobox(self.parent, state='readonly', height='10',
                                    justify='center',
                                    textvariable=self.multiplier_var)
        multiplier_combo['values'] = ('black', 'brown', 'red', 'orange',
                                      'yellow', 'green', 'blue', 'violet')
        multiplier_combo.bind('<<ComboboxSelected>>', self.combobox_handler)
        multiplier_combo.grid(row=6, column=1)

        # Tolerance
        tolerance_label = tk.Label(self.parent, text="Tolerance")
        tolerance_label.grid(row=8, column=0, pady=5)
        self.tolerance_var = tk.StringVar()
        tolerance_combo = Combobox(self.parent, state='readonly', height='10',
                                   justify='center',
                                   textvariable=self.tolerance_var)
        tolerance_combo['values'] = ('brown', 'red', 'green', 'blue',
                                     'violet', 'gray', 'gold', 'silver')
        tolerance_combo.bind('<<ComboboxSelected>>', self.combobox_handler)
        tolerance_combo.grid(row=8, column=1)

        # Calculate button
        self.calculate_button = tk.Button(self.parent, text="Calculate",
                                          command=self.calculate_resistor)
        self.calculate_button.grid(row=9, column=1, pady=5, ipadx=40)

        # Results section
        result_label = tk.Message(self.parent, text="Result:")
        result_label.grid(row=12, column=0, pady=10)
        result_value = tk.Message(self.parent, textvariable=var_result,
                                  relief=tk.RAISED)
        result_value.grid(row=12, column=1)

        max_result_label = tk.Message(self.parent, text="Max:")
        max_result_label.grid(row=13, column=0, pady=10, ipadx=20)
        max_result_value = tk.Message(self.parent, textvariable=var_max,
                                      relief=tk.RAISED)
        max_result_value.grid(row=13, column=1)

        min_result_label = tk.Message(self.parent, text="Min:")
        min_result_label.grid(row=14, column=0, pady=10)
        min_result_value = tk.Message(self.parent, textvariable=var_min,
                                      relief=tk.RAISED)
        min_result_value.grid(row=14, column=1)

        # Author name, displayed at the bottom of a program
        author_name = tk.Label(self.parent, text="by Namax0r",
                               relief=tk.SUNKEN, bd=1)
        author_name.place(x=window_width - 70, y=window_height - 20)


if __name__ == '__main__':
    app = ResistorCalculator(root, "Resistor Calculator")
    root.mainloop()
mit
richardfergie/googleads-python-lib
examples/dfp/v201508/placement_service/deactivate_placements.py
4
2090
#!/usr/bin/python # # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This code example deactivates all active placements. To determine which placements exist, run get_all_placements.py. """ # Import appropriate modules from the client library. from googleads import dfp PLACEMENT_ID = 'INSERT_PLACEMENT_ID_HERE' def main(client, placement_id): # Initialize appropriate service. placement_service = client.GetService('PlacementService', version='v201508') # Create query. values = [{ 'key': 'placementId', 'value': { 'xsi_type': 'NumberValue', 'value': placement_id } }] query = 'WHERE id = :placementId' statement = dfp.FilterStatement(query, values, 1) # Get placements by statement. placements = placement_service.getPlacementsByStatement( statement.ToStatement()) for placement in placements: print ('Placement with id \'%s\', name \'%s\', and status \'%s\' will be ' 'deactivated.' % (placement['id'], placement['name'], placement['status'])) # Perform action. result = placement_service.performPlacementAction( {'xsi_type': 'DeactivatePlacements'}, statement.ToStatement()) # Display results. if result and int(result['numChanges']) > 0: print 'Number of placements deactivated: %s' % result['numChanges'] else: print 'No placements were deactivated.' if __name__ == '__main__': # Initialize client object. dfp_client = dfp.DfpClient.LoadFromStorage() main(dfp_client, PLACEMENT_ID)
apache-2.0
svn2github/libtorrent-1_0_x
tools/parse_memory_log.py
59
3492
#! /usr/bin/env python
# Python 2 script: summarizes a libtorrent memory log and renders a
# stacked allocation graph via gnuplot.

import os, sys, time

# usage: memory.log memory_index.log

lines = open(sys.argv[1], 'rb').readlines()
index = open(sys.argv[2], 'rb').readlines()

# logfile format:
# #<allocation-point> <time(ms)> <key ('A' | 'F')> <address> <size> <total-size> <total-space-time> <peak-total-size>
# example:
# #12 38 A 0xd902a0 16 16 0 16

# How many of the top allocation points to report / plot.
allocation_points_to_print = 30


def print_allocation_point(ap):
    # Dump one allocation-point record: aggregate stats plus its
    # originating call stack (parsed from the index file).
    print 'space_time: %d kBms' % (ap['spacetime'] / 1024)
    print 'allocations: %d' % ap['allocations']
    print 'peak: %d kB' % (ap['peak'] / 1024)
    print 'stack: '
    counter = 0
    for e in ap['stack']:
        print '#%d %s' % (counter, e)
        counter += 1


# Build one record per allocation point from the index file; each index
# line is a '#'-separated call stack.
allocation_points = []
for l in index:
    l = l.split('#')
    l.pop(0)
    ap = { 'allocations': 0, 'peak': 0, 'spacetime': 0,
           'allocation_point': len(allocation_points), 'stack': l}
    allocation_points.append(ap);

# Accumulate per-allocation-point stats from the main log.  The peak and
# spacetime columns are running totals, so the last line seen wins.
for l in lines:
    l = l.lstrip('#').rstrip('\n').split(' ')
    if len(l) != 8:
        print l
        continue
    try:
        ap = int(l[0])
        allocation_points[ap]['allocations'] += 1
        allocation_points[ap]['peak'] = int(l[7])
        allocation_points[ap]['spacetime'] = int(l[6])
    except Exception, e:
        print type(e), e, l

print '=== space time ==='

# hot_ap remembers the indices of the top space-time allocation points;
# only those are plotted in the graph below.
hot_ap = []

allocation_points.sort(key = lambda x:x['spacetime'], reverse=True);
counter = 0
for ap in allocation_points[0:allocation_points_to_print]:
    print '== %d ==' % counter
    counter += 1
    print_allocation_point(ap)
    hot_ap.append(ap['allocation_point']);

print '=== allocations ==='

allocation_points.sort(key = lambda x:x['allocations'], reverse=True);
for ap in allocation_points[0:allocation_points_to_print]:
    print_allocation_point(ap)

print '=== peak ==='

allocation_points.sort(key = lambda x:x['peak'], reverse=True);
for ap in allocation_points[0:allocation_points_to_print]:
    print_allocation_point(ap)

# generate graph: one data column per hot allocation point, one row per
# distinct timestamp.  -1 in cur_line means "no sample this interval";
# the previous value is carried forward.
lines = open(sys.argv[1], 'rb').readlines()

out = open('memory.dat', 'wb')
cur_line = [0] * allocation_points_to_print
prev_line = [0] * allocation_points_to_print
last_time = 0

for l in lines:
    l = l.lstrip('#').rstrip('\n').split(' ')
    if len(l) != 8:
        print l
        continue
    try:
        time = int(l[1])
        if time != last_time:
            # Timestamp changed: flush the completed interval as one row.
            print >>out, last_time, '\t',
            for i in range(allocation_points_to_print):
                if cur_line[i] == -1:
                    print >>out, prev_line[i], '\t',
                else:
                    print >>out, cur_line[i], '\t',
                    prev_line[i] = cur_line[i]
            print >>out
            cur_line = [-1] * allocation_points_to_print
            last_time = time

        size = int(l[5])
        ap = int(l[0])
        if ap in hot_ap:
            index = hot_ap.index(ap)
            cur_line[index] = max(cur_line[index], size)
    except Exception, e:
        print type(e), e, l

out.close()

# Emit a gnuplot script: each series is plotted as the cumulative sum of
# itself and all later columns so the filled curves stack visually.
out = open('memory.gnuplot', 'wb')
print >>out, "set term png size 1200,700"
print >>out, 'set output "memory.png"'
print >>out, 'set xrange [0:*]'
print >>out, 'set xlabel "time (ms)"'
print >>out, 'set ylabel "bytes (B)"'
print >>out, "set style data lines"
print >>out, "set key box"
print >>out, 'plot',
for k in range(allocation_points_to_print):
    print >>out, ' "memory.dat" using 1:(',
    for i in range(k, allocation_points_to_print):
        if i == k:
            print >>out, '$%d' % (i + 2),
        else:
            print >>out, '+$%d' % (i + 2),
    print >>out, ') title "%d" with filledcurves x1, \\' % k
print >>out, 'x=0'
out.close()

os.system('gnuplot memory.gnuplot');
bsd-3-clause
Greennut/ostproject
django/contrib/localflavor/es/forms.py
87
7721
# -*- coding: utf-8 -*-
"""
Spanish-specific Form helpers
"""

from __future__ import absolute_import

import re

from django.contrib.localflavor.es.es_provinces import PROVINCE_CHOICES
from django.contrib.localflavor.es.es_regions import REGION_CHOICES
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import RegexField, Select
from django.utils.translation import ugettext_lazy as _


class ESPostalCodeField(RegexField):
    """
    A form field that validates its input as a spanish postal code.

    Spanish postal code is a five digits string, with two first digits
    between 01 and 52, assigned to provinces code.
    """
    default_error_messages = {
        'invalid': _('Enter a valid postal code in the range and format 01XXX - 52XXX.'),
    }

    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        super(ESPostalCodeField, self).__init__(
                r'^(0[1-9]|[1-4][0-9]|5[0-2])\d{3}$',
                max_length, min_length, *args, **kwargs)


class ESPhoneNumberField(RegexField):
    """
    A form field that validates its input as a Spanish phone number.
    Information numbers are ommited.

    Spanish phone numbers are nine digit numbers, where first digit is 6 (for
    cell phones), 8 (for special phones), or 9 (for landlines and special
    phones)

    TODO: accept and strip characters like dot, hyphen... in phone number
    """
    # NOTE(review): the regex also accepts a leading 7 (newer mobile
    # range), which the error message does not mention.
    default_error_messages = {
        'invalid': _('Enter a valid phone number in one of the formats 6XXXXXXXX, 8XXXXXXXX or 9XXXXXXXX.'),
    }

    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        super(ESPhoneNumberField, self).__init__(r'^(6|7|8|9)\d{8}$',
                max_length, min_length, *args, **kwargs)


class ESIdentityCardNumberField(RegexField):
    """
    Spanish NIF/NIE/CIF (Fiscal Identification Number) code.

    Validates three diferent formats:

        NIF (individuals): 12345678A
        CIF (companies): A12345678
        NIE (foreigners): X12345678A

    according to a couple of simple checksum algorithms.

    Value can include a space or hyphen separator between number and letters.
    Number length is not checked for NIF (or NIE), old values start with a 1,
    and future values can contain digits greater than 8. The CIF control digit
    can be a number or a letter depending on company type. Algorithm is not
    public, and different authors have different opinions on which ones allows
    letters, so both validations are assumed true for all types.
    """
    default_error_messages = {
        'invalid': _('Please enter a valid NIF, NIE, or CIF.'),
        'invalid_only_nif': _('Please enter a valid NIF or NIE.'),
        'invalid_nif': _('Invalid checksum for NIF.'),
        'invalid_nie': _('Invalid checksum for NIE.'),
        'invalid_cif': _('Invalid checksum for CIF.'),
    }

    def __init__(self, only_nif=False, max_length=None, min_length=None,
                 *args, **kwargs):
        # only_nif=True restricts valid input to NIF/NIE (no company CIFs).
        self.only_nif = only_nif
        # Checksum alphabets: a NIF control letter is indexed by
        # number % 23; a CIF letter-type control is indexed by the
        # computed digit.
        self.nif_control = 'TRWAGMYFPDXBNJZSQVHLCKE'
        self.cif_control = 'JABCDEFGHI'
        self.cif_types = 'ABCDEFGHKLMNPQS'
        self.nie_types = 'XT'
        id_card_re = re.compile(r'^([%s]?)[ -]?(\d+)[ -]?([%s]?)$' % (self.cif_types + self.nie_types, self.nif_control + self.cif_control), re.IGNORECASE)
        super(ESIdentityCardNumberField, self).__init__(id_card_re, max_length, min_length,
                error_message=self.default_error_messages['invalid%s' % (self.only_nif and '_only_nif' or '')],
                *args, **kwargs)

    def clean(self, value):
        super(ESIdentityCardNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        nif_get_checksum = lambda d: self.nif_control[int(d)%23]

        # Normalize before matching: uppercase, no separators.
        value = value.upper().replace(' ', '').replace('-', '')
        m = re.match(r'^([%s]?)[ -]?(\d+)[ -]?([%s]?)$' % (self.cif_types + self.nie_types, self.nif_control + self.cif_control), value)
        letter1, number, letter2 = m.groups()

        # Leading letter distinguishes the formats: none -> NIF,
        # X/T -> NIE, company letter -> CIF.
        if not letter1 and letter2:
            # NIF
            if letter2 == nif_get_checksum(number):
                return value
            else:
                raise ValidationError(self.error_messages['invalid_nif'])
        elif letter1 in self.nie_types and letter2:
            # NIE uses the same checksum as NIF, over the digits only.
            if letter2 == nif_get_checksum(number):
                return value
            else:
                raise ValidationError(self.error_messages['invalid_nie'])
        elif not self.only_nif and letter1 in self.cif_types and len(number) in [7, 8]:
            # CIF: when no trailing letter, the last digit is the control.
            if not letter2:
                number, letter2 = number[:-1], int(number[-1])
            checksum = cif_get_checksum(number)
            # Control may be numeric or its letter equivalent; both pass.
            if letter2 in (checksum, self.cif_control[checksum]):
                return value
            else:
                raise ValidationError(self.error_messages['invalid_cif'])
        else:
            raise ValidationError(self.error_messages['invalid'])


class ESCCCField(RegexField):
    """
    A form field that validates its input as a Spanish bank account or CCC
    (Codigo Cuenta Cliente).

        Spanish CCC is in format EEEE-OOOO-CC-AAAAAAAAAA where:

            E = entity
            O = office
            C = checksum
            A = account

        It's also valid to use a space as delimiter, or to use no delimiter.

        First checksum digit validates entity and office, and last one
        validates account. Validation is done multiplying every digit of 10
        digit value (with leading 0 if necessary) by number in its position in
        string 1, 2, 4, 8, 5, 10, 9, 7, 3, 6. Sum resulting numbers and extract
        it from 11. Result is checksum except when 10 then is 1, or when 11
        then is 0.

        TODO: allow IBAN validation too
    """
    default_error_messages = {
        'invalid': _('Please enter a valid bank account number in format XXXX-XXXX-XX-XXXXXXXXXX.'),
        'checksum': _('Invalid checksum for bank account number.'),
    }

    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        super(ESCCCField, self).__init__(r'^\d{4}[ -]?\d{4}[ -]?\d{2}[ -]?\d{10}$',
            max_length, min_length, *args, **kwargs)

    def clean(self, value):
        super(ESCCCField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        # Per-position weights used by the checksum algorithm above.
        control_str = [1, 2, 4, 8, 5, 10, 9, 7, 3, 6]
        m = re.match(r'^(\d{4})[ -]?(\d{4})[ -]?(\d{2})[ -]?(\d{10})$', value)
        entity, office, checksum, account = m.groups()
        # str(...).replace maps the special results 10 -> '1', 11 -> '0'.
        get_checksum = lambda d: str(11 - sum([int(digit) * int(control) for digit, control in zip(d, control_str)]) % 11).replace('10', '1').replace('11', '0')
        # First control digit covers '00' + entity + office; second
        # covers the 10-digit account.
        if get_checksum('00' + entity + office) + get_checksum(account) == checksum:
            return value
        else:
            raise ValidationError(self.error_messages['checksum'])


class ESRegionSelect(Select):
    """
    A Select widget that uses a list of spanish regions as its choices.
    """
    def __init__(self, attrs=None):
        super(ESRegionSelect, self).__init__(attrs, choices=REGION_CHOICES)


class ESProvinceSelect(Select):
    """
    A Select widget that uses a list of spanish provinces as its choices.
    """
    def __init__(self, attrs=None):
        super(ESProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)


def cif_get_checksum(number):
    """Compute the numeric CIF control digit for *number* (digits only)."""
    # Odd positions are summed directly; even positions are doubled and
    # their digits summed (Luhn-style).
    s1 = sum([int(digit) for pos, digit in enumerate(number) if int(pos) % 2])
    s2 = sum([sum([int(unit) for unit in str(int(digit) * 2)]) for pos, digit in enumerate(number) if not int(pos) % 2])
    return (10 - ((s1 + s2) % 10)) % 10
bsd-3-clause
GraemeFulton/ghost-mdl-theme
node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
1825
17014
# Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """GYP backend that generates Eclipse CDT settings files. This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML files that can be imported into an Eclipse CDT project. The XML file contains a list of include paths and symbols (i.e. defines). Because a full .cproject definition is not created by this generator, it's not possible to properly define the include dirs and symbols for each file individually. Instead, one set of includes/symbols is generated for the entire project. This works fairly well (and is a vast improvement in general), but may still result in a few indexer issues here and there. This generator has no automated tests, so expect it to be broken. """ from xml.sax.saxutils import escape import os.path import subprocess import gyp import gyp.common import gyp.msvs_emulation import shlex import xml.etree.cElementTree as ET generator_wants_static_library_dependencies_adjusted = False generator_default_variables = { } for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']: # Some gyp steps fail if these are empty(!), so we convert them to variables generator_default_variables[dirname] = '$' + dirname for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME', 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX', 'CONFIGURATION_NAME']: generator_default_variables[unused] = '' # Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as # part of the path when dealing with generated headers. This value will be # replaced dynamically for each configuration. 
generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
    '$SHARED_INTERMEDIATE_DIR'


def CalculateVariables(default_variables, params):
  """Seed default_variables from generator flags and the detected OS flavor."""
  generator_flags = params.get('generator_flags', {})
  for key, val in generator_flags.items():
    default_variables.setdefault(key, val)
  flavor = gyp.common.GetFlavor(params)
  default_variables.setdefault('OS', flavor)
  if flavor == 'win':
    # Copy additional generator configuration data from VS, which is shared
    # by the Eclipse generator.
    import gyp.generator.msvs as msvs_generator
    # NOTE(review): these two assignments only bind function-local names (no
    # `global` statement), so they have no visible effect outside this
    # function — looks like an upstream oversight; confirm before relying
    # on the module-level values on Windows.
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)


def CalculateGeneratorInputInfo(params):
  """Calculate the generator specific info that gets fed to input (called by
  gyp)."""
  generator_flags = params.get('generator_flags', {})
  if generator_flags.get('adjust_static_libraries', False):
    global generator_wants_static_library_dependencies_adjusted
    generator_wants_static_library_dependencies_adjusted = True


def GetAllIncludeDirectories(target_list, target_dicts,
                             shared_intermediate_dirs, config_name, params,
                             compiler_path):
  """Calculate the set of include directories to be used.

  Returns:
    A list including all the include_dir's specified for every target followed
    by any include directories that were added as cflag compiler options.
  """

  gyp_includes_set = set()
  compiler_includes_list = []

  # Find compiler's default include dirs.
  if compiler_path:
    command = shlex.split(compiler_path)
    command.extend(['-E', '-xc++', '-v', '-'])
    # Feed an empty translation unit on stdin; the dirs we want are printed
    # on stderr (communicate()[1]).
    proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output = proc.communicate()[1]
    # Extract the list of include dirs from the output, which has this format:
    #   ...
    #   #include "..." search starts here:
    #   #include <...> search starts here:
    #    /usr/include/c++/4.6
    #    /usr/local/include
    #   End of search list.
    #   ...
    in_include_list = False
    for line in output.splitlines():
      if line.startswith('#include'):
        in_include_list = True
        continue
      if line.startswith('End of search list.'):
        break
      if in_include_list:
        include_dir = line.strip()
        if include_dir not in compiler_includes_list:
          compiler_includes_list.append(include_dir)

  flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
    generator_flags = params.get('generator_flags', {})
  for target_name in target_list:
    target = target_dicts[target_name]
    if config_name in target['configurations']:
      config = target['configurations'][config_name]

      # Look for any include dirs that were explicitly added via cflags. This
      # may be done in gyp files to force certain includes to come at the end.
      # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
      # remove this.
      if flavor == 'win':
        msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
        cflags = msvs_settings.GetCflags(config_name)
      else:
        cflags = config['cflags']
      for cflag in cflags:
        if cflag.startswith('-I'):
          include_dir = cflag[2:]
          if include_dir not in compiler_includes_list:
            compiler_includes_list.append(include_dir)

      # Find standard gyp include dirs.
      # NOTE: dict.has_key() is Python-2 only, consistent with the rest of gyp.
      if config.has_key('include_dirs'):
        include_dirs = config['include_dirs']
        for shared_intermediate_dir in shared_intermediate_dirs:
          for include_dir in include_dirs:
            include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR',
                                              shared_intermediate_dir)
            if not os.path.isabs(include_dir):
              # Relative include dirs are resolved against the directory of
              # the .gyp file that defined the target.
              base_dir = os.path.dirname(target_name)

              include_dir = base_dir + '/' + include_dir
              include_dir = os.path.abspath(include_dir)

            gyp_includes_set.add(include_dir)

  # Generate a list that has all the include dirs.
  all_includes_list = list(gyp_includes_set)
  all_includes_list.sort()
  for compiler_include in compiler_includes_list:
    if not compiler_include in gyp_includes_set:
      all_includes_list.append(compiler_include)

  # All done.
  return all_includes_list


def GetCompilerPath(target_list, data, options):
  """Determine a command that can be used to invoke the compiler.

  Returns:
    If this is a gyp project that has explicit make settings, try to determine
    the compiler from that.  Otherwise, see if a compiler was specified via the
    CC_target environment variable.
  """
  # First, see if the compiler is configured in make's settings.
  # NOTE(review): make_global_settings is unpacked as (key, value) pairs, so
  # it is presumably a list of 2-tuples in gyp's data model (the {} default
  # simply yields no iterations) — confirm against gyp/input.py.
  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings_dict = data[build_file].get('make_global_settings', {})
  for key, value in make_global_settings_dict:
    if key in ['CC', 'CXX']:
      return os.path.join(options.toplevel_dir, value)

  # Check to see if the compiler was specified as an environment variable.
  for key in ['CC_target', 'CC', 'CXX']:
    compiler = os.environ.get(key)
    if compiler:
      return compiler

  # Fall back to plain gcc on PATH.
  return 'gcc'


def GetAllDefines(target_list, target_dicts, data, config_name, params,
                  compiler_path):
  """Calculate the defines for a project.

  Returns:
    A dict that includes explict defines declared in gyp files along with all
    of the default defines that the compiler uses.
  """

  # Get defines declared in the gyp files.
  all_defines = {}
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
    generator_flags = params.get('generator_flags', {})
  for target_name in target_list:
    target = target_dicts[target_name]

    if flavor == 'win':
      msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
      extra_defines = msvs_settings.GetComputedDefines(config_name)
    else:
      extra_defines = []
    if config_name in target['configurations']:
      config = target['configurations'][config_name]
      target_defines = config['defines']
    else:
      target_defines = []
    for define in target_defines + extra_defines:
      # Normalize "FOO" to "FOO=1" and split at the first '=' only, so
      # values containing '=' survive intact.
      split_define = define.split('=', 1)
      if len(split_define) == 1:
        split_define.append('1')
      if split_define[0].strip() in all_defines:
        # Already defined
        continue
      all_defines[split_define[0].strip()] = split_define[1].strip()

  # Get default compiler defines (if possible).
  if flavor == 'win':
    return all_defines  # Default defines already processed in the loop above.
  if compiler_path:
    command = shlex.split(compiler_path)
    command.extend(['-E', '-dM', '-'])
    # `cc -E -dM` on an empty input dumps every predefined macro as
    # "#define KEY VALUE" lines.
    cpp_proc = subprocess.Popen(args=command, cwd='.',
                                stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    cpp_output = cpp_proc.communicate()[0]
    cpp_lines = cpp_output.split('\n')
    for cpp_line in cpp_lines:
      if not cpp_line.strip():
        continue
      cpp_line_parts = cpp_line.split(' ', 2)
      key = cpp_line_parts[1]
      if len(cpp_line_parts) >= 3:
        val = cpp_line_parts[2]
      else:
        val = '1'
      all_defines[key] = val

  return all_defines


def WriteIncludePaths(out, eclipse_langs, include_dirs):
  """Write the includes section of a CDT settings export file."""
  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.' \
            'settingswizards.IncludePaths">\n')
  out.write('    <language name="holder for library settings"></language>\n')
  for lang in eclipse_langs:
    out.write('    <language name="%s">\n' % lang)
    for include_dir in include_dirs:
      out.write('      <includepath workspace_path="false">%s</includepath>\n' %
                include_dir)
    out.write('    </language>\n')
  out.write('  </section>\n')


def WriteMacros(out, eclipse_langs, defines):
  """Write the macros section of a CDT settings export file."""
  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.' \
            'settingswizards.Macros">\n')
  out.write('    <language name="holder for library settings"></language>\n')
  for lang in eclipse_langs:
    out.write('    <language name="%s">\n' % lang)
    # iterkeys() is Python-2 only, consistent with the rest of gyp.
    for key in sorted(defines.iterkeys()):
      out.write('      <macro><name>%s</name><value>%s</value></macro>\n' %
                (escape(key), escape(defines[key])))
    out.write('    </language>\n')
  out.write('  </section>\n')


def GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name):
  """Emit the CDT settings and classpath files for one build configuration."""
  options = params['options']
  generator_flags = params.get('generator_flags', {})

  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
                           config_name)

  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
  # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
  # SHARED_INTERMEDIATE_DIR. Include both possible locations.
  shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
                              os.path.join(toplevel_build, 'gen')]

  GenerateCdtSettingsFile(target_list,
                          target_dicts,
                          data,
                          params,
                          config_name,
                          os.path.join(toplevel_build,
                                       'eclipse-cdt-settings.xml'),
                          options,
                          shared_intermediate_dirs)
  GenerateClasspathFile(target_list,
                        target_dicts,
                        options.toplevel_dir,
                        toplevel_build,
                        os.path.join(toplevel_build,
                                     'eclipse-classpath.xml'))


def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
                            config_name, out_name, options,
                            shared_intermediate_dirs):
  """Write the CDT settings XML (include paths + macros) to out_name."""
  gyp.common.EnsureDirExists(out_name)
  with open(out_name, 'w') as out:
    out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
    out.write('<cdtprojectproperties>\n')

    eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
                     'GNU C++', 'GNU C', 'Assembly']
    compiler_path = GetCompilerPath(target_list, data, options)
    include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
                                            shared_intermediate_dirs,
                                            config_name, params, compiler_path)
    WriteIncludePaths(out, eclipse_langs, include_dirs)
    defines = GetAllDefines(target_list, target_dicts, data, config_name,
                            params, compiler_path)
    WriteMacros(out, eclipse_langs, defines)

    out.write('</cdtprojectproperties>\n')


def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
                          toplevel_build, out_name):
  '''Generates a classpath file suitable for symbol navigation and code
  completion of Java code (such as in Android projects) by finding all
  .java and .jar files used as action inputs.'''
  gyp.common.EnsureDirExists(out_name)
  result = ET.Element('classpath')

  def AddElements(kind, paths):
    # First, we need to normalize the paths so they are all relative to the
    # toplevel dir.
    rel_paths = set()
    for path in paths:
      if os.path.isabs(path):
        rel_paths.add(os.path.relpath(path, toplevel_dir))
      else:
        rel_paths.add(path)

    for path in sorted(rel_paths):
      entry_element = ET.SubElement(result, 'classpathentry')
      entry_element.set('kind', kind)
      entry_element.set('path', path)

  AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
  AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
  # Include the standard JRE container and a dummy out folder
  AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
  # Include a dummy out folder so that Eclipse doesn't use the default /bin
  # folder in the root of the project.
  AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])

  ET.ElementTree(result).write(out_name)


def GetJavaJars(target_list, target_dicts, toplevel_dir):
  '''Generates a sequence of all .jars used as inputs.'''
  for target_name in target_list:
    target = target_dicts[target_name]
    for action in target.get('actions', []):
      for input_ in action['inputs']:
        if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'):
          if os.path.isabs(input_):
            yield input_
          else:
            # Relative jar paths are resolved against the defining .gyp file.
            yield os.path.join(os.path.dirname(target_name), input_)


def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
  '''Generates a sequence of all likely java package root directories.'''
  for target_name in target_list:
    target = target_dicts[target_name]
    for action in target.get('actions', []):
      for input_ in action['inputs']:
        if (os.path.splitext(input_)[1] == '.java' and
            not input_.startswith('$')):
          dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
                                              input_))
          # If there is a parent 'src' or 'java' folder, navigate up to it -
          # these are canonical package root names in Chromium.  This will
          # break if 'src' or 'java' exists in the package structure. This
          # could be further improved by inspecting the java file for the
          # package name if this proves to be too fragile in practice.
          parent_search = dir_
          while os.path.basename(parent_search) not in ['src', 'java']:
            parent_search, _ = os.path.split(parent_search)
            if not parent_search or parent_search == toplevel_dir:
              # Didn't find a known root, just return the original path
              yield dir_
              break
          else:
            # Loop exited normally: a 'src'/'java' ancestor was found.
            yield parent_search


def GenerateOutput(target_list, target_dicts, data, params):
  """Generate an XML settings file that can be imported into a CDT project."""

  if params['options'].generator_output:
    raise NotImplementedError("--generator_output not implemented for eclipse")

  user_config = params.get('generator_flags', {}).get('config', None)
  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            user_config)
  else:
    config_names = target_dicts[target_list[0]]['configurations'].keys()
    for config_name in config_names:
      GenerateOutputForConfig(target_list, target_dicts, data, params,
                              config_name)
mit
zimmermatt/flink
flink-libraries/flink-python/src/test/python/org/apache/flink/python/api/args/no_arg.py
6
1273
# ############################################################################### # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ################################################################################ from flink.plan.Environment import get_environment import sys from utils import Verify if __name__ == "__main__": env = get_environment() d1 = env.from_elements(len(sys.argv)) d1.map_partition(Verify([1], "NoArgument")).output() #Execution env.set_parallelism(1) env.execute(local=True)
apache-2.0
leonsio/YAHM
share/tools/ubi_reader/ubifs/walk.py
3
2731
#!/usr/bin/env python
#############################################################
# ubi_reader/ubifs
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#############################################################

from ubifs import extract
from ubifs.defines import *


def index(ubifs, lnum, offset, inodes=None):
    """Walk the index gathering Inode, Dir Entry, and File nodes.

    Arguments:
    Obj:ubifs    -- UBIFS object.
    Int:lnum     -- Logical erase block number.
    Int:offset   -- Offset in logical erase block.
    Dict:inodes  -- Dict of ino/dent/file nodes keyed to inode number;
                    a fresh dict is created when omitted.

    Returns:
    Dict:inodes  -- Dict of ino/dent/file nodes keyed to inode number.
        'ino'    -- Inode node.
        'data'   -- List of data nodes if present.
        'dent'   -- List of directory entry nodes if present.
    """
    # BUG FIX: the original signature used a mutable default (inodes={}),
    # so results silently accumulated across separate top-level calls.
    # The None-sentinel idiom gives each walk its own dict; the recursive
    # calls below still share one accumulator because they pass it on.
    if inodes is None:
        inodes = {}

    chdr = extract.common_hdr(ubifs, lnum, offset)

    if chdr.node_type == UBIFS_IDX_NODE:
        # Interior index node: recurse into every branch.
        idxn = extract.idx_node(ubifs, lnum, offset + UBIFS_COMMON_HDR_SZ)
        for branch in idxn.branches:
            index(ubifs, branch.lnum, branch.offs, inodes)

    elif chdr.node_type == UBIFS_INO_NODE:
        # Leaf: inode node, one per inode number.
        inon = extract.ino_node(ubifs, lnum, offset + UBIFS_COMMON_HDR_SZ)
        ino_num = inon.key['ino_num']

        if ino_num not in inodes:
            inodes[ino_num] = {}

        inodes[ino_num]['ino'] = inon

    elif chdr.node_type == UBIFS_DATA_NODE:
        # Leaf: file data node; a file may have many, so collect a list.
        datn = extract.data_node(ubifs, lnum,
                                 offset + UBIFS_COMMON_HDR_SZ, chdr.len)
        ino_num = datn.key['ino_num']

        if ino_num not in inodes:
            inodes[ino_num] = {}

        if 'data' not in inodes[ino_num]:
            inodes[ino_num]['data'] = []

        inodes[ino_num]['data'].append(datn)

    elif chdr.node_type == UBIFS_DENT_NODE:
        # Leaf: directory entry node; a directory may have many.
        dn = extract.dent_node(ubifs, lnum, offset + UBIFS_COMMON_HDR_SZ)
        ino_num = dn.key['ino_num']

        if ino_num not in inodes:
            inodes[ino_num] = {}

        if 'dent' not in inodes[ino_num]:
            inodes[ino_num]['dent'] = []

        inodes[ino_num]['dent'].append(dn)

    # Return the accumulator so the documented contract ("Returns:
    # Dict:inodes") actually holds; existing callers that pass their own
    # dict and ignore the return value are unaffected.
    return inodes
cc0-1.0
bliz937/kivy
examples/demo/multistroke/settings.py
43
3078
# Kivy widgets backing the multistroke demo's settings screen. The visual
# layout lives in settings.kv (loaded below); these classes only hold the
# bindable properties and the popup-based numeric editor.
__all__ = ('MultistrokeSettingsContainer', 'MultistrokeSettingItem',
           'MultistrokeSettingBoolean', 'MultistrokeSettingSlider',
           'MultistrokeSettingString', 'MultistrokeSettingTitle')

from kivy.factory import Factory
from kivy.lang import Builder
from kivy.uix.gridlayout import GridLayout
from kivy.uix.label import Label
from kivy.properties import (StringProperty, NumericProperty,
                             OptionProperty, BooleanProperty)
from kivy.uix.popup import Popup

# Side effect at import time: registers all kv rules for the widgets below.
Builder.load_file('settings.kv')


class MultistrokeSettingsContainer(GridLayout):
    # Pure layout container; everything is defined in settings.kv.
    pass


class MultistrokeSettingItem(GridLayout):
    # Base row for a single setting: a title plus a description.
    title = StringProperty('<No title set>')
    desc = StringProperty('')


class MultistrokeSettingTitle(Label):
    # Section header row (label-only, not an editable setting).
    title = StringProperty('<No title set>')
    desc = StringProperty('')


class MultistrokeSettingBoolean(MultistrokeSettingItem):
    # On/off toggle setting.
    button_text = StringProperty('')
    value = BooleanProperty(False)


class MultistrokeSettingString(MultistrokeSettingItem):
    # Free-form text setting.
    value = StringProperty('')


class EditSettingPopup(Popup):
    """Popup used by MultistrokeSettingSlider to type an exact value."""

    def __init__(self, **kwargs):
        super(EditSettingPopup, self).__init__(**kwargs)
        # Custom event fired (from kv) when the user confirms the input.
        self.register_event_type('on_validate')

    def on_validate(self, *l):
        # Default handler required by register_event_type; bound externally.
        pass


class MultistrokeSettingSlider(MultistrokeSettingItem):
    """Numeric slider setting; tapping the value label opens a popup editor."""

    min = NumericProperty(0)
    max = NumericProperty(100)
    type = OptionProperty('int', options=['float', 'int'])
    value = NumericProperty(0)

    def __init__(self, **kwargs):
        super(MultistrokeSettingSlider, self).__init__(**kwargs)
        self._popup = EditSettingPopup()
        self._popup.bind(on_validate=self._validate)
        self._popup.bind(on_dismiss=self._dismiss)

    def _to_numtype(self, v):
        # Convert user text to this slider's numeric type; unparseable
        # input deliberately falls back to the minimum instead of raising.
        try:
            if self.type == 'float':
                return round(float(v), 1)
            else:
                return int(v)
        except ValueError:
            return self.min

    def _dismiss(self, *l):
        # Drop keyboard focus when the popup closes.
        self._popup.ids.input.focus = False

    def _validate(self, instance, value):
        # Clamp the typed value to [min, max] before committing it.
        self._popup.dismiss()
        val = self._to_numtype(self._popup.ids.input.text)
        if val < self.min:
            val = self.min
        elif val > self.max:
            val = self.max
        self.value = val

    def on_touch_down(self, touch):
        # Only touches on the value label open the editor; everything else
        # (including the slider itself) takes the normal touch path.
        if not self.ids.sliderlabel.collide_point(*touch.pos):
            return super(MultistrokeSettingSlider, self).on_touch_down(touch)
        ids = self._popup.ids
        ids.value = str(self.value)
        ids.input.text = str(self._to_numtype(self.value))
        self._popup.open()
        ids.input.focus = True
        ids.input.select_all()


# Make the widgets available to kv rules by name.
Factory.register('MultistrokeSettingsContainer',
                 cls=MultistrokeSettingsContainer)
Factory.register('MultistrokeSettingTitle', cls=MultistrokeSettingTitle)
Factory.register('MultistrokeSettingBoolean', cls=MultistrokeSettingBoolean)
Factory.register('MultistrokeSettingSlider', cls=MultistrokeSettingSlider)
Factory.register('MultistrokeSettingString', cls=MultistrokeSettingString)
mit
jnerin/ansible
lib/ansible/modules/cloud/amazon/dynamodb_ttl.py
74
5579
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type


ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}


DOCUMENTATION = '''
---
module: dynamodb_ttl
short_description: set TTL for a given DynamoDB table.
description:
- Uses boto3 to set TTL.
- requires botocore version 1.5.24 or higher.
version_added: "2.4"
options:
  state:
    description:
    - state to set DynamoDB table to
    choices: ['enable', 'disable']
    required: false
    default: enable
  table_name:
    description:
    - name of the DynamoDB table to work on
    required: true
  attribute_name:
    description:
    - the name of the Time to Live attribute used to store the expiration time for items in the table
    - this appears to be required by the API even when disabling TTL.
    required: true

author: "Ted (@tedder)"
extends_documentation_fragment:
- aws
- ec2
requirements: [ botocore>=1.5.24, boto3 ]
'''

EXAMPLES = '''
- name: enable TTL on my cowfacts table
  dynamodb_ttl:
    state: enable
    table_name: cowfacts
    attribute_name: cow_deleted_date

- name: disable TTL on my cowfacts table
  dynamodb_ttl:
    state: disable
    table_name: cowfacts
    attribute_name: cow_deleted_date
'''

RETURN = '''
current_status:
  description: current or new TTL specification.
  type: dict
  returned: always
  sample:
  - { "AttributeName": "deploy_timestamp", "TimeToLiveStatus": "ENABLED" }
  - { "AttributeName": "deploy_timestamp", "Enabled": true }
'''

import distutils.version
import traceback

try:
    import botocore
except ImportError:
    # Absence is reported via the HAS_BOTO3 check in main().
    pass

import ansible.module_utils.ec2
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import ec2_argument_spec, camel_dict_to_snake_dict, HAS_BOTO3


def get_current_ttl_state(c, table_name):
    '''Fetch the TimeToLiveDescription dict for a table (None if absent).'''
    current_state = c.describe_time_to_live(TableName=table_name)
    return current_state.get('TimeToLiveDescription')


def does_state_need_changing(attribute_name, desired_state, current_spec):
    '''Run checks to see if the table needs to be modified. Basically a dirty check.

    ENABLING/DISABLING transitional states count as already matching the
    desired state, so an in-flight change is not re-requested.
    '''
    if not current_spec:
        # we don't have an entry (or a table?)
        return True

    if desired_state.lower() == 'enable' and current_spec.get('TimeToLiveStatus') not in ['ENABLING', 'ENABLED']:
        return True
    if desired_state.lower() == 'disable' and current_spec.get('TimeToLiveStatus') not in ['DISABLING', 'DISABLED']:
        return True
    if attribute_name != current_spec.get('AttributeName'):
        return True

    return False


def set_ttl_state(c, table_name, state, attribute_name):
    '''Set our specification. Returns the update_time_to_live specification
    dict, which is different than the describe_* call.'''
    is_enabled = False
    if state.lower() == 'enable':
        is_enabled = True

    ret = c.update_time_to_live(
        TableName=table_name,
        TimeToLiveSpecification={
            'Enabled': is_enabled,
            'AttributeName': attribute_name
        }
    )

    return ret.get('TimeToLiveSpecification')


def main():
    '''Entry point: validate params, connect, and reconcile the TTL state.'''
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        # BUG FIX: DOCUMENTATION declares "default: enable" but the spec had
        # no default, so omitting `state:` left it None and crashed later on
        # desired_state.lower(). Declare the documented default explicitly.
        state=dict(choices=['enable', 'disable'], default='enable'),
        table_name=dict(required=True),
        attribute_name=dict(required=True))
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
    )

    if not HAS_BOTO3:
        module.fail_json(msg='boto3 required for this module')
    elif distutils.version.StrictVersion(botocore.__version__) < distutils.version.StrictVersion('1.5.24'):
        # TTL was added in this version.
        module.fail_json(msg='Found botocore in version {0}, but >= {1} is required for TTL support'.format(botocore.__version__, '1.5.24'))

    try:
        region, ec2_url, aws_connect_kwargs = ansible.module_utils.ec2.get_aws_connection_info(module, boto3=True)
        dbclient = ansible.module_utils.ec2.boto3_conn(module, conn_type='client', resource='dynamodb', region=region, endpoint=ec2_url, **aws_connect_kwargs)
    except botocore.exceptions.NoCredentialsError as e:
        module.fail_json(msg=str(e))

    result = {'changed': False}
    state = module.params['state']

    # wrap all our calls to catch the standard exceptions. We don't pass `module` in to the
    # methods so it's easier to do here.
    try:
        current_state = get_current_ttl_state(dbclient, module.params['table_name'])

        if does_state_need_changing(module.params['attribute_name'], module.params['state'], current_state):
            # changes needed
            new_state = set_ttl_state(dbclient, module.params['table_name'], module.params['state'], module.params['attribute_name'])
            result['current_status'] = new_state
            result['changed'] = True
        else:
            # no changes needed
            result['current_status'] = current_state

    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
    except botocore.exceptions.ParamValidationError as e:
        module.fail_json(msg=e.message, exception=traceback.format_exc())
    except ValueError as e:
        module.fail_json(msg=str(e))

    module.exit_json(**result)


if __name__ == '__main__':
    main()
gpl-3.0
mvo5/snapcraft
snapcraft/sources.py
4
1347
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- # # Copyright (C) 2015-2016 Canonical Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import sys as _sys if _sys.platform == "linux": from snapcraft.internal.sources import Script # noqa from snapcraft.internal.sources import Bazaar # noqa from snapcraft.internal.sources import Git # noqa from snapcraft.internal.sources import Mercurial # noqa from snapcraft.internal.sources import Subversion # noqa from snapcraft.internal.sources import Tar # noqa from snapcraft.internal.sources import Local # noqa from snapcraft.internal.sources import Zip # noqa from snapcraft.internal.sources import get # noqa from snapcraft.internal.sources import Deb # noqa from snapcraft.internal.sources import Rpm # noqa
gpl-3.0
dlintott/gns3-ppa
src/GNS3/Ui/Form_StartupConfig.py
3
4537
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'Form_StartupConfig.ui'
#
# Created: Mon Sep 9 21:29:21 2013
#      by: PyQt4 UI code generator 4.8.6
#
# WARNING! All changes made in this file will be lost!
# (Auto-generated by pyuic4 — edit Form_StartupConfig.ui and regenerate
# instead of modifying this file by hand.)

from PyQt4 import QtCore, QtGui

# QString.fromUtf8 exists only in PyQt4 API v1; fall back to identity on
# API v2 builds where plain Python strings are used.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    _fromUtf8 = lambda s: s


class Ui_StartupConfigDialog(object):
    # Builds the "Startup-config" editor dialog: a path row (label, line
    # edit, browse/load buttons, load-from-nvram button), a plain-text
    # editor, a save-to-file checkbox and an Ok/Cancel/Apply button box.
    def setupUi(self, StartupConfigDialog):
        StartupConfigDialog.setObjectName(_fromUtf8("StartupConfigDialog"))
        StartupConfigDialog.resize(660, 376)
        StartupConfigDialog.setWindowTitle(QtGui.QApplication.translate("StartupConfigDialog", "Startup-config", None, QtGui.QApplication.UnicodeUTF8))
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/images/logo_icon.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        StartupConfigDialog.setWindowIcon(icon)
        self.gridLayout = QtGui.QGridLayout(StartupConfigDialog)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        self.label = QtGui.QLabel(StartupConfigDialog)
        self.label.setText(QtGui.QApplication.translate("StartupConfigDialog", "Config file:", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setObjectName(_fromUtf8("label"))
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        self.lineEditStartupConfig = QtGui.QLineEdit(StartupConfigDialog)
        self.lineEditStartupConfig.setObjectName(_fromUtf8("lineEditStartupConfig"))
        self.gridLayout.addWidget(self.lineEditStartupConfig, 0, 1, 1, 1)
        self.StartupConfigPath_browser = QtGui.QToolButton(StartupConfigDialog)
        self.StartupConfigPath_browser.setText(QtGui.QApplication.translate("StartupConfigDialog", "...", None, QtGui.QApplication.UnicodeUTF8))
        self.StartupConfigPath_browser.setToolButtonStyle(QtCore.Qt.ToolButtonTextOnly)
        self.StartupConfigPath_browser.setObjectName(_fromUtf8("StartupConfigPath_browser"))
        self.gridLayout.addWidget(self.StartupConfigPath_browser, 0, 2, 1, 1)
        self.LoadStartupConfig = QtGui.QToolButton(StartupConfigDialog)
        self.LoadStartupConfig.setText(_fromUtf8(""))
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/edit-redo.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.LoadStartupConfig.setIcon(icon1)
        self.LoadStartupConfig.setToolButtonStyle(QtCore.Qt.ToolButtonTextOnly)
        self.LoadStartupConfig.setObjectName(_fromUtf8("LoadStartupConfig"))
        self.gridLayout.addWidget(self.LoadStartupConfig, 0, 3, 1, 1)
        self.pushButtonConfigFromNvram = QtGui.QPushButton(StartupConfigDialog)
        self.pushButtonConfigFromNvram.setText(QtGui.QApplication.translate("StartupConfigDialog", "Load config from nvram", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButtonConfigFromNvram.setObjectName(_fromUtf8("pushButtonConfigFromNvram"))
        self.gridLayout.addWidget(self.pushButtonConfigFromNvram, 0, 5, 1, 1)
        self.EditStartupConfig = QtGui.QPlainTextEdit(StartupConfigDialog)
        self.EditStartupConfig.setObjectName(_fromUtf8("EditStartupConfig"))
        self.gridLayout.addWidget(self.EditStartupConfig, 1, 0, 1, 6)
        self.checkBoxSaveIntoConfigFile = QtGui.QCheckBox(StartupConfigDialog)
        self.checkBoxSaveIntoConfigFile.setText(QtGui.QApplication.translate("StartupConfigDialog", "Save changes into the config file", None, QtGui.QApplication.UnicodeUTF8))
        self.checkBoxSaveIntoConfigFile.setChecked(True)
        self.checkBoxSaveIntoConfigFile.setObjectName(_fromUtf8("checkBoxSaveIntoConfigFile"))
        self.gridLayout.addWidget(self.checkBoxSaveIntoConfigFile, 2, 0, 1, 2)
        self.buttonBox = QtGui.QDialogButtonBox(StartupConfigDialog)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Apply|QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
        self.gridLayout.addWidget(self.buttonBox, 3, 0, 1, 6)

        self.retranslateUi(StartupConfigDialog)
        # Old-style signal/slot connections (PyQt4 API).
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), StartupConfigDialog.accept)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), StartupConfigDialog.reject)
        QtCore.QMetaObject.connectSlotsByName(StartupConfigDialog)

    def retranslateUi(self, StartupConfigDialog):
        # All translatable strings are set inline in setupUi above.
        pass

# Registers the Qt resources (:/images, :/icons) used by the icons above.
import svg_resources_rc
gpl-2.0
mammadori/pyglet
experimental/ninepatch.py
29
8273
# Copyright 2009 Joe Wreschnig and others.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
# 3. The names of the authors and contributors may be used to endorse or
#    promote products derived from this software without specific prior
#    written permission.
#
# All warranties and liabilities, express or implied, are hereby
# disclaimed.

"""Draw a NinePatch image.

NinePatch is a format for storing how to cut up a 9-part resizable
rectangular image within the image pixel data directly.

For more information on the NinePatch format, see
http://developer.android.com/guide/topics/graphics/2d-graphics.html#nine-patch.
"""

__all__ = ["NinePatch"]

from pyglet.gl import *


class PixelData(object):
    # Lightweight RGBA pixel accessor used to read the 1px NinePatch
    # marker border.
    def __init__(self, image):
        image_data = image.get_image_data()
        self.has_alpha = 'A' in image_data.format
        # Raw RGBA bytes, one row = width * 4 bytes.
        # NOTE(review): the byte comparisons below ('\x00') assume the data
        # is a Python 2 str — on Python 3 indexing returns ints; confirm the
        # target interpreter.
        self.data = image_data.get_data("RGBA", image.width * 4)
        self.width = image.width
        self.height = image.height

    def is_black(self, x, y):
        # A marker pixel is opaque black; fully transparent pixels never
        # count even if their RGB happens to be zero.
        p = (y * self.width + x) * 4
        if self.has_alpha:
            if self.data[p+3] == '\x00':
                return False  # Fully transparent
        return self.data[p:p+3] == '\x00\x00\x00'


class NinePatch(object):
    """A scalable 9-patch image.
    """

    # Content area of the image, in pixels from the edge.
    padding_top = None
    padding_bottom = None
    padding_right = None
    padding_left = None

    # Resizable area of the image, in pixels from the closest edge
    stretch_top = None
    stretch_left = None
    stretch_right = None
    stretch_bottom = None

    def __init__(self, image):
        """Create NinePatch cuts of an image

        Arguments:
            image - an ImageData (Texture, TextureRegion, etc)
            texture - force cut ImageDatas to be Textures (or Regions)
        """
        data = PixelData(image)
        width = data.width
        height = data.height

        # Texture dimensions after removing the 9patch outline.
        self.width = width - 2
        self.height = height - 2

        # Only need to retain the texture for drawing
        self.texture = image.get_texture()

        # Find stretch area markers: black runs on the top edge (x axis)
        # and left edge (y axis) of the 1px border.  The for/else falls
        # back to a 1px stretch region when no marker is present.
        for x in range(1, width - 1):
            if data.is_black(x, height - 1):
                self.stretch_left = x
                break
        else:
            self.stretch_left = 1

        for x in range(width - 2, 0, -1):
            if data.is_black(x, height - 1):
                self.stretch_right = width - x
                break
        else:
            self.stretch_right = 1

        for y in range(1, height - 1):
            if data.is_black(0, y):
                self.stretch_bottom = y
                break
        else:
            self.stretch_bottom = 1

        for y in range(height - 2, 0, -1):
            if data.is_black(0, y):
                self.stretch_top = height - y
                break
        else:
            self.stretch_top = 1

        # Find content area markers, if any (bottom and right edges);
        # the padding_* class attributes stay None when absent.
        for x in range(1, width - 1):
            if data.is_black(x, 0):
                self.padding_left = x - 1
                break
        for x in range(width - 2, 0, -1):
            if data.is_black(x, 0):
                self.padding_right = self.width - x
                break
        for y in range(1, height - 1):
            if data.is_black(width - 1, y):
                self.padding_bottom = y - 1
                break
        for y in range(height - 2, 0, -1):
            if data.is_black(width - 1, y):
                self.padding_top = self.height - y
                break

        # Texture coordinates, in pixels
        u1 = 1
        v1 = 1
        u2 = self.stretch_left + 1
        v2 = self.stretch_bottom + 1
        u3 = width - self.stretch_right - 1
        v3 = height - self.stretch_top - 1
        u4 = width - 1
        v4 = height - 1

        # Texture coordinates as ratio of image size (0 to 1)
        u1, u2, u3, u4 = [s / float(width) for s in (u1, u2, u3, u4)]
        v1, v2, v3, v4 = [s / float(height) for s in (v1, v2, v3, v4)]

        # Scale texture coordinates to match the tex_coords pyglet gives us
        # (these aren't necessarily 0-1 as the texture may have been packed)
        (tu1, tv1, _,
         _, _, _,
         tu2, tv2, _,
         _, _, _) = self.texture.tex_coords
        u_scale = tu2 - tu1
        u_bias = tu1
        v_scale = tv2 - tv1
        v_bias = tv1
        u1, u2, u3, u4 = [u_bias + u_scale * s for s in (u1, u2, u3, u4)]
        v1, v2, v3, v4 = [v_bias + v_scale * s for s in (v1, v2, v3, v4)]

        # 2D texture coordinates, bottom-left to top-right
        self.tex_coords = (
            u1, v1, u2, v1, u3, v1, u4, v1,
            u1, v2, u2, v2, u3, v2, u4, v2,
            u1, v3, u2, v3, u3, v3, u4, v3,
            u1, v4, u2, v4, u3, v4, u4, v4,
        )

        # Quad indices: 9 quads over the 4x4 vertex grid.
        self.indices = []
        for y in range(3):
            for x in range(3):
                self.indices.extend([
                    x + y * 4,
                    (x + 1) + y * 4,
                    (x + 1) + (y + 1) * 4,
                    x + (y + 1) * 4,
                ])

    def get_vertices(self, x, y, width, height):
        """Get 16 2D vertices for the given image region"""
        x1 = x
        y1 = y
        x2 = x + self.stretch_left
        y2 = y + self.stretch_bottom
        x3 = x + width - self.stretch_right
        y3 = y + height - self.stretch_top
        x4 = x + width
        y4 = y + height

        # To match tex coords, vertices are bottom-left to top-right
        return (
            x1, y1, x2, y1, x3, y1, x4, y1,
            x1, y2, x2, y2, x3, y2, x4, y2,
            x1, y3, x2, y3, x3, y3, x4, y3,
            x1, y4, x2, y4, x3, y4, x4, y4,
        )

    def draw(self, x, y, width, height):
        """Draw the nine-patch at the given image dimensions."""
        # Never draw smaller than the source image (sans its 1px border
        # accounted for by the +2).
        width = max(width, self.width + 2)
        height = max(height, self.height + 2)

        vertices = self.get_vertices(x, y, width, height)

        # Save/restore enable state so blending/texturing changes don't
        # leak into the caller's GL state.
        glPushAttrib(GL_ENABLE_BIT)
        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        glEnable(self.texture.target)
        glBindTexture(self.texture.target, self.texture.id)
        pyglet.graphics.draw_indexed(16, GL_QUADS, self.indices,
                                     ('v2i', vertices),
                                     ('t2f', self.tex_coords))
        glPopAttrib()

    def draw_around(self, x, y, width, height):
        """Draw the nine-patch around the given content area"""
        # NOTE(review): assumes the image declared a content area; padding_*
        # stay None when no bottom/right markers exist and this would raise
        # a TypeError — confirm callers only use it on marked images.
        self.draw(x - self.padding_left,
                  y - self.padding_bottom,
                  width + self.padding_left + self.padding_right,
                  height + self.padding_bottom + self.padding_top)


if __name__ == '__main__':
    # Demo: render a label with the nine-patch (given on argv[1]) drawn
    # around its content area.
    import sys
    image = pyglet.image.load(sys.argv[1])
    ninepatch = NinePatch(image)

    window = pyglet.window.Window(resizable=True)
    label = pyglet.text.Label('Hello, NinePatch',
                              font_size=16,
                              anchor_y='bottom',
                              color=(0,0,0,255))

    @window.event
    def on_draw():
        glClearColor(.7, .7, .7, 1.)
        window.clear()
        width = label.content_width
        height = label.content_height
        label.x = window.width / 2 - width / 2
        label.y = window.height / 2 - height / 2
        ninepatch.draw_around(label.x, label.y, width, height)
        label.draw()

    pyglet.app.run()
bsd-3-clause
igemsoftware/SYSU-Software2013
project/Python27/Lib/shelve.py
225
8078
"""Manage shelves of pickled objects. A "shelf" is a persistent, dictionary-like object. The difference with dbm databases is that the values (not the keys!) in a shelf can be essentially arbitrary Python objects -- anything that the "pickle" module can handle. This includes most class instances, recursive data types, and objects containing lots of shared sub-objects. The keys are ordinary strings. To summarize the interface (key is a string, data is an arbitrary object): import shelve d = shelve.open(filename) # open, with (g)dbm filename -- no suffix d[key] = data # store data at key (overwrites old data if # using an existing key) data = d[key] # retrieve a COPY of the data at key (raise # KeyError if no such key) -- NOTE that this # access returns a *copy* of the entry! del d[key] # delete data stored at key (raises KeyError # if no such key) flag = d.has_key(key) # true if the key exists; same as "key in d" list = d.keys() # a list of all existing keys (slow!) d.close() # close it Dependent on the implementation, closing a persistent dictionary may or may not be necessary to flush changes to disk. Normally, d[key] returns a COPY of the entry. This needs care when mutable entries are mutated: for example, if d[key] is a list, d[key].append(anitem) does NOT modify the entry d[key] itself, as stored in the persistent mapping -- it only modifies the copy, which is then immediately discarded, so that the append has NO effect whatsoever. To append an item to d[key] in a way that will affect the persistent mapping, use: data = d[key] data.append(anitem) d[key] = data To avoid the problem with mutable entries, you may pass the keyword argument writeback=True in the call to shelve.open. When you use: d = shelve.open(filename, writeback=True) then d keeps a cache of all entries you access, and writes them all back to the persistent mapping when you call d.close(). This ensures that such usage as d[key].append(anitem) works as intended. 
However, using keyword argument writeback=True may consume vast amount of memory for the cache, and it may make d.close() very slow, if you access many of d's entries after opening it in this way: d has no way to check which of the entries you access are mutable and/or which ones you actually mutate, so it must cache, and write back at close, all of the entries that you access. You can call d.sync() to write back all the entries in the cache, and empty the cache (d.sync() also synchronizes the persistent dictionary on disk, if feasible). """ # Try using cPickle and cStringIO if available. try: from cPickle import Pickler, Unpickler except ImportError: from pickle import Pickler, Unpickler try: from cStringIO import StringIO except ImportError: from StringIO import StringIO import UserDict __all__ = ["Shelf","BsdDbShelf","DbfilenameShelf","open"] class _ClosedDict(UserDict.DictMixin): 'Marker for a closed dict. Access attempts raise a ValueError.' def closed(self, *args): raise ValueError('invalid operation on closed shelf') __getitem__ = __setitem__ = __delitem__ = keys = closed def __repr__(self): return '<Closed Dictionary>' class Shelf(UserDict.DictMixin): """Base class for shelf implementations. This is initialized with a dictionary-like object. See the module's __doc__ string for an overview of the interface. 
""" def __init__(self, dict, protocol=None, writeback=False): self.dict = dict if protocol is None: protocol = 0 self._protocol = protocol self.writeback = writeback self.cache = {} def keys(self): return self.dict.keys() def __len__(self): return len(self.dict) def has_key(self, key): return key in self.dict def __contains__(self, key): return key in self.dict def get(self, key, default=None): if key in self.dict: return self[key] return default def __getitem__(self, key): try: value = self.cache[key] except KeyError: f = StringIO(self.dict[key]) value = Unpickler(f).load() if self.writeback: self.cache[key] = value return value def __setitem__(self, key, value): if self.writeback: self.cache[key] = value f = StringIO() p = Pickler(f, self._protocol) p.dump(value) self.dict[key] = f.getvalue() def __delitem__(self, key): del self.dict[key] try: del self.cache[key] except KeyError: pass def close(self): self.sync() try: self.dict.close() except AttributeError: pass # Catch errors that may happen when close is called from __del__ # because CPython is in interpreter shutdown. try: self.dict = _ClosedDict() except (NameError, TypeError): self.dict = None def __del__(self): if not hasattr(self, 'writeback'): # __init__ didn't succeed, so don't bother closing return self.close() def sync(self): if self.writeback and self.cache: self.writeback = False for key, entry in self.cache.iteritems(): self[key] = entry self.writeback = True self.cache = {} if hasattr(self.dict, 'sync'): self.dict.sync() class BsdDbShelf(Shelf): """Shelf implementation using the "BSD" db interface. This adds methods first(), next(), previous(), last() and set_location() that have no counterpart in [g]dbm databases. The actual database must be opened using one of the "bsddb" modules "open" routines (i.e. bsddb.hashopen, bsddb.btopen or bsddb.rnopen) and passed to the constructor. See the module's __doc__ string for an overview of the interface. 
""" def __init__(self, dict, protocol=None, writeback=False): Shelf.__init__(self, dict, protocol, writeback) def set_location(self, key): (key, value) = self.dict.set_location(key) f = StringIO(value) return (key, Unpickler(f).load()) def next(self): (key, value) = self.dict.next() f = StringIO(value) return (key, Unpickler(f).load()) def previous(self): (key, value) = self.dict.previous() f = StringIO(value) return (key, Unpickler(f).load()) def first(self): (key, value) = self.dict.first() f = StringIO(value) return (key, Unpickler(f).load()) def last(self): (key, value) = self.dict.last() f = StringIO(value) return (key, Unpickler(f).load()) class DbfilenameShelf(Shelf): """Shelf implementation using the "anydbm" generic dbm interface. This is initialized with the filename for the dbm database. See the module's __doc__ string for an overview of the interface. """ def __init__(self, filename, flag='c', protocol=None, writeback=False): import anydbm Shelf.__init__(self, anydbm.open(filename, flag), protocol, writeback) def open(filename, flag='c', protocol=None, writeback=False): """Open a persistent dictionary for reading and writing. The filename parameter is the base filename for the underlying database. As a side-effect, an extension may be added to the filename and more than one file may be created. The optional flag parameter has the same interpretation as the flag parameter of anydbm.open(). The optional protocol parameter specifies the version of the pickle protocol (0, 1, or 2). See the module's __doc__ string for an overview of the interface. """ return DbfilenameShelf(filename, flag, protocol, writeback)
mit
janebeckman/gpdb
src/test/tinc/tincrepo/mpp/gpdb/tests/storage/walrepl/walreceiver/__init__.py
12
2660
""" Copyright (c) 2004-Present Pivotal Software, Inc. This program and the accompanying materials are made available under the terms of the under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from mpp.models import MPPTestCase from mpp.lib.config import GPDBConfig from mpp.gpdb.tests.storage.walrepl import lib as walrepl from mpp.gpdb.tests.storage.walrepl.lib.pqwrap import * from mpp.gpdb.tests.storage.walrepl.lib.pg_util import GpUtility import subprocess import os gputil = GpUtility() gputil.check_and_start_gpdb() config = GPDBConfig() class GPLibPQTestCase(MPPTestCase): """This test case is to use gplibpq library, which is SQL function interface for walreceiver's libpq part. List of functions Schema | Name | Result data type | Argument data types --------+-----------------+------------------+---------------------- public | test_connect | boolean | text public | test_disconnect | boolean | public | test_receive | boolean | public | test_scenario1 | boolean | text public | test_send | boolean | """ @classmethod def setUpClass(cls): """ Install gplibpq module. """ if not config.is_multinode(): # Build it before testing. 
thisdir = os.path.dirname(__file__) builddir = os.path.join(thisdir, 'gplibpq') subprocess.check_call(['make', '-C', builddir, 'install']) # execute CREATE FUNCTION statements install_sql = os.path.join(builddir, 'install.sql') subprocess.check_call(['psql', '-f', install_sql]) else: pass def tearDown(self): # each test should wait for the wal sender to disappear for i in walrepl.polling(30, 0.5): with PGconn("") as conn: cnt = self.count_walsender(conn) if cnt <= 0: break def count_walsender(self, conn): res = conn.execute("""SELECT count(*) FROM pg_stat_replication""") if res.status() != PGRES_TUPLES_OK: return -1 return res.getpyvalue(0, 0)
apache-2.0
mjasher/gac
GAC/flopy/modflow/mfdrn.py
1
7133
""" mfdrn module. Contains the ModflowDrn class. Note that the user can access the ModflowDrn class as `flopy.modflow.ModflowDrn`. Additional information for this MODFLOW package can be found at the `Online MODFLOW Guide <http://water.usgs.gov/ogw/modflow/MODFLOW-2005-Guide/index.html?drn.htm>`_. """ import sys import numpy as np from flopy.mbase import Package from flopy.utils.util_list import mflist class ModflowDrn(Package): """ MODFLOW Drain Package Class. Parameters ---------- model : model object The model object (of type :class:`flopy.modflow.mf.Modflow`) to which this package will be added. ipakcb : int is a flag and a unit number. (default is 0). stress_period_data : list of boundaries or recarray of boundaries or dictionary of boundaries Each drain cell is defined through definition of layer(int), row(int), column(int), elevation(float), conductance(float) The simplest form is a dictionary with a lists of boundaries for each stress period, where each list of boundaries itself is a list of boundaries. Indices of the dictionary are the numbers of the stress period. This gives the form of stress_period_data = {0: [ [lay, row, col, stage, cond], [lay, row, col, stage, cond], [lay, row, col, stage, cond], ], 1: [ [lay, row, col, stage, cond], [lay, row, col, stage, cond], [lay, row, col, stage, cond], ], ... kper: [ [lay, row, col, stage, cond], [lay, row, col, stage, cond], [lay, row, col, stage, cond], ] } Note that if no values are specified for a certain stress period, then the list of boundaries for the previous stress period for which values were defined is used. Full details of all options to specify stress_period_data can be found in the flopy3boundaries Notebook in the basic subdirectory of the examples directory dtype : dtype definition if data type is different from default options : list of strings Package options. (default is None). extension : string Filename extension (default is 'drn') unitnumber : int File unit number (default is 21). 
Attributes ---------- Methods ------- See Also -------- Notes ----- Parameters are not supported in FloPy. Examples -------- >>> import flopy >>> ml = flopy.modflow.Modflow() >>> lrcec = {0:[2, 3, 4, 10., 100.]} #this drain will be applied to all >>> #stress periods >>> drn = flopy.modflow.ModflowDrn(ml, stress_period_data=lrcec) """ def __init__(self, model, ipakcb=0, stress_period_data=None, dtype=None, extension='drn', unitnumber=21, options=None, **kwargs): """ Package constructor """ Package.__init__(self, model, extension, 'DRN', unitnumber) # Call ancestor's init to set self.parent, extension, name and unit number self.heading = '# DRN for MODFLOW, generated by Flopy.' self.url = 'drn.htm' self.ipakcb = ipakcb # 0: no cell by cell terms are written self.np = 0 if options is None: options = [] self.options = options if dtype is not None: self.dtype = dtype else: self.dtype = self.get_default_dtype(structured=self.parent.structured) self.stress_period_data = mflist(self, stress_period_data) self.parent.add_package(self) def __repr__(self): return 'Drain class' @staticmethod def get_default_dtype(structured=True): if structured: dtype = np.dtype([("k", np.int), ("i", np.int), ("j", np.int), ("elev", np.float32), ("cond", np.float32)]) else: dtype = np.dtype([("node", np.int), ("elev", np.float32), ("cond", np.float32)]) return dtype def ncells(self): # Returns the maximum number of cells that have drains (developed for MT3DMS SSM package) # print 'Function must be implemented properly for drn package' return self.stress_period_data.mxact def write_file(self): """ Write the file. 
""" f_drn = open(self.fn_path, 'w') f_drn.write('{0}\n'.format(self.heading)) # f_drn.write('%10i%10i\n' % (self.mxactd, self.idrncb)) line = '{0:10d}{1:10d}'.format(self.stress_period_data.mxact, self.ipakcb) for opt in self.options: line += ' ' + str(opt) line += '\n' f_drn.write(line) self.stress_period_data.write_transient(f_drn) f_drn.close() def add_record(self, kper, index, values): try: self.stress_period_data.add_record(kper, index, values) except Exception as e: raise Exception("mfdrn error adding record to list: " + str(e)) @staticmethod def get_empty(ncells=0, aux_names=None, structured=True): # get an empty recaray that correponds to dtype dtype = ModflowDrn.get_default_dtype(structured=structured) if aux_names is not None: dtype = Package.add_to_dtype(dtype, aux_names, np.float32) d = np.zeros((ncells, len(dtype)), dtype=dtype) d[:, :] = -1.0E+10 return np.core.records.fromarrays(d.transpose(), dtype=dtype) @staticmethod def load(f, model, nper=None, ext_unit_dict=None): """ Load an existing package. Parameters ---------- f : filename or file handle File to load. model : model object The model object (of type :class:`flopy.modflow.mf.Modflow`) to which this package will be added. ext_unit_dict : dictionary, optional If the arrays in the file are specified using EXTERNAL, or older style array control records, then `f` should be a file handle. In this case ext_unit_dict is required, which can be constructed using the function :class:`flopy.utils.mfreadnam.parsenamefile`. Returns ------- drn : ModflowDrn object ModflowDrn object. Examples -------- >>> import flopy >>> m = flopy.modflow.Modflow() >>> drn = flopy.modflow.ModflowDrn.load('test.drn', m) """ if model.verbose: sys.stdout.write('loading drn package file...\n') return Package.load(model, ModflowDrn, f, nper)
gpl-2.0
ceibal-tatu/software-center
softwarecenter/ui/gtk3/aptd_gtk3.py
3
2642
# Copyright (C) 2011 Canonical # # Authors: # Michael Vogt # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation; version 3. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA from gi.repository import Gtk from aptdaemon.gtk3widgets import (AptMediumRequiredDialog, AptConfigFileConflictDialog) from softwarecenter.backend.installbackend import InstallBackendUI class InstallBackendUI(InstallBackendUI): def ask_config_file_conflict(self, old, new): dia = AptConfigFileConflictDialog(old, new) res = dia.run() dia.hide() dia.destroy() # send result to the daemon if res == Gtk.ResponseType.YES: return "replace" else: return "keep" def ask_medium_required(self, medium, drive): dialog = AptMediumRequiredDialog(medium, drive) res = dialog.run() dialog.hide() if res == Gtk.ResponseType.YES: return True else: return False def error(self, parent, primary, secondary, details=None, alternative_action=None): from dialogs import error res = "ok" res = error(parent=parent, primary=primary, secondary=secondary, details=details, alternative_action=alternative_action) if res == Gtk.ResponseType.YES: res = "yes" return res if __name__ == "__main__": from softwarecenter.backend.installbackend import get_install_backend from mock import Mock aptd = get_install_backend() aptd.ui = InstallBackendUI() # test config file prompt trans = Mock() res = aptd._config_file_conflict(trans, "/etc/group", "/etc/group-") print (res) # test medium required trans = Mock() res = 
aptd._medium_required(trans, "medium", "drive") print (res) # test error dialog trans = Mock() trans.error_code = 102 trans.error_details = "details" enum = 101 res = aptd._show_transaction_failed_dialog(trans, enum) print (res)
lgpl-3.0
xhava/hippyvm
hippy/rpath.py
2
3264
# This contains an RPython version of functions from the UNIX file # posixpath.py. It would give nonsense on other platforms like Windows. import os from os.path import isabs, islink, abspath, normpath from rpython.rlib.rpath import rabspath from rpython.rlib.objectmodel import enforceargs @enforceargs(str) def dirname(p): """Returns the directory component of a pathname""" i = p.rfind('/') + 1 assert i >= 0 head = p[:i] if head and head != '/' * len(head): head = head.rstrip('/') return head @enforceargs(str) def basename(p): """Returns the final component of a pathname""" i = p.rfind('/') + 1 assert i >= 0 return p[i:] @enforceargs(str) def split(p): """Split a pathname. Returns tuple "(head, tail)" where "tail" is everything after the final slash. Either part may be empty.""" i = p.rfind('/') + 1 assert i >= 0 head, tail = p[:i], p[i:] if head and head != '/' * len(head): head = head.rstrip('/') return head, tail @enforceargs(str) def exists(path): """Test whether a path exists. Returns False for broken symbolic links""" try: assert path is not None os.stat(path) except os.error: return False return True @enforceargs(str, [str]) def join(a, p): """Join two or more pathname components, inserting '/' as needed. If any component is an absolute path, all previous path components will be discarded. An empty last part will result in a path that ends with a separator.""" path = a for b in p: if b.startswith('/'): path = b elif path == '' or path.endswith('/'): path += b else: path += '/' + b return path @enforceargs(str) def realpath(filename): """Return the canonical path of the specified filename, eliminating any symbolic links encountered in the path.""" if isabs(filename): bits = ['/'] + filename.split('/')[1:] else: bits = [''] + filename.split('/') for i in range(2, len(bits)+1): component = join(bits[0], bits[1:i]) # Resolve symbolic links. 
if islink(component): resolved = _resolve_link(component) if resolved is None: # Infinite loop -- return original component + rest of the path return abspath(join(component, bits[i:])) else: newpath = join(resolved, bits[i:]) return realpath(newpath) return abspath(filename) @enforceargs(str) def _resolve_link(path): """Internal helper function. Takes a path and follows symlinks until we either arrive at something that isn't a symlink, or encounter a path we've seen before (meaning that there's a loop). """ paths_seen = {} while islink(path): if path in paths_seen: # Already seen this path, so we must have a symlink loop return None paths_seen[path] = None # Resolve where the link points to resolved = os.readlink(path) if not isabs(resolved): dir = dirname(path) path = normpath(join(dir, [resolved])) else: path = normpath(resolved) return path
mit
emidln/django_roa
env/lib/python2.7/site-packages/django/contrib/gis/geos/prototypes/errcheck.py
623
3522
""" Error checking functions for GEOS ctypes prototype functions. """ import os from ctypes import c_void_p, string_at, CDLL from django.contrib.gis.geos.error import GEOSException from django.contrib.gis.geos.libgeos import GEOS_VERSION from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc # Getting the `free` routine used to free the memory allocated for # string pointers returned by GEOS. if GEOS_VERSION >= (3, 1, 1): # In versions 3.1.1 and above, `GEOSFree` was added to the C API # because `free` isn't always available on all platforms. free = GEOSFunc('GEOSFree') free.argtypes = [c_void_p] free.restype = None else: # Getting the `free` routine from the C library of the platform. if os.name == 'nt': # On NT, use the MS C library. libc = CDLL('msvcrt') else: # On POSIX platforms C library is obtained by passing None into `CDLL`. libc = CDLL(None) free = libc.free ### ctypes error checking routines ### def last_arg_byref(args): "Returns the last C argument's value by reference." return args[-1]._obj.value def check_dbl(result, func, cargs): "Checks the status code and returns the double value passed in by reference." # Checking the status code if result != 1: return None # Double passed in by reference, return its value. return last_arg_byref(cargs) def check_geom(result, func, cargs): "Error checking on routines that return Geometries." if not result: raise GEOSException('Error encountered checking Geometry returned from GEOS C function "%s".' % func.__name__) return result def check_minus_one(result, func, cargs): "Error checking on routines that should not return -1." if result == -1: raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__) else: return result def check_predicate(result, func, cargs): "Error checking for unary/binary predicate functions." 
val = ord(result) # getting the ordinal from the character if val == 1: return True elif val == 0: return False else: raise GEOSException('Error encountered on GEOS C predicate function "%s".' % func.__name__) def check_sized_string(result, func, cargs): """ Error checking for routines that return explicitly sized strings. This frees the memory allocated by GEOS at the result pointer. """ if not result: raise GEOSException('Invalid string pointer returned by GEOS C function "%s"' % func.__name__) # A c_size_t object is passed in by reference for the second # argument on these routines, and its needed to determine the # correct size. s = string_at(result, last_arg_byref(cargs)) # Freeing the memory allocated within GEOS free(result) return s def check_string(result, func, cargs): """ Error checking for routines that return strings. This frees the memory allocated by GEOS at the result pointer. """ if not result: raise GEOSException('Error encountered checking string return value in GEOS C function "%s".' % func.__name__) # Getting the string value at the pointer address. s = string_at(result) # Freeing the memory allocated within GEOS free(result) return s def check_zero(result, func, cargs): "Error checking on routines that should not return 0." if result == 0: raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__) else: return result
bsd-3-clause
ukanga/SickRage
lib/unidecode/x098.py
252
4645
data = ( 'Hu ', # 0x00 'Ye ', # 0x01 'Ding ', # 0x02 'Qing ', # 0x03 'Pan ', # 0x04 'Xiang ', # 0x05 'Shun ', # 0x06 'Han ', # 0x07 'Xu ', # 0x08 'Yi ', # 0x09 'Xu ', # 0x0a 'Gu ', # 0x0b 'Song ', # 0x0c 'Kui ', # 0x0d 'Qi ', # 0x0e 'Hang ', # 0x0f 'Yu ', # 0x10 'Wan ', # 0x11 'Ban ', # 0x12 'Dun ', # 0x13 'Di ', # 0x14 'Dan ', # 0x15 'Pan ', # 0x16 'Po ', # 0x17 'Ling ', # 0x18 'Ce ', # 0x19 'Jing ', # 0x1a 'Lei ', # 0x1b 'He ', # 0x1c 'Qiao ', # 0x1d 'E ', # 0x1e 'E ', # 0x1f 'Wei ', # 0x20 'Jie ', # 0x21 'Gua ', # 0x22 'Shen ', # 0x23 'Yi ', # 0x24 'Shen ', # 0x25 'Hai ', # 0x26 'Dui ', # 0x27 'Pian ', # 0x28 'Ping ', # 0x29 'Lei ', # 0x2a 'Fu ', # 0x2b 'Jia ', # 0x2c 'Tou ', # 0x2d 'Hui ', # 0x2e 'Kui ', # 0x2f 'Jia ', # 0x30 'Le ', # 0x31 'Tian ', # 0x32 'Cheng ', # 0x33 'Ying ', # 0x34 'Jun ', # 0x35 'Hu ', # 0x36 'Han ', # 0x37 'Jing ', # 0x38 'Tui ', # 0x39 'Tui ', # 0x3a 'Pin ', # 0x3b 'Lai ', # 0x3c 'Tui ', # 0x3d 'Zi ', # 0x3e 'Zi ', # 0x3f 'Chui ', # 0x40 'Ding ', # 0x41 'Lai ', # 0x42 'Yan ', # 0x43 'Han ', # 0x44 'Jian ', # 0x45 'Ke ', # 0x46 'Cui ', # 0x47 'Jiong ', # 0x48 'Qin ', # 0x49 'Yi ', # 0x4a 'Sai ', # 0x4b 'Ti ', # 0x4c 'E ', # 0x4d 'E ', # 0x4e 'Yan ', # 0x4f 'Hun ', # 0x50 'Kan ', # 0x51 'Yong ', # 0x52 'Zhuan ', # 0x53 'Yan ', # 0x54 'Xian ', # 0x55 'Xin ', # 0x56 'Yi ', # 0x57 'Yuan ', # 0x58 'Sang ', # 0x59 'Dian ', # 0x5a 'Dian ', # 0x5b 'Jiang ', # 0x5c 'Ku ', # 0x5d 'Lei ', # 0x5e 'Liao ', # 0x5f 'Piao ', # 0x60 'Yi ', # 0x61 'Man ', # 0x62 'Qi ', # 0x63 'Rao ', # 0x64 'Hao ', # 0x65 'Qiao ', # 0x66 'Gu ', # 0x67 'Xun ', # 0x68 'Qian ', # 0x69 'Hui ', # 0x6a 'Zhan ', # 0x6b 'Ru ', # 0x6c 'Hong ', # 0x6d 'Bin ', # 0x6e 'Xian ', # 0x6f 'Pin ', # 0x70 'Lu ', # 0x71 'Lan ', # 0x72 'Nie ', # 0x73 'Quan ', # 0x74 'Ye ', # 0x75 'Ding ', # 0x76 'Qing ', # 0x77 'Han ', # 0x78 'Xiang ', # 0x79 'Shun ', # 0x7a 'Xu ', # 0x7b 'Xu ', # 0x7c 'Wan ', # 0x7d 'Gu ', # 0x7e 'Dun ', # 0x7f 'Qi ', # 0x80 'Ban ', # 0x81 'Song ', # 0x82 'Hang ', # 0x83 
'Yu ', # 0x84 'Lu ', # 0x85 'Ling ', # 0x86 'Po ', # 0x87 'Jing ', # 0x88 'Jie ', # 0x89 'Jia ', # 0x8a 'Tian ', # 0x8b 'Han ', # 0x8c 'Ying ', # 0x8d 'Jiong ', # 0x8e 'Hai ', # 0x8f 'Yi ', # 0x90 'Pin ', # 0x91 'Hui ', # 0x92 'Tui ', # 0x93 'Han ', # 0x94 'Ying ', # 0x95 'Ying ', # 0x96 'Ke ', # 0x97 'Ti ', # 0x98 'Yong ', # 0x99 'E ', # 0x9a 'Zhuan ', # 0x9b 'Yan ', # 0x9c 'E ', # 0x9d 'Nie ', # 0x9e 'Man ', # 0x9f 'Dian ', # 0xa0 'Sang ', # 0xa1 'Hao ', # 0xa2 'Lei ', # 0xa3 'Zhan ', # 0xa4 'Ru ', # 0xa5 'Pin ', # 0xa6 'Quan ', # 0xa7 'Feng ', # 0xa8 'Biao ', # 0xa9 'Oroshi ', # 0xaa 'Fu ', # 0xab 'Xia ', # 0xac 'Zhan ', # 0xad 'Biao ', # 0xae 'Sa ', # 0xaf 'Ba ', # 0xb0 'Tai ', # 0xb1 'Lie ', # 0xb2 'Gua ', # 0xb3 'Xuan ', # 0xb4 'Shao ', # 0xb5 'Ju ', # 0xb6 'Bi ', # 0xb7 'Si ', # 0xb8 'Wei ', # 0xb9 'Yang ', # 0xba 'Yao ', # 0xbb 'Sou ', # 0xbc 'Kai ', # 0xbd 'Sao ', # 0xbe 'Fan ', # 0xbf 'Liu ', # 0xc0 'Xi ', # 0xc1 'Liao ', # 0xc2 'Piao ', # 0xc3 'Piao ', # 0xc4 'Liu ', # 0xc5 'Biao ', # 0xc6 'Biao ', # 0xc7 'Biao ', # 0xc8 'Liao ', # 0xc9 '[?] ', # 0xca 'Se ', # 0xcb 'Feng ', # 0xcc 'Biao ', # 0xcd 'Feng ', # 0xce 'Yang ', # 0xcf 'Zhan ', # 0xd0 'Biao ', # 0xd1 'Sa ', # 0xd2 'Ju ', # 0xd3 'Si ', # 0xd4 'Sou ', # 0xd5 'Yao ', # 0xd6 'Liu ', # 0xd7 'Piao ', # 0xd8 'Biao ', # 0xd9 'Biao ', # 0xda 'Fei ', # 0xdb 'Fan ', # 0xdc 'Fei ', # 0xdd 'Fei ', # 0xde 'Shi ', # 0xdf 'Shi ', # 0xe0 'Can ', # 0xe1 'Ji ', # 0xe2 'Ding ', # 0xe3 'Si ', # 0xe4 'Tuo ', # 0xe5 'Zhan ', # 0xe6 'Sun ', # 0xe7 'Xiang ', # 0xe8 'Tun ', # 0xe9 'Ren ', # 0xea 'Yu ', # 0xeb 'Juan ', # 0xec 'Chi ', # 0xed 'Yin ', # 0xee 'Fan ', # 0xef 'Fan ', # 0xf0 'Sun ', # 0xf1 'Yin ', # 0xf2 'Zhu ', # 0xf3 'Yi ', # 0xf4 'Zhai ', # 0xf5 'Bi ', # 0xf6 'Jie ', # 0xf7 'Tao ', # 0xf8 'Liu ', # 0xf9 'Ci ', # 0xfa 'Tie ', # 0xfb 'Si ', # 0xfc 'Bao ', # 0xfd 'Shi ', # 0xfe 'Duo ', # 0xff )
gpl-3.0
jzoldak/edx-platform
lms/djangoapps/grades/tests/utils.py
9
2464
""" Utilities for grades related tests """ from contextlib import contextmanager from mock import patch from courseware.module_render import get_module from courseware.model_data import FieldDataCache from xmodule.graders import ProblemScore @contextmanager def mock_passing_grade(grade_pass='Pass', percent=0.75, ): """ Mock the grading function to always return a passing grade. """ with patch('lms.djangoapps.grades.new.course_grade.CourseGrade._compute_letter_grade') as mock_letter_grade: with patch('lms.djangoapps.grades.new.course_grade.CourseGrade._calc_percent') as mock_percent_grade: mock_letter_grade.return_value = grade_pass mock_percent_grade.return_value = percent yield @contextmanager def mock_get_score(earned=0, possible=1): """ Mocks the get_score function to return a valid grade. """ with patch('lms.djangoapps.grades.new.subsection_grade.get_score') as mock_score: mock_score.return_value = ProblemScore( raw_earned=earned, raw_possible=possible, weighted_earned=earned, weighted_possible=possible, weight=1, graded=True, attempted=True, ) yield mock_score @contextmanager def mock_get_submissions_score(earned=0, possible=1, attempted=True): """ Mocks the _get_submissions_score function to return the specified values """ with patch('lms.djangoapps.grades.scores._get_score_from_submissions') as mock_score: mock_score.return_value = (earned, possible, earned, possible, attempted) yield mock_score def answer_problem(course, request, problem, score=1, max_value=1): """ Records a correct answer for the given problem. 
Arguments: course (Course): Course object, the course the required problem is in request (Request): request Object problem (xblock): xblock object, the problem to be answered """ user = request.user grade_dict = {'value': score, 'max_value': max_value, 'user_id': user.id} field_data_cache = FieldDataCache.cache_for_descriptor_descendents( course.id, user, course, depth=2 ) # pylint: disable=protected-access module = get_module( user, request, problem.scope_ids.usage_id, field_data_cache, )._xmodule module.system.publish(problem, 'grade', grade_dict)
agpl-3.0
mosen/salt-osx
_modules/deprecated/mac_shadow.py
1
10388
# -*- coding: utf-8 -*- ''' Manage Mac OSX local directory passwords and policies. Note that it is usually better to apply password policies through the creation of a configuration profile. Tech Notes: Usually when a password is changed by the system, there's a responsibility to check the hash list and generate hashes for each. Many osx password changing scripts/modules only deal with the SHA-512 PBKDF2 hash when working with the local node. ''' # Authentication concepts reference: # https://developer.apple.com/library/mac/documentation/Networking/Conceptual/Open_Directory/openDirectoryConcepts/openDirectoryConcepts.html#//apple_ref/doc/uid/TP40000917-CH3-CIFCAIBB from __future__ import absolute_import import logging log = logging.getLogger(__name__) # Start logging import os import base64 import salt.utils import string import binascii import salt.exceptions try: from passlib.utils import pbkdf2, ab64_encode, ab64_decode HAS_PASSLIB = True except ImportError: HAS_PASSLIB = False def __virtual__(): if HAS_PASSLIB and salt.utils.platform.is_darwin(): return True else: return False def _pl_salted_sha512_pbkdf2_from_string(strvalue, salt_bin=None, iterations=1000): ''' Create a PBKDF2-SHA512 hash with a 128 byte key length. The standard passlib.hash.pbkdf2_sha512 functions assume a 64 byte key length which does not match OSX's implementation. :param strvalue: The string to derive the hash from :param salt: The (randomly generated) salt :param iterations: The number of iterations, for Mac OS X it's normally between 23000-25000? need to confirm. 
:return: (binary digest, binary salt, number of iterations used) ''' if salt_bin is None: salt_bin = os.urandom(32) key_length = 128 hmac_sha512, dsize = pbkdf2.get_prf("hmac-sha512") digest_bin = pbkdf2.pbkdf2(strvalue, salt_bin, iterations, key_length, hmac_sha512) return digest_bin, salt_bin, iterations def _extract_authdata(item): ''' Extract version, authority tag, and authority data from a single array item of AuthenticationAuthority item The NSString instance representing the authority string returns version (default 1.0.0), tag, data as a tuple ''' parts = string.split(item, ';', 2) if not parts[0]: parts[0] = '1.0.0' return { 'version': parts[0], 'tag': parts[1], 'data': parts[2] } def authorities(name): ''' Read the list of authentication authorities for the given user. name Short username of the local user. ''' authorities_plist = __salt__['cmd.run']('/usr/bin/dscl -plist . read /Users/{0} AuthenticationAuthority'.format(name)) plist = __salt__['plist.parse_string'](authorities_plist) authorities_list = [_extract_authdata(item) for item in plist.objectForKey_('dsAttrTypeStandard:AuthenticationAuthority')] return authorities_list def user_shadowhash(name): ''' Read the existing hash for the named user. Returns a dict with the ShadowHash content for the named user in the form: { 'HASH_TYPE': { 'entropy': <base64 hash>, 'salt': <base64 salt>, 'iterations': <n iterations> }} Hash types are hard coded to SALTED-SHA-PBKDF2, CRAM-MD5, NT, RECOVERABLE. In future releases the AuthenticationAuthority property should be checked for the hash list name The username associated with the local directory user. ''' # We have to strip the output string, convert hex back to binary data, read that plist and get our specific # key/value property to find the hash. I.E there's a lot of unwrapping to do. 
log.debug('Reading ShadowHashData') data = __salt__['dscl.read']('.', '/Users/{0}'.format(name), 'ShadowHashData') log.debug('Got ShadowHashData') log.debug(data) if data is None: log.debug('No such record/attribute found, returning None') return None if 'dsAttrTypeNative:ShadowHashData' not in data: raise salt.exceptions.SaltInvocationError( 'Expected to find ShadowHashData in user record: {0}'.format(name) ) plist_hex = string.replace(data['dsAttrTypeNative:ShadowHashData'], ' ', '') plist_bin = binascii.unhexlify(plist_hex) # plistlib is not used, because mavericks ships without binary plist support from plistlib. plist = __salt__['plist.parse_string'](plist_bin) log.debug(plist) pbkdf = plist.objectForKey_('SALTED-SHA512-PBKDF2') cram_md5 = plist.objectForKey_('CRAM-MD5') nt = plist.objectForKey_('NT') recoverable = plist.objectForKey_('RECOVERABLE') hashes = {} if pbkdf is not None: hashes['SALTED-SHA512-PBKDF2'] = { 'entropy': pbkdf.objectForKey_('entropy').base64EncodedStringWithOptions_(0), 'salt': pbkdf.objectForKey_('salt').base64EncodedStringWithOptions_(0), 'iterations': pbkdf.objectForKey_('iterations') } if cram_md5 is not None: hashes['CRAM-MD5'] = cram_md5.base64EncodedStringWithOptions_(0) if nt is not None: hashes['NT'] = nt.base64EncodedStringWithOptions_(0) if recoverable is not None: hashes['RECOVERABLE'] = recoverable.base64EncodedStringWithOptions_(0) return hashes def info(name): ''' Return information for the specified user CLI Example: .. code-block:: bash salt '*' mac_shadow.info admin ''' # dscl -plist . -read /Users/<User> ShadowHashData # Read out name from dscl # Read out passwd hash from decrypted ShadowHashData in dslocal # Read out lstchg/min/max/warn/inact/expire from PasswordPolicy pass def gen_password(password, salt=None, iterations=None): ''' Generate hashed (PBKDF2-SHA512) password Returns a dict containing values for 'entropy', 'salt' and 'iterations'. password Plaintext password to be hashed. 
salt Cryptographic salt (base64 encoded). If not given, a random 32-character salt will be generated. (32 bytes is the standard salt length for OSX) iterations Number of iterations for the key derivation function, default is 1000 CLI Example: .. code-block:: bash salt '*' mac_shadow.gen_password 'I_am_password' salt '*' mac_shadow.gen_password 'I_am_password' 'Ausrbk5COuB9V4ata6muoj+HPjA92pefPfbW9QPnv9M=' 23000 ''' if iterations is None: iterations = 1000 if salt is None: salt_bin = os.urandom(32) else: salt_bin = base64.b64decode(salt, '+/') entropy, used_salt, used_iterations = _pl_salted_sha512_pbkdf2_from_string(password, salt_bin, iterations) result = { 'entropy': base64.b64encode(entropy, '+/'), 'salt': base64.b64encode(used_salt, '+/'), 'iterations': used_iterations } return {'SALTED-SHA512-PBKDF2': result} def set_password_hash(name, hashtype, hash, salt=None, iterations=None): ''' Set the given hash as the shadow hash data for the named user. name The name of the local user, which is assumed to be in the local directory service. hashtype A valid hash type, one of: PBKDF2, CRAM-MD5, NT, RECOVERABLE hash The computed hash salt (optional) The salt to use, if applicable. iterations The number of iterations to use, if applicable. 
''' # current_hashes = user_shadowhash(name) # current_pbkdf2 = current_hashes['SALTED-SHA512-PBKDF2'] # # log.debug('Current ShadowHashdata follows') # log.debug(current_hashes) shd = {'SALTED-SHA512-PBKDF2': {'entropy': hash, 'salt': salt, 'iterations': iterations}} log.debug('Encoding following dict as bplist') log.debug(shd) # if shd['SALTED-SHA512-PBKDF2']['entropy'] == current_pbkdf2['entropy']: # log.debug('Entropy IS EQUAL!') shd_bplist = __salt__['plist.gen_string'](shd, 'binary') shd_bplist_b64 = base64.b64encode(shd_bplist, '+/') log.debug('Flushing directory services cache') __salt__['dscl.flushcache']() log.debug('Writing directly to dslocal') __salt__['plist.append_key']('/var/db/dslocal/nodes/Default/users/{0}.plist'.format(name), 'ShadowHashData', 'data', shd_bplist_b64) log.debug('Flushing directory services cache') __salt__['dscl.flushcache']() return True def set_password(name, password, salt=None, iterations=None): ''' Set the password for a named user (insecure). Use mac_shadow.set_password_hash to supply pre-computed hash values. For the moment this sets only the PBKDF2-SHA512 salted hash. To be a good citizen we should set every hash in the authority list. name The name of the local user, which is assumed to be in the local directory service. password The plaintext password to set (warning: insecure, used for testing) salt The salt to use, defaults to automatically generated. iterations The number of iterations to use, defaults to an automatically generated random number. CLI Example: .. 
code-block:: bash salt '*' mac_shadow.set_password macuser macpassword ''' #current_hashes = user_shadowhash(name) #current_pbkdf2 = current_hashes['SALTED-SHA512-PBKDF2'] # hash = gen_password(password, current_pbkdf2['salt'], current_pbkdf2['iterations']) hash = gen_password(password, salt, iterations) # # log.debug('Current ShadowHashData follows') # if current_hashes: # log.debug(current_hashes) # # if hash['SALTED-SHA512-PBKDF2']['entropy'] == current_pbkdf2['entropy']: # return False # No change required # else: # log.debug('No Shadow Hash Data exists for User: {0}'.format(name)) set_password_hash( name, 'PBKDF2', hash['SALTED-SHA512-PBKDF2']['entropy'], hash['SALTED-SHA512-PBKDF2']['salt'], hash['SALTED-SHA512-PBKDF2']['iterations'] ) return True def del_password(name): ''' Delete the password from name user CLI Example: .. code-block:: bash salt '*' shadow.del_password username ''' pass # Re-order authentication authority and remove ShadowHashData
mit
anryko/ansible
lib/ansible/modules/cloud/cloudstack/cs_region.py
13
4901
#!/usr/bin/python # -*- coding: utf-8 -*- # # (c) 2016, René Moser <mail@renemoser.net> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: cs_region short_description: Manages regions on Apache CloudStack based clouds. description: - Add, update and remove regions. version_added: '2.3' author: René Moser (@resmo) options: id: description: - ID of the region. - Must be an number (int). type: int required: true name: description: - Name of the region. - Required if I(state=present) type: str endpoint: description: - Endpoint URL of the region. - Required if I(state=present) type: str state: description: - State of the region. type: str default: present choices: [ present, absent ] extends_documentation_fragment: cloudstack ''' EXAMPLES = ''' - name: create a region cs_region: id: 2 name: geneva endpoint: https://cloud.gva.example.com delegate_to: localhost - name: remove a region with ID 2 cs_region: id: 2 state: absent delegate_to: localhost ''' RETURN = ''' --- id: description: ID of the region. returned: success type: int sample: 1 name: description: Name of the region. returned: success type: str sample: local endpoint: description: Endpoint of the region. returned: success type: str sample: http://cloud.example.com gslb_service_enabled: description: Whether the GSLB service is enabled or not. returned: success type: bool sample: true portable_ip_service_enabled: description: Whether the portable IP service is enabled or not. 
returned: success type: bool sample: true ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.cloudstack import ( AnsibleCloudStack, cs_argument_spec, cs_required_together ) class AnsibleCloudStackRegion(AnsibleCloudStack): def __init__(self, module): super(AnsibleCloudStackRegion, self).__init__(module) self.returns = { 'endpoint': 'endpoint', 'gslbserviceenabled': 'gslb_service_enabled', 'portableipserviceenabled': 'portable_ip_service_enabled', } def get_region(self): id = self.module.params.get('id') regions = self.query_api('listRegions', id=id) if regions: return regions['region'][0] return None def present_region(self): region = self.get_region() if not region: region = self._create_region(region=region) else: region = self._update_region(region=region) return region def _create_region(self, region): self.result['changed'] = True args = { 'id': self.module.params.get('id'), 'name': self.module.params.get('name'), 'endpoint': self.module.params.get('endpoint') } if not self.module.check_mode: res = self.query_api('addRegion', **args) region = res['region'] return region def _update_region(self, region): args = { 'id': self.module.params.get('id'), 'name': self.module.params.get('name'), 'endpoint': self.module.params.get('endpoint') } if self.has_changed(args, region): self.result['changed'] = True if not self.module.check_mode: res = self.query_api('updateRegion', **args) region = res['region'] return region def absent_region(self): region = self.get_region() if region: self.result['changed'] = True if not self.module.check_mode: self.query_api('removeRegion', id=region['id']) return region def main(): argument_spec = cs_argument_spec() argument_spec.update(dict( id=dict(required=True, type='int'), name=dict(), endpoint=dict(), state=dict(choices=['present', 'absent'], default='present'), )) module = AnsibleModule( argument_spec=argument_spec, required_together=cs_required_together(), required_if=[ ('state', 'present', ['name', 
'endpoint']), ], supports_check_mode=True ) acs_region = AnsibleCloudStackRegion(module) state = module.params.get('state') if state == 'absent': region = acs_region.absent_region() else: region = acs_region.present_region() result = acs_region.get_result(region) module.exit_json(**result) if __name__ == '__main__': main()
gpl-3.0
axelspringer/ansible-modules-core
system/mount.py
3
12181
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2012, Red Hat, inc # Written by Seth Vidal # based on the mount modules from salt and puppet # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- module: mount short_description: Control active and configured mount points description: - This module controls active and configured mount points in C(/etc/fstab). version_added: "0.6" options: name: description: - "path to the mount point, eg: C(/mnt/files)" required: true src: description: - device to be mounted on I(name). Required when C(state=present) or C(state=mounted) required: false default: null fstype: description: - file-system type. Required when C(state=present) or C(state=mounted) required: false default: null opts: description: - mount options (see fstab(5)) required: false default: null dump: description: - "dump (see fstab(5)), Note that if nulled, C(state=present) will cease to work and duplicate entries will be made with subsequent runs." required: false default: 0 passno: description: - "passno (see fstab(5)), Note that if nulled, C(state=present) will cease to work and duplicate entries will be made with subsequent runs." required: false default: 0 state: description: - If C(mounted) or C(unmounted), the device will be actively mounted or unmounted as needed and appropriately configured in I(fstab). 
- C(absent) and C(present) only deal with I(fstab) but will not affect current mounting. - If specifying C(mounted) and the mount point is not present, the mount point will be created. Similarly. - Specifying C(absent) will remove the mount point directory. required: true choices: [ "present", "absent", "mounted", "unmounted" ] fstab: description: - file to use instead of C(/etc/fstab). You shouldn't use that option unless you really know what you are doing. This might be useful if you need to configure mountpoints in a chroot environment. required: false default: /etc/fstab author: - Ansible Core Team - Seth Vidal ''' EXAMPLES = ''' # Mount DVD read-only - mount: name=/mnt/dvd src=/dev/sr0 fstype=iso9660 opts=ro state=present # Mount up device by label - mount: name=/srv/disk src='LABEL=SOME_LABEL' fstype=ext4 state=present # Mount up device by UUID - mount: name=/home src='UUID=b3e48f45-f933-4c8e-a700-22a159ec9077' fstype=xfs opts=noatime state=present ''' from ansible.module_utils.six import iteritems def write_fstab(lines, dest): fs_w = open(dest, 'w') for l in lines: fs_w.write(l) fs_w.flush() fs_w.close() def _escape_fstab(v): """ escape space (040), ampersand (046) and backslash (134) which are invalid in fstab fields """ if isinstance(v, int): return v else: return v.replace('\\', '\\134').replace(' ', '\\040').replace('&', '\\046') def set_mount(module, **kwargs): """ set/change a mount point location in fstab """ # kwargs: name, src, fstype, opts, dump, passno, state, fstab=/etc/fstab args = dict( opts = 'defaults', dump = '0', passno = '0', fstab = '/etc/fstab' ) args.update(kwargs) new_line = '%(src)s %(name)s %(fstype)s %(opts)s %(dump)s %(passno)s\n' to_write = [] exists = False changed = False escaped_args = dict([(k, _escape_fstab(v)) for k, v in iteritems(args)]) for line in open(args['fstab'], 'r').readlines(): if not line.strip(): to_write.append(line) continue if line.strip().startswith('#'): to_write.append(line) continue if len(line.split()) 
!= 6: # not sure what this is or why it is here # but it is not our fault so leave it be to_write.append(line) continue ld = {} ld['src'], ld['name'], ld['fstype'], ld['opts'], ld['dump'], ld['passno'] = line.split() if ld['name'] != escaped_args['name']: to_write.append(line) continue # it exists - now see if what we have is different exists = True for t in ('src', 'fstype','opts', 'dump', 'passno'): if ld[t] != escaped_args[t]: changed = True ld[t] = escaped_args[t] if changed: to_write.append(new_line % ld) else: to_write.append(line) if not exists: to_write.append(new_line % escaped_args) changed = True if changed and not module.check_mode: write_fstab(to_write, args['fstab']) return (args['name'], changed) def unset_mount(module, **kwargs): """ remove a mount point from fstab """ # kwargs: name, src, fstype, opts, dump, passno, state, fstab=/etc/fstab args = dict( opts = 'default', dump = '0', passno = '0', fstab = '/etc/fstab' ) args.update(kwargs) to_write = [] changed = False escaped_name = _escape_fstab(args['name']) for line in open(args['fstab'], 'r').readlines(): if not line.strip(): to_write.append(line) continue if line.strip().startswith('#'): to_write.append(line) continue if len(line.split()) != 6: # not sure what this is or why it is here # but it is not our fault so leave it be to_write.append(line) continue ld = {} ld['src'], ld['name'], ld['fstype'], ld['opts'], ld['dump'], ld['passno'] = line.split() if ld['name'] != escaped_name: to_write.append(line) continue # if we got here we found a match - continue and mark changed changed = True if changed and not module.check_mode: write_fstab(to_write, args['fstab']) return (args['name'], changed) def mount(module, **kwargs): """ mount up a path or remount if needed """ # kwargs: name, src, fstype, opts, dump, passno, state, fstab=/etc/fstab args = dict( opts = 'default', dump = '0', passno = '0', fstab = '/etc/fstab' ) args.update(kwargs) mount_bin = module.get_bin_path('mount') name = 
kwargs['name'] cmd = [ mount_bin, ] if ismount(name): cmd += [ '-o', 'remount', ] if get_platform().lower() == 'freebsd': cmd += [ '-F', args['fstab'], ] if get_platform().lower() == 'linux': cmd += [ '-T', args['fstab'], ] cmd += [ name, ] rc, out, err = module.run_command(cmd) if rc == 0: return 0, '' else: return rc, out+err def umount(module, **kwargs): """ unmount a path """ umount_bin = module.get_bin_path('umount') name = kwargs['name'] cmd = [umount_bin, name] rc, out, err = module.run_command(cmd) if rc == 0: return 0, '' else: return rc, out+err def main(): module = AnsibleModule( argument_spec = dict( state = dict(required=True, choices=['present', 'absent', 'mounted', 'unmounted']), name = dict(required=True), opts = dict(default=None), passno = dict(default=None, type='str'), dump = dict(default=None), src = dict(required=False), fstype = dict(required=False), fstab = dict(default='/etc/fstab') ), supports_check_mode=True, required_if = ( ['state', 'mounted', ['src', 'fstype']], ['state', 'present', ['src', 'fstype']] ) ) changed = False rc = 0 args = {'name': module.params['name']} if module.params['src'] is not None: args['src'] = module.params['src'] if module.params['fstype'] is not None: args['fstype'] = module.params['fstype'] if module.params['passno'] is not None: args['passno'] = module.params['passno'] if module.params['opts'] is not None: args['opts'] = module.params['opts'] if module.params['dump'] is not None: args['dump'] = module.params['dump'] if module.params['fstab'] is not None: args['fstab'] = module.params['fstab'] # if fstab file does not exist, we first need to create it. This mainly # happens when fstab optin is passed to the module. 
if not os.path.exists(args['fstab']): if not os.path.exists(os.path.dirname(args['fstab'])): os.makedirs(os.path.dirname(args['fstab'])) open(args['fstab'],'a').close() # absent == remove from fstab and unmounted # unmounted == do not change fstab state, but unmount # present == add to fstab, do not change mount state # mounted == add to fstab if not there and make sure it is mounted, if it has changed in fstab then remount it state = module.params['state'] name = module.params['name'] if state == 'absent': name, changed = unset_mount(module, **args) if changed and not module.check_mode: if ismount(name): res,msg = umount(module, **args) if res: module.fail_json(msg="Error unmounting %s: %s" % (name, msg)) if os.path.exists(name): try: os.rmdir(name) except (OSError, IOError): e = get_exception() module.fail_json(msg="Error rmdir %s: %s" % (name, str(e))) module.exit_json(changed=changed, **args) if state == 'unmounted': if ismount(name): if not module.check_mode: res,msg = umount(module, **args) if res: module.fail_json(msg="Error unmounting %s: %s" % (name, msg)) changed = True module.exit_json(changed=changed, **args) if state in ['mounted', 'present']: if state == 'mounted': if not os.path.exists(name) and not module.check_mode: try: os.makedirs(name) except (OSError, IOError): e = get_exception() module.fail_json(msg="Error making dir %s: %s" % (name, str(e))) name, changed = set_mount(module, **args) if state == 'mounted': res = 0 if ismount(name): if changed and not module.check_mode: res,msg = mount(module, **args) elif 'bind' in args.get('opts', []): changed = True cmd = 'mount -l' rc, out, err = module.run_command(cmd) allmounts = out.split('\n') for mounts in allmounts[:-1]: arguments = mounts.split() if arguments[0] == args['src'] and arguments[2] == args['name'] and arguments[4] == args['fstype']: changed = False if changed: res,msg = mount(module, **args) else: changed = True if not module.check_mode: res,msg = mount(module, **args) if res: 
module.fail_json(msg="Error mounting %s: %s" % (name, msg)) module.exit_json(changed=changed, **args) module.fail_json(msg='Unexpected position reached') # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.ismount import * main()
gpl-3.0
rodrigosurita/GDAd
sdaps/model/questionnaire.py
1
9008
# -*- coding: utf8 -*- # SDAPS - Scripts for data acquisition with paper based surveys # Copyright(C) 2008, Christoph Simon <post@christoph-simon.eu> # Copyright(C) 2008, Benjamin Berg <benjamin@sipsolutions.net> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. u''' Hinweis zu den Diamantstrukturen Bei Klassen mit mehreren Basisklassen definiert maximal eine Basisklasse eine eigene __init__ - Funktion. Die anderen Klassen sind "nur" Mixin - Klassen. Dadurch werden die Probleme der Diamantstruktur umgangen. ''' import buddy import data import struct class DataObject(object): u'''Mixin ''' def get_data(self): if not self.id in self.sheet.data: self.sheet.data[self.id] = getattr(data, self.__class__.__name__)(self) return self.sheet.data[self.id] data = property(get_data) class Questionnaire(buddy.Object): ''' Identification: There is only one. Reference: survey.questionnaire Parent: self.survey ''' def __init__(self): self.survey = None self.qobjects = list() self.last_id = (0, 0) self.init_attributes() def init_attributes(self): self.page_count = 0 def add_qobject(self, qobject, new_id=None): qobject.questionnaire = self # XXX: Is this any good? 
if new_id is not None: assert new_id > self.last_id self.last_id = new_id qobject.id = new_id else: self.last_id = qobject.init_id(self.last_id) self.qobjects.append(qobject) def get_sheet(self): return self.survey.sheet sheet = property(get_sheet) def __unicode__(self): return unicode().join( [u'%s\n' % self.__class__.__name__] + [unicode(qobject) for qobject in self.qobjects] ) class QObject(buddy.Object): ''' Identification: id ==(major, minor) Reference: survey.questionnaire.qobjects[i](i != id) Parent: self.questionnaire ''' def __init__(self): self.questionnaire = None self.boxes = list() self.last_id = -1 self.init_attributes() def init_attributes(self): pass def init_id(self, id): self.id = (id[0], id[1] + 1) return self.id def add_box(self, box): box.question = self self.last_id = box.init_id(self.last_id) self.boxes.append(box) def get_sheet(self): return self.questionnaire.sheet sheet = property(get_sheet) def calculate_survey_id(self, md5): pass def id_str(self): ids = [str(x) for x in self.id] return u'.'.join(ids) def id_csv(self, theid=None): if theid is None: theid = self.id ids = [str(x) for x in theid] return u'_'.join(ids) def id_filter(self): ids = [str(x) for x in self.id] return u'_' + u'_'.join(ids) def __unicode__(self): return u'(%s)\n' % ( self.__class__.__name__, ) class Head(QObject): def init_attributes(self): QObject.init_attributes(self) self.title = unicode() def init_id(self, id): self.id = (id[0] + 1, 0) return self.id def __unicode__(self): return u'%s(%s) %s\n' % ( self.id_str(), self.__class__.__name__, self.title, ) class Question(QObject): def init_attributes(self): QObject.init_attributes(self) self.page_number = 0 self.question = unicode() def calculate_survey_id(self, md5): for box in self.boxes: box.calculate_survey_id(md5) def __unicode__(self): return u'%s(%s) %s {%i}\n' % ( self.id_str(), self.__class__.__name__, self.question, self.page_number ) class Choice(Question): def __unicode__(self): return unicode().join( 
[Question.__unicode__(self)] + [unicode(box) for box in self.boxes] ) def get_answer(self): '''it's a list containing all selected values ''' answer = list() for box in self.boxes: if box.data.state: answer.append(box.value) return answer class Mark(Question): def init_attributes(self): Question.init_attributes(self) self.answers = list() def __unicode__(self): if len(self.answers) == 2: return unicode().join( [Question.__unicode__(self)] + [u'\t%s - %s\n' % tuple(self.answers)] + [unicode(box) for box in self.boxes] ) else: return unicode().join( [Question.__unicode__(self)] + [u'\t? - ?\n'] + [unicode(box) for box in self.boxes] ) def get_answer(self): '''it's an integer between 0 and 5 1 till 5 are valid marks, 0 is returned if there's something wrong ''' # box.value is zero based, a mark is based 1 answer = list() for box in self.boxes: if box.data.state: answer.append(box.value) if len(answer) == 1: return answer[0] + 1 else: return 0 def set_answer(self, answer): for box in self.boxes: box.data.state = box.value == answer - 1 class Text(Question): def __unicode__(self): return unicode().join( [Question.__unicode__(self)] + [unicode(box) for box in self.boxes] ) def get_answer(self): '''it's a bool, wether there is content in the textbox ''' assert len(self.boxes) == 1 return self.boxes[0].data.state class Additional_Head(Head): pass class Additional_Mark(Question, DataObject): def init_attributes(self): Question.init_attributes(self) self.answers = list() def __unicode__(self): return unicode().join( [Question.__unicode__(self)] + [u'\t%s - %s\n' % tuple(self.answers)] ) def get_answer(self): return self.data.value def set_answer(self, answer): self.data.value = answer class Additional_FilterHistogram(Question, DataObject): def init_attributes(self): Question.init_attributes(self) self.answers = list() self.filters = list() def __unicode__(self): result = [] result.append(Question.__unicode__(self)) for i in xrange(len(self.answers)): result.append(u'\t%s - 
%s\n' % (self.answers[i], self.filters[i])) return unicode().join(result) def get_answer(self): return self.data.value def set_answer(self, answer): raise NotImplemented() class Box(buddy.Object, DataObject): ''' Identification: id of the parent and value of the box :: id == (major, minor, value) Reference: survey.questionnaire.qobjects[i].boxes[j] Parent: self.question ''' def __init__(self): self.question = None self.init_attributes() def init_attributes(self): self.page_number = 0 self.x = 0 self.y = 0 self.width = 0 self.height = 0 self.text = unicode() def init_id(self, id): self.value = id + 1 self.id = self.question.id + (self.value,) return self.value def id_str(self): ids = [str(x) for x in self.id] return u'.'.join(ids) def get_sheet(self): return self.question.sheet sheet = property(get_sheet) def calculate_survey_id(self, md5): tmp = struct.pack('!ffff', self.x, self.y, self.width, self.height) md5.update(tmp) def __unicode__(self): return u'\t%i(%s) %s %s %s %s %s\n' % ( self.value, (self.__class__.__name__).ljust(8), (u'%.1f' % self.x).rjust(5), (u'%.1f' % self.y).rjust(5), (u'%.1f' % self.width).rjust(5), (u'%.1f' % self.height).rjust(5), self.text ) class Checkbox(Box): def init_attributes(self): Box.init_attributes(self) self.form = "box" def calculate_survey_id(self, md5): Box.calculate_survey_id(self, md5) md5.update(self.form) class Textbox(Box): pass
gpl-3.0
gauribhoite/personfinder
env/google_appengine/lib/django-1.3/django/contrib/localflavor/fr/fr_department.py
314
3326
# -*- coding: utf-8 -*- DEPARTMENT_ASCII_CHOICES = ( ('01', '01 - Ain'), ('02', '02 - Aisne'), ('03', '03 - Allier'), ('04', '04 - Alpes-de-Haute-Provence'), ('05', '05 - Hautes-Alpes'), ('06', '06 - Alpes-Maritimes'), ('07', '07 - Ardeche'), ('08', '08 - Ardennes'), ('09', '09 - Ariege'), ('10', '10 - Aube'), ('11', '11 - Aude'), ('12', '12 - Aveyron'), ('13', '13 - Bouches-du-Rhone'), ('14', '14 - Calvados'), ('15', '15 - Cantal'), ('16', '16 - Charente'), ('17', '17 - Charente-Maritime'), ('18', '18 - Cher'), ('19', '19 - Correze'), ('21', '21 - Cote-d\'Or'), ('22', '22 - Cotes-d\'Armor'), ('23', '23 - Creuse'), ('24', '24 - Dordogne'), ('25', '25 - Doubs'), ('26', '26 - Drome'), ('27', '27 - Eure'), ('28', '28 - Eure-et-Loire'), ('29', '29 - Finistere'), ('2A', '2A - Corse-du-Sud'), ('2B', '2B - Haute-Corse'), ('30', '30 - Gard'), ('31', '31 - Haute-Garonne'), ('32', '32 - Gers'), ('33', '33 - Gironde'), ('34', '34 - Herault'), ('35', '35 - Ille-et-Vilaine'), ('36', '36 - Indre'), ('37', '37 - Indre-et-Loire'), ('38', '38 - Isere'), ('39', '39 - Jura'), ('40', '40 - Landes'), ('41', '41 - Loir-et-Cher'), ('42', '42 - Loire'), ('43', '43 - Haute-Loire'), ('44', '44 - Loire-Atlantique'), ('45', '45 - Loiret'), ('46', '46 - Lot'), ('47', '47 - Lot-et-Garonne'), ('48', '48 - Lozere'), ('49', '49 - Maine-et-Loire'), ('50', '50 - Manche'), ('51', '51 - Marne'), ('52', '52 - Haute-Marne'), ('53', '53 - Mayenne'), ('54', '54 - Meurthe-et-Moselle'), ('55', '55 - Meuse'), ('56', '56 - Morbihan'), ('57', '57 - Moselle'), ('58', '58 - Nievre'), ('59', '59 - Nord'), ('60', '60 - Oise'), ('61', '61 - Orne'), ('62', '62 - Pas-de-Calais'), ('63', '63 - Puy-de-Dome'), ('64', '64 - Pyrenees-Atlantiques'), ('65', '65 - Hautes-Pyrenees'), ('66', '66 - Pyrenees-Orientales'), ('67', '67 - Bas-Rhin'), ('68', '68 - Haut-Rhin'), ('69', '69 - Rhone'), ('70', '70 - Haute-Saone'), ('71', '71 - Saone-et-Loire'), ('72', '72 - Sarthe'), ('73', '73 - Savoie'), ('74', '74 - Haute-Savoie'), 
('75', '75 - Paris'), ('76', '76 - Seine-Maritime'), ('77', '77 - Seine-et-Marne'), ('78', '78 - Yvelines'), ('79', '79 - Deux-Sevres'), ('80', '80 - Somme'), ('81', '81 - Tarn'), ('82', '82 - Tarn-et-Garonne'), ('83', '83 - Var'), ('84', '84 - Vaucluse'), ('85', '85 - Vendee'), ('86', '86 - Vienne'), ('87', '87 - Haute-Vienne'), ('88', '88 - Vosges'), ('89', '89 - Yonne'), ('90', '90 - Territoire de Belfort'), ('91', '91 - Essonne'), ('92', '92 - Hauts-de-Seine'), ('93', '93 - Seine-Saint-Denis'), ('94', '94 - Val-de-Marne'), ('95', '95 - Val-d\'Oise'), ('971', '971 - Guadeloupe'), ('972', '972 - Martinique'), ('973', '973 - Guyane'), ('974', '974 - La Reunion'), ('975', '975 - Saint-Pierre-et-Miquelon'), ('976', '976 - Mayotte'), ('984', '984 - Terres Australes et Antarctiques'), ('986', '986 - Wallis et Futuna'), ('987', '987 - Polynesie Francaise'), ('988', '988 - Nouvelle-Caledonie'), )
apache-2.0
mornsun/javascratch
src/topcoder.py/LC_330_Patching_Array.py
1
1807
#!/usr/bin/env python #coding=utf8 ''' Given a sorted positive integer array nums and an integer n, add/patch elements to the array such that any number in range [1, n] inclusive can be formed by the sum of some elements in the array. Return the minimum number of patches required. Example 1: nums = [1, 3], n = 6 Return 1. Combinations of nums are [1], [3], [1,3], which form possible sums of: 1, 3, 4. Now if we add/patch 2 to nums, the combinations are: [1], [2], [3], [1,3], [2,3], [1,2,3]. Possible sums are 1, 2, 3, 4, 5, 6, which now covers the range [1, 6]. So we only need 1 patch. Example 2: nums = [1, 5, 10], n = 20 Return 2. The two patches can be [2, 4]. Example 3: nums = [1, 2, 2], n = 5 Return 0. @author: Chauncey beat 92.56% ''' import heapq import datetime import time import sys class Solution(object): def minPatches(self, nums, n): """ :type nums: List[int] :type n: int :rtype: int """ if n<=0: return 0 if nums is None: nums = [] miss = 1 index = 0 patch = 0 while miss<=n: if index>=len(nums) or miss<nums[index]: miss <<= 1 patch += 1 continue if miss>=nums[index]: miss += nums[index] index += 1 continue return patch if __name__ == '__main__': solution = Solution() start_time = datetime.datetime.now() print solution.minPatches([1, 3], 6) #1 print solution.minPatches([1, 5, 10], 20) #2 print solution.minPatches([1, 2, 2], 5) #0 print solution.minPatches([], 7) #3 elapsed = datetime.datetime.now() - start_time print 'elapsed: ', elapsed.total_seconds() #transactions = [buy, sell, cooldown, buy, sell]
gpl-2.0
cloud9UG/odoo
addons/hr_holidays/wizard/__init__.py
442
1122
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import hr_holidays_summary_department import hr_holidays_summary_employees # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
timotheosh/python_koans
python3/libs/mock.py
249
8036
# mock.py # Test tools for mocking and patching. # Copyright (C) 2007-2009 Michael Foord # E-mail: fuzzyman AT voidspace DOT org DOT uk # mock 0.6.0 # http://www.voidspace.org.uk/python/mock/ # Released subject to the BSD License # Please see http://www.voidspace.org.uk/python/license.shtml # Scripts maintained at http://www.voidspace.org.uk/python/index.shtml # Comments, suggestions and bug reports welcome. __all__ = ( 'Mock', 'patch', 'patch_object', 'sentinel', 'DEFAULT' ) __version__ = '0.6.0 modified by Greg Malcolm' class SentinelObject(object): def __init__(self, name): self.name = name def __repr__(self): return '<SentinelObject "{0!s}">'.format(self.name) class Sentinel(object): def __init__(self): self._sentinels = {} def __getattr__(self, name): return self._sentinels.setdefault(name, SentinelObject(name)) sentinel = Sentinel() DEFAULT = sentinel.DEFAULT class OldStyleClass: pass ClassType = type(OldStyleClass) def _is_magic(name): return '__{0!s}__'.format(name[2:-2]) == name def _copy(value): if type(value) in (dict, list, tuple, set): return type(value)(value) return value class Mock(object): def __init__(self, spec=None, side_effect=None, return_value=DEFAULT, name=None, parent=None, wraps=None): self._parent = parent self._name = name if spec is not None and not isinstance(spec, list): spec = [member for member in dir(spec) if not _is_magic(member)] self._methods = spec self._children = {} self._return_value = return_value self.side_effect = side_effect self._wraps = wraps self.reset_mock() def reset_mock(self): self.called = False self.call_args = None self.call_count = 0 self.call_args_list = [] self.method_calls = [] for child in self._children.values(): child.reset_mock() if isinstance(self._return_value, Mock): self._return_value.reset_mock() def __get_return_value(self): if self._return_value is DEFAULT: self._return_value = Mock() return self._return_value def __set_return_value(self, value): self._return_value = value return_value = 
property(__get_return_value, __set_return_value) def __call__(self, *args, **kwargs): self.called = True self.call_count += 1 self.call_args = (args, kwargs) self.call_args_list.append((args, kwargs)) parent = self._parent name = self._name while parent is not None: parent.method_calls.append((name, args, kwargs)) if parent._parent is None: break name = parent._name + '.' + name parent = parent._parent ret_val = DEFAULT if self.side_effect is not None: if (isinstance(self.side_effect, Exception) or isinstance(self.side_effect, (type, ClassType)) and issubclass(self.side_effect, Exception)): raise self.side_effect ret_val = self.side_effect(*args, **kwargs) if ret_val is DEFAULT: ret_val = self.return_value if self._wraps is not None and self._return_value is DEFAULT: return self._wraps(*args, **kwargs) if ret_val is DEFAULT: ret_val = self.return_value return ret_val def __getattr__(self, name): if self._methods is not None: if name not in self._methods: raise AttributeError("Mock object has no attribute '{0!s}'".format(name)) elif _is_magic(name): raise AttributeError(name) if name not in self._children: wraps = None if self._wraps is not None: wraps = getattr(self._wraps, name) self._children[name] = Mock(parent=self, name=name, wraps=wraps) return self._children[name] def assert_called_with(self, *args, **kwargs): assert self.call_args == (args, kwargs), 'Expected: {0!s}\nCalled with: {1!s}'.format((args, kwargs), self.call_args) def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) def _importer(target): components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".{0!s}".format(comp) thing = _dot_lookup(thing, comp, import_path) return thing class _patch(object): def __init__(self, target, attribute, new, spec, create): self.target = target self.attribute = attribute self.new = new self.spec 
= spec self.create = create self.has_local = False def __call__(self, func): if hasattr(func, 'patchings'): func.patchings.append(self) return func def patched(*args, **keywargs): # don't use a with here (backwards compatability with 2.5) extra_args = [] for patching in patched.patchings: arg = patching.__enter__() if patching.new is DEFAULT: extra_args.append(arg) args += tuple(extra_args) try: return func(*args, **keywargs) finally: for patching in getattr(patched, 'patchings', []): patching.__exit__() patched.patchings = [self] patched.__name__ = func.__name__ patched.compat_co_firstlineno = getattr(func, "compat_co_firstlineno", func.func_code.co_firstlineno) return patched def get_original(self): target = self.target name = self.attribute create = self.create original = DEFAULT if _has_local_attr(target, name): try: original = target.__dict__[name] except AttributeError: # for instances of classes with slots, they have no __dict__ original = getattr(target, name) elif not create and not hasattr(target, name): raise AttributeError("{0!s} does not have the attribute {1!r}".format(target, name)) return original def __enter__(self): new, spec, = self.new, self.spec original = self.get_original() if new is DEFAULT: # XXXX what if original is DEFAULT - shouldn't use it as a spec inherit = False if spec == True: # set spec to the object we are replacing spec = original if isinstance(spec, (type, ClassType)): inherit = True new = Mock(spec=spec) if inherit: new.return_value = Mock(spec=spec) self.temp_original = original setattr(self.target, self.attribute, new) return new def __exit__(self, *_): if self.temp_original is not DEFAULT: setattr(self.target, self.attribute, self.temp_original) else: delattr(self.target, self.attribute) del self.temp_original def patch_object(target, attribute, new=DEFAULT, spec=None, create=False): return _patch(target, attribute, new, spec, create) def patch(target, new=DEFAULT, spec=None, create=False): try: target, attribute = 
target.rsplit('.', 1) except (TypeError, ValueError): raise TypeError("Need a valid target to patch. You supplied: {0!r}".format(target,)) target = _importer(target) return _patch(target, attribute, new, spec, create) def _has_local_attr(obj, name): try: return name in vars(obj) except TypeError: # objects without a __dict__ return hasattr(obj, name)
mit
ratschlab/oqtans_tools
EasySVM/0.3.3/build/lib/esvm/experiment.py
3
32654
############################################################################################# # # # This program is free software; you can redistribute it and/or modify # # it under the terms of the GNU General Public License as published by # # the Free Software Foundation; either version 3 of the License, or # # (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU General Public License for more details. # # # # You should have received a copy of the GNU General Public License # # along with this program; if not, see http://www.gnu.org/licenses # # or write to the Free Software Foundation, Inc., 51 Franklin Street, # # Fifth Floor, Boston, MA 02110-1301 USA # # # ############################################################################################# import sys import random import shutil import numpy from numpy import sign, where, array, ones import parse import utils from poim import compute_poims import shogun from shogun.Kernel import GaussianKernel, WeightedDegreePositionStringKernel from shogun.Kernel import WeightedDegreeStringKernel from shogun.Kernel import LinearKernel, PolyKernel, LocalAlignmentStringKernel from shogun.Kernel import LocalityImprovedStringKernel from shogun.Kernel import CommWordStringKernel, WeightedCommWordStringKernel, CommUlongStringKernel from shogun.Kernel import CombinedKernel from shogun.Kernel import SLOWBUTMEMEFFICIENT from shogun.Kernel import AvgDiagKernelNormalizer from shogun.Features import RealFeatures, BinaryLabels, StringCharFeatures, DNA, StringWordFeatures, StringUlongFeatures, PROTEIN from shogun.Features import CombinedFeatures from shogun.Classifier import LibSVM,GPBTSVM DefaultSVM = LibSVM try: from shogun.Classifier import SVMLight LinAddSVM = SVMLight LinearSVM = SVMLight except: LinAddSVM = GPBTSVM LinearSVM = 
LibSVM from shogun.Preprocessor import SortWordString, SortUlongString from utils import calcprc, calcroc, accuracy from utils import getPartitionedSet, getCurrentSplit import plots import re from poim import reshape_normalize_contribs, compute_weight_mass ################################################################################ def non_atcg_convert(seq, nuc_con): """ Converts Non ATCG characters from DNA sequence """ if nuc_con == '':sys.stderr.write("usage: Provide a choice for non ACGT nucleotide conversion [T|A|C|G|R|Y|N] at last\n");sys.exit(-1) if re.match(r'[^ATCGRYN]', nuc_con):sys.stderr.write("usage: Conversion nucleotide choice -"+ nuc_con +"- failed. pick one from [T|A|C|G|R|Y|N]\n");sys.exit(-1) nuc_con = nuc_con.upper() mod_seq = [] for i in range(len(seq)): if re.search(r'[^ACTG]', seq[i], re.IGNORECASE): if nuc_con == 'A' or nuc_con == 'T' or nuc_con == 'C' or nuc_con == 'G': seq[i] = re.sub(r'[^ATCG|actg]', nuc_con, seq[i]) seq[i] = seq[i].upper() mod_seq.append(seq[i]) continue if nuc_con == 'N':(nucleotide, line) = ('ATCG', '') if nuc_con == 'R':(nucleotide, line) = ('AG', '') if nuc_con == 'Y':(nucleotide, line) = ('TC', '') for single_nuc in seq[i]: if re.match(r'[^ACGT]', single_nuc, re.IGNORECASE): line += random.choice(nucleotide) else: line += single_nuc.upper() mod_seq.append(line) else: seq[i] = seq[i].upper() mod_seq.append(seq[i]) return mod_seq def non_aminoacid_converter(seq, amino_con): """ Converts Non amino acid characters from protein sequence """ if amino_con == '':sys.stderr.write("usage: Provide a choice for replacing non amino acid characters\n");sys.exit(-1) flag = 0 if len(amino_con)>1: if amino_con != 'random':flag = 1 else: if re.match(r'[^GPAVLIMCFYWHKRQNEDST]', amino_con, re.IGNORECASE):flag = 1 if flag == 1:sys.stderr.write("usage: Replace aminoacid chioce -"+ amino_con +"- failed. 
Pick a valid aminoacid single letter code/random\n");sys.exit(-1) amino_con = amino_con.upper() opt_seq = [] for i in range(len(seq)): if re.search(r'[^GPAVLIMCFYWHKRQNEDST]', seq[i], re.IGNORECASE): if amino_con == 'RANDOM': aminoacid = 'GPAVLIMCFYWHKRQNEDST' line = '' for single_amino in seq[i]: if re.match(r'[^GPAVLIMCFYWHKRQNEDST]', single_amino, re.IGNORECASE): r_amino = random.choice(aminoacid) line += r_amino else: single_amino = single_amino.upper() line += single_amino opt_seq.append(line) else: seq[i] = re.sub(r'[^GPAVLIMCFYWHKRQNEDST|gpavlimcfywhkrqnedst]', amino_con, seq[i]) seq[i] = seq[i].upper() opt_seq.append(seq[i]) else: seq[i] = seq[i].upper() opt_seq.append(seq[i]) return opt_seq # helper functions def create_features(kname, examples, kparam, train_mode, preproc, seq_source, nuc_con): """Converts numpy arrays or sequences into shogun features""" if kname == 'gauss' or kname == 'linear' or kname == 'poly': examples = numpy.array(examples) feats = RealFeatures(examples) elif kname == 'wd' or kname == 'localalign' or kname == 'localimprove': if seq_source == 'dna': examples = non_atcg_convert(examples, nuc_con) feats = StringCharFeatures(examples, DNA) elif seq_source == 'protein': examples = non_aminoacid_converter(examples, nuc_con) feats = StringCharFeatures(examples, PROTEIN) else: sys.stderr.write("Sequence source -"+seq_source+"- is invalid. select [dna|protein]\n") sys.exit(-1) elif kname == 'spec' or kname == 'cumspec': if seq_source == 'dna': examples = non_atcg_convert(examples, nuc_con) feats = StringCharFeatures(examples, DNA) elif seq_source == 'protein': examples = non_aminoacid_converter(examples, nuc_con) feats = StringCharFeatures(examples, PROTEIN) else: sys.stderr.write("Sequence source -"+seq_source+"- is invalid. 
select [dna|protein]\n") sys.exit(-1) wf = StringUlongFeatures( feats.get_alphabet() ) wf.obtain_from_char(feats, kparam['degree']-1, kparam['degree'], 0, kname=='cumspec') del feats if train_mode: preproc = SortUlongString() preproc.init(wf) wf.add_preprocessor(preproc) ret = wf.apply_preprocessor() #assert(ret) feats = wf elif kname == 'spec2' or kname == 'cumspec2': # spectrum kernel on two sequences feats = {} feats['combined'] = CombinedFeatures() reversed = kname=='cumspec2' (ex0,ex1) = zip(*examples) f0 = StringCharFeatures(list(ex0), DNA) wf = StringWordFeatures(f0.get_alphabet()) wf.obtain_from_char(f0, kparam['degree']-1, kparam['degree'], 0, reversed) del f0 if train_mode: preproc = SortWordString() preproc.init(wf) wf.add_preprocessor(preproc) ret = wf.apply_preprocessor() assert(ret) feats['combined'].append_feature_obj(wf) feats['f0'] = wf f1 = StringCharFeatures(list(ex1), DNA) wf = StringWordFeatures( f1.get_alphabet() ) wf.obtain_from_char(f1, kparam['degree']-1, kparam['degree'], 0, reversed) del f1 if train_mode: preproc = SortWordString() preproc.init(wf) wf.add_preprocessor(preproc) ret = wf.apply_preprocessor() assert(ret) feats['combined'].append_feature_obj(wf) feats['f1'] = wf else: print 'Unknown kernel %s' % kname return (feats,preproc) def create_kernel(kname,kparam,feats_train): """Call the corresponding constructor for the kernel""" if kname == 'gauss': kernel = GaussianKernel(feats_train, feats_train, kparam['width']) elif kname == 'linear': kernel = LinearKernel(feats_train, feats_train) kernel.set_normalizer(AvgDiagKernelNormalizer(kparam['scale'])) elif kname == 'poly': kernel = PolyKernel(feats_train, feats_train, kparam['degree'], kparam['inhomogene'], kparam['normal']) elif kname == 'wd': kernel=WeightedDegreePositionStringKernel(feats_train, feats_train, kparam['degree']) kernel.set_normalizer(AvgDiagKernelNormalizer(float(kparam['seqlength']))) 
kernel.set_shifts(kparam['shift']*numpy.ones(kparam['seqlength'],dtype=numpy.int32)) #kernel=WeightedDegreeStringKernel(feats_train, feats_train, kparam['degree']) elif kname == 'spec': kernel = CommUlongStringKernel(feats_train, feats_train) elif kname == 'cumspec': kernel = WeightedCommWordStringKernel(feats_train, feats_train) kernel.set_weights(numpy.ones(kparam['degree'])) elif kname == 'spec2': kernel = CombinedKernel() k0 = CommWordStringKernel(feats_train['f0'], feats_train['f0']) k0.io.disable_progress() kernel.append_kernel(k0) k1 = CommWordStringKernel(feats_train['f1'], feats_train['f1']) k1.io.disable_progress() kernel.append_kernel(k1) elif kname == 'cumspec2': kernel = CombinedKernel() k0 = WeightedCommWordStringKernel(feats_train['f0'], feats_train['f0']) k0.set_weights(numpy.ones(kparam['degree'])) k0.io.disable_progress() kernel.append_kernel(k0) k1 = WeightedCommWordStringKernel(feats_train['f1'], feats_train['f1']) k1.set_weights(numpy.ones(kparam['degree'])) k1.io.disable_progress() kernel.append_kernel(k1) elif kname == 'localalign': kernel = LocalAlignmentStringKernel(feats_train, feats_train) elif kname == 'localimprove': kernel = LocalityImprovedStringKernel(feats_train, feats_train, kparam['length'],\ kparam['indeg'], kparam['outdeg']) else: print 'Unknown kernel %s' % kname kernel.set_cache_size(32) return kernel def create_combined_kernel(kname, kparam, examples, train_mode, preproc): """A wrapper for creating combined kernels. kname, kparam and examples are lists. 
""" num_kernels = len(kname) feats['combined'] = CombinedFeatures() kernel = CombinedKernel() for kix in xrange(num_kernels): cur_kname = '%s%d' % (kname[kix],kix) (cur_feats, cur_preproc) = create_features(kname[kix], examples[kix], kparam[kix], train_mode, preproc) feats[cur_kname] = cur_feats cur_kernel = create_kernel(kname[kix], kparam[kix], cur_feats) kernel.append_kernel(cur_kernel) return (feats,kernel) def model2str(kparam,C,kp,shownames=True): """Generates a string describing the model parameters""" if kparam["modelsel_name"]==None or len(kparam["modelsel_params"])==1: if shownames: str="\tC=%1.1f" % C else: str="\t%1.2f" % C else: if type(kp)==type(int(0)): if shownames: str="\tC=%1.1f\t%s=%i" %(C, kparam["modelsel_name"], kp) else: str="\t%1.1f\t%i" %(C, kp) else: if shownames: str="\tC=%1.1f\t%s=%1.2f" %(C, kparam["modelsel_name"], kp) else: str="\t%1.1f\t%1.2f" %(C, kp) return str def train(trainex,trainlab,C,kname,kparam,seq_source,nuc_con): """Trains a SVM with the given kernel""" (feats_train, preproc) = create_features(kname,trainex, kparam, True, None, seq_source, nuc_con) if kname == 'wd': kparam['seqlength'] = len(trainex[0]) kernel = create_kernel(kname,kparam,feats_train) if kname == 'spec2' or kname == 'cumspec2': kernel.init(feats_train['combined'], feats_train['combined']) else: kernel.init(feats_train, feats_train) kernel.io.disable_progress() kernel.set_optimization_type(SLOWBUTMEMEFFICIENT) labels = BinaryLabels(numpy.array(trainlab,numpy.double)) # libsvm is fine for most kernels if kname in ('wd', 'spec', 'cumspec', 'spec2', 'cumspec2'): # for the string kernels there exist specific optimizations that are only effective when using # a LinAdd SVM implementation (e.g. 
SVM-light or GPBT-SVM) SVMClass = LinAddSVM elif kname == 'linear': SVMClass = LinearSVM else: SVMClass=DefaultSVM svm = SVMClass(C, kernel, labels) svm.io.disable_progress() svm.set_batch_computation_enabled(True) svm.set_linadd_enabled(True) svm.set_epsilon(1e-5) svm.parallel.set_num_threads(svm.parallel.get_num_cpus()) svm.train() return (svm, kernel, feats_train, preproc) def train_and_test(trainex,trainlab,testex,C,kname,kparam, seq_source, nuc_con): """Trains a SVM with the given kernel, and predict on the test examples""" (svm, kernel, feats_train, preproc) = train(trainex,trainlab,C,kname,kparam,seq_source,nuc_con) (feats_test, preproc) = create_features(kname, testex, kparam, False, preproc, seq_source, nuc_con) if kname == 'spec2' or kname == 'cumspec2': for feats in feats_train.values(): feats.io.disable_progress() for feats in feats_test.values(): feats.io.disable_progress() kernel.init(feats_train['combined'], feats_test['combined']) else: feats_train.io.disable_progress() feats_test.io.disable_progress() kernel.init(feats_train, feats_test) kernel.set_optimization_type(SLOWBUTMEMEFFICIENT) output = svm.apply().get_labels() return output def crossvalidation(cv, kname, kparam, C, all_examples, all_labels, seq_source, nuc_con): """Perform cross validation using an SVM cv -- the number of folds kernel -- the kernel used data -- the dataset, assumed to be compatible to kernel, label is in the first column """ print 'Using %i-fold crossvalidation' % cv partitions = getPartitionedSet(len(all_labels), cv) error = [] sum_accuracy = 0.0 sum_roc = 0.0 all_outputs=[0.0] * len(all_labels) all_split=[-1] * len(all_labels) for repetition in xrange(cv): XT, LT, XTE, LTE = getCurrentSplit(repetition, partitions, all_labels, all_examples) numpos = len(where(array(LTE)>0)[0]) svmout = train_and_test(XT, LT, XTE, C, kname, kparam, seq_source, nuc_con) for i in xrange(len(svmout)): all_outputs[partitions[repetition][i]] = svmout[i] all_split[partitions[repetition][i]] = 
repetition ; return (all_outputs, all_split) def evaluate(predictions, splitassignments, labels, roc_fname=None, prc_fname=None): """Evaluate prediction results """ res_str = "" cv = 1 if splitassignments!=None: for split in splitassignments: if split+1>cv: cv=int(split+1) if cv>1: res_str = "Evaluating on %i examples in %i splits\n" % (len(labels),cv) else: res_str = "Evaluating on %i examples\n" % len(labels) output_splits = cv* [[]] label_splits = cv* [[]] for i in xrange(cv): label_splits[i]=[] output_splits[i]=[] for i in xrange(0,len(labels)): if cv>1: split=int(splitassignments[i]) else: split=0 output_splits[split].append(predictions[i]) label_splits[split].append(labels[i]) error = [] sum_accuracy = 0.0 sum_roc = 0.0 sum_prc = 0.0 for split in xrange(cv): res_str += 'Split %d\n' % (split+1) LTE = label_splits[split] ; svmout = output_splits[split] numpos=0 for l in LTE: if l==1: numpos+=1 istwoclass = numpos>0 and numpos<len(LTE) res_str += ' number of positive examples = %i\n' % numpos res_str += ' number of negative examples = %i\n' % (len(LTE)-numpos) if istwoclass: auROC = calcroc(svmout,LTE) res_str += ' Area under ROC curve = %2.1f %%\n' % (100.0*auROC) sum_roc += auROC if roc_fname!=None: if split!=cv-1: plots.plotroc(svmout, LTE, split==cv-1, None, "ROC curve of SVM, split %i" % (split+1)) else: plots.plotroc(svmout, LTE, split==cv-1, roc_fname, "ROC curve of SVM, split %i" % (split+1)) auPRC = calcprc(svmout,LTE) res_str += ' Area under PRC curve = %2.1f %%\n' % (100.0*auPRC) sum_prc += auPRC if prc_fname!=None: if split!=cv-1: plots.plotprc(svmout, LTE, None, "PRC curve of SVM, split %i" % (split+1)) else: plots.plotprc(svmout, LTE, prc_fname, "PRC curve of SVM, split %i" % (split+1)) acc = accuracy(svmout, LTE) res_str += ' accuracy (at threshold 0) = %2.1f %% \n' % (100.0*acc) sum_accuracy += acc numpos=0 for l in labels: if l==1: numpos+=1 mean_roc = sum_roc/cv mean_prc = sum_prc/cv mean_acc = sum_accuracy/cv res_str += 'Averages\n' res_str += 
' number of positive examples = %i\n' % round(numpos/cv) res_str += ' number of negative examples = %i\n' % round((len(labels)-numpos)/cv) res_str += ' Area under ROC curve = %2.1f %%\n' % (100.0*mean_roc) res_str += ' Area under PRC curve = %2.1f %%\n' % (100.0*mean_prc) res_str += ' accuracy (at threshold 0) = %2.1f %% \n' % (100.0*mean_acc) return (res_str,mean_roc,mean_prc,mean_acc) def svm_cv(argv): """A top level script to parse input parameters and run cross validation""" assert(argv[1]=='cv') if len(argv)<5:sys.stderr.write("usage: %s cv repeat C kernelname [kernelparameters] [arff|fasta] inputfiles outputfile [dna|protein] non(nucleotide|amino)converter \n" % argv[0]);sys.exit(-1) # parse input parameters cv = int(argv[2]) C = float(argv[3]) (kernelname,kparam,argv_rest) = parse.parse_kernel_param(argv[4:],False) (examples,labels,argv_rest) = parse.parse_input_file_train(kernelname, argv_rest) (seq_source, nuc_con) = ('', '') if kernelname == 'spec' or kernelname == 'wd': if len(argv_rest)<1:sys.stderr.write("outputfile [dna|protein] non(nucleotide|amino)converter are missing\n");sys.exit(-1) if len(argv_rest)<2:sys.stderr.write("[dna|protein] non(nucleotide|amino)converter are missing\n");sys.exit(-1) if len(argv_rest)<3: if argv_rest[-1] == 'dna': sys.stderr.write("non-nucleotide converter like [A|T|C|G|R|Y|N] is missing. Cannot continue.\n") sys.exit(-1) elif argv_rest[-1] == 'protein': sys.stderr.write("non-amino acid converter like [G|P|A|V|L|I|M|C|F|Y|W|H|K|R|Q|N|E|D|S|T|random] is missing. 
Cannot continue.\n") sys.exit(-1) else: sys.stderr.write("Here expect FASTA sequence type as [dna|protein] instead of -"+ argv_rest[-1] +"- Cannot continue.\n") sys.exit(-1) if len(argv_rest)>3:sys.stderr.write("Too many arguments\n");sys.exit(-1) seq_source = argv_rest[1] nuc_con = argv_rest[2] if kernelname == 'linear' or kernelname == 'gauss' or kernelname == 'poly': if len(argv_rest)<1:sys.stderr.write("outputfile misssing\n");sys.exit(-1) if len(argv_rest)>1:sys.stderr.write("Too many arguments\n");sys.exit(-1) outfilename = argv_rest[0] utils.check_params(kparam, C, len(examples[0])) # run cross-validation (all_outputs, all_split) = crossvalidation(cv, kernelname, kparam, C, examples, labels, seq_source, nuc_con) try: f = open(outfilename, 'w+') except: sys.stderr.write('Fails to open the outputfile at ' + outfilename + ' Cannot continue.\n') sys.exit(-1) res_str = '#example\toutput\tsplit\n' f.write(res_str) for ix in xrange(len(all_outputs)): res_str = '%d\t%2.7f\t%d\n' % (ix,all_outputs[ix],all_split[ix]) f.write(res_str) f.close() def svm_modelsel(argv): """A top level script to parse input parameters and run model selection""" assert(argv[1]=='modelsel') if len(argv)<5:sys.stderr.write("usage: %s modelsel repeat Cs kernelname [kernelparameters] [arff|fasta] inputfiles outputfile [dna|protein] non(nucleotide|amino)converter\n" % argv[0]);sys.exit(-1) # parse input parameters cv = int(argv[2]) Cs = parse.parse_float_list(argv[3]) (kernelname,kparam,argv_rest) = parse.parse_kernel_param(argv[4:], True) (examples,labels,argv_rest) = parse.parse_input_file_train(kernelname, argv_rest) (seq_source, nuc_con) = ('', '') if kernelname == 'spec' or kernelname == 'wd': if len(argv_rest)<1:sys.stderr.write("outputfile [dna|protein] non(nucleotide|amino)converter are missing\n");sys.exit(-1) if len(argv_rest)<2:sys.stderr.write("[dna|protein] non(nucleotide|amino)converter are missing\n");sys.exit(-1) if len(argv_rest)<3: if argv_rest[-1] == 'dna': 
sys.stderr.write("non-nucleotide converter like [A|T|C|G|R|Y|N] is missing. Cannot continue.\n") sys.exit(-1) elif argv_rest[-1] == 'protein': sys.stderr.write("non-amino acid converter like [G|P|A|V|L|I|M|C|F|Y|W|H|K|R|Q|N|E|D|S|T|random] is missing. Cannot continue.\n") sys.exit(-1) else: sys.stderr.write("Here expect FASTA sequence type as [dna|protein] instead of -"+ argv_rest[-1] +"- Cannot continue.\n") sys.exit(-1) if len(argv_rest)>3:sys.stderr.write("Too many arguments\n");sys.exit(-1) seq_source = argv_rest[1] nuc_con = argv_rest[2] if kernelname == 'linear' or kernelname == 'gauss' or kernelname== 'poly': if len(argv_rest)<1:sys.stderr.write("outputfile missing\n");sys.exit(-1) if len(argv_rest)>1:sys.stderr.write("Too many arguments\n");sys.exit(-1) outfilename = argv_rest[0] # run cross-validation mean_rocs=[] ; mean_prcs=[] ; mean_accs=[] ; all_Cs = [] ; all_kparam=[] ; if kparam["modelsel_name"]==None: for C in Cs: utils.check_params(kparam, C, len(examples[0])) (all_outputs, all_split) = crossvalidation(cv, kernelname, kparam, C, examples, labels, seq_source, nuc_con) (res_str, mean_roc, mean_prc, mean_acc) = evaluate(all_outputs, all_split, labels) mean_rocs.append(mean_roc) mean_prcs.append(mean_prc) mean_accs.append(mean_acc) all_Cs.append(C) all_kparam.append(None) else: # also optimize one kernel parameter for C in Cs: for kp in kparam["modelsel_params"]: kparam[kparam["modelsel_name"]] = kp utils.check_params(kparam, C, len(examples[0])) (all_outputs, all_split) = crossvalidation(cv, kernelname, kparam, C, examples, labels, seq_source, nuc_con) (res_str, mean_roc, mean_prc, mean_acc) = evaluate(all_outputs, all_split, labels) mean_rocs.append(mean_roc) mean_prcs.append(mean_prc) mean_accs.append(mean_acc) all_Cs.append(C) all_kparam.append(kp) max_roc=numpy.max(numpy.array(mean_rocs)) max_prc=numpy.max(numpy.array(mean_prcs)) max_acc=numpy.max(numpy.array(mean_accs)) try: f = open(outfilename, 'w+') except: sys.stderr.write('Fails to open the 
outputfile at ' + outfilename + ' Cannot continue.\n') sys.exit(-1) if kparam["modelsel_name"]==None or len(kparam["modelsel_params"])==1: detail_str = "\tC\tROC\tPRC\tAccuracy (at threshold 0)\n" else: detail_str = "\tC\t%s\tROC\tPRC\tAccuracy (at threshold 0)\n" % kparam["modelsel_name"] best_roc_str='' best_prc_str='' best_acc_str='' for i in xrange(len(all_Cs)): # determine the best parameter combinations if mean_rocs[i]==max_roc: rocsym='+' best_roc_str+=model2str(kparam, all_Cs[i], all_kparam[i])+'\n' else: rocsym=' ' if mean_prcs[i]==max_prc: prcsym='+' best_prc_str+=model2str(kparam, all_Cs[i], all_kparam[i])+'\n' else: prcsym=' ' if mean_accs[i]==max_acc: accsym='+' best_acc_str+=model2str(kparam, all_Cs[i], all_kparam[i])+'\n' else: accsym=' ' detail_str+=model2str(kparam, all_Cs[i], all_kparam[i], False)+'\t' if kparam["modelsel_name"]==None or len(kparam["modelsel_params"])==1: detail_str += '%c%2.1f%%\t%c%2.1f%%\t%c%2.1f%%\n' % (rocsym, 100*mean_rocs[i], prcsym, 100*mean_prcs[i], accsym, 100*mean_accs[i]) else: detail_str += '%c%2.1f%%\t%c%2.1f%%\t%c%2.1f%%\n' % (rocsym, 100*mean_rocs[i], prcsym, 100*mean_prcs[i], accsym, 100*mean_accs[i]) f.write('Best model(s) according to ROC measure:\n%s' % best_roc_str) f.write('\nBest model(s) according to PRC measure:\n%s' % best_prc_str) f.write('\nBest model(s) according to accuracy measure:\n%s' % best_acc_str) f.write('\nDetailed results:\n') f.write(detail_str) f.close() def svm_pred(argv): """A top level script to parse input parameters and train and predict""" assert(argv[1]=='pred') if len(argv)<6:sys.stderr.write("usage: %s pred C kernelname kernelparameters [arff|fasta] inputfiles outputfile [dna|protein] non(nucleotide|amino)converter\n" % argv[0]);sys.exit(-1) # parse input parameters C = float(argv[2]) (kernelname,kparam,argv_rest) = parse.parse_kernel_param(argv[3:],False) (trainex, trainlab, testex, argv_rest) = parse.parse_input_file_train_test(kernelname, argv_rest) (seq_source, nuc_con) = ('', 
'') if kernelname == 'spec' or kernelname == 'wd' or kernelname == 'localimprove' or kernelname == 'localalign': if len(argv_rest)<1:sys.stderr.write("outputfile [dna|protein] non(nucleotide|amino)converter are missing\n");sys.exit(-1) if len(argv_rest)<2:sys.stderr.write("[dna|protein] non(nucleotide|amino)converter are missing\n");sys.exit(-1) if len(argv_rest)<3: if argv_rest[-1] == 'dna': sys.stderr.write("non-nucleotide converter like [A|T|C|G|R|Y|N] is missing. Cannot continue.\n") sys.exit(-1) elif argv_rest[-1] == 'protein': sys.stderr.write("non-amino acid converter like [G|P|A|V|L|I|M|C|F|Y|W|H|K|R|Q|N|E|D|S|T|random] is missing. Cannot continue.\n") sys.exit(-1) else: sys.stderr.write("Here expect FASTA sequence type as [dna|protein] instead of -"+ argv_rest[-1] +"- Cannot continue.\n") sys.exit(-1) if len(argv_rest)>3:sys.stderr.write("Too many arguments\n");sys.exit(-1) seq_source = argv_rest[1] nuc_con = argv_rest[2] if kernelname == 'linear' or kernelname== 'poly' or kernelname == 'gauss': if len(argv_rest)<1:sys.stderr.write("outputfile missing\n");sys.exit(-1) if len(argv_rest)>1:sys.stderr.write("Too many arguments\n");sys.exit(-1) outfilename = argv_rest[0] utils.check_params(kparam, C, len(trainex[0])) # run training and testing svmout = train_and_test(trainex, trainlab, testex, C, kernelname, kparam, seq_source, nuc_con) # write output file try: f = open(outfilename,'w') except: sys.stderr.write('Fails to open the outputfile at ' + outfilename + ' Cannot continue.\n') sys.exit(-1) res_str = '#example\toutput\n' f.write(res_str) for ix in xrange(len(svmout)): res_str = str(ix)+'\t'+str(svmout[ix])+'\n' f.write(res_str) f.close() def svm_eval(argv): """A top level script to parse input parameters and evaluate""" assert(argv[1]=='eval') if len(argv)<6:sys.stderr.write("usage: %s eval predictionfile [arff|fasta] inputfiles outputfile [roc|prc figure.png]\n" % argv[0]);sys.exit(-1) # parse input parameters (predictions, splitassignments) = 
parse.parse_prediction_file(argv[2]) (trainex, trainlab, argv_rest) = parse.parse_input_file_train(None, argv[3:]) if len(argv_rest)<1:sys.stderr.write("Output file missing\n");sys.exit(-1) if len(argv_rest)>3:sys.stderr.write("Too many arguments\n");sys.exit(-1) outfilename = argv_rest[0] roc_fname = None prc_fname = None if len(argv_rest)>2: if argv_rest[1]=='roc': roc_fname=argv_rest[2] elif argv_rest[1]=='prc': prc_fname=argv_rest[2] else: sys.stderr.write('Usage: [roc|prc]') sys.exit(-1) # run training and testing (res_str,mean_roc,mean_prc,mean_acc) = evaluate(predictions, splitassignments, trainlab, roc_fname, prc_fname) # write output file try: f = open(outfilename,'w') except: sys.stderr.write('Fails to open the outputfile at ' + outfilename + ' Cannot continue.\n') sys.exit(-1) f.write(res_str) f.close() def svm_poim(argv): """A top level script to parse input parameters and plot poims""" assert(argv[1]=='poim') if len(argv)<7:sys.stderr.write("usage: %s poim C poimdegree wd [kernelparameters] [arff|fasta] inputfiles poim.png [dna|protein] non(nucleotide|amino)converter\n" % argv[0]);sys.exit(-1) # parse input parameters C = float(argv[2]) poimdegree = int(argv[3]) (kernelname,kparam,argv_rest) = parse.parse_kernel_param(argv[4:], False) (examples,labels,argv_rest) = parse.parse_input_file_train(kernelname, argv_rest) if len(argv_rest)<1:sys.stderr.write("poim.png [dna|protein] non(nucleotide|amino)converter are missing\n");sys.exit(-1) if len(argv_rest)<2:sys.stderr.write("[dna|protein] non(nucleotide|amino)converter are missing\n");sys.exit(-1) if len(argv_rest)<3: if argv_rest[-1] == 'dna': sys.stderr.write("non-nucleotide converter like [A|T|C|G|R|Y|N] is missing. Cannot continue.\n") sys.exit(-1) elif argv_rest[-1] == 'protein': sys.stderr.write("non-amino acid converter like [G|P|A|V|L|I|M|C|F|Y|W|H|K|R|Q|N|E|D|S|T|random] is missing. 
Cannot continue.\n") sys.exit(-1) else: sys.stderr.write("Here expect FASTA sequence type as [dna|protein] instead of -"+ argv_rest[-1] +"- Cannot continue.\n") sys.exit(-1) if len(argv_rest)>3:sys.stderr.write("Too many arguments\n");sys.exit(-1) poimfilename = argv_rest[0] seq_source = argv_rest[1] nuc_con = argv_rest[2] utils.check_params(kparam, C, len(examples[0])) # train svm and compute POIMs (svm, kernel, feats_train, preproc) = train(examples,labels,C,kernelname,kparam,seq_source,nuc_con) (poim, max_poim, diff_poim, poim_totalmass) = compute_poims(svm, kernel, poimdegree, len(examples[0])) # plot poims plots.plot_poims(poimfilename, poim, max_poim, diff_poim, poim_totalmass, poimdegree, len(examples[0]))
mit
SaltusVita/ReoGrab
Spiders.py
1
6942
''' Created on 2 сент. 2016 г. @author: garet ''' import urllib.request import queue import sqlite3 import re import json from urllib.parse import urlparse from Parser import HtmlPage import lxml class BaseSpider: def __init__(self): self.urls = QueueUrls() self.cache = SqliteCache('some_db') def add_urls(self, urls): self.urls.add_urls(urls) def add_urls_routed(self, urls): result = [] for url in urls: if self.fetch_route(url) is not None: result.append(url) self.add_urls(result) def add_route(self, route): self.routes.append(route) def add_routes(self, routes): pass def fetch_route(self, url): if not hasattr(self, 'routes'): return for route in self.routes: part_url = re.match(route['re'], url) if part_url is not None and part_url.group(0) == url: if 'skip' in route and route['skip'] is True: break return route return None def save_cache(self, url, data=None): pass def get_cache(self, url): pass def run(self): self.init() self.work() # self.clear() def init(self): if hasattr(self, 'start_urls'): self.add_urls(self.start_urls) if hasattr(self, 'routes'): self.add_routes(self.routes) def work(self): while not self.urls.empty(): url = self.urls.get_url() response = self.get_page(url) route = self.fetch_route(url) if route is None: continue if 'type' in route and route['type'] == 'sitemap': urls = self.sitemap(response) self.add_urls_routed(urls) continue if 'name' in route and hasattr(self, route['name']): getattr(self, route['name'])(response) pass def sitemap(self, data): sitemap_text = data.text.replace('<?xml version="1.0" encoding="UTF-8"?>', '') doc = lxml.etree.XML(sitemap_text) ns = {"d": "http://www.sitemaps.org/schemas/sitemap/0.9"} return doc.xpath("//d:loc/text()", namespaces=ns) def charset(self, headers): encode = 'UTF-8' if hasattr(headers, 'Content-Type'): m = re.search('charset=([a-z 0-9\-\_]+)', self.headers, re.IGNORECASE) if m: encode = m.group(1) return encode def get_page(self, url): r = self.cache.get(url) if r is not None: print(r['url']) 
return Response(r) r = self.get_data(url) self.cache.set(r) print('{0} --- {1}'.format(url, r['url'])) return Response(r) @staticmethod def get_data(url): try: r = urllib.request.urlopen(url) out = { 'url': r.geturl(), 'code': r.getcode(), 'headers': json.dumps(r.getheaders()), 'data': r.read() } return out except urllib.error.HTTPError as e: out = { 'url': e.geturl(), 'code': e.getcode(), 'headers': json.dumps(e.getheaders()), 'data': e.read() } return out class QueueUrls: def __init__(self): self._urls_queue = queue.Queue() self._urls_set = set() def add_url(self, url): u = urlparse(url) url = u[0] + '://' + u[1] + u[2] + u[3] if u[4] != '': url += '?' + u[4] if url not in self._urls_set: self._urls_queue.put(url) self._urls_set.add(url) def add_urls(self, urls): urls_type = type(urls) if urls_type is str: self.add_url(urls) return for url in urls: self.add_url(url) def exist_url(self, url): if url in self._urls_set: return True return False def get_url(self): return self._urls_queue.get() def empty(self): return self._urls_queue.empty() class SqliteCache: def __init__(self, db_name): self.db_name = db_name self.init_db() def init_db(self): file = self.db_name + '.sqlite' self._db = sqlite3.connect(file) self._cursor = self._db.cursor() # Create table sql = """ CREATE TABLE IF NOT EXISTS tbl_urls( url TEXT primary key not null, code INTEGER, headers TEXT, data BLOB, time TIMESTAMP DEFAULT CURRENT_TIMESTAMP );""" self._cursor.execute(sql) def get(self, url): if self._cursor is None: self.InitDB() sql = "SELECT * FROM tbl_urls WHERE url=?;" self._cursor.execute(sql, (url,)) row = self._cursor.fetchone() if row is not None: out = { 'url': row[0], 'code': row[1], 'headers': json.loads(row[2]), 'data': row[3] } return out return None def set(self, dat): if self._cursor is None: self.init_db() sql = "INSERT OR REPLACE INTO tbl_urls(url,code,headers,data) VALUES (?,?,?,?);" self._cursor.execute(sql, (dat['url'], dat['code'], dat['headers'], dat['data'])) 
self._db.commit() class Download: def __init__(self): self.method = 'GET' self.user_agent = self.random_user_agent() @staticmethod def random_user_agent(self, browser=None, os=None): return 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 8.0; WOW64; Trident/5.0; .NET CLR 2.7.40781; .NET4.0E; en-SG)' @staticmethod def get_page(url): r = urllib.request.urlopen(url) code = r.getcode() headers = r.getheaders() data = r.read() url = r.geturl() # return Response(r) class Response: def __init__(self, res): self.code = res['code'] self.headers = res['headers'] self.data = res['data'] self.url = res['url'] def charset(self): encode = 'UTF-8' if hasattr(self.headers, 'Content-Type'): m = re.search('charset=([a-z 0-9\-\_]+)', self.headers, re.IGNORECASE) if m: encode = m.group(1) return encode @property def text(self): encode = self.charset() return self.data.decode(encode) def parser(self): return HtmlPage(self.html, self.url)
bsd-3-clause
hip-odoo/odoo
addons/payment_adyen/tests/test_adyen.py
24
2956
# -*- coding: utf-8 -*- from lxml import objectify import urlparse from odoo.addons.payment.tests.common import PaymentAcquirerCommon from odoo.addons.payment_adyen.controllers.main import AdyenController class AdyenCommon(PaymentAcquirerCommon): def setUp(self): super(AdyenCommon, self).setUp() # some CC (always use expiration date 06 / 2016, cvc 737, cid 7373 (amex)) self.amex = (('370000000000002', '7373')) self.dinersclub = (('36006666333344', '737')) self.discover = (('6011601160116611', '737'), ('644564456445644', '737')) self.jcb = (('3530111333300000', '737')) self.mastercard = (('5555444433331111', '737'), ('5555555555554444', '737')) self.visa = (('4111 1111 1111 1111', '737'), ('4444333322221111', '737')) self.mcdebit = (('5500000000000004', '737')) self.visadebit = (('4400000000000008', '737')) self.maestro = (('6731012345678906', '737')) self.laser = (('630495060000000000', '737')) self.hipercard = (('6062828888666688', '737')) self.dsmastercard = (('521234567890 1234', '737', 'user', 'password')) self.dsvisa = (('4212345678901237', '737', 'user', 'password')) self.mistercash = (('6703444444444449', None, 'user', 'password')) class AdyenForm(AdyenCommon): def test_10_adyen_form_render(self): # be sure not to do stupid things adyen = self.env.ref('payment.payment_acquirer_adyen') self.assertEqual(adyen.environment, 'test', 'test without test environment') # ---------------------------------------- # Test: button direct rendering # ---------------------------------------- base_url = self.env['ir.config_parameter'].get_param('web.base.url') form_values = { 'merchantAccount': 'OpenERPCOM', 'merchantReference': 'test_ref0', 'skinCode': 'cbqYWvVL', 'paymentAmount': '1', 'currencyCode': 'EUR', 'resURL': '%s' % urlparse.urljoin(base_url, AdyenController._return_url), } # render the button res = adyen.render( 'test_ref0', 0.01, self.currency_euro.id, partner_id=None, partner_values=self.buyer_values) # check form result tree = objectify.fromstring(res) 
self.assertEqual(tree.get('action'), 'https://test.adyen.com/hpp/pay.shtml', 'adyen: wrong form POST url') for form_input in tree.input: if form_input.get('name') in ['submit', 'shipBeforeDate', 'sessionValidity', 'shopperLocale', 'merchantSig']: continue self.assertEqual( form_input.get('value'), form_values[form_input.get('name')], 'adyen: wrong value for input %s: received %s instead of %s' % (form_input.get('name'), form_input.get('value'), form_values[form_input.get('name')]) )
agpl-3.0
JonathanStein/odoo
addons/hr_holidays/wizard/__init__.py
442
1122
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import hr_holidays_summary_department import hr_holidays_summary_employees # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
mojmir-svoboda/BlackBoxTT
3rd_party/yaml-cpp/test/gmock-1.7.0/scripts/generator/cpp/ast.py
268
62296
#!/usr/bin/env python # # Copyright 2007 Neal Norwitz # Portions Copyright 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Generate an Abstract Syntax Tree (AST) for C++.""" __author__ = 'nnorwitz@google.com (Neal Norwitz)' # TODO: # * Tokens should never be exported, need to convert to Nodes # (return types, parameters, etc.) # * Handle static class data for templatized classes # * Handle casts (both C++ and C-style) # * Handle conditions and loops (if/else, switch, for, while/do) # # TODO much, much later: # * Handle #define # * exceptions try: # Python 3.x import builtins except ImportError: # Python 2.x import __builtin__ as builtins import sys import traceback from cpp import keywords from cpp import tokenize from cpp import utils if not hasattr(builtins, 'reversed'): # Support Python 2.3 and earlier. def reversed(seq): for i in range(len(seq)-1, -1, -1): yield seq[i] if not hasattr(builtins, 'next'): # Support Python 2.5 and earlier. def next(obj): return obj.next() VISIBILITY_PUBLIC, VISIBILITY_PROTECTED, VISIBILITY_PRIVATE = range(3) FUNCTION_NONE = 0x00 FUNCTION_CONST = 0x01 FUNCTION_VIRTUAL = 0x02 FUNCTION_PURE_VIRTUAL = 0x04 FUNCTION_CTOR = 0x08 FUNCTION_DTOR = 0x10 FUNCTION_ATTRIBUTE = 0x20 FUNCTION_UNKNOWN_ANNOTATION = 0x40 FUNCTION_THROW = 0x80 """ These are currently unused. Should really handle these properly at some point. 
TYPE_MODIFIER_INLINE = 0x010000 TYPE_MODIFIER_EXTERN = 0x020000 TYPE_MODIFIER_STATIC = 0x040000 TYPE_MODIFIER_CONST = 0x080000 TYPE_MODIFIER_REGISTER = 0x100000 TYPE_MODIFIER_VOLATILE = 0x200000 TYPE_MODIFIER_MUTABLE = 0x400000 TYPE_MODIFIER_MAP = { 'inline': TYPE_MODIFIER_INLINE, 'extern': TYPE_MODIFIER_EXTERN, 'static': TYPE_MODIFIER_STATIC, 'const': TYPE_MODIFIER_CONST, 'register': TYPE_MODIFIER_REGISTER, 'volatile': TYPE_MODIFIER_VOLATILE, 'mutable': TYPE_MODIFIER_MUTABLE, } """ _INTERNAL_TOKEN = 'internal' _NAMESPACE_POP = 'ns-pop' # TODO(nnorwitz): use this as a singleton for templated_types, etc # where we don't want to create a new empty dict each time. It is also const. class _NullDict(object): __contains__ = lambda self: False keys = values = items = iterkeys = itervalues = iteritems = lambda self: () # TODO(nnorwitz): move AST nodes into a separate module. class Node(object): """Base AST node.""" def __init__(self, start, end): self.start = start self.end = end def IsDeclaration(self): """Returns bool if this node is a declaration.""" return False def IsDefinition(self): """Returns bool if this node is a definition.""" return False def IsExportable(self): """Returns bool if this node exportable from a header file.""" return False def Requires(self, node): """Does this AST node require the definition of the node passed in?""" return False def XXX__str__(self): return self._StringHelper(self.__class__.__name__, '') def _StringHelper(self, name, suffix): if not utils.DEBUG: return '%s(%s)' % (name, suffix) return '%s(%d, %d, %s)' % (name, self.start, self.end, suffix) def __repr__(self): return str(self) class Define(Node): def __init__(self, start, end, name, definition): Node.__init__(self, start, end) self.name = name self.definition = definition def __str__(self): value = '%s %s' % (self.name, self.definition) return self._StringHelper(self.__class__.__name__, value) class Include(Node): def __init__(self, start, end, filename, system): 
Node.__init__(self, start, end) self.filename = filename self.system = system def __str__(self): fmt = '"%s"' if self.system: fmt = '<%s>' return self._StringHelper(self.__class__.__name__, fmt % self.filename) class Goto(Node): def __init__(self, start, end, label): Node.__init__(self, start, end) self.label = label def __str__(self): return self._StringHelper(self.__class__.__name__, str(self.label)) class Expr(Node): def __init__(self, start, end, expr): Node.__init__(self, start, end) self.expr = expr def Requires(self, node): # TODO(nnorwitz): impl. return False def __str__(self): return self._StringHelper(self.__class__.__name__, str(self.expr)) class Return(Expr): pass class Delete(Expr): pass class Friend(Expr): def __init__(self, start, end, expr, namespace): Expr.__init__(self, start, end, expr) self.namespace = namespace[:] class Using(Node): def __init__(self, start, end, names): Node.__init__(self, start, end) self.names = names def __str__(self): return self._StringHelper(self.__class__.__name__, str(self.names)) class Parameter(Node): def __init__(self, start, end, name, parameter_type, default): Node.__init__(self, start, end) self.name = name self.type = parameter_type self.default = default def Requires(self, node): # TODO(nnorwitz): handle namespaces, etc. 
return self.type.name == node.name def __str__(self): name = str(self.type) suffix = '%s %s' % (name, self.name) if self.default: suffix += ' = ' + ''.join([d.name for d in self.default]) return self._StringHelper(self.__class__.__name__, suffix) class _GenericDeclaration(Node): def __init__(self, start, end, name, namespace): Node.__init__(self, start, end) self.name = name self.namespace = namespace[:] def FullName(self): prefix = '' if self.namespace and self.namespace[-1]: prefix = '::'.join(self.namespace) + '::' return prefix + self.name def _TypeStringHelper(self, suffix): if self.namespace: names = [n or '<anonymous>' for n in self.namespace] suffix += ' in ' + '::'.join(names) return self._StringHelper(self.__class__.__name__, suffix) # TODO(nnorwitz): merge with Parameter in some way? class VariableDeclaration(_GenericDeclaration): def __init__(self, start, end, name, var_type, initial_value, namespace): _GenericDeclaration.__init__(self, start, end, name, namespace) self.type = var_type self.initial_value = initial_value def Requires(self, node): # TODO(nnorwitz): handle namespaces, etc. return self.type.name == node.name def ToString(self): """Return a string that tries to reconstitute the variable decl.""" suffix = '%s %s' % (self.type, self.name) if self.initial_value: suffix += ' = ' + self.initial_value return suffix def __str__(self): return self._StringHelper(self.__class__.__name__, self.ToString()) class Typedef(_GenericDeclaration): def __init__(self, start, end, name, alias, namespace): _GenericDeclaration.__init__(self, start, end, name, namespace) self.alias = alias def IsDefinition(self): return True def IsExportable(self): return True def Requires(self, node): # TODO(nnorwitz): handle namespaces, etc. 
name = node.name for token in self.alias: if token is not None and name == token.name: return True return False def __str__(self): suffix = '%s, %s' % (self.name, self.alias) return self._TypeStringHelper(suffix) class _NestedType(_GenericDeclaration): def __init__(self, start, end, name, fields, namespace): _GenericDeclaration.__init__(self, start, end, name, namespace) self.fields = fields def IsDefinition(self): return True def IsExportable(self): return True def __str__(self): suffix = '%s, {%s}' % (self.name, self.fields) return self._TypeStringHelper(suffix) class Union(_NestedType): pass class Enum(_NestedType): pass class Class(_GenericDeclaration): def __init__(self, start, end, name, bases, templated_types, body, namespace): _GenericDeclaration.__init__(self, start, end, name, namespace) self.bases = bases self.body = body self.templated_types = templated_types def IsDeclaration(self): return self.bases is None and self.body is None def IsDefinition(self): return not self.IsDeclaration() def IsExportable(self): return not self.IsDeclaration() def Requires(self, node): # TODO(nnorwitz): handle namespaces, etc. if self.bases: for token_list in self.bases: # TODO(nnorwitz): bases are tokens, do name comparision. for token in token_list: if token.name == node.name: return True # TODO(nnorwitz): search in body too. 
return False def __str__(self): name = self.name if self.templated_types: name += '<%s>' % self.templated_types suffix = '%s, %s, %s' % (name, self.bases, self.body) return self._TypeStringHelper(suffix) class Struct(Class): pass class Function(_GenericDeclaration): def __init__(self, start, end, name, return_type, parameters, modifiers, templated_types, body, namespace): _GenericDeclaration.__init__(self, start, end, name, namespace) converter = TypeConverter(namespace) self.return_type = converter.CreateReturnType(return_type) self.parameters = converter.ToParameters(parameters) self.modifiers = modifiers self.body = body self.templated_types = templated_types def IsDeclaration(self): return self.body is None def IsDefinition(self): return self.body is not None def IsExportable(self): if self.return_type and 'static' in self.return_type.modifiers: return False return None not in self.namespace def Requires(self, node): if self.parameters: # TODO(nnorwitz): parameters are tokens, do name comparision. for p in self.parameters: if p.name == node.name: return True # TODO(nnorwitz): search in body too. return False def __str__(self): # TODO(nnorwitz): add templated_types. suffix = ('%s %s(%s), 0x%02x, %s' % (self.return_type, self.name, self.parameters, self.modifiers, self.body)) return self._TypeStringHelper(suffix) class Method(Function): def __init__(self, start, end, name, in_class, return_type, parameters, modifiers, templated_types, body, namespace): Function.__init__(self, start, end, name, return_type, parameters, modifiers, templated_types, body, namespace) # TODO(nnorwitz): in_class could also be a namespace which can # mess up finding functions properly. 
self.in_class = in_class class Type(_GenericDeclaration): """Type used for any variable (eg class, primitive, struct, etc).""" def __init__(self, start, end, name, templated_types, modifiers, reference, pointer, array): """ Args: name: str name of main type templated_types: [Class (Type?)] template type info between <> modifiers: [str] type modifiers (keywords) eg, const, mutable, etc. reference, pointer, array: bools """ _GenericDeclaration.__init__(self, start, end, name, []) self.templated_types = templated_types if not name and modifiers: self.name = modifiers.pop() self.modifiers = modifiers self.reference = reference self.pointer = pointer self.array = array def __str__(self): prefix = '' if self.modifiers: prefix = ' '.join(self.modifiers) + ' ' name = str(self.name) if self.templated_types: name += '<%s>' % self.templated_types suffix = prefix + name if self.reference: suffix += '&' if self.pointer: suffix += '*' if self.array: suffix += '[]' return self._TypeStringHelper(suffix) # By definition, Is* are always False. A Type can only exist in # some sort of variable declaration, parameter, or return value. def IsDeclaration(self): return False def IsDefinition(self): return False def IsExportable(self): return False class TypeConverter(object): def __init__(self, namespace_stack): self.namespace_stack = namespace_stack def _GetTemplateEnd(self, tokens, start): count = 1 end = start while 1: token = tokens[end] end += 1 if token.name == '<': count += 1 elif token.name == '>': count -= 1 if count == 0: break return tokens[start:end-1], end def ToType(self, tokens): """Convert [Token,...] to [Class(...), ] useful for base classes. For example, code like class Foo : public Bar<x, y> { ... }; the "Bar<x, y>" portion gets converted to an AST. Returns: [Class(...), ...] """ result = [] name_tokens = [] reference = pointer = array = False def AddType(templated_types): # Partition tokens into name and modifier tokens. 
names = [] modifiers = [] for t in name_tokens: if keywords.IsKeyword(t.name): modifiers.append(t.name) else: names.append(t.name) name = ''.join(names) result.append(Type(name_tokens[0].start, name_tokens[-1].end, name, templated_types, modifiers, reference, pointer, array)) del name_tokens[:] i = 0 end = len(tokens) while i < end: token = tokens[i] if token.name == '<': new_tokens, new_end = self._GetTemplateEnd(tokens, i+1) AddType(self.ToType(new_tokens)) # If there is a comma after the template, we need to consume # that here otherwise it becomes part of the name. i = new_end reference = pointer = array = False elif token.name == ',': AddType([]) reference = pointer = array = False elif token.name == '*': pointer = True elif token.name == '&': reference = True elif token.name == '[': pointer = True elif token.name == ']': pass else: name_tokens.append(token) i += 1 if name_tokens: # No '<' in the tokens, just a simple name and no template. AddType([]) return result def DeclarationToParts(self, parts, needs_name_removed): name = None default = [] if needs_name_removed: # Handle default (initial) values properly. for i, t in enumerate(parts): if t.name == '=': default = parts[i+1:] name = parts[i-1].name if name == ']' and parts[i-2].name == '[': name = parts[i-3].name i -= 1 parts = parts[:i-1] break else: if parts[-1].token_type == tokenize.NAME: name = parts.pop().name else: # TODO(nnorwitz): this is a hack that happens for code like # Register(Foo<T>); where it thinks this is a function call # but it's actually a declaration. name = '???' modifiers = [] type_name = [] other_tokens = [] templated_types = [] i = 0 end = len(parts) while i < end: p = parts[i] if keywords.IsKeyword(p.name): modifiers.append(p.name) elif p.name == '<': templated_tokens, new_end = self._GetTemplateEnd(parts, i+1) templated_types = self.ToType(templated_tokens) i = new_end - 1 # Don't add a spurious :: to data members being initialized. 
next_index = i + 1 if next_index < end and parts[next_index].name == '::': i += 1 elif p.name in ('[', ']', '='): # These are handled elsewhere. other_tokens.append(p) elif p.name not in ('*', '&', '>'): # Ensure that names have a space between them. if (type_name and type_name[-1].token_type == tokenize.NAME and p.token_type == tokenize.NAME): type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0)) type_name.append(p) else: other_tokens.append(p) i += 1 type_name = ''.join([t.name for t in type_name]) return name, type_name, templated_types, modifiers, default, other_tokens def ToParameters(self, tokens): if not tokens: return [] result = [] name = type_name = '' type_modifiers = [] pointer = reference = array = False first_token = None default = [] def AddParameter(): if default: del default[0] # Remove flag. end = type_modifiers[-1].end parts = self.DeclarationToParts(type_modifiers, True) (name, type_name, templated_types, modifiers, unused_default, unused_other_tokens) = parts parameter_type = Type(first_token.start, first_token.end, type_name, templated_types, modifiers, reference, pointer, array) p = Parameter(first_token.start, end, name, parameter_type, default) result.append(p) template_count = 0 for s in tokens: if not first_token: first_token = s if s.name == '<': template_count += 1 elif s.name == '>': template_count -= 1 if template_count > 0: type_modifiers.append(s) continue if s.name == ',': AddParameter() name = type_name = '' type_modifiers = [] pointer = reference = array = False first_token = None default = [] elif s.name == '*': pointer = True elif s.name == '&': reference = True elif s.name == '[': array = True elif s.name == ']': pass # Just don't add to type_modifiers. elif s.name == '=': # Got a default value. Add any value (None) as a flag. 
default.append(None) elif default: default.append(s) else: type_modifiers.append(s) AddParameter() return result def CreateReturnType(self, return_type_seq): if not return_type_seq: return None start = return_type_seq[0].start end = return_type_seq[-1].end _, name, templated_types, modifiers, default, other_tokens = \ self.DeclarationToParts(return_type_seq, False) names = [n.name for n in other_tokens] reference = '&' in names pointer = '*' in names array = '[' in names return Type(start, end, name, templated_types, modifiers, reference, pointer, array) def GetTemplateIndices(self, names): # names is a list of strings. start = names.index('<') end = len(names) - 1 while end > 0: if names[end] == '>': break end -= 1 return start, end+1 class AstBuilder(object): def __init__(self, token_stream, filename, in_class='', visibility=None, namespace_stack=[]): self.tokens = token_stream self.filename = filename # TODO(nnorwitz): use a better data structure (deque) for the queue. # Switching directions of the "queue" improved perf by about 25%. # Using a deque should be even better since we access from both sides. self.token_queue = [] self.namespace_stack = namespace_stack[:] self.in_class = in_class if in_class is None: self.in_class_name_only = None else: self.in_class_name_only = in_class.split('::')[-1] self.visibility = visibility self.in_function = False self.current_token = None # Keep the state whether we are currently handling a typedef or not. self._handling_typedef = False self.converter = TypeConverter(self.namespace_stack) def HandleError(self, msg, token): printable_queue = list(reversed(self.token_queue[-20:])) sys.stderr.write('Got %s in %s @ %s %s\n' % (msg, self.filename, token, printable_queue)) def Generate(self): while 1: token = self._GetNextToken() if not token: break # Get the next token. self.current_token = token # Dispatch on the next token type. 
if token.token_type == _INTERNAL_TOKEN: if token.name == _NAMESPACE_POP: self.namespace_stack.pop() continue try: result = self._GenerateOne(token) if result is not None: yield result except: self.HandleError('exception', token) raise def _CreateVariable(self, pos_token, name, type_name, type_modifiers, ref_pointer_name_seq, templated_types, value=None): reference = '&' in ref_pointer_name_seq pointer = '*' in ref_pointer_name_seq array = '[' in ref_pointer_name_seq var_type = Type(pos_token.start, pos_token.end, type_name, templated_types, type_modifiers, reference, pointer, array) return VariableDeclaration(pos_token.start, pos_token.end, name, var_type, value, self.namespace_stack) def _GenerateOne(self, token): if token.token_type == tokenize.NAME: if (keywords.IsKeyword(token.name) and not keywords.IsBuiltinType(token.name)): method = getattr(self, 'handle_' + token.name) return method() elif token.name == self.in_class_name_only: # The token name is the same as the class, must be a ctor if # there is a paren. Otherwise, it's the return type. # Peek ahead to get the next token to figure out which. next = self._GetNextToken() self._AddBackToken(next) if next.token_type == tokenize.SYNTAX and next.name == '(': return self._GetMethod([token], FUNCTION_CTOR, None, True) # Fall through--handle like any other method. # Handle data or function declaration/definition. syntax = tokenize.SYNTAX temp_tokens, last_token = \ self._GetVarTokensUpTo(syntax, '(', ';', '{', '[') temp_tokens.insert(0, token) if last_token.name == '(': # If there is an assignment before the paren, # this is an expression, not a method. expr = bool([e for e in temp_tokens if e.name == '=']) if expr: new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';') temp_tokens.append(last_token) temp_tokens.extend(new_temp) last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0) if last_token.name == '[': # Handle array, this isn't a method, unless it's an operator. # TODO(nnorwitz): keep the size somewhere. 
# unused_size = self._GetTokensUpTo(tokenize.SYNTAX, ']') temp_tokens.append(last_token) if temp_tokens[-2].name == 'operator': temp_tokens.append(self._GetNextToken()) else: temp_tokens2, last_token = \ self._GetVarTokensUpTo(tokenize.SYNTAX, ';') temp_tokens.extend(temp_tokens2) if last_token.name == ';': # Handle data, this isn't a method. parts = self.converter.DeclarationToParts(temp_tokens, True) (name, type_name, templated_types, modifiers, default, unused_other_tokens) = parts t0 = temp_tokens[0] names = [t.name for t in temp_tokens] if templated_types: start, end = self.converter.GetTemplateIndices(names) names = names[:start] + names[end:] default = ''.join([t.name for t in default]) return self._CreateVariable(t0, name, type_name, modifiers, names, templated_types, default) if last_token.name == '{': self._AddBackTokens(temp_tokens[1:]) self._AddBackToken(last_token) method_name = temp_tokens[0].name method = getattr(self, 'handle_' + method_name, None) if not method: # Must be declaring a variable. # TODO(nnorwitz): handle the declaration. return None return method() return self._GetMethod(temp_tokens, 0, None, False) elif token.token_type == tokenize.SYNTAX: if token.name == '~' and self.in_class: # Must be a dtor (probably not in method body). token = self._GetNextToken() # self.in_class can contain A::Name, but the dtor will only # be Name. Make sure to compare against the right value. if (token.token_type == tokenize.NAME and token.name == self.in_class_name_only): return self._GetMethod([token], FUNCTION_DTOR, None, True) # TODO(nnorwitz): handle a lot more syntax. elif token.token_type == tokenize.PREPROCESSOR: # TODO(nnorwitz): handle more preprocessor directives. # token starts with a #, so remove it and strip whitespace. name = token.name[1:].lstrip() if name.startswith('include'): # Remove "include". name = name[7:].strip() assert name # Handle #include \<newline> "header-on-second-line.h". 
if name.startswith('\\'): name = name[1:].strip() assert name[0] in '<"', token assert name[-1] in '>"', token system = name[0] == '<' filename = name[1:-1] return Include(token.start, token.end, filename, system) if name.startswith('define'): # Remove "define". name = name[6:].strip() assert name value = '' for i, c in enumerate(name): if c.isspace(): value = name[i:].lstrip() name = name[:i] break return Define(token.start, token.end, name, value) if name.startswith('if') and name[2:3].isspace(): condition = name[3:].strip() if condition.startswith('0') or condition.startswith('(0)'): self._SkipIf0Blocks() return None def _GetTokensUpTo(self, expected_token_type, expected_token): return self._GetVarTokensUpTo(expected_token_type, expected_token)[0] def _GetVarTokensUpTo(self, expected_token_type, *expected_tokens): last_token = self._GetNextToken() tokens = [] while (last_token.token_type != expected_token_type or last_token.name not in expected_tokens): tokens.append(last_token) last_token = self._GetNextToken() return tokens, last_token # TODO(nnorwitz): remove _IgnoreUpTo() it shouldn't be necesary. def _IgnoreUpTo(self, token_type, token): unused_tokens = self._GetTokensUpTo(token_type, token) def _SkipIf0Blocks(self): count = 1 while 1: token = self._GetNextToken() if token.token_type != tokenize.PREPROCESSOR: continue name = token.name[1:].lstrip() if name.startswith('endif'): count -= 1 if count == 0: break elif name.startswith('if'): count += 1 def _GetMatchingChar(self, open_paren, close_paren, GetNextToken=None): if GetNextToken is None: GetNextToken = self._GetNextToken # Assumes the current token is open_paren and we will consume # and return up to the close_paren. 
count = 1 token = GetNextToken() while 1: if token.token_type == tokenize.SYNTAX: if token.name == open_paren: count += 1 elif token.name == close_paren: count -= 1 if count == 0: break yield token token = GetNextToken() yield token def _GetParameters(self): return self._GetMatchingChar('(', ')') def GetScope(self): return self._GetMatchingChar('{', '}') def _GetNextToken(self): if self.token_queue: return self.token_queue.pop() return next(self.tokens) def _AddBackToken(self, token): if token.whence == tokenize.WHENCE_STREAM: token.whence = tokenize.WHENCE_QUEUE self.token_queue.insert(0, token) else: assert token.whence == tokenize.WHENCE_QUEUE, token self.token_queue.append(token) def _AddBackTokens(self, tokens): if tokens: if tokens[-1].whence == tokenize.WHENCE_STREAM: for token in tokens: token.whence = tokenize.WHENCE_QUEUE self.token_queue[:0] = reversed(tokens) else: assert tokens[-1].whence == tokenize.WHENCE_QUEUE, tokens self.token_queue.extend(reversed(tokens)) def GetName(self, seq=None): """Returns ([tokens], next_token_info).""" GetNextToken = self._GetNextToken if seq is not None: it = iter(seq) GetNextToken = lambda: next(it) next_token = GetNextToken() tokens = [] last_token_was_name = False while (next_token.token_type == tokenize.NAME or (next_token.token_type == tokenize.SYNTAX and next_token.name in ('::', '<'))): # Two NAMEs in a row means the identifier should terminate. # It's probably some sort of variable declaration. if last_token_was_name and next_token.token_type == tokenize.NAME: break last_token_was_name = next_token.token_type == tokenize.NAME tokens.append(next_token) # Handle templated names. 
if next_token.name == '<': tokens.extend(self._GetMatchingChar('<', '>', GetNextToken)) last_token_was_name = True next_token = GetNextToken() return tokens, next_token def GetMethod(self, modifiers, templated_types): return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(') assert len(return_type_and_name) >= 1 return self._GetMethod(return_type_and_name, modifiers, templated_types, False) def _GetMethod(self, return_type_and_name, modifiers, templated_types, get_paren): template_portion = None if get_paren: token = self._GetNextToken() assert token.token_type == tokenize.SYNTAX, token if token.name == '<': # Handle templatized dtors. template_portion = [token] template_portion.extend(self._GetMatchingChar('<', '>')) token = self._GetNextToken() assert token.token_type == tokenize.SYNTAX, token assert token.name == '(', token name = return_type_and_name.pop() # Handle templatized ctors. if name.name == '>': index = 1 while return_type_and_name[index].name != '<': index += 1 template_portion = return_type_and_name[index:] + [name] del return_type_and_name[index:] name = return_type_and_name.pop() elif name.name == ']': rt = return_type_and_name assert rt[-1].name == '[', return_type_and_name assert rt[-2].name == 'operator', return_type_and_name name_seq = return_type_and_name[-2:] del return_type_and_name[-2:] name = tokenize.Token(tokenize.NAME, 'operator[]', name_seq[0].start, name.end) # Get the open paren so _GetParameters() below works. unused_open_paren = self._GetNextToken() # TODO(nnorwitz): store template_portion. return_type = return_type_and_name indices = name if return_type: indices = return_type[0] # Force ctor for templatized ctors. if name.name == self.in_class and not modifiers: modifiers |= FUNCTION_CTOR parameters = list(self._GetParameters()) del parameters[-1] # Remove trailing ')'. # Handling operator() is especially weird. 
if name.name == 'operator' and not parameters: token = self._GetNextToken() assert token.name == '(', token parameters = list(self._GetParameters()) del parameters[-1] # Remove trailing ')'. token = self._GetNextToken() while token.token_type == tokenize.NAME: modifier_token = token token = self._GetNextToken() if modifier_token.name == 'const': modifiers |= FUNCTION_CONST elif modifier_token.name == '__attribute__': # TODO(nnorwitz): handle more __attribute__ details. modifiers |= FUNCTION_ATTRIBUTE assert token.name == '(', token # Consume everything between the (parens). unused_tokens = list(self._GetMatchingChar('(', ')')) token = self._GetNextToken() elif modifier_token.name == 'throw': modifiers |= FUNCTION_THROW assert token.name == '(', token # Consume everything between the (parens). unused_tokens = list(self._GetMatchingChar('(', ')')) token = self._GetNextToken() elif modifier_token.name == modifier_token.name.upper(): # HACK(nnorwitz): assume that all upper-case names # are some macro we aren't expanding. modifiers |= FUNCTION_UNKNOWN_ANNOTATION else: self.HandleError('unexpected token', modifier_token) assert token.token_type == tokenize.SYNTAX, token # Handle ctor initializers. if token.name == ':': # TODO(nnorwitz): anything else to handle for initializer list? while token.name != ';' and token.name != '{': token = self._GetNextToken() # Handle pointer to functions that are really data but look # like method declarations. if token.name == '(': if parameters[0].name == '*': # name contains the return type. name = parameters.pop() # parameters contains the name of the data. modifiers = [p.name for p in parameters] # Already at the ( to open the parameter list. function_parameters = list(self._GetMatchingChar('(', ')')) del function_parameters[-1] # Remove trailing ')'. # TODO(nnorwitz): store the function_parameters. 
token = self._GetNextToken() assert token.token_type == tokenize.SYNTAX, token assert token.name == ';', token return self._CreateVariable(indices, name.name, indices.name, modifiers, '', None) # At this point, we got something like: # return_type (type::*name_)(params); # This is a data member called name_ that is a function pointer. # With this code: void (sq_type::*field_)(string&); # We get: name=void return_type=[] parameters=sq_type ... field_ # TODO(nnorwitz): is return_type always empty? # TODO(nnorwitz): this isn't even close to being correct. # Just put in something so we don't crash and can move on. real_name = parameters[-1] modifiers = [p.name for p in self._GetParameters()] del modifiers[-1] # Remove trailing ')'. return self._CreateVariable(indices, real_name.name, indices.name, modifiers, '', None) if token.name == '{': body = list(self.GetScope()) del body[-1] # Remove trailing '}'. else: body = None if token.name == '=': token = self._GetNextToken() assert token.token_type == tokenize.CONSTANT, token assert token.name == '0', token modifiers |= FUNCTION_PURE_VIRTUAL token = self._GetNextToken() if token.name == '[': # TODO(nnorwitz): store tokens and improve parsing. # template <typename T, size_t N> char (&ASH(T (&seq)[N]))[N]; tokens = list(self._GetMatchingChar('[', ']')) token = self._GetNextToken() assert token.name == ';', (token, return_type_and_name, parameters) # Looks like we got a method, not a function. if len(return_type) > 2 and return_type[-1].name == '::': return_type, in_class = \ self._GetReturnTypeAndClassName(return_type) return Method(indices.start, indices.end, name.name, in_class, return_type, parameters, modifiers, templated_types, body, self.namespace_stack) return Function(indices.start, indices.end, name.name, return_type, parameters, modifiers, templated_types, body, self.namespace_stack) def _GetReturnTypeAndClassName(self, token_seq): # Splitting the return type from the class name in a method # can be tricky. 
For example, Return::Type::Is::Hard::To::Find(). # Where is the return type and where is the class name? # The heuristic used is to pull the last name as the class name. # This includes all the templated type info. # TODO(nnorwitz): if there is only One name like in the # example above, punt and assume the last bit is the class name. # Ignore a :: prefix, if exists so we can find the first real name. i = 0 if token_seq[0].name == '::': i = 1 # Ignore a :: suffix, if exists. end = len(token_seq) - 1 if token_seq[end-1].name == '::': end -= 1 # Make a copy of the sequence so we can append a sentinel # value. This is required for GetName will has to have some # terminating condition beyond the last name. seq_copy = token_seq[i:end] seq_copy.append(tokenize.Token(tokenize.SYNTAX, '', 0, 0)) names = [] while i < end: # Iterate through the sequence parsing out each name. new_name, next = self.GetName(seq_copy[i:]) assert new_name, 'Got empty new_name, next=%s' % next # We got a pointer or ref. Add it to the name. if next and next.token_type == tokenize.SYNTAX: new_name.append(next) names.append(new_name) i += len(new_name) # Now that we have the names, it's time to undo what we did. # Remove the sentinel value. names[-1].pop() # Flatten the token sequence for the return type. return_type = [e for seq in names[:-1] for e in seq] # The class name is the last name. class_name = names[-1] return return_type, class_name def handle_bool(self): pass def handle_char(self): pass def handle_int(self): pass def handle_long(self): pass def handle_short(self): pass def handle_double(self): pass def handle_float(self): pass def handle_void(self): pass def handle_wchar_t(self): pass def handle_unsigned(self): pass def handle_signed(self): pass def _GetNestedType(self, ctor): name = None name_tokens, token = self.GetName() if name_tokens: name = ''.join([t.name for t in name_tokens]) # Handle forward declarations. 
if token.token_type == tokenize.SYNTAX and token.name == ';': return ctor(token.start, token.end, name, None, self.namespace_stack) if token.token_type == tokenize.NAME and self._handling_typedef: self._AddBackToken(token) return ctor(token.start, token.end, name, None, self.namespace_stack) # Must be the type declaration. fields = list(self._GetMatchingChar('{', '}')) del fields[-1] # Remove trailing '}'. if token.token_type == tokenize.SYNTAX and token.name == '{': next = self._GetNextToken() new_type = ctor(token.start, token.end, name, fields, self.namespace_stack) # A name means this is an anonymous type and the name # is the variable declaration. if next.token_type != tokenize.NAME: return new_type name = new_type token = next # Must be variable declaration using the type prefixed with keyword. assert token.token_type == tokenize.NAME, token return self._CreateVariable(token, token.name, name, [], '', None) def handle_struct(self): # Special case the handling typedef/aliasing of structs here. # It would be a pain to handle in the class code. name_tokens, var_token = self.GetName() if name_tokens: next_token = self._GetNextToken() is_syntax = (var_token.token_type == tokenize.SYNTAX and var_token.name[0] in '*&') is_variable = (var_token.token_type == tokenize.NAME and next_token.name == ';') variable = var_token if is_syntax and not is_variable: variable = next_token temp = self._GetNextToken() if temp.token_type == tokenize.SYNTAX and temp.name == '(': # Handle methods declared to return a struct. 
t0 = name_tokens[0] struct = tokenize.Token(tokenize.NAME, 'struct', t0.start-7, t0.start-2) type_and_name = [struct] type_and_name.extend(name_tokens) type_and_name.extend((var_token, next_token)) return self._GetMethod(type_and_name, 0, None, False) assert temp.name == ';', (temp, name_tokens, var_token) if is_syntax or (is_variable and not self._handling_typedef): modifiers = ['struct'] type_name = ''.join([t.name for t in name_tokens]) position = name_tokens[0] return self._CreateVariable(position, variable.name, type_name, modifiers, var_token.name, None) name_tokens.extend((var_token, next_token)) self._AddBackTokens(name_tokens) else: self._AddBackToken(var_token) return self._GetClass(Struct, VISIBILITY_PUBLIC, None) def handle_union(self): return self._GetNestedType(Union) def handle_enum(self): return self._GetNestedType(Enum) def handle_auto(self): # TODO(nnorwitz): warn about using auto? Probably not since it # will be reclaimed and useful for C++0x. pass def handle_register(self): pass def handle_const(self): pass def handle_inline(self): pass def handle_extern(self): pass def handle_static(self): pass def handle_virtual(self): # What follows must be a method. token = token2 = self._GetNextToken() if token.name == 'inline': # HACK(nnorwitz): handle inline dtors by ignoring 'inline'. 
token2 = self._GetNextToken() if token2.token_type == tokenize.SYNTAX and token2.name == '~': return self.GetMethod(FUNCTION_VIRTUAL + FUNCTION_DTOR, None) assert token.token_type == tokenize.NAME or token.name == '::', token return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(') return_type_and_name.insert(0, token) if token2 is not token: return_type_and_name.insert(1, token2) return self._GetMethod(return_type_and_name, FUNCTION_VIRTUAL, None, False) def handle_volatile(self): pass def handle_mutable(self): pass def handle_public(self): assert self.in_class self.visibility = VISIBILITY_PUBLIC def handle_protected(self): assert self.in_class self.visibility = VISIBILITY_PROTECTED def handle_private(self): assert self.in_class self.visibility = VISIBILITY_PRIVATE def handle_friend(self): tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';') assert tokens t0 = tokens[0] return Friend(t0.start, t0.end, tokens, self.namespace_stack) def handle_static_cast(self): pass def handle_const_cast(self): pass def handle_dynamic_cast(self): pass def handle_reinterpret_cast(self): pass def handle_new(self): pass def handle_delete(self): tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';') assert tokens return Delete(tokens[0].start, tokens[0].end, tokens) def handle_typedef(self): token = self._GetNextToken() if (token.token_type == tokenize.NAME and keywords.IsKeyword(token.name)): # Token must be struct/enum/union/class. method = getattr(self, 'handle_' + token.name) self._handling_typedef = True tokens = [method()] self._handling_typedef = False else: tokens = [token] # Get the remainder of the typedef up to the semi-colon. tokens.extend(self._GetTokensUpTo(tokenize.SYNTAX, ';')) # TODO(nnorwitz): clean all this up. assert tokens name = tokens.pop() indices = name if tokens: indices = tokens[0] if not indices: indices = token if name.name == ')': # HACK(nnorwitz): Handle pointers to functions "properly". 
if (len(tokens) >= 4 and tokens[1].name == '(' and tokens[2].name == '*'): tokens.append(name) name = tokens[3] elif name.name == ']': # HACK(nnorwitz): Handle arrays properly. if len(tokens) >= 2: tokens.append(name) name = tokens[1] new_type = tokens if tokens and isinstance(tokens[0], tokenize.Token): new_type = self.converter.ToType(tokens)[0] return Typedef(indices.start, indices.end, name.name, new_type, self.namespace_stack) def handle_typeid(self): pass # Not needed yet. def handle_typename(self): pass # Not needed yet. def _GetTemplatedTypes(self): result = {} tokens = list(self._GetMatchingChar('<', '>')) len_tokens = len(tokens) - 1 # Ignore trailing '>'. i = 0 while i < len_tokens: key = tokens[i].name i += 1 if keywords.IsKeyword(key) or key == ',': continue type_name = default = None if i < len_tokens: i += 1 if tokens[i-1].name == '=': assert i < len_tokens, '%s %s' % (i, tokens) default, unused_next_token = self.GetName(tokens[i:]) i += len(default) else: if tokens[i-1].name != ',': # We got something like: Type variable. # Re-adjust the key (variable) and type_name (Type). key = tokens[i-1].name type_name = tokens[i-2] result[key] = (type_name, default) return result def handle_template(self): token = self._GetNextToken() assert token.token_type == tokenize.SYNTAX, token assert token.name == '<', token templated_types = self._GetTemplatedTypes() # TODO(nnorwitz): for now, just ignore the template params. 
token = self._GetNextToken() if token.token_type == tokenize.NAME: if token.name == 'class': return self._GetClass(Class, VISIBILITY_PRIVATE, templated_types) elif token.name == 'struct': return self._GetClass(Struct, VISIBILITY_PUBLIC, templated_types) elif token.name == 'friend': return self.handle_friend() self._AddBackToken(token) tokens, last = self._GetVarTokensUpTo(tokenize.SYNTAX, '(', ';') tokens.append(last) self._AddBackTokens(tokens) if last.name == '(': return self.GetMethod(FUNCTION_NONE, templated_types) # Must be a variable definition. return None def handle_true(self): pass # Nothing to do. def handle_false(self): pass # Nothing to do. def handle_asm(self): pass # Not needed yet. def handle_class(self): return self._GetClass(Class, VISIBILITY_PRIVATE, None) def _GetBases(self): # Get base classes. bases = [] while 1: token = self._GetNextToken() assert token.token_type == tokenize.NAME, token # TODO(nnorwitz): store kind of inheritance...maybe. if token.name not in ('public', 'protected', 'private'): # If inheritance type is not specified, it is private. # Just put the token back so we can form a name. # TODO(nnorwitz): it would be good to warn about this. self._AddBackToken(token) else: # Check for virtual inheritance. token = self._GetNextToken() if token.name != 'virtual': self._AddBackToken(token) else: # TODO(nnorwitz): store that we got virtual for this base. pass base, next_token = self.GetName() bases_ast = self.converter.ToType(base) assert len(bases_ast) == 1, bases_ast bases.append(bases_ast[0]) assert next_token.token_type == tokenize.SYNTAX, next_token if next_token.name == '{': token = next_token break # Support multiple inheritance. 
assert next_token.name == ',', next_token return bases, token def _GetClass(self, class_type, visibility, templated_types): class_name = None class_token = self._GetNextToken() if class_token.token_type != tokenize.NAME: assert class_token.token_type == tokenize.SYNTAX, class_token token = class_token else: # Skip any macro (e.g. storage class specifiers) after the # 'class' keyword. next_token = self._GetNextToken() if next_token.token_type == tokenize.NAME: self._AddBackToken(next_token) else: self._AddBackTokens([class_token, next_token]) name_tokens, token = self.GetName() class_name = ''.join([t.name for t in name_tokens]) bases = None if token.token_type == tokenize.SYNTAX: if token.name == ';': # Forward declaration. return class_type(class_token.start, class_token.end, class_name, None, templated_types, None, self.namespace_stack) if token.name in '*&': # Inline forward declaration. Could be method or data. name_token = self._GetNextToken() next_token = self._GetNextToken() if next_token.name == ';': # Handle data modifiers = ['class'] return self._CreateVariable(class_token, name_token.name, class_name, modifiers, token.name, None) else: # Assume this is a method. 
tokens = (class_token, token, name_token, next_token) self._AddBackTokens(tokens) return self.GetMethod(FUNCTION_NONE, None) if token.name == ':': bases, token = self._GetBases() body = None if token.token_type == tokenize.SYNTAX and token.name == '{': assert token.token_type == tokenize.SYNTAX, token assert token.name == '{', token ast = AstBuilder(self.GetScope(), self.filename, class_name, visibility, self.namespace_stack) body = list(ast.Generate()) if not self._handling_typedef: token = self._GetNextToken() if token.token_type != tokenize.NAME: assert token.token_type == tokenize.SYNTAX, token assert token.name == ';', token else: new_class = class_type(class_token.start, class_token.end, class_name, bases, None, body, self.namespace_stack) modifiers = [] return self._CreateVariable(class_token, token.name, new_class, modifiers, token.name, None) else: if not self._handling_typedef: self.HandleError('non-typedef token', token) self._AddBackToken(token) return class_type(class_token.start, class_token.end, class_name, bases, templated_types, body, self.namespace_stack) def handle_namespace(self): token = self._GetNextToken() # Support anonymous namespaces. name = None if token.token_type == tokenize.NAME: name = token.name token = self._GetNextToken() self.namespace_stack.append(name) assert token.token_type == tokenize.SYNTAX, token # Create an internal token that denotes when the namespace is complete. internal_token = tokenize.Token(_INTERNAL_TOKEN, _NAMESPACE_POP, None, None) internal_token.whence = token.whence if token.name == '=': # TODO(nnorwitz): handle aliasing namespaces. name, next_token = self.GetName() assert next_token.name == ';', next_token self._AddBackToken(internal_token) else: assert token.name == '{', token tokens = list(self.GetScope()) # Replace the trailing } with the internal namespace pop token. tokens[-1] = internal_token # Handle namespace with nothing in it. 
self._AddBackTokens(tokens) return None def handle_using(self): tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';') assert tokens return Using(tokens[0].start, tokens[0].end, tokens) def handle_explicit(self): assert self.in_class # Nothing much to do. # TODO(nnorwitz): maybe verify the method name == class name. # This must be a ctor. return self.GetMethod(FUNCTION_CTOR, None) def handle_this(self): pass # Nothing to do. def handle_operator(self): # Pull off the next token(s?) and make that part of the method name. pass def handle_sizeof(self): pass def handle_case(self): pass def handle_switch(self): pass def handle_default(self): token = self._GetNextToken() assert token.token_type == tokenize.SYNTAX assert token.name == ':' def handle_if(self): pass def handle_else(self): pass def handle_return(self): tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';') if not tokens: return Return(self.current_token.start, self.current_token.end, None) return Return(tokens[0].start, tokens[0].end, tokens) def handle_goto(self): tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';') assert len(tokens) == 1, str(tokens) return Goto(tokens[0].start, tokens[0].end, tokens[0].name) def handle_try(self): pass # Not needed yet. def handle_catch(self): pass # Not needed yet. def handle_throw(self): pass # Not needed yet. def handle_while(self): pass def handle_do(self): pass def handle_for(self): pass def handle_break(self): self._IgnoreUpTo(tokenize.SYNTAX, ';') def handle_continue(self): self._IgnoreUpTo(tokenize.SYNTAX, ';') def BuilderFromSource(source, filename): """Utility method that returns an AstBuilder from source code. Args: source: 'C++ source code' filename: 'file1' Returns: AstBuilder """ return AstBuilder(tokenize.GetTokens(source), filename) def PrintIndentifiers(filename, should_print): """Prints all identifiers for a C++ source file. 
Args: filename: 'file1' should_print: predicate with signature: bool Function(token) """ source = utils.ReadFile(filename, False) if source is None: sys.stderr.write('Unable to find: %s\n' % filename) return #print('Processing %s' % actual_filename) builder = BuilderFromSource(source, filename) try: for node in builder.Generate(): if should_print(node): print(node.name) except KeyboardInterrupt: return except: pass def PrintAllIndentifiers(filenames, should_print): """Prints all identifiers for each C++ source file in filenames. Args: filenames: ['file1', 'file2', ...] should_print: predicate with signature: bool Function(token) """ for path in filenames: PrintIndentifiers(path, should_print) def main(argv): for filename in argv[1:]: source = utils.ReadFile(filename) if source is None: continue print('Processing %s' % filename) builder = BuilderFromSource(source, filename) try: entire_ast = filter(None, builder.Generate()) except KeyboardInterrupt: return except: # Already printed a warning, print the traceback and continue. traceback.print_exc() else: if utils.DEBUG: for ast in entire_ast: print(ast) if __name__ == '__main__': main(sys.argv)
mit
dcroc16/skunk_works
google_appengine/lib/django-0.96/django/core/cache/backends/base.py
32
1675
"Base Cache class." from django.core.exceptions import ImproperlyConfigured class InvalidCacheBackendError(ImproperlyConfigured): pass class BaseCache(object): def __init__(self, params): timeout = params.get('timeout', 300) try: timeout = int(timeout) except (ValueError, TypeError): timeout = 300 self.default_timeout = timeout def get(self, key, default=None): """ Fetch a given key from the cache. If the key does not exist, return default, which itself defaults to None. """ raise NotImplementedError def set(self, key, value, timeout=None): """ Set a value in the cache. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. """ raise NotImplementedError def delete(self, key): """ Delete a key from the cache, failing silently. """ raise NotImplementedError def get_many(self, keys): """ Fetch a bunch of keys from the cache. For certain backends (memcached, pgsql) this can be *much* faster when fetching multiple values. Returns a dict mapping each key in keys to its value. If the given key is missing, it will be missing from the response dict. """ d = {} for k in keys: val = self.get(k) if val is not None: d[k] = val return d def has_key(self, key): """ Returns True if the key is in the cache and has not expired. """ return self.get(key) is not None
mit
andrewleech/SickRage
lib/requests/packages/urllib3/response.py
150
22662
from __future__ import absolute_import from contextlib import contextmanager import zlib import io import logging from socket import timeout as SocketTimeout from socket import error as SocketError from ._collections import HTTPHeaderDict from .exceptions import ( BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked, IncompleteRead, InvalidHeader ) from .packages.six import string_types as basestring, binary_type, PY3 from .packages.six.moves import http_client as httplib from .connection import HTTPException, BaseSSLError from .util.response import is_fp_closed, is_response_to_head log = logging.getLogger(__name__) class DeflateDecoder(object): def __init__(self): self._first_try = True self._data = binary_type() self._obj = zlib.decompressobj() def __getattr__(self, name): return getattr(self._obj, name) def decompress(self, data): if not data: return data if not self._first_try: return self._obj.decompress(data) self._data += data try: return self._obj.decompress(data) except zlib.error: self._first_try = False self._obj = zlib.decompressobj(-zlib.MAX_WBITS) try: return self.decompress(self._data) finally: self._data = None class GzipDecoder(object): def __init__(self): self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) def __getattr__(self, name): return getattr(self._obj, name) def decompress(self, data): if not data: return data return self._obj.decompress(data) def _get_decoder(mode): if mode == 'gzip': return GzipDecoder() return DeflateDecoder() class HTTPResponse(io.IOBase): """ HTTP Response container. Backwards-compatible to httplib's HTTPResponse but the response ``body`` is loaded and decoded on-demand when the ``data`` property is accessed. This class is also compatible with the Python standard library's :mod:`io` module, and can hence be treated as a readable object in the context of that framework. 
Extra parameters for behaviour not present in httplib.HTTPResponse: :param preload_content: If True, the response's body will be preloaded during construction. :param decode_content: If True, attempts to decode specific content-encoding's based on headers (like 'gzip' and 'deflate') will be skipped and raw data will be used instead. :param original_response: When this HTTPResponse wrapper is generated from an httplib.HTTPResponse object, it's convenient to include the original for debug purposes. It's otherwise unused. :param retries: The retries contains the last :class:`~urllib3.util.retry.Retry` that was used during the request. :param enforce_content_length: Enforce content length checking. Body returned by server must match value of Content-Length header, if present. Otherwise, raise error. """ CONTENT_DECODERS = ['gzip', 'deflate'] REDIRECT_STATUSES = [301, 302, 303, 307, 308] def __init__(self, body='', headers=None, status=0, version=0, reason=None, strict=0, preload_content=True, decode_content=True, original_response=None, pool=None, connection=None, retries=None, enforce_content_length=False, request_method=None): if isinstance(headers, HTTPHeaderDict): self.headers = headers else: self.headers = HTTPHeaderDict(headers) self.status = status self.version = version self.reason = reason self.strict = strict self.decode_content = decode_content self.retries = retries self.enforce_content_length = enforce_content_length self._decoder = None self._body = None self._fp = None self._original_response = original_response self._fp_bytes_read = 0 if body and isinstance(body, (basestring, binary_type)): self._body = body self._pool = pool self._connection = connection if hasattr(body, 'read'): self._fp = body # Are we using the chunked-style of transfer encoding? 
self.chunked = False self.chunk_left = None tr_enc = self.headers.get('transfer-encoding', '').lower() # Don't incur the penalty of creating a list and then discarding it encodings = (enc.strip() for enc in tr_enc.split(",")) if "chunked" in encodings: self.chunked = True # Determine length of response self.length_remaining = self._init_length(request_method) # If requested, preload the body. if preload_content and not self._body: self._body = self.read(decode_content=decode_content) def get_redirect_location(self): """ Should we redirect and where to? :returns: Truthy redirect location string if we got a redirect status code and valid location. ``None`` if redirect status and no location. ``False`` if not a redirect status code. """ if self.status in self.REDIRECT_STATUSES: return self.headers.get('location') return False def release_conn(self): if not self._pool or not self._connection: return self._pool._put_conn(self._connection) self._connection = None @property def data(self): # For backwords-compat with earlier urllib3 0.4 and earlier. if self._body: return self._body if self._fp: return self.read(cache_content=True) @property def connection(self): return self._connection def tell(self): """ Obtain the number of bytes pulled over the wire so far. May differ from the amount of content returned by :meth:``HTTPResponse.read`` if bytes are encoded on the wire (e.g, compressed). """ return self._fp_bytes_read def _init_length(self, request_method): """ Set initial length value for Response content if available. """ length = self.headers.get('content-length') if length is not None and self.chunked: # This Response will fail with an IncompleteRead if it can't be # received as chunked. This method falls back to attempt reading # the response before raising an exception. log.warning("Received response with both Content-Length and " "Transfer-Encoding set. This is expressly forbidden " "by RFC 7230 sec 3.3.2. 
Ignoring Content-Length and " "attempting to process response as Transfer-Encoding: " "chunked.") return None elif length is not None: try: # RFC 7230 section 3.3.2 specifies multiple content lengths can # be sent in a single Content-Length header # (e.g. Content-Length: 42, 42). This line ensures the values # are all valid ints and that as long as the `set` length is 1, # all values are the same. Otherwise, the header is invalid. lengths = set([int(val) for val in length.split(',')]) if len(lengths) > 1: raise InvalidHeader("Content-Length contained multiple " "unmatching values (%s)" % length) length = lengths.pop() except ValueError: length = None else: if length < 0: length = None # Convert status to int for comparison # In some cases, httplib returns a status of "_UNKNOWN" try: status = int(self.status) except ValueError: status = 0 # Check for responses that shouldn't include a body if status in (204, 304) or 100 <= status < 200 or request_method == 'HEAD': length = 0 return length def _init_decoder(self): """ Set-up the _decoder attribute if necessary. """ # Note: content-encoding value should be case-insensitive, per RFC 7230 # Section 3.2 content_encoding = self.headers.get('content-encoding', '').lower() if self._decoder is None and content_encoding in self.CONTENT_DECODERS: self._decoder = _get_decoder(content_encoding) def _decode(self, data, decode_content, flush_decoder): """ Decode the data passed in and potentially flush the decoder. """ try: if decode_content and self._decoder: data = self._decoder.decompress(data) except (IOError, zlib.error) as e: content_encoding = self.headers.get('content-encoding', '').lower() raise DecodeError( "Received response with content-encoding: %s, but " "failed to decode it." % content_encoding, e) if flush_decoder and decode_content: data += self._flush_decoder() return data def _flush_decoder(self): """ Flushes the decoder. Should only be called if the decoder is actually being used. 
""" if self._decoder: buf = self._decoder.decompress(b'') return buf + self._decoder.flush() return b'' @contextmanager def _error_catcher(self): """ Catch low-level python exceptions, instead re-raising urllib3 variants, so that low-level exceptions are not leaked in the high-level api. On exit, release the connection back to the pool. """ clean_exit = False try: try: yield except SocketTimeout: # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but # there is yet no clean way to get at it from this context. raise ReadTimeoutError(self._pool, None, 'Read timed out.') except BaseSSLError as e: # FIXME: Is there a better way to differentiate between SSLErrors? if 'read operation timed out' not in str(e): # Defensive: # This shouldn't happen but just in case we're missing an edge # case, let's avoid swallowing SSL errors. raise raise ReadTimeoutError(self._pool, None, 'Read timed out.') except (HTTPException, SocketError) as e: # This includes IncompleteRead. raise ProtocolError('Connection broken: %r' % e, e) # If no exception is thrown, we should avoid cleaning up # unnecessarily. clean_exit = True finally: # If we didn't terminate cleanly, we need to throw away our # connection. if not clean_exit: # The response may not be closed but we're not going to use it # anymore so close it now to ensure that the connection is # released back to the pool. if self._original_response: self._original_response.close() # Closing the response may not actually be sufficient to close # everything, so if we have a hold of the connection close that # too. if self._connection: self._connection.close() # If we hold the original response but it's closed now, we should # return the connection back to the pool. 
if self._original_response and self._original_response.isclosed(): self.release_conn() def read(self, amt=None, decode_content=None, cache_content=False): """ Similar to :meth:`httplib.HTTPResponse.read`, but with two additional parameters: ``decode_content`` and ``cache_content``. :param amt: How much of the content to read. If specified, caching is skipped because it doesn't make sense to cache partial content as the full response. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. :param cache_content: If True, will save the returned data such that the same result is returned despite of the state of the underlying file object. This is useful if you want the ``.data`` property to continue working after having ``.read()`` the file object. (Overridden if ``amt`` is set.) """ self._init_decoder() if decode_content is None: decode_content = self.decode_content if self._fp is None: return flush_decoder = False data = None with self._error_catcher(): if amt is None: # cStringIO doesn't like amt=None data = self._fp.read() flush_decoder = True else: cache_content = False data = self._fp.read(amt) if amt != 0 and not data: # Platform-specific: Buggy versions of Python. # Close the connection when no data is returned # # This is redundant to what httplib/http.client _should_ # already do. However, versions of python released before # December 15, 2012 (http://bugs.python.org/issue16298) do # not properly close the connection in all cases. There is # no harm in redundantly calling close. self._fp.close() flush_decoder = True if self.enforce_content_length and self.length_remaining not in (0, None): # This is an edge case that httplib failed to cover due # to concerns of backward compatibility. We're # addressing it here to make sure IncompleteRead is # raised during streaming, so all calls with incorrect # Content-Length are caught. 
raise IncompleteRead(self._fp_bytes_read, self.length_remaining) if data: self._fp_bytes_read += len(data) if self.length_remaining is not None: self.length_remaining -= len(data) data = self._decode(data, decode_content, flush_decoder) if cache_content: self._body = data return data def stream(self, amt=2**16, decode_content=None): """ A generator wrapper for the read() method. A call will block until ``amt`` bytes have been read from the connection or until the connection is closed. :param amt: How much of the content to read. The generator will return up to much data per iteration, but may return less. This is particularly likely when using compressed data. However, the empty string will never be returned. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. """ if self.chunked and self.supports_chunked_reads(): for line in self.read_chunked(amt, decode_content=decode_content): yield line else: while not is_fp_closed(self._fp): data = self.read(amt=amt, decode_content=decode_content) if data: yield data @classmethod def from_httplib(ResponseCls, r, **response_kw): """ Given an :class:`httplib.HTTPResponse` instance ``r``, return a corresponding :class:`urllib3.response.HTTPResponse` object. Remaining parameters are passed to the HTTPResponse constructor, along with ``original_response=r``. 
""" headers = r.msg if not isinstance(headers, HTTPHeaderDict): if PY3: # Python 3 headers = HTTPHeaderDict(headers.items()) else: # Python 2 headers = HTTPHeaderDict.from_httplib(headers) # HTTPResponse objects in Python 3 don't have a .strict attribute strict = getattr(r, 'strict', 0) resp = ResponseCls(body=r, headers=headers, status=r.status, version=r.version, reason=r.reason, strict=strict, original_response=r, **response_kw) return resp # Backwards-compatibility methods for httplib.HTTPResponse def getheaders(self): return self.headers def getheader(self, name, default=None): return self.headers.get(name, default) # Overrides from io.IOBase def close(self): if not self.closed: self._fp.close() if self._connection: self._connection.close() @property def closed(self): if self._fp is None: return True elif hasattr(self._fp, 'isclosed'): return self._fp.isclosed() elif hasattr(self._fp, 'closed'): return self._fp.closed else: return True def fileno(self): if self._fp is None: raise IOError("HTTPResponse has no file to get a fileno from") elif hasattr(self._fp, "fileno"): return self._fp.fileno() else: raise IOError("The file-like object this HTTPResponse is wrapped " "around has no file descriptor") def flush(self): if self._fp is not None and hasattr(self._fp, 'flush'): return self._fp.flush() def readable(self): # This method is required for `io` module compatibility. return True def readinto(self, b): # This method is required for `io` module compatibility. temp = self.read(len(b)) if len(temp) == 0: return 0 else: b[:len(temp)] = temp return len(temp) def supports_chunked_reads(self): """ Checks if the underlying file-like object looks like a httplib.HTTPResponse object. We do this by testing for the fp attribute. If it is present we assume it returns raw chunks as processed by read_chunked(). """ return hasattr(self._fp, 'fp') def _update_chunk_length(self): # First, we'll figure out length of a chunk and then # we'll try to read it from socket. 
if self.chunk_left is not None: return line = self._fp.fp.readline() line = line.split(b';', 1)[0] try: self.chunk_left = int(line, 16) except ValueError: # Invalid chunked protocol response, abort. self.close() raise httplib.IncompleteRead(line) def _handle_chunk(self, amt): returned_chunk = None if amt is None: chunk = self._fp._safe_read(self.chunk_left) returned_chunk = chunk self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. self.chunk_left = None elif amt < self.chunk_left: value = self._fp._safe_read(amt) self.chunk_left = self.chunk_left - amt returned_chunk = value elif amt == self.chunk_left: value = self._fp._safe_read(amt) self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. self.chunk_left = None returned_chunk = value else: # amt > self.chunk_left returned_chunk = self._fp._safe_read(self.chunk_left) self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. self.chunk_left = None return returned_chunk def read_chunked(self, amt=None, decode_content=None): """ Similar to :meth:`HTTPResponse.read`, but with an additional parameter: ``decode_content``. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. """ self._init_decoder() # FIXME: Rewrite this method and make it a class with a better structured logic. if not self.chunked: raise ResponseNotChunked( "Response is not chunked. " "Header 'transfer-encoding: chunked' is missing.") if not self.supports_chunked_reads(): raise BodyNotHttplibCompatible( "Body should be httplib.HTTPResponse like. " "It should have have an fp attribute which returns raw chunks.") # Don't bother reading the body of a HEAD request. 
if self._original_response and is_response_to_head(self._original_response): self._original_response.close() return with self._error_catcher(): while True: self._update_chunk_length() if self.chunk_left == 0: break chunk = self._handle_chunk(amt) decoded = self._decode(chunk, decode_content=decode_content, flush_decoder=False) if decoded: yield decoded if decode_content: # On CPython and PyPy, we should never need to flush the # decoder. However, on Jython we *might* need to, so # lets defensively do it anyway. decoded = self._flush_decoder() if decoded: # Platform-specific: Jython. yield decoded # Chunk content ends with \r\n: discard it. while True: line = self._fp.fp.readline() if not line: # Some sites may not end with '\r\n'. break if line == b'\r\n': break # We read everything; close the "file". if self._original_response: self._original_response.close()
gpl-3.0
rven/odoo
addons/l10n_ch/models/res_bank.py
1
16379
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import re from odoo import api, fields, models, _ from odoo.exceptions import ValidationError from odoo.tools.misc import mod10r from odoo.exceptions import UserError import werkzeug.urls ISR_SUBSCRIPTION_CODE = {'CHF': '01', 'EUR': '03'} CLEARING = "09000" _re_postal = re.compile('^[0-9]{2}-[0-9]{1,6}-[0-9]$') def _is_l10n_ch_postal(account_ref): """ Returns True if the string account_ref is a valid postal account number, i.e. it only contains ciphers and is last cipher is the result of a recursive modulo 10 operation ran over the rest of it. Shorten form with - is also accepted. """ if _re_postal.match(account_ref or ''): ref_subparts = account_ref.split('-') account_ref = ref_subparts[0] + ref_subparts[1].rjust(6, '0') + ref_subparts[2] if re.match('\d+$', account_ref or ''): account_ref_without_check = account_ref[:-1] return mod10r(account_ref_without_check) == account_ref return False def _is_l10n_ch_isr_issuer(account_ref, currency_code): """ Returns True if the string account_ref is a valid a valid ISR issuer An ISR issuer is postal account number that starts by 01 (CHF) or 03 (EUR), """ if (account_ref or '').startswith(ISR_SUBSCRIPTION_CODE[currency_code]): return _is_l10n_ch_postal(account_ref) return False class ResPartnerBank(models.Model): _inherit = 'res.partner.bank' l10n_ch_postal = fields.Char( string="Swiss Postal Account", readonly=False, store=True, compute='_compute_l10n_ch_postal', help="This field is used for the Swiss postal account number on a vendor account and for the client number on " "your own account. The client number is mostly 6 numbers without -, while the postal account number can " "be e.g. 
01-162-8") # fields to configure ISR payment slip generation l10n_ch_isr_subscription_chf = fields.Char(string='CHF ISR Subscription Number', help='The subscription number provided by the bank or Postfinance to identify the bank, used to generate ISR in CHF. eg. 01-162-8') l10n_ch_isr_subscription_eur = fields.Char(string='EUR ISR Subscription Number', help='The subscription number provided by the bank or Postfinance to identify the bank, used to generate ISR in EUR. eg. 03-162-5') l10n_ch_show_subscription = fields.Boolean(compute='_compute_l10n_ch_show_subscription', default=lambda self: self.env.company.country_id.code == 'CH') def _is_isr_issuer(self): return (_is_l10n_ch_isr_issuer(self.l10n_ch_postal, 'CHF') or _is_l10n_ch_isr_issuer(self.l10n_ch_postal, 'EUR')) @api.constrains("l10n_ch_postal", "partner_id") def _check_postal_num(self): """Validate postal number format""" for rec in self: if rec.l10n_ch_postal and not _is_l10n_ch_postal(rec.l10n_ch_postal): # l10n_ch_postal is used for the purpose of Client Number on your own accounts, so don't do the check there if rec.partner_id and not rec.partner_id.ref_company_ids: raise ValidationError( _("The postal number {} is not valid.\n" "It must be a valid postal number format. eg. 10-8060-7").format(rec.l10n_ch_postal)) return True @api.constrains("l10n_ch_isr_subscription_chf", "l10n_ch_isr_subscription_eur") def _check_subscription_num(self): """Validate ISR subscription number format Subscription number can only starts with 01 or 03 """ for rec in self: for currency in ["CHF", "EUR"]: subscrip = rec.l10n_ch_isr_subscription_chf if currency == "CHF" else rec.l10n_ch_isr_subscription_eur if subscrip and not _is_l10n_ch_isr_issuer(subscrip, currency): example = "01-162-8" if currency == "CHF" else "03-162-5" raise ValidationError( _("The ISR subcription {} for {} number is not valid.\n" "It must starts with {} and we a valid postal number format. eg. 
{}" ).format(subscrip, currency, ISR_SUBSCRIPTION_CODE[currency], example)) return True @api.depends('partner_id', 'company_id') def _compute_l10n_ch_show_subscription(self): for bank in self: if bank.partner_id: bank.l10n_ch_show_subscription = bank.partner_id.ref_company_ids.country_id.code =='CH' elif bank.company_id: bank.l10n_ch_show_subscription = bank.company_id.country_id.code == 'CH' else: bank.l10n_ch_show_subscription = self.env.company.country_id.code == 'CH' @api.depends('acc_number', 'acc_type') def _compute_sanitized_acc_number(self): #Only remove spaces in case it is not postal postal_banks = self.filtered(lambda b: b.acc_type == "postal") for bank in postal_banks: bank.sanitized_acc_number = bank.acc_number super(ResPartnerBank, self - postal_banks)._compute_sanitized_acc_number() @api.model def _get_supported_account_types(self): rslt = super(ResPartnerBank, self)._get_supported_account_types() rslt.append(('postal', _('Postal'))) return rslt @api.model def retrieve_acc_type(self, acc_number): """ Overridden method enabling the recognition of swiss postal bank account numbers. 
""" acc_number_split = "" # acc_number_split is needed to continue to recognize the account # as a postal account even if the difference if acc_number and " " in acc_number: acc_number_split = acc_number.split(" ")[0] if _is_l10n_ch_postal(acc_number) or (acc_number_split and _is_l10n_ch_postal(acc_number_split)): return 'postal' else: return super(ResPartnerBank, self).retrieve_acc_type(acc_number) @api.depends('acc_number', 'partner_id', 'acc_type') def _compute_l10n_ch_postal(self): for record in self: if record.acc_type == 'iban': record.l10n_ch_postal = self._retrieve_l10n_ch_postal(record.sanitized_acc_number) elif record.acc_type == 'postal': if record.acc_number and " " in record.acc_number: record.l10n_ch_postal = record.acc_number.split(" ")[0] else: record.l10n_ch_postal = record.acc_number # In case of ISR issuer, this number is not # unique and we fill acc_number with partner # name to give proper information to the user if record.partner_id and record.acc_number[:2] in ["01", "03"]: record.acc_number = ("{} {}").format(record.acc_number, record.partner_id.name) @api.model def _is_postfinance_iban(self, iban): """Postfinance IBAN have format CHXX 0900 0XXX XXXX XXXX K Where 09000 is the clearing number """ return iban.startswith('CH') and iban[4:9] == CLEARING @api.model def _pretty_postal_num(self, number): """format a postal account number or an ISR subscription number as per specifications with '-' separators. eg. 010001628 -> 01-162-8 """ if re.match('^[0-9]{2}-[0-9]{1,6}-[0-9]$', number or ''): return number currency_code = number[:2] middle_part = number[2:-1] trailing_cipher = number[-1] middle_part = middle_part.lstrip("0") return currency_code + '-' + middle_part + '-' + trailing_cipher @api.model def _retrieve_l10n_ch_postal(self, iban): """Reads a swiss postal account number from a an IBAN and returns it as a string. Returns None if no valid postal account number was found, or the given iban was not from Swiss Postfinance. 
CH09 0900 0000 1000 8060 7 -> 10-8060-7 """ if self._is_postfinance_iban(iban): # the IBAN corresponds to a swiss account return self._pretty_postal_num(iban[-9:]) return None def _get_qr_code_url(self, qr_method, amount, currency, debtor_partner, free_communication, structured_communication): if qr_method == 'ch_qr': qr_code_vals = self._l10n_ch_get_qr_vals(amount, currency, debtor_partner, free_communication, structured_communication) return '/report/barcode/?type=%s&value=%s&width=%s&height=%s&quiet=1&mask=ch_cross' % ('QR', werkzeug.urls.url_quote_plus('\n'.join(qr_code_vals)), 256, 256) return super()._get_qr_code_url(qr_method, amount, currency, debtor_partner, free_communication, structured_communication) def _l10n_ch_get_qr_vals(self, amount, currency, debtor_partner, free_communication, structured_communication): comment = "" if free_communication: comment = (free_communication[:137] + '...') if len(free_communication) > 140 else free_communication creditor_addr_1, creditor_addr_2 = self._get_partner_address_lines(self.partner_id) debtor_addr_1, debtor_addr_2 = self._get_partner_address_lines(debtor_partner) # Compute reference type (empty by default, only mandatory for QR-IBAN, # and must then be 27 characters-long, with mod10r check digit as the 27th one, # just like ISR number for invoices) reference_type = 'NON' reference = '' if self._is_qr_iban(): # _check_for_qr_code_errors ensures we can't have a QR-IBAN without a QR-reference here reference_type = 'QRR' reference = structured_communication currency = currency or self.currency_id or self.company_id.currency_id return [ 'SPC', # QR Type '0200', # Version '1', # Coding Type self.sanitized_acc_number, # IBAN 'K', # Creditor Address Type (self.acc_holder_name or self.partner_id.name)[:70], # Creditor Name creditor_addr_1, # Creditor Address Line 1 creditor_addr_2, # Creditor Address Line 2 '', # Creditor Postal Code (empty, since we're using combined addres elements) '', # Creditor Town (empty, since 
we're using combined addres elements) self.partner_id.country_id.code, # Creditor Country '', # Ultimate Creditor Address Type '', # Name '', # Ultimate Creditor Address Line 1 '', # Ultimate Creditor Address Line 2 '', # Ultimate Creditor Postal Code '', # Ultimate Creditor Town '', # Ultimate Creditor Country '{:.2f}'.format(amount), # Amount currency.name, # Currency 'K', # Ultimate Debtor Address Type debtor_partner.commercial_partner_id.name[:70], # Ultimate Debtor Name debtor_addr_1, # Ultimate Debtor Address Line 1 debtor_addr_2, # Ultimate Debtor Address Line 2 '', # Ultimate Debtor Postal Code (not to be provided for address type K) '', # Ultimate Debtor Postal City (not to be provided for address type K) debtor_partner.country_id.code, # Ultimate Debtor Postal Country reference_type, # Reference Type reference, # Reference comment, # Unstructured Message 'EPD', # Mandatory trailer part ] def _get_partner_address_lines(self, partner): """ Returns a tuple of two elements containing the address lines to use for this partner. Line 1 contains the street and number, line 2 contains zip and city. Those two lines are limited to 70 characters """ streets = [partner.street, partner.street2] line_1 = ' '.join(filter(None, streets)) line_2 = partner.zip + ' ' + partner.city return line_1[:70], line_2[:70] def _check_qr_iban_range(self, iban): if not iban or len(iban) < 9: return False iid_start_index = 4 iid_end_index = 8 iid = iban[iid_start_index : iid_end_index+1] return re.match('\d+', iid) \ and 30000 <= int(iid) <= 31999 # Those values for iid are reserved for QR-IBANs only def _is_qr_iban(self): """ Tells whether or not this bank account has a QR-IBAN account number. QR-IBANs are specific identifiers used in Switzerland as references in QR-codes. They are formed like regular IBANs, but are actually something different. 
""" self.ensure_one() return self.acc_type == 'iban' \ and self._check_qr_iban_range(self.sanitized_acc_number) @api.model def _is_qr_reference(self, reference): """ Checks whether the given reference is a QR-reference, i.e. it is made of 27 digits, the 27th being a mod10r check on the 26 previous ones. """ return reference \ and len(reference) == 27 \ and re.match('\d+$', reference) \ and reference == mod10r(reference[:-1]) def _eligible_for_qr_code(self, qr_method, debtor_partner, currency): if qr_method == 'ch_qr': return self.acc_type == 'iban' and \ self.partner_id.country_id.code == 'CH' and \ (not debtor_partner or debtor_partner.country_id.code == 'CH') \ and currency.name in ('EUR', 'CHF') return super()._eligible_for_qr_code(qr_method, debtor_partner, currency) def _check_for_qr_code_errors(self, qr_method, amount, currency, debtor_partner, free_communication, structured_communication): def _partner_fields_set(partner): return partner.zip and \ partner.city and \ partner.country_id.code and \ (partner.street or partner.street2) if qr_method == 'ch_qr': if not _partner_fields_set(self.partner_id): return _("The partner set on the bank account meant to receive the payment (%s) must have a complete postal address (street, zip, city and country).", self.acc_number) if debtor_partner and not _partner_fields_set(debtor_partner): return _("The partner the QR-code must have a complete postal address (street, zip, city and country).") if self._is_qr_iban() and not self._is_qr_reference(structured_communication): return _("When using a QR-IBAN as the destination account of a QR-code, the payment reference must be a QR-reference.") return super()._check_for_qr_code_errors(qr_method, amount, currency, debtor_partner, free_communication, structured_communication) @api.model def _get_available_qr_methods(self): rslt = super()._get_available_qr_methods() rslt.append(('ch_qr', _("Swiss QR bill"), 10)) return rslt
agpl-3.0
kenshay/ImageScript
ProgramData/Android/ADB/platform-tools/systrace/catapult/telemetry/third_party/web-page-replay/third_party/dns/rdtypes/ANY/ISDN.py
248
3233
# Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

# NOTE(review): this module is Python 2-only as written — to_wire/from_wire
# use chr()/ord() on str (py2 byte strings) and _cmp uses the py2 builtin
# cmp(); confirm the interpreter version before touching it.

import dns.exception
import dns.rdata
import dns.tokenizer

class ISDN(dns.rdata.Rdata):
    """ISDN record

    Wire format (RFC 1183): a character-string for the address, optionally
    followed by a second character-string for the subaddress.  Each
    character-string is a one-octet length prefix followed by that many
    octets.

    @ivar address: the ISDN address
    @type address: string
    @ivar subaddress: the ISDN subaddress (or '' if not present)
    @type subaddress: string
    @see: RFC 1183"""

    __slots__ = ['address', 'subaddress']

    def __init__(self, rdclass, rdtype, address, subaddress):
        super(ISDN, self).__init__(rdclass, rdtype)
        self.address = address
        self.subaddress = subaddress

    def to_text(self, origin=None, relativize=True, **kw):
        # Render as one or two quoted, escaped strings; the subaddress is
        # omitted entirely when empty.
        if self.subaddress:
            return '"%s" "%s"' % (dns.rdata._escapify(self.address),
                                  dns.rdata._escapify(self.subaddress))
        else:
            return '"%s"' % dns.rdata._escapify(self.address)

    def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
        # Parse an address token, then peek at the next token to decide
        # whether an optional subaddress follows on the same line.
        address = tok.get_string()
        t = tok.get()
        if not t.is_eol_or_eof():
            tok.unget(t)
            subaddress = tok.get_string()
        else:
            # No subaddress; push the EOL/EOF token back so get_eol()
            # below can consume it.
            tok.unget(t)
            subaddress = ''
        tok.get_eol()
        return cls(rdclass, rdtype, address, subaddress)

    from_text = classmethod(from_text)

    def to_wire(self, file, compress = None, origin = None):
        # Each field is written as <length octet><bytes>; a length prefix
        # limits each field to 255 octets, enforced by the asserts.
        l = len(self.address)
        assert l < 256
        byte = chr(l)
        file.write(byte)
        file.write(self.address)
        l = len(self.subaddress)
        if l > 0:
            # The subaddress character-string is only emitted when
            # non-empty (it is optional on the wire).
            assert l < 256
            byte = chr(l)
            file.write(byte)
            file.write(self.subaddress)

    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
        # Read the mandatory address character-string, validating that its
        # declared length fits inside the remaining rdata.
        l = ord(wire[current])
        current += 1
        rdlen -= 1
        if l > rdlen:
            raise dns.exception.FormError
        address = wire[current : current + l]
        current += l
        rdlen -= l
        if rdlen > 0:
            # Remaining octets must be exactly one subaddress
            # character-string; any length mismatch is a malformed record.
            l = ord(wire[current])
            current += 1
            rdlen -= 1
            if l != rdlen:
                raise dns.exception.FormError
            subaddress = wire[current : current + l]
        else:
            subaddress = ''
        return cls(rdclass, rdtype, address, subaddress)

    from_wire = classmethod(from_wire)

    def _cmp(self, other):
        # Order by address first, then subaddress (py2 cmp() semantics:
        # negative/zero/positive three-way result).
        v = cmp(self.address, other.address)
        if v == 0:
            v = cmp(self.subaddress, other.subaddress)
        return v
gpl-3.0
matiasdecarli/ansible-modules-core
files/template.py
105
3323
# this is a virtual module that is entirely implemented server side # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- module: template version_added: historical short_description: Templates a file out to a remote server. description: - Templates are processed by the Jinja2 templating language (U(http://jinja.pocoo.org/docs/)) - documentation on the template formatting can be found in the Template Designer Documentation (U(http://jinja.pocoo.org/docs/templates/)). - "Six additional variables can be used in templates: C(ansible_managed) (configurable via the C(defaults) section of C(ansible.cfg)) contains a string which can be used to describe the template name, host, modification time of the template file and the owner uid, C(template_host) contains the node name of the template's machine, C(template_uid) the owner, C(template_path) the absolute path of the template, C(template_fullpath) is the absolute path of the template, and C(template_run_date) is the date that the template was rendered. Note that including a string that uses a date in the template will result in the template being marked 'changed' each time." options: src: description: - Path of a Jinja2 formatted template on the local server. This can be a relative or absolute path. required: true dest: description: - Location to render the template to on the remote machine. 
required: true backup: description: - Create a backup file including the timestamp information so you can get the original file back if you somehow clobbered it incorrectly. required: false choices: [ "yes", "no" ] default: "no" force: description: - the default is C(yes), which will replace the remote file when contents are different than the source. If C(no), the file will only be transferred if the destination does not exist. required: false choices: [ "yes", "no" ] default: "yes" notes: - "Since Ansible version 0.9, templates are loaded with C(trim_blocks=True)." author: - Ansible Core Team - Michael DeHaan extends_documentation_fragment: - files - validate ''' EXAMPLES = ''' # Example from Ansible Playbooks - template: src=/mytemplates/foo.j2 dest=/etc/file.conf owner=bin group=wheel mode=0644 # The same example, but using symbolic modes equivalent to 0644 - template: src=/mytemplates/foo.j2 dest=/etc/file.conf owner=bin group=wheel mode="u=rw,g=r,o=r" # Copy a new "sudoers" file into place, after passing validation with visudo - template: src=/mine/sudoers dest=/etc/sudoers validate='visudo -cf %s' '''
gpl-3.0
googleapis/googleapis-gen
google/cloud/talent/v4beta1/talent-v4beta1-py/google/cloud/talent_v4beta1/services/completion/transports/grpc.py
1
11561
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.talent_v4beta1.types import completion_service from .base import CompletionTransport, DEFAULT_CLIENT_INFO class CompletionGrpcTransport(CompletionTransport): """gRPC backend transport for Completion. A service handles auto completion. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation and call it. It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
""" _stubs: Dict[str, Callable] def __init__(self, *, host: str = 'jobs.googleapis.com', credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. Args: host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if ``channel`` is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if ``channel`` is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. 
It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None else: if api_mtls_endpoint: host = api_mtls_endpoint # Create SSL credentials with client_cert_source or application # default SSL credentials. 
if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials else: if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, credentials=self._credentials, credentials_file=credentials_file, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) # Wrap messages. This must be done after self._grpc_channel exists self._prep_wrapped_messages(client_info) @classmethod def create_channel(cls, host: str = 'jobs.googleapis.com', credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. quota_project_id (Optional[str]): An optional project to use for billing and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: grpc.Channel: A gRPC channel object. Raises: google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, **kwargs ) @property def grpc_channel(self) -> grpc.Channel: """Return the channel designed to connect to this service. """ return self._grpc_channel @property def complete_query(self) -> Callable[ [completion_service.CompleteQueryRequest], completion_service.CompleteQueryResponse]: r"""Return a callable for the complete query method over gRPC. Completes the specified prefix with keyword suggestions. Intended for use by a job search auto- complete search box. Returns: Callable[[~.CompleteQueryRequest], ~.CompleteQueryResponse]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'complete_query' not in self._stubs: self._stubs['complete_query'] = self.grpc_channel.unary_unary( '/google.cloud.talent.v4beta1.Completion/CompleteQuery', request_serializer=completion_service.CompleteQueryRequest.serialize, response_deserializer=completion_service.CompleteQueryResponse.deserialize, ) return self._stubs['complete_query'] __all__ = ( 'CompletionGrpcTransport', )
apache-2.0
morbozoo/sonyHeadphones
tools/scripts/upgradevc11.py
14
1271
# Walk a source tree (root passed as argv[1]) and, for every 'vc10' project
# directory found, clone it to a sibling 'vc11' directory, then patch each
# .vcxproj inside the clone to use the v110_xp platform toolset.
#
# NOTE(review): Python 2 script (print statements); will not run on Python 3.

import os
import fileinput   # NOTE(review): unused in this script -- left in place (doc-only change)
import sys
import shutil
import xml.dom.minidom
import codecs
from os.path import join, getsize   # NOTE(review): getsize is also unused

for root, dirs, files in os.walk(sys.argv[1]):
    for name in dirs:
        if name == 'vc10':
            print join( root, name )
            # NOTE(review): copytree raises if <root>/vc11 already exists, so
            # the script is not re-runnable without deleting vc11 first --
            # confirm this is intended.
            shutil.copytree( join( root, name ), join( root, "vc11" ) )
            # Rebinds os.walk()'s 'files' variable; harmless because the walk
            # loop reassigns it each iteration, but potentially confusing.
            files = os.listdir( join( root, "vc11" ) )
            for file in files:
                filePath = join( join( root, "vc11" ), file )
                if os.path.splitext( filePath )[1] == '.vcxproj':
                    print "processing: " + filePath
                    dom = xml.dom.minidom.parse( filePath )
                    # Append <PlatformToolset>v110_xp</PlatformToolset> to
                    # every PropertyGroup labelled "Configuration".
                    for node in dom.getElementsByTagName( 'PropertyGroup' ):
                        if node.hasAttribute( 'Label' ) and (node.attributes['Label'].value == 'Configuration'):
                            x = dom.createElement( 'PlatformToolset' )
                            val = dom.createTextNode( 'v110_xp' )
                            x.appendChild( val )
                            node.appendChild( x )
                    # Rewrite the project file in place, UTF-8 with BOM
                    # ('utf-8-sig'), which Visual Studio expects.
                    os.remove( filePath )
                    f = codecs.open( filePath, 'w', 'utf-8-sig' )
                    dom.writexml( f, encoding='utf-8' )
                    f.close()
bsd-2-clause
rldleblanc/ceph-tools
osd_hunter.py
1
6255
#!/usr/bin/python
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
"""Hunt for slow Ceph OSDs.

Scans OSD logs for 'slow request' warnings, then correlates each slow client
op with the per-message log lines: secondary OSDs whose replies arrive within
``slow_threshold`` seconds are moved to the op's 'fast' list, the rest stay
'slow', and a per-OSD tally of slow occurrences is printed.

Ported to Python 3; fixes: missing ``import sys``, dedupe guard that compared
the timestamp instead of the op id, and truthy string defaults on the
store_true arguments.
"""

import argparse
import datetime
import glob
import gzip
import operator
import pprint
import re
import sys  # bug fix: search_logs() calls sys.exit() but sys was never imported

# Replies faster than this many seconds move an OSD off the "slow" list.
slow_threshold = 10  # seconds

# Nothing to change past here

verbose = None

# "slow request" warning: captures timestamp, client op id, and the
# comma-separated list of secondary OSD ids the primary is waiting on.
re_slow = re.compile(r'^(\d+-\d+-\d+\s+\d+:\d+:\d+\.\d+)\s+\w+\s+0.*slow.*(client\.\d+\.\d+:\d+).*from\s+(\d+(,\d+)*)')
# Incoming message line: captures timestamp, sender (osd.N or client), and
# the client op id the message belongs to.
re_io = re.compile(r'^(\d+-\d+-\d+\s+\d+:\d+:\d+\.\d+)\s+\w+\s+1.*<==.*(osd\.\d+|client).*(client\.\d+\.\d+:\d+).*')


def get_date(datestring):
    """Parse a 'YYYY-mm-dd HH:MM:SS.frac' log timestamp into a datetime."""
    nofrag, frag = datestring.split(".")
    date = datetime.datetime.strptime(nofrag, "%Y-%m-%d %H:%M:%S")
    frag = frag[:6]  # truncate to microseconds
    frag += (6 - len(frag)) * '0'  # right-pad so int(frag) is in microseconds
    return date.replace(microsecond=int(frag))


def get_log_files(args):
    """Return the OSD log paths selected by --all/--osd, honoring --zip."""
    pattern = "ceph-osd.*" if args.all else "ceph-osd." + str(args.osd)
    suffix = ".log*" if args.zip else ".log"
    return glob.glob(args.logdir + pattern + suffix)


def find_blocked(args):
    """Collect slow-op data from the selected logs and print the report."""
    slow_osds = {}
    if args.all:
        if verbose >= 1:
            print("Searching all OSDs.")
        for logfile in get_log_files(args):
            result = search_logs(logfile)
            if result:
                slow_osds.update(result)
    else:
        if verbose >= 1:
            print("Going to search OSD " + str(args.osd) + ".")
        logfiles = get_log_files(args)
        # robustness fix: original indexed [0] and crashed when no log matched
        slow_osds = search_logs(logfiles[0]) if logfiles else {}
    if verbose >= 3:
        pprint.pprint(slow_osds)
    if len(slow_osds) > 0:
        print_output(slow_osds)
    else:
        print("Could not find any slow OSDs.")


def print_output(slow_osds):
    """Tally how often each OSD stayed slow and print the counts, ascending.

    Only ops whose start time was found (so latency could be measured) and
    that still have OSDs on their 'slow' list contribute to the tally.
    """
    osd_report = {}
    for entry in slow_osds.values():
        if entry.get('start', None) and entry.get('slow', None):
            for osd in entry['slow']:
                osd_report[osd] = osd_report.get(osd, 0) + 1
    osd_report = sorted(osd_report.items(), key=operator.itemgetter(1))
    if len(osd_report) > 0:
        for osd, count in osd_report:
            print("OSD " + str(osd) + ": " + str(count))
    else:
        print("Could not find any slow OSDs.")


def search_logs(logfile):
    """Scan one log file (gzipped if its name contains 'gz'); return the map."""
    if verbose >= 1:
        print("Searching through " + logfile + "...")
    try:
        opener = gzip.open if 'gz' in logfile else open
        # Text mode so the regexes (str patterns) match the lines read.
        with opener(logfile, 'rt') as f:
            return scan_file(f)
    except OSError:  # bug fix: open() errors (py2 IOError) were not caught
        print("Could not open " + logfile + " for reading.")
        sys.exit(1)


def scan_file(fd):
    """Build {client_op_id: {'start', 'slow', 'fast'}} from one open log file.

    First pass records every slow op and the secondary OSDs blamed for it.
    Second pass finds each op's start time (the client message) and, for each
    osd.N reply, moves the OSD to 'fast' if it answered within
    ``slow_threshold`` seconds.
    """
    slow_osds = {}
    for line in fd:
        matches = re_slow.match(line)
        # bug fix: the original guard tested group(1) (the timestamp) against
        # keys that are op ids, so a repeatedly-reported op was reset each
        # time; deduplicate on the op id (group(2)) instead.
        if matches and matches.group(2) not in slow_osds:
            slow_osds[matches.group(2)] = {'slow': matches.group(3).split(",")}
    if len(slow_osds) > 0:
        # Jump back to the start of the file for the correlation pass.
        fd.seek(0)
        for line in fd:
            matches = re_io.match(line)
            if matches and matches.group(3) in slow_osds:
                entry = slow_osds[matches.group(3)]
                if 'client' in matches.group(2):
                    entry['start'] = get_date(matches.group(1))
                elif 'osd' in matches.group(2) and entry.get('start', None):
                    latency = get_date(matches.group(1)) - entry['start']
                    osd = matches.group(2).split(".")[1]
                    if latency < datetime.timedelta(seconds=slow_threshold):
                        if osd in entry['slow']:
                            entry['slow'].remove(osd)
                        if not entry.get('fast', None):
                            entry['fast'] = [osd]
                        elif osd not in entry['fast']:
                            entry['fast'] += [osd]
    return slow_osds


def main():
    """Parse command-line arguments and kick off the search."""
    global verbose
    parser = argparse.ArgumentParser(description="Hunts for slow OSDs by looking through OSD logs.")
    osdgroup = parser.add_mutually_exclusive_group(required=True)
    osdgroup.add_argument('-o', '--osd', type=int,
                          help="an OSD on this host that is reporting slow I/O.")
    # bug fix: defaults below were the *string* "false", which is truthy.
    osdgroup.add_argument('-a', '--all', action="store_true", default=False,
                          help="Search logs of all OSDs in logdir.")
    parser.add_argument('-z', '--zip', action="store_true", default=False,
                        help="Also search through compressed logfiles.")
    parser.add_argument('-l', '--logdir', default="/var/log/ceph/",
                        help="Location of log files. Defaults to /var/log/ceph/.")
    parser.add_argument('-v', '--verbose', action="count", default=0,
                        help="Increase verbosity, more flags means more output.")
    args = parser.parse_args()

    verbose = args.verbose
    if verbose >= 3:
        pprint.pprint(args)

    if args.all or args.osd:
        find_blocked(args)

if __name__ == "__main__":
    main()
lgpl-3.0
henaras/horizon
openstack_dashboard/dashboards/project/loadbalancers/workflows.py
20
30064
# Copyright 2013, Big Switch Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging from django.utils.translation import ugettext_lazy as _ from horizon import exceptions from horizon import forms from horizon.utils import validators from horizon import workflows from openstack_dashboard import api from openstack_dashboard.dashboards.project.loadbalancers import utils AVAILABLE_PROTOCOLS = ('HTTP', 'HTTPS', 'TCP') AVAILABLE_METHODS = ('ROUND_ROBIN', 'LEAST_CONNECTIONS', 'SOURCE_IP') LOG = logging.getLogger(__name__) class AddPoolAction(workflows.Action): name = forms.CharField(max_length=80, label=_("Name")) description = forms.CharField( initial="", required=False, max_length=80, label=_("Description")) # provider is optional because some LBaaS implemetation does # not support service-type extension. 
provider = forms.ChoiceField(label=_("Provider"), required=False) subnet_id = forms.ChoiceField(label=_("Subnet")) protocol = forms.ChoiceField(label=_("Protocol")) lb_method = forms.ChoiceField(label=_("Load Balancing Method")) admin_state_up = forms.ChoiceField(choices=[(True, _('UP')), (False, _('DOWN'))], label=_("Admin State")) def __init__(self, request, *args, **kwargs): super(AddPoolAction, self).__init__(request, *args, **kwargs) tenant_id = request.user.tenant_id subnet_id_choices = [('', _("Select a Subnet"))] try: networks = api.neutron.network_list_for_tenant(request, tenant_id) except Exception: exceptions.handle(request, _('Unable to retrieve networks list.')) networks = [] for n in networks: for s in n['subnets']: name = "%s (%s)" % (s.name, s.cidr) subnet_id_choices.append((s.id, name)) self.fields['subnet_id'].choices = subnet_id_choices protocol_choices = [('', _("Select a Protocol"))] [protocol_choices.append((p, p)) for p in AVAILABLE_PROTOCOLS] self.fields['protocol'].choices = protocol_choices lb_method_choices = [('', _("Select a Method"))] [lb_method_choices.append((m, m)) for m in AVAILABLE_METHODS] self.fields['lb_method'].choices = lb_method_choices # provider choice try: if api.neutron.is_extension_supported(request, 'service-type'): provider_list = api.neutron.provider_list(request) providers = [p for p in provider_list if p['service_type'] == 'LOADBALANCER'] else: providers = None except Exception: exceptions.handle(request, _('Unable to retrieve providers list.')) providers = [] if providers: default_providers = [p for p in providers if p.get('default')] if default_providers: default_provider = default_providers[0]['name'] else: default_provider = None provider_choices = [(p['name'], p['name']) for p in providers if p['name'] != default_provider] if default_provider: provider_choices.insert( 0, (default_provider, _("%s (default)") % default_provider)) else: if providers is None: msg = _("Provider for Load Balancer is not supported") 
else: msg = _("No provider is available") provider_choices = [('', msg)] self.fields['provider'].widget.attrs['readonly'] = True self.fields['provider'].choices = provider_choices class Meta(object): name = _("Add New Pool") permissions = ('openstack.services.network',) help_text_template = 'project/loadbalancers/_create_pool_help.html' class AddPoolStep(workflows.Step): action_class = AddPoolAction contributes = ("name", "description", "subnet_id", "provider", "protocol", "lb_method", "admin_state_up") def contribute(self, data, context): context = super(AddPoolStep, self).contribute(data, context) context['admin_state_up'] = (context['admin_state_up'] == 'True') if data: return context class AddPool(workflows.Workflow): slug = "addpool" name = _("Add Pool") finalize_button_name = _("Add") success_message = _('Added pool "%s".') failure_message = _('Unable to add pool "%s".') success_url = "horizon:project:loadbalancers:index" default_steps = (AddPoolStep,) def format_status_message(self, message): name = self.context.get('name') return message % name def handle(self, request, context): try: api.lbaas.pool_create(request, **context) return True except Exception: return False class AddVipAction(workflows.Action): name = forms.CharField(max_length=80, label=_("Name")) description = forms.CharField( initial="", required=False, max_length=80, label=_("Description")) subnet_id = forms.ChoiceField(label=_("VIP Subnet"), initial="", required=False) address = forms.IPField(label=_("Specify a free IP address " "from the selected subnet"), version=forms.IPv4, mask=False, required=False) protocol_port = forms.IntegerField( label=_("Protocol Port"), min_value=1, help_text=_("Enter an integer value " "between 1 and 65535."), validators=[validators.validate_port_range]) protocol = forms.ChoiceField(label=_("Protocol")) session_persistence = forms.ChoiceField( required=False, initial={}, label=_("Session Persistence"), widget=forms.Select(attrs={ 'class': 'switchable', 
'data-slug': 'persistence' })) cookie_name = forms.CharField( initial="", required=False, max_length=80, label=_("Cookie Name"), help_text=_("Required for APP_COOKIE persistence;" " Ignored otherwise."), widget=forms.TextInput(attrs={ 'class': 'switched', 'data-switch-on': 'persistence', 'data-persistence-app_cookie': 'APP_COOKIE', })) connection_limit = forms.IntegerField( required=False, min_value=-1, label=_("Connection Limit"), help_text=_("Maximum number of connections allowed " "for the VIP or '-1' if the limit is not set")) admin_state_up = forms.ChoiceField(choices=[(True, _('UP')), (False, _('DOWN'))], label=_("Admin State")) def __init__(self, request, *args, **kwargs): super(AddVipAction, self).__init__(request, *args, **kwargs) tenant_id = request.user.tenant_id subnet_id_choices = [('', _("Select a Subnet"))] try: networks = api.neutron.network_list_for_tenant(request, tenant_id) except Exception: exceptions.handle(request, _('Unable to retrieve networks list.')) networks = [] for n in networks: for s in n['subnets']: name = "%s (%s)" % (s.name, s.cidr) subnet_id_choices.append((s.id, name)) self.fields['subnet_id'].choices = subnet_id_choices protocol_choices = [('', _("Select a Protocol"))] [protocol_choices.append((p, p)) for p in AVAILABLE_PROTOCOLS] self.fields['protocol'].choices = protocol_choices session_persistence_choices = [('', _("No Session Persistence"))] for mode in ('SOURCE_IP', 'HTTP_COOKIE', 'APP_COOKIE'): session_persistence_choices.append((mode.lower(), mode)) self.fields[ 'session_persistence'].choices = session_persistence_choices def clean(self): cleaned_data = super(AddVipAction, self).clean() persistence = cleaned_data.get('session_persistence') if persistence: cleaned_data['session_persistence'] = persistence.upper() if (cleaned_data.get('session_persistence') == 'APP_COOKIE' and not cleaned_data.get('cookie_name')): msg = _('Cookie name is required for APP_COOKIE persistence.') self._errors['cookie_name'] = 
self.error_class([msg]) return cleaned_data class Meta(object): name = _("Specify VIP") permissions = ('openstack.services.network',) help_text = _("Create a VIP for this pool. " "Assign a name, description, IP address, port, " "and maximum connections allowed for the VIP. " "Choose the protocol and session persistence " "method for the VIP. " "Admin State is UP (checked) by default.") class AddVipStep(workflows.Step): action_class = AddVipAction depends_on = ("pool_id", "subnet") contributes = ("name", "description", "subnet_id", "address", "protocol_port", "protocol", "session_persistence", "cookie_name", "connection_limit", "admin_state_up") def contribute(self, data, context): context = super(AddVipStep, self).contribute(data, context) context['admin_state_up'] = (context['admin_state_up'] == 'True') return context class AddVip(workflows.Workflow): slug = "addvip" name = _("Add VIP") finalize_button_name = _("Add") success_message = _('Added VIP "%s".') failure_message = _('Unable to add VIP "%s".') success_url = "horizon:project:loadbalancers:index" default_steps = (AddVipStep,) def format_status_message(self, message): name = self.context.get('name') return message % name def handle(self, request, context): if context['subnet_id'] == '': try: pool = api.lbaas.pool_get(request, context['pool_id']) context['subnet_id'] = pool['subnet_id'] except Exception: context['subnet_id'] = None self.failure_message = _( 'Unable to retrieve the specified pool. 
' 'Unable to add VIP "%s".') return False if context['session_persistence']: stype = context['session_persistence'] if stype == 'APP_COOKIE': cookie = context['cookie_name'] context['session_persistence'] = {'type': stype, 'cookie_name': cookie} else: context['session_persistence'] = {'type': stype} else: context['session_persistence'] = {} try: api.lbaas.vip_create(request, **context) return True except Exception: return False class AddMemberAction(workflows.Action): pool_id = forms.ChoiceField(label=_("Pool")) member_type = forms.ChoiceField( label=_("Member Source"), choices=[('server_list', _("Select from active instances")), ('member_address', _("Specify member IP address"))], required=False, widget=forms.Select(attrs={ 'class': 'switchable', 'data-slug': 'membertype' })) members = forms.MultipleChoiceField( label=_("Member(s)"), required=False, initial=["default"], widget=forms.SelectMultiple(attrs={ 'class': 'switched', 'data-switch-on': 'membertype', 'data-membertype-server_list': _("Member(s)"), }), help_text=_("Select members for this pool ")) address = forms.IPField(required=False, label=_("Member address"), help_text=_("Specify member IP address"), widget=forms.TextInput(attrs={ 'class': 'switched', 'data-switch-on': 'membertype', 'data-membertype-member_address': _("Member address"), }), initial="", version=forms.IPv4 | forms.IPv6, mask=False) weight = forms.IntegerField( max_value=256, min_value=1, label=_("Weight"), required=False, help_text=_("Relative part of requests this pool member serves " "compared to others. \nThe same weight will be applied to " "all the selected members and can be modified later. " "Weight must be in the range 1 to 256.") ) protocol_port = forms.IntegerField( label=_("Protocol Port"), min_value=1, help_text=_("Enter an integer value between 1 and 65535. 
" "The same port will be used for all the selected " "members and can be modified later."), validators=[validators.validate_port_range] ) admin_state_up = forms.ChoiceField(choices=[(True, _('UP')), (False, _('DOWN'))], label=_("Admin State")) def __init__(self, request, *args, **kwargs): super(AddMemberAction, self).__init__(request, *args, **kwargs) pool_id_choices = [('', _("Select a Pool"))] try: tenant_id = self.request.user.tenant_id pools = api.lbaas.pool_list(request, tenant_id=tenant_id) except Exception: pools = [] exceptions.handle(request, _('Unable to retrieve pools list.')) pools = sorted(pools, key=lambda pool: pool.name) for p in pools: pool_id_choices.append((p.id, p.name)) self.fields['pool_id'].choices = pool_id_choices members_choices = [] try: servers, has_more = api.nova.server_list(request) except Exception: servers = [] exceptions.handle(request, _('Unable to retrieve instances list.')) if len(servers) == 0: self.fields['members'].label = _( "No servers available. 
To add a member, you " "need at least one running instance.") self.fields['pool_id'].required = False self.fields['protocol_port'].required = False return for m in servers: members_choices.append((m.id, m.name)) self.fields['members'].choices = sorted( members_choices, key=lambda member: member[1]) def clean(self): cleaned_data = super(AddMemberAction, self).clean() if (cleaned_data.get('member_type') == 'server_list' and not cleaned_data.get('members')): msg = _('At least one member must be specified') self._errors['members'] = self.error_class([msg]) elif (cleaned_data.get('member_type') == 'member_address' and not cleaned_data.get('address')): msg = _('Member IP address must be specified') self._errors['address'] = self.error_class([msg]) return cleaned_data class Meta(object): name = _("Add New Member") permissions = ('openstack.services.network',) help_text = _("Add member(s) to the selected pool.\n\n" "Choose one or more listed instances to be " "added to the pool as member(s). " "Assign a numeric weight and port number for the " "selected member(s) to operate(s) on; e.g., 80. 
\n\n" "Only one port can be associated with " "each instance.") class AddMemberStep(workflows.Step): action_class = AddMemberAction contributes = ("pool_id", "member_type", "members", "address", "protocol_port", "weight", "admin_state_up") def contribute(self, data, context): context = super(AddMemberStep, self).contribute(data, context) context['admin_state_up'] = (context['admin_state_up'] == 'True') return context class AddMember(workflows.Workflow): slug = "addmember" name = _("Add Member") finalize_button_name = _("Add") success_message = _('Added member(s).') failure_message = _('Unable to add member(s)') success_url = "horizon:project:loadbalancers:index" default_steps = (AddMemberStep,) def handle(self, request, context): if context['member_type'] == 'server_list': try: pool = api.lbaas.pool_get(request, context['pool_id']) subnet_id = pool['subnet_id'] except Exception: self.failure_message = _('Unable to retrieve ' 'the specified pool.') return False for m in context['members']: params = {'device_id': m} try: plist = api.neutron.port_list(request, **params) except Exception: return False # Sort port list for each member. This is needed to avoid # attachment of random ports in case of creation of several # members attached to several networks. plist = sorted(plist, key=lambda port: port.network_id) psubnet = [p for p in plist for ips in p.fixed_ips if ips['subnet_id'] == subnet_id] # If possible, select a port on pool subnet. 
if psubnet: selected_port = psubnet[0] elif plist: selected_port = plist[0] else: selected_port = None if selected_port: context['address'] = \ selected_port.fixed_ips[0]['ip_address'] try: api.lbaas.member_create(request, **context).id except Exception as e: msg = self.failure_message LOG.info('%s: %s' % (msg, e)) return False return True else: try: context['member_id'] = api.lbaas.member_create( request, **context).id return True except Exception as e: msg = self.failure_message LOG.info('%s: %s' % (msg, e)) return False class AddMonitorAction(workflows.Action): type = forms.ChoiceField( label=_("Type"), choices=[('ping', _('PING')), ('tcp', _('TCP')), ('http', _('HTTP')), ('https', _('HTTPS'))], widget=forms.Select(attrs={ 'class': 'switchable', 'data-slug': 'type' })) delay = forms.IntegerField( min_value=1, label=_("Delay"), help_text=_("The minimum time in seconds between regular checks " "of a member. It must be greater than or equal to " "timeout")) timeout = forms.IntegerField( min_value=1, label=_("Timeout"), help_text=_("The maximum time in seconds for a monitor to wait " "for a reply. 
It must be less than or equal to delay")) max_retries = forms.IntegerField( max_value=10, min_value=1, label=_("Max Retries (1~10)"), help_text=_("Number of permissible failures before changing " "the status of member to inactive")) http_method = forms.ChoiceField( initial="GET", required=False, choices=[('GET', _('GET'))], label=_("HTTP Method"), help_text=_("HTTP method used to check health status of a member"), widget=forms.Select(attrs={ 'class': 'switched', 'data-switch-on': 'type', 'data-type-http': _('HTTP Method'), 'data-type-https': _('HTTP Method') })) url_path = forms.CharField( initial="/", required=False, max_length=80, label=_("URL"), widget=forms.TextInput(attrs={ 'class': 'switched', 'data-switch-on': 'type', 'data-type-http': _('URL'), 'data-type-https': _('URL') })) expected_codes = forms.RegexField( initial="200", required=False, max_length=80, regex=r'^(\d{3}(\s*,\s*\d{3})*)$|^(\d{3}-\d{3})$', label=_("Expected HTTP Status Codes"), help_text=_("Expected code may be a single value (e.g. 200), " "a list of values (e.g. 200, 202), " "or range of values (e.g. 
200-204)"), widget=forms.TextInput(attrs={ 'class': 'switched', 'data-switch-on': 'type', 'data-type-http': _('Expected HTTP Status Codes'), 'data-type-https': _('Expected HTTP Status Codes') })) admin_state_up = forms.ChoiceField(choices=[(True, _('UP')), (False, _('DOWN'))], label=_("Admin State")) def __init__(self, request, *args, **kwargs): super(AddMonitorAction, self).__init__(request, *args, **kwargs) def clean(self): cleaned_data = super(AddMonitorAction, self).clean() type_opt = cleaned_data.get('type') delay = cleaned_data.get('delay') timeout = cleaned_data.get('timeout') if not delay >= timeout: msg = _('Delay must be greater than or equal to Timeout') self._errors['delay'] = self.error_class([msg]) if type_opt in ['http', 'https']: http_method_opt = cleaned_data.get('http_method') url_path = cleaned_data.get('url_path') expected_codes = cleaned_data.get('expected_codes') if not http_method_opt: msg = _('Please choose a HTTP method') self._errors['http_method'] = self.error_class([msg]) if not url_path: msg = _('Please specify an URL') self._errors['url_path'] = self.error_class([msg]) if not expected_codes: msg = _('Please enter a single value (e.g. 200), ' 'a list of values (e.g. 200, 202), ' 'or range of values (e.g. 200-204)') self._errors['expected_codes'] = self.error_class([msg]) return cleaned_data class Meta(object): name = _("Add New Monitor") permissions = ('openstack.services.network',) help_text = _("Create a monitor template.\n\n" "Select type of monitoring. " "Specify delay, timeout, and retry limits " "required by the monitor. 
" "Specify method, URL path, and expected " "HTTP codes upon success.") class AddMonitorStep(workflows.Step): action_class = AddMonitorAction contributes = ("type", "delay", "timeout", "max_retries", "http_method", "url_path", "expected_codes", "admin_state_up") def contribute(self, data, context): context = super(AddMonitorStep, self).contribute(data, context) context['admin_state_up'] = (context['admin_state_up'] == 'True') if data: return context class AddMonitor(workflows.Workflow): slug = "addmonitor" name = _("Add Monitor") finalize_button_name = _("Add") success_message = _('Added monitor') failure_message = _('Unable to add monitor') success_url = "horizon:project:loadbalancers:index" default_steps = (AddMonitorStep,) def handle(self, request, context): try: context['monitor_id'] = api.lbaas.pool_health_monitor_create( request, **context).get('id') return True except Exception: exceptions.handle(request, _("Unable to add monitor.")) return False class AddPMAssociationAction(workflows.Action): monitor_id = forms.ChoiceField(label=_("Monitor")) def __init__(self, request, *args, **kwargs): super(AddPMAssociationAction, self).__init__(request, *args, **kwargs) def populate_monitor_id_choices(self, request, context): self.fields['monitor_id'].label = _("Select a monitor template " "for %s") % context['pool_name'] monitor_id_choices = [('', _("Select a Monitor"))] try: tenant_id = self.request.user.tenant_id monitors = api.lbaas.pool_health_monitor_list(request, tenant_id=tenant_id) pool_monitors_ids = [pm.id for pm in context['pool_monitors']] for m in monitors: if m.id not in pool_monitors_ids: display_name = utils.get_monitor_display_name(m) monitor_id_choices.append((m.id, display_name)) except Exception: exceptions.handle(request, _('Unable to retrieve monitors list.')) self.fields['monitor_id'].choices = monitor_id_choices return monitor_id_choices class Meta(object): name = _("Association Details") permissions = ('openstack.services.network',) help_text = 
_("Associate a health monitor with target pool.") class AddPMAssociationStep(workflows.Step): action_class = AddPMAssociationAction depends_on = ("pool_id", "pool_name", "pool_monitors") contributes = ("monitor_id",) def contribute(self, data, context): context = super(AddPMAssociationStep, self).contribute(data, context) if data: return context class AddPMAssociation(workflows.Workflow): slug = "addassociation" name = _("Associate Monitor") finalize_button_name = _("Associate") success_message = _('Associated monitor.') failure_message = _('Unable to associate monitor.') success_url = "horizon:project:loadbalancers:index" default_steps = (AddPMAssociationStep,) def handle(self, request, context): try: context['monitor_id'] = api.lbaas.pool_monitor_association_create( request, **context) return True except Exception: exceptions.handle(request, _("Unable to associate monitor.")) return False class DeletePMAssociationAction(workflows.Action): monitor_id = forms.ChoiceField(label=_("Monitor")) def __init__(self, request, *args, **kwargs): super(DeletePMAssociationAction, self).__init__( request, *args, **kwargs) def populate_monitor_id_choices(self, request, context): self.fields['monitor_id'].label = (_("Select a health monitor of %s") % context['pool_name']) monitor_id_choices = [('', _("Select a Monitor"))] try: monitors = api.lbaas.pool_health_monitor_list(request) pool_monitors_ids = [pm.id for pm in context['pool_monitors']] for m in monitors: if m.id in pool_monitors_ids: display_name = utils.get_monitor_display_name(m) monitor_id_choices.append((m.id, display_name)) except Exception: exceptions.handle(request, _('Unable to retrieve monitors list.')) self.fields['monitor_id'].choices = monitor_id_choices return monitor_id_choices class Meta(object): name = _("Association Details") permissions = ('openstack.services.network',) help_text = _("Disassociate a health monitor from target pool. 
") class DeletePMAssociationStep(workflows.Step): action_class = DeletePMAssociationAction depends_on = ("pool_id", "pool_name", "pool_monitors") contributes = ("monitor_id",) def contribute(self, data, context): context = super(DeletePMAssociationStep, self).contribute( data, context) if data: return context class DeletePMAssociation(workflows.Workflow): slug = "deleteassociation" name = _("Disassociate Monitor") finalize_button_name = _("Disassociate") success_message = _('Disassociated monitor.') failure_message = _('Unable to disassociate monitor.') success_url = "horizon:project:loadbalancers:index" default_steps = (DeletePMAssociationStep,) def handle(self, request, context): try: context['monitor_id'] = api.lbaas.pool_monitor_association_delete( request, **context) return True except Exception: exceptions.handle(request, _("Unable to disassociate monitor.")) return False
apache-2.0
razvanphp/arangodb
3rdParty/V8-3.31.74.1/third_party/python_26/Lib/lib2to3/fixes/fix_except.py
52
3243
"""Fixer for except statements with named exceptions. The following cases will be converted: - "except E, T:" where T is a name: except E as T: - "except E, T:" where T is not a name, tuple or list: except E as t: T = t This is done because the target of an "except" clause must be a name. - "except E, T:" where T is a tuple or list literal: except E as t: T = t.args """ # Author: Collin Winter # Local imports from .. import pytree from ..pgen2 import token from .. import fixer_base from ..fixer_util import Assign, Attr, Name, is_tuple, is_list def find_excepts(nodes): for i, n in enumerate(nodes): if isinstance(n, pytree.Node): if n.children[0].value == 'except': yield (n, nodes[i+2]) class FixExcept(fixer_base.BaseFix): PATTERN = """ try_stmt< 'try' ':' suite cleanup=(except_clause ':' suite)+ tail=(['except' ':' suite] ['else' ':' suite] ['finally' ':' suite]) > """ def transform(self, node, results): syms = self.syms tail = [n.clone() for n in results["tail"]] try_cleanup = [ch.clone() for ch in results["cleanup"]] for except_clause, e_suite in find_excepts(try_cleanup): if len(except_clause.children) == 4: (E, comma, N) = except_clause.children[1:4] comma.replace(Name("as", prefix=" ")) if N.type != token.NAME: # Generate a new N for the except clause new_N = Name(self.new_name(), prefix=" ") target = N.clone() target.set_prefix("") N.replace(new_N) new_N = new_N.clone() # Insert "old_N = new_N" as the first statement in # the except body. 
This loop skips leading whitespace # and indents #TODO(cwinter) suite-cleanup suite_stmts = e_suite.children for i, stmt in enumerate(suite_stmts): if isinstance(stmt, pytree.Node): break # The assignment is different if old_N is a tuple or list # In that case, the assignment is old_N = new_N.args if is_tuple(N) or is_list(N): assign = Assign(target, Attr(new_N, Name('args'))) else: assign = Assign(target, new_N) #TODO(cwinter) stopgap until children becomes a smart list for child in reversed(suite_stmts[:i]): e_suite.insert_child(0, child) e_suite.insert_child(i, assign) elif N.get_prefix() == "": # No space after a comma is legal; no space after "as", # not so much. N.set_prefix(" ") #TODO(cwinter) fix this when children becomes a smart list children = [c.clone() for c in node.children[:3]] + try_cleanup + tail return pytree.Node(node.type, children)
apache-2.0
Andygmb/python-social-auth
social/exceptions.py
65
3143
class SocialAuthBaseException(ValueError): """Base class for pipeline exceptions.""" pass class WrongBackend(SocialAuthBaseException): def __init__(self, backend_name): self.backend_name = backend_name def __str__(self): return 'Incorrect authentication service "{0}"'.format( self.backend_name ) class MissingBackend(WrongBackend): def __str__(self): return 'Missing backend "{0}" entry'.format(self.backend_name) class NotAllowedToDisconnect(SocialAuthBaseException): """User is not allowed to disconnect it's social account.""" pass class AuthException(SocialAuthBaseException): """Auth process exception.""" def __init__(self, backend, *args, **kwargs): self.backend = backend super(AuthException, self).__init__(*args, **kwargs) class AuthFailed(AuthException): """Auth process failed for some reason.""" def __str__(self): msg = super(AuthFailed, self).__str__() if msg == 'access_denied': return 'Authentication process was canceled' return 'Authentication failed: {0}'.format(msg) class AuthCanceled(AuthException): """Auth process was canceled by user.""" def __str__(self): return 'Authentication process canceled' class AuthUnknownError(AuthException): """Unknown auth process error.""" def __str__(self): msg = super(AuthUnknownError, self).__str__() return 'An unknown error happened while authenticating {0}'.format(msg) class AuthTokenError(AuthException): """Auth token error.""" def __str__(self): msg = super(AuthTokenError, self).__str__() return 'Token error: {0}'.format(msg) class AuthMissingParameter(AuthException): """Missing parameter needed to start or complete the process.""" def __init__(self, backend, parameter, *args, **kwargs): self.parameter = parameter super(AuthMissingParameter, self).__init__(backend, *args, **kwargs) def __str__(self): return 'Missing needed parameter {0}'.format(self.parameter) class AuthStateMissing(AuthException): """State parameter is incorrect.""" def __str__(self): return 'Session value state missing.' 
class AuthStateForbidden(AuthException): """State parameter is incorrect.""" def __str__(self): return 'Wrong state parameter given.' class AuthAlreadyAssociated(AuthException): """A different user has already associated the target social account""" pass class AuthTokenRevoked(AuthException): """User revoked the access_token in the provider.""" def __str__(self): return 'User revoke access to the token' class AuthForbidden(AuthException): """Authentication for this user is forbidden""" def __str__(self): return 'Your credentials aren\'t allowed' class AuthUnreachableProvider(AuthException): """Cannot reach the provider""" def __str__(self): return 'The authentication provider could not be reached' class InvalidEmail(AuthException): def __str__(self): return 'Email couldn\'t be validated'
bsd-3-clause
wolfram74/numerical_methods_iserles_notes
venv/lib/python2.7/site-packages/sympy/mpmath/tests/test_bitwise.py
35
7152
""" Test bit-level integer and mpf operations """ from sympy.mpmath import * from sympy.mpmath.libmp import * def test_bitcount(): assert bitcount(0) == 0 assert bitcount(1) == 1 assert bitcount(7) == 3 assert bitcount(8) == 4 assert bitcount(2**100) == 101 assert bitcount(2**100-1) == 100 def test_trailing(): assert trailing(0) == 0 assert trailing(1) == 0 assert trailing(2) == 1 assert trailing(7) == 0 assert trailing(8) == 3 assert trailing(2**100) == 100 assert trailing(2**100-1) == 0 def test_round_down(): assert from_man_exp(0, -4, 4, round_down)[:3] == (0, 0, 0) assert from_man_exp(0xf0, -4, 4, round_down)[:3] == (0, 15, 0) assert from_man_exp(0xf1, -4, 4, round_down)[:3] == (0, 15, 0) assert from_man_exp(0xff, -4, 4, round_down)[:3] == (0, 15, 0) assert from_man_exp(-0xf0, -4, 4, round_down)[:3] == (1, 15, 0) assert from_man_exp(-0xf1, -4, 4, round_down)[:3] == (1, 15, 0) assert from_man_exp(-0xff, -4, 4, round_down)[:3] == (1, 15, 0) def test_round_up(): assert from_man_exp(0, -4, 4, round_up)[:3] == (0, 0, 0) assert from_man_exp(0xf0, -4, 4, round_up)[:3] == (0, 15, 0) assert from_man_exp(0xf1, -4, 4, round_up)[:3] == (0, 1, 4) assert from_man_exp(0xff, -4, 4, round_up)[:3] == (0, 1, 4) assert from_man_exp(-0xf0, -4, 4, round_up)[:3] == (1, 15, 0) assert from_man_exp(-0xf1, -4, 4, round_up)[:3] == (1, 1, 4) assert from_man_exp(-0xff, -4, 4, round_up)[:3] == (1, 1, 4) def test_round_floor(): assert from_man_exp(0, -4, 4, round_floor)[:3] == (0, 0, 0) assert from_man_exp(0xf0, -4, 4, round_floor)[:3] == (0, 15, 0) assert from_man_exp(0xf1, -4, 4, round_floor)[:3] == (0, 15, 0) assert from_man_exp(0xff, -4, 4, round_floor)[:3] == (0, 15, 0) assert from_man_exp(-0xf0, -4, 4, round_floor)[:3] == (1, 15, 0) assert from_man_exp(-0xf1, -4, 4, round_floor)[:3] == (1, 1, 4) assert from_man_exp(-0xff, -4, 4, round_floor)[:3] == (1, 1, 4) def test_round_ceiling(): assert from_man_exp(0, -4, 4, round_ceiling)[:3] == (0, 0, 0) assert from_man_exp(0xf0, -4, 4, 
round_ceiling)[:3] == (0, 15, 0) assert from_man_exp(0xf1, -4, 4, round_ceiling)[:3] == (0, 1, 4) assert from_man_exp(0xff, -4, 4, round_ceiling)[:3] == (0, 1, 4) assert from_man_exp(-0xf0, -4, 4, round_ceiling)[:3] == (1, 15, 0) assert from_man_exp(-0xf1, -4, 4, round_ceiling)[:3] == (1, 15, 0) assert from_man_exp(-0xff, -4, 4, round_ceiling)[:3] == (1, 15, 0) def test_round_nearest(): assert from_man_exp(0, -4, 4, round_nearest)[:3] == (0, 0, 0) assert from_man_exp(0xf0, -4, 4, round_nearest)[:3] == (0, 15, 0) assert from_man_exp(0xf7, -4, 4, round_nearest)[:3] == (0, 15, 0) assert from_man_exp(0xf8, -4, 4, round_nearest)[:3] == (0, 1, 4) # 1111.1000 -> 10000.0 assert from_man_exp(0xf9, -4, 4, round_nearest)[:3] == (0, 1, 4) # 1111.1001 -> 10000.0 assert from_man_exp(0xe8, -4, 4, round_nearest)[:3] == (0, 7, 1) # 1110.1000 -> 1110.0 assert from_man_exp(0xe9, -4, 4, round_nearest)[:3] == (0, 15, 0) # 1110.1001 -> 1111.0 assert from_man_exp(-0xf0, -4, 4, round_nearest)[:3] == (1, 15, 0) assert from_man_exp(-0xf7, -4, 4, round_nearest)[:3] == (1, 15, 0) assert from_man_exp(-0xf8, -4, 4, round_nearest)[:3] == (1, 1, 4) assert from_man_exp(-0xf9, -4, 4, round_nearest)[:3] == (1, 1, 4) assert from_man_exp(-0xe8, -4, 4, round_nearest)[:3] == (1, 7, 1) assert from_man_exp(-0xe9, -4, 4, round_nearest)[:3] == (1, 15, 0) def test_rounding_bugs(): # 1 less than power-of-two cases assert from_man_exp(72057594037927935, -56, 53, round_up) == (0, 1, 0, 1) assert from_man_exp(73786976294838205979, -65, 53, round_nearest) == (0, 1, 1, 1) assert from_man_exp(31, 0, 4, round_up) == (0, 1, 5, 1) assert from_man_exp(-31, 0, 4, round_floor) == (1, 1, 5, 1) assert from_man_exp(255, 0, 7, round_up) == (0, 1, 8, 1) assert from_man_exp(-255, 0, 7, round_floor) == (1, 1, 8, 1) def test_rounding_issue160(): a = from_man_exp(9867,-100) b = from_man_exp(9867,-200) c = from_man_exp(-1,0) z = (1, 1023, -10, 10) assert mpf_add(a, c, 10, 'd') == z assert mpf_add(b, c, 10, 'd') == z assert 
mpf_add(c, a, 10, 'd') == z assert mpf_add(c, b, 10, 'd') == z def test_perturb(): a = fone b = from_float(0.99999999999999989) c = from_float(1.0000000000000002) assert mpf_perturb(a, 0, 53, round_nearest) == a assert mpf_perturb(a, 1, 53, round_nearest) == a assert mpf_perturb(a, 0, 53, round_up) == c assert mpf_perturb(a, 0, 53, round_ceiling) == c assert mpf_perturb(a, 0, 53, round_down) == a assert mpf_perturb(a, 0, 53, round_floor) == a assert mpf_perturb(a, 1, 53, round_up) == a assert mpf_perturb(a, 1, 53, round_ceiling) == a assert mpf_perturb(a, 1, 53, round_down) == b assert mpf_perturb(a, 1, 53, round_floor) == b a = mpf_neg(a) b = mpf_neg(b) c = mpf_neg(c) assert mpf_perturb(a, 0, 53, round_nearest) == a assert mpf_perturb(a, 1, 53, round_nearest) == a assert mpf_perturb(a, 0, 53, round_up) == a assert mpf_perturb(a, 0, 53, round_floor) == a assert mpf_perturb(a, 0, 53, round_down) == b assert mpf_perturb(a, 0, 53, round_ceiling) == b assert mpf_perturb(a, 1, 53, round_up) == c assert mpf_perturb(a, 1, 53, round_floor) == c assert mpf_perturb(a, 1, 53, round_down) == a assert mpf_perturb(a, 1, 53, round_ceiling) == a def test_add_exact(): ff = from_float assert mpf_add(ff(3.0), ff(2.5)) == ff(5.5) assert mpf_add(ff(3.0), ff(-2.5)) == ff(0.5) assert mpf_add(ff(-3.0), ff(2.5)) == ff(-0.5) assert mpf_add(ff(-3.0), ff(-2.5)) == ff(-5.5) assert mpf_sub(mpf_add(fone, ff(1e-100)), fone) == ff(1e-100) assert mpf_sub(mpf_add(ff(1e-100), fone), fone) == ff(1e-100) assert mpf_sub(mpf_add(fone, ff(-1e-100)), fone) == ff(-1e-100) assert mpf_sub(mpf_add(ff(-1e-100), fone), fone) == ff(-1e-100) assert mpf_add(fone, fzero) == fone assert mpf_add(fzero, fone) == fone assert mpf_add(fzero, fzero) == fzero def test_long_exponent_shifts(): mp.dps = 15 # Check for possible bugs due to exponent arithmetic overflow # in a C implementation x = mpf(1) for p in [32, 64]: a = ldexp(1,2**(p-1)) b = ldexp(1,2**p) c = ldexp(1,2**(p+1)) d = ldexp(1,-2**(p-1)) e = ldexp(1,-2**p) f = 
ldexp(1,-2**(p+1)) assert (x+a) == a assert (x+b) == b assert (x+c) == c assert (x+d) == x assert (x+e) == x assert (x+f) == x assert (a+x) == a assert (b+x) == b assert (c+x) == c assert (d+x) == x assert (e+x) == x assert (f+x) == x assert (x-a) == -a assert (x-b) == -b assert (x-c) == -c assert (x-d) == x assert (x-e) == x assert (x-f) == x assert (a-x) == a assert (b-x) == b assert (c-x) == c assert (d-x) == -x assert (e-x) == -x assert (f-x) == -x
mit
manipopopo/tensorflow
tensorflow/python/training/device_util_test.py
23
3343
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for device utilities.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.eager import context from tensorflow.python.framework import ops from tensorflow.python.platform import test from tensorflow.python.training import device_util class DeviceUtilTest(test.TestCase): def testCurrentDeviceWithGlobalGraph(self): with ops.device("/cpu:0"): self.assertEqual(device_util.current(), "/device:CPU:0") with ops.device("/job:worker"): with ops.device("/cpu:0"): self.assertEqual(device_util.current(), "/job:worker/device:CPU:0") with ops.device("/cpu:0"): with ops.device("/gpu:0"): self.assertEqual(device_util.current(), "/device:GPU:0") def testCurrentDeviceWithNonGlobalGraph(self): with ops.Graph().as_default(): with ops.device("/cpu:0"): self.assertEqual(device_util.current(), "/device:CPU:0") def testCurrentDeviceWithEager(self): with context.eager_mode(): with ops.device("/cpu:0"): self.assertEqual(device_util.current(), "/job:localhost/replica:0/task:0/device:CPU:0") def testCanonicalizeWithoutDefaultDevice(self): self.assertEqual( device_util.canonicalize("/cpu:0"), "/replica:0/task:0/device:CPU:0") self.assertEqual( device_util.canonicalize("/job:worker/cpu:0"), 
"/job:worker/replica:0/task:0/device:CPU:0") self.assertEqual( device_util.canonicalize("/job:worker/task:1/cpu:0"), "/job:worker/replica:0/task:1/device:CPU:0") def testCanonicalizeWithDefaultDevice(self): self.assertEqual( device_util.canonicalize("/job:worker/task:1/cpu:0", default="/gpu:0"), "/job:worker/replica:0/task:1/device:CPU:0") self.assertEqual( device_util.canonicalize("/job:worker/task:1", default="/gpu:0"), "/job:worker/replica:0/task:1/device:GPU:0") self.assertEqual( device_util.canonicalize("/cpu:0", default="/job:worker"), "/job:worker/replica:0/task:0/device:CPU:0") def testResolveWithDeviceScope(self): with ops.device("/gpu:0"): self.assertEqual( device_util.resolve("/job:worker/task:1/cpu:0"), "/job:worker/replica:0/task:1/device:CPU:0") self.assertEqual( device_util.resolve("/job:worker/task:1"), "/job:worker/replica:0/task:1/device:GPU:0") with ops.device("/job:worker"): self.assertEqual( device_util.resolve("/cpu:0"), "/job:worker/replica:0/task:0/device:CPU:0") if __name__ == "__main__": test.main()
apache-2.0
michaelhkw/incubator-impala
tests/comparison/db_types.py
3
6058
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import re import sys from collections import defaultdict from common import ValExpr, ValExprList module_contents = dict() class DataTypeMetaclass(type): '''Provides sorting of classes used to determine upcasting.''' def __init__(cls, name, bases, dict): super(DataTypeMetaclass, cls).__init__(name, bases, dict) if name in ('Char', 'DataType', 'Decimal', 'Float', 'Int', 'Number', 'Timestamp'): cls.type = cls else: cls.type = cls.get_generic_type() def __cmp__(cls, other): if not isinstance(other, DataTypeMetaclass): return -1 return cmp( getattr(cls, 'CMP_VALUE', cls.__name__), getattr(other, 'CMP_VALUE', other.__name__)) class DataType(ValExpr): '''Base class for data types. Data types are represented as classes so inheritance can be used. ''' __metaclass__ = DataTypeMetaclass @staticmethod def group_by_type(vals): '''Group cols by their data type and return a dict of the results.''' vals_by_type = defaultdict(ValExprList) for val in vals: vals_by_type[val.type].append(val) return vals_by_type @classmethod def get_base_type(cls): '''This should only be called from a subclass to find the type that is just below DataType in the class hierarchy. 
For example Int and Decimal would both return Number as their base type. ''' if DataType in cls.__bases__: return cls for base in cls.__bases__: if issubclass(base, DataType): return base.get_base_type() raise Exception('Unable to determine base type of %s' % cls) @classmethod def get_generic_type(cls): return cls.get_base_type() @classmethod def name(cls): return cls.__name__ @classmethod def is_approximate(cls): return False def __init__(self, val): self.val = val @property def exact_type(self): return type(self) class Boolean(DataType): pass class Number(DataType): pass class Int(Number): @classmethod def get_generic_type(cls): return Int # Used to compare with other numbers for determining upcasting CMP_VALUE = 2 # Used during data generation to keep vals in range MIN = -2 ** 31 MAX = -MIN - 1 class TinyInt(Int): CMP_VALUE = 0 MIN = -2 ** 7 MAX = -MIN - 1 class SmallInt(Int): CMP_VALUE = 1 MIN = -2 ** 15 MAX = -MIN - 1 class BigInt(Int): CMP_VALUE = 3 MIN = -2 ** 63 MAX = -MIN - 1 class Decimal(Number): @classmethod def get_generic_type(cls): return Decimal CMP_VALUE = 4 MAX_DIGITS = 38 # Arbitrary default values MAX_FRACTIONAL_DIGITS = 10 # Arbitrary default values class Float(Number): @classmethod def get_generic_type(cls): return Float @classmethod def is_approximate(cls): return True CMP_VALUE = 5 class Double(Float): CMP_VALUE = 6 class Char(DataType): CMP_VALUE = 100 MIN = 0 MAX = 255 # This is not the true max class VarChar(Char): CMP_VALUE = 101 MAX = 255 # Not a true max. This is used to differentiate between VarChar and String. class String(VarChar): CMP_VALUE = 102 MIN = VarChar.MAX + 1 # This is used to differentiate between VarChar and String. MAX = 1000 # This is not the true max. 
class Timestamp(DataType): pass EXACT_TYPES = [ BigInt, Boolean, Char, Decimal, Double, Float, Int, SmallInt, String, Timestamp, TinyInt, VarChar] JOINABLE_TYPES = (Char, Decimal, Int, Timestamp) TYPES = tuple(set(type_.type for type_ in EXACT_TYPES)) __DECIMAL_TYPE_CACHE = dict() def get_decimal_class(total_digits, fractional_digits): cache_key = (total_digits, fractional_digits) if cache_key not in __DECIMAL_TYPE_CACHE: __DECIMAL_TYPE_CACHE[cache_key] = type( 'Decimal%02d%02d' % (total_digits, fractional_digits), (Decimal, ), {'MAX_DIGITS': total_digits, 'MAX_FRACTIONAL_DIGITS': fractional_digits}) return __DECIMAL_TYPE_CACHE[cache_key] __CHAR_TYPE_CACHE = dict() def get_char_class(length): if length not in __CHAR_TYPE_CACHE: __CHAR_TYPE_CACHE[length] = type( 'Char%04d' % length, (Char, ), {'MAX': length}) return __CHAR_TYPE_CACHE[length] __VARCHAR_TYPE_CACHE = dict() def get_varchar_class(length): if length not in __VARCHAR_TYPE_CACHE: __VARCHAR_TYPE_CACHE[length] = type( 'VarChar%04d' % length, (VarChar, ), {'MAX': length}) return __VARCHAR_TYPE_CACHE[length] class ModuleWrapper(object): def __init__(self, module): self.module = module self.decimal_class_pattern = re.compile(r"Decimal(\d{2})(\d{2})") self.char_class_pattern = re.compile(r"Char(\d+)") self.varchar_class_pattern = re.compile(r"VarChar(\d+)") def __getattr__(self, name): match = self.decimal_class_pattern.match(name) if match: return self.get_decimal_class(int(match.group(1)), int(match.group(2))) match = self.char_class_pattern.match(name) if match: return self.get_char_class(int(match.group(1))) match = self.varchar_class_pattern.match(name) if match: return self.get_varchar_class(int(match.group(1))) return getattr(self.module, name) sys.modules[__name__] = ModuleWrapper(sys.modules[__name__])
apache-2.0
santisiri/popego
envs/ALPHA-POPEGO/lib/python2.5/site-packages/numpy-1.0.4-py2.5-linux-x86_64.egg/numpy/distutils/cpuinfo.py
1
22466
#!/usr/bin/env python """ cpuinfo Copyright 2002 Pearu Peterson all rights reserved, Pearu Peterson <pearu@cens.ioc.ee> Permission to use, modify, and distribute this software is given under the terms of the NumPy (BSD style) license. See LICENSE.txt that came with this distribution for specifics. NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. Pearu Peterson """ __all__ = ['cpu'] import sys, re, types import os import commands import warnings def getoutput(cmd, successful_status=(0,), stacklevel=1): try: status, output = commands.getstatusoutput(cmd) except EnvironmentError, e: warnings.warn(str(e), UserWarning, stacklevel=stacklevel) return False, output if os.WIFEXITED(status) and os.WEXITSTATUS(status) in successful_status: return True, output return False, output def command_info(successful_status=(0,), stacklevel=1, **kw): info = {} for key in kw: ok, output = getoutput(kw[key], successful_status=successful_status, stacklevel=stacklevel+1) if ok: info[key] = output.strip() return info def command_by_line(cmd, successful_status=(0,), stacklevel=1): ok, output = getoutput(cmd, successful_status=successful_status, stacklevel=stacklevel+1) if not ok: return for line in output.splitlines(): yield line.strip() def key_value_from_command(cmd, sep, successful_status=(0,), stacklevel=1): d = {} for line in command_by_line(cmd, successful_status=successful_status, stacklevel=stacklevel+1): l = [s.strip() for s in line.split(sep, 1)] if len(l) == 2: d[l[0]] = l[1] return d class CPUInfoBase(object): """Holds CPU information and provides methods for requiring the availability of various CPU features. 
""" def _try_call(self,func): try: return func() except: pass def __getattr__(self,name): if not name.startswith('_'): if hasattr(self,'_'+name): attr = getattr(self,'_'+name) if type(attr) is types.MethodType: return lambda func=self._try_call,attr=attr : func(attr) else: return lambda : None raise AttributeError,name def _getNCPUs(self): return 1 def _is_32bit(self): return not self.is_64bit() class LinuxCPUInfo(CPUInfoBase): info = None def __init__(self): if self.info is not None: return info = [ {} ] ok, output = getoutput('uname -m') if ok: info[0]['uname_m'] = output.strip() try: fo = open('/proc/cpuinfo') except EnvironmentError, e: warnings.warn(str(e), UserWarning) else: for line in fo: name_value = [s.strip() for s in line.split(':', 1)] if len(name_value) != 2: continue name, value = name_value if not info or info[-1].has_key(name): # next processor info.append({}) info[-1][name] = value fo.close() self.__class__.info = info def _not_impl(self): pass # Athlon def _is_AMD(self): return self.info[0]['vendor_id']=='AuthenticAMD' def _is_AthlonK6_2(self): return self._is_AMD() and self.info[0]['model'] == '2' def _is_AthlonK6_3(self): return self._is_AMD() and self.info[0]['model'] == '3' def _is_AthlonK6(self): return re.match(r'.*?AMD-K6',self.info[0]['model name']) is not None def _is_AthlonK7(self): return re.match(r'.*?AMD-K7',self.info[0]['model name']) is not None def _is_AthlonMP(self): return re.match(r'.*?Athlon\(tm\) MP\b', self.info[0]['model name']) is not None def _is_AMD64(self): return self.is_AMD() and self.info[0]['family'] == '15' def _is_Athlon64(self): return re.match(r'.*?Athlon\(tm\) 64\b', self.info[0]['model name']) is not None def _is_AthlonHX(self): return re.match(r'.*?Athlon HX\b', self.info[0]['model name']) is not None def _is_Opteron(self): return re.match(r'.*?Opteron\b', self.info[0]['model name']) is not None def _is_Hammer(self): return re.match(r'.*?Hammer\b', self.info[0]['model name']) is not None # Alpha def 
_is_Alpha(self): return self.info[0]['cpu']=='Alpha' def _is_EV4(self): return self.is_Alpha() and self.info[0]['cpu model'] == 'EV4' def _is_EV5(self): return self.is_Alpha() and self.info[0]['cpu model'] == 'EV5' def _is_EV56(self): return self.is_Alpha() and self.info[0]['cpu model'] == 'EV56' def _is_PCA56(self): return self.is_Alpha() and self.info[0]['cpu model'] == 'PCA56' # Intel #XXX _is_i386 = _not_impl def _is_Intel(self): return self.info[0]['vendor_id']=='GenuineIntel' def _is_i486(self): return self.info[0]['cpu']=='i486' def _is_i586(self): return self.is_Intel() and self.info[0]['cpu family'] == '5' def _is_i686(self): return self.is_Intel() and self.info[0]['cpu family'] == '6' def _is_Celeron(self): return re.match(r'.*?Celeron', self.info[0]['model name']) is not None def _is_Pentium(self): return re.match(r'.*?Pentium', self.info[0]['model name']) is not None def _is_PentiumII(self): return re.match(r'.*?Pentium.*?II\b', self.info[0]['model name']) is not None def _is_PentiumPro(self): return re.match(r'.*?PentiumPro\b', self.info[0]['model name']) is not None def _is_PentiumMMX(self): return re.match(r'.*?Pentium.*?MMX\b', self.info[0]['model name']) is not None def _is_PentiumIII(self): return re.match(r'.*?Pentium.*?III\b', self.info[0]['model name']) is not None def _is_PentiumIV(self): return re.match(r'.*?Pentium.*?(IV|4)\b', self.info[0]['model name']) is not None def _is_PentiumM(self): return re.match(r'.*?Pentium.*?M\b', self.info[0]['model name']) is not None def _is_Prescott(self): return self.is_PentiumIV() and self.has_sse3() def _is_Nocona(self): return self.is_64bit() and self.is_PentiumIV() def _is_Core2(self): return self.is_64bit() and self.is_Intel() and \ re.match(r'.*?Core\(TM\)2\b', \ self.info[0]['model name']) is not None def _is_Itanium(self): return re.match(r'.*?Itanium\b', self.info[0]['family']) is not None def _is_XEON(self): return re.match(r'.*?XEON\b', self.info[0]['model name'],re.IGNORECASE) is not None 
_is_Xeon = _is_XEON # Varia def _is_singleCPU(self): return len(self.info) == 1 def _getNCPUs(self): return len(self.info) def _has_fdiv_bug(self): return self.info[0]['fdiv_bug']=='yes' def _has_f00f_bug(self): return self.info[0]['f00f_bug']=='yes' def _has_mmx(self): return re.match(r'.*?\bmmx\b',self.info[0]['flags']) is not None def _has_sse(self): return re.match(r'.*?\bsse\b',self.info[0]['flags']) is not None def _has_sse2(self): return re.match(r'.*?\bsse2\b',self.info[0]['flags']) is not None def _has_sse3(self): return re.match(r'.*?\bsse3\b',self.info[0]['flags']) is not None def _has_3dnow(self): return re.match(r'.*?\b3dnow\b',self.info[0]['flags']) is not None def _has_3dnowext(self): return re.match(r'.*?\b3dnowext\b',self.info[0]['flags']) is not None def _is_64bit(self): if self.is_Alpha(): return True if self.info[0].get('clflush size','')=='64': return True if self.info[0].get('uname_m','')=='x86_64': return True if self.info[0].get('arch','')=='IA-64': return True return False def _is_32bit(self): return not self.is_64bit() class IRIXCPUInfo(CPUInfoBase): info = None def __init__(self): if self.info is not None: return info = key_value_from_command('sysconf', sep=' ', successful_status=(0,1)) self.__class__.info = info def _not_impl(self): pass def _is_singleCPU(self): return self.info.get('NUM_PROCESSORS') == '1' def _getNCPUs(self): return int(self.info.get('NUM_PROCESSORS', 1)) def __cputype(self,n): return self.info.get('PROCESSORS').split()[0].lower() == 'r%s' % (n) def _is_r2000(self): return self.__cputype(2000) def _is_r3000(self): return self.__cputype(3000) def _is_r3900(self): return self.__cputype(3900) def _is_r4000(self): return self.__cputype(4000) def _is_r4100(self): return self.__cputype(4100) def _is_r4300(self): return self.__cputype(4300) def _is_r4400(self): return self.__cputype(4400) def _is_r4600(self): return self.__cputype(4600) def _is_r4650(self): return self.__cputype(4650) def _is_r5000(self): return 
self.__cputype(5000) def _is_r6000(self): return self.__cputype(6000) def _is_r8000(self): return self.__cputype(8000) def _is_r10000(self): return self.__cputype(10000) def _is_r12000(self): return self.__cputype(12000) def _is_rorion(self): return self.__cputype('orion') def get_ip(self): try: return self.info.get('MACHINE') except: pass def __machine(self,n): return self.info.get('MACHINE').lower() == 'ip%s' % (n) def _is_IP19(self): return self.__machine(19) def _is_IP20(self): return self.__machine(20) def _is_IP21(self): return self.__machine(21) def _is_IP22(self): return self.__machine(22) def _is_IP22_4k(self): return self.__machine(22) and self._is_r4000() def _is_IP22_5k(self): return self.__machine(22) and self._is_r5000() def _is_IP24(self): return self.__machine(24) def _is_IP25(self): return self.__machine(25) def _is_IP26(self): return self.__machine(26) def _is_IP27(self): return self.__machine(27) def _is_IP28(self): return self.__machine(28) def _is_IP30(self): return self.__machine(30) def _is_IP32(self): return self.__machine(32) def _is_IP32_5k(self): return self.__machine(32) and self._is_r5000() def _is_IP32_10k(self): return self.__machine(32) and self._is_r10000() class DarwinCPUInfo(CPUInfoBase): info = None def __init__(self): if self.info is not None: return info = command_info(arch='arch', machine='machine') info['sysctl_hw'] = key_value_from_command('sysctl hw', sep='=') self.__class__.info = info def _not_impl(self): pass def _getNCPUs(self): return int(self.info['sysctl_hw'].get('hw.ncpu', 1)) def _is_Power_Macintosh(self): return self.info['sysctl_hw']['hw.machine']=='Power Macintosh' def _is_i386(self): return self.info['arch']=='i386' def _is_ppc(self): return self.info['arch']=='ppc' def __machine(self,n): return self.info['machine'] == 'ppc%s'%n def _is_ppc601(self): return self.__machine(601) def _is_ppc602(self): return self.__machine(602) def _is_ppc603(self): return self.__machine(603) def _is_ppc603e(self): return 
self.__machine('603e') def _is_ppc604(self): return self.__machine(604) def _is_ppc604e(self): return self.__machine('604e') def _is_ppc620(self): return self.__machine(620) def _is_ppc630(self): return self.__machine(630) def _is_ppc740(self): return self.__machine(740) def _is_ppc7400(self): return self.__machine(7400) def _is_ppc7450(self): return self.__machine(7450) def _is_ppc750(self): return self.__machine(750) def _is_ppc403(self): return self.__machine(403) def _is_ppc505(self): return self.__machine(505) def _is_ppc801(self): return self.__machine(801) def _is_ppc821(self): return self.__machine(821) def _is_ppc823(self): return self.__machine(823) def _is_ppc860(self): return self.__machine(860) class SunOSCPUInfo(CPUInfoBase): info = None def __init__(self): if self.info is not None: return info = command_info(arch='arch', mach='mach', uname_i='uname_i', isainfo_b='isainfo -b', isainfo_n='isainfo -n', ) info['uname_X'] = key_value_from_command('uname -X', sep='=') for line in command_by_line('psrinfo -v 0'): m = re.match(r'\s*The (?P<p>[\w\d]+) processor operates at', line) if m: info['processor'] = m.group('p') break self.__class__.info = info def _not_impl(self): pass def _is_32bit(self): return self.info['isainfo_b']=='32' def _is_64bit(self): return self.info['isainfo_b']=='64' def _is_i386(self): return self.info['isainfo_n']=='i386' def _is_sparc(self): return self.info['isainfo_n']=='sparc' def _is_sparcv9(self): return self.info['isainfo_n']=='sparcv9' def _getNCPUs(self): return int(self.info['uname_X'].get('NumCPU', 1)) def _is_sun4(self): return self.info['arch']=='sun4' def _is_SUNW(self): return re.match(r'SUNW',self.info['uname_i']) is not None def _is_sparcstation5(self): return re.match(r'.*SPARCstation-5',self.info['uname_i']) is not None def _is_ultra1(self): return re.match(r'.*Ultra-1',self.info['uname_i']) is not None def _is_ultra250(self): return re.match(r'.*Ultra-250',self.info['uname_i']) is not None def _is_ultra2(self): 
return re.match(r'.*Ultra-2',self.info['uname_i']) is not None def _is_ultra30(self): return re.match(r'.*Ultra-30',self.info['uname_i']) is not None def _is_ultra4(self): return re.match(r'.*Ultra-4',self.info['uname_i']) is not None def _is_ultra5_10(self): return re.match(r'.*Ultra-5_10',self.info['uname_i']) is not None def _is_ultra5(self): return re.match(r'.*Ultra-5',self.info['uname_i']) is not None def _is_ultra60(self): return re.match(r'.*Ultra-60',self.info['uname_i']) is not None def _is_ultra80(self): return re.match(r'.*Ultra-80',self.info['uname_i']) is not None def _is_ultraenterprice(self): return re.match(r'.*Ultra-Enterprise',self.info['uname_i']) is not None def _is_ultraenterprice10k(self): return re.match(r'.*Ultra-Enterprise-10000',self.info['uname_i']) is not None def _is_sunfire(self): return re.match(r'.*Sun-Fire',self.info['uname_i']) is not None def _is_ultra(self): return re.match(r'.*Ultra',self.info['uname_i']) is not None def _is_cpusparcv7(self): return self.info['processor']=='sparcv7' def _is_cpusparcv8(self): return self.info['processor']=='sparcv8' def _is_cpusparcv9(self): return self.info['processor']=='sparcv9' class Win32CPUInfo(CPUInfoBase): info = None pkey = r"HARDWARE\DESCRIPTION\System\CentralProcessor" # XXX: what does the value of # HKEY_LOCAL_MACHINE\HARDWARE\DESCRIPTION\System\CentralProcessor\0 # mean? def __init__(self): if self.info is not None: return info = [] try: #XXX: Bad style to use so long `try:...except:...`. Fix it! 
import _winreg prgx = re.compile(r"family\s+(?P<FML>\d+)\s+model\s+(?P<MDL>\d+)"\ "\s+stepping\s+(?P<STP>\d+)",re.IGNORECASE) chnd=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, self.pkey) pnum=0 while 1: try: proc=_winreg.EnumKey(chnd,pnum) except _winreg.error: break else: pnum+=1 info.append({"Processor":proc}) phnd=_winreg.OpenKey(chnd,proc) pidx=0 while True: try: name,value,vtpe=_winreg.EnumValue(phnd,pidx) except _winreg.error: break else: pidx=pidx+1 info[-1][name]=value if name=="Identifier": srch=prgx.search(value) if srch: info[-1]["Family"]=int(srch.group("FML")) info[-1]["Model"]=int(srch.group("MDL")) info[-1]["Stepping"]=int(srch.group("STP")) except: print sys.exc_value,'(ignoring)' self.__class__.info = info def _not_impl(self): pass # Athlon def _is_AMD(self): return self.info[0]['VendorIdentifier']=='AuthenticAMD' def _is_Am486(self): return self.is_AMD() and self.info[0]['Family']==4 def _is_Am5x86(self): return self.is_AMD() and self.info[0]['Family']==4 def _is_AMDK5(self): return self.is_AMD() and self.info[0]['Family']==5 \ and self.info[0]['Model'] in [0,1,2,3] def _is_AMDK6(self): return self.is_AMD() and self.info[0]['Family']==5 \ and self.info[0]['Model'] in [6,7] def _is_AMDK6_2(self): return self.is_AMD() and self.info[0]['Family']==5 \ and self.info[0]['Model']==8 def _is_AMDK6_3(self): return self.is_AMD() and self.info[0]['Family']==5 \ and self.info[0]['Model']==9 def _is_AMDK7(self): return self.is_AMD() and self.info[0]['Family'] == 6 # To reliably distinguish between the different types of AMD64 chips # (Athlon64, Operton, Athlon64 X2, Semperon, Turion 64, etc.) 
would # require looking at the 'brand' from cpuid def _is_AMD64(self): return self.is_AMD() and self.info[0]['Family'] == 15 # Intel def _is_Intel(self): return self.info[0]['VendorIdentifier']=='GenuineIntel' def _is_i386(self): return self.info[0]['Family']==3 def _is_i486(self): return self.info[0]['Family']==4 def _is_i586(self): return self.is_Intel() and self.info[0]['Family']==5 def _is_i686(self): return self.is_Intel() and self.info[0]['Family']==6 def _is_Pentium(self): return self.is_Intel() and self.info[0]['Family']==5 def _is_PentiumMMX(self): return self.is_Intel() and self.info[0]['Family']==5 \ and self.info[0]['Model']==4 def _is_PentiumPro(self): return self.is_Intel() and self.info[0]['Family']==6 \ and self.info[0]['Model']==1 def _is_PentiumII(self): return self.is_Intel() and self.info[0]['Family']==6 \ and self.info[0]['Model'] in [3,5,6] def _is_PentiumIII(self): return self.is_Intel() and self.info[0]['Family']==6 \ and self.info[0]['Model'] in [7,8,9,10,11] def _is_PentiumIV(self): return self.is_Intel() and self.info[0]['Family']==15 def _is_PentiumM(self): return self.is_Intel() and self.info[0]['Family'] == 6 \ and self.info[0]['Model'] in [9, 13, 14] def _is_Core2(self): return self.is_Intel() and self.info[0]['Family'] == 6 \ and self.info[0]['Model'] in [15, 16, 17] # Varia def _is_singleCPU(self): return len(self.info) == 1 def _getNCPUs(self): return len(self.info) def _has_mmx(self): if self.is_Intel(): return (self.info[0]['Family']==5 and self.info[0]['Model']==4) \ or (self.info[0]['Family'] in [6,15]) elif self.is_AMD(): return self.info[0]['Family'] in [5,6,15] else: return False def _has_sse(self): if self.is_Intel(): return (self.info[0]['Family']==6 and \ self.info[0]['Model'] in [7,8,9,10,11]) \ or self.info[0]['Family']==15 elif self.is_AMD(): return (self.info[0]['Family']==6 and \ self.info[0]['Model'] in [6,7,8,10]) \ or self.info[0]['Family']==15 else: return False def _has_sse2(self): if self.is_Intel(): return 
self.is_Pentium4() or self.is_PentiumM() \ or self.is_Core2() elif self.is_AMD(): return self.is_AMD64() else: return False def _has_3dnow(self): return self.is_AMD() and self.info[0]['Family'] in [5,6,15] def _has_3dnowext(self): return self.is_AMD() and self.info[0]['Family'] in [6,15] if sys.platform.startswith('linux'): # variations: linux2,linux-i386 (any others?) cpuinfo = LinuxCPUInfo elif sys.platform.startswith('irix'): cpuinfo = IRIXCPUInfo elif sys.platform == 'darwin': cpuinfo = DarwinCPUInfo elif sys.platform.startswith('sunos'): cpuinfo = SunOSCPUInfo elif sys.platform.startswith('win32'): cpuinfo = Win32CPUInfo elif sys.platform.startswith('cygwin'): cpuinfo = LinuxCPUInfo #XXX: other OS's. Eg. use _winreg on Win32. Or os.uname on unices. else: cpuinfo = CPUInfoBase cpu = cpuinfo() if __name__ == "__main__": cpu.is_blaa() cpu.is_Intel() cpu.is_Alpha() print 'CPU information:', for name in dir(cpuinfo): if name[0]=='_' and name[1]!='_': r = getattr(cpu,name[1:])() if r: if r!=1: print '%s=%s' %(name[1:],r), else: print name[1:], print
bsd-3-clause
PyYoshi/android_kernel_kyocera_l03
tools/perf/util/setup.py
4998
1330
#!/usr/bin/python2
# Build script for the 'perf' Python extension module.
#
# The build locations are driven by the perf Makefile through the
# PYTHON_EXTBUILD_LIB / PYTHON_EXTBUILD_TMP environment variables, so the
# custom build_ext / install_lib commands below redirect distutils to them.

from distutils.core import setup, Extension
from os import getenv

from distutils.command.build_ext   import build_ext   as _build_ext
from distutils.command.install_lib import install_lib as _install_lib

class build_ext(_build_ext):
    # Force the extension to be built in the Makefile-provided directories.
    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.build_lib  = build_lib
        self.build_temp = build_tmp

class install_lib(_install_lib):
    # Install from the same directory the extension was built into.
    def finalize_options(self):
        _install_lib.finalize_options(self)
        self.build_dir = build_lib

cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()

build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')

# Read the list of C source files, skipping blanks and '#' comment lines.
# Use open() in a with-block instead of the deprecated file() builtin so
# the handle is closed deterministically.
with open('util/python-ext-sources') as sources_file:
    ext_sources = [f.strip() for f in sources_file
                   if len(f.strip()) > 0 and f[0] != '#']

perf = Extension('perf',
                 sources = ext_sources,
                 include_dirs = ['util/include'],
                 extra_compile_args = cflags,
                )

setup(name='perf',
      version='0.1',
      description='Interface with the Linux profiling infrastructure',
      author='Arnaldo Carvalho de Melo',
      author_email='acme@redhat.com',
      license='GPLv2',
      url='http://perf.wiki.kernel.org',
      ext_modules=[perf],
      cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
gpl-2.0
prescottprue/PiOpenLighting
tools/rdm/TestDefinitions.py
2
223170
# !/usr/bin/python
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU Library General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software
#  Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
#
# TestDefinitions.py
# Copyright (C) 2010 Simon Newton

'''This defines all the tests for RDM responders.'''

__author__ = 'nomis52@gmail.com (Simon Newton)'

import datetime
import operator
import struct
from ExpectedResults import *
from ResponderTest import ResponderTestFixture, TestFixture
from ResponderTest import OptionalParameterTestFixture
from TestCategory import TestCategory
from ola import PidStore
from ola import RDMConstants
# BUG FIX: OlaClient.RDM_PLUGIN_DISCOVERY_NOT_SUPPORTED is referenced in the
# VerifyResult methods below, but only RDMNack was imported, which made the
# references a NameError at runtime.
from ola.OlaClient import OlaClient, RDMNack
from ola.PidStore import ROOT_DEVICE
from ola.UID import UID
import TestMixins
from TestMixins import MAX_DMX_ADDRESS

MAX_PERSONALITY_NUMBER = 255


# Mute Tests
#------------------------------------------------------------------------------
class MuteDevice(ResponderTestFixture):
  """Mute device and verify response."""
  CATEGORY = TestCategory.NETWORK_MANAGEMENT
  PID = 'DISC_MUTE'
  PROVIDES = ['mute_supported', 'mute_control_fields']

  def Test(self):
    self.AddExpectedResults([
      self.AckDiscoveryResult(),
      UnsupportedResult(
        warning='RDM Controller does not support DISCOVERY commands')
    ])
    self.SendDiscovery(ROOT_DEVICE, self.pid)

  def VerifyResult(self, response, fields):
    supported = (response.response_code !=
                 OlaClient.RDM_PLUGIN_DISCOVERY_NOT_SUPPORTED)
    self.SetProperty('mute_supported', supported)

    if supported:
      self.SetProperty('mute_control_fields', fields['control_field'])
      binding_uids = fields.get('binding_uid', [])
      if binding_uids:
        if (binding_uids[0]['binding_uid'].manufacturer_id !=
            self.uid.manufacturer_id):
          # BUG FIX: the warning previously read
          # binding_uids[0].manufacturer_id (attribute access on a dict),
          # which raised AttributeError whenever the mismatch was hit; use
          # the same dict access as the guarding comparison above.
          self.AddWarning(
              'Binding UID manufacturer ID 0x%04hx does not equal device '
              'manufacturer ID of 0x%04hx' % (
                binding_uids[0]['binding_uid'].manufacturer_id,
                self.uid.manufacturer_id))


class MuteDeviceWithData(ResponderTestFixture):
  """Mute device with param data."""
  CATEGORY = TestCategory.NETWORK_MANAGEMENT
  PID = 'DISC_MUTE'

  def Test(self):
    # Section 6.3.4 of E1.20
    self.AddExpectedResults([
      TimeoutResult(),
      UnsupportedResult()
    ])
    self.SendRawDiscovery(ROOT_DEVICE, self.pid, 'x')


class UnMuteDevice(ResponderTestFixture):
  """UnMute device and verify response."""
  CATEGORY = TestCategory.NETWORK_MANAGEMENT
  PID = 'DISC_UN_MUTE'
  PROVIDES = ['unmute_supported']
  REQUIRES = ['mute_control_fields']

  def Test(self):
    self.AddExpectedResults([
      self.AckDiscoveryResult(),
      UnsupportedResult()
    ])
    self.SendDiscovery(ROOT_DEVICE, self.pid)

  def VerifyResult(self, response, fields):
    supported = (response.response_code !=
                 OlaClient.RDM_PLUGIN_DISCOVERY_NOT_SUPPORTED)
    self.SetProperty('unmute_supported', supported)

    if supported:
      # Mute and un-mute responses carry the same control field; flag a
      # responder that reports different values for the two commands.
      if fields['control_field'] != self.Property('mute_control_fields'):
        self.AddWarning(
            "Mute / Unmute control fields don't match. 0x%hx != 0x%hx" %
            (self.Property('mute_control_fields'), fields['control_field']))


class UnMuteDeviceWithData(ResponderTestFixture):
  """UnMute device info with param data."""
  CATEGORY = TestCategory.NETWORK_MANAGEMENT
  PID = 'DISC_UN_MUTE'

  def Test(self):
    # Section 6.3.4 of E1.20
    self.AddExpectedResults([
      TimeoutResult(),
      UnsupportedResult()
    ])
    self.SendRawDiscovery(ROOT_DEVICE, self.pid, 'x')


class RequestsWhileUnmuted(ResponderTestFixture):
  """Unmute the device, send a GET DEVICE_INFO request, mute device again."""
  CATEGORY = TestCategory.NETWORK_MANAGEMENT
  PID = 'DISC_UN_MUTE'
  # this requires sub_device_count so that we know DEVICE_INFO is supported
  REQUIRES = ['mute_supported', 'unmute_supported', 'sub_device_count']

  def Test(self):
    if not (self.Property('unmute_supported') and
            self.Property('mute_supported')):
      self.SetNotRun('Controller does not support mute / unmute commands')
      return

    self.AddExpectedResults(self.AckDiscoveryResult(action=self.GetDeviceInfo))
    self.SendDiscovery(ROOT_DEVICE, self.pid)

  def GetDeviceInfo(self):
    device_info_pid = self.LookupPid('DEVICE_INFO')
    self.AddExpectedResults(AckGetResult(device_info_pid.value))
    self.SendGet(ROOT_DEVICE, device_info_pid)

  def ResetState(self):
    # mute the device again
    mute_pid = self.LookupPid('DISC_MUTE')
    self.SendDiscovery(ROOT_DEVICE, mute_pid)
    self._wrapper.Run()


# Invalid DISCOVERY_PIDs
#------------------------------------------------------------------------------
class InvalidDiscoveryPID(ResponderTestFixture):
  """Send an invalid Discovery CC PID, see E1.20 6.3.4"""
  CATEGORY = TestCategory.ERROR_CONDITIONS

  # We need to mock out a PID here
  class MockPid(object):
    def __init__(self):
      self.value = 0x000f

    # NOTE(review): declared without 'self' -- if invoked as a bound method
    # the first positional argument receives the instance; confirm against
    # how the test framework calls ValidateAddressing.
    def ValidateAddressing(request_params, request_type):
      return True

    def __str__(self):
      return '0x%04hx' % self.value

  def Test(self):
    mock_pid = self.MockPid()
    self.AddExpectedResults([
      TimeoutResult(),
      UnsupportedResult()
    ])
    self.SendRawDiscovery(ROOT_DEVICE, mock_pid)


# DUB Tests
#------------------------------------------------------------------------------ class DUBFullTree(TestMixins.DiscoveryMixin, ResponderTestFixture): """Confirm the device responds within the entire DUB range.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT PROVIDES = ['dub_supported'] def LowerBound(self): return UID(0, 0) def UpperBound(self): return UID.AllDevices() def DUBResponseCode(self, response_code): self.SetProperty( 'dub_supported', response_code != OlaClient.RDM_PLUGIN_DISCOVERY_NOT_SUPPORTED) class DUBManufacturerTree(TestMixins.DiscoveryMixin, ResponderTestFixture): """Confirm the device responds within it's manufacturer DUB range.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID(self.uid.manufacturer_id, 0) def UpperBound(self): return UID.VendorcastAddress(self.uid.manufacturer_id) class DUBSingleUID(TestMixins.DiscoveryMixin, ResponderTestFixture): """Confirm the device responds to just it's own range.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return self.uid def UpperBound(self): return self.uid class DUBSingleLowerUID(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB from <UID> - 1 to <UID> - 1.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID.PreviousUID(self.uid) def UpperBound(self): return UID.PreviousUID(self.uid) def ExpectResponse(self): return False class DUBSingleUpperUID(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB from <UID> + 1 to <UID> + 1.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID.NextUID(self.uid) def 
UpperBound(self): return UID.NextUID(self.uid) def ExpectResponse(self): return False class DUBAffirmativeLowerBound(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB from <UID> to ffff:ffffffff.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return self.uid def UpperBound(self): return UID.AllDevices() class DUBNegativeLowerBound(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB from <UID> + 1 to ffff:ffffffff.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID.NextUID(self.uid) def UpperBound(self): return UID.AllDevices() def ExpectResponse(self): return False class DUBAffirmativeUpperBound(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB from 0000:00000000 to <UID>.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID(0, 0) def UpperBound(self): return self.uid class DUBNegativeUpperBound(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB from 0000:00000000 to <UID> - 1.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID(0, 0) def UpperBound(self): return UID.PreviousUID(self.uid) def ExpectResponse(self): return False class DUBDifferentManufacturer(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB with a different manufacturer's range.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID(self.uid.manufacturer_id - 1, 0) def UpperBound(self): return UID(self.uid.manufacturer_id - 1, 0xffffffff) def ExpectResponse(self): return False class DUBSignedComparisons(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB to check UIDs aren't using signed values.""" CATEGORY = 
TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): # Section 5.1 of E1.20 limits the manufacturer ID range to 0 - 0x7fff so # this should be safe for all cases. return UID(0x8000, 0) def UpperBound(self): return UID.AllDevices() def ExpectResponse(self): return False class DUBNegativeVendorcast(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB to another manufacturer's vendorcast address.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID(0, 0) def UpperBound(self): return UID.AllDevices() def ExpectResponse(self): return False def Target(self): return UID(self.uid.manufacturer_id - 1, 0xffffffff) class DUBPositiveVendorcast(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB to this manufacturer's vendorcast address.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID(0, 0) def UpperBound(self): return UID.AllDevices() def Target(self): return UID(self.uid.manufacturer_id, 0xffffffff) class DUBPositiveUnicast(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB to the device's address.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID(0, 0) def UpperBound(self): return UID.AllDevices() def Target(self): return self.uid class DUBInvertedFullTree(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB from ffff:ffffffff to 0000:00000000.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID.AllDevices() def UpperBound(self): return UID(0, 0) def ExpectResponse(self): return False class DUBInvertedRange(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB from <UID> + 1 to <UID> - 1.""" CATEGORY = 
TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID.NextUID(self.uid) def UpperBound(self): return UID.PreviousUID(self.uid) def ExpectResponse(self): return False class DUBInvertedLowerUID(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB from <UID> to <UID> - 1.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return self.uid def UpperBound(self): return UID.PreviousUID(self.uid) def ExpectResponse(self): return False class DUBInvertedUpperUID(TestMixins.DiscoveryMixin, ResponderTestFixture): """DUB from <UID> + 1 to <UID>.""" CATEGORY = TestCategory.NETWORK_MANAGEMENT REQUIRES = ['dub_supported'] + TestMixins.DiscoveryMixin.REQUIRES def LowerBound(self): return UID.NextUID(self.uid) def UpperBound(self): return self.uid def ExpectResponse(self): return False # Device Info tests #------------------------------------------------------------------------------ class DeviceInfoTest(object): """The base device info test class.""" PID = 'DEVICE_INFO' FIELDS = ['device_model', 'product_category', 'software_version', 'dmx_footprint', 'current_personality', 'personality_count', 'dmx_start_address', 'sub_device_count', 'sensor_count'] FIELD_VALUES = { 'protocol_major': 1, 'protocol_minor': 0, } class GetDeviceInfo(ResponderTestFixture, DeviceInfoTest): """GET device info & verify.""" CATEGORY = TestCategory.CORE PROVIDES = [ 'current_personality', 'dmx_footprint', 'dmx_start_address', 'personality_count', 'sensor_count', 'software_version', 'sub_device_count', ] def Test(self): self.AddExpectedResults(self.AckGetResult( field_names=self.FIELDS, field_values=self.FIELD_VALUES)) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, unused_response, fields): """Check the footprint, personalities & sub devices.""" for property in self.PROVIDES: self.SetPropertyFromDict(fields, property) footprint = 
fields['dmx_footprint'] if footprint > MAX_DMX_ADDRESS: self.AddWarning('DMX Footprint of %d, was more than 512' % footprint) if footprint > 0: personality_count = fields['personality_count'] current_personality = fields['current_personality'] if personality_count == 0: self.AddAdvisory('DMX Footprint non 0, but no personalities listed') if current_personality == 0: self.AddWarning('Current personality should be >= 1, was %d' % current_personality) elif current_personality > personality_count: self.AddWarning('Current personality (%d) should be less than the ' 'personality count (%d)' % (current_personality, personality_count)) start_address = fields['dmx_start_address'] if (start_address == 0 or (start_address > 512 and start_address != 0xffff)): self.AddWarning('Invalid DMX address %d in DEVICE_INFO' % start_address) sub_devices = fields['sub_device_count'] if sub_devices > 512: self.AddWarning('Sub device count > 512, was %d' % sub_devices) class GetDeviceInfoWithData(ResponderTestFixture, DeviceInfoTest): """GET device info with param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PROVIDES = ['supports_over_sized_pdl'] def Test(self): self.AddExpectedResults([ self.NackGetResult(RDMNack.NR_FORMAT_ERROR), self.AckGetResult( field_names=self.FIELDS, field_values=self.FIELD_VALUES, warning='Get %s with data returned an ack' % self.pid.name) ]) self.SendRawGet(ROOT_DEVICE, self.pid, 'x') def VerifyResult(self, response, fields): self.SetProperty('supports_over_sized_pdl', True) class GetMaxPacketSize(ResponderTestFixture, DeviceInfoTest): """Check if the responder can handle a packet of the maximum size.""" CATEGORY = TestCategory.ERROR_CONDITIONS MAX_PDL = 231 PROVIDES = ['supports_max_sized_pdl'] def Test(self): self.AddExpectedResults([ self.NackGetResult(RDMNack.NR_FORMAT_ERROR), self.NackGetResult(RDMNack.NR_PACKET_SIZE_UNSUPPORTED), self.AckGetResult(), # some crazy devices continue to ack InvalidResponse( advisory='Responder returned an invalid response 
to a command with ' 'PDL of %d' % self.MAX_PDL ), TimeoutResult( advisory='Responder timed out to a command with PDL of %d' % self.MAX_PDL), ]) self.SendRawGet(ROOT_DEVICE, self.pid, 'x' * self.MAX_PDL) def VerifyResult(self, response, fields): ok = response not in [OlaClient.RDM_INVALID_RESPONSE, OlaClient.RDM_TIMEOUT] self.SetProperty('supports_max_sized_pdl', ok) class DetermineMaxPacketSize(ResponderTestFixture, DeviceInfoTest): """Binary search the pdl length space to determine the max packet size.""" CATEGORY = TestCategory.ERROR_CONDITIONS REQUIRES = ['supports_over_sized_pdl', 'supports_max_sized_pdl'] def Test(self): if self.Property('supports_max_sized_pdl'): self.SetNotRun('Device supports full sized packet') return self._lower = 1 self._upper = GetMaxPacketSize.MAX_PDL self.SendGet() def SendGet(self): if self._lower + 1 == self._upper: self.AddWarning('Max PDL supported is < %d, was %d' % (GetMaxPacketSize.MAX_PDL, self._lower)) self.Stop() return self._current = (self._lower + self._upper) / 2 self.AddExpectedResults([ self.NackGetResult(RDMNack.NR_FORMAT_ERROR, action=self.GetPassed), self.AckGetResult(action=self.GetPassed), InvalidResponse(action=self.GetFailed), TimeoutResult(action=self.GetFailed), ]) self.SendRawGet(ROOT_DEVICE, self.pid, 'x' * self._current) def GetPassed(self): self._lower = self._current self.SendGet() def GetFailed(self): self._upper = self._current self.SendGet() class SetDeviceInfo(ResponderTestFixture, DeviceInfoTest): """SET device info.""" CATEGORY = TestCategory.ERROR_CONDITIONS def Test(self): self.AddExpectedResults(TestMixins.UnsupportedSetNacks(self.pid)) self.SendRawSet(ROOT_DEVICE, self.pid) class AllSubDevicesGetDeviceInfo(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get Device Info to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'DEVICE_INFO' # Supported Parameters Tests & Mixin #------------------------------------------------------------------------------ class 
class GetSupportedParameters(ResponderTestFixture):
  """GET supported parameters."""
  CATEGORY = TestCategory.CORE
  PID = 'SUPPORTED_PARAMETERS'
  PROVIDES = ['manufacturer_parameters', 'supported_parameters',
              'acks_supported_parameters']

  # declaring support for any of these is a warning:
  MANDATORY_PIDS = ['SUPPORTED_PARAMETERS',
                    'PARAMETER_DESCRIPTION',
                    'DEVICE_INFO',
                    'SOFTWARE_VERSION_LABEL',
                    'DMX_START_ADDRESS',
                    'IDENTIFY_DEVICE']

  # Banned PIDs, these are pid values that can not appear in the list of
  # supported parameters (these are used for discovery)
  BANNED_PID_VALUES = [1, 2, 3]

  # If responders support any of the pids in these groups, they should really
  # support all of them.
  PID_GROUPS = [
      ('PROXIED_DEVICE_COUNT', 'PROXIED_DEVICES'),
      ('LANGUAGE_CAPABILITIES', 'LANGUAGE'),
      ('DMX_PERSONALITY', 'DMX_PERSONALITY_DESCRIPTION'),
      ('SENSOR_DEFINITION', 'SENSOR_VALUE'),
      ('SELF_TEST_DESCRIPTION', 'PERFORM_SELFTEST'),
  ]

  # If the first pid in each group is supported, the remainder of the group
  # must be.
  PID_DEPENDENCIES = [
      ('RECORD_SENSORS', 'SENSOR_VALUE'),
      ('DEFAULT_SLOT_VALUE', 'SLOT_DESCRIPTION'),
      ('CURVE', 'CURVE_DESCRIPTION'),
      ('OUTPUT_RESPONSE_TIME', 'OUTPUT_RESPONSE_TIME_DESCRIPTION'),
      ('MODULATION_FREQUENCY', 'MODULATION_FREQUENCY_DESCRIPTION'),
      ('LOCK_STATE', 'LOCK_STATE_DESCRIPTION'),
  ]

  def Test(self):
    self.AddExpectedResults([
      # TODO(simon): We should cross check this against support for anything
      # more than the required set of parameters at the end of all tests.
      self.NackGetResult(RDMNack.NR_UNKNOWN_PID),
      self.AckGetResult(),
    ])
    self.SendGet(ROOT_DEVICE, self.pid)

  def VerifyResult(self, response, fields):
    if not response.WasAcked():
      self.SetProperty('manufacturer_parameters', [])
      self.SetProperty('supported_parameters', [])
      self.SetProperty('acks_supported_parameters', False)
      return

    self.SetProperty('acks_supported_parameters', True)

    # Map pid value -> pid object for the PIDs that must not be listed.
    mandatory_pids = {}
    for p in self.MANDATORY_PIDS:
      pid = self.LookupPid(p)
      mandatory_pids[pid.value] = pid

    supported_parameters = []
    manufacturer_parameters = []
    count_by_pid = {}

    for item in fields['params']:
      param_id = item['param_id']
      count_by_pid[param_id] = count_by_pid.get(param_id, 0) + 1
      if param_id in self.BANNED_PID_VALUES:
        self.AddWarning('%d listed in supported parameters' % param_id)
        continue
      if param_id in mandatory_pids:
        self.AddAdvisory('%s listed in supported parameters' %
                         mandatory_pids[param_id].name)
        continue
      supported_parameters.append(param_id)
      # 0x8000 - 0xffdf is the manufacturer specific pid range.
      if param_id >= 0x8000 and param_id < 0xffe0:
        manufacturer_parameters.append(param_id)

    # check for duplicate pids
    # items() (rather than Python-2-only iteritems()) works under both 2 & 3.
    for pid, count in count_by_pid.items():
      if count > 1:
        pid_obj = self.LookupPidValue(pid)
        if pid_obj:
          self.AddAdvisory('%s listed %d times in supported parameters' %
                           (pid_obj, count))
        else:
          self.AddAdvisory('PID 0x%hx listed %d times in supported parameters'
                           % (pid, count))

    self.SetProperty('manufacturer_parameters', manufacturer_parameters)
    self.SetProperty('supported_parameters', supported_parameters)

    for pid_names in self.PID_GROUPS:
      supported_pids = []
      unsupported_pids = []
      for pid_name in pid_names:
        pid = self.LookupPid(pid_name)
        if pid.value in supported_parameters:
          supported_pids.append(pid_name)
        else:
          unsupported_pids.append(pid_name)

      if supported_pids and unsupported_pids:
        self.AddAdvisory(
            '%s supported but %s is not' %
            (','.join(supported_pids), ','.join(unsupported_pids)))

    for pid_names in self.PID_DEPENDENCIES:
      if self.LookupPid(pid_names[0]).value not in supported_parameters:
        continue

      unsupported_pids = []
      for pid_name in pid_names[1:]:
        pid = self.LookupPid(pid_name)
        if pid is None:
          self.SetBroken('Missing PID %s' % pid_name)
          return
        if pid.value not in supported_parameters:
          unsupported_pids.append(pid_name)
      if unsupported_pids:
        self.AddAdvisory('%s supported but %s is not' %
                         (pid_names[0], ','.join(unsupported_pids)))


class GetSupportedParametersWithData(ResponderTestFixture):
  """GET supported parameters with param data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'SUPPORTED_PARAMETERS'
  REQUIRES = ['acks_supported_parameters']

  def Test(self):
    if self.Property('acks_supported_parameters'):
      self.AddExpectedResults([
        self.NackGetResult(RDMNack.NR_FORMAT_ERROR),
        self.AckGetResult(
          warning='Get %s with data returned an ack' % self.pid.name)
      ])
    else:
      self.AddExpectedResults(self.NackGetResult(RDMNack.NR_UNKNOWN_PID))
    self.SendRawGet(ROOT_DEVICE, self.pid, 'foo')


class SetSupportedParameters(ResponderTestFixture):
  """Attempt to SET supported parameters."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'SUPPORTED_PARAMETERS'

  def Test(self):
    self.AddExpectedResults(TestMixins.UnsupportedSetNacks(self.pid))
    self.SendRawSet(ROOT_DEVICE, self.pid)


class AllSubDevicesGetSupportedParameters(TestMixins.AllSubDevicesGetMixin,
                                          ResponderTestFixture):
  """Send a Get SUPPORTED_PARAMETERS to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'SUPPORTED_PARAMETERS'
class GetSubDeviceSupportedParameters(ResponderTestFixture):
  """Check that SUPPORTED_PARAMETERS is consistent across sub devices."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'SUPPORTED_PARAMETERS'
  REQUIRES = ['sub_device_addresses']
  PROVIDES = ['sub_device_supported_parameters']

  # E1.37, 2.1 Sub devices are required to support these.
  MANDATORY_PIDS = ['SUPPORTED_PARAMETERS',
                    'DEVICE_INFO',
                    'SOFTWARE_VERSION_LABEL',
                    'IDENTIFY_DEVICE']

  def Test(self):
    # list() keeps this working with Python 3 dict key views as well;
    # under Python 2 keys() already returns a list so behavior is unchanged.
    self._sub_devices = list(self.Property('sub_device_addresses').keys())
    self._sub_devices.reverse()
    self._params = {}
    self._GetSupportedParams()

  def _GetSupportedParams(self):
    if not self._sub_devices:
      self._CheckForConsistency()
      self.Stop()
      return
    self.AddExpectedResults(
        self.AckGetResult(action=self._GetSupportedParams))
    self.SendGet(self._sub_devices[-1], self.pid)

  def VerifyResult(self, response, fields):
    sub_device = self._sub_devices.pop()
    supported_params = set()
    for p in fields['params']:
      supported_params.add(p['param_id'])
    self._params[sub_device] = supported_params

  def _CheckForConsistency(self):
    if not self._params:
      return

    # All sub devices must report an identical supported parameters set.
    supported_pids = set()
    for pids in self._params.values():
      if not supported_pids:
        supported_pids = pids
      elif supported_pids != pids:
        self.SetFailed('SUPPORTED_PARAMETERS for sub-devices do not match')
        return

    mandatory_pids = set(self.LookupPid(p).value
                         for p in self.MANDATORY_PIDS)
    missing_pids = mandatory_pids - supported_pids
    if missing_pids:
      self.SetFailed("Missing PIDs %s from sub device's supported pid list" %
                     ', '.join('0x%04hx' % p for p in missing_pids))
      return

    self.SetProperty('sub_device_supported_parameters', supported_pids)


# Sub Devices Test
#------------------------------------------------------------------------------
class FindSubDevices(ResponderTestFixture):
  """Locate the sub devices by sending DEVICE_INFO messages."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'DEVICE_INFO'
  PROVIDES = ['sub_device_addresses', 'sub_device_footprints']
  REQUIRES = ['sub_device_count']

  def Test(self):
    self._device_count = self.Property('sub_device_count')
    self._sub_device_addresses = {}  # index to start address mapping
    self._sub_device_footprints = {}  # index to footprint mapping
    self._current_index = 0  # the current sub device we're trying to query
    self._CheckForSubDevice()

  def _CheckForSubDevice(self):
    # For each supported param message we should either see a sub device out
    # of range or an ack
    if len(self._sub_device_addresses) == self._device_count:
      if self._device_count == 0:
        self.SetNotRun('No sub devices declared')
      self.SetProperty('sub_device_addresses', self._sub_device_addresses)
      self.SetProperty('sub_device_footprints', self._sub_device_footprints)
      self.Stop()
      return

    if self._current_index >= PidStore.MAX_VALID_SUB_DEVICE:
      # Bug fix: this previously read len(self._sub_devices), an attribute
      # never defined in this class, which raised AttributeError on the
      # failure path instead of reporting the failure.
      self.SetFailed('Only found %d of %d sub devices' %
                     (len(self._sub_device_addresses), self._device_count))
      self.Stop()
      return

    self.AddExpectedResults([
      self.NackGetResult(RDMNack.NR_SUB_DEVICE_OUT_OF_RANGE,
                         action=self._CheckForSubDevice),
      self.AckGetResult(action=self._CheckForSubDevice)
    ])
    self._current_index += 1
    self.SendGet(self._current_index, self.pid)

  def VerifyResult(self, response, fields):
    if response.WasAcked():
      if fields['sub_device_count'] != self._device_count:
        self.SetFailed(
            'For sub-device %d, DEVICE_INFO reported %d sub devices '
            ' but the root device reported %s. See section 10.5' %
            (self._current_index, fields['sub_device_count'],
             self._device_count))
        self.Stop()
      self._sub_device_addresses[self._current_index] = (
          fields['dmx_start_address'])
      self._sub_device_footprints[self._current_index] = (
          fields['dmx_footprint'])


# Clear Status ID
#------------------------------------------------------------------------------
class GetClearStatusMessages(TestMixins.UnsupportedGetMixin,
                             OptionalParameterTestFixture):
  """GET clear status id."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'CLEAR_STATUS_ID'


class ClearStatusMessagesWithData(TestMixins.SetWithDataMixin,
                                  OptionalParameterTestFixture):
  """Clear the status message queue with extra data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'CLEAR_STATUS_ID'
class ClearStatusMessages(TestMixins.SetWithNoDataMixin,
                          OptionalParameterTestFixture):
  """Clear the status message queue."""
  CATEGORY = TestCategory.STATUS_COLLECTION
  PID = 'CLEAR_STATUS_ID'

  def Test(self):
    # I don't believe there is a reliable way to check that the queue is
    # cleared. Note that this pid should only clear status messages, not
    # responses to ACK_TIMERS so we can't check if the message count is 0.
    self.AddIfSetSupported(self.AckSetResult())
    self.SendSet(ROOT_DEVICE, self.pid, [])


# Parameter Description
#------------------------------------------------------------------------------
class GetParamDescription(ResponderTestFixture):
  """Check that GET parameter description works for any manufacturer params."""
  CATEGORY = TestCategory.RDM_INFORMATION
  PID = 'PARAMETER_DESCRIPTION'
  REQUIRES = ['manufacturer_parameters']

  def Test(self):
    self.params = self.Property('manufacturer_parameters')[:]
    if len(self.params) == 0:
      self.SetNotRun('No manufacturer params found')
      return
    self._GetParam()

  def _GetParam(self):
    if len(self.params) == 0:
      self.Stop()
      return
    self.AddExpectedResults(
      self.AckGetResult(action=self._GetParam))
    self.current_param = self.params.pop()
    self.SendGet(ROOT_DEVICE, self.pid, [self.current_param])

  def VerifyResult(self, response, fields):
    if not response.WasAcked():
      return

    if self.current_param != fields['pid']:
      self.SetFailed('Request for pid 0x%hx returned pid 0x%hx' %
                     (self.current_param, fields['pid']))

    if fields['type'] != 0:
      self.AddWarning('type field in parameter description is not 0, was %d' %
                      fields['type'])

    # NOTE(review): a command_class of 0 is also outside the valid 1-3 range
    # but is not flagged by this check — confirm whether that is intended.
    if fields['command_class'] > 3:
      self.AddWarning(
          'command class field in parameter description should be 1, 2 or 3, '
          'was %d' % fields['command_class'])


class GetParamDescriptionForNonManufacturerPid(ResponderTestFixture):
  """GET parameter description for a non-manufacturer pid."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'PARAMETER_DESCRIPTION'
  REQUIRES = ['manufacturer_parameters']

  def Test(self):
    device_info_pid = self.LookupPid('DEVICE_INFO')
    results = [
      self.NackGetResult(RDMNack.NR_UNKNOWN_PID),
      self.NackGetResult(
          RDMNack.NR_DATA_OUT_OF_RANGE,
          # Bug fix: the two implicitly-concatenated literals previously
          # joined as 'but nomanufacturer'; a separating space was missing.
          advisory='Parameter Description appears to be supported but no '
                   'manufacturer pids were declared'),
    ]
    if self.Property('manufacturer_parameters'):
      results = self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE)

    self.AddExpectedResults(results)
    self.SendGet(ROOT_DEVICE, self.pid, [device_info_pid.value])


class GetParamDescriptionWithData(ResponderTestFixture):
  """GET parameter description with param data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'PARAMETER_DESCRIPTION'
  REQUIRES = ['manufacturer_parameters']

  def Test(self):
    results = [
      self.NackGetResult(RDMNack.NR_UNKNOWN_PID),
      self.NackGetResult(RDMNack.NR_FORMAT_ERROR,
                         advisory='Parameter Description appears to be '
                                  'supported but no manufacturer pids were '
                                  'declared'),
    ]
    if self.Property('manufacturer_parameters'):
      results = self.NackGetResult(RDMNack.NR_FORMAT_ERROR)
    self.AddExpectedResults(results)
    self.SendRawGet(ROOT_DEVICE, self.pid, 'foo')


class SetParamDescription(TestMixins.UnsupportedSetMixin,
                          ResponderTestFixture):
  """SET the parameter description."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'PARAMETER_DESCRIPTION'


class AllSubDevicesGetParamDescription(TestMixins.AllSubDevicesGetMixin,
                                       ResponderTestFixture):
  """Send a Get PARAMETER_DESCRIPTION to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'PARAMETER_DESCRIPTION'
  DATA = [0x8000]


# Proxied Device Count
#------------------------------------------------------------------------------
class GetProxiedDeviceCount(OptionalParameterTestFixture):
  """GET the proxied device count."""
  CATEGORY = TestCategory.NETWORK_MANAGEMENT
  PID = 'PROXIED_DEVICE_COUNT'
  REQUIRES = ['proxied_devices']

  def Test(self):
    self.AddIfGetSupported(self.AckGetResult())
    self.SendGet(ROOT_DEVICE, self.pid)

  def VerifyResult(self, response, unpacked_data):
    if not response.WasAcked():
      return

    proxied_devices = self.Property('proxied_devices')
    if proxied_devices is None:
      self.AddWarning(
          'PROXIED_DEVICE_COUNT ack\'ed but PROXIED_DEVICES didn\'t')
      return

    if not unpacked_data['list_changed']:
      # we expect the count to match the length of the list previously
      # returned
      if unpacked_data['device_count'] != len(proxied_devices):
        self.SetFailed(
            'Proxied device count doesn\'t match number of devices returned')
class GetProxiedDeviceCountWithData(TestMixins.GetWithDataMixin,
                                    OptionalParameterTestFixture):
  """GET the proxied device count with extra data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'PROXIED_DEVICE_COUNT'


class SetProxiedDeviceCount(TestMixins.UnsupportedSetMixin,
                            ResponderTestFixture):
  """SET the count of proxied devices."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'PROXIED_DEVICE_COUNT'


class AllSubDevicesGetProxiedDeviceCount(TestMixins.AllSubDevicesGetMixin,
                                         ResponderTestFixture):
  """Send a Get PROXIED_DEVICE_COUNT to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'PROXIED_DEVICE_COUNT'


# Proxied Devices
#------------------------------------------------------------------------------
class GetProxiedDevices(TestMixins.GetMixin, OptionalParameterTestFixture):
  """GET the list of proxied devices."""
  CATEGORY = TestCategory.NETWORK_MANAGEMENT
  PID = 'PROXIED_DEVICES'
  EXPECTED_FIELD = 'uids'
  PROVIDES = ['proxied_devices']


class GetProxiedDevicesWithData(TestMixins.GetWithDataMixin,
                                OptionalParameterTestFixture):
  """GET the list of proxied devices with extra data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'PROXIED_DEVICES'


class SetProxiedDevices(TestMixins.UnsupportedSetMixin, ResponderTestFixture):
  """SET the list of proxied devices."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'PROXIED_DEVICES'


class AllSubDevicesGetProxiedDevices(TestMixins.AllSubDevicesGetMixin,
                                     ResponderTestFixture):
  """Send a Get PROXIED_DEVICES to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'PROXIED_DEVICES'


# Comms Status
#------------------------------------------------------------------------------
class GetCommsStatus(OptionalParameterTestFixture):
  """GET the comms status."""
  CATEGORY = TestCategory.STATUS_COLLECTION
  PID = 'COMMS_STATUS'

  def Test(self):
    self.AddIfGetSupported(self.AckGetResult())
    self.SendGet(ROOT_DEVICE, self.pid)


class GetCommsStatusWithData(TestMixins.GetWithDataMixin,
                             OptionalParameterTestFixture):
  """GET the comms status with extra data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'COMMS_STATUS'


class ClearCommsStatus(OptionalParameterTestFixture):
  """Clear the comms status."""
  CATEGORY = TestCategory.STATUS_COLLECTION
  PID = 'COMMS_STATUS'

  def Test(self):
    # A successful SET is followed up with a GET to confirm the counters
    # were reset.
    self.AddIfSetSupported(self.AckSetResult(action=self.VerifySet))
    self.SendSet(ROOT_DEVICE, self.pid)

  def VerifySet(self):
    zeroed_counters = {
        'short_message': 0,
        'length_mismatch': 0,
        'checksum_fail': 0,
    }
    self.AddIfGetSupported(self.AckGetResult(field_values=zeroed_counters))
    self.SendGet(ROOT_DEVICE, self.pid)


class ClearCommsStatusWithData(TestMixins.SetWithDataMixin,
                               OptionalParameterTestFixture):
  """Clear the comms status with data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'COMMS_STATUS'


class AllSubDevicesGetClearCommsStatus(TestMixins.AllSubDevicesGetMixin,
                                       ResponderTestFixture):
  """Send a Get COMMS_STATUS to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'COMMS_STATUS'


# Product Detail Id List
#------------------------------------------------------------------------------
class GetProductDetailIdList(OptionalParameterTestFixture):
  """GET the list of product detail ids."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'PRODUCT_DETAIL_ID_LIST'

  def Test(self):
    self.AddIfGetSupported(self.AckGetResult(field_names=['detail_ids']))
    self.SendGet(ROOT_DEVICE, self.pid)


class GetProductDetailIdListWithData(TestMixins.GetWithDataMixin,
                                     OptionalParameterTestFixture):
  """GET product detail id list with param data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'PRODUCT_DETAIL_ID_LIST'


class SetProductDetailIdList(TestMixins.UnsupportedSetMixin,
                             ResponderTestFixture):
  """SET product detail id list."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'PRODUCT_DETAIL_ID_LIST'
class AllSubDevicesGetProductDetailIdList(TestMixins.AllSubDevicesGetMixin,
                                          ResponderTestFixture):
  """Send a Get PRODUCT_DETAIL_ID_LIST to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'PRODUCT_DETAIL_ID_LIST'


# Device Model Description
#------------------------------------------------------------------------------
class GetDeviceModelDescription(TestMixins.GetMixin,
                                OptionalParameterTestFixture):
  """GET the device model description."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'DEVICE_MODEL_DESCRIPTION'
  EXPECTED_FIELD = 'description'
  PROVIDES = ['model_description']


class GetDeviceModelDescriptionWithData(TestMixins.GetWithDataMixin,
                                        OptionalParameterTestFixture):
  """Get device model description with param data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'DEVICE_MODEL_DESCRIPTION'


class SetDeviceModelDescription(TestMixins.UnsupportedSetMixin,
                                OptionalParameterTestFixture):
  """Attempt to SET the device model description with no data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'DEVICE_MODEL_DESCRIPTION'


class SetDeviceModelDescriptionWithData(TestMixins.UnsupportedSetMixin,
                                        OptionalParameterTestFixture):
  """SET the device model description with data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'DEVICE_MODEL_DESCRIPTION'
  DATA = 'FOO BAR'


class AllSubDevicesGetModelDescription(TestMixins.AllSubDevicesGetMixin,
                                       ResponderTestFixture):
  """Send a Get DEVICE_MODEL_DESCRIPTION to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'DEVICE_MODEL_DESCRIPTION'


# Manufacturer Label
#------------------------------------------------------------------------------
class GetManufacturerLabel(TestMixins.GetMixin, OptionalParameterTestFixture):
  """GET the manufacturer label."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'MANUFACTURER_LABEL'
  EXPECTED_FIELD = 'label'
  PROVIDES = ['manufacturer_label']


class GetManufacturerLabelWithData(TestMixins.GetWithDataMixin,
                                   OptionalParameterTestFixture):
  """Get manufacturer label with param data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'MANUFACTURER_LABEL'


class SetManufacturerLabel(TestMixins.UnsupportedSetMixin,
                           OptionalParameterTestFixture):
  """Attempt to SET the manufacturer label with no data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'MANUFACTURER_LABEL'


class SetManufacturerLabelWithData(TestMixins.UnsupportedSetMixin,
                                   OptionalParameterTestFixture):
  """SET the manufacturer label with data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'MANUFACTURER_LABEL'
  DATA = 'FOO BAR'


class AllSubDevicesGetManufacturerLabel(TestMixins.AllSubDevicesGetMixin,
                                        ResponderTestFixture):
  """Send a Get MANUFACTURER_LABEL to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'MANUFACTURER_LABEL'


# Device Label
#------------------------------------------------------------------------------
class GetDeviceLabel(TestMixins.GetMixin, OptionalParameterTestFixture):
  """GET the device label."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'DEVICE_LABEL'
  PROVIDES = ['device_label']
  EXPECTED_FIELD = 'label'


class GetDeviceLabelWithData(TestMixins.GetWithDataMixin,
                             OptionalParameterTestFixture):
  """GET the device label with param data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'DEVICE_LABEL'


class SetDeviceLabel(TestMixins.SetLabelMixin, OptionalParameterTestFixture):
  """SET the device label."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'DEVICE_LABEL'
  REQUIRES = ['device_label']
  PROVIDES = ['set_device_label_supported']

  def OldValue(self):
    # The label captured by GetDeviceLabel, restored by the mixin afterwards.
    return self.Property('device_label')


class AllSubDevicesGetDeviceLabel(TestMixins.AllSubDevicesGetMixin,
                                  ResponderTestFixture):
  """Send a Get DEVICE_LABEL to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'DEVICE_LABEL'
class SetVendorcastDeviceLabel(TestMixins.NonUnicastSetLabelMixin,
                               OptionalParameterTestFixture):
  """SET the device label using the vendorcast address."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'DEVICE_LABEL'
  REQUIRES = ['device_label', 'set_device_label_supported']
  TEST_LABEL = 'vendorcast label'

  def Uid(self):
    # Address the whole manufacturer rather than a single responder.
    return UID.VendorcastAddress(self._uid.manufacturer_id)

  def OldValue(self):
    return self.Property('device_label')


class SetBroadcastDeviceLabel(TestMixins.NonUnicastSetLabelMixin,
                              OptionalParameterTestFixture):
  """SET the device label using the broadcast address."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'DEVICE_LABEL'
  REQUIRES = ['device_label', 'set_device_label_supported']
  TEST_LABEL = 'broadcast label'

  def Uid(self):
    return UID.AllDevices()

  def OldValue(self):
    return self.Property('device_label')


class SetFullSizeDeviceLabel(TestMixins.SetLabelMixin,
                             OptionalParameterTestFixture):
  """SET the device label."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'DEVICE_LABEL'
  REQUIRES = ['device_label']
  TEST_LABEL = 'this is a string with 32 charact'

  def OldValue(self):
    return self.Property('device_label')


class SetNonAsciiDeviceLabel(TestMixins.SetLabelMixin,
                             OptionalParameterTestFixture):
  """SET the device label to something that contains non-ascii data."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'DEVICE_LABEL'
  REQUIRES = ['device_label']
  TEST_LABEL = 'string with\x0d non ascii\xc0'

  def ExpectedResults(self):
    # Responders may either reject the non-ascii label or accept it.
    return [
      self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE),
      self.NackSetResult(RDMNack.NR_FORMAT_ERROR),
      self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS),
      self.AckSetResult(action=self.VerifySet)
    ]

  def OldValue(self):
    return self.Property('device_label')


class SetEmptyDeviceLabel(TestMixins.SetLabelMixin,
                          OptionalParameterTestFixture):
  """SET the device label with no data."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'DEVICE_LABEL'
  REQUIRES = ['device_label']
  TEST_LABEL = ''

  def OldValue(self):
    return self.Property('device_label')


class SetOversizedDeviceLabel(TestMixins.SetOversizedLabelMixin,
                              OptionalParameterTestFixture):
  """SET the device label with more than 32 bytes of data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  REQUIRES = ['device_label']
  PID = 'DEVICE_LABEL'

  def OldValue(self):
    return self.Property('device_label')


# Language Capabilities
#------------------------------------------------------------------------------
class GetLanguageCapabilities(OptionalParameterTestFixture):
  """GET the language capabilities pid."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'LANGUAGE_CAPABILITIES'
  PROVIDES = ['languages_capabilities']

  def Test(self):
    self.languages = []
    self.AddIfGetSupported(self.AckGetResult(field_names=['languages']))
    self.SendGet(ROOT_DEVICE, self.pid)

  def VerifyResult(self, response, fields):
    if not response.WasAcked():
      self.SetProperty('languages_capabilities', [])
      return

    self.languages = [f['language'] for f in fields['languages']]

    if len(self.languages) == 0:
      self.AddWarning('No languages returned for LANGUAGE_CAPABILITIES')

    # Flag duplicates while collapsing the list into a set.
    language_set = set()
    for language in self.languages:
      if language in language_set:
        self.AddAdvisory('%s listed twice in language capabilities' %
                         language)
      language_set.add(language)

    self.SetProperty('languages_capabilities', language_set)


class GetLanguageCapabilitiesWithData(TestMixins.GetWithDataMixin,
                                      OptionalParameterTestFixture):
  """GET the language capabilities pid with extra data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'LANGUAGE_CAPABILITIES'


class AllSubDevicesGetLanguageCapablities(TestMixins.AllSubDevicesGetMixin,
                                          ResponderTestFixture):
  """Send a Get LANGUAGE_CAPABILITIES to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'LANGUAGE_CAPABILITIES'


# Language
#------------------------------------------------------------------------------
class GetLanguage(TestMixins.GetMixin, OptionalParameterTestFixture):
  """GET the language."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'LANGUAGE'
  PROVIDES = ['language']
  EXPECTED_FIELD = 'language'
class SetLanguage(OptionalParameterTestFixture):
  """SET the language."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'LANGUAGE'
  REQUIRES = ['language', 'languages_capabilities']

  def Test(self):
    ack = self.AckSetResult(action=self.VerifySet)
    nack = self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)

    # Fixed local variable spelling: was 'available_langugages'.
    available_languages = list(self.Property('languages_capabilities'))
    if available_languages:
      if len(available_languages) > 1:
        # if the responder only supports 1 lang, we may not be able to set it
        self.AddIfSetSupported(ack)
        self.new_language = available_languages[0]
        if self.new_language == self.Property('language'):
          self.new_language = available_languages[1]
      else:
        self.new_language = available_languages[0]
        self.AddIfSetSupported([ack, nack])
    else:
      # Get languages returned no languages so we expect a nack
      self.AddIfSetSupported(nack)
      self.new_language = 'en'

    self.SendSet(ROOT_DEVICE, self.pid, [self.new_language])

  def VerifySet(self):
    self.AddExpectedResults(
      self.AckGetResult(field_values={'language': self.new_language}))
    self.SendGet(ROOT_DEVICE, self.pid)


class SetNonAsciiLanguage(OptionalParameterTestFixture):
  """Try to set the language to non-ascii characters."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'LANGUAGE'

  def Test(self):
    self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE))
    self.SendSet(ROOT_DEVICE, self.pid, ['\x0d\xc0'])


class SetUnsupportedLanguage(OptionalParameterTestFixture):
  """Try to set a language that doesn't exist in Language Capabilities."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'LANGUAGE'
  REQUIRES = ['languages_capabilities']

  def Test(self):
    if 'zz' in self.Property('languages_capabilities'):
      self.SetBroken('zz exists in the list of available languages')
      self.Stop()
      return

    self.AddIfSetSupported([
      self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS),
      self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE),
    ])
    self.SendSet(ROOT_DEVICE, self.pid, ['zz'])


class AllSubDevicesGetLanguage(TestMixins.AllSubDevicesGetMixin,
                               ResponderTestFixture):
  """Send a Get LANGUAGE to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'LANGUAGE'


# Software Version Label
#------------------------------------------------------------------------------
class GetSoftwareVersionLabel(TestMixins.GetRequiredMixin,
                              ResponderTestFixture):
  """GET the software version label."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'SOFTWARE_VERSION_LABEL'
  EXPECTED_FIELD = 'label'


class GetSoftwareVersionLabelWithData(ResponderTestFixture):
  """GET the software_version_label with param data."""
  # We don't use the GetLabelMixin here because this PID is mandatory
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'SOFTWARE_VERSION_LABEL'

  def Test(self):
    self.AddExpectedResults([
      self.NackGetResult(RDMNack.NR_FORMAT_ERROR),
      self.AckGetResult(
        warning='Get %s with data returned an ack' % self.pid.name)
    ])
    self.SendRawGet(ROOT_DEVICE, self.pid, 'foo')


class SetSoftwareVersionLabel(TestMixins.UnsupportedSetMixin,
                              ResponderTestFixture):
  """Attempt to SET the software version label."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'SOFTWARE_VERSION_LABEL'


class AllSubDevicesGetSoftwareVersionLabel(TestMixins.AllSubDevicesGetMixin,
                                           ResponderTestFixture):
  """Send a Get SOFTWARE_VERSION_LABEL to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'SOFTWARE_VERSION_LABEL'


class GetSubDeviceSoftwareVersionLabel(ResponderTestFixture):
  """Check that SOFTWARE_VERSION_LABEL is supported on all sub devices."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'SOFTWARE_VERSION_LABEL'
  REQUIRES = ['sub_device_addresses']

  def Test(self):
    # list() keeps this working with Python 3 dict key views as well; also
    # dropped the stray trailing semicolons from the original.
    self._sub_devices = list(self.Property('sub_device_addresses').keys())
    self._sub_devices.reverse()
    self._GetSoftwareVersion()

  def _GetSoftwareVersion(self):
    if not self._sub_devices:
      self.Stop()
      return

    self.AddExpectedResults(
      self.AckGetResult(action=self._GetSoftwareVersion))
    sub_device = self._sub_devices.pop()
    self.SendGet(sub_device, self.pid)


# Boot Software Version
#------------------------------------------------------------------------------
class GetBootSoftwareVersion(OptionalParameterTestFixture):
  """GET the boot software version."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'BOOT_SOFTWARE_VERSION_ID'

  def Test(self):
    self.AddIfGetSupported(self.AckGetResult(field_names=['version']))
    self.SendGet(ROOT_DEVICE, self.pid)


class GetBootSoftwareVersionWithData(TestMixins.GetWithDataMixin,
                                     OptionalParameterTestFixture):
  """GET the boot software version with extra data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'BOOT_SOFTWARE_VERSION_ID'


class SetBootSoftwareVersion(TestMixins.UnsupportedSetMixin,
                             ResponderTestFixture):
  """Attempt to SET the boot software version."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'BOOT_SOFTWARE_VERSION_ID'


class AllSubDevicesGetBootSoftwareVersion(TestMixins.AllSubDevicesGetMixin,
                                          ResponderTestFixture):
  """Send a Get BOOT_SOFTWARE_VERSION_ID to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'BOOT_SOFTWARE_VERSION_ID'


# Boot Software Version Label
#------------------------------------------------------------------------------
class GetBootSoftwareLabel(TestMixins.GetMixin, OptionalParameterTestFixture):
  """GET the boot software label."""
  CATEGORY = TestCategory.PRODUCT_INFORMATION
  PID = 'BOOT_SOFTWARE_VERSION_LABEL'
  EXPECTED_FIELD = 'label'


class GetBootSoftwareLabelWithData(TestMixins.GetWithDataMixin,
                                   OptionalParameterTestFixture):
  """GET the boot software label with param data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'BOOT_SOFTWARE_VERSION_LABEL'


class SetBootSoftwareLabel(TestMixins.UnsupportedSetMixin,
                           OptionalParameterTestFixture):
  """SET the boot software label."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'BOOT_SOFTWARE_VERSION_LABEL'


class AllSubDevicesGetBootSoftwareVersionLabel(
    TestMixins.AllSubDevicesGetMixin, ResponderTestFixture):
  """Send a Get BOOT_SOFTWARE_VERSION_LABEL to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'BOOT_SOFTWARE_VERSION_LABEL'


# DMX Personality & DMX Personality Description
#------------------------------------------------------------------------------
class GetZeroPersonalityDescription(TestMixins.GetZeroByteMixin, OptionalParameterTestFixture): """GET DMX_PERSONALITY_DESCRIPTION for personality 0.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DMX_PERSONALITY_DESCRIPTION' class GetOutOfRangePersonalityDescription(OptionalParameterTestFixture): """GET the personality description for the N + 1 personality.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DMX_PERSONALITY_DESCRIPTION' REQUIRES = ['personality_count'] def Test(self): self.AddIfGetSupported(self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) personality_count = self.Property('personality_count') self.SendGet(ROOT_DEVICE, self.pid, [personality_count + 1]) class AllSubDevicesGetPersonalityDescription(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get DMX_PERSONALITY_DESCRIPTION to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'DMX_PERSONALITY_DESCRIPTION' DATA = [1] class GetPersonalityDescription(OptionalParameterTestFixture): """GET the personality description for the current personality.""" CATEGORY = TestCategory.DMX_SETUP PID = 'DMX_PERSONALITY_DESCRIPTION' REQUIRES = ['current_personality', 'dmx_footprint', 'personality_count'] def Test(self): personality_count = self.Property('personality_count') current_personality = self.Property('current_personality') if current_personality == 0 and personality_count > 0: # It's probably off by one, so fix it current_personality = 1 if personality_count > 0: # cross check against what we got from device info self.AddIfGetSupported(self.AckGetResult(field_values={ 'personality': current_personality, 'slots_required': self.Property('dmx_footprint'), })) self.SendGet(ROOT_DEVICE, self.pid, [current_personality]) else: self.AddIfGetSupported(self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) self.SendGet(ROOT_DEVICE, self.pid, [1]) class GetPersonality(OptionalParameterTestFixture): """Get the current personality settings.""" CATEGORY = TestCategory.DMX_SETUP PID = 
'DMX_PERSONALITY' REQUIRES = ['current_personality', 'personality_count'] def Test(self): self.AddIfGetSupported(self.AckGetResult( field_names=['current_personality', 'personality_count'])) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not response.WasAcked(): return current_personality = self.Property('current_personality') personality_count = self.Property('personality_count') warning_str = ("Personality information in device info doesn't match" ' that in dmx_personality') if current_personality != fields['current_personality']: self.SetFailed('%s: current_personality %d != %d' % ( warning_str, current_personality, fields['current_personality'])) if personality_count != fields['personality_count']: self.SetFailed('%s: personality_count %d != %d' % ( warning_str, personality_count, fields['personality_count'])) class GetPersonalityDescriptions(OptionalParameterTestFixture): """Get information about all the personalities.""" CATEGORY = TestCategory.DMX_SETUP PID = 'DMX_PERSONALITY_DESCRIPTION' REQUIRES = ['personality_count'] PROVIDES = ['personalities'] def Test(self): self._personalities = [] self._personality_count = self.Property('personality_count') self._current_index = 0 self._GetPersonality() def _GetPersonality(self): self._current_index += 1 if self._current_index > self._personality_count: if self._personality_count == 0: self.SetNotRun('No personalities declared') self.SetProperty('personalities', self._personalities) self.Stop() return if self._current_index >= MAX_PERSONALITY_NUMBER: # This should never happen because personality_count is a uint8 self.SetFailed('Could not find all personalities') self.Stop() return self.AddIfGetSupported(self.AckGetResult( field_names=['slots_required', 'name'], field_values={'personality': self._current_index}, action=self._GetPersonality)) self.SendGet(ROOT_DEVICE, self.pid, [self._current_index]) def VerifyResult(self, response, fields): """Save the personality for other tests to 
use.""" if response.WasAcked(): self._personalities.append(fields) class SetPersonality(OptionalParameterTestFixture): """Set the personality.""" CATEGORY = TestCategory.DMX_SETUP PID = 'DMX_PERSONALITY' REQUIRES = ['current_personality', 'personalities'] def Test(self): self._personalities = list(self.Property('personalities')) self._consumes_slots = False for personality in self._personalities: if personality['slots_required'] > 0: self._consumes_slots = True break if len(self._personalities) > 0: self._CheckPersonality() return # check we get a NR_UNKNOWN_PID self.AddExpectedResults(self.NackSetResult(RDMNack.NR_UNKNOWN_PID)) self.new_personality = {'personality': 1} # can use anything here really self.SendSet(ROOT_DEVICE, self.pid, [1]) def _CheckPersonality(self): if not self._personalities: # end of the list, we're done self.Stop() return self.AddIfSetSupported(self.AckSetResult(action=self.VerifySet)) self.SendSet(ROOT_DEVICE, self.pid, [self._personalities[0]['personality']]) def VerifySet(self): self.AddIfGetSupported( self.AckGetResult( field_values={ 'current_personality': self._personalities[0]['personality'], }, action=self.VerifyDeviceInfo)) self.SendGet(ROOT_DEVICE, self.pid) def VerifyDeviceInfo(self): device_info_pid = self.LookupPid('DEVICE_INFO') next_action = self.NextPersonality if self._personalities[0]['slots_required'] == 0: # if this personality has a footprint of 0, verify the start address is # 0xffff next_action = self.VerifyFootprint0StartAddress self.AddExpectedResults( AckGetResult( device_info_pid.value, field_values={ 'current_personality': self._personalities[0]['personality'], 'dmx_footprint': self._personalities[0]['slots_required'], }, action=next_action)) self.SendGet(ROOT_DEVICE, device_info_pid) def VerifyFootprint0StartAddress(self): address_pid = self.LookupPid('DMX_START_ADDRESS') expected_results = [ AckGetResult( address_pid.value, field_values={'dmx_address': 0xffff}, action=self.NextPersonality), ] if not 
self._consumes_slots: expected_results.append( NackGetResult(address_pid.value, RDMNack.NR_UNKNOWN_PID, action=self.NextPersonality) ) self.AddExpectedResults(expected_results) self.SendGet(ROOT_DEVICE, address_pid) def NextPersonality(self): self._personalities = self._personalities[1:] self._CheckPersonality() def ResetState(self): # reset back to the old value personality = self.Property('current_personality') if personality == 0 or personality > 255: return self.SendSet(ROOT_DEVICE, self.pid, [self.Property('current_personality')]) self._wrapper.Run() class SetZeroPersonality(TestMixins.SetZeroByteMixin, OptionalParameterTestFixture): """Set DMX_PERSONALITY for personality 0.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DMX_PERSONALITY' class SetOutOfRangePersonality(TestMixins.SetOutOfRangeByteMixin, OptionalParameterTestFixture): """Set DMX_PERSONALITY to an out-of-range value.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DMX_PERSONALITY' REQUIRES = ['personality_count'] LABEL = 'personalities' class SetOversizedPersonality(OptionalParameterTestFixture): """Send an over-sized SET personality command.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DMX_PERSONALITY' def Test(self): self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_FORMAT_ERROR)) self.SendRawSet(ROOT_DEVICE, self.pid, 'foo') class AllSubDevicesGetPersonality(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get DMX_PERSONALITY to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'DMX_PERSONALITY' # DMX Start Address tests #------------------------------------------------------------------------------ class GetStartAddress(ResponderTestFixture): """GET the DMX start address.""" CATEGORY = TestCategory.DMX_SETUP PID = 'DMX_START_ADDRESS' REQUIRES = ['dmx_footprint', 'dmx_start_address'] PROVIDES = ['dmx_address'] def Test(self): if self.Property('dmx_footprint') > 0: results = self.AckGetResult(field_names=['dmx_address']) else: results = [ 
self.AckGetResult(field_values={'dmx_address': 0xffff}), self.NackGetResult(RDMNack.NR_UNKNOWN_PID), self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE), ] self.AddExpectedResults(results) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not response.WasAcked(): self.SetProperty('dmx_address', None) return if self.Property('dmx_start_address') != fields['dmx_address']: self.SetFailed( 'DMX_START_ADDRESS (%d) doesn\'t match what was in DEVICE_INFO (%d)' % (self.Property('dmx_start_address'), fields['dmx_address'])) self.SetPropertyFromDict(fields, 'dmx_address') class SetStartAddress(TestMixins.SetStartAddressMixin, ResponderTestFixture): """Set the DMX start address.""" CATEGORY = TestCategory.DMX_SETUP PID = 'DMX_START_ADDRESS' REQUIRES = ['dmx_footprint', 'dmx_address'] PROVIDES = ['set_dmx_address_supported'] def Test(self): footprint = self.Property('dmx_footprint') current_address = self.Property('dmx_address') self.start_address = 1 if footprint == 0 or current_address == 0xffff: results = [ self.NackSetResult(RDMNack.NR_UNKNOWN_PID), self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE) ] else: self.start_address = self.CalculateNewAddress(current_address, footprint) results = self.AckSetResult(action=self.VerifySet) self._test_state = self.SET self.AddExpectedResults(results) self.SendSet(ROOT_DEVICE, self.pid, [self.start_address]) def VerifyResult(self, response, fields): if self._test_state == self.SET: self.SetProperty(self.PROVIDES[0], response.WasAcked()) class SetVendorcastStartAddress(TestMixins.SetNonUnicastStartAddressMixin, ResponderTestFixture): """SET the dmx start address using the vendorcast address.""" CATEGORY = TestCategory.DMX_SETUP PID = 'DMX_START_ADDRESS' REQUIRES = ['dmx_footprint', 'dmx_address', 'set_dmx_address_supported'] def Uid(self): return UID.VendorcastAddress(self._uid.manufacturer_id) class SetBroadcastStartAddress(TestMixins.SetNonUnicastStartAddressMixin, ResponderTestFixture): """SET the dmx start 
address using the broadcast address.""" CATEGORY = TestCategory.DMX_SETUP PID = 'DMX_START_ADDRESS' REQUIRES = ['dmx_footprint', 'dmx_address', 'set_dmx_address_supported'] def Uid(self): return UID.AllDevices() class SetOutOfRangeStartAddress(ResponderTestFixture): """Check that the DMX address can't be set to > 512.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DMX_START_ADDRESS' # we depend on dmx_address to make sure this runs after GetStartAddress DEPS = [GetStartAddress] REQUIRES = ['dmx_footprint'] def Test(self): if self.Property('dmx_footprint') > 0: self.AddExpectedResults(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) else: self.AddExpectedResults([self.NackSetResult(RDMNack.NR_UNKNOWN_PID), self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE) ]) data = struct.pack('!H', MAX_DMX_ADDRESS + 1) self.SendRawSet(ROOT_DEVICE, self.pid, data) class SetZeroStartAddress(ResponderTestFixture): """Check the DMX address can't be set to 0.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DMX_START_ADDRESS' # we depend on dmx_address to make sure this runs after GetStartAddress DEPS = [GetStartAddress] REQUIRES = ['dmx_footprint'] def Test(self): if self.Property('dmx_footprint') > 0: self.AddExpectedResults(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) else: self.AddExpectedResults([self.NackSetResult(RDMNack.NR_UNKNOWN_PID), self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE) ]) data = struct.pack('!H', 0) self.SendRawSet(ROOT_DEVICE, self.pid, data) class SetOversizedStartAddress(ResponderTestFixture): """Send an over-sized SET dmx start address.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DMX_START_ADDRESS' # we depend on dmx_address to make sure this runs after GetStartAddress DEPS = [GetStartAddress] REQUIRES = ['dmx_footprint'] def Test(self): if self.Property('dmx_footprint') > 0: self.AddExpectedResults(self.NackSetResult(RDMNack.NR_FORMAT_ERROR)) else: self.AddExpectedResults([ self.NackSetResult(RDMNack.NR_UNKNOWN_PID), 
self.NackSetResult(RDMNack.NR_FORMAT_ERROR), ]) self.SendRawSet(ROOT_DEVICE, self.pid, 'foo') class AllSubDevicesGetStartAddress(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get DMX_START_ADDRESS to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'DMX_START_ADDRESS' # Slot Info #------------------------------------------------------------------------------ class GetSlotInfo(OptionalParameterTestFixture): """Get SLOT_INFO.""" CATEGORY = TestCategory.DMX_SETUP PID = 'SLOT_INFO' PROVIDES = ['defined_slots', 'undefined_definition_slots', 'undefined_type_sec_slots'] def Test(self): self.AddIfGetSupported(self.AckGetResult()) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not response.WasAcked(): self.SetProperty('defined_slots', set()) self.SetProperty('undefined_definition_slots', []) self.SetProperty('undefined_type_sec_slots', []) return slots = [d['slot_offset'] for d in fields['slots']] self.SetProperty('defined_slots', set(slots)) undefined_definition_slots = [] undefined_type_sec_slots = [] for slot in fields['slots']: if slot['slot_type'] not in RDMConstants.SLOT_TYPE_TO_NAME: self.AddWarning('Unknown slot type %d for slot %d' % (slot['slot_type'], slot['slot_offset'])) if slot['slot_type'] == RDMConstants.SLOT_TYPES['ST_PRIMARY']: # slot_label_id must be valid if slot['slot_label_id'] not in RDMConstants.SLOT_DEFINITION_TO_NAME: self.AddWarning('Unknown slot id %d for slot %d' % (slot['slot_label_id'], slot['slot_offset'])) if (slot['slot_label_id'] == RDMConstants.SLOT_DEFINITIONS['SD_UNDEFINED']): undefined_definition_slots.append(slot['slot_offset']) else: # slot_label_id must reference a defined slot if slot['slot_label_id'] not in slots: self.AddWarning( 'Slot %d is of type secondary and references an unknown slot %d' % (slot['slot_offset'], slot['slot_label_id'])) if slot['slot_type'] == RDMConstants.SLOT_TYPES['ST_SEC_UNDEFINED']: undefined_type_sec_slots.append(slot['slot_offset']) 
self.SetProperty('undefined_definition_slots', undefined_definition_slots) self.SetProperty('undefined_type_sec_slots', undefined_type_sec_slots) class GetSlotInfoWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get SLOT_INFO with invalid data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SLOT_INFO' class SetSlotInfo(TestMixins.UnsupportedSetMixin, OptionalParameterTestFixture): """Set SLOT_INFO.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SLOT_INFO' class AllSubDevicesGetSlotInfo(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get SLOT_INFO to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'SLOT_INFO' # Slot Description #------------------------------------------------------------------------------ class GetSlotDescriptions(OptionalParameterTestFixture): """Get the slot descriptions for all defined slots.""" CATEGORY = TestCategory.DMX_SETUP PID = 'SLOT_DESCRIPTION' REQUIRES = ['dmx_footprint'] def Test(self): footprint = self.Property('dmx_footprint') if footprint == 0: self.AddIfGetSupported(self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) self.SendGet(ROOT_DEVICE, self.pid, [0]) else: self._slots = range(footprint) self._GetSlotDescription() def _GetSlotDescription(self): if not self._slots: self.Stop() return self.AddIfGetSupported([ self.AckGetResult(action=self._GetNextSlot), self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE, action=self._GetNextSlot) ]) self.SendGet(ROOT_DEVICE, self.pid, [self._slots[0]]) def _GetNextSlot(self): self._slots.pop(0) self._GetSlotDescription() def VerifyResult(self, response, fields): if not response.WasAcked(): return if self._slots[0] != fields['slot_number']: self.AddWarning( 'Requested description for slot %d, message returned slot %d' % (self._slots[0], fields['slot_number'])) return class GetSlotDescriptionWithNoData(TestMixins.GetWithNoDataMixin, OptionalParameterTestFixture): """Get the slot description with no slot number specified.""" CATEGORY = 
TestCategory.ERROR_CONDITIONS PID = 'SLOT_DESCRIPTION' class GetSlotDescriptionWithTooMuchData(OptionalParameterTestFixture): """Get the slot description with more than 2 bytes of data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SLOT_DESCRIPTION' def Test(self): self.AddIfGetSupported(self.NackGetResult(RDMNack.NR_FORMAT_ERROR)) self.SendRawGet(ROOT_DEVICE, self.pid, 'foo') class GetUndefinedSlotDefinitionDescriptions(OptionalParameterTestFixture): """Get the slot description for all slots with undefined definition.""" CATEGORY = TestCategory.DMX_SETUP PID = 'SLOT_DESCRIPTION' REQUIRES = ['undefined_definition_slots'] def Test(self): self.undef_slots = self.Property('undefined_definition_slots')[:] if len(self.undef_slots) == 0: self.SetNotRun('No undefined definition slots found') return self._GetSlotDescription() def _GetSlotDescription(self): if len(self.undef_slots) == 0: self.Stop() return self.AddExpectedResults([ self.AckGetResult(action=self._GetSlotDescription), self.NackGetResult(RDMNack.NR_UNKNOWN_PID, action=self._GetSlotDescription), self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE, action=self._GetSlotDescription) ]) self.current_slot = self.undef_slots.pop() self.SendGet(ROOT_DEVICE, self.pid, [self.current_slot]) def VerifyResult(self, response, fields): if not response.WasAcked(): if response.nack_reason == RDMNack.NR_UNKNOWN_PID: self.AddWarning( '%s not supported for slot %d with undefined ' 'definition' % (self.pid, self.current_slot)) if response.nack_reason == RDMNack.NR_DATA_OUT_OF_RANGE: self.AddWarning( 'Slot description for slot %d with undefined definition was missing' % (self.current_slot)) return if not fields['name']: self.AddWarning( 'Slot description for slot %d with undefined definition was blank' % (self.current_slot)) return class GetUndefinedSecondarySlotTypeDescriptions(OptionalParameterTestFixture): """Get the slot description for all secondary slots with an undefined type.""" CATEGORY = TestCategory.DMX_SETUP PID = 
'SLOT_DESCRIPTION' REQUIRES = ['undefined_type_sec_slots'] def Test(self): self.undef_sec_slots = self.Property('undefined_type_sec_slots')[:] if len(self.undef_sec_slots) == 0: self.SetNotRun('No undefined type secondary slots found') return self._GetSlotDescription() def _GetSlotDescription(self): if len(self.undef_sec_slots) == 0: self.Stop() return self.AddExpectedResults([ self.AckGetResult(action=self._GetSlotDescription), self.NackGetResult(RDMNack.NR_UNKNOWN_PID, action=self._GetSlotDescription), self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE, action=self._GetSlotDescription) ]) self.current_slot = self.undef_sec_slots.pop() self.SendGet(ROOT_DEVICE, self.pid, [self.current_slot]) def VerifyResult(self, response, fields): if not response.WasAcked(): if response.nack_reason == RDMNack.NR_UNKNOWN_PID: self.AddAdvisory( '%s not supported for secondary slot %d with undefined type' % (self.pid, self.current_slot)) if response.nack_reason == RDMNack.NR_DATA_OUT_OF_RANGE: self.AddAdvisory( 'Slot description for secondary slot %d with undefined type was ' 'missing' % (self.current_slot)) return if not fields['name']: self.AddAdvisory( 'Slot description for secondary slot %d with undefined type was ' 'blank' % (self.current_slot)) return class SetSlotDescription(TestMixins.UnsupportedSetMixin, OptionalParameterTestFixture): """Set SLOT_DESCRIPTION.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SLOT_DESCRIPTION' class AllSubDevicesGetSlotDescription(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get SLOT_DESCRIPTION to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'SLOT_DESCRIPTION' DATA = [1] # Default Slot Value #------------------------------------------------------------------------------ class GetDefaultSlotValues(OptionalParameterTestFixture): """Get DEFAULT_SLOT_VALUE.""" CATEGORY = TestCategory.DMX_SETUP PID = 'DEFAULT_SLOT_VALUE' REQUIRES = ['defined_slots'] def Test(self): self.AddIfGetSupported(self.AckGetResult()) 
self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not response.WasAcked(): return defined_slots = self.Property('defined_slots') default_slots = set() for slot in fields['slot_values']: if slot['slot_offset'] not in defined_slots: self.AddAdvisory( "DEFAULT_SLOT_VALUE contained slot %d, which wasn't in SLOT_INFO" % slot['slot_offset']) default_slots.add(slot['slot_offset']) for slot_offset in defined_slots: if slot_offset not in default_slots: self.AddAdvisory( "SLOT_INFO contained slot %d, which wasn't in DEFAULT_SLOT_VALUE" % slot_offset) class GetDefaultSlotInfoWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get DEFAULT_SLOT_VALUE with invalid data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DEFAULT_SLOT_VALUE' class SetDefaultSlotInfo(TestMixins.UnsupportedSetMixin, OptionalParameterTestFixture): """Set DEFAULT_SLOT_VALUE.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DEFAULT_SLOT_VALUE' class AllSubDevicesGetDefaultSlotValue(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get DEFAULT_SLOT_VALUE to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'DEFAULT_SLOT_VALUE' DATA = [1] # Sensor Consistency Checks #------------------------------------------------------------------------------ class CheckSensorConsistency(ResponderTestFixture): """Check that sensor support is consistent.""" CATEGORY = TestCategory.SENSORS REQUIRES = ['sensor_count', 'sensor_recording_supported', 'supported_parameters'] def IsSupported(self, pid): return pid.value in self.Property('supported_parameters') def CheckConsistency(self, pid_name, check_for_support = True): pid = self.LookupPid(pid_name) if (check_for_support and (not self.IsSupported(pid)) and self.Property('sensor_count')) > 0: self.AddAdvisory('%s not supported but sensor count was > 0' % pid) if self.IsSupported(pid) and self.Property('sensor_count') == 0: self.AddAdvisory('%s supported but sensor count was 0' % pid) def 
Test(self): self.CheckConsistency('SENSOR_DEFINITION') self.CheckConsistency('SENSOR_VALUE') self.CheckConsistency('RECORD_SENSORS', self.Property('sensor_recording_supported')) self.SetPassed() self.Stop() # Sensor Definition #------------------------------------------------------------------------------ class GetSensorDefinition(OptionalParameterTestFixture): """Fetch all the sensor definitions.""" CATEGORY = TestCategory.SENSORS PID = 'SENSOR_DEFINITION' REQUIRES = ['sensor_count'] PROVIDES = ['sensor_definitions', 'sensor_recording_supported'] MAX_SENSOR_INDEX = 0xfe RECORDED_VALUE_MASK = 0x01 PREDICATE_DICT = { '==': operator.eq, '<': operator.lt, '>': operator.gt, } def Test(self): # default to false self._sensors = {} # stores the discovered sensors self._current_index = -1 # the current sensor we're trying to query self._sensor_holes = [] # indices of sensors that are missing self._CheckForSensor() def _MissingSensorWarning(self): max_sensor = max(self._sensors.keys()) missing_sensors = [i for i in self._sensor_holes if i < max_sensor] if missing_sensors: self.AddWarning('Sensors missing in positions %s' % missing_sensors) def _CheckForSensor(self): if self.PidSupported(): # If this pid is supported we attempt to locate all sensors if self._current_index == self.MAX_SENSOR_INDEX: if len(self._sensors) < self.Property('sensor_count'): self.AddWarning('Only found %d/%d sensors' % (len(self._sensors), self.Property('sensor_count'))) elif len(self._sensors) > self.Property('sensor_count'): self.AddWarning('Found too many %d/%d sensors' % (len(self._sensors), self.Property('sensor_count'))) self.SetProperty('sensor_definitions', self._sensors) supports_recording = False for sensor_def in self._sensors.itervalues(): supports_recording |= ( sensor_def['supports_recording'] & self.RECORDED_VALUE_MASK) self.SetProperty('sensor_recording_supported', supports_recording) self._MissingSensorWarning() self.Stop() return # For each message we should either see a 
NR_DATA_OUT_OF_RANGE or an ack self.AddExpectedResults([ self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE, action=self._AddToHoles), self.AckGetResult(action=self._CheckForSensor) ]) else: # not supported, just check we get a NR_UNKNOWN_PID self.AddExpectedResults(self.NackGetResult(RDMNack.NR_UNKNOWN_PID)) self.SetProperty('sensor_definitions', {}) self._current_index += 1 self.SendGet(ROOT_DEVICE, self.pid, [self._current_index]) def _AddToHoles(self): self._sensor_holes.append(self._current_index) self._CheckForSensor() def VerifyResult(self, response, fields): if not response.WasAcked(): return sensor_number = fields['sensor_number'] if self._current_index != sensor_number: self.AddWarning( 'Requested sensor %d, message returned sensor %d' % (self._current_index, fields['sensor_number'])) return self._sensors[self._current_index] = fields # perform sanity checks on the sensor infomation if (fields['type'] not in RDMConstants.SENSOR_TYPE_TO_NAME and fields['type'] < 0x80): self.AddWarning('Unknown type %d for sensor %d' % (fields['type'], sensor_number)) if fields['unit'] not in RDMConstants.UNIT_TO_NAME: self.AddWarning('Unknown unit %d for sensor %d' % (fields['unit'], sensor_number)) if fields['prefix'] not in RDMConstants.PREFIX_TO_NAME: self.AddWarning('Unknown prefix %d for sensor %d' % (fields['prefix'], sensor_number)) self.CheckCondition(sensor_number, fields, 'range_min', '>', 'range_max') self.CheckCondition(sensor_number, fields, 'range_min', '==', 'range_max') self.CheckCondition(sensor_number, fields, 'normal_min', '>', 'normal_max') self.CheckCondition(sensor_number, fields, 'normal_min', '==', 'normal_max') self.CheckCondition(sensor_number, fields, 'normal_min', '<', 'range_min') self.CheckCondition(sensor_number, fields, 'normal_max', '>', 'range_max') if fields['supports_recording'] & 0xfc: self.AddWarning('bits 7-2 in the recorded message support fields are set' ' for sensor %d' % sensor_number) def CheckCondition(self, sensor_number, fields, 
lhs, predicate_str, rhs): """Check for a condition and add a warning if it isn't true.""" predicate = self.PREDICATE_DICT[predicate_str] if predicate(fields[lhs], fields[rhs]): self.AddAdvisory( 'Sensor %d, %s (%d) %s %s (%d)' % (sensor_number, lhs, fields[lhs], predicate_str, rhs, fields[rhs])) class GetSensorDefinitionWithNoData(TestMixins.GetWithNoDataMixin, OptionalParameterTestFixture): """Get the sensor definition with no data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SENSOR_DEFINITION' class GetSensorDefinitionWithTooMuchData(OptionalParameterTestFixture): """Get the sensor definition with more than 1 byte of data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SENSOR_DEFINITION' def Test(self): self.AddIfGetSupported(self.NackGetResult(RDMNack.NR_FORMAT_ERROR)) self.SendRawGet(ROOT_DEVICE, self.pid, 'foo') class GetInvalidSensorDefinition(OptionalParameterTestFixture): """Get the sensor definition with the all sensor value (0xff).""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SENSOR_DEFINITION' def Test(self): self.AddIfGetSupported(self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) data = struct.pack('!B', 0xff) self.SendRawGet(ROOT_DEVICE, self.pid, data) class SetSensorDefinition(TestMixins.UnsupportedSetMixin, OptionalParameterTestFixture): """SET the sensor definition.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SENSOR_DEFINITION' class AllSubDevicesGetSensorDefinition(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get SENSOR_DEFINITION to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'SENSOR_DEFINITION' DATA = [1] # Sensor Value #------------------------------------------------------------------------------ class GetSensorValues(OptionalParameterTestFixture): """Get values for all defined sensors.""" CATEGORY = TestCategory.SENSORS PID = 'SENSOR_VALUE' REQUIRES = ['sensor_definitions'] PROVIDES = ['sensor_values'] HIGHEST_LOWEST_MASK = 0x02 RECORDED_VALUE_MASK = 0x01 def Test(self): # the 
head of the list is the current sensor we're querying self._sensors = self.Property('sensor_definitions').values() self._sensor_values = [] if self._sensors: # loop and get all values self._GetSensorValue() else: # no sensors found, make sure we get a NR_DATA_OUT_OF_RANGE self.AddIfGetSupported(self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) self.SendGet(ROOT_DEVICE, self.pid, [0]) def _GetSensorValue(self): if not self._sensors: # finished self.SetProperty('sensor_values', self._sensor_values) self.Stop() return sensor_index = self._sensors[0]['sensor_number'] self.AddExpectedResults([ self.AckGetResult(action=self._GetNextSensor), self.NackGetResult( RDMNack.NR_HARDWARE_FAULT, advisory="Sensor %d NACK'ed GET SENSOR_VALUE with NR_HARDWARE_FAULT" % sensor_index, action=self._GetNextSensor) ]) self.SendGet(ROOT_DEVICE, self.pid, [sensor_index]) def _GetNextSensor(self): self._sensors.pop(0) self._GetSensorValue() def VerifyResult(self, response, fields): if not response.WasAcked(): return sensor_def = self._sensors[0] sensor_number = fields['sensor_number'] if sensor_def['sensor_number'] != sensor_number: self.AddWarning( 'Requested sensor value for %d, message returned sensor %d' % (sensor_def['sensor_number'], fields['sensor_number'])) return self._sensor_values.append(fields) range_min = sensor_def['range_min'] range_max = sensor_def['range_max'] # perform sanity checks on the sensor infomation self._CheckValueWithinRange(sensor_number, fields, 'present_value', range_min, range_max) if sensor_def['supports_recording'] & self.HIGHEST_LOWEST_MASK: self._CheckValueWithinRange(sensor_number, fields, 'lowest', range_min, range_max) self._CheckValueWithinRange(sensor_number, fields, 'highest', range_min, range_max) else: self._CheckForZeroField(sensor_number, fields, 'lowest') self._CheckForZeroField(sensor_number, fields, 'highest') if sensor_def['supports_recording'] & self.RECORDED_VALUE_MASK: self._CheckValueWithinRange(sensor_number, fields, 'recorded', 
range_min, range_max) else: self._CheckForZeroField(sensor_number, fields, 'recorded') def _CheckValueWithinRange(self, sensor_number, fields, name, min, max): if fields[name] < min or fields[name] > max: self.AddWarning( '%s for sensor %d not within range %d - %d, was %d' % (name, sensor_number, min, max, fields[name])) def _CheckForZeroField(self, sensor_number, fields, name): if fields[name]: self.AddWarning( '%s value for sensor %d non-0, but support not declared, was %d' % (name, sensor_number, fields[name])) class GetUndefinedSensorValues(OptionalParameterTestFixture): """Attempt to get sensor values for all sensors that weren't defined.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SENSOR_VALUE' REQUIRES = ['sensor_definitions'] def Test(self): sensors = self.Property('sensor_definitions') self._missing_sensors = [] for i in xrange(0, 0xff): if i not in sensors: self._missing_sensors.append(i) if self._missing_sensors: # loop and get all values self._GetSensorValue() else: self.SetNotRun('All sensors declared') return def _GetSensorValue(self): if not self._missing_sensors: self.Stop() return self.AddIfGetSupported( self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE, action=self._GetSensorValue)) self.SendGet(ROOT_DEVICE, self.pid, [self._missing_sensors.pop(0)]) class GetInvalidSensorValue(OptionalParameterTestFixture): """Get the sensor value with the all sensor value (0xff).""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SENSOR_VALUE' def Test(self): self.AddIfGetSupported(self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) data = struct.pack('!B', 0xff) self.SendRawGet(ROOT_DEVICE, self.pid, data) class GetSensorValueWithNoData(TestMixins.GetWithNoDataMixin, OptionalParameterTestFixture): """GET sensor value without any sensor number.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SENSOR_VALUE' class ResetSensorValue(OptionalParameterTestFixture): """Reset sensor values for all defined sensors.""" CATEGORY = TestCategory.SENSORS PID = 
'SENSOR_VALUE' REQUIRES = ['sensor_definitions'] def Test(self): # the head of the list is the current sensor we're querying self._sensors = self.Property('sensor_definitions').values() self._sensor_values = [] if self._sensors: # loop and get all values self._ResetSensor() else: # no sensors found, make sure we get a NR_DATA_OUT_OF_RANGE self.AddIfSetSupported( [self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE), self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS), ]) self.SendSet(ROOT_DEVICE, self.pid, [0]) def _ResetSensor(self): if not self._sensors: # finished self.Stop() return sensor_index = self._sensors[0]['sensor_number'] self.AddExpectedResults([ self.AckSetResult(action=self._ResetNextSensor), self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS, action=self._ResetNextSensor), self.NackSetResult( RDMNack.NR_HARDWARE_FAULT, advisory="Sensor %d NACK'ed Set SENSOR_VALUE with NR_HARDWARE_FAULT" % sensor_index, action=self._ResetNextSensor) ]) self.SendSet(ROOT_DEVICE, self.pid, [sensor_index]) def _ResetNextSensor(self): self._sensors.pop(0) self._ResetSensor() def VerifyResult(self, response, fields): # It's not clear at all what to expect in this case. 
# See http://www.rdmprotocol.org/showthread.php?p=2160 # TODO(simonn, e1.20 task group): figure this out pass class ResetAllSensorValues(OptionalParameterTestFixture): """Set SENSOR_VALUE with sensor number set to 0xff.""" CATEGORY = TestCategory.SENSORS PID = 'SENSOR_VALUE' REQUIRES = ['sensor_definitions'] RECORDED_VALUE_MASK = 0x01 ALL_SENSORS = 0xff def Test(self): supports_recording = False for sensor_def in self.Property('sensor_definitions').values(): supports_recording |= ( sensor_def['supports_recording'] & self.RECORDED_VALUE_MASK) # some devices don't have set results = [self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)] if supports_recording: results = [self.AckSetResult(), self.NackSetResult( RDMNack.NR_UNSUPPORTED_COMMAND_CLASS, warning="One or more recorded sensors found but Set" " SENSOR_VALUE wasn't supported")] else: results = [self.AckSetResult(), self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)] self.AddIfSetSupported(results) self.SendSet(ROOT_DEVICE, self.pid, [self.ALL_SENSORS]) class ResetUndefinedSensorValues(TestMixins.SetUndefinedSensorValues, OptionalParameterTestFixture): """Attempt to reset sensor values for all sensors that weren't defined.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SENSOR_VALUE' REQUIRES = ['sensor_definitions'] class ResetSensorValueWithNoData(OptionalParameterTestFixture): """SET sensor value without any sensor number.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SENSOR_VALUE' def Test(self): self.AddIfSetSupported([ self.NackSetResult(RDMNack.NR_FORMAT_ERROR), self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS), ]) self.SendRawSet(ROOT_DEVICE, self.pid, '') class AllSubDevicesGetSensorValue(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get SENSOR_VALUE to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'SENSOR_VALUE' DATA = [1] # Record Sensors #------------------------------------------------------------------------------ class 
GetRecordSensors(TestMixins.UnsupportedGetMixin, OptionalParameterTestFixture): """GET record sensors.""" CATEGORY = TestCategory.SENSORS PID = 'RECORD_SENSORS' class RecordSensorValues(OptionalParameterTestFixture): """Record values for all defined sensors.""" CATEGORY = TestCategory.SENSORS PID = 'RECORD_SENSORS' REQUIRES = ['sensor_definitions'] RECORDED_VALUE_MASK = 0x01 def Test(self): # the head of the list is the current sensor we're querying self._sensors = self.Property('sensor_definitions').values() self._sensor_values = [] if self._sensors: # loop and get all values self._RecordSensor() else: # no sensors found, make sure we get a NR_DATA_OUT_OF_RANGE self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) self.SendSet(ROOT_DEVICE, self.pid, [0]) def _RecordSensor(self): if not self._sensors: # finished self.Stop() return sensor_def = self._sensors[0] if sensor_def['supports_recording'] & self.RECORDED_VALUE_MASK: self.AddExpectedResults(self.AckSetResult(action=self._RecordNextSensor)) else: message = ("Sensor %d ack'ed RECORD_SENSOR but recorded support was not " "declared" % sensor_def['sensor_number']) self.AddIfSetSupported([ self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE, action=self._RecordNextSensor), self.AckSetResult(action=self._RecordNextSensor, advisory=message), ]) self.SendSet(ROOT_DEVICE, self.pid, [self._sensors[0]['sensor_number']]) def _RecordNextSensor(self): self._sensors.pop(0) self._RecordSensor() class RecordAllSensorValues(OptionalParameterTestFixture): """Set RECORD_SENSORS with sensor number set to 0xff.""" CATEGORY = TestCategory.SENSORS PID = 'RECORD_SENSORS' REQUIRES = ['sensor_recording_supported'] ALL_SENSORS = 0xff def Test(self): if self.Property('sensor_recording_supported'): self.AddIfSetSupported(self.AckSetResult()) else: self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) self.SendSet(ROOT_DEVICE, self.pid, [self.ALL_SENSORS]) class 
RecordUndefinedSensorValues(TestMixins.SetUndefinedSensorValues, OptionalParameterTestFixture): """Attempt to reset sensor values for all sensors that weren't defined.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'RECORD_SENSORS' REQUIRES = ['sensor_definitions'] class RecordSensorValueWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """SET record sensors without any sensor number.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'RECORD_SENSORS' # Device Hours #------------------------------------------------------------------------------ class GetDeviceHours(TestMixins.GetMixin, OptionalParameterTestFixture): """GET the device hours.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'DEVICE_HOURS' EXPECTED_FIELD = 'hours' PROVIDES = ['device_hours'] class GetDeviceHoursWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the device hours with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DEVICE_HOURS' class SetDeviceHours(TestMixins.SetUInt32Mixin, OptionalParameterTestFixture): """Attempt to SET the device hours.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'DEVICE_HOURS' EXPECTED_FIELD = 'hours' PROVIDES = ['set_device_hours_supported'] REQUIRES = ['device_hours'] def OldValue(self): return self.Property('device_hours') def VerifyResult(self, response, fields): if response.command_class == PidStore.RDM_SET: self.SetProperty('set_device_hours_supported', response.WasAcked()) class SetDeviceHoursWithNoData(OptionalParameterTestFixture): """Set the device hours with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DEVICE_HOURS' REQUIRES = ['set_device_hours_supported'] def Test(self): if self.Property('set_device_hours_supported'): expected_result = RDMNack.NR_FORMAT_ERROR else: expected_result = RDMNack.NR_UNSUPPORTED_COMMAND_CLASS self.AddIfSetSupported(self.NackSetResult(expected_result)) self.SendRawSet(ROOT_DEVICE, self.pid, '') class 
AllSubDevicesGetDeviceHours(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get DEVICE_HOURS to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'DEVICE_HOURS' # Lamp Hours #------------------------------------------------------------------------------ class GetLampHours(TestMixins.GetMixin, OptionalParameterTestFixture): """GET the device hours.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'LAMP_HOURS' EXPECTED_FIELD = 'hours' PROVIDES = ['lamp_hours'] class GetLampHoursWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the device hours with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LAMP_HOURS' class SetLampHours(TestMixins.SetUInt32Mixin, OptionalParameterTestFixture): """Attempt to SET the device hours.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'LAMP_HOURS' EXPECTED_FIELD = 'hours' PROVIDES = ['set_lamp_hours_supported'] REQUIRES = ['lamp_hours'] def OldValue(self): return self.Property('lamp_hours') def VerifyResult(self, response, fields): if response.command_class == PidStore.RDM_SET: self.SetProperty('set_lamp_hours_supported', response.WasAcked()) class SetLampHoursWithNoData(OptionalParameterTestFixture): """Set the device hours with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LAMP_HOURS' REQUIRES = ['set_lamp_hours_supported'] def Test(self): if self.Property('set_lamp_hours_supported'): expected_result = RDMNack.NR_FORMAT_ERROR else: expected_result = RDMNack.NR_UNSUPPORTED_COMMAND_CLASS self.AddIfSetSupported(self.NackSetResult(expected_result)) self.SendRawSet(ROOT_DEVICE, self.pid, '') class AllSubDevicesGetLampHours(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get LAMP_HOURS to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'LAMP_HOURS' # Lamp Strikes #------------------------------------------------------------------------------ class GetLampStrikes(TestMixins.GetMixin, 
OptionalParameterTestFixture): """GET the lamp strikes.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'LAMP_STRIKES' EXPECTED_FIELD = 'strikes' PROVIDES = ['lamp_strikes'] class GetLampStrikesWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the lamp strikes with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LAMP_STRIKES' class SetLampStrikes(TestMixins.SetUInt32Mixin, OptionalParameterTestFixture): """Attempt to SET the lamp strikes.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'LAMP_STRIKES' EXPECTED_FIELD = 'strikes' PROVIDES = ['set_lamp_strikes_supported'] REQUIRES = ['lamp_strikes'] def OldValue(self): return self.Property('lamp_strikes') def VerifyResult(self, response, fields): if response.command_class == PidStore.RDM_SET: self.SetProperty('set_lamp_strikes_supported', response.WasAcked()) class SetLampStrikesWithNoData(OptionalParameterTestFixture): """Set the lamp strikes with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LAMP_STRIKES' REQUIRES = ['set_lamp_strikes_supported'] def Test(self): if self.Property('set_lamp_strikes_supported'): expected_result = RDMNack.NR_FORMAT_ERROR else: expected_result = RDMNack.NR_UNSUPPORTED_COMMAND_CLASS self.AddIfSetSupported(self.NackSetResult(expected_result)) self.SendRawSet(ROOT_DEVICE, self.pid, '') class AllSubDevicesGetLampStrikes(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get LAMP_STRIKES to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'LAMP_STRIKES' # Lamp State #------------------------------------------------------------------------------ class GetLampState(TestMixins.GetMixin, OptionalParameterTestFixture): """GET the lamp state.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'LAMP_STATE' EXPECTED_FIELD = 'state' PROVIDES = ['lamp_state'] class GetLampStateWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the lamp state with extra data.""" CATEGORY = 
TestCategory.ERROR_CONDITIONS PID = 'LAMP_STATE' class SetLampState(TestMixins.SetBoolMixin, OptionalParameterTestFixture): """Attempt to SET the lamp state.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'LAMP_STATE' EXPECTED_FIELD = 'state' REQUIRES = ['lamp_state'] def OldValue(self): # We use a bool here so we toggle between off and on # Some responders may not support standby & strike return bool(self.Property('lamp_state')) class SetLampStateWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set the device state with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LAMP_STATE' class AllSubDevicesGetLampState(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get LAMP_STATE to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'LAMP_STATE' # Lamp On Mode #------------------------------------------------------------------------------ class GetLampOnMode(TestMixins.GetMixin, OptionalParameterTestFixture): """GET the lamp on mode.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'LAMP_ON_MODE' EXPECTED_FIELD = 'mode' PROVIDES = ['lamp_on_mode'] class GetLampOnModeWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the lamp on mode with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LAMP_ON_MODE' class SetLampOnMode(TestMixins.SetMixin, OptionalParameterTestFixture): """Attempt to SET the lamp on mode.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'LAMP_ON_MODE' EXPECTED_FIELD = 'mode' REQUIRES = ['lamp_on_mode'] ALLOWED_MODES = [0, 1, 2] ALL_MODES = ALLOWED_MODES + [3] + range(0x80, 0xe0) def OldValue(self): old = self.Property('lamp_on_mode') if old in self.ALL_MODES: return old return self.ALL_MODES[0] def NewValue(self): old_value = self.OldValue() try: index = self.ALLOWED_MODES.index(old_value) except ValueError: return self.ALLOWED_MODES[0] return self.ALLOWED_MODES[(old_value + 1) % len(self.ALLOWED_MODES)] class 
SetLampOnModeWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set the device on mode with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LAMP_ON_MODE' class AllSubDevicesGetLampOnMode(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get LAMP_ON_MODE to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'LAMP_ON_MODE' # Device Hours #------------------------------------------------------------------------------ class GetDevicePowerCycles(TestMixins.GetMixin, OptionalParameterTestFixture): """GET the device power_cycles.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'DEVICE_POWER_CYCLES' EXPECTED_FIELD = 'power_cycles' PROVIDES = ['power_cycles'] class GetDevicePowerCyclesWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the device power_cycles with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DEVICE_POWER_CYCLES' class ResetDevicePowerCycles(TestMixins.SetUInt32Mixin, OptionalParameterTestFixture): """Attempt to SET the device power_cycles to zero.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'DEVICE_POWER_CYCLES' EXPECTED_FIELD = 'power_cycles' REQUIRES = ['power_cycles'] PROVIDES = ['set_device_power_cycles_supported'] def OldValue(self): return self.Property('power_cycles') def NewValue(self): return 0 def VerifyResult(self, response, fields): if response.command_class == PidStore.RDM_SET: self.SetProperty('set_device_power_cycles_supported', response.WasAcked()) class SetDevicePowerCycles(TestMixins.SetUInt32Mixin, OptionalParameterTestFixture): """Attempt to SET the device power_cycles.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'DEVICE_POWER_CYCLES' EXPECTED_FIELD = 'power_cycles' REQUIRES = ['power_cycles'] def OldValue(self): return self.Property('power_cycles') def Test(self): self.AddIfSetSupported([ self.AckSetResult(action=self.VerifySet), self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE), self.NackSetResult( 
RDMNack.NR_UNSUPPORTED_COMMAND_CLASS, advisory='SET for %s returned unsupported command class' % self.PID), ]) self.SendSet(ROOT_DEVICE, self.pid, [self.NewValue()]) class SetDevicePowerCyclesWithNoData(OptionalParameterTestFixture): """Set the device power_cycles with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DEVICE_POWER_CYCLES' REQUIRES = ['set_device_power_cycles_supported'] def Test(self): if self.Property('set_device_power_cycles_supported'): expected_result= RDMNack.NR_FORMAT_ERROR else: expected_result= RDMNack.NR_UNSUPPORTED_COMMAND_CLASS self.AddIfSetSupported(self.NackSetResult(expected_result)) self.SendRawSet(ROOT_DEVICE, self.pid, '') class AllSubDevicesGetDevicePowerCycles(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get DEVICE_POWER_CYCLES to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'DEVICE_POWER_CYCLES' # Display Invert #------------------------------------------------------------------------------ class GetDisplayInvert(TestMixins.GetMixin, OptionalParameterTestFixture): """GET the display invert setting.""" CATEGORY = TestCategory.DISPLAY_SETTINGS PID = 'DISPLAY_INVERT' EXPECTED_FIELD = 'invert_status' PROVIDES = ['display_invert'] class GetDisplayInvertWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the pan invert setting with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DISPLAY_INVERT' class SetDisplayInvert(TestMixins.SetMixin, OptionalParameterTestFixture): """Attempt to SET the display invert setting.""" CATEGORY = TestCategory.DISPLAY_SETTINGS PID = 'DISPLAY_INVERT' EXPECTED_FIELD = 'invert_status' REQUIRES = ['display_invert'] # some devices can't do auto so we just use on and off here ALLOWED_MODES = [0, 1] ALL_MODES = ALLOWED_MODES + [2] def OldValue(self): old = self.Property('display_invert') if old in self.ALL_MODES: return old return self.ALL_MODES[0] def NewValue(self): old_value = self.OldValue() try: index = 
self.ALLOWED_MODES.index(old_value) except ValueError: return self.ALLOWED_MODES[0] return self.ALLOWED_MODES[(old_value + 1) % len(self.ALLOWED_MODES)] class SetDisplayInvertWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set the display invert with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DISPLAY_INVERT' class AllSubDevicesGetDisplayInvert(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get DISPLAY_INVERT to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'DISPLAY_INVERT' # Display Level #------------------------------------------------------------------------------ class GetDisplayLevel(TestMixins.GetMixin, OptionalParameterTestFixture): """GET the display level setting.""" CATEGORY = TestCategory.DISPLAY_SETTINGS PID = 'DISPLAY_LEVEL' EXPECTED_FIELD = 'level' PROVIDES = ['display_level'] class GetDisplayLevelWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the pan invert setting with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DISPLAY_LEVEL' class SetDisplayLevel(TestMixins.SetUInt8Mixin, OptionalParameterTestFixture): """Attempt to SET the display level setting.""" CATEGORY = TestCategory.DISPLAY_SETTINGS PID = 'DISPLAY_LEVEL' EXPECTED_FIELD = 'level' REQUIRES = ['display_level'] def OldValue(self): return self.Property('display_level') class SetDisplayLevelWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set the display level with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DISPLAY_LEVEL' class AllSubDevicesGetDisplayLevel(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get DISPLAY_LEVEL to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'DISPLAY_LEVEL' # Pan Invert #------------------------------------------------------------------------------ class GetPanInvert(TestMixins.GetMixin, OptionalParameterTestFixture): """GET the pan invert setting.""" 
CATEGORY = TestCategory.CONFIGURATION PID = 'PAN_INVERT' EXPECTED_FIELD = 'invert' PROVIDES = ['pan_invert'] class GetPanInvertWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the pan invert setting with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PAN_INVERT' class SetPanInvert(TestMixins.SetBoolMixin, OptionalParameterTestFixture): """Attempt to SET the pan invert setting.""" CATEGORY = TestCategory.CONFIGURATION PID = 'PAN_INVERT' EXPECTED_FIELD = 'invert' REQUIRES = ['pan_invert'] def OldValue(self): return self.Property('pan_invert') class SetPanInvertWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set the pan invert with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PAN_INVERT' class AllSubDevicesGetPanInvert(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get PAN_INVERT to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'PAN_INVERT' # Tilt Invert #------------------------------------------------------------------------------ class GetTiltInvert(TestMixins.GetMixin, OptionalParameterTestFixture): """GET the tilt invert setting.""" CATEGORY = TestCategory.CONFIGURATION PID = 'TILT_INVERT' EXPECTED_FIELD = 'invert' PROVIDES = ['tilt_invert'] class GetTiltInvertWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the tilt invert setting with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'TILT_INVERT' class SetTiltInvert(TestMixins.SetBoolMixin, OptionalParameterTestFixture): """Attempt to SET the tilt invert setting.""" CATEGORY = TestCategory.CONFIGURATION PID = 'TILT_INVERT' EXPECTED_FIELD = 'invert' REQUIRES = ['tilt_invert'] def OldValue(self): return self.Property('tilt_invert') class SetTiltInvertWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set the tilt invert with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'TILT_INVERT' class 
AllSubDevicesGetTiltInvert(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get TILT_INVERT to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'TILT_INVERT' # Pan Tilt Swap Invert #------------------------------------------------------------------------------ class GetPanTiltSwap(TestMixins.GetMixin, OptionalParameterTestFixture): """GET the pan tilt swap setting.""" CATEGORY = TestCategory.CONFIGURATION PID = 'PAN_TILT_SWAP' EXPECTED_FIELD = 'swap' PROVIDES = ['pan_tilt_swap'] class GetPanTiltSwapWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the pan tilt swap setting with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PAN_TILT_SWAP' class SetPanTiltSwap(TestMixins.SetBoolMixin, OptionalParameterTestFixture): """Attempt to SET the pan tilt swap setting.""" CATEGORY = TestCategory.CONFIGURATION PID = 'PAN_TILT_SWAP' EXPECTED_FIELD = 'swap' REQUIRES = ['pan_tilt_swap'] def OldValue(self): return self.Property('pan_tilt_swap') class SetPanTiltSwapWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set the pan tilt swap with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PAN_TILT_SWAP' class AllSubDevicesGetPanTiltSwap(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get PAN_TILT_SWAP to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'PAN_TILT_SWAP' # Real time clock #------------------------------------------------------------------------------ class GetRealTimeClock(OptionalParameterTestFixture): """GET the real time clock setting.""" CATEGORY = TestCategory.CONFIGURATION PID = 'REAL_TIME_CLOCK' ALLOWED_RANGES = { 'year': (2003, 65535), 'month': (1, 12), 'day': (1, 31), 'hour': (0, 23), 'minute': (0, 59), } def Test(self): self.AddIfGetSupported( self.AckGetResult(field_names=self.ALLOWED_RANGES.keys() + ['second'])) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not 
response.WasAcked(): return for field, range in self.ALLOWED_RANGES.iteritems(): value = fields[field] if value < range[0] or value > range[1]: self.AddWarning('%s in GET %s is out of range, was %d, expeced %s' % (field, self.PID, value, range)) class GetRealTimeClockWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the teal time clock with data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'REAL_TIME_CLOCK' class SetRealTimeClock(OptionalParameterTestFixture): """Set the real time clock.""" CATEGORY = TestCategory.CONFIGURATION PID = 'REAL_TIME_CLOCK' def Test(self): n = datetime.datetime.now() self.AddIfSetSupported( [self.AckSetResult(), self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS), ]) args = [n.year, n.month, n.day, n.hour, n.minute, n.second] self.SendSet(ROOT_DEVICE, self.pid, args) class SetRealTimeClockWithNoData(OptionalParameterTestFixture): """Set the real time clock without any data.""" CATEGORY = TestCategory.CONFIGURATION PID = 'REAL_TIME_CLOCK' def Test(self): self.AddIfSetSupported([ self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS), self.NackSetResult(RDMNack.NR_FORMAT_ERROR), ]) self.SendRawSet(ROOT_DEVICE, self.pid, '') class AllSubDevicesGetRealTimeClock(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get REAL_TIME_CLOCK to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'REAL_TIME_CLOCK' # Identify Device #------------------------------------------------------------------------------ class GetIdentifyDevice(TestMixins.GetRequiredMixin, ResponderTestFixture): """Get the identify state.""" CATEGORY = TestCategory.CONTROL PID = 'IDENTIFY_DEVICE' PROVIDES = ['identify_state'] EXPECTED_FIELD = 'identify_state' class GetIdentifyDeviceWithData(ResponderTestFixture): """Get the identify state with data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'IDENTIFY_DEVICE' def Test(self): # don't inherit from GetWithDataMixin because this is required 
self.AddExpectedResults([ self.NackGetResult(RDMNack.NR_FORMAT_ERROR), self.AckGetResult( warning='Get %s with data returned an ack' % self.pid.name) ]) self.SendRawGet(ROOT_DEVICE, self.pid, 'foo') class SetIdentifyDevice(ResponderTestFixture): """Set the identify state.""" CATEGORY = TestCategory.CONTROL PID = 'IDENTIFY_DEVICE' REQUIRES = ['identify_state'] def Test(self): self.identify_mode = self.Property('identify_state') self.new_mode = not self.identify_mode self.AddExpectedResults( self.AckSetResult(action=self.VerifyIdentifyMode)) self.SendSet(ROOT_DEVICE, self.pid, [self.new_mode]) def VerifyIdentifyMode(self): self.AddExpectedResults( self.AckGetResult(field_values={'identify_state': self.new_mode})) self.SendGet(ROOT_DEVICE, self.pid) def ResetState(self): # reset back to the old value self.SendSet(ROOT_DEVICE, self.pid, [self.identify_mode]) self._wrapper.Run() class SetOutOfRangeIdentifyDevice(ResponderTestFixture): """Set the identify state to a value which is out of range.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'IDENTIFY_DEVICE' REQUIRES = ['identify_state'] def Test(self): self.AddExpectedResults( self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) self.SendSet(ROOT_DEVICE, self.pid, [2]) def ResetState(self): # reset back to the old value self.SendSet(ROOT_DEVICE, self.pid, [self.Property('identify_state')]) self._wrapper.Run() class SetVendorcastIdentifyDevice(TestMixins.SetNonUnicastIdentifyMixin, ResponderTestFixture): """Set the identify state using the vendorcast uid.""" CATEGORY = TestCategory.CONTROL PID = 'IDENTIFY_DEVICE' def Uid(self): return UID.VendorcastAddress(self._uid.manufacturer_id) class SetBroadcastIdentifyDevice(TestMixins.SetNonUnicastIdentifyMixin, ResponderTestFixture): """Set the identify state using the broadcast uid.""" CATEGORY = TestCategory.CONTROL PID = 'IDENTIFY_DEVICE' def Uid(self): return UID.AllDevices() class SetOtherVendorcastIdentifyDevice(TestMixins.SetNonUnicastIdentifyMixin, ResponderTestFixture): 
"""Send a vendorcast identify off to another manufacturer's ID.""" CATEGORY = TestCategory.CONTROL PID = 'IDENTIFY_DEVICE' def States(self): return [ self.TurnOn, self.VerifyOn, self.TurnOff, self.VerifyOn, ] def Uid(self): # use a different vendor's vendorcast address vendorcast_id = self._uid.manufacturer_id if vendorcast_id == 0: vendorcast_id += 1 else: vendorcast_id -= 1 return UID(vendorcast_id, 0xffffffff) class SetIdentifyDeviceWithNoData(ResponderTestFixture): """Set the identify state with no data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'IDENTIFY_DEVICE' REQUIRES = ['identify_state'] def Test(self): self.AddExpectedResults(self.NackSetResult(RDMNack.NR_FORMAT_ERROR)) self.SendRawSet(ROOT_DEVICE, self.pid, '') def ResetState(self): self.SendSet(ROOT_DEVICE, self.pid, [self.Property('identify_state')]) self._wrapper.Run() class AllSubDevicesGetIdentifyDevice(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get IDENTIFY_DEVICE to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'IDENTIFY_DEVICE' class GetSubDeviceIdentifyDevice(ResponderTestFixture): """Check that IDENTIFY_DEVICE is supported on all sub devices.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'IDENTIFY_DEVICE' REQUIRES = ['sub_device_addresses'] def Test(self): self._sub_devices = self.Property('sub_device_addresses').keys() self._sub_devices.reverse() self._GetIdentifyDevice(); def _GetIdentifyDevice(self): if not self._sub_devices: self.Stop() return self.AddExpectedResults(self.AckGetResult(action=self._GetIdentifyDevice)) sub_device = self._sub_devices.pop() self.SendGet(sub_device, self.pid) # Power State #------------------------------------------------------------------------------ class GetPowerState(TestMixins.GetMixin, OptionalParameterTestFixture): """Get the power state mode.""" CATEGORY = TestCategory.CONTROL PID = 'POWER_STATE' PROVIDES = ['power_state'] EXPECTED_FIELD = 'power_state' # The allowed power states ALLOWED_STATES = [0, 1, 2, 
0xff] def VerifyResult(self, response, fields): super(GetPowerState, self).VerifyResult(response, fields) if response.WasAcked(): if fields['power_state'] not in self.ALLOWED_STATES: self.AddWarning('Power state of 0x%hx is not defined' % fields['power_state']) class GetPowerStateWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get the power state mode with data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'POWER_STATE' class SetPowerState(TestMixins.SetMixin, OptionalParameterTestFixture): """Set the power state.""" CATEGORY = TestCategory.CONTROL PID = 'POWER_STATE' REQUIRES = ['power_state'] EXPECTED_FIELD = 'power_state' def OldValue(self): old = self.Property('power_state') if old in GetPowerState.ALLOWED_STATES: return old return GetPowerState.ALLOWED_STATES[0] def NewValue(self): old_value = self.Property('power_state') try: index = GetPowerState.ALLOWED_STATES.index(old_value) except ValueError: return GetPowerState.ALLOWED_STATES[0] length = len(GetPowerState.ALLOWED_STATES) return GetPowerState.ALLOWED_STATES[(old_value + 1) % length] def ResetState(self): if not self.OldValue(): return # reset back to the old value self.SendSet(ROOT_DEVICE, self.pid, [self.OldValue()]) self._wrapper.Run() class SetPowerStateWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set the power state with no data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'POWER_STATE' class AllSubDevicesGetPowerState(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get POWER_STATE to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'POWER_STATE' # Self Test #------------------------------------------------------------------------------ class GetPerformSelfTest(TestMixins.GetMixin, OptionalParameterTestFixture): """Get the current self test settings.""" CATEGORY = TestCategory.CONTROL PID = 'PERFORM_SELFTEST' EXPECTED_FIELD = 'tests_active' class GetPerformSelfTestWithData(TestMixins.GetWithDataMixin, 
OptionalParameterTestFixture): """Get the current self test settings with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PERFORM_SELFTEST' class SetPerformSelfTest(TestMixins.SetMixin, OptionalParameterTestFixture): """Turn any running self tests off.""" CATEGORY = TestCategory.CONTROL PID = 'PERFORM_SELFTEST' EXPECTED_FIELD = 'tests_active' def NewValue(self): return False def ResetState(self): # override this so we don't reset pass class SetPerformSelfTestWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set the perform self test setting but don't provide any data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PERFORM_SELFTEST' class AllSubDevicesGetPerformSelfTest(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get PERFORM_SELFTEST to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'PERFORM_SELFTEST' # Self Test Description #------------------------------------------------------------------------------ class GetSelfTestDescription(OptionalParameterTestFixture): """Get the self test description.""" CATEGORY = TestCategory.CONTROL PID = 'SELF_TEST_DESCRIPTION' def Test(self): self.AddIfGetSupported([ self.AckGetResult(), self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE), ]) # try to get a description for the first self test self.SendGet(ROOT_DEVICE, self.pid, [1]) class GetSelfTestDescriptionWithNoData(TestMixins.GetWithNoDataMixin, OptionalParameterTestFixture): """Get the self test description with no data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'SELF_TEST_DESCRIPTION' class FindSelfTests(OptionalParameterTestFixture): """Locate the self tests by sending SELF_TEST_DESCRIPTION messages.""" CATEGORY = TestCategory.CONTROL PID = 'SELF_TEST_DESCRIPTION' PROVIDES = ['self_test_descriptions'] def Test(self): self._self_tests = {} # stores the discovered self tests self._current_index = 0 # the current self test we're trying to query self._CheckForSelfTest() def 
_CheckForSelfTest(self): # For each message we should either see a NR_DATA_OUT_OF_RANGE or an ack if self._current_index == 255: self.SetProperty('self_test_descriptions', self._self_tests) self.Stop() return if self.PidSupported(): self.AddExpectedResults([ self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE, action=self._CheckForSelfTest), self.AckGetResult(action=self._CheckForSelfTest) ]) else: self.AddExpectedResults(self.NackGetResult(RDMNack.NR_UNKNOWN_PID)) self._current_index += 1 self.SendGet(ROOT_DEVICE, self.pid, [self._current_index]) def VerifyResult(self, response, fields): if response.WasAcked(): if self._current_index != fields['test_number']: self.AddWarning( 'Requested self test %d, message returned self test %d' % (self._current_index, fields['test_number'])) else: self._self_tests[self._current_index] = fields['description'] class AllSubDevicesGetSelfTestDescription(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get SELF_TEST_DESCRIPTION to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'SELF_TEST_DESCRIPTION' DATA = [1] # Factory Defaults #------------------------------------------------------------------------------ class GetFactoryDefaults(OptionalParameterTestFixture): """GET the factory defaults pid.""" CATEGORY = TestCategory.PRODUCT_INFORMATION PID = 'FACTORY_DEFAULTS' def Test(self): self.AddIfGetSupported(self.AckGetResult(field_names=['using_defaults'])) self.SendGet(ROOT_DEVICE, self.pid) class GetFactoryDefaultsWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """GET the factory defaults pid with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'FACTORY_DEFAULTS' class ResetFactoryDefaults(OptionalParameterTestFixture): """Reset to factory defaults.""" CATEGORY = TestCategory.PRODUCT_INFORMATION PID = 'FACTORY_DEFAULTS' # Dependancies so that we don't reset the fields before checking them. 
DEPS = [GetStartAddress, GetPersonality] def Test(self): self.AddIfSetSupported(self.AckSetResult(action=self.VerifySet)) self.SendSet(ROOT_DEVICE, self.pid) def VerifySet(self): self.AddIfGetSupported( self.AckGetResult(field_values={'using_defaults': True})) self.SendGet(ROOT_DEVICE, self.pid) class ResetFactoryDefaultsWithData(TestMixins.SetWithDataMixin, OptionalParameterTestFixture): """Reset to factory defaults with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'FACTORY_DEFAULTS' class AllSubDevicesGetFactoryDefaults(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Send a Get FACTORY_DEFAULTS to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'FACTORY_DEFAULTS' # CAPTURE_PRESET #------------------------------------------------------------------------------ class GetCapturePreset(TestMixins.UnsupportedGetMixin, OptionalParameterTestFixture): """GET capture preset.""" CATEGORY = TestCategory.CONTROL PID = 'CAPTURE_PRESET' class SetCapturePresetWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set capture preset with no data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'CAPTURE_PRESET' class CapturePreset(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Capture preset information.""" CATEGORY = TestCategory.CONTROL PID = 'CAPTURE_PRESET' def Test(self): # this test doesn't check much because the standard is rather vague in this # area. There is also no way to read back preset data so it's impossible to # tell if this worked. 
    self.AddIfSetSupported(self.AckSetResult())
    # scene 1, no timing information
    self.SendSet(ROOT_DEVICE, self.pid, [1, 0, 0, 0])

# PRESET_PLAYBACK
#------------------------------------------------------------------------------
class GetPresetPlaybackWithData(TestMixins.GetWithDataMixin,
                                OptionalParameterTestFixture):
  """Get the preset playback with extra data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'PRESET_PLAYBACK'


class GetPresetPlayback(TestMixins.GetMixin, OptionalParameterTestFixture):
  """Get the preset playback."""
  CATEGORY = TestCategory.CONTROL
  PID = 'PRESET_PLAYBACK'
  EXPECTED_FIELD = 'mode'


class SetPresetPlaybackWithNoData(TestMixins.SetWithNoDataMixin,
                                  OptionalParameterTestFixture):
  """Set preset playback with no data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'PRESET_PLAYBACK'


class SetPresetPlayback(OptionalParameterTestFixture):
  """Set the preset playback."""
  CATEGORY = TestCategory.CONTROL
  PID = 'PRESET_PLAYBACK'
  OFF = 0  # preset playback off, i.e. normal DMX operation
  FULL = 0xff  # full level

  def Test(self):
    self.AddIfSetSupported(self.AckSetResult(action=self.VerifySet))
    self.SendSet(ROOT_DEVICE, self.pid, [self.OFF, self.FULL])

  def VerifySet(self):
    self.AddExpectedResults(
      self.AckGetResult(field_values={
        'mode': self.OFF,
        'level': self.FULL}))
    self.SendGet(ROOT_DEVICE, self.pid)


class AllSubDevicesGetPresetPlayback(TestMixins.AllSubDevicesGetMixin,
                                     ResponderTestFixture):
  """Send a Get PRESET_PLAYBACK to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'PRESET_PLAYBACK'

# E1.37 PIDS
#==============================================================================

# IDENTIFY_MODE
#------------------------------------------------------------------------------
class GetIdentifyMode(TestMixins.GetMixin, OptionalParameterTestFixture):
  """Get IDENTIFY_MODE."""
  CATEGORY = TestCategory.CONTROL
  PID = 'IDENTIFY_MODE'
  PROVIDES = ['identify_mode']
  EXPECTED_FIELD = 'identify_mode'


class GetIdentifyModeWithData(TestMixins.GetWithDataMixin,
                              OptionalParameterTestFixture):
  """Get IDENTIFY_MODE with extra data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'IDENTIFY_MODE'


class SetIdentifyMode(TestMixins.SetMixin, OptionalParameterTestFixture):
  """Set IDENTIFY_MODE."""
  CATEGORY = TestCategory.CONTROL
  PID = 'IDENTIFY_MODE'
  REQUIRES = ['identify_mode']
  LOUD = 0xff
  QUIET = 0x00
  EXPECTED_FIELD = 'identify_mode'

  def OldValue(self):
    return self.Property('identify_mode')

  def NewValue(self):
    # toggle the mode; default to QUIET if the current mode is unknown
    old_value = self.OldValue()
    if old_value is None:
      return self.QUIET

    if old_value:
      return self.QUIET
    else:
      return self.LOUD


class SetIdentifyModeWithNoData(TestMixins.SetWithNoDataMixin,
                                OptionalParameterTestFixture):
  """Set IDENTIFY_MODE with no data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'IDENTIFY_MODE'


class AllSubDevicesGetIdentifyMode(TestMixins.AllSubDevicesGetMixin,
                                   ResponderTestFixture):
  """Get IDENTIFY_MODE addressed to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'IDENTIFY_MODE'

# DMX_BLOCK_ADDRESS
#------------------------------------------------------------------------------
class GetDMXBlockAddress(OptionalParameterTestFixture):
  """Get DMX_BLOCK_ADDRESS."""
  CATEGORY = TestCategory.DMX_SETUP
  PID = 'DMX_BLOCK_ADDRESS'
  PROVIDES = ['total_sub_device_footprint', 'base_dmx_address']
  REQUIRES = ['sub_device_addresses', 'sub_device_footprints']
  # 0xffff in the base address field means the sub devices aren't contiguous
  NON_CONTIGUOUS = 0xffff

  def Test(self):
    self.expected_footprint = sum(
        self.Property('sub_device_footprints').values())
    self.AddIfGetSupported(self.AckGetResult())
    self.SendGet(ROOT_DEVICE, self.pid)

  def VerifyResult(self, response, fields):
    footprint = None
    base_address = None

    if response.WasAcked():
      footprint = fields['sub_device_footprint']
      base_address = fields['base_dmx_address']

      if footprint > MAX_DMX_ADDRESS:
        self.AddWarning('Sub device footprint > 512, was %d' % footprint)

      if (base_address == 0 or
          (base_address > MAX_DMX_ADDRESS and
           base_address != self.NON_CONTIGUOUS)):
        self.AddWarning('Base DMX address is outside range 1- 512, was %d' %
                        base_address)

      if footprint != self.expected_footprint:
        self.SetFailed(
            "Sub device footprint (%d) didn't match sum of sub-device "
            "footprints (%d)" %
            (fields['sub_device_footprint'], self.expected_footprint))

      is_contiguous = self.CheckForContiguousSubDevices()
      if is_contiguous and base_address == self.NON_CONTIGUOUS:
        self.SetFailed(
            'Sub device addresses are contiguous, but block address returned '
            '0xffff')
      elif not (is_contiguous or base_address == self.NON_CONTIGUOUS):
        self.SetFailed(
            "Sub device addresses aren't contiguous, but block address "
            "didn't return 0xffff")

    # these are set even on a NACK so dependent tests can run
    self.SetProperty('total_sub_device_footprint', footprint)
    self.SetProperty('base_dmx_address', base_address)

  def CheckForContiguousSubDevices(self):
    # returns True if each sub device starts where the previous one ended
    addresses = self.Property('sub_device_addresses')
    footprints = self.Property('sub_device_footprints')
    next_address = None
    for index in sorted(addresses):
      # NOTE(review): '== None' should be 'is None' per PEP 8
      if next_address == None:
        next_address = addresses[index] + footprints[index]
      elif addresses[index] != next_address:
        return False
      else:
        next_address += footprints[index]
    return True;


class CheckBlockAddressConsistency(ResponderTestFixture):
  """Check that the device has subdevices if DMX_BLOCK_ADDRESS is supported."""
  CATEGORY = TestCategory.CONTROL
  REQUIRES = ['sub_device_count', 'supported_parameters']

  def Test(self):
    pid = self.LookupPid('DMX_BLOCK_ADDRESS')
    if (pid.value in self.Property('supported_parameters') and
        self.Property('sub_device_count') == 0):
      self.AddAdvisory('DMX_BLOCK_ADDRESS supported but sub device count was 0')
    self.SetPassed()
    self.Stop()


class GetDMXBlockAddressWithData(TestMixins.GetWithDataMixin,
                                 OptionalParameterTestFixture):
  """Get DMX_BLOCK_ADDRESS with extra data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'DMX_BLOCK_ADDRESS'


class SetDMXBlockAddress(TestMixins.SetMixin, OptionalParameterTestFixture):
  """SET the DMX_BLOCK_ADDRESS."""
  CATEGORY = TestCategory.DMX_SETUP
  PID = 'DMX_BLOCK_ADDRESS'
  REQUIRES = ['total_sub_device_footprint', 'base_dmx_address']
  EXPECTED_FIELD = 'base_dmx_address'

  def NewValue(self):
    # pick a new base address that keeps the entire footprint within the
    # DMX universe; fall back to 1 if the current state is unknown
    base_address = self.Property('base_dmx_address')
    footprint = self.Property('total_sub_device_footprint')

    if base_address is None or footprint is None:
      return 1

    if base_address == GetDMXBlockAddress.NON_CONTIGUOUS:
      return 1

    new_address = base_address + 1
    if new_address + footprint > MAX_DMX_ADDRESS:
      new_address = 1
    return new_address

  def ResetState(self):
    # we can't reset as the addresses may not have been contiguous
    pass


class SetZeroDMXBlockAddress(OptionalParameterTestFixture):
  """Set DMX_BLOCK_ADDRESS to 0."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'DMX_BLOCK_ADDRESS'
  DEPS = [SetDMXBlockAddress]

  def Test(self):
    # address 0 is invalid, so expect a NACK
    self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE))
    data = struct.pack('!H', 0)
    self.SendRawSet(ROOT_DEVICE, self.pid, data)


class SetOversizedDMXBlockAddress(OptionalParameterTestFixture):
  """Set DMX_BLOCK_ADDRESS to 513."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'DMX_BLOCK_ADDRESS'
  DEPS = [SetDMXBlockAddress]

  def Test(self):
    # one past the last valid DMX address, so expect a NACK
    self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE))
    data = struct.pack('!H', MAX_DMX_ADDRESS + 1)
    self.SendRawSet(ROOT_DEVICE, self.pid, data)


class SetDMXBlockAddressWithNoData(TestMixins.SetWithNoDataMixin,
                                   OptionalParameterTestFixture):
  """Set DMX_BLOCK_ADDRESS with no data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'DMX_BLOCK_ADDRESS'


class AllSubDevicesGetDmxBlockAddress(TestMixins.AllSubDevicesGetMixin,
                                      ResponderTestFixture):
  """Get DMX_BLOCK_ADDRESS addressed to ALL_SUB_DEVICES."""
  CATEGORY = TestCategory.SUB_DEVICES
  PID = 'DMX_BLOCK_ADDRESS'

# DMX_FAIL_MODE
#------------------------------------------------------------------------------
class GetDmxFailMode(OptionalParameterTestFixture):
  """GET DMX_FAIL_MODE."""
  CATEGORY = TestCategory.DMX_SETUP
  PID = 'DMX_FAIL_MODE'
  PROVIDES = ['dmx_fail_settings']

  def Test(self):
    self.AddIfGetSupported(self.AckGetResult())
    self.SendGet(ROOT_DEVICE, self.pid)

  def VerifyResult(self, response, fields):
    # always publish the property, even on a NACK, so dependent tests run
    if fields is None:
      fields = {}
    self.SetProperty('dmx_fail_settings', fields)


class GetFailUpModeWithData(TestMixins.GetWithDataMixin,
                            OptionalParameterTestFixture):
  """GET DMX_FAIL_MODE with extra data."""
  CATEGORY = TestCategory.ERROR_CONDITIONS
  PID = 'DMX_FAIL_MODE'


class SetDmxFailMode(OptionalParameterTestFixture):
  """Set DMX_FAIL_MODE without changing the settings."""
  CATEGORY = TestCategory.DMX_SETUP
  PID = 'DMX_FAIL_MODE'
  PROVIDES = ['set_dmx_fail_mode_supported']
  REQUIRES = ['dmx_fail_settings']

  def Test(self):
    # write back the values we read earlier so nothing actually changes
    settings = self.Property('dmx_fail_settings', {})
    self.AddIfSetSupported([
      self.AckSetResult(),
      self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS),
    ])
    self.SendSet(
        ROOT_DEVICE, self.pid,
        [settings.get('scene_number', 0),
         settings.get('hold_time', 0),
         settings.get('loss_of_signal_delay', 0),
         settings.get('level', 0),
        ])

  def VerifyResult(self, response, fields):
    self.SetProperty('set_dmx_fail_mode_supported', response.WasAcked())


class SetDmxFailModeMinimumTime(TestMixins.SetDmxFailModeMixin,
                                OptionalParameterTestFixture):
  """Verify the minimum times in PRESET_INFO are supported in DMX_FAIL_MODE."""
  def Test(self):
    self.in_get = False

    if self.Property('set_dmx_fail_mode_supported'):
      self.AddIfSetSupported(self.AckSetResult(action=self.GetFailMode))
    else:
      self.AddIfSetSupported(
          self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS))

    preset_info = self.Property('preset_info', {})
    self.known_limits = preset_info != {}
    self.delay_time = preset_info.get('min_fail_delay_time', 0)
    self.hold_time = preset_info.get('min_fail_hold_time', 0)
    self.SendSet(ROOT_DEVICE, self.pid,
                 [0, self.delay_time, self.hold_time, 255])

  def GetFailMode(self):
    self.in_get = True
    fields = {}
    if self.known_limits:
      # we only check the field values if PRESET_INFO told us the limits
      fields['loss_of_signal_delay'] = self.delay_time
      fields['hold_time'] = self.hold_time
    self.AddIfGetSupported(self.AckGetResult(field_values=fields))
    self.SendGet(ROOT_DEVICE, self.pid)


class SetDmxFailModeMaximumTime(TestMixins.SetDmxFailModeMixin,
OptionalParameterTestFixture): """Verify the maximum times in PRESET_INFO are supported in DMX_FAIL_MODE.""" def Test(self): self.in_get = False if self.Property('set_dmx_fail_mode_supported'): self.AddIfSetSupported(self.AckSetResult(action=self.GetFailMode)) else: self.AddIfSetSupported( self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) preset_info = self.Property('preset_info', {}) self.known_limits = preset_info != {} self.delay_time = preset_info.get('max_fail_delay_time', self.INFINITE_TIME) self.hold_time = preset_info.get('max_fail_hold_time', self.INFINITE_TIME) self.SendSet(ROOT_DEVICE, self.pid, [0, self.delay_time, self.hold_time, 255]) def GetFailMode(self): self.in_get = True fields = {} if self.known_limits: fields['loss_of_signal_delay'] = self.delay_time fields['hold_time'] = self.hold_time self.AddIfGetSupported(self.AckGetResult(field_values=fields)) self.SendGet(ROOT_DEVICE, self.pid) class SetDmxFailModeInfiniteTimes(TestMixins.SetDmxFailModeMixin, OptionalParameterTestFixture): """Check if infinite times are supported for DMX_FAIL_MODE.""" def Test(self): self.in_get = False if self.Property('set_dmx_fail_mode_supported'): self.AddIfSetSupported(self.AckSetResult(action=self.GetFailMode)) else: self.AddIfSetSupported( self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) self.SendSet(ROOT_DEVICE, self.pid, [0, 'infinite', 'infinite', 255]) def GetFailMode(self): self.in_get = True self.AddIfGetSupported(self.AckGetResult()) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not response.WasAcked() or not self.in_get: return self.CheckField( 'delay time', self.Property('preset_info', {}).get('fail_infinite_delay_supported'), fields['loss_of_signal_delay']) self.CheckField( 'hold time', self.Property('preset_info', {}).get('fail_infinite_hold_supported'), fields['hold_time']) def CheckField(self, field_name, is_supported, new_value): if is_supported is None: # We can't tell is the new value is correct or 
not return; if is_supported and new_value != self.INFINITE_TIME: self.SetFailed( 'infinite %s was supported, but the value was truncated after a set.' ' Expected %d, got %d' % (field_name, self.INFINITE_TIME, new_value)) elif not is_supported and new_value == self.INFINITE_TIME: self.SetFailed( 'infinite %s was not supported, but the value was not truncated ' 'after a set.' % field_name) class SetDmxFailModeOutOfRangeMaximumTime(TestMixins.SetDmxFailModeMixin, OptionalParameterTestFixture): """Check that the maximum times for DMX_FAIL_MODE are honored.""" def Test(self): self.in_get = False preset_info = self.Property('preset_info', {}) self.max_delay_time = preset_info.get('max_fail_delay_time') self.max_hold_time = preset_info.get('max_fail_hold_time') if self.max_delay_time is None or self.max_hold_time is None: self.SetNotRun("Max times unknown - PRESET_INFO wasn't acked") return delay_time = self.max_delay_time # 0xffff means 'fail mode not supported' if self.max_delay_time * 10 < 0xfffe: delay_time = (self.max_delay_time * 10 + 1) / 10.0 # increment by 1 hold_time = self.max_hold_time # 0xffff means 'fail mode not supported' if self.max_hold_time * 10 < 0xfffe: hold_time = (self.max_hold_time * 10 + 1) / 10.0 # increment by 1 if self.Property('set_dmx_fail_mode_supported'): self.AddIfSetSupported(self.AckSetResult(action=self.GetFailMode)) else: self.AddIfSetSupported( self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) self.SendSet(ROOT_DEVICE, self.pid, [0, delay_time, hold_time, 255]) def GetFailMode(self): self.in_get = True fields = { 'loss_of_signal_delay': self.max_delay_time, 'hold_time': self.max_hold_time, } self.AddIfGetSupported(self.AckGetResult(field_values=fields)) self.SendGet(ROOT_DEVICE, self.pid) class SetDmxFailModeOutOfRangeMinimumTime(TestMixins.SetDmxFailModeMixin, OptionalParameterTestFixture): """Check that the minimum times for DMX_FAIL_MODE are honored.""" def Test(self): self.in_get = False preset_info = 
self.Property('preset_info', {}) self.min_delay_time = preset_info.get('min_fail_delay_time') self.min_hold_time = preset_info.get('min_fail_hold_time') if self.min_delay_time is None or self.min_hold_time is None: self.SetNotRun("Max times unknown - PRESET_INFO wasn't acked") return delay_time = self.min_delay_time # 0xffff means 'fail mode not supported' if self.min_delay_time * 10 > 1: delay_time = (self.min_delay_time * 10 - 1) / 10.0 # decrement by 1 hold_time = self.min_hold_time # 0xffff means 'fail mode not supported' if self.min_hold_time * 10 > 1: hold_time = (self.min_hold_time * 10 - 1) / 10.0 # decrement by 1 if self.Property('set_dmx_fail_mode_supported'): self.AddIfSetSupported(self.AckSetResult(action=self.GetFailMode)) else: self.AddIfSetSupported( self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) self.SendSet(ROOT_DEVICE, self.pid, [0, delay_time, hold_time, 255]) def GetFailMode(self): self.in_get = True fields = { 'loss_of_signal_delay': self.min_delay_time, 'hold_time': self.min_hold_time, } self.AddIfGetSupported(self.AckGetResult(field_values=fields)) self.SendGet(ROOT_DEVICE, self.pid) class SetFailModeWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set DMX_FAIL_MODE with no data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DMX_FAIL_MODE' class AllSubDevicesGetDmxFailMode(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get DMX_FAIL_MODE addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'DMX_FAIL_MODE' # DMX_STARTUP_MODE #------------------------------------------------------------------------------ class GetDmxStartupMode(OptionalParameterTestFixture): """Get DMX_STARTUP_MODE.""" CATEGORY = TestCategory.DMX_SETUP PID = 'DMX_STARTUP_MODE' PROVIDES = ['dmx_startup_settings'] def Test(self): self.AddIfGetSupported(self.AckGetResult()) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if fields is None: fields = {} 
self.SetProperty('dmx_startup_settings', fields) class GetStartUpModeWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get DMX_STARTUP_MODE with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DMX_STARTUP_MODE' class SetDmxStartupMode(OptionalParameterTestFixture): """Set DMX_STARTUP_MODE without changing the settings.""" CATEGORY = TestCategory.DMX_SETUP PID = 'DMX_FAIL_MODE' PROVIDES = ['set_dmx_startup_mode_supported'] REQUIRES = ['dmx_startup_settings'] def Test(self): settings = self.Property('dmx_startup_settings', {}) self.AddIfSetSupported([ self.AckSetResult(), self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS), ]) self.SendSet( ROOT_DEVICE, self.pid, [settings.get('scene_number', 0), settings.get('hold_time', 0), settings.get('startup_delay', 0), settings.get('level', 0), ]) def VerifyResult(self, response, fields): self.SetProperty('set_dmx_startup_mode_supported', response.WasAcked()) class SetDmxStartupModeMinimumTime(TestMixins.SetDmxStartupModeMixin, OptionalParameterTestFixture): """Verify DMX_STARTUP_MODE supports the min. 
times from PRESET_INFO.""" def Test(self): self.in_get = False if self.Property('set_dmx_startup_mode_supported'): self.AddIfSetSupported(self.AckSetResult(action=self.GetStartupMode)) else: self.AddIfSetSupported( self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) preset_info = self.Property('preset_info', {}) self.known_limits = preset_info != {} self.delay_time = preset_info.get('min_startup_delay_time', 0) self.hold_time = preset_info.get('min_startup_hold_time', 0) self.SendSet(ROOT_DEVICE, self.pid, [0, self.delay_time, self.hold_time, 255]) def GetStartupMode(self): self.in_get = True fields = {} if self.known_limits: fields['startup_delay'] = self.delay_time fields['hold_time'] = self.hold_time self.AddIfGetSupported(self.AckGetResult(field_values=fields)) self.SendGet(ROOT_DEVICE, self.pid) class SetDmxStartupModeMaximumTime(TestMixins.SetDmxStartupModeMixin, OptionalParameterTestFixture): """Verify DMX_STARTUP_MODE supports the max. times from PRESET_INFO.""" def Test(self): self.in_get = False if self.Property('set_dmx_startup_mode_supported'): self.AddIfSetSupported(self.AckSetResult(action=self.GetStartupMode)) else: self.AddIfSetSupported( self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) preset_info = self.Property('preset_info', {}) self.known_limits = preset_info != {} self.delay_time = preset_info.get('max_startup_delay_time', self.INFINITE_TIME) self.hold_time = preset_info.get('max_startup_hold_time', self.INFINITE_TIME) self.SendSet(ROOT_DEVICE, self.pid, [0, self.delay_time, self.hold_time, 255]) def GetStartupMode(self): self.in_get = True fields = {} if self.known_limits: fields['startup_delay'] = self.delay_time fields['hold_time'] = self.hold_time self.AddIfGetSupported(self.AckGetResult(field_values=fields)) self.SendGet(ROOT_DEVICE, self.pid) class SetDmxStartupModeInfiniteTimes(TestMixins.SetDmxStartupModeMixin, OptionalParameterTestFixture): """Check if infinite times are supported for DMX_STARTUP_MODE.""" def 
Test(self): self.in_get = False if self.Property('set_dmx_startup_mode_supported'): self.AddIfSetSupported(self.AckSetResult(action=self.GetStartupMode)) else: self.AddIfSetSupported( self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) self.SendSet(ROOT_DEVICE, self.pid, [0, 'infinite', 'infinite', 255]) def GetStartupMode(self): self.in_get = True self.AddIfGetSupported(self.AckGetResult()) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not response.WasAcked() or not self.in_get: return self.CheckField( 'delay time', self.Property('preset_info', {}).get( 'startup_infinite_delay_supported'), fields['startup_delay']) self.CheckField( 'hold time', self.Property('preset_info', {}).get('startup_infinite_hold_supported'), fields['hold_time']) def CheckField(self, field_name, is_supported, new_value): if is_supported is None: # We can't tell is the new value is correct or not return; if is_supported and new_value != self.INFINITE_TIME: self.SetFailed( 'infinite %s was supported, but the value was truncated after a set.' ' Expected %d, got %d' % (field_name, self.INFINITE_TIME, new_value)) elif not is_supported and new_value == self.INFINITE_TIME: self.SetFailed( 'infinite %s was not supported, but the value was not truncated ' 'after a set.' 
% field_name) class SetDmxStartupModeOutOfRangeMaximumTime(TestMixins.SetDmxStartupModeMixin, OptionalParameterTestFixture): """Check that the maximum times for DMX_STARTUP_MODE are honored.""" def Test(self): self.in_get = False preset_info = self.Property('preset_info', {}) self.max_delay_time = preset_info.get('max_startup_delay_time') self.max_hold_time = preset_info.get('max_startup_hold_time') if self.max_delay_time is None or self.max_hold_time is None: self.SetNotRun("Max times unknown - PRESET_INFO wasn't acked") return delay_time = self.max_delay_time # 0xffff means 'startup mode not supported' if self.max_delay_time * 10 < 0xfffe: delay_time = (self.max_delay_time * 10 + 1) / 10.0 # increment by 1 hold_time = self.max_hold_time # 0xffff means 'startup mode not supported' if self.max_hold_time * 10 < 0xfffe: hold_time = (self.max_hold_time * 10 + 1) / 10.0 # increment by 1 if self.Property('set_dmx_startup_mode_supported'): self.AddIfSetSupported(self.AckSetResult(action=self.GetStartupMode)) else: self.AddIfSetSupported( self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) self.SendSet(ROOT_DEVICE, self.pid, [0, delay_time, hold_time, 255]) def GetStartupMode(self): self.in_get = True fields = { 'startup_delay': self.max_delay_time, 'hold_time': self.max_hold_time, } self.AddIfGetSupported(self.AckGetResult(field_values=fields)) self.SendGet(ROOT_DEVICE, self.pid) class SetDmxStartupModeOutOfRangeMinimumTime(TestMixins.SetDmxStartupModeMixin, OptionalParameterTestFixture): """Check that the minimum times for DMX_STARTUP_MODE are honored.""" def Test(self): self.in_get = False preset_info = self.Property('preset_info', {}) self.min_delay_time = preset_info.get('min_startup_delay_time') self.min_hold_time = preset_info.get('min_startup_hold_time') if self.min_delay_time is None or self.min_hold_time is None: self.SetNotRun("Max times unknown - PRESET_INFO wasn't acked") return delay_time = self.min_delay_time # 0xffff means 'startup mode not 
supported' if self.min_delay_time * 10 > 1: delay_time = (self.min_delay_time * 10 - 1) / 10.0 # decrement by 1 hold_time = self.min_hold_time # 0xffff means 'startup mode not supported' if self.min_hold_time * 10 > 1: hold_time = (self.min_hold_time * 10 - 1) / 10.0 # decrement by 1 if self.Property('set_dmx_startup_mode_supported'): self.AddIfSetSupported(self.AckSetResult(action=self.GetStartupMode)) else: self.AddIfSetSupported( self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) self.SendSet(ROOT_DEVICE, self.pid, [0, delay_time, hold_time, 255]) def GetStartupMode(self): self.in_get = True fields = { 'startup_delay': self.min_delay_time, 'hold_time': self.min_hold_time, } self.AddIfGetSupported(self.AckGetResult(field_values=fields)) self.SendGet(ROOT_DEVICE, self.pid) class SetStartupModeWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set DMX_STARTUP_MODE with no data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DMX_STARTUP_MODE' class AllSubDevicesGetDmxStartupMode(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get DMX_STARTUP_MODE addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'DMX_STARTUP_MODE' # POWER_ON_SELF_TEST #------------------------------------------------------------------------------ class GetPowerOnSelfTest(TestMixins.GetMixin, OptionalParameterTestFixture): """Get the POWER_ON_SELF_TEST.""" CATEGORY = TestCategory.CONTROL PID = 'POWER_ON_SELF_TEST' EXPECTED_FIELD = 'power_on_self_test' PROVIDES = ['power_on_self_test'] class GetPowerOnSelfTestWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get the POWER_ON_SELF_TEST with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'POWER_ON_SELF_TEST' class SetPowerOnSelfTest(TestMixins.SetBoolMixin, OptionalParameterTestFixture): """Set POWER_ON_SELF_TEST.""" CATEGORY = TestCategory.CONTROL PID = 'POWER_ON_SELF_TEST' EXPECTED_FIELD = 'power_on_self_test' REQUIRES = ['power_on_self_test'] 
def OldValue(self): return self.Property('power_on_self_test') class SetPowerOnSelfTestWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set the POWER_ON_SELF_TEST with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'POWER_ON_SELF_TEST' class AllSubDevicesGetPowerOnSelfTest(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get POWER_ON_SELF_TEST addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'POWER_ON_SELF_TEST' # LOCK_STATE #------------------------------------------------------------------------------ class GetLockState(TestMixins.GetMixin, OptionalParameterTestFixture): """Get LOCK_STATE.""" CATEGORY = TestCategory.CONFIGURATION PID = "LOCK_STATE" PROVIDES = ['current_lock_state', 'number_of_lock_states'] def Test(self): self.AddIfGetSupported(self.AckGetResult()) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not response.WasAcked(): for key in self.PROVIDES: self.SetProperty(key, None) return self.SetPropertyFromDict(fields, 'current_lock_state') self.SetPropertyFromDict(fields, 'number_of_lock_states') if fields['current_lock_state'] > fields['number_of_lock_states']: self.SetFailed('Lock State %d exceeded number of lock states %d' % (fields['current_lock_state'], fields['number_of_lock_states'])) return class GetLockStateWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get LOCK_STATE with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LOCK_STATE' class AllSubDevicesGetLockState(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get LOCK_STATE addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'LOCK_STATE' class SetLockStateWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set LOCK_STATE without no data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LOCK_STATE' class SetLockState(OptionalParameterTestFixture): """Set LOCK_STATE.""" CATEGORY = 
TestCategory.CONFIGURATION PID = 'LOCK_STATE' REQUIRES = ['current_lock_state', 'pin_code'] def Test(self): self.lock_state = self.Property('current_lock_state') if self.lock_state is None: self.SetNotRun('Unable to determine pin code') return self.pin = self.Property('pin_code') if self.pin is None: # try setting to a static value, we make old and new the same just on the # off chance this is actually the pin # http://www.datagenetics.com/blog/september32012/ self.AddIfSetSupported([ self.AckSetResult(), self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE), ]) self.SendSet(PidStore.ROOT_DEVICE, self.pid, [439, self.lock_state]) else: self.AddIfSetSupported([ self.AckSetResult(action=self.VerifySet), self.NackSetResult( RDMNack.NR_UNSUPPORTED_COMMAND_CLASS, advisory='SET for %s returned unsupported command class' % self.PID), ]) self.SendSet(PidStore.ROOT_DEVICE, self.pid, [self.pin, self.lock_state]) def VerifySet(self): self.AddExpectedResults( self.AckGetResult(field_values={'current_lock_state': self.lock_state})) self.SendGet(PidStore.ROOT_DEVICE, self.pid) # LOCK_STATE_DESCRIPTION #------------------------------------------------------------------------------ class GetLockStateDescription(TestMixins.GetSettingDescriptionsMixin, OptionalParameterTestFixture): """Get LOCK_STATE_DESCRIPTION for all known states.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = 'LOCK_STATE_DESCRIPTION' REQUIRES = ['number_of_lock_states'] EXPECTED_FIELD = 'lock_state' class GetLockStateDescriptionWithNoData(TestMixins.GetWithNoDataMixin, OptionalParameterTestFixture): """Get LOCK_STATE_DESCRIPTION with no lock state specified.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LOCK_STATE_DESCRIPTION' class GetLockStateDescriptionWithExtraData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get LOCK_STATE_DESCRIPTION with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LOCK_STATE_DESCRIPTION' class GetZeroLockStateDescription(TestMixins.GetZeroByteMixin, 
OptionalParameterTestFixture): """Get LOCK_STATE_DESCRIPTION for lock state 0.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LOCK_STATE_DESCRIPTION' class GetOutOfRangeLockStateDescription(TestMixins.GetOutOfRangeByteMixin, OptionalParameterTestFixture): """Get LOCK_STATE_DESCRIPTION for an out-of-range lock state.""" PID = 'LOCK_STATE_DESCRIPTION' REQUIRES = ['number_of_lock_states'] LABEL = 'lock states' class SetLockStateDescription(TestMixins.UnsupportedSetMixin, ResponderTestFixture): """Set the LOCK_STATE_DESCRIPTION.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LOCK_STATE_DESCRIPTION' class AllSubDevicesGetLockStateDescription(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get LOCK_STATE_DESCRIPTION addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'LOCK_STATE_DESCRIPTION' DATA = [1] # LOCK_PIN #------------------------------------------------------------------------------ class GetLockPin(OptionalParameterTestFixture): """Get LOCK_PIN.""" CATEGORY = TestCategory.CONFIGURATION PID = 'LOCK_PIN' PROVIDES = ['pin_code'] def Test(self): self.AddIfGetSupported([ self.AckGetResult(field_names=['pin_code']), self.NackGetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS), ]) self.SendGet(PidStore.ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if response.WasAcked(): self.SetPropertyFromDict(fields, 'pin_code') class GetLockPinWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get LOCK_PIN with data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LOCK_PIN' class AllSubDevicesGetLockPin(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get LOCK_PIN addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'LOCK_PIN' class SetLockPinWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set LOCK_PIN with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LOCK_PIN' class SetLockPin(OptionalParameterTestFixture): """Set 
LOCK_PIN.""" CATEGORY = TestCategory.CONFIGURATION PID = 'LOCK_PIN' REQUIRES = ['pin_code'] def Test(self): self.pin = self.Property('pin_code') if self.pin is None: # try setting to a static value, we make old and new the same just on the # off chance this is actually the pin # http://www.datagenetics.com/blog/september32012/ self.AddIfSetSupported([ self.AckSetResult(), self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE), ]) self.SendSet(PidStore.ROOT_DEVICE, self.pid, [439, 439]) else: self.AddIfSetSupported([ self.AckSetResult(action=self.VerifySet), self.NackSetResult( RDMNack.NR_UNSUPPORTED_COMMAND_CLASS, advisory='SET for %s returned unsupported command class' % self.PID), ]) self.SendSet(PidStore.ROOT_DEVICE, self.pid, [self.pin, self.pin]) def VerifySet(self): self.AddExpectedResults([ self.AckGetResult(field_values={'pin_code': self.pin}), self.NackGetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS), ]) self.SendGet(PidStore.ROOT_DEVICE, self.pid) class SetInvalidLockPin(OptionalParameterTestFixture): """Set LOCK_PIN with the wrong pin code.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LOCK_PIN' REQUIRES = ['pin_code'] def Test(self): self.pin = self.Property('pin_code') if self.pin is None: self.SetNotRun('Unable to determine pin code') return bad_pin = (self.pin + 1) % 10000 self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) self.SendSet(ROOT_DEVICE, self.pid, [0, bad_pin]) class SetOutOfRangeLockPin(OptionalParameterTestFixture): """Set LOCK_PIN with an out-of-range pin code.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'LOCK_PIN' REQUIRES = ['pin_code'] def Test(self): self.pin = self.Property('pin_code') if self.pin is None: self.SetNotRun('Unable to determine pin code') return self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_FORMAT_ERROR)) data = struct.pack('!HH', 10001, self.pin) self.SendRawSet(ROOT_DEVICE, self.pid, data) # BURN_IN #------------------------------------------------------------------------------ class 
GetBurnIn(TestMixins.GetMixin, OptionalParameterTestFixture): """GET BURN_IN.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'BURN_IN' EXPECTED_FIELD = 'hours_remaining' PROVIDES = ['burn_in_hours'] class GetBurnInWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get BURN_IN with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'BURN_IN' class SetBurnIn(TestMixins.SetUInt8Mixin, OptionalParameterTestFixture): """Set BURN_IN.""" CATEGORY = TestCategory.POWER_LAMP_SETTINGS PID = 'BURN_IN' EXPECTED_FIELD = 'hours_remaining' REQUIRES = ['burn_in_hours'] def OldValue(self): return self.Property('burn_in_hours') def VerifySet(self): new_value = self.NewValue() results = [ self.AckGetResult(field_values={self.EXPECTED_FIELD: self.NewValue()}), ] # Since this is hours remaining, it may be decremented before we can read # it back if new_value: results.append( self.AckGetResult(field_values={self.EXPECTED_FIELD: new_value - 1})) self.AddExpectedResults(results) self.SendGet(ROOT_DEVICE, self.pid) class SetBurnInWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set BURN_IN with no param data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'BURN_IN' class AllSubDevicesGetBurnIn(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get BURN_IN addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'BURN_IN' # DIMMER_INFO #------------------------------------------------------------------------------ class GetDimmerInfo(OptionalParameterTestFixture): """GET dimmer info.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = 'DIMMER_INFO' REQUIRES = ['supported_parameters'] PROVIDES = ['minimum_level_lower', 'minimum_level_upper', 'maximum_level_lower', 'maximum_level_upper', 'number_curves_supported', 'levels_resolution', 'split_levels_supported'] def Test(self): self.AddIfGetSupported(self.AckGetResult()) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not 
response.WasAcked(): for field in self.PROVIDES: self.SetProperty(field, None) return if fields['minimum_level_lower'] > fields['minimum_level_upper']: self.AddAdvisory('minimum_level_lower (%d) > minimum_level_upper (%d)' % (fields['minimum_level_lower'], fields['minimum_level_upper'])) if fields['maximum_level_lower'] > fields['maximum_level_upper']: self.AddAdvisory('maximum_level_lower (%d) > maximum_level_upper (%d)' % (fields['maximum_level_lower'], fields['maximum_level_upper'])) self.SetPropertyFromDict(fields, 'minimum_level_lower') self.SetPropertyFromDict(fields, 'minimum_level_upper') self.SetPropertyFromDict(fields, 'maximum_level_lower') self.SetPropertyFromDict(fields, 'maximum_level_upper') self.SetPropertyFromDict(fields, 'number_curves_supported') self.SetPropertyFromDict(fields, 'levels_resolution') self.SetProperty('split_levels_supported', fields['split_levels_supported']) self.CheckFieldsAreUnsupported( 'MINIMUM_LEVEL', fields, {'minimum_level_lower': 0, 'minimum_level_upper': 0xffff, 'split_levels_supported': 0}) self.CheckFieldsAreUnsupported( 'MAXIMUM_LEVEL', fields, {'maximum_level_lower': 0, 'maximum_level_upper': 0xffff}) def CheckFieldsAreUnsupported(self, pid_name, fields, keys): if self.LookupPid(pid_name).value in self.Property('supported_parameters'): return for key, expected_value in keys.iteritems(): if fields[key] != expected_value: self.AddWarning( "%s isn't supported but %s in DIMMER_INFO was not %hx" % (pid_name, key, expected_value)) class GetDimmerInfoWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get DIMMER_INFO with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DIMMER_INFO' class SetDimmerInfo(TestMixins.UnsupportedSetMixin, ResponderTestFixture): """Set DIMMER_INFO.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'DIMMER_INFO' class AllSubDevicesGetDimmerInfo(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get DIMMER_INFO addressed to ALL_SUB_DEVICES.""" CATEGORY = 
TestCategory.SUB_DEVICES PID = 'DIMMER_INFO' # MINIMUM_LEVEL #------------------------------------------------------------------------------ class GetMinimumLevel(TestMixins.GetMixin, OptionalParameterTestFixture): """Get MINIMUM_LEVEL.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = "MINIMUM_LEVEL" REQUIRES = ['minimum_level_lower', 'minimum_level_upper', 'split_levels_supported'] PROVIDES = ['minimum_level_settings'] def Test(self): self.AddIfGetSupported(self.AckGetResult()) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if fields is None: fields = {} self.SetProperty('minimum_level_settings', fields) if not response.WasAcked(): return min_increasing = fields['minimum_level_increasing'] min_decreasing = fields['minimum_level_decreasing'] lower_limit = self.Property('minimum_level_lower') upper_limit = self.Property('minimum_level_upper') if lower_limit is not None and upper_limit is not None: if min_increasing < lower_limit or min_increasing > upper_limit: self.SetFailed( 'minimum_level_increasing is outside the range [%d, %d] from ' 'DIMMER_INFO' % (lower_limit, upper_limit)) return if min_decreasing < lower_limit or min_decreasing > upper_limit: self.SetFailed( 'minimum_level_decreasing is outside the range [%d, %d] from ' 'DIMMER_INFO' % (lower_limit, upper_limit)) return split_supported = self.Property('split_levels_supported') if split_supported is not None: if not split_supported and min_increasing != min_decreasing: self.SetFailed( 'Split min levels not supported but min level increasing (%d)' ' != min level decreasing (%d)' % (min_increasing, min_decreasing)) class GetMinimumLevelWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get MINIMUM_LEVEL with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'MINIMUM_LEVEL' class AllSubDevicesGetMinimumLevel(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get MINIMUM_LEVEL addressed to ALL_SUB_DEVICES.""" CATEGORY = 
TestCategory.SUB_DEVICES PID = 'MINIMUM_LEVEL' class SetMinimumLevel(OptionalParameterTestFixture): """Set MINIMUM_LEVEL without changing the settings.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = 'MINIMUM_LEVEL' REQUIRES = ['minimum_level_settings'] PROVIDES = ['set_minimum_level_supported'] def Test(self): settings = self.Property('minimum_level_settings') if not settings: self.SetNotRun('Unable to determine current MINIMUM_LEVEL settings') return self.AddIfSetSupported([ self.AckSetResult(), self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS), ]) self.SendSet( ROOT_DEVICE, self.pid, [settings['minimum_level_increasing'], settings['minimum_level_decreasing'], settings['on_below_minimum']]) def VerifyResult(self, response, fields): self.SetProperty('set_minimum_level_supported', response.WasAcked()) class SetLowerIncreasingMiniumLevel(TestMixins.SetMinimumLevelMixin, OptionalParameterTestFixture): """Set MINIMUM_LEVEL to the smallest value from DIMMER_INFO.""" REQUIRES = TestMixins.SetMinimumLevelMixin.REQUIRES + ['minimum_level_lower'] def MinLevelIncreasing(self): return self.Property('minimum_level_lower') class SetUpperIncreasingMiniumLevel(TestMixins.SetMinimumLevelMixin, OptionalParameterTestFixture): """Set MINIMUM_LEVEL to the largest value from DIMMER_INFO.""" REQUIRES = TestMixins.SetMinimumLevelMixin.REQUIRES + ['minimum_level_upper'] def MinLevelIncreasing(self): return self.Property('minimum_level_upper') class SetOutOfRangeLowerIncreasingMiniumLevel(TestMixins.SetMinimumLevelMixin, OptionalParameterTestFixture): """Set MINIMUM_LEVEL to one less than the smallest value from DIMMER_INFO.""" REQUIRES = TestMixins.SetMinimumLevelMixin.REQUIRES + ['minimum_level_lower'] def ExpectedResults(self): return self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE) def ShouldSkip(self): self.lower = self.Property('minimum_level_lower') if self.lower == 0: self.SetNotRun('All values supported') return True return False def MinLevelIncreasing(self): return 
self.lower - 1 class SetOutOfRangeUpperIncreasingMiniumLevel(TestMixins.SetMinimumLevelMixin, OptionalParameterTestFixture): """Set MINIMUM_LEVEL to one more than the largest value from DIMMER_INFO.""" REQUIRES = TestMixins.SetMinimumLevelMixin.REQUIRES + ['minimum_level_upper'] def ExpectedResults(self): return self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE) def ShouldSkip(self): self.upper = self.Property('minimum_level_upper') if self.upper == 0xfffe: self.SetNotRun('All values supported') return True return False def MinLevelIncreasing(self): return self.upper + 1 # MAXIMUM_LEVEL #------------------------------------------------------------------------------ class GetMaximumLevel(TestMixins.GetMixin, OptionalParameterTestFixture): """Get MAXIMUM_LEVEL.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = "MAXIMUM_LEVEL" PROVIDES = ['maximum_level'] EXPECTED_FIELD = 'maximum_level' class GetMaximumLevelWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get MAXIMUM_LEVEL with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'MAXIMUM_LEVEL' class SetMaximumLevel(OptionalParameterTestFixture): """Set MAXIMUM_LEVEL without changing the settings.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = 'MAXIMUM_LEVEL' REQUIRES = ['maximum_level'] PROVIDES = ['set_maximum_level_supported'] def Test(self): current_value = self.Property('maximum_level') if current_value is None: current_value = 0xffff self.AddIfSetSupported([ self.AckSetResult(), self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS), ]) self.SendSet(ROOT_DEVICE, self.pid, [current_value]) def VerifyResult(self, response, fields): self.SetProperty('set_maximum_level_supported', response.WasAcked()) class SetLowerMaximumLevel(TestMixins.SetMaximumLevelMixin, OptionalParameterTestFixture): """Set MAXIMUM_LEVEL to the smallest value from DIMMER_INFO.""" REQUIRES = TestMixins.SetMaximumLevelMixin.REQUIRES + ['maximum_level_lower'] def Test(self): self.value = 
self.Property('maximum_level_lower') if self.value is None: self.SetNotRun('No lower maximum level from DIMMER_INFO') return if self.Property('set_maximum_level_supported'): self.AddIfSetSupported(self.AckSetResult(action=self.GetMaxLevel)) else: self.AddIfSetSupported( self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) self.SendSet(ROOT_DEVICE, self.pid, [self.value]) def GetMaxLevel(self): self.AddIfGetSupported(self.AckGetResult( field_values={'maximum_level': self.value})) self.SendGet(ROOT_DEVICE, self.pid) class SetUpperMaximumLevel(TestMixins.SetMaximumLevelMixin, OptionalParameterTestFixture): """Set MAXIMUM_LEVEL to the largest value from DIMMER_INFO.""" REQUIRES = TestMixins.SetMaximumLevelMixin.REQUIRES + ['maximum_level_upper'] def Test(self): self.value = self.Property('maximum_level_upper') if self.value is None: self.SetNotRun('No upper maximum level from DIMMER_INFO') return if self.Property('set_maximum_level_supported'): self.AddIfSetSupported(self.AckSetResult(action=self.GetMaxLevel)) else: self.AddIfSetSupported( self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) self.SendSet(ROOT_DEVICE, self.pid, [self.value]) def GetMaxLevel(self): self.AddIfGetSupported(self.AckGetResult( field_values={'maximum_level': self.value})) self.SendGet(ROOT_DEVICE, self.pid) class SetLowerOutOfRangeMaximumLevel(OptionalParameterTestFixture): """Set MAXIMUM_LEVEL a value smaller than the minimum.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = 'MAXIMUM_LEVEL' REQUIRES = ['maximum_level_lower', 'set_maximum_level_supported'] def Test(self): self.value = self.Property('maximum_level_lower') if self.value is None: self.SetNotRun('No lower maximum level from DIMMER_INFO') return if self.value == 0: self.SetNotRun('Range for maximum level begins at 0') return self.value -= 1 if self.Property('set_maximum_level_supported'): self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) else: self.AddIfSetSupported( 
self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) self.SendSet(ROOT_DEVICE, self.pid, [self.value]) class SetUpperOutOfRangeMaximumLevel(OptionalParameterTestFixture): """Set MAXIMUM_LEVEL a value larger than the maximum.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = 'MAXIMUM_LEVEL' REQUIRES = ['maximum_level_upper', 'set_maximum_level_supported'] def Test(self): self.value = self.Property('maximum_level_upper') if self.value is None: self.SetNotRun('No upper maximum level from DIMMER_INFO') return if self.value == 0xffff: self.SetNotRun('Range for maximum level ends at 0xffff') return self.value += 1 if self.Property('set_maximum_level_supported'): self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) else: self.AddIfSetSupported( self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS)) self.SendSet(ROOT_DEVICE, self.pid, [self.value]) class AllSubDevicesGetMaximumLevel(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get MAXIMUM_LEVEL addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'MAXIMUM_LEVEL' # CURVE #------------------------------------------------------------------------------ class GetCurve(TestMixins.GetMixin, OptionalParameterTestFixture): """Get CURVE.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = "CURVE" REQUIRES = ['number_curves_supported'] PROVIDES = ['current_curve', 'number_curves'] def Test(self): self.AddIfGetSupported(self.AckGetResult()) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not response.WasAcked(): for key in self.PROVIDES: self.SetProperty(key, None) return self.SetPropertyFromDict(fields, 'current_curve') self.SetPropertyFromDict(fields, 'number_curves') if fields['current_curve'] == 0: self.SetFailed('Curves must be numbered from 1') return if fields['current_curve'] > fields['number_curves']: self.SetFailed('Curve %d exceeded number of curves %d' % (fields['current_curve'], fields['number_curves'])) return expected_curves = 
self.Property('number_curves_supported') if expected_curves is not None: if expected_curves != fields['number_curves']: self.AddWarning( 'The number of curves reported in DIMMER_INFO (%d) does not ' 'match the number from CURVE (%d)' % (expected_curves, fields['number_curves'])) class GetCurveWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get CURVE with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'CURVE' class SetCurve(OptionalParameterTestFixture): """Set CURVE.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = "CURVE" REQUIRES = ['current_curve', 'number_curves'] def Test(self): curves = self.Property('number_curves') if curves: self.curves = [i + 1 for i in xrange(curves)] self._SetCurve() else: # check we get a NR_UNKNOWN_PID self.AddExpectedResults(self.NackSetResult(RDMNack.NR_UNKNOWN_PID)) self.curve = 1 # can use anything here really self.SendSet(ROOT_DEVICE, self.pid, [1]) def _SetCurve(self): if not self.curves: # end of the list, we're done self.Stop() return self.AddIfSetSupported(self.AckSetResult(action=self.VerifySet)) self.SendSet(ROOT_DEVICE, self.pid, [self.curves[0]]) def VerifySet(self): self.AddIfGetSupported( self.AckGetResult( field_values={'current_curve': self.curves[0]}, action=self.NextCurve)) self.SendGet(ROOT_DEVICE, self.pid) def NextCurve(self): self.curves = self.curves[1:] self._SetCurve() def ResetState(self): if not self.PidSupported() or not self.Property('current_curve'): return self.SendSet(ROOT_DEVICE, self.pid, [self.Property('current_curve')]) self._wrapper.Run() class SetZeroCurve(TestMixins.SetZeroByteMixin, OptionalParameterTestFixture): """Set CURVE to 0.""" PID = 'CURVE' class SetOutOfRangeCurve(TestMixins.SetOutOfRangeByteMixin, OptionalParameterTestFixture): """Set CURVE to an out-of-range value.""" PID = 'CURVE' REQUIRES = ['number_curves'] LABEL = 'curves' class SetCurveWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set CURVE without any data.""" 
CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'CURVE' class AllSubDevicesGetCurve(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get CURVE addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'CURVE' # CURVE_DESCRIPTION #------------------------------------------------------------------------------ class GetCurveDescription(TestMixins.GetSettingDescriptionsMixin, OptionalParameterTestFixture): """Get the CURVE_DESCRIPTION for all known curves.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = 'CURVE_DESCRIPTION' REQUIRES = ['number_curves'] EXPECTED_FIELD = 'curve_number' class GetCurveDescriptionWithNoData(TestMixins.GetWithNoDataMixin, OptionalParameterTestFixture): """Get CURVE_DESCRIPTION with no curve number specified.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'CURVE_DESCRIPTION' class GetCurveDescriptionWithExtraData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get CURVE_DESCRIPTION with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'CURVE_DESCRIPTION' class GetZeroCurveDescription(TestMixins.GetZeroByteMixin, OptionalParameterTestFixture): """Get CURVE_DESCRIPTION for curve 0.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'CURVE_DESCRIPTION' class GetOutOfRangeCurveDescription(TestMixins.GetOutOfRangeByteMixin, OptionalParameterTestFixture): """Get CURVE_DESCRIPTION for an out-of-range curve.""" PID = 'CURVE_DESCRIPTION' REQUIRES = ['number_curves'] LABEL = 'curves' class SetCurveDescription(TestMixins.UnsupportedSetMixin, ResponderTestFixture): """Set the CURVE_DESCRIPTION.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'CURVE_DESCRIPTION' class AllSubDevicesGetCurveDescription(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get CURVE_DESCRIPTION addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'CURVE_DESCRIPTION' DATA = [1] # OUTPUT_RESPONSE_TIME #------------------------------------------------------------------------------ class 
GetOutputResponseTime(TestMixins.GetMixin, OptionalParameterTestFixture): """Get OUTPUT_RESPONSE_TIME.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = "OUTPUT_RESPONSE_TIME" PROVIDES = ['current_response_time', 'number_response_options'] def Test(self): self.AddIfGetSupported(self.AckGetResult()) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not response.WasAcked(): for key in self.PROVIDES: self.SetProperty(key, None) return self.SetPropertyFromDict(fields, 'current_response_time') self.SetPropertyFromDict(fields, 'number_response_options') if fields['current_response_time'] == 0: self.SetFailed('Output response times must be numbered from 1') return if fields['current_response_time'] > fields['number_response_options']: self.SetFailed( 'Output response time %d exceeded number of response times %d' % (fields['current_response_time'], fields['number_response_options'])) return class GetOutputResponseTimeWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get OUTPUT_RESPONSE_TIME with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'OUTPUT_RESPONSE_TIME' class SetOutputResponseTime(OptionalParameterTestFixture): """Set OUTPUT_RESPONSE_TIME.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = "OUTPUT_RESPONSE_TIME" REQUIRES = ['current_response_time', 'number_response_options'] def Test(self): times = self.Property('number_response_options') if times: self.output_response_times = [i + 1 for i in xrange(times)] self._SetOutputResponseTime() else: # check we get a NR_UNKNOWN_PID self.AddExpectedResults(self.NackSetResult(RDMNack.NR_UNKNOWN_PID)) self.current_response_time = 1 # can use anything here really self.SendSet(ROOT_DEVICE, self.pid, [1]) def _SetOutputResponseTime(self): if not self.output_response_times: # end of the list, we're done self.Stop() return self.AddIfSetSupported(self.AckSetResult(action=self.VerifySet)) self.SendSet(ROOT_DEVICE, self.pid, [self.output_response_times[0]]) def 
VerifySet(self): self.AddIfGetSupported( self.AckGetResult( field_values={'current_response_time': self.output_response_times[0]}, action=self.NextOutputResponseTime)) self.SendGet(ROOT_DEVICE, self.pid) def NextOutputResponseTime(self): self.output_response_times = self.output_response_times[1:] self._SetOutputResponseTime() def ResetState(self): if not self.PidSupported() or not self.Property('current_response_time'): return self.SendSet(ROOT_DEVICE, self.pid, [self.Property('current_response_time')]) self._wrapper.Run() class SetZeroOutputResponseTime(TestMixins.SetZeroByteMixin, OptionalParameterTestFixture): """Set OUTPUT_RESPONSE_TIME to 0.""" PID = 'OUTPUT_RESPONSE_TIME' class SetOutOfRangeOutputResponseTime(TestMixins.SetOutOfRangeByteMixin, OptionalParameterTestFixture): """Set OUTPUT_RESPONSE_TIME to an out-of-range value.""" PID = 'OUTPUT_RESPONSE_TIME' REQUIRES = ['number_response_options'] LABEL = 'output response times' class SetOutputResponseTimeWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set OUTPUT_RESPONSE_TIME without any data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'OUTPUT_RESPONSE_TIME' class AllSubDevicesGetOutputResponseTime(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get OUTPUT_RESPONSE_TIME addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'OUTPUT_RESPONSE_TIME' # OUTPUT_RESPONSE_TIME_DESCRIPTION #------------------------------------------------------------------------------ class GetOutputResponseTimeDescription(TestMixins.GetSettingDescriptionsMixin, OptionalParameterTestFixture): """Get the OUTPUT_RESPONSE_TIME_DESCRIPTION for all response times.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = 'OUTPUT_RESPONSE_TIME_DESCRIPTION' REQUIRES = ['number_response_options'] EXPECTED_FIELD = 'response_time' class GetOutputResponseTimeDescriptionWithNoData(TestMixins.GetWithNoDataMixin, OptionalParameterTestFixture): """Get OUTPUT_RESPONSE_TIME_DESCRIPTION with no 
response time number.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'OUTPUT_RESPONSE_TIME_DESCRIPTION' class GetOutputResponseTimeDescriptionWithExtraData( TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get OUTPUT_RESPONSE_TIME_DESCRIPTION with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'OUTPUT_RESPONSE_TIME_DESCRIPTION' class GetZeroOutputResponseTimeDescription(TestMixins.GetZeroByteMixin, OptionalParameterTestFixture): """Get OUTPUT_RESPONSE_TIME_DESCRIPTION for response_time 0.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'OUTPUT_RESPONSE_TIME_DESCRIPTION' class GetOutOfRangeOutputResponseTimeDescription( TestMixins.GetOutOfRangeByteMixin, OptionalParameterTestFixture): """Get OUTPUT_RESPONSE_TIME_DESCRIPTION for an out-of-range response time.""" PID = 'OUTPUT_RESPONSE_TIME_DESCRIPTION' REQUIRES = ['number_response_options'] LABEL = 'response times' class SetOutputResponseTimeDescription(TestMixins.UnsupportedSetMixin, ResponderTestFixture): """SET OUTPUT_RESPONSE_TIME_DESCRIPTION.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'OUTPUT_RESPONSE_TIME_DESCRIPTION' class AllSubDevicesGetOutputResponseTimeDescription( TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get OUTPUT_RESPONSE_TIME_DESCRIPTION addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'OUTPUT_RESPONSE_TIME_DESCRIPTION' DATA = [1] # MODULATION_FREQUENCY #------------------------------------------------------------------------------ class GetModulationFrequency(TestMixins.GetMixin, OptionalParameterTestFixture): """Get MODULATION_FREQUENCY.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = "MODULATION_FREQUENCY" PROVIDES = ['current_modulation_frequency', 'number_modulation_frequencies'] def Test(self): self.AddIfGetSupported(self.AckGetResult()) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not response.WasAcked(): for key in self.PROVIDES: self.SetProperty(key, None) return 
self.SetPropertyFromDict(fields, 'current_modulation_frequency') self.SetPropertyFromDict(fields, 'number_modulation_frequencies') if fields['current_modulation_frequency'] == 0: self.SetFailed('Modulation frequency must be numbered from 1') return if fields['current_modulation_frequency'] > fields['number_modulation_frequencies']: self.SetFailed( 'Modulation frequency %d exceeded number of modulation frequencies %d' % (fields['current_modulation_frequency'], fields['number_modulation_frequencies'])) return class GetModulationFrequencyWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get MODULATION_FREQUENCY with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'MODULATION_FREQUENCY' class SetModulationFrequency(OptionalParameterTestFixture): """Set MODULATION_FREQUENCY.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = "MODULATION_FREQUENCY" REQUIRES = ['current_modulation_frequency', 'number_modulation_frequencies'] def Test(self): items = self.Property('number_modulation_frequencies') if items: self.frequencies = [i + 1 for i in xrange(items)] self._SetModulationFrequency() else: # check we get a NR_UNKNOWN_PID self.AddExpectedResults(self.NackSetResult(RDMNack.NR_UNKNOWN_PID)) self.frequency = 1 # can use anything here really self.SendSet(ROOT_DEVICE, self.pid, [1]) def _SetModulationFrequency(self): if not self.frequencies: # end of the list, we're done self.Stop() return self.AddIfSetSupported(self.AckSetResult(action=self.VerifySet)) self.SendSet(ROOT_DEVICE, self.pid, [self.frequencies[0]]) def VerifySet(self): self.AddIfGetSupported( self.AckGetResult( field_values={'current_modulation_frequency': self.frequencies[0]}, action=self.NextModulationFrequency)) self.SendGet(ROOT_DEVICE, self.pid) def NextModulationFrequency(self): self.frequencies = self.frequencies[1:] self._SetModulationFrequency() def ResetState(self): if not self.PidSupported() or not self.Property('current_modulation_frequency'): return 
self.SendSet(ROOT_DEVICE, self.pid, [self.Property('current_modulation_frequency')]) self._wrapper.Run() class SetZeroModulationFrequency(TestMixins.SetZeroByteMixin, OptionalParameterTestFixture): """Set MODULATION_FREQUENCY with a frequency setting of 0.""" PID = 'MODULATION_FREQUENCY' class SetOutOfRangeModulationFrequency(TestMixins.SetOutOfRangeByteMixin, OptionalParameterTestFixture): """Set MODULATION_FREQUENCY to an out-of-range value.""" PID = 'MODULATION_FREQUENCY' REQUIRES = ['number_modulation_frequencies'] LABEL = 'modulation frequencies' class SetModulationFrequencyWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set MODULATION_FREQUENCY without any data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'MODULATION_FREQUENCY' class AllSubDevicesGetModulationFrequency(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get MODULATION_FREQUENCY addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'MODULATION_FREQUENCY' # MODULATION_FREQUENCY_DESCRIPTION #------------------------------------------------------------------------------ class GetModulationFrequencyDescription(TestMixins.GetSettingDescriptionsMixin, OptionalParameterTestFixture): """Get the MODULATION_FREQUENCY_DESCRIPTION for all frequencies.""" CATEGORY = TestCategory.DIMMER_SETTINGS PID = 'MODULATION_FREQUENCY_DESCRIPTION' REQUIRES = ['number_modulation_frequencies'] EXPECTED_FIELD = 'modulation_frequency' class GetModulationFrequencyDescriptionWithNoData(TestMixins.GetWithNoDataMixin, OptionalParameterTestFixture): """Get MODULATION_FREQUENCY_DESCRIPTION with no frequency number.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'MODULATION_FREQUENCY_DESCRIPTION' class GetModulationFrequencyDescriptionWithExtraData( TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get MODULATION_FREQUENCY_DESCRIPTION with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'MODULATION_FREQUENCY_DESCRIPTION' class 
GetZeroModulationFrequencyDescription(TestMixins.GetZeroByteMixin, OptionalParameterTestFixture): """Get MODULATION_FREQUENCY_DESCRIPTION for frequency 0.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'MODULATION_FREQUENCY_DESCRIPTION' class GetOutOfRangeModulationFrequencyDescription( TestMixins.GetOutOfRangeByteMixin, OptionalParameterTestFixture): """Get MODULATION_FREQUENCY_DESCRIPTION for an out-of-range frequency.""" PID = 'MODULATION_FREQUENCY_DESCRIPTION' REQUIRES = ['number_modulation_frequencies'] LABEL = 'modulation frequencies' class SetModulationFrequencyDescription(TestMixins.UnsupportedSetMixin, ResponderTestFixture): """SET MODULATION_FREQUENCY_DESCRIPTION.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'MODULATION_FREQUENCY_DESCRIPTION' class AllSubDevicesGetModulationFrequencyDescription( TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get MODULATION_FREQUENCY_DESCRIPTION addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'MODULATION_FREQUENCY_DESCRIPTION' DATA = [1] # PRESET_INFO #------------------------------------------------------------------------------ class GetPresetInfo(TestMixins.GetMixin, OptionalParameterTestFixture): """Get PRESET_INFO.""" CATEGORY = TestCategory.CONTROL PID = 'PRESET_INFO' PROVIDES = ['preset_info', 'max_scene_number'] def Test(self): self.AddIfGetSupported(self.AckGetResult()) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not response.WasAcked(): self.SetProperty('preset_info', {}) self.SetProperty('max_scene_number', None) return self.CheckBounds(fields, 'preset_fade_time') self.CheckBounds(fields, 'preset_wait_time') self.CheckBounds(fields, 'fail_delay_time') self.CheckBounds(fields, 'fail_hold_time') self.CheckBounds(fields, 'startup_delay_time') self.CheckBounds(fields, 'startup_hold_time') if fields['max_scene_number'] == 0xffff: self.AddWarning('PRESET_INFO had max_scene_number of 0xffff') 
self.CrossCheckPidSupportIsZero('DMX_FAIL_MODE', fields, 'fail_infinite_hold_supported') self.CrossCheckPidSupportIsZero('DMX_FAIL_MODE', fields, 'fail_infinite_delay_supported') self.CrossCheckPidSupportIsZero('DMX_STARTUP_MODE', fields, 'startup_infinite_hold_supported') self.CrossCheckPidSupportIsMax('DMX_FAIL_MODE', fields, 'fail_delay_time') self.CrossCheckPidSupportIsMax('DMX_FAIL_MODE', fields, 'fail_hold_time') self.CrossCheckPidSupportIsMax('DMX_STARTUP_MODE', fields, 'startup_delay_time') self.CrossCheckPidSupportIsMax('DMX_STARTUP_MODE', fields, 'startup_hold_time') self.SetProperty('preset_info', fields) self.SetProperty('max_scene_number', fields['max_scene_number']) def CrossCheckPidSupportIsZero(self, pid_name, fields, key): if not (self.IsSupported(pid_name) or fields[key] == False): self.AddWarning('%s not supported, but %s in PRESET_INFO is non-0' % (pid_name, key)) def CrossCheckPidSupportIsMax(self, pid_name, fields, key): for key in ['min_%s' % key, 'max_%s' % key]: if not (self.IsSupported(pid_name) or fields[key] == 0xffff): self.AddWarning( '%s not supported, but %s in PRESET_INFO is not 0xffff' % (pid_name, key)) def IsSupported(self, pid_name): pid = self.LookupPid(pid_name) return pid.value in self.Property('supported_parameters') def CheckBounds(self, fields, key): min_key = 'min_%s' % key max_key = 'max_%s' % key if fields[min_key] > fields[max_key]: self.AddAdvisory('%s (%d) > %s (%d)' % (min_key, fields[min_key], max_key, fields[max_key])) class GetPresetInfoWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get PRESET_INFO with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PRESET_INFO' class SetPresetInfo(ResponderTestFixture, DeviceInfoTest): """Set PRESET_INFO.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PRESET_INFO' def Test(self): self.AddExpectedResults(TestMixins.UnsupportedSetNacks(self.pid)) self.SendRawSet(ROOT_DEVICE, self.pid) class 
AllSubDevicesGetPresetInfo(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get PRESET_INFO addressed 0to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'PRESET_INFO' # PRESET_STATUS #------------------------------------------------------------------------------ class GetPresetStatusPresetOff(OptionalParameterTestFixture): """Get the PRESET_STATUS for PRESET_PLAYBACK_OFF.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PRESET_STATUS' def Test(self): self.AddIfGetSupported(self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) data = struct.pack('!H', 0) self.SendRawGet(ROOT_DEVICE, self.pid, data) class GetPresetStatusPresetScene(OptionalParameterTestFixture): """Get the PRESET_STATUS for PRESET_PLAYBACK_SCENE.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PRESET_STATUS' def Test(self): self.AddIfGetSupported(self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) data = struct.pack('!H', 0xffff) self.SendRawGet(ROOT_DEVICE, self.pid, data) class GetOutOfRangePresetStatus(OptionalParameterTestFixture): """Get the PRESET_STATUS for max_scene + 1.""" CATEGORY = TestCategory.CONTROL PID = 'PRESET_STATUS' REQUIRES = ['max_scene_number'] def Test(self): max_scene = self.Property('max_scene_number') if max_scene is None or max_scene == 0xfffe: self.SetNotRun('Device supports all scenes') return self.AddIfGetSupported(self.NackGetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) self.SendGet(ROOT_DEVICE, self.pid, [max_scene + 1]) class GetPresetStatus(OptionalParameterTestFixture): """Get the PRESET_STATUS for all scenes.""" CATEGORY = TestCategory.CONTROL PID = 'PRESET_STATUS' REQUIRES = ['max_scene_number', 'preset_info'] PROVIDES = ['scene_writable_states'] NOT_PROGRAMMED = 0 PROGRAMMED = 1 READ_ONLY = 2 def Test(self): self.scene_writable_states = {} self.index = 0 self.max_scene = self.Property('max_scene_number') preset_info = self.Property('preset_info') self.min_fade = preset_info.get('min_preset_fade_time', 0) self.max_fade = 
preset_info.get('max_preset_fade_time', 0xffff) self.min_wait = preset_info.get('min_preset_wait_time', 0) self.max_wait = preset_info.get('max_preset_wait_time', 0xffff) if self.max_scene is None or self.max_scene == 0: self.SetNotRun('No scenes supported') return self.FetchNextScene() def FetchNextScene(self): self.index += 1 if self.index > self.max_scene: self.SetProperty('scene_writable_states', self.scene_writable_states) self.Stop() return self.AddIfGetSupported(self.AckGetResult(action=self.FetchNextScene)) self.SendGet(ROOT_DEVICE, self.pid, [self.index]) def VerifyResult(self, response, fields): if not response.WasAcked(): return if fields['scene_number'] != self.index: self.SetFailed('Scene number mismatch, expected %d, got %d' % (self.index, fields['scene_number'])) self.Stop() if fields['programmed'] == self.NOT_PROGRAMMED: # assume that NOT_PROGRAMMED means that it's writable. self.scene_writable_states[self.index] = True self.CheckFieldIsZero(fields, 'down_fade_time') self.CheckFieldIsZero(fields, 'up_fade_time') self.CheckFieldIsZero(fields, 'wait_time') return elif fields['programmed'] == self.READ_ONLY: self.scene_writable_states[self.index] = False else: self.scene_writable_states[self.index] = True for key in ['up_fade_time', 'down_fade_time']: self.CheckFieldIsBetween(fields, key, self.min_fade, self.max_fade) self.CheckFieldIsBetween(fields, 'wait_time', self.min_wait, self.max_wait) def CheckFieldIsZero(self, fields, key): if fields[key] != 0: self.AddWarning( '%s for scene %d was not zero, value is %d' % (key, self.index, fields[key])) def CheckFieldIsBetween(self, fields, key, min_value, max_value): if fields[key] < min_value: self.AddWarning( '%s for scene %d (%d s) is less than the min of %s' % (key, self.index, fields[key], min_value)) if fields[key] > max_value: self.AddWarning( '%s for scene %d (%d s) is more than the min of %s' % (key, self.index, fields[key], max_value)) class GetPresetStatusWithNoData(TestMixins.GetWithNoDataMixin, 
OptionalParameterTestFixture): """Get the PRESET_STATUS with no preset number specified.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PRESET_STATUS' class SetPresetStatusWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set PRESET_STATUS without any data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PRESET_STATUS' class SetPresetStatusPresetOff(TestMixins.SetPresetStatusMixin, OptionalParameterTestFixture): """Set the PRESET_STATUS for PRESET_PLAYBACK_OFF.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PRESET_STATUS' def Test(self): self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) data = self.BuildPresetStatus(0) self.SendRawSet(ROOT_DEVICE, self.pid, data) class SetPresetStatusPresetScene(TestMixins.SetPresetStatusMixin, OptionalParameterTestFixture): """Set the PRESET_STATUS for PRESET_PLAYBACK_SCENE.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PRESET_STATUS' def Test(self): self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) data = self.BuildPresetStatus(0xffff) self.SendRawSet(ROOT_DEVICE, self.pid, data) class SetOutOfRangePresetStatus(TestMixins.SetPresetStatusMixin, OptionalParameterTestFixture): """Set the PRESET_STATUS for max_scene + 1.""" CATEGORY = TestCategory.CONTROL PID = 'PRESET_STATUS' REQUIRES = ['max_scene_number', 'preset_info'] def Test(self): max_scene = self.Property('max_scene_number') if max_scene is None or max_scene == 0xfffe: self.SetNotRun('Device supports all scenes') return self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE)) data = self.BuildPresetStatus(max_scene + 1) self.SendRawSet(ROOT_DEVICE, self.pid, data) class ClearReadOnlyPresetStatus(OptionalParameterTestFixture): """Attempt to clear a read only preset.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PRESET_STATUS' REQUIRES = ['scene_writable_states', 'preset_info'] def Test(self): self.scene = None scene_writable_states = 
self.Property('scene_writable_states') if scene_writable_states is not None: for scene_number, is_writeable in scene_writable_states.iteritems(): if not is_writeable: self.scene = scene_number break if self.scene is None: self.SetNotRun('No read-only scenes found') return preset_info = self.Property('preset_info') fade_time = 0 wait_time = 0 if preset_info: fade_time = preset_info['min_preset_fade_time'] wait_time = preset_info['min_preset_wait_time'] # don't use AddIfSetSupported here, because we don't want to log an # advisory for NR_WRITE_PROTECT if self.PidSupported(): results = self.NackSetResult(RDMNack.NR_WRITE_PROTECT) else: results = self.NackSetResult(RDMNack.NR_UNKNOWN_PID) self.AddExpectedResults(results) self.SendSet(ROOT_DEVICE, self.pid, [self.scene, fade_time, fade_time, wait_time, True]) class SetPresetStatus(OptionalParameterTestFixture): """Set the PRESET_STATUS.""" CATEGORY = TestCategory.CONTROL PID = 'PRESET_STATUS' REQUIRES = ['scene_writable_states', 'preset_info'] def Test(self): self.scene = None scene_writable_states = self.Property('scene_writable_states') if scene_writable_states is not None: for scene_number, is_writeable in scene_writable_states.iteritems(): if is_writeable: self.scene = scene_number break if self.scene is None: self.SetNotRun('No writeable scenes found') return self.max_fade = 0xffff self.max_wait = 0xffff preset_info = self.Property('preset_info') if preset_info is not None: self.max_fade = round(preset_info['max_preset_fade_time'], 1) self.max_wait = round(preset_info['max_preset_wait_time'], 1) self.AddIfSetSupported(self.AckSetResult(action=self.VerifySet)) self.SendSet(ROOT_DEVICE, self.pid, [self.scene, self.max_fade, self.max_fade, self.max_wait, False]) def VerifySet(self): self.AddExpectedResults(self.AckGetResult(field_values={ 'up_fade_time': self.max_fade, 'wait_time': self.max_wait, 'scene_number': self.scene, 'down_fade_time': self.max_fade, })) self.SendGet(ROOT_DEVICE, self.pid, [self.scene]) class 
ClearPresetStatus(OptionalParameterTestFixture): """Set the PRESET_STATUS with clear preset = 1""" CATEGORY = TestCategory.CONTROL PID = 'PRESET_STATUS' REQUIRES = ['scene_writable_states', 'preset_info'] def Test(self): self.scene = None scene_writable_states = self.Property('scene_writable_states') if scene_writable_states is not None: for scene_number, is_writeable in scene_writable_states.iteritems(): if is_writeable: self.scene = scene_number break if self.scene is None: self.SetNotRun('No writeable scenes found') return self.AddIfSetSupported(self.AckSetResult(action=self.VerifySet)) # we use made up values here to check that the device doesn't use them self.SendSet(ROOT_DEVICE, self.pid, [self.scene, 10, 10, 20, True]) def VerifySet(self): self.AddExpectedResults(self.AckGetResult(field_values={ 'up_fade_time': 0.0, 'wait_time': 0.0, 'scene_number': self.scene, 'programmed': 0, 'down_fade_time': 0.0, })) self.SendGet(ROOT_DEVICE, self.pid, [self.scene]) class AllSubDevicesGetPresetStatus(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get PRESET_STATUS addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'PRESET_STATUS' DATA = [1] # PRESET_MERGEMODE #------------------------------------------------------------------------------ class GetPresetMergeMode(TestMixins.GetMixin, OptionalParameterTestFixture): """Get PRESET_MERGEMODE with extra data.""" CATEGORY = TestCategory.CONTROL PID = 'PRESET_MERGEMODE' PROVIDES = ['preset_mergemode'] def Test(self): self.AddIfGetSupported(self.AckGetResult()) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if not response.WasAcked(): self.SetProperty('preset_mergemode', None) return self.SetProperty('preset_mergemode', fields['merge_mode']) class GetPresetMergeModeWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): """Get PRESET_MERGEMODE with extra data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PRESET_MERGEMODE' class 
SetPresetMergeMode(OptionalParameterTestFixture): """Set PRESET_MERGEMODE.""" CATEGORY = TestCategory.CONTROL PID = 'PRESET_MERGEMODE' REQUIRES = ['preset_mergemode'] PROVIDES = ['set_preset_mergemode_supported'] def Test(self): self.value = self.Property('preset_mergemode') if self.value is None: self.value = 0 self.in_set = True self.AddIfSetSupported([ self.AckSetResult(action=self.VerifySet), self.NackSetResult( RDMNack.NR_UNSUPPORTED_COMMAND_CLASS, advisory='SET for %s returned unsupported command class' % self.PID), ]) self.SendSet(ROOT_DEVICE, self.pid, [self.value]) def VerifySet(self): self.AddExpectedResults( self.AckGetResult(field_values={'merge_mode': self.value})) self.SendGet(ROOT_DEVICE, self.pid) def VerifyResult(self, response, fields): if self.in_set: self.SetProperty(self.PROVIDES[0], response.WasAcked()) self.in_set = False class SetAllPresetMergeModes(OptionalParameterTestFixture): """Set PRESET_MERGEMODE to each of the defined values.""" CATEGORY = TestCategory.CONTROL PID = 'PRESET_MERGEMODE' REQUIRES = ['preset_mergemode', 'set_preset_mergemode_supported'] MODES = [0, 1, 2, 3, 0xff] def Test(self): if not self.Property('set_preset_mergemode_supported'): self.SetNotRun('SET PRESET_MERGEMODE not supported') return self.old_value = self.Property('preset_mergemode') self.merge_modes = [m for m in self.MODES if m != self.old_value] # PerformSet pop's the last value, so we add a dummy value to the end of # the list. 
self.merge_modes.append(self.old_value) self.PerformSet() def PerformSet(self): self.merge_modes.pop() if not self.merge_modes: self.Stop() return self.AddIfSetSupported([ self.AckSetResult(action=self.VerifySet), self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE, action=self.PerformSet), ]) self.SendSet(ROOT_DEVICE, self.pid, [self.merge_modes[-1]]) def VerifySet(self): self.AddExpectedResults( self.AckGetResult(field_values={'merge_mode': self.merge_modes[-1]}, action=self.PerformSet)) self.SendGet(ROOT_DEVICE, self.pid) def ResetState(self): self.AddExpectedResults(self.AckSetResult()) self.SendSet(ROOT_DEVICE, self.pid, [self.old_value]) self._wrapper.Run() class SetPresetMergeModeWithNoData(TestMixins.SetWithNoDataMixin, OptionalParameterTestFixture): """Set PRESET_MERGEMODE without any data.""" CATEGORY = TestCategory.ERROR_CONDITIONS PID = 'PRESET_MERGEMODE' class AllSubDevicesGetPresetMergeMode(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): """Get PRESET_MERGEMODE addressed to ALL_SUB_DEVICES.""" CATEGORY = TestCategory.SUB_DEVICES PID = 'PRESET_MERGEMODE' # Cross check the control fields with various other properties #------------------------------------------------------------------------------ class SubDeviceControlField(TestFixture): """Check that the sub device control field is correct.""" CATEGORY = TestCategory.CORE REQUIRES = ['mute_control_fields', 'sub_device_count'] def Test(self): sub_device_field = self.Property('mute_control_fields') & 0x02 if self.Property('sub_device_count') > 0: if sub_device_field == 0: self.SetFailed('Sub devices reported but control field not set') return else: if sub_device_field: self.SetFailed('No Sub devices reported but control field is set') return self.SetPassed() class ProxiedDevicesControlField(TestFixture): """Check that the proxied devices control field is correct.""" CATEGORY = TestCategory.CORE REQUIRES = ['mute_control_fields', 'supported_parameters'] def Test(self): proxied_devices_pid = 
self.LookupPid('PROXIED_DEVICES') supports_proxied_devices_pid = ( proxied_devices_pid.value in self.Property('supported_parameters')) managed_proxy_field = self.Property('mute_control_fields') & 0x01 if supports_proxied_devices_pid and managed_proxy_field == 0: self.AddWarning( "Support for PROXIED_DEVICES declared but the managed " "proxy control field isn't set") return elif not supports_proxied_devices_pid and managed_proxy_field == 1: self.SetFailed( "Managed proxy control bit is set, but proxied devices isn't " "supported") return self.SetPassed()
lgpl-2.1
antepsis/anteplahmacun
sympy/matrices/expressions/tests/test_indexing.py
85
2177
from sympy import (symbols, MatrixSymbol, MatPow, BlockMatrix, Identity, ZeroMatrix, ImmutableMatrix, eye, Sum) from sympy.utilities.pytest import raises k, l, m, n = symbols('k l m n', integer=True) i, j = symbols('i j', integer=True) W = MatrixSymbol('W', k, l) X = MatrixSymbol('X', l, m) Y = MatrixSymbol('Y', l, m) Z = MatrixSymbol('Z', m, n) A = MatrixSymbol('A', 2, 2) B = MatrixSymbol('B', 2, 2) x = MatrixSymbol('x', 1, 2) y = MatrixSymbol('x', 2, 1) def test_symbolic_indexing(): x12 = X[1, 2] assert all(s in str(x12) for s in ['1', '2', X.name]) # We don't care about the exact form of this. We do want to make sure # that all of these features are present def test_add_index(): assert (X + Y)[i, j] == X[i, j] + Y[i, j] def test_mul_index(): assert (A*y)[0, 0] == A[0, 0]*y[0, 0] + A[0, 1]*y[1, 0] assert (A*B).as_mutable() == (A.as_mutable() * B.as_mutable()) X = MatrixSymbol('X', n, m) Y = MatrixSymbol('Y', m, k) result = (X*Y)[4,2] expected = Sum(X[4, i]*Y[i, 2], (i, 0, m - 1)) assert result.args[0].dummy_eq(expected.args[0], i) assert result.args[1][1:] == expected.args[1][1:] def test_pow_index(): Q = MatPow(A, 2) assert Q[0, 0] == A[0, 0]**2 + A[0, 1]*A[1, 0] def test_transpose_index(): assert X.T[i, j] == X[j, i] def test_Identity_index(): I = Identity(3) assert I[0, 0] == I[1, 1] == I[2, 2] == 1 assert I[1, 0] == I[0, 1] == I[2, 1] == 0 raises(IndexError, lambda: I[3, 3]) def test_block_index(): I = Identity(3) Z = ZeroMatrix(3, 3) B = BlockMatrix([[I, I], [I, I]]) e3 = ImmutableMatrix(eye(3)) BB = BlockMatrix([[e3, e3], [e3, e3]]) assert B[0, 0] == B[3, 0] == B[0, 3] == B[3, 3] == 1 assert B[4, 3] == B[5, 1] == 0 BB = BlockMatrix([[e3, e3], [e3, e3]]) assert B.as_explicit() == BB.as_explicit() BI = BlockMatrix([[I, Z], [Z, I]]) assert BI.as_explicit().equals(eye(6)) def test_slicing(): A.as_explicit()[0, :] # does not raise an error def test_errors(): raises(IndexError, lambda: Identity(2)[1, 2, 3, 4, 5]) raises(IndexError, lambda: Identity(2)[[1, 2, 3, 
4, 5]])
bsd-3-clause
skim1420/spinnaker
spinbot/event/release_branch_pull_request_handler.py
1
2049
from .handler import Handler from .pull_request_event import GetBaseBranch, GetPullRequest, GetTitle, GetRepo from gh import ReleaseBranchFor, ParseCommitMessage format_message = ('Features cannot be merged into release branches. The following commits ' + 'are not tagged as one of "{}":\n\n{}\n\n' + 'Read more about [commit conventions](https://www.spinnaker.io/community/contributing/submitting/#commit-message-conventions) ' + 'and [patch releases](https://www.spinnaker.io/community/releases/release-cadence/#patching-the-release-candidate) here.') class ReleaseBranchPullRequestHandler(Handler): def __init__(self): super().__init__() self.omit_repos = self.config.get('omit_repos', []) self.allowed_types = self.config.get( 'allowed_types', ['fix', 'chore', 'docs', 'test'] ) def handles(self, event): return (event.type == 'PullRequestEvent' and event.payload.get('action') == 'opened' and ReleaseBranchFor(GetBaseBranch(event)) != None) def handle(self, g, event): repo = GetRepo(event) if repo in self.omit_repos: self.logging.info('Skipping {} because it\'s in omitted repo {}'.format(event, repo)) return pull_request = GetPullRequest(g, event) if pull_request is None: self.logging.warn('Unable to determine PR that created {}'.format(event)) return commits = pull_request.get_commits() bad_commits = [] for commit in commits: message = ParseCommitMessage(commit.commit.message) if message is None or message.get('type') not in self.allowed_types: bad_commits.append(commit.commit) if len(bad_commits) > 0: pull_request.create_issue_comment(format_message.format( ', '.join(self.allowed_types), '\n\n'.join(map(lambda c: '{}: {}'.format(c.sha, c.message), bad_commits)) )) ReleaseBranchPullRequestHandler()
apache-2.0
wjwwood/open-robotics-platform
template.py
1
1949
#!/usr/bin/env python -OO # encoding: utf-8 ########### # ORP - Open Robotics Platform # # Copyright (c) 2010 John Harrison, William Woodall # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. ########## """ ${TM_NEW_FILE_BASENAME}.py - <PURPOSE> Created by ${TM_FULLNAME} on ${TM_DATE}. """ __author__ = "William Woodall" __copyright__ = "Copyright (c) 2010 John Harrison, William Woodall" ### Imports ### # Standard Python Libraries import sys import os try: # try to catch any missing dependancies # <PKG> for <PURPOSE> PKGNAME = '<EASY_INSTALL NAME>' import <LIBRARY NAME> del PKGNAME except ImportError as PKG_ERROR: # We are missing something, let them know... sys.stderr.write(str(PKG_ERROR)+"\nYou might not have the "+PKGNAME+" \ module, try 'easy_install "+PKGNAME+"', else consult google.") ### Class ### ### Functions ### def main(): pass ### IfMain ### if __name__ == '__main__': main()
mit
tiagofrepereira2012/tensorflow
tensorflow/contrib/learn/python/learn/ops/seq2seq_ops_test.py
79
4314
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Sequence-to-sequence tests.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.contrib.learn.python.learn import ops from tensorflow.python.framework import dtypes from tensorflow.python.ops import array_ops from tensorflow.python.ops import rnn_cell from tensorflow.python.platform import test class Seq2SeqOpsTest(test.TestCase): """Sequence-to-sequence tests.""" def test_sequence_classifier(self): with self.test_session() as session: decoding = [ array_ops.placeholder(dtypes.float32, [2, 2]) for _ in range(3) ] labels = [array_ops.placeholder(dtypes.float32, [2, 2]) for _ in range(3)] sampling_decoding = [ array_ops.placeholder(dtypes.float32, [2, 2]) for _ in range(3) ] predictions, loss = ops.sequence_classifier(decoding, labels, sampling_decoding) pred, cost = session.run( [predictions, loss], feed_dict={ decoding[0].name: [[0.1, 0.9], [0.7, 0.3]], decoding[1].name: [[0.9, 0.1], [0.8, 0.2]], decoding[2].name: [[0.5, 0.5], [0.4, 0.6]], labels[0].name: [[1, 0], [0, 1]], labels[1].name: [[1, 0], [0, 1]], labels[2].name: [[1, 0], [0, 1]], sampling_decoding[0].name: [[0.1, 0.9], [0.7, 0.3]], sampling_decoding[1].name: [[0.9, 0.1], [0.8, 0.2]], sampling_decoding[2].name: [[0.5, 0.5], [0.4, 0.6]], 
}) self.assertAllEqual(pred.argmax(axis=2), [[1, 0, 0], [0, 0, 1]]) self.assertAllClose(cost, 4.7839908599) def test_seq2seq_inputs(self): inp = np.array([[[1, 0], [0, 1], [1, 0]], [[0, 1], [1, 0], [0, 1]]]) out = np.array([[[0, 1, 0], [1, 0, 0]], [[1, 0, 0], [0, 1, 0]]]) with self.test_session() as session: x = array_ops.placeholder(dtypes.float32, [2, 3, 2]) y = array_ops.placeholder(dtypes.float32, [2, 2, 3]) in_x, in_y, out_y = ops.seq2seq_inputs(x, y, 3, 2) enc_inp = session.run(in_x, feed_dict={x.name: inp}) dec_inp = session.run(in_y, feed_dict={x.name: inp, y.name: out}) dec_out = session.run(out_y, feed_dict={x.name: inp, y.name: out}) # Swaps from batch x len x height to list of len of batch x height. self.assertAllEqual(enc_inp, np.swapaxes(inp, 0, 1)) self.assertAllEqual(dec_inp, [[[0, 0, 0], [0, 0, 0]], [[0, 1, 0], [1, 0, 0]], [[1, 0, 0], [0, 1, 0]]]) self.assertAllEqual(dec_out, [[[0, 1, 0], [1, 0, 0]], [[1, 0, 0], [0, 1, 0]], [[0, 0, 0], [0, 0, 0]]]) def test_rnn_decoder(self): with self.test_session(): decoder_inputs = [ array_ops.placeholder(dtypes.float32, [2, 2]) for _ in range(3) ] encoding = array_ops.placeholder(dtypes.float32, [2, 2]) cell = rnn_cell.GRUCell(2) outputs, states, sampling_outputs, sampling_states = ( ops.rnn_decoder(decoder_inputs, encoding, cell)) self.assertEqual(len(outputs), 3) self.assertEqual(outputs[0].get_shape(), [2, 2]) self.assertEqual(len(states), 4) self.assertEqual(states[0].get_shape(), [2, 2]) self.assertEqual(len(sampling_outputs), 3) self.assertEqual(sampling_outputs[0].get_shape(), [2, 2]) self.assertEqual(len(sampling_states), 4) self.assertEqual(sampling_states[0].get_shape(), [2, 2]) if __name__ == "__main__": test.main()
apache-2.0
voidz777/android_kernel_samsung_tuna
tools/perf/scripts/python/sctop.py
11180
1924
# system call top # (c) 2010, Tom Zanussi <tzanussi@gmail.com> # Licensed under the terms of the GNU GPL License version 2 # # Periodically displays system-wide system call totals, broken down by # syscall. If a [comm] arg is specified, only syscalls called by # [comm] are displayed. If an [interval] arg is specified, the display # will be refreshed every [interval] seconds. The default interval is # 3 seconds. import os, sys, thread, time sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import * usage = "perf script -s sctop.py [comm] [interval]\n"; for_comm = None default_interval = 3 interval = default_interval if len(sys.argv) > 3: sys.exit(usage) if len(sys.argv) > 2: for_comm = sys.argv[1] interval = int(sys.argv[2]) elif len(sys.argv) > 1: try: interval = int(sys.argv[1]) except ValueError: for_comm = sys.argv[1] interval = default_interval syscalls = autodict() def trace_begin(): thread.start_new_thread(print_syscall_totals, (interval,)) pass def raw_syscalls__sys_enter(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, id, args): if for_comm is not None: if common_comm != for_comm: return try: syscalls[id] += 1 except TypeError: syscalls[id] = 1 def print_syscall_totals(interval): while 1: clear_term() if for_comm is not None: print "\nsyscall events for %s:\n\n" % (for_comm), else: print "\nsyscall events:\n\n", print "%-40s %10s\n" % ("event", "count"), print "%-40s %10s\n" % ("----------------------------------------", \ "----------"), for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \ reverse = True): try: print "%-40s %10d\n" % (syscall_name(id), val), except TypeError: pass syscalls.clear() time.sleep(interval)
gpl-2.0
rodrigc/buildbot
master/buildbot/mq/wamp.py
3
4065
# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json from autobahn.wamp.exception import TransportLost from autobahn.wamp.types import PublishOptions from autobahn.wamp.types import SubscribeOptions from twisted.internet import defer from twisted.python import log from buildbot.mq import base from buildbot.util import service from buildbot.util import toJson class WampMQ(service.ReconfigurableServiceMixin, base.MQBase): NAMESPACE = "org.buildbot.mq" def produce(self, routingKey, data): d = self._produce(routingKey, data) d.addErrback( log.err, "Problem while producing message on topic " + repr(routingKey)) @classmethod def messageTopic(cls, routingKey): def ifNone(v, default): return default if v is None else v # replace None values by "" in routing key routingKey = [ifNone(key, "") for key in routingKey] # then join them with "dot", and add the prefix return cls.NAMESPACE + "." 
+ ".".join(routingKey) @classmethod def routingKeyFromMessageTopic(cls, topic): # just split the topic, and remove the NAMESPACE prefix return tuple(topic[len(WampMQ.NAMESPACE) + 1:].split(".")) def _produce(self, routingKey, data): _data = json.loads(json.dumps(data, default=toJson)) options = PublishOptions(exclude_me=False) return self.master.wamp.publish(self.messageTopic(routingKey), _data, options=options) def startConsuming(self, callback, _filter, persistent_name=None): if persistent_name is not None: log.err('wampmq: persistent queues are not persisted: {} {}'.format(persistent_name, _filter)) qr = QueueRef(self, callback) self._startConsuming(qr, callback, _filter) return defer.succeed(qr) def _startConsuming(self, qr, callback, _filter, persistent_name=None): return qr.subscribe(self.master.wamp, self, _filter) class QueueRef(base.QueueRef): def __init__(self, mq, callback): super().__init__(callback) self.unreg = None self.mq = mq @defer.inlineCallbacks def subscribe(self, connector_service, wamp_service, _filter): self.filter = _filter self.emulated = False options = dict(details_arg=str('details')) if None in _filter: options["match"] = "wildcard" options = SubscribeOptions(**options) _filter = WampMQ.messageTopic(_filter) self.unreg = yield connector_service.subscribe(self.wampInvoke, _filter, options=options) if self.callback is None: yield self.stopConsuming() def wampInvoke(self, msg, details): if details.topic is not None: # in the case of a wildcard, wamp router sends the topic topic = WampMQ.routingKeyFromMessageTopic(details.topic) else: # in the case of an exact match, then we can use our own topic topic = self.filter self.mq.invokeQref(self, topic, msg) @defer.inlineCallbacks def stopConsuming(self): self.callback = None if self.unreg is not None: unreg = self.unreg self.unreg = None try: yield unreg.unsubscribe() except TransportLost: pass
gpl-2.0
Ebag333/Pyfa
eos/effects/subsystembonusgallentedefensivearmoredwarfare.py
1
1528
# subSystemBonusGallenteDefensiveArmoredWarfare # # Used by: # Subsystem: Proteus Defensive - Warfare Processor type = "passive" def handler(fit, src, context): fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Armored Command"), "buffDuration", src.getModifiedItemAttr("subsystemBonusGallenteDefensive"), skill="Gallente Defensive Systems") fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Armored Command"), "warfareBuff3Value", src.getModifiedItemAttr("subsystemBonusGallenteDefensive"), skill="Gallente Defensive Systems") fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Armored Command"), "warfareBuff4Value", src.getModifiedItemAttr("subsystemBonusGallenteDefensive"), skill="Gallente Defensive Systems") fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Armored Command"), "warfareBuff2Value", src.getModifiedItemAttr("subsystemBonusGallenteDefensive"), skill="Gallente Defensive Systems") fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Armored Command"), "warfareBuff1Value", src.getModifiedItemAttr("subsystemBonusGallenteDefensive"), skill="Gallente Defensive Systems")
gpl-3.0
mancoast/CPythonPyc_test
cpython/270_test_pep247.py
3
2042
""" Test suite to check compilance with PEP 247, the standard API for hashing algorithms """ import warnings warnings.filterwarnings('ignore', 'the md5 module is deprecated.*', DeprecationWarning) warnings.filterwarnings('ignore', 'the sha module is deprecated.*', DeprecationWarning) import hmac import md5 import sha import unittest from test import test_support class Pep247Test(unittest.TestCase): def check_module(self, module, key=None): self.assertTrue(hasattr(module, 'digest_size')) self.assertTrue(module.digest_size is None or module.digest_size > 0) if not key is None: obj1 = module.new(key) obj2 = module.new(key, 'string') h1 = module.new(key, 'string').digest() obj3 = module.new(key) obj3.update('string') h2 = obj3.digest() else: obj1 = module.new() obj2 = module.new('string') h1 = module.new('string').digest() obj3 = module.new() obj3.update('string') h2 = obj3.digest() self.assertEquals(h1, h2) self.assertTrue(hasattr(obj1, 'digest_size')) if not module.digest_size is None: self.assertEquals(obj1.digest_size, module.digest_size) self.assertEquals(obj1.digest_size, len(h1)) obj1.update('string') obj_copy = obj1.copy() self.assertEquals(obj1.digest(), obj_copy.digest()) self.assertEquals(obj1.hexdigest(), obj_copy.hexdigest()) digest, hexdigest = obj1.digest(), obj1.hexdigest() hd2 = "" for byte in digest: hd2 += '%02x' % ord(byte) self.assertEquals(hd2, hexdigest) def test_md5(self): self.check_module(md5) def test_sha(self): self.check_module(sha) def test_hmac(self): self.check_module(hmac, key='abc') def test_main(): test_support.run_unittest(Pep247Test) if __name__ == '__main__': test_main()
gpl-3.0
xgwubin/vitess
py/vtproto/queryservice_pb2.py
8
17493
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: queryservice.proto from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() import query_pb2 as query__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='queryservice.proto', package='queryservice', syntax='proto3', serialized_pb=b'\n\x12queryservice.proto\x12\x0cqueryservice\x1a\x0bquery.proto2\xe9\x04\n\x05Query\x12I\n\x0cGetSessionId\x12\x1a.query.GetSessionIdRequest\x1a\x1b.query.GetSessionIdResponse\"\x00\x12:\n\x07\x45xecute\x12\x15.query.ExecuteRequest\x1a\x16.query.ExecuteResponse\"\x00\x12I\n\x0c\x45xecuteBatch\x12\x1a.query.ExecuteBatchRequest\x1a\x1b.query.ExecuteBatchResponse\"\x00\x12N\n\rStreamExecute\x12\x1b.query.StreamExecuteRequest\x1a\x1c.query.StreamExecuteResponse\"\x00\x30\x01\x12\x34\n\x05\x42\x65gin\x12\x13.query.BeginRequest\x1a\x14.query.BeginResponse\"\x00\x12\x37\n\x06\x43ommit\x12\x14.query.CommitRequest\x1a\x15.query.CommitResponse\"\x00\x12=\n\x08Rollback\x12\x16.query.RollbackRequest\x1a\x17.query.RollbackResponse\"\x00\x12\x43\n\nSplitQuery\x12\x18.query.SplitQueryRequest\x1a\x19.query.SplitQueryResponse\"\x00\x12K\n\x0cStreamHealth\x12\x1a.query.StreamHealthRequest\x1a\x1b.query.StreamHealthResponse\"\x00\x30\x01\x62\x06proto3' , dependencies=[query__pb2.DESCRIPTOR,]) _sym_db.RegisterFileDescriptor(DESCRIPTOR) import abc from grpc.beta import implementations as beta_implementations from grpc.early_adopter import implementations as early_adopter_implementations from grpc.framework.alpha import utilities as alpha_utilities from grpc.framework.common import cardinality from grpc.framework.interfaces.face import utilities as face_utilities class EarlyAdopterQueryServicer(object): """<fill me in 
later!>""" __metaclass__ = abc.ABCMeta @abc.abstractmethod def GetSessionId(self, request, context): raise NotImplementedError() @abc.abstractmethod def Execute(self, request, context): raise NotImplementedError() @abc.abstractmethod def ExecuteBatch(self, request, context): raise NotImplementedError() @abc.abstractmethod def StreamExecute(self, request, context): raise NotImplementedError() @abc.abstractmethod def Begin(self, request, context): raise NotImplementedError() @abc.abstractmethod def Commit(self, request, context): raise NotImplementedError() @abc.abstractmethod def Rollback(self, request, context): raise NotImplementedError() @abc.abstractmethod def SplitQuery(self, request, context): raise NotImplementedError() @abc.abstractmethod def StreamHealth(self, request, context): raise NotImplementedError() class EarlyAdopterQueryServer(object): """<fill me in later!>""" __metaclass__ = abc.ABCMeta @abc.abstractmethod def start(self): raise NotImplementedError() @abc.abstractmethod def stop(self): raise NotImplementedError() class EarlyAdopterQueryStub(object): """<fill me in later!>""" __metaclass__ = abc.ABCMeta @abc.abstractmethod def GetSessionId(self, request): raise NotImplementedError() GetSessionId.async = None @abc.abstractmethod def Execute(self, request): raise NotImplementedError() Execute.async = None @abc.abstractmethod def ExecuteBatch(self, request): raise NotImplementedError() ExecuteBatch.async = None @abc.abstractmethod def StreamExecute(self, request): raise NotImplementedError() StreamExecute.async = None @abc.abstractmethod def Begin(self, request): raise NotImplementedError() Begin.async = None @abc.abstractmethod def Commit(self, request): raise NotImplementedError() Commit.async = None @abc.abstractmethod def Rollback(self, request): raise NotImplementedError() Rollback.async = None @abc.abstractmethod def SplitQuery(self, request): raise NotImplementedError() SplitQuery.async = None @abc.abstractmethod def StreamHealth(self, 
request): raise NotImplementedError() StreamHealth.async = None def early_adopter_create_Query_server(servicer, port, private_key=None, certificate_chain=None): import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 method_service_descriptions = { "Begin": alpha_utilities.unary_unary_service_description( servicer.Begin, query_pb2.BeginRequest.FromString, query_pb2.BeginResponse.SerializeToString, ), "Commit": alpha_utilities.unary_unary_service_description( servicer.Commit, query_pb2.CommitRequest.FromString, query_pb2.CommitResponse.SerializeToString, ), "Execute": alpha_utilities.unary_unary_service_description( servicer.Execute, query_pb2.ExecuteRequest.FromString, query_pb2.ExecuteResponse.SerializeToString, ), "ExecuteBatch": alpha_utilities.unary_unary_service_description( servicer.ExecuteBatch, query_pb2.ExecuteBatchRequest.FromString, query_pb2.ExecuteBatchResponse.SerializeToString, ), "GetSessionId": alpha_utilities.unary_unary_service_description( servicer.GetSessionId, query_pb2.GetSessionIdRequest.FromString, query_pb2.GetSessionIdResponse.SerializeToString, ), "Rollback": alpha_utilities.unary_unary_service_description( servicer.Rollback, query_pb2.RollbackRequest.FromString, query_pb2.RollbackResponse.SerializeToString, ), "SplitQuery": alpha_utilities.unary_unary_service_description( servicer.SplitQuery, query_pb2.SplitQueryRequest.FromString, query_pb2.SplitQueryResponse.SerializeToString, ), "StreamExecute": alpha_utilities.unary_stream_service_description( servicer.StreamExecute, query_pb2.StreamExecuteRequest.FromString, query_pb2.StreamExecuteResponse.SerializeToString, ), "StreamHealth": alpha_utilities.unary_stream_service_description( servicer.StreamHealth, 
query_pb2.StreamHealthRequest.FromString, query_pb2.StreamHealthResponse.SerializeToString, ), } return early_adopter_implementations.server("queryservice.Query", method_service_descriptions, port, private_key=private_key, certificate_chain=certificate_chain) def early_adopter_create_Query_stub(host, port, metadata_transformer=None, secure=False, root_certificates=None, private_key=None, certificate_chain=None, server_host_override=None): import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 method_invocation_descriptions = { "Begin": alpha_utilities.unary_unary_invocation_description( query_pb2.BeginRequest.SerializeToString, query_pb2.BeginResponse.FromString, ), "Commit": alpha_utilities.unary_unary_invocation_description( query_pb2.CommitRequest.SerializeToString, query_pb2.CommitResponse.FromString, ), "Execute": alpha_utilities.unary_unary_invocation_description( query_pb2.ExecuteRequest.SerializeToString, query_pb2.ExecuteResponse.FromString, ), "ExecuteBatch": alpha_utilities.unary_unary_invocation_description( query_pb2.ExecuteBatchRequest.SerializeToString, query_pb2.ExecuteBatchResponse.FromString, ), "GetSessionId": alpha_utilities.unary_unary_invocation_description( query_pb2.GetSessionIdRequest.SerializeToString, query_pb2.GetSessionIdResponse.FromString, ), "Rollback": alpha_utilities.unary_unary_invocation_description( query_pb2.RollbackRequest.SerializeToString, query_pb2.RollbackResponse.FromString, ), "SplitQuery": alpha_utilities.unary_unary_invocation_description( query_pb2.SplitQueryRequest.SerializeToString, query_pb2.SplitQueryResponse.FromString, ), "StreamExecute": alpha_utilities.unary_stream_invocation_description( query_pb2.StreamExecuteRequest.SerializeToString, 
query_pb2.StreamExecuteResponse.FromString, ), "StreamHealth": alpha_utilities.unary_stream_invocation_description( query_pb2.StreamHealthRequest.SerializeToString, query_pb2.StreamHealthResponse.FromString, ), } return early_adopter_implementations.stub("queryservice.Query", method_invocation_descriptions, host, port, metadata_transformer=metadata_transformer, secure=secure, root_certificates=root_certificates, private_key=private_key, certificate_chain=certificate_chain, server_host_override=server_host_override) class BetaQueryServicer(object): """<fill me in later!>""" __metaclass__ = abc.ABCMeta @abc.abstractmethod def GetSessionId(self, request, context): raise NotImplementedError() @abc.abstractmethod def Execute(self, request, context): raise NotImplementedError() @abc.abstractmethod def ExecuteBatch(self, request, context): raise NotImplementedError() @abc.abstractmethod def StreamExecute(self, request, context): raise NotImplementedError() @abc.abstractmethod def Begin(self, request, context): raise NotImplementedError() @abc.abstractmethod def Commit(self, request, context): raise NotImplementedError() @abc.abstractmethod def Rollback(self, request, context): raise NotImplementedError() @abc.abstractmethod def SplitQuery(self, request, context): raise NotImplementedError() @abc.abstractmethod def StreamHealth(self, request, context): raise NotImplementedError() class BetaQueryStub(object): """The interface to which stubs will conform.""" __metaclass__ = abc.ABCMeta @abc.abstractmethod def GetSessionId(self, request, timeout): raise NotImplementedError() GetSessionId.future = None @abc.abstractmethod def Execute(self, request, timeout): raise NotImplementedError() Execute.future = None @abc.abstractmethod def ExecuteBatch(self, request, timeout): raise NotImplementedError() ExecuteBatch.future = None @abc.abstractmethod def StreamExecute(self, request, timeout): raise NotImplementedError() @abc.abstractmethod def Begin(self, request, timeout): raise 
NotImplementedError() Begin.future = None @abc.abstractmethod def Commit(self, request, timeout): raise NotImplementedError() Commit.future = None @abc.abstractmethod def Rollback(self, request, timeout): raise NotImplementedError() Rollback.future = None @abc.abstractmethod def SplitQuery(self, request, timeout): raise NotImplementedError() SplitQuery.future = None @abc.abstractmethod def StreamHealth(self, request, timeout): raise NotImplementedError() def beta_create_Query_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 request_deserializers = { ('queryservice.Query', 'Begin'): query_pb2.BeginRequest.FromString, ('queryservice.Query', 'Commit'): query_pb2.CommitRequest.FromString, ('queryservice.Query', 'Execute'): query_pb2.ExecuteRequest.FromString, ('queryservice.Query', 'ExecuteBatch'): query_pb2.ExecuteBatchRequest.FromString, ('queryservice.Query', 'GetSessionId'): query_pb2.GetSessionIdRequest.FromString, ('queryservice.Query', 'Rollback'): query_pb2.RollbackRequest.FromString, ('queryservice.Query', 'SplitQuery'): query_pb2.SplitQueryRequest.FromString, ('queryservice.Query', 'StreamExecute'): query_pb2.StreamExecuteRequest.FromString, ('queryservice.Query', 'StreamHealth'): query_pb2.StreamHealthRequest.FromString, } response_serializers = { ('queryservice.Query', 'Begin'): query_pb2.BeginResponse.SerializeToString, ('queryservice.Query', 'Commit'): query_pb2.CommitResponse.SerializeToString, ('queryservice.Query', 'Execute'): query_pb2.ExecuteResponse.SerializeToString, ('queryservice.Query', 'ExecuteBatch'): query_pb2.ExecuteBatchResponse.SerializeToString, ('queryservice.Query', 'GetSessionId'): 
query_pb2.GetSessionIdResponse.SerializeToString, ('queryservice.Query', 'Rollback'): query_pb2.RollbackResponse.SerializeToString, ('queryservice.Query', 'SplitQuery'): query_pb2.SplitQueryResponse.SerializeToString, ('queryservice.Query', 'StreamExecute'): query_pb2.StreamExecuteResponse.SerializeToString, ('queryservice.Query', 'StreamHealth'): query_pb2.StreamHealthResponse.SerializeToString, } method_implementations = { ('queryservice.Query', 'Begin'): face_utilities.unary_unary_inline(servicer.Begin), ('queryservice.Query', 'Commit'): face_utilities.unary_unary_inline(servicer.Commit), ('queryservice.Query', 'Execute'): face_utilities.unary_unary_inline(servicer.Execute), ('queryservice.Query', 'ExecuteBatch'): face_utilities.unary_unary_inline(servicer.ExecuteBatch), ('queryservice.Query', 'GetSessionId'): face_utilities.unary_unary_inline(servicer.GetSessionId), ('queryservice.Query', 'Rollback'): face_utilities.unary_unary_inline(servicer.Rollback), ('queryservice.Query', 'SplitQuery'): face_utilities.unary_unary_inline(servicer.SplitQuery), ('queryservice.Query', 'StreamExecute'): face_utilities.unary_stream_inline(servicer.StreamExecute), ('queryservice.Query', 'StreamHealth'): face_utilities.unary_stream_inline(servicer.StreamHealth), } server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) return beta_implementations.server(method_implementations, options=server_options) def beta_create_Query_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 import query_pb2 
import query_pb2 request_serializers = { ('queryservice.Query', 'Begin'): query_pb2.BeginRequest.SerializeToString, ('queryservice.Query', 'Commit'): query_pb2.CommitRequest.SerializeToString, ('queryservice.Query', 'Execute'): query_pb2.ExecuteRequest.SerializeToString, ('queryservice.Query', 'ExecuteBatch'): query_pb2.ExecuteBatchRequest.SerializeToString, ('queryservice.Query', 'GetSessionId'): query_pb2.GetSessionIdRequest.SerializeToString, ('queryservice.Query', 'Rollback'): query_pb2.RollbackRequest.SerializeToString, ('queryservice.Query', 'SplitQuery'): query_pb2.SplitQueryRequest.SerializeToString, ('queryservice.Query', 'StreamExecute'): query_pb2.StreamExecuteRequest.SerializeToString, ('queryservice.Query', 'StreamHealth'): query_pb2.StreamHealthRequest.SerializeToString, } response_deserializers = { ('queryservice.Query', 'Begin'): query_pb2.BeginResponse.FromString, ('queryservice.Query', 'Commit'): query_pb2.CommitResponse.FromString, ('queryservice.Query', 'Execute'): query_pb2.ExecuteResponse.FromString, ('queryservice.Query', 'ExecuteBatch'): query_pb2.ExecuteBatchResponse.FromString, ('queryservice.Query', 'GetSessionId'): query_pb2.GetSessionIdResponse.FromString, ('queryservice.Query', 'Rollback'): query_pb2.RollbackResponse.FromString, ('queryservice.Query', 'SplitQuery'): query_pb2.SplitQueryResponse.FromString, ('queryservice.Query', 'StreamExecute'): query_pb2.StreamExecuteResponse.FromString, ('queryservice.Query', 'StreamHealth'): query_pb2.StreamHealthResponse.FromString, } cardinalities = { 'Begin': cardinality.Cardinality.UNARY_UNARY, 'Commit': cardinality.Cardinality.UNARY_UNARY, 'Execute': cardinality.Cardinality.UNARY_UNARY, 'ExecuteBatch': cardinality.Cardinality.UNARY_UNARY, 'GetSessionId': cardinality.Cardinality.UNARY_UNARY, 'Rollback': cardinality.Cardinality.UNARY_UNARY, 'SplitQuery': cardinality.Cardinality.UNARY_UNARY, 'StreamExecute': cardinality.Cardinality.UNARY_STREAM, 'StreamHealth': 
cardinality.Cardinality.UNARY_STREAM, } stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) return beta_implementations.dynamic_stub(channel, 'queryservice.Query', cardinalities, options=stub_options) # @@protoc_insertion_point(module_scope)
bsd-3-clause
dakcarto/QGIS
python/plugins/processing/algs/qgis/LinesToPolygons.py
10
3214
# -*- coding: utf-8 -*- """ *************************************************************************** LinesToPolygons.py --------------------- Date : August 2012 Copyright : (C) 2012 by Victor Olaya Email : volayaf at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Victor Olaya' __date__ = 'August 2012' __copyright__ = '(C) 2012, Victor Olaya' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' from qgis.core import QGis, QgsFeature, QgsGeometry from processing.core.GeoAlgorithm import GeoAlgorithm from processing.core.parameters import ParameterVector from processing.core.outputs import OutputVector from processing.tools import dataobjects, vector class LinesToPolygons(GeoAlgorithm): INPUT = 'INPUT' OUTPUT = 'OUTPUT' def defineCharacteristics(self): self.name, self.i18n_name = self.trAlgorithm('Lines to polygons') self.group, self.i18n_group = self.trAlgorithm('Vector geometry tools') self.addParameter(ParameterVector(self.INPUT, self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_LINE])) self.addOutput(OutputVector(self.OUTPUT, self.tr('Polygons from lines'))) def processAlgorithm(self, progress): layer = dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT)) writer = self.getOutputFromName(self.OUTPUT).getVectorWriter( layer.pendingFields().toList(), QGis.WKBPolygon, layer.crs() ) outFeat = QgsFeature() current = 0 features = vector.features(layer) total = 100.0 / float(len(features)) for f in features: outGeomList = [] if f.geometry().isMultipart(): outGeomList = f.geometry().asMultiPolyline() else: 
outGeomList.append(f.geometry().asPolyline()) polyGeom = self.removeBadLines(outGeomList) if len(polyGeom) != 0: outFeat.setGeometry(QgsGeometry.fromPolygon(polyGeom)) attrs = f.attributes() outFeat.setAttributes(attrs) writer.addFeature(outFeat) current += 1 progress.setPercentage(int(current * total)) del writer def removeBadLines(self, lines): geom = [] if len(lines) == 1: if len(lines[0]) > 2: geom = lines else: geom = [] else: geom = [elem for elem in lines if len(elem) > 2] return geom
gpl-2.0
vinhlh/bite-project
deps/mrtaskman/server/mapreduce/lib/pipeline/simplejson/scanner.py
43
2596
#!/usr/bin/env python """JSON token scanner """ import re def _import_c_make_scanner(): try: from mapreduce.lib.simplejson._speedups import make_scanner return make_scanner except ImportError: return None c_make_scanner = _import_c_make_scanner() __all__ = ['make_scanner'] NUMBER_RE = re.compile( r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?', (re.VERBOSE | re.MULTILINE | re.DOTALL)) def py_make_scanner(context): parse_object = context.parse_object parse_array = context.parse_array parse_string = context.parse_string match_number = NUMBER_RE.match encoding = context.encoding strict = context.strict parse_float = context.parse_float parse_int = context.parse_int parse_constant = context.parse_constant object_hook = context.object_hook object_pairs_hook = context.object_pairs_hook memo = context.memo def _scan_once(string, idx): try: nextchar = string[idx] except IndexError: raise StopIteration if nextchar == '"': return parse_string(string, idx + 1, encoding, strict) elif nextchar == '{': return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook, object_pairs_hook, memo) elif nextchar == '[': return parse_array((string, idx + 1), _scan_once) elif nextchar == 'n' and string[idx:idx + 4] == 'null': return None, idx + 4 elif nextchar == 't' and string[idx:idx + 4] == 'true': return True, idx + 4 elif nextchar == 'f' and string[idx:idx + 5] == 'false': return False, idx + 5 m = match_number(string, idx) if m is not None: integer, frac, exp = m.groups() if frac or exp: res = parse_float(integer + (frac or '') + (exp or '')) else: res = parse_int(integer) return res, m.end() elif nextchar == 'N' and string[idx:idx + 3] == 'NaN': return parse_constant('NaN'), idx + 3 elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity': return parse_constant('Infinity'), idx + 8 elif nextchar == '-' and string[idx:idx + 9] == '-Infinity': return parse_constant('-Infinity'), idx + 9 else: raise StopIteration def scan_once(string, idx): try: return 
_scan_once(string, idx) finally: memo.clear() return scan_once make_scanner = c_make_scanner or py_make_scanner
apache-2.0