""" Amazon EC2, Eucalyptus and Nimbus drivers. """ from __future__ import with_statement import sys import base64 import os import copy from xml.etree import ElementTree as ET from libcloud.utils.py3 import b from libcloud.utils.xml import fixxpath, findtext, findattr, findall from libcloud.common.aws import AWSBaseResponse, SignedAWSConnection from libcloud.common.types import (InvalidCredsError, MalformedResponseError, LibcloudError) from libcloud.compute.providers import Provider from libcloud.compute.types import NodeState from libcloud.compute.base import Node, NodeDriver, NodeLocation, NodeSize from libcloud.compute.base import NodeImage, StorageVolume API_VERSION = '2010-08-31' NAMESPACE = 'http://ec2.amazonaws.com/doc/%s/' % (API_VERSION) """ Sizes must be hardcoded, because Amazon doesn't provide an API to fetch them. From http://aws.amazon.com/ec2/instance-types/ """ INSTANCE_TYPES = { 't1.micro': { 'id': 't1.micro', 'name': 'Micro Instance', 'ram': 613, 'disk': 15, 'bandwidth': None }, 'm1.small': { 'id': 'm1.small', 'name': 'Small Instance', 'ram': 1740, 'disk': 160, 'bandwidth': None }, 'm1.medium': { 'id': 'm1.medium', 'name': 'Medium Instance', 'ram': 3700, 'disk': 410, 'bandwidth': None }, 'm1.large': { 'id': 'm1.large', 'name': 'Large Instance', 'ram': 7680, 'disk': 850, 'bandwidth': None }, 'm1.xlarge': { 'id': 'm1.xlarge', 'name': 'Extra Large Instance', 'ram': 15360, 'disk': 1690, 'bandwidth': None }, 'c1.medium': { 'id': 'c1.medium', 'name': 'High-CPU Medium Instance', 'ram': 1740, 'disk': 350, 'bandwidth': None }, 'c1.xlarge': { 'id': 'c1.xlarge', 'name': 'High-CPU Extra Large Instance', 'ram': 7680, 'disk': 1690, 'bandwidth': None }, 'm2.xlarge': { 'id': 'm2.xlarge', 'name': 'High-Memory Extra Large Instance', 'ram': 17510, 'disk': 420, 'bandwidth': None }, 'm2.2xlarge': { 'id': 'm2.2xlarge', 'name': 'High-Memory Double Extra Large Instance', 'ram': 35021, 'disk': 850, 'bandwidth': None }, 'm2.4xlarge': { 'id': 'm2.4xlarge', 'name': 'High-Memory Quadruple Extra Large Instance', 'ram': 70042, 'disk': 1690, 'bandwidth': None }, 'm3.xlarge': { 'id': 'm3.xlarge', 'name': 'Extra Large Instance', 'ram': 15360, 'disk': None, 'bandwidth': None }, 'm3.2xlarge': { 'id': 'm3.2xlarge', 'name': 'Double Extra Large Instance', 'ram': 30720, 'disk': None, 'bandwidth': None }, 'cg1.4xlarge': { 'id': 'cg1.4xlarge', 'name': 'Cluster GPU Quadruple Extra Large Instance', 'ram': 22528, 'disk': 1690, 'bandwidth': None }, 'cc1.4xlarge': { 'id': 'cc1.4xlarge', 'name': 'Cluster Compute Quadruple Extra Large Instance', 'ram': 23552, 'disk': 1690, 'bandwidth': None }, 'cc2.8xlarge': { 'id': 'cc2.8xlarge', 'name': 'Cluster Compute Eight Extra Large Instance', 'ram': 63488, 'disk': 3370, 'bandwidth': None }, 'cr1.8xlarge': { 'id': 'cr1.8xlarge', 'name': 'High Memory Cluster Eight Extra Large', 'ram': 244000, 'disk': 240, 'bandwidth': None }, 'hs1.8xlarge': { 'id': 'hs1.8xlarge', 'name': 'High Storage Eight Extra Large Instance', 'ram': 119808, 'disk': 48000, 'bandwidth': None } } REGION_DETAILS = { 'us-east-1': { 'endpoint': 'ec2.us-east-1.amazonaws.com', 'api_name': 'ec2_us_east', 'country': 'USA', 'instance_types': [ 't1.micro', 'm1.small', 'm1.medium', 'm1.large', 'm1.xlarge', 'm2.xlarge', 'm2.2xlarge', 'm2.4xlarge', 'm3.xlarge', 'm3.2xlarge', 'c1.medium', 'c1.xlarge', 'cc1.4xlarge', 'cc2.8xlarge', 'cg1.4xlarge', 'cr1.8xlarge', 'hs1.8xlarge' ] }, 'us-west-1': { 'endpoint': 'ec2.us-west-1.amazonaws.com', 'api_name': 'ec2_us_west', 'country': 'USA', 'instance_types': [ 't1.micro', 'm1.small', 
'm1.medium', 'm1.large', 'm1.xlarge', 'm2.xlarge', 'm2.2xlarge', 'm2.4xlarge', 'm3.xlarge', 'm3.2xlarge', 'c1.medium', 'c1.xlarge' ] }, 'us-west-2': { 'endpoint': 'ec2.us-west-2.amazonaws.com', 'api_name': 'ec2_us_west_oregon', 'country': 'US', 'instance_types': [ 't1.micro', 'm1.small', 'm1.medium', 'm1.large', 'm1.xlarge', 'm2.xlarge', 'm2.2xlarge', 'm2.4xlarge', 'c1.medium', 'c1.xlarge', 'cc2.8xlarge' ] }, 'eu-west-1': { 'endpoint': 'ec2.eu-west-1.amazonaws.com', 'api_name': 'ec2_eu_west', 'country': 'Ireland', 'instance_types': [ 't1.micro', 'm1.small', 'm1.medium', 'm1.large', 'm1.xlarge', 'm2.xlarge', 'm2.2xlarge', 'm2.4xlarge', 'm3.xlarge', 'm3.2xlarge', 'c1.medium', 'c1.xlarge', 'cc2.8xlarge' ] }, 'ap-southeast-1': { 'endpoint': 'ec2.ap-southeast-1.amazonaws.com', 'api_name': 'ec2_ap_southeast', 'country': 'Singapore', 'instance_types': [ 't1.micro', 'm1.small', 'm1.medium', 'm1.large', 'm1.xlarge', 'm2.xlarge', 'm2.2xlarge', 'm2.4xlarge', 'm3.xlarge', 'm3.2xlarge', 'c1.medium', 'c1.xlarge' ] }, 'ap-northeast-1': { 'endpoint': 'ec2.ap-northeast-1.amazonaws.com', 'api_name': 'ec2_ap_northeast', 'country': 'Japan', 'instance_types': [ 't1.micro', 'm1.small', 'm1.medium', 'm1.large', 'm1.xlarge', 'm2.xlarge', 'm2.2xlarge', 'm2.4xlarge', 'm3.xlarge', 'm3.2xlarge', 'c1.medium', 'c1.xlarge' ] }, 'sa-east-1': { 'endpoint': 'ec2.sa-east-1.amazonaws.com', 'api_name': 'ec2_sa_east', 'country': 'Brazil', 'instance_types': [ 't1.micro', 'm1.small', 'm1.medium', 'm1.large', 'm1.xlarge', 'm2.xlarge', 'm2.2xlarge', 'm2.4xlarge', 'c1.medium', 'c1.xlarge' ] }, 'ap-southeast-2': { 'endpoint': 'ec2.ap-southeast-2.amazonaws.com', 'api_name': 'ec2_ap_southeast_2', 'country': 'Australia', 'instance_types': [ 't1.micro', 'm1.small', 'm1.medium', 'm1.large', 'm1.xlarge', 'm2.xlarge', 'm2.2xlarge', 'm2.4xlarge', 'm3.xlarge', 'm3.2xlarge', 'c1.medium', 'c1.xlarge' ] }, 'nimbus': { # Nimbus clouds have 3 EC2-style instance types but their particular # RAM allocations are configured by the admin 'country': 'custom', 'instance_types': [ 'm1.small', 'm1.large', 'm1.xlarge' ] } } VALID_EC2_DATACENTERS = REGION_DETAILS.keys() VALID_EC2_DATACENTERS = [d for d in VALID_EC2_DATACENTERS if d != 'nimbus'] class EC2NodeLocation(NodeLocation): def __init__(self, id, name, country, driver, availability_zone): super(EC2NodeLocation, self).__init__(id, name, country, driver) self.availability_zone = availability_zone def __repr__(self): return (('<EC2NodeLocation: id=%s, name=%s, country=%s, ' 'availability_zone=%s driver=%s>') % (self.id, self.name, self.country, self.availability_zone, self.driver.name)) class EC2Response(AWSBaseResponse): """ EC2 specific response parsing and error handling. """ def parse_error(self): err_list = [] # Okay, so for Eucalyptus, you can get a 403, with no body, # if you are using the wrong user/password. 
msg = "Failure: 403 Forbidden" if self.status == 403 and self.body[:len(msg)] == msg: raise InvalidCredsError(msg) try: body = ET.XML(self.body) except: raise MalformedResponseError("Failed to parse XML", body=self.body, driver=EC2NodeDriver) for err in body.findall('Errors/Error'): code, message = err.getchildren() err_list.append("%s: %s" % (code.text, message.text)) if code.text == "InvalidClientTokenId": raise InvalidCredsError(err_list[-1]) if code.text == "SignatureDoesNotMatch": raise InvalidCredsError(err_list[-1]) if code.text == "AuthFailure": raise InvalidCredsError(err_list[-1]) if code.text == "OptInRequired": raise InvalidCredsError(err_list[-1]) if code.text == "IdempotentParameterMismatch": raise IdempotentParamError(err_list[-1]) return "\n".join(err_list) class EC2Connection(SignedAWSConnection): """ Represents a single connection to the EC2 Endpoint. """ version = API_VERSION host = REGION_DETAILS['us-east-1']['endpoint'] responseCls = EC2Response class ExEC2AvailabilityZone(object): """ Extension class which stores information about an EC2 availability zone. Note: This class is EC2 specific. """ def __init__(self, name, zone_state, region_name): self.name = name self.zone_state = zone_state self.region_name = region_name def __repr__(self): return (('<ExEC2AvailabilityZone: name=%s, zone_state=%s, ' 'region_name=%s>') % (self.name, self.zone_state, self.region_name)) class BaseEC2NodeDriver(NodeDriver): """ Base Amazon EC2 node driver. Used for main EC2 and other derivate driver classes to inherit from it. """ connectionCls = EC2Connection path = '/' features = {'create_node': ['ssh_key']} NODE_STATE_MAP = { 'pending': NodeState.PENDING, 'running': NodeState.RUNNING, 'shutting-down': NodeState.UNKNOWN, 'terminated': NodeState.TERMINATED } def _pathlist(self, key, arr): """ Converts a key and an array of values into AWS query param format. 
""" params = {} i = 0 for value in arr: i += 1 params["%s.%s" % (key, i)] = value return params def _get_boolean(self, element): tag = "{%s}%s" % (NAMESPACE, 'return') return element.findtext(tag) == 'true' def _get_state_boolean(self, element): """ Checks for the instances's state """ state = findall(element=element, xpath='instancesSet/item/currentState/name', namespace=NAMESPACE)[0].text return state in ('stopping', 'pending', 'starting') def _get_terminate_boolean(self, element): status = element.findtext(".//{%s}%s" % (NAMESPACE, 'name')) return any([term_status == status for term_status in ('shutting-down', 'terminated')]) def _to_nodes(self, object, xpath, groups=None): return [self._to_node(el, groups=groups) for el in object.findall(fixxpath(xpath=xpath, namespace=NAMESPACE))] def _to_node(self, element, groups=None): try: state = self.NODE_STATE_MAP[findattr(element=element, xpath="instanceState/name", namespace=NAMESPACE) ] except KeyError: state = NodeState.UNKNOWN instance_id = findtext(element=element, xpath='instanceId', namespace=NAMESPACE) tags = dict((findtext(element=item, xpath='key', namespace=NAMESPACE), findtext(element=item, xpath='value', namespace=NAMESPACE)) for item in findall(element=element, xpath='tagSet/item', namespace=NAMESPACE) ) name = tags.get('Name', instance_id) public_ip = findtext(element=element, xpath='ipAddress', namespace=NAMESPACE) public_ips = [public_ip] if public_ip else [] private_ip = findtext(element=element, xpath='privateIpAddress', namespace=NAMESPACE) private_ips = [private_ip] if private_ip else [] n = Node( id=findtext(element=element, xpath='instanceId', namespace=NAMESPACE), name=name, state=state, public_ips=public_ips, private_ips=private_ips, driver=self.connection.driver, extra={ 'dns_name': findattr(element=element, xpath="dnsName", namespace=NAMESPACE), 'instanceId': findattr(element=element, xpath="instanceId", namespace=NAMESPACE), 'imageId': findattr(element=element, xpath="imageId", namespace=NAMESPACE), 'private_dns': findattr(element=element, xpath="privateDnsName", namespace=NAMESPACE), 'status': findattr(element=element, xpath="instanceState/name", namespace=NAMESPACE), 'keyname': findattr(element=element, xpath="keyName", namespace=NAMESPACE), 'launchindex': findattr(element=element, xpath="amiLaunchIndex", namespace=NAMESPACE), 'productcode': [ p.text for p in findall( element=element, xpath="productCodesSet/item/productCode", namespace=NAMESPACE )], 'instancetype': findattr(element=element, xpath="instanceType", namespace=NAMESPACE), 'launchdatetime': findattr(element=element, xpath="launchTime", namespace=NAMESPACE), 'availability': findattr(element, xpath="placement/availabilityZone", namespace=NAMESPACE), 'kernelid': findattr(element=element, xpath="kernelId", namespace=NAMESPACE), 'ramdiskid': findattr(element=element, xpath="ramdiskId", namespace=NAMESPACE), 'clienttoken': findattr(element=element, xpath="clientToken", namespace=NAMESPACE), 'groups': groups, 'tags': tags } ) return n def _to_images(self, object): return [self._to_image(el) for el in object.findall( fixxpath(xpath='imagesSet/item', namespace=NAMESPACE)) ] def _to_image(self, element): n = NodeImage( id=findtext(element=element, xpath='imageId', namespace=NAMESPACE), name=findtext(element=element, xpath='imageLocation', namespace=NAMESPACE), driver=self.connection.driver, extra={ 'state': findattr(element=element, xpath="imageState", namespace=NAMESPACE), 'ownerid': findattr(element=element, xpath="imageOwnerId", namespace=NAMESPACE), 
'owneralias': findattr(element=element, xpath="imageOwnerAlias", namespace=NAMESPACE), 'ispublic': findattr(element=element, xpath="isPublic", namespace=NAMESPACE), 'architecture': findattr(element=element, xpath="architecture", namespace=NAMESPACE), 'imagetype': findattr(element=element, xpath="imageType", namespace=NAMESPACE), 'platform': findattr(element=element, xpath="platform", namespace=NAMESPACE), 'rootdevicetype': findattr(element=element, xpath="rootDeviceType", namespace=NAMESPACE), 'virtualizationtype': findattr( element=element, xpath="virtualizationType", namespace=NAMESPACE), 'hypervisor': findattr(element=element, xpath="hypervisor", namespace=NAMESPACE) } ) return n def _to_volume(self, element, name): volId = findtext(element=element, xpath='volumeId', namespace=NAMESPACE) size = findtext(element=element, xpath='size', namespace=NAMESPACE) return StorageVolume(id=volId, name=name, size=int(size), driver=self) def list_nodes(self, ex_node_ids=None): """ List all nodes Ex_node_ids parameter is used to filter the list of nodes that should be returned. Only the nodes with the corresponding node ids will be returned. @param ex_node_ids: List of C{node.id} @type ex_node_ids: C{list} of C{str} @rtype: C{list} of L{Node} """ params = {'Action': 'DescribeInstances'} if ex_node_ids: params.update(self._pathlist('InstanceId', ex_node_ids)) elem = self.connection.request(self.path, params=params).object nodes = [] for rs in findall(element=elem, xpath='reservationSet/item', namespace=NAMESPACE): groups = [g.findtext('') for g in findall(element=rs, xpath='groupSet/item/groupId', namespace=NAMESPACE)] nodes += self._to_nodes(rs, 'instancesSet/item', groups) nodes_elastic_ips_mappings = self.ex_describe_addresses(nodes) for node in nodes: ips = nodes_elastic_ips_mappings[node.id] node.public_ips.extend(ips) return nodes def list_sizes(self, location=None): available_types = REGION_DETAILS[self.region_name]['instance_types'] sizes = [] for instance_type in available_types: attributes = INSTANCE_TYPES[instance_type] attributes = copy.deepcopy(attributes) price = self._get_size_price(size_id=instance_type) attributes.update({'price': price}) sizes.append(NodeSize(driver=self, **attributes)) return sizes def list_images(self, location=None, ex_image_ids=None): """ List all images Ex_image_ids parameter is used to filter the list of images that should be returned. Only the images with the corresponding image ids will be returned. 
@param ex_image_ids: List of C{NodeImage.id} @type ex_image_ids: C{list} of C{str} @rtype: C{list} of L{NodeImage} """ params = {'Action': 'DescribeImages'} if ex_image_ids: params.update(self._pathlist('ImageId', ex_image_ids)) images = self._to_images( self.connection.request(self.path, params=params).object ) return images def list_locations(self): locations = [] for index, availability_zone in \ enumerate(self.ex_list_availability_zones()): locations.append(EC2NodeLocation( index, availability_zone.name, self.country, self, availability_zone) ) return locations def create_volume(self, size, name, location=None, snapshot=None): params = { 'Action': 'CreateVolume', 'Size': str(size)} if location is not None: params['AvailabilityZone'] = location.availability_zone.name volume = self._to_volume( self.connection.request(self.path, params=params).object, name=name) self.ex_create_tags(volume, {'Name': name}) return volume def destroy_volume(self, volume): params = { 'Action': 'DeleteVolume', 'VolumeId': volume.id} response = self.connection.request(self.path, params=params).object return self._get_boolean(response) def attach_volume(self, node, volume, device): params = { 'Action': 'AttachVolume', 'VolumeId': volume.id, 'InstanceId': node.id, 'Device': device} self.connection.request(self.path, params=params) return True def detach_volume(self, volume): params = { 'Action': 'DetachVolume', 'VolumeId': volume.id} self.connection.request(self.path, params=params) return True def ex_create_keypair(self, name): """Creates a new keypair @note: This is a non-standard extension API, and only works for EC2. @param name: The name of the keypair to Create. This must be unique, otherwise an InvalidKeyPair.Duplicate exception is raised. @type name: C{str} @rtype: C{dict} """ params = { 'Action': 'CreateKeyPair', 'KeyName': name, } response = self.connection.request(self.path, params=params).object key_material = findtext(element=response, xpath='keyMaterial', namespace=NAMESPACE) key_fingerprint = findtext(element=response, xpath='keyFingerprint', namespace=NAMESPACE) return { 'keyMaterial': key_material, 'keyFingerprint': key_fingerprint, } def ex_import_keypair(self, name, keyfile): """ imports a new public key @note: This is a non-standard extension API, and only works for EC2. @param name: The name of the public key to import. This must be unique, otherwise an InvalidKeyPair.Duplicate exception is raised. @type name: C{str} @param keyfile: The filename with path of the public key to import. @type keyfile: C{str} @rtype: C{dict} """ with open(os.path.expanduser(keyfile)) as fh: content = fh.read() base64key = base64.b64encode(content) params = { 'Action': 'ImportKeyPair', 'KeyName': name, 'PublicKeyMaterial': base64key } response = self.connection.request(self.path, params=params).object key_name = findtext(element=response, xpath='keyName', namespace=NAMESPACE) key_fingerprint = findtext(element=response, xpath='keyFingerprint', namespace=NAMESPACE) return { 'keyName': key_name, 'keyFingerprint': key_fingerprint, } def ex_describe_all_keypairs(self): """ Describes all keypairs. @note: This is a non-standard extension API, and only works for EC2. 
@rtype: C{list} of C{str} """ params = { 'Action': 'DescribeKeyPairs' } response = self.connection.request(self.path, params=params).object names = [] for elem in findall(element=response, xpath='keySet/item', namespace=NAMESPACE): name = findtext(element=elem, xpath='keyName', namespace=NAMESPACE) names.append(name) return names def ex_describe_keypairs(self, name): """Describes a keypair by name @note: This is a non-standard extension API, and only works for EC2. @param name: The name of the keypair to describe. @type name: C{str} @rtype: C{dict} """ params = { 'Action': 'DescribeKeyPairs', 'KeyName.1': name } response = self.connection.request(self.path, params=params).object key_name = findattr(element=response, xpath='keySet/item/keyName', namespace=NAMESPACE) return { 'keyName': key_name } def ex_list_security_groups(self): """ List existing Security Groups. @note: This is a non-standard extension API, and only works for EC2. @rtype: C{list} of C{str} """ params = {'Action': 'DescribeSecurityGroups'} response = self.connection.request(self.path, params=params).object groups = [] for group in findall(element=response, xpath='securityGroupInfo/item', namespace=NAMESPACE): name = findtext(element=group, xpath='groupName', namespace=NAMESPACE) groups.append(name) return groups def ex_create_security_group(self, name, description): """ Creates a new Security Group @note: This is a non-standard extension API, and only works for EC2. @param name: The name of the security group to Create. This must be unique. @type name: C{str} @param description: Human readable description of a Security Group. @type description: C{str} @rtype: C{str} """ params = {'Action': 'CreateSecurityGroup', 'GroupName': name, 'GroupDescription': description} return self.connection.request(self.path, params=params).object def ex_authorize_security_group(self, name, from_port, to_port, cidr_ip, protocol='tcp'): """ Edit a Security Group to allow specific traffic. @note: This is a non-standard extension API, and only works for EC2. @param name: The name of the security group to edit @type name: C{str} @param from_port: The beginning of the port range to open @type from_port: C{str} @param to_port: The end of the port range to open @type to_port: C{str} @param cidr_ip: The ip to allow traffic for. @type cidr_ip: C{str} @param protocol: tcp/udp/icmp @type protocol: C{str} @rtype: C{bool} """ params = {'Action': 'AuthorizeSecurityGroupIngress', 'GroupName': name, 'IpProtocol': protocol, 'FromPort': str(from_port), 'ToPort': str(to_port), 'CidrIp': cidr_ip} try: resp = self.connection.request( self.path, params=params.copy()).object return bool(findtext(element=resp, xpath='return', namespace=NAMESPACE)) except Exception: e = sys.exc_info()[1] if e.args[0].find('InvalidPermission.Duplicate') == -1: raise e def ex_authorize_security_group_permissive(self, name): """ Edit a Security Group to allow all traffic. @note: This is a non-standard extension API, and only works for EC2. 
@param name: The name of the security group to edit @type name: C{str} @rtype: C{list} of C{str} """ results = [] params = {'Action': 'AuthorizeSecurityGroupIngress', 'GroupName': name, 'IpProtocol': 'tcp', 'FromPort': '0', 'ToPort': '65535', 'CidrIp': '0.0.0.0/0'} try: results.append( self.connection.request(self.path, params=params.copy()).object ) except Exception: e = sys.exc_info()[1] if e.args[0].find("InvalidPermission.Duplicate") == -1: raise e params['IpProtocol'] = 'udp' try: results.append( self.connection.request(self.path, params=params.copy()).object ) except Exception: e = sys.exc_info()[1] if e.args[0].find("InvalidPermission.Duplicate") == -1: raise e params.update({'IpProtocol': 'icmp', 'FromPort': '-1', 'ToPort': '-1'}) try: results.append( self.connection.request(self.path, params=params.copy()).object ) except Exception: e = sys.exc_info()[1] if e.args[0].find("InvalidPermission.Duplicate") == -1: raise e return results def ex_list_availability_zones(self, only_available=True): """ Return a list of L{ExEC2AvailabilityZone} objects for the current region. Note: This is an extension method and is only available for EC2 driver. @keyword only_available: If true, return only availability zones with state 'available' @type only_available: C{str} @rtype: C{list} of L{ExEC2AvailabilityZone} """ params = {'Action': 'DescribeAvailabilityZones'} if only_available: params.update({'Filter.0.Name': 'state'}) params.update({'Filter.0.Value.0': 'available'}) params.update({'Filter.1.Name': 'region-name'}) params.update({'Filter.1.Value.0': self.region_name}) result = self.connection.request(self.path, params=params.copy()).object availability_zones = [] for element in findall(element=result, xpath='availabilityZoneInfo/item', namespace=NAMESPACE): name = findtext(element=element, xpath='zoneName', namespace=NAMESPACE) zone_state = findtext(element=element, xpath='zoneState', namespace=NAMESPACE) region_name = findtext(element=element, xpath='regionName', namespace=NAMESPACE) availability_zone = ExEC2AvailabilityZone( name=name, zone_state=zone_state, region_name=region_name ) availability_zones.append(availability_zone) return availability_zones def ex_describe_tags(self, resource): """ Return a dictionary of tags for a resource (Node or StorageVolume). @param resource: resource which should be used @type resource: L{Node} or L{StorageVolume} @return: dict Node tags @rtype: C{dict} """ params = {'Action': 'DescribeTags', 'Filter.0.Name': 'resource-id', 'Filter.0.Value.0': resource.id, 'Filter.1.Name': 'resource-type', 'Filter.1.Value.0': 'instance', } result = self.connection.request(self.path, params=params.copy()).object tags = {} for element in findall(element=result, xpath='tagSet/item', namespace=NAMESPACE): key = findtext(element=element, xpath='key', namespace=NAMESPACE) value = findtext(element=element, xpath='value', namespace=NAMESPACE) tags[key] = value return tags def ex_create_tags(self, resource, tags): """ Create tags for a resource (Node or StorageVolume). @param resource: Resource to be tagged @type resource: L{Node} or L{StorageVolume} @param tags: A dictionary or other mapping of strings to strings, associating tag names with tag values. 
@type tags: C{dict} @rtype: C{bool} """ if not tags: return params = {'Action': 'CreateTags', 'ResourceId.0': resource.id} for i, key in enumerate(tags): params['Tag.%d.Key' % i] = key params['Tag.%d.Value' % i] = tags[key] result = self.connection.request(self.path, params=params.copy()).object element = findtext(element=result, xpath='return', namespace=NAMESPACE) return element == 'true' def ex_delete_tags(self, resource, tags): """ Delete tags from a resource. @param resource: Resource to be tagged @type resource: L{Node} or L{StorageVolume} @param tags: A dictionary or other mapping of strings to strings, specifying the tag names and tag values to be deleted. @type tags: C{dict} @rtype: C{bool} """ if not tags: return params = {'Action': 'DeleteTags', 'ResourceId.0': resource.id} for i, key in enumerate(tags): params['Tag.%d.Key' % i] = key params['Tag.%d.Value' % i] = tags[key] result = self.connection.request(self.path, params=params.copy()).object element = findtext(element=result, xpath='return', namespace=NAMESPACE) return element == 'true' def _add_instance_filter(self, params, node): """ Add instance filter to the provided params dictionary. """ params.update({ 'Filter.0.Name': 'instance-id', 'Filter.0.Value.0': node.id }) def ex_describe_all_addresses(self, only_allocated=False): """ Return all the Elastic IP addresses for this account optionally, return only the allocated addresses @param only_allocated: If true, return only those addresses that are associated with an instance @type only_allocated: C{str} @return: list list of elastic ips for this particular account. @rtype: C{list} of C{str} """ params = {'Action': 'DescribeAddresses'} result = self.connection.request(self.path, params=params.copy()).object # the list which we return elastic_ip_addresses = [] for element in findall(element=result, xpath='addressesSet/item', namespace=NAMESPACE): instance_id = findtext(element=element, xpath='instanceId', namespace=NAMESPACE) # if only allocated addresses are requested if only_allocated and not instance_id: continue ip_address = findtext(element=element, xpath='publicIp', namespace=NAMESPACE) elastic_ip_addresses.append(ip_address) return elastic_ip_addresses def ex_associate_addresses(self, node, elastic_ip_address): """ Associate an IP address with a particular node. @param node: Node instance @type node: L{Node} @param elastic_ip_address: IP address which should be used @type elastic_ip_address: C{str} @rtype: C{bool} """ params = {'Action': 'AssociateAddress'} params.update(self._pathlist('InstanceId', [node.id])) params.update({'PublicIp': elastic_ip_address}) res = self.connection.request(self.path, params=params).object return self._get_boolean(res) def ex_describe_addresses(self, nodes): """ Return Elastic IP addresses for all the nodes in the provided list. @param nodes: List of C{Node} instances @type nodes: C{list} of L{Node} @return: Dictionary where a key is a node ID and the value is a list with the Elastic IP addresses associated with this node. 
@rtype: C{dict} """ if not nodes: return {} params = {'Action': 'DescribeAddresses'} if len(nodes) == 1: self._add_instance_filter(params, nodes[0]) result = self.connection.request(self.path, params=params.copy()).object node_instance_ids = [node.id for node in nodes] nodes_elastic_ip_mappings = {} for node_id in node_instance_ids: nodes_elastic_ip_mappings.setdefault(node_id, []) for element in findall(element=result, xpath='addressesSet/item', namespace=NAMESPACE): instance_id = findtext(element=element, xpath='instanceId', namespace=NAMESPACE) ip_address = findtext(element=element, xpath='publicIp', namespace=NAMESPACE) if instance_id not in node_instance_ids: continue nodes_elastic_ip_mappings[instance_id].append(ip_address) return nodes_elastic_ip_mappings def ex_describe_addresses_for_node(self, node): """ Return a list of Elastic IP addresses associated with this node. @param node: Node instance @type node: L{Node} @return: list Elastic IP addresses attached to this node. @rtype: C{list} of C{str} """ node_elastic_ips = self.ex_describe_addresses([node]) return node_elastic_ips[node.id] def ex_modify_instance_attribute(self, node, attributes): """ Modify node attributes. A list of valid attributes can be found at http://goo.gl/gxcj8 @param node: Node instance @type node: L{Node} @param attributes: Dictionary with node attributes @type attributes: C{dict} @return: True on success, False otherwise. @rtype: C{bool} """ attributes = attributes or {} attributes.update({'InstanceId': node.id}) params = {'Action': 'ModifyInstanceAttribute'} params.update(attributes) result = self.connection.request(self.path, params=params.copy()).object element = findtext(element=result, xpath='return', namespace=NAMESPACE) return element == 'true' def ex_change_node_size(self, node, new_size): """ Change the node size. Note: Node must be turned of before changing the size. @param node: Node instance @type node: L{Node} @param new_size: NodeSize intance @type new_size: L{NodeSize} @return: True on success, False otherwise. @rtype: C{bool} """ if 'instancetype' in node.extra: current_instance_type = node.extra['instancetype'] if current_instance_type == new_size.id: raise ValueError('New instance size is the same as' + 'the current one') attributes = {'InstanceType.Value': new_size.id} return self.ex_modify_instance_attribute(node, attributes) def create_node(self, **kwargs): """Create a new EC2 node Reference: http://bit.ly/8ZyPSy [docs.amazonwebservices.com] @inherits: L{NodeDriver.create_node} @keyword ex_mincount: Minimum number of instances to launch @type ex_mincount: C{int} @keyword ex_maxcount: Maximum number of instances to launch @type ex_maxcount: C{int} @keyword ex_securitygroup: Name of security group @type ex_securitygroup: C{str} @keyword ex_keyname: The name of the key pair @type ex_keyname: C{str} @keyword ex_userdata: User data @type ex_userdata: C{str} @keyword ex_clienttoken: Unique identifier to ensure idempotency @type ex_clienttoken: C{str} @keyword ex_blockdevicemappings: C{list} of C{dict} block device mappings. 
Example: [{'DeviceName': '/dev/sdb', 'VirtualName': 'ephemeral0'}] @type ex_blockdevicemappings: C{list} of C{dict} """ image = kwargs["image"] size = kwargs["size"] params = { 'Action': 'RunInstances', 'ImageId': image.id, 'MinCount': kwargs.get('ex_mincount', '1'), 'MaxCount': kwargs.get('ex_maxcount', '1'), 'InstanceType': size.id } if 'ex_securitygroup' in kwargs: if not isinstance(kwargs['ex_securitygroup'], list): kwargs['ex_securitygroup'] = [kwargs['ex_securitygroup']] for sig in range(len(kwargs['ex_securitygroup'])): params['SecurityGroup.%d' % (sig + 1,)] =\ kwargs['ex_securitygroup'][sig] if 'location' in kwargs: availability_zone = getattr(kwargs['location'], 'availability_zone', None) if availability_zone: if availability_zone.region_name != self.region_name: raise AttributeError('Invalid availability zone: %s' % (availability_zone.name)) params['Placement.AvailabilityZone'] = availability_zone.name if 'ex_keyname' in kwargs: params['KeyName'] = kwargs['ex_keyname'] if 'ex_userdata' in kwargs: params['UserData'] = base64.b64encode(b(kwargs['ex_userdata']))\ .decode('utf-8') if 'ex_clienttoken' in kwargs: params['ClientToken'] = kwargs['ex_clienttoken'] if 'ex_blockdevicemappings' in kwargs: for index, mapping in enumerate(kwargs['ex_blockdevicemappings']): params['BlockDeviceMapping.%d.DeviceName' % (index + 1)] = \ mapping['DeviceName'] params['BlockDeviceMapping.%d.VirtualName' % (index + 1)] = \ mapping['VirtualName'] object = self.connection.request(self.path, params=params).object nodes = self._to_nodes(object, 'instancesSet/item') for node in nodes: tags = {'Name': kwargs['name']} try: self.ex_create_tags(resource=node, tags=tags) except Exception: continue node.name = kwargs['name'] node.extra.update({'tags': tags}) if len(nodes) == 1: return nodes[0] else: return nodes def reboot_node(self, node): params = {'Action': 'RebootInstances'} params.update(self._pathlist('InstanceId', [node.id])) res = self.connection.request(self.path, params=params).object return self._get_boolean(res) def ex_start_node(self, node): """ Start the node by passing in the node object, does not work with instance store backed instances @param node: Node which should be used @type node: L{Node} @rtype: C{bool} """ params = {'Action': 'StartInstances'} params.update(self._pathlist('InstanceId', [node.id])) res = self.connection.request(self.path, params=params).object return self._get_state_boolean(res) def ex_stop_node(self, node): """ Stop the node by passing in the node object, does not work with instance store backed instances @param node: Node which should be used @type node: L{Node} @rtype: C{bool} """ params = {'Action': 'StopInstances'} params.update(self._pathlist('InstanceId', [node.id])) res = self.connection.request(self.path, params=params).object return self._get_state_boolean(res) def destroy_node(self, node): params = {'Action': 'TerminateInstances'} params.update(self._pathlist('InstanceId', [node.id])) res = self.connection.request(self.path, params=params).object return self._get_terminate_boolean(res) class EC2NodeDriver(BaseEC2NodeDriver): """ Amazon EC2 node driver. 
""" connectionCls = EC2Connection type = Provider.EC2 name = 'Amazon EC2' website = 'http://aws.amazon.com/ec2/' path = '/' region_name = 'us-east-1' country = 'USA' api_name = 'ec2_us_east' features = {'create_node': ['ssh_key']} NODE_STATE_MAP = { 'pending': NodeState.PENDING, 'running': NodeState.RUNNING, 'shutting-down': NodeState.UNKNOWN, 'terminated': NodeState.TERMINATED } class IdempotentParamError(LibcloudError): """ Request used the same client token as a previous, but non-identical request. """ def __str__(self): return repr(self.value) class EC2EUConnection(EC2Connection): """ Connection class for EC2 in the Western Europe Region """ host = REGION_DETAILS['eu-west-1']['endpoint'] class EC2EUNodeDriver(EC2NodeDriver): """ Driver class for EC2 in the Western Europe Region. """ api_name = 'ec2_eu_west' name = 'Amazon EC2 (eu-west-1)' friendly_name = 'Amazon Europe Ireland' country = 'IE' region_name = 'eu-west-1' connectionCls = EC2EUConnection class EC2USWestConnection(EC2Connection): """ Connection class for EC2 in the Western US Region """ host = REGION_DETAILS['us-west-1']['endpoint'] class EC2USWestNodeDriver(EC2NodeDriver): """ Driver class for EC2 in the Western US Region """ api_name = 'ec2_us_west' name = 'Amazon EC2 (us-west-1)' friendly_name = 'Amazon US N. California' country = 'US' region_name = 'us-west-1' connectionCls = EC2USWestConnection class EC2USWestOregonConnection(EC2Connection): """ Connection class for EC2 in the Western US Region (Oregon). """ host = REGION_DETAILS['us-west-2']['endpoint'] class EC2USWestOregonNodeDriver(EC2NodeDriver): """ Driver class for EC2 in the US West Oregon region. """ api_name = 'ec2_us_west_oregon' name = 'Amazon EC2 (us-west-2)' friendly_name = 'Amazon US West - Oregon' country = 'US' region_name = 'us-west-2' connectionCls = EC2USWestOregonConnection class EC2APSEConnection(EC2Connection): """ Connection class for EC2 in the Southeast Asia Pacific Region. """ host = REGION_DETAILS['ap-southeast-1']['endpoint'] class EC2APNEConnection(EC2Connection): """ Connection class for EC2 in the Northeast Asia Pacific Region. """ host = REGION_DETAILS['ap-northeast-1']['endpoint'] class EC2APSENodeDriver(EC2NodeDriver): """ Driver class for EC2 in the Southeast Asia Pacific Region. """ api_name = 'ec2_ap_southeast' name = 'Amazon EC2 (ap-southeast-1)' friendly_name = 'Amazon Asia-Pacific Singapore' country = 'SG' region_name = 'ap-southeast-1' connectionCls = EC2APSEConnection class EC2APNENodeDriver(EC2NodeDriver): """ Driver class for EC2 in the Northeast Asia Pacific Region. """ api_name = 'ec2_ap_northeast' name = 'Amazon EC2 (ap-northeast-1)' friendly_name = 'Amazon Asia-Pacific Tokyo' country = 'JP' region_name = 'ap-northeast-1' connectionCls = EC2APNEConnection class EC2SAEastConnection(EC2Connection): """ Connection class for EC2 in the South America (Sao Paulo) Region. """ host = REGION_DETAILS['sa-east-1']['endpoint'] class EC2SAEastNodeDriver(EC2NodeDriver): """ Driver class for EC2 in the South America (Sao Paulo) Region. """ api_name = 'ec2_sa_east' name = 'Amazon EC2 (sa-east-1)' friendly_name = 'Amazon South America Sao Paulo' country = 'BR' region_name = 'sa-east-1' connectionCls = EC2SAEastConnection class EC2APSESydneyConnection(EC2Connection): """ Connection class for EC2 in the Southeast Asia Pacific (Sydney) Region. """ host = REGION_DETAILS['ap-southeast-2']['endpoint'] class EC2APSESydneyNodeDriver(EC2NodeDriver): """ Driver class for EC2 in the Southeast Asia Pacific (Sydney) Region. 
""" api_name = 'ec2_ap_southeast_2' name = 'Amazon EC2 (ap-southeast-2)' friendly_name = 'Amazon Asia-Pacific Sydney' country = 'AU' region_name = 'ap-southeast-2' connectionCls = EC2APSESydneyConnection class EucConnection(EC2Connection): """ Connection class for Eucalyptus """ host = None class EucNodeDriver(BaseEC2NodeDriver): """ Driver class for Eucalyptus """ name = 'Eucalyptus' website = 'http://www.eucalyptus.com/' api_name = 'ec2_us_east' region_name = 'us-east-1' connectionCls = EucConnection def __init__(self, key, secret=None, secure=True, host=None, path=None, port=None): """ @inherits: L{EC2NodeDriver.__init__} @param path: The host where the API can be reached. @type path: C{str} """ super(EucNodeDriver, self).__init__(key, secret, secure, host, port) if path is None: path = "/services/Eucalyptus" self.path = path def list_locations(self): raise NotImplementedError( 'list_locations not implemented for this driver') def _add_instance_filter(self, params, node): """ Eucalyptus driver doesn't support filtering on instance id so this is a no-op. """ pass class NimbusConnection(EC2Connection): """ Connection class for Nimbus """ host = None class NimbusNodeDriver(BaseEC2NodeDriver): """ Driver class for Nimbus """ type = Provider.NIMBUS name = 'Nimbus' website = 'http://www.nimbusproject.org/' country = 'Private' api_name = 'nimbus' region_name = 'nimbus' friendly_name = 'Nimbus Private Cloud' connectionCls = NimbusConnection def ex_describe_addresses(self, nodes): """ Nimbus doesn't support elastic IPs, so this is a passthrough. @inherits: L{EC2NodeDriver.ex_describe_addresses} """ nodes_elastic_ip_mappings = {} for node in nodes: # empty list per node nodes_elastic_ip_mappings[node.id] = [] return nodes_elastic_ip_mappings def ex_create_tags(self, resource, tags): """ Nimbus doesn't support creating tags, so this is a passthrough. @inherits: L{EC2NodeDriver.ex_create_tags} """ pass
__version__ = "1.12.0" __version_info__ = ( 1, 12, 0 )
"""Backport from python2.7 to python <= 2.6.""" from itertools import repeat as _repeat, chain as _chain, starmap as _starmap try: from itertools import izip_longest as _zip_longest except ImportError: from itertools import izip def _zip_longest(*args, **kwds): # izip_longest('ABCD', 'xy', fillvalue='-') --> Ax By C- D- fillvalue = kwds.get('fillvalue') def sentinel(counter = ([fillvalue]*(len(args)-1)).pop): yield counter() # yields the fillvalue, or raises IndexError fillers = _repeat(fillvalue) iters = [_chain(it, sentinel(), fillers) for it in args] try: for tup in izip(*iters): yield tup except IndexError: pass class OrderedDict(dict): def __init__(self, *args, **kwds): if len(args) > 1: raise TypeError('expected at most 1 arguments, got %d' % len(args)) if not hasattr(self, '_keys'): self._keys = [] self.update(*args, **kwds) def clear(self): del self._keys[:] dict.clear(self) def __setitem__(self, key, value): if key not in self: self._keys.append(key) dict.__setitem__(self, key, value) def __delitem__(self, key): dict.__delitem__(self, key) self._keys.remove(key) def __iter__(self): return iter(self._keys) def __reversed__(self): return reversed(self._keys) def popitem(self): if not self: raise KeyError('dictionary is empty') key = self._keys.pop() value = dict.pop(self, key) return key, value def __reduce__(self): items = [[k, self[k]] for k in self] inst_dict = vars(self).copy() inst_dict.pop('_keys', None) return (self.__class__, (items,), inst_dict) def setdefault(self, key, default=None): try: return self[key] except KeyError: self[key] = default return default def update(self, other=(), **kwds): if hasattr(other, "keys"): for key in other.keys(): self[key] = other[key] else: for key, value in other: self[key] = value for key, value in kwds.items(): self[key] = value __marker = object() def pop(self, key, default=__marker): try: value = self[key] except KeyError: if default is self.__marker: raise return default else: del self[key] return value def keys(self): return list(self) def values(self): return [self[key] for key in self] def items(self): return [(key, self[key]) for key in self] def __repr__(self): if not self: return '%s()' % (self.__class__.__name__,) return '%s(%r)' % (self.__class__.__name__, list(self.items())) def copy(self): return self.__class__(self) @classmethod def fromkeys(cls, iterable, value=None): d = cls() for key in iterable: d[key] = value return d def __eq__(self, other): if isinstance(other, OrderedDict): return all(p==q for p, q in _zip_longest(self.items(), other.items())) return dict.__eq__(self, other)
'''
Author: Christopher Duffy
Date: February 2015
Name: nmap_scanner.py
Purpose: To scan a network

Copyright (c) 2015, Christopher Duffy All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior
written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys

try:
    import nmap
except ImportError:
    sys.exit("[!] Install the nmap library: pip install python-nmap")

if len(sys.argv) != 3:
    sys.exit("Please provide two arguments: the first being the targets, "
             "the second the ports")

ports = str(sys.argv[2])
addrs = str(sys.argv[1])

scanner = nmap.PortScanner()
scanner.scan(addrs, ports)
for host in scanner.all_hosts():
    hostname = scanner[host].hostname()
    # The original tested `"" in host`, which is always true for strings,
    # and applied % to print()'s return value; test the hostname instead
    # and format inside the call.
    if not hostname:
        print("The host's IP address is %s and its hostname was not found"
              % (host))
    else:
        print("The host's IP address is %s and its hostname is %s"
              % (host, hostname))
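
# Example invocation (addresses and ports are illustrative; only scan hosts
# you are authorized to test):
#
#     python nmap_scanner.py 192.168.1.0/24 22,80,443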
from os.path import join
from time import strftime

from qiita_db.util import get_mountpoint
from qiita_db.sql_connection import SQLConnectionHandler
from qiita_db.metadata_template import SampleTemplate, PrepTemplate

conn_handler = SQLConnectionHandler()

_id, fp_base = get_mountpoint('templates')[0]

for study_id in conn_handler.execute_fetchall(
        "SELECT study_id FROM qiita.study"):
    study_id = study_id[0]
    if SampleTemplate.exists(study_id):
        st = SampleTemplate(study_id)
        fp = join(fp_base, '%d_%s.txt' % (study_id,
                                          strftime("%Y%m%d-%H%M%S")))
        st.to_file(fp)
        st.add_filepath(fp)

for prep_template_id in conn_handler.execute_fetchall(
        "SELECT prep_template_id FROM qiita.prep_template"):
    prep_template_id = prep_template_id[0]
    pt = PrepTemplate(prep_template_id)
    study_id = pt.study_id
    fp = join(fp_base, '%d_prep_%d_%s.txt' % (pt.study_id, prep_template_id,
                                              strftime("%Y%m%d-%H%M%S")))
    pt.to_file(fp)
    pt.add_filepath(fp)
from __future__ import print_function import numpy as np from bokeh.client import push_session from bokeh.io import curdoc from bokeh.models import (ColumnDataSource, DataRange1d, Plot, Circle, WidgetBox, Row, Button, TapTool) N = 9 x = np.linspace(-2, 2, N) y = x**2 source1 = ColumnDataSource(dict(x = x, y = y, radius = [0.1]*N)) xdr1 = DataRange1d() ydr1 = DataRange1d() plot1 = Plot(x_range=xdr1, y_range=ydr1, plot_width=400, plot_height=400) plot1.title.text = "Plot1" plot1.tools.append(TapTool(plot=plot1)) plot1.add_glyph(source1, Circle(x="x", y="y", radius="radius", fill_color="red")) source2 = ColumnDataSource(dict(x = x, y = y, color = ["blue"]*N)) xdr2 = DataRange1d() ydr2 = DataRange1d() plot2 = Plot(x_range=xdr2, y_range=ydr2, plot_width=400, plot_height=400) plot2.title.text = "Plot2" plot2.tools.append(TapTool(plot=plot2)) plot2.add_glyph(source2, Circle(x="x", y="y", radius=0.1, fill_color="color")) def on_selection_change1(attr, _, inds): color = ["blue"]*N if inds['1d']['indices']: indices = inds['1d']['indices'] for i in indices: color[i] = "red" source2.data["color"] = color source1.on_change('selected', on_selection_change1) def on_selection_change2(attr, _, inds): inds = inds['1d']['indices'] if inds: [index] = inds radius = [0.1]*N radius[index] = 0.2 else: radius = [0.1]*N source1.data["radius"] = radius source2.on_change('selected', on_selection_change2) reset = Button(label="Reset") def on_reset_click(): source1.selected = { '0d': {'flag': False, 'indices': []}, '1d': {'indices': []}, '2d': {'indices': {}} } source2.selected = { '0d': {'flag': False, 'indices': []}, '1d': {'indices': []}, '2d': {'indices': {}} } reset.on_click(on_reset_click) widgetBox = WidgetBox(children=[reset], width=150) row = Row(children=[widgetBox, plot1, plot2]) document = curdoc() document.add_root(row) if __name__ == "__main__": print("\npress ctrl-C to exit") session = push_session(document) session.show() session.loop_until_closed()
""" BrowserID support """ from social.backends.base import BaseAuth from social.exceptions import AuthFailed, AuthMissingParameter class PersonaAuth(BaseAuth): """BrowserID authentication backend""" name = 'persona' def get_user_id(self, details, response): """Use BrowserID email as ID""" return details['email'] def get_user_details(self, response): """Return user details, BrowserID only provides Email.""" # {'status': 'okay', # 'audience': 'localhost:8000', # 'expires': 1328983575529, # 'email': 'name@server.com', # 'issuer': 'browserid.org'} email = response['email'] return {'username': email.split('@', 1)[0], 'email': email, 'fullname': '', 'first_name': '', 'last_name': ''} def extra_data(self, user, uid, response, details): """Return users extra data""" return {'audience': response['audience'], 'issuer': response['issuer']} def auth_complete(self, *args, **kwargs): """Completes loging process, must return user instance""" if not 'assertion' in self.data: raise AuthMissingParameter(self, 'assertion') response = self.get_json('https://browserid.org/verify', data={ 'assertion': self.data['assertion'], 'audience': self.strategy.request_host() }, method='POST') if response.get('status') == 'failure': raise AuthFailed(self) kwargs.update({'response': response, 'backend': self}) return self.strategy.authenticate(*args, **kwargs)
from sqlalchemy import JSON, Boolean, Column, ForeignKey, Index, Integer, String from sqlalchemy.dialects import postgresql from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import relationship from sqlalchemy.sql.functions import GenericFunction from qcfractal.storage_sockets.models.sql_base import Base, MsgpackExt class json_build_object(GenericFunction): type = postgresql.JSON class CollectionORM(Base): """ A base collection class of precomuted workflows such as datasets, .. This is a dynamic document, so it will accept any number of extra fields (expandable and uncontrolled schema) """ __tablename__ = "collection" id = Column(Integer, primary_key=True) collection_type = Column(String) # for inheritance collection = Column(String(100), nullable=False) lname = Column(String(100), nullable=False) name = Column(String(100), nullable=False) tags = Column(JSON) tagline = Column(String) description = Column(String) group = Column(String(100), nullable=False) visibility = Column(Boolean, nullable=False) view_url_hdf5 = Column(String) view_url_plaintext = Column(String) view_metadata = Column(JSON) view_available = Column(Boolean, nullable=False) provenance = Column(JSON) extra = Column(JSON) # extra data related to specific collection type def update_relations(self, **kwarg): pass __table_args__ = ( Index("ix_collection_lname", "collection", "lname", unique=True), Index("ix_collection_type", "collection_type"), ) __mapper_args__ = {"polymorphic_on": "collection_type"} class DatasetMixin: """ Mixin class for common Dataset attributes. """ default_benchmark = Column(String) default_keywords = Column(JSON) default_driver = Column(String) default_units = Column(String) alias_keywords = Column(JSON) default_program = Column(String) history_keys = Column(JSON) history = Column(JSON) class ContributedValuesORM(Base): """One group of a contibuted values per dataset Each dataset can have multiple rows in this table""" __tablename__ = "contributed_values" collection_id = Column(Integer, ForeignKey("collection.id", ondelete="cascade"), primary_key=True) name = Column(String, nullable=False, primary_key=True) values = Column(MsgpackExt, nullable=False) index = Column(MsgpackExt, nullable=False) values_structure = Column(JSON, nullable=False) theory_level = Column(JSON, nullable=False) units = Column(String, nullable=False) theory_level_details = Column(JSON) citations = Column(JSON) external_url = Column(String) doi = Column(String) comments = Column(String) class DatasetEntryORM(Base): """Association table for many to many""" __tablename__ = "dataset_entry" dataset_id = Column(Integer, ForeignKey("dataset.id", ondelete="cascade"), primary_key=True) # TODO: check the cascase_delete with molecule molecule_id = Column(Integer, ForeignKey("molecule.id"), nullable=False) name = Column(String, nullable=False, primary_key=True) comment = Column(String) local_results = Column(JSON) class DatasetORM(CollectionORM, DatasetMixin): """ The Dataset class for homogeneous computations on many molecules. 
""" __tablename__ = "dataset" id = Column(Integer, ForeignKey("collection.id", ondelete="CASCADE"), primary_key=True) contributed_values_obj = relationship(ContributedValuesORM, lazy="selectin", cascade="all, delete-orphan") records_obj = relationship( DatasetEntryORM, lazy="selectin", cascade="all, delete-orphan", backref="dataset" # lazy='noload', ) @hybrid_property def contributed_values(self): return self._contributed_values(self.contributed_values_obj) @staticmethod def _contributed_values(contributed_values_obj): if not contributed_values_obj: return {} if not isinstance(contributed_values_obj, list): contributed_values_obj = [contributed_values_obj] ret = {} try: for obj in contributed_values_obj: ret[obj.name.lower()] = obj.to_dict(exclude=["collection_id"]) except Exception as err: pass return ret @contributed_values.setter def contributed_values(self, dict_values): return dict_values @hybrid_property def records(self): """calculated property when accessed, not saved in the DB A view of the many to many relation""" return self._records(self.records_obj) @staticmethod def _records(records_obj): if not records_obj: return [] if not isinstance(records_obj, list): records_obj = [records_obj] ret = [] try: for rec in records_obj: ret.append(rec.to_dict(exclude=["dataset_id"])) except Exception as err: # raises exception of first access!! pass return ret @records.setter def records(self, dict_values): return dict_values def update_relations(self, records=None, contributed_values=None, **kwarg): self.records_obj = [] records = [] if not records else records for rec_dict in records: rec = DatasetEntryORM(dataset_id=int(self.id), **rec_dict) self.records_obj.append(rec) self.contributed_values_obj = [] contributed_values = {} if not contributed_values else contributed_values for key, rec_dict in contributed_values.items(): rec = ContributedValuesORM(collection_id=int(self.id), **rec_dict) self.contributed_values_obj.append(rec) __table_args__ = ( # Index('ix_results_molecule', 'molecule'), # b-tree index # UniqueConstraint("program", "driver", "method", "basis", "keywords", "molecule", name='uix_results_keys'), ) __mapper_args__ = { "polymorphic_identity": "dataset", # to have separate select when querying CollectionORM "polymorphic_load": "selectin", } class ReactionDatasetEntryORM(Base): """Association table for many to many""" __tablename__ = "reaction_dataset_entry" reaction_dataset_id = Column(Integer, ForeignKey("reaction_dataset.id", ondelete="cascade"), primary_key=True) attributes = Column(JSON) name = Column(String, nullable=False, primary_key=True) reaction_results = Column(JSON) stoichiometry = Column(JSON) extras = Column(JSON) class ReactionDatasetORM(CollectionORM, DatasetMixin): """ Reaction Dataset """ __tablename__ = "reaction_dataset" id = Column(Integer, ForeignKey("collection.id", ondelete="CASCADE"), primary_key=True) ds_type = Column(String) records_obj = relationship( ReactionDatasetEntryORM, lazy="selectin", cascade="all, delete-orphan", backref="reaction_dataset" ) contributed_values_obj = relationship(ContributedValuesORM, lazy="selectin", cascade="all, delete-orphan") @hybrid_property def contributed_values(self): return self._contributed_values(self.contributed_values_obj) @staticmethod def _contributed_values(contributed_values_obj): return DatasetORM._contributed_values(contributed_values_obj) @contributed_values.setter def contributed_values(self, dict_values): return dict_values def update_relations(self, records=None, contributed_values=None, **kwarg): 
self.records_obj = [] records = records or [] for rec_dict in records: rec = ReactionDatasetEntryORM(reaction_dataset_id=int(self.id), **rec_dict) self.records_obj.append(rec) self.contributed_values_obj = [] contributed_values = {} if not contributed_values else contributed_values for key, rec_dict in contributed_values.items(): rec = ContributedValuesORM(collection_id=int(self.id), **rec_dict) self.contributed_values_obj.append(rec) @hybrid_property def records(self): """calculated property when accessed, not saved in the DB A view of the many to many relation""" return self._records(self.records_obj) @staticmethod def _records(records_obj): if not records_obj: return [] if not isinstance(records_obj, list): records_obj = [records_obj] ret = [] try: for rec in records_obj: ret.append(rec.to_dict(exclude=["reaction_dataset_id"])) except Exception as err: # raises exception of first access!! pass return ret @records.setter def records(self, dict_values): return dict_values __table_args__ = ( # Index('ix_results_molecule', 'molecule'), # b-tree index # UniqueConstraint("program", "driver", "method", "basis", "keywords", "molecule", name='uix_results_keys'), ) __mapper_args__ = { "polymorphic_identity": "reactiondataset", # to have separate select when querying CollectionORM "polymorphic_load": "selectin", }
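

# A minimal sketch of querying these polymorphic collections. The connection
# URL and session setup are illustrative and not part of this module:
if __name__ == '__main__':
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    engine = create_engine("postgresql://user:password@localhost/qcf_example")
    session = sessionmaker(bind=engine)()

    # Because CollectionORM sets polymorphic_on="collection_type", querying
    # the base class yields DatasetORM / ReactionDatasetORM rows directly.
    for collection in session.query(CollectionORM).filter_by(visibility=True):
        print(collection.collection_type, collection.lname)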
import sys

sys.path.extend(['../sympy', 'ext'])

extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
              'sphinx.ext.mathjax', 'numpydoc', 'sympylive']

mathjax_path = 'https://c328740.ssl.cf1.rackcdn.com/mathjax/latest/MathJax.js?config=TeX-AMS_HTML-full'

templates_path = ['.templates']
source_suffix = '.rst'
master_doc = 'index'

project = 'SymPy'
copyright = '2008, 2009, 2010, 2011, 2012 SymPy Development Team'
version = '0.7.2'
release = '0.7.2-git'
today_fmt = '%B %d, %Y'
locale_dirs = ["i18n/"]
pygments_style = 'sphinx'

html_style = 'default.css'
html_static_path = ['_static']
html_last_updated_fmt = '%b %d, %Y'
html_logo = '_static/sympylogo.png'
html_favicon = '../logo/SymPy-Favicon.ico'
html_theme_options = {'collapsiblesidebar': True}
htmlhelp_basename = 'SymPydoc'

latex_documents = [('index', 'sympy-%s.tex' % release,
                    'SymPy Documentation', 'SymPy Development Team',
                    'manual', True)]
latex_elements = {
    'babel': '',
    'fontenc': r'''
\usepackage{amssymb}
\usepackage{fontspec}
\defaultfontfeatures{Mapping=tex-text}
\setmainfont{DejaVu Serif}
\setsansfont{DejaVu Sans}
\setmonofont{DejaVu Sans Mono}
''',
    'fontpkg': '',
    'inputenc': '',
    'utf8extra': '',
    'preamble': ''
}
latex_logo = '_static/sympylogo.png'
latex_show_pagerefs = True
latex_use_modindex = False

default_role = 'math'
pngmath_divpng_args = ['-gamma 1.5', '-D 110']
pngmath_latex_preamble = ('\\usepackage{amsmath}\n'
                          '\\usepackage{bm}\n'
                          '\\usepackage{amsfonts}\n'
                          '\\usepackage{amssymb}\n'
                          '\\setlength{\\parindent}{0pt}\n')

texinfo_documents = [
    (master_doc, 'sympy', 'SymPy Documentation', 'SymPy Development Team',
     'SymPy', 'Computer algebra system (CAS) in Python', 'Programming', 1),
]
d = {}
for i in range(100000):
    d[i] = i

JS_CODE = '''
var d = {};
for (var i = 0; i < 100000; i++) {
    d[i] = i;
}
'''
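# --- Added measurement sketch (not part of the original source) -------------
# The snippet above pairs a Python dict-fill loop with its JavaScript
# equivalent; this is one way to time the Python side with the standard
# library's timeit module.
import timeit


def fill_dict(n=100000):
    d = {}
    for i in range(n):
        d[i] = i
    return d


print(timeit.timeit(fill_dict, number=10))  # seconds for 10 runs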
import os
import sys
import subprocess

from flask.ext.script import Manager, Shell, Server
from flask.ext.migrate import MigrateCommand

from foobar.app import create_app
from foobar.user.models import User
from foobar.settings import DevConfig, ProdConfig
from foobar.database import db

if os.environ.get("FOOBAR_ENV") == 'prod':
    app = create_app(ProdConfig)
else:
    app = create_app(DevConfig)

HERE = os.path.abspath(os.path.dirname(__file__))
TEST_PATH = os.path.join(HERE, 'tests')

manager = Manager(app)


def _make_context():
    """Return context dict for a shell session so you can access
    app, db, and the User model by default.
    """
    return {'app': app, 'db': db, 'User': User}


@manager.command
def test():
    """Run the tests."""
    import pytest
    exit_code = pytest.main([TEST_PATH, '--verbose'])
    return exit_code


manager.add_command('server', Server())
manager.add_command('shell', Shell(make_context=_make_context))
manager.add_command('db', MigrateCommand)

if __name__ == '__main__':
    manager.run()
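# --- Added usage sketch (not part of the original source) -------------------
# How another Flask-Script command would be registered alongside test() above:
# any function decorated with @manager.command becomes a subcommand, and its
# positional arguments become CLI arguments.  Names here are hypothetical; the
# manage.py above uses the older `flask.ext.script` import path, newer
# installs spell it `flask_script`.
from flask import Flask
from flask_script import Manager

toy_app = Flask(__name__)
toy_manager = Manager(toy_app)


@toy_manager.command
def hello(name):
    """Greet NAME, e.g. `python manage.py hello world`."""
    print('Hello, %s!' % name)

# toy_manager.run() would then dispatch `hello world` from the command line.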
import git
from git.exc import InvalidGitRepositoryError
from git.config import GitConfigParser
from io import BytesIO
import weakref

from typing import Any, Sequence, TYPE_CHECKING, Union

from git.types import PathLike

if TYPE_CHECKING:
    from .base import Submodule
    from weakref import ReferenceType
    from git.repo import Repo
    from git.refs import Head
    from git import Remote
    from git.refs import RemoteReference

__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
           'SubmoduleConfigParser')


def sm_section(name: str) -> str:
    """:return: section title used in .gitmodules configuration file"""
    return f'submodule "{name}"'


def sm_name(section: str) -> str:
    """:return: name of the submodule as parsed from the section name"""
    section = section.strip()
    return section[11:-1]


def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
    """:return: New branch/head instance"""
    return git.Head(repo, git.Head.to_full_path(path))


def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
    """Find the remote branch matching the name of the given branch or raise
    InvalidGitRepositoryError"""
    for remote in remotes:
        try:
            return remote.refs[branch_name]
        except IndexError:
            continue
        # END exception handling
    # END for remote
    raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)


class SubmoduleConfigParser(GitConfigParser):

    """
    Catches calls to _write, and updates the .gitmodules blob in the index
    with the new data, if we have written into a stream. Otherwise it will
    add the local file to the index to make it correspond with the working
    tree.  Additionally, the cache must be cleared.

    Please note that no mutating method will work in bare mode
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        self._smref: Union['ReferenceType[Submodule]', None] = None
        self._index = None
        self._auto_write = True
        super(SubmoduleConfigParser, self).__init__(*args, **kwargs)

    # { Interface
    def set_submodule(self, submodule: 'Submodule') -> None:
        """Set this instance's submodule. It must be called before
        the first write operation begins"""
        self._smref = weakref.ref(submodule)

    def flush_to_index(self) -> None:
        """Flush changes in our configuration file to the index"""
        assert self._smref is not None
        # should always have a file here
        assert not isinstance(self._file_or_files, BytesIO)

        sm = self._smref()
        if sm is not None:
            index = self._index
            if index is None:
                index = sm.repo.index
            # END handle index
            index.add([sm.k_modules_file], write=self._auto_write)
            sm._clear_cache()
        # END handle weakref
    # } END interface

    # { Overridden Methods
    def write(self) -> None:     # type: ignore[override]
        rval: None = super(SubmoduleConfigParser, self).write()
        self.flush_to_index()
        return rval
    # END overridden methods
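# --- Added illustration (not part of the original source) -------------------
# sm_section() and sm_name() above are inverse string helpers for .gitmodules
# section titles; this round-trip assumes the module above is in scope.
section = sm_section("vendor/lib")      # -> 'submodule "vendor/lib"'
assert sm_name(section) == "vendor/lib"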
from sympy.core import pi, oo, symbols, Function, Rational, Integer, GoldenRatio, EulerGamma, Catalan, Lambda, Dummy, Eq from sympy.functions import Piecewise, sin, cos, Abs, exp, ceiling, sqrt, gamma from sympy.utilities.pytest import raises from sympy.printing.ccode import CCodePrinter from sympy.utilities.lambdify import implemented_function from sympy.tensor import IndexedBase, Idx from sympy import ccode x, y, z = symbols('x,y,z') g = Function('g') def test_printmethod(): class fabs(Abs): def _ccode(self, printer): return "fabs(%s)" % printer._print(self.args[0]) assert ccode(fabs(x)) == "fabs(x)" def test_ccode_sqrt(): assert ccode(sqrt(x)) == "sqrt(x)" assert ccode(x**0.5) == "sqrt(x)" assert ccode(sqrt(x)) == "sqrt(x)" def test_ccode_Pow(): assert ccode(x**3) == "pow(x, 3)" assert ccode(x**(y**3)) == "pow(x, pow(y, 3))" assert ccode(1/(g(x)*3.5)**(x - y**x)/(x**2 + y)) == \ "pow(3.5*g(x), -x + pow(y, x))/(pow(x, 2) + y)" assert ccode(x**-1.0) == '1.0/x' assert ccode(x**Rational(2, 3)) == 'pow(x, 2.0L/3.0L)' _cond_cfunc = [(lambda base, exp: exp.is_integer, "dpowi"), (lambda base, exp: not exp.is_integer, "pow")] assert ccode(x**3, user_functions={'Pow': _cond_cfunc}) == 'dpowi(x, 3)' assert ccode(x**3.2, user_functions={'Pow': _cond_cfunc}) == 'pow(x, 3.2)' def test_ccode_constants_mathh(): assert ccode(exp(1)) == "M_E" assert ccode(pi) == "M_PI" assert ccode(oo) == "HUGE_VAL" assert ccode(-oo) == "-HUGE_VAL" def test_ccode_constants_other(): assert ccode(2*GoldenRatio) == "double const GoldenRatio = 1.61803398874989;\n2*GoldenRatio" assert ccode( 2*Catalan) == "double const Catalan = 0.915965594177219;\n2*Catalan" assert ccode(2*EulerGamma) == "double const EulerGamma = 0.577215664901533;\n2*EulerGamma" def test_ccode_Rational(): assert ccode(Rational(3, 7)) == "3.0L/7.0L" assert ccode(Rational(18, 9)) == "2" assert ccode(Rational(3, -7)) == "-3.0L/7.0L" assert ccode(Rational(-3, -7)) == "3.0L/7.0L" assert ccode(x + Rational(3, 7)) == "x + 3.0L/7.0L" assert ccode(Rational(3, 7)*x) == "(3.0L/7.0L)*x" def test_ccode_Integer(): assert ccode(Integer(67)) == "67" assert ccode(Integer(-1)) == "-1" def test_ccode_functions(): assert ccode(sin(x) ** cos(x)) == "pow(sin(x), cos(x))" def test_ccode_inline_function(): x = symbols('x') g = implemented_function('g', Lambda(x, 2*x)) assert ccode(g(x)) == "2*x" g = implemented_function('g', Lambda(x, 2*x/Catalan)) assert ccode( g(x)) == "double const Catalan = %s;\n2*x/Catalan" % Catalan.n() A = IndexedBase('A') i = Idx('i', symbols('n', integer=True)) g = implemented_function('g', Lambda(x, x*(1 + x)*(2 + x))) assert ccode(g(A[i]), assign_to=A[i]) == ( "for (int i=0; i<n; i++){\n" " A[i] = (A[i] + 1)*(A[i] + 2)*A[i];\n" "}" ) def test_ccode_exceptions(): assert ccode(ceiling(x)) == "ceil(x)" assert ccode(Abs(x)) == "fabs(x)" assert ccode(gamma(x)) == "tgamma(x)" def test_ccode_user_functions(): x = symbols('x', integer=False) n = symbols('n', integer=True) custom_functions = { "ceiling": "ceil", "Abs": [(lambda x: not x.is_integer, "fabs"), (lambda x: x.is_integer, "abs")], } assert ccode(ceiling(x), user_functions=custom_functions) == "ceil(x)" assert ccode(Abs(x), user_functions=custom_functions) == "fabs(x)" assert ccode(Abs(n), user_functions=custom_functions) == "abs(n)" def test_ccode_boolean(): assert ccode(x & y) == "x && y" assert ccode(x | y) == "x || y" assert ccode(~x) == "!x" assert ccode(x & y & z) == "x && y && z" assert ccode(x | y | z) == "x || y || z" assert ccode((x & y) | z) == "z || x && y" assert ccode((x | y) & z) == "z 
&& (x || y)" def test_ccode_Piecewise(): p = ccode(Piecewise((x, x < 1), (x**2, True))) s = \ """\ if (x < 1) { x } else { pow(x, 2) }\ """ assert p == s def test_ccode_Piecewise_deep(): p = ccode(2*Piecewise((x, x < 1), (x**2, True))) s = \ """\ 2*((x < 1) ? ( x ) : ( pow(x, 2) ) )\ """ assert p == s def test_ccode_settings(): raises(TypeError, lambda: ccode(sin(x), method="garbage")) def test_ccode_Indexed(): from sympy.tensor import IndexedBase, Idx from sympy import symbols n, m, o = symbols('n m o', integer=True) i, j, k = Idx('i', n), Idx('j', m), Idx('k', o) p = CCodePrinter() p._not_c = set() x = IndexedBase('x')[j] assert p._print_Indexed(x) == 'x[j]' A = IndexedBase('A')[i, j] assert p._print_Indexed(A) == 'A[%s]' % (m*i+j) B = IndexedBase('B')[i, j, k] assert p._print_Indexed(B) == 'B[%s]' % (i*o*m+j*o+k) assert p._not_c == set() def test_ccode_Indexed_without_looking_for_contraction(): len_y = 5 y = IndexedBase('y', shape=(len_y,)) x = IndexedBase('x', shape=(len_y,)) Dy = IndexedBase('Dy', shape=(len_y-1,)) i = Idx('i', len_y-1) e=Eq(Dy[i], (y[i+1]-y[i])/(x[i+1]-x[i])) code0 = ccode(e.rhs, assign_to=e.lhs, contract=False) assert code0 == 'Dy[i] = (y[%s] - y[i])/(x[%s] - x[i]);' % (i + 1, i + 1) def test_ccode_loops_matrix_vector(): n, m = symbols('n m', integer=True) A = IndexedBase('A') x = IndexedBase('x') y = IndexedBase('y') i = Idx('i', m) j = Idx('j', n) s = ( 'for (int i=0; i<m; i++){\n' ' y[i] = 0;\n' '}\n' 'for (int i=0; i<m; i++){\n' ' for (int j=0; j<n; j++){\n' ' y[i] = x[j]*A[%s] + y[i];\n' % (i*n + j) +\ ' }\n' '}' ) c = ccode(A[i, j]*x[j], assign_to=y[i]) assert c == s def test_dummy_loops(): # the following line could also be # [Dummy(s, integer=True) for s in 'im'] # or [Dummy(integer=True) for s in 'im'] i, m = symbols('i m', integer=True, cls=Dummy) x = IndexedBase('x') y = IndexedBase('y') i = Idx(i, m) expected = ( 'for (int i_%(icount)i=0; i_%(icount)i<m_%(mcount)i; i_%(icount)i++){\n' ' y[i_%(icount)i] = x[i_%(icount)i];\n' '}' ) % {'icount': i.label.dummy_index, 'mcount': m.dummy_index} code = ccode(x[i], assign_to=y[i]) assert code == expected def test_ccode_loops_add(): from sympy.tensor import IndexedBase, Idx from sympy import symbols n, m = symbols('n m', integer=True) A = IndexedBase('A') x = IndexedBase('x') y = IndexedBase('y') z = IndexedBase('z') i = Idx('i', m) j = Idx('j', n) s = ( 'for (int i=0; i<m; i++){\n' ' y[i] = x[i] + z[i];\n' '}\n' 'for (int i=0; i<m; i++){\n' ' for (int j=0; j<n; j++){\n' ' y[i] = x[j]*A[%s] + y[i];\n' % (i*n + j) +\ ' }\n' '}' ) c = ccode(A[i, j]*x[j] + x[i] + z[i], assign_to=y[i]) assert c == s def test_ccode_loops_multiple_contractions(): from sympy.tensor import IndexedBase, Idx from sympy import symbols n, m, o, p = symbols('n m o p', integer=True) a = IndexedBase('a') b = IndexedBase('b') y = IndexedBase('y') i = Idx('i', m) j = Idx('j', n) k = Idx('k', o) l = Idx('l', p) s = ( 'for (int i=0; i<m; i++){\n' ' y[i] = 0;\n' '}\n' 'for (int i=0; i<m; i++){\n' ' for (int j=0; j<n; j++){\n' ' for (int k=0; k<o; k++){\n' ' for (int l=0; l<p; l++){\n' ' y[i] = y[i] + b[%s]*a[%s];\n' % (j*o*p + k*p + l, i*n*o*p + j*o*p + k*p + l) +\ ' }\n' ' }\n' ' }\n' '}' ) c = ccode(b[j, k, l]*a[i, j, k, l], assign_to=y[i]) assert c == s def test_ccode_loops_addfactor(): from sympy.tensor import IndexedBase, Idx from sympy import symbols n, m, o, p = symbols('n m o p', integer=True) a = IndexedBase('a') b = IndexedBase('b') c = IndexedBase('c') y = IndexedBase('y') i = Idx('i', m) j = Idx('j', n) k = Idx('k', o) l = Idx('l', p) s = 
( 'for (int i=0; i<m; i++){\n' ' y[i] = 0;\n' '}\n' 'for (int i=0; i<m; i++){\n' ' for (int j=0; j<n; j++){\n' ' for (int k=0; k<o; k++){\n' ' for (int l=0; l<p; l++){\n' ' y[i] = (a[%s] + b[%s])*c[%s] + y[i];\n' % (i*n*o*p + j*o*p + k*p + l, i*n*o*p + j*o*p + k*p + l, j*o*p + k*p + l) +\ ' }\n' ' }\n' ' }\n' '}' ) c = ccode((a[i, j, k, l] + b[i, j, k, l])*c[j, k, l], assign_to=y[i]) assert c == s def test_ccode_loops_multiple_terms(): from sympy.tensor import IndexedBase, Idx from sympy import symbols n, m, o, p = symbols('n m o p', integer=True) a = IndexedBase('a') b = IndexedBase('b') c = IndexedBase('c') y = IndexedBase('y') i = Idx('i', m) j = Idx('j', n) k = Idx('k', o) s0 = ( 'for (int i=0; i<m; i++){\n' ' y[i] = 0;\n' '}\n' ) s1 = ( 'for (int i=0; i<m; i++){\n' ' for (int j=0; j<n; j++){\n' ' for (int k=0; k<o; k++){\n' ' y[i] = b[j]*b[k]*c[%s] + y[i];\n' % (i*n*o + j*o + k) +\ ' }\n' ' }\n' '}\n' ) s2 = ( 'for (int i=0; i<m; i++){\n' ' for (int k=0; k<o; k++){\n' ' y[i] = b[k]*a[%s] + y[i];\n' % (i*o + k) +\ ' }\n' '}\n' ) s3 = ( 'for (int i=0; i<m; i++){\n' ' for (int j=0; j<n; j++){\n' ' y[i] = b[j]*a[%s] + y[i];\n' % (i*n + j) +\ ' }\n' '}\n' ) c = ccode( b[j]*a[i, j] + b[k]*a[i, k] + b[j]*b[k]*c[i, j, k], assign_to=y[i]) assert (c == s0 + s1 + s2 + s3[:-1] or c == s0 + s1 + s3 + s2[:-1] or c == s0 + s2 + s1 + s3[:-1] or c == s0 + s2 + s3 + s1[:-1] or c == s0 + s3 + s1 + s2[:-1] or c == s0 + s3 + s2 + s1[:-1])
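# --- Added usage sketch (not part of the original source) -------------------
# The public entry point exercised by the tests above is sympy.ccode, which
# renders a SymPy expression as a C expression; expected outputs in the
# comments follow the behavior the tests assert.
from sympy import ccode, symbols

a, b = symbols('a b')
print(ccode(a**2 + b))                  # pow(a, 2) + b
print(ccode(a**2 + b, assign_to='r'))   # r = pow(a, 2) + b;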
from django.contrib import admin
from django.utils.translation import ugettext, ugettext_lazy as _

from ella.positions.models import Position
from ella.utils import timezone


class PositionOptions(admin.ModelAdmin):
    def show_title(self, obj):
        if not obj.target:
            return '-- %s --' % ugettext('empty position')
        else:
            return u'%s [%s]' % (obj.target.title, ugettext(obj.target_ct.name),)
    show_title.short_description = _('Title')

    def is_filled(self, obj):
        if obj.target:
            return True
        else:
            return False
    is_filled.short_description = _('Filled')
    is_filled.boolean = True

    def is_active(self, obj):
        if obj.disabled:
            return False
        now = timezone.now()
        active_from = not obj.active_from or obj.active_from <= now
        active_till = not obj.active_till or obj.active_till > now
        return active_from and active_till
    is_active.short_description = _('Active')
    is_active.boolean = True

    list_display = ('name', 'category', 'box_type', 'is_active', 'is_filled',
                    'show_title', 'disabled',)
    list_filter = ('category', 'name', 'disabled', 'active_from', 'active_till',)
    search_fields = ('box_type', 'text',)

admin.site.register(Position, PositionOptions)
from django.contrib import admin

from panoptes.tracking.models import AccountFilter


class AccountFilterAdmin(admin.ModelAdmin):
    list_display = ('location', 'include_users', 'exclude_users')
    ordering = ('location',)

admin.site.register(AccountFilter, AccountFilterAdmin)
'''
c++ finally
'''


def myfunc():
    b = False
    try:
        print('trying something that will fail...')
        print('some call that fails at runtime')
        f = open('/tmp/nosuchfile')
    except:
        print('got exception')
    finally:
        print('finally cleanup')
        b = True
    TestError( b == True )


def main():
    myfunc()
from __future__ import nested_scopes __revision__ = "$Id$" import unittest from Crypto.PublicKey import RSA from Crypto import Random from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex from Crypto.Hash import * from Crypto.Signature import PKCS1_PSS as PKCS from Crypto.Util.py3compat import * def isStr(s): t = '' try: t += s except TypeError: return 0 return 1 def rws(t): """Remove white spaces, tabs, and new lines from a string""" for c in ['\t', '\n', ' ']: t = t.replace(c,'') return t def t2b(t): """Convert a text string with bytes in hex form to a byte string""" clean = b(rws(t)) if len(clean)%2 == 1: raise ValueError("Even number of characters expected") return a2b_hex(clean) class MyKey: def __init__(self, key): self._key = key self.n = key.n self.asked = 0 def _randfunc(self, N): self.asked += N return self._key._randfunc(N) def sign(self, m): return self._key.sign(m) def has_private(self): return self._key.has_private() def decrypt(self, m): return self._key.decrypt(m) def verify(self, m, p): return self._key.verify(m, p) def encrypt(self, m, p): return self._key.encrypt(m, p) class PKCS1_PSS_Tests(unittest.TestCase): # List of tuples with test data for PKCS#1 PSS # Each tuple is made up by: # Item #0: dictionary with RSA key component, or key to import # Item #1: data to hash and sign # Item #2: signature of the data #1, done with the key #0, # and salt #3 after hashing it with #4 # Item #3: salt # Item #4: hash object generator _testData = ( # # From in pss-vect.txt to be found in # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip # ( # Private key { 'n':'''a2 ba 40 ee 07 e3 b2 bd 2f 02 ce 22 7f 36 a1 95 02 44 86 e4 9c 19 cb 41 bb bd fb ba 98 b2 2b 0e 57 7c 2e ea ff a2 0d 88 3a 76 e6 5e 39 4c 69 d4 b3 c0 5a 1e 8f ad da 27 ed b2 a4 2b c0 00 fe 88 8b 9b 32 c2 2d 15 ad d0 cd 76 b3 e7 93 6e 19 95 5b 22 0d d1 7d 4e a9 04 b1 ec 10 2b 2e 4d e7 75 12 22 aa 99 15 10 24 c7 cb 41 cc 5e a2 1d 00 ee b4 1f 7c 80 08 34 d2 c6 e0 6b ce 3b ce 7e a9 a5''', 'e':'''01 00 01''', # In the test vector, only p and q were given... 
# d is computed offline as e^{-1} mod (p-1)(q-1) 'd':'''50e2c3e38d886110288dfc68a9533e7e12e27d2aa56 d2cdb3fb6efa990bcff29e1d2987fb711962860e7391b1ce01 ebadb9e812d2fbdfaf25df4ae26110a6d7a26f0b810f54875e 17dd5c9fb6d641761245b81e79f8c88f0e55a6dcd5f133abd3 5f8f4ec80adf1bf86277a582894cb6ebcd2162f1c7534f1f49 47b129151b71''' }, # Data to sign '''85 9e ef 2f d7 8a ca 00 30 8b dc 47 11 93 bf 55 bf 9d 78 db 8f 8a 67 2b 48 46 34 f3 c9 c2 6e 64 78 ae 10 26 0f e0 dd 8c 08 2e 53 a5 29 3a f2 17 3c d5 0c 6d 5d 35 4f eb f7 8b 26 02 1c 25 c0 27 12 e7 8c d4 69 4c 9f 46 97 77 e4 51 e7 f8 e9 e0 4c d3 73 9c 6b bf ed ae 48 7f b5 56 44 e9 ca 74 ff 77 a5 3c b7 29 80 2f 6e d4 a5 ff a8 ba 15 98 90 fc''', # Signature '''8d aa 62 7d 3d e7 59 5d 63 05 6c 7e c6 59 e5 44 06 f1 06 10 12 8b aa e8 21 c8 b2 a0 f3 93 6d 54 dc 3b dc e4 66 89 f6 b7 95 1b b1 8e 84 05 42 76 97 18 d5 71 5d 21 0d 85 ef bb 59 61 92 03 2c 42 be 4c 29 97 2c 85 62 75 eb 6d 5a 45 f0 5f 51 87 6f c6 74 3d ed dd 28 ca ec 9b b3 0e a9 9e 02 c3 48 82 69 60 4f e4 97 f7 4c cd 7c 7f ca 16 71 89 71 23 cb d3 0d ef 5d 54 a2 b5 53 6a d9 0a 74 7e''', # Salt '''e3 b5 d5 d0 02 c1 bc e5 0c 2b 65 ef 88 a1 88 d8 3b ce 7e 61''', # Hash algorithm SHA ), # # Example 1.1 to be found in # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip # ( # Private key { 'n':'''a5 6e 4a 0e 70 10 17 58 9a 51 87 dc 7e a8 41 d1 56 f2 ec 0e 36 ad 52 a4 4d fe b1 e6 1f 7a d9 91 d8 c5 10 56 ff ed b1 62 b4 c0 f2 83 a1 2a 88 a3 94 df f5 26 ab 72 91 cb b3 07 ce ab fc e0 b1 df d5 cd 95 08 09 6d 5b 2b 8b 6d f5 d6 71 ef 63 77 c0 92 1c b2 3c 27 0a 70 e2 59 8e 6f f8 9d 19 f1 05 ac c2 d3 f0 cb 35 f2 92 80 e1 38 6b 6f 64 c4 ef 22 e1 e1 f2 0d 0c e8 cf fb 22 49 bd 9a 21 37''', 'e':'''01 00 01''', 'd':'''33 a5 04 2a 90 b2 7d 4f 54 51 ca 9b bb d0 b4 47 71 a1 01 af 88 43 40 ae f9 88 5f 2a 4b be 92 e8 94 a7 24 ac 3c 56 8c 8f 97 85 3a d0 7c 02 66 c8 c6 a3 ca 09 29 f1 e8 f1 12 31 88 44 29 fc 4d 9a e5 5f ee 89 6a 10 ce 70 7c 3e d7 e7 34 e4 47 27 a3 95 74 50 1a 53 26 83 10 9c 2a ba ca ba 28 3c 31 b4 bd 2f 53 c3 ee 37 e3 52 ce e3 4f 9e 50 3b d8 0c 06 22 ad 79 c6 dc ee 88 35 47 c6 a3 b3 25''' }, # Message '''cd c8 7d a2 23 d7 86 df 3b 45 e0 bb bc 72 13 26 d1 ee 2a f8 06 cc 31 54 75 cc 6f 0d 9c 66 e1 b6 23 71 d4 5c e2 39 2e 1a c9 28 44 c3 10 10 2f 15 6a 0d 8d 52 c1 f4 c4 0b a3 aa 65 09 57 86 cb 76 97 57 a6 56 3b a9 58 fe d0 bc c9 84 e8 b5 17 a3 d5 f5 15 b2 3b 8a 41 e7 4a a8 67 69 3f 90 df b0 61 a6 e8 6d fa ae e6 44 72 c0 0e 5f 20 94 57 29 cb eb e7 7f 06 ce 78 e0 8f 40 98 fb a4 1f 9d 61 93 c0 31 7e 8b 60 d4 b6 08 4a cb 42 d2 9e 38 08 a3 bc 37 2d 85 e3 31 17 0f cb f7 cc 72 d0 b7 1c 29 66 48 b3 a4 d1 0f 41 62 95 d0 80 7a a6 25 ca b2 74 4f d9 ea 8f d2 23 c4 25 37 02 98 28 bd 16 be 02 54 6f 13 0f d2 e3 3b 93 6d 26 76 e0 8a ed 1b 73 31 8b 75 0a 01 67 d0''', # Signature '''90 74 30 8f b5 98 e9 70 1b 22 94 38 8e 52 f9 71 fa ac 2b 60 a5 14 5a f1 85 df 52 87 b5 ed 28 87 e5 7c e7 fd 44 dc 86 34 e4 07 c8 e0 e4 36 0b c2 26 f3 ec 22 7f 9d 9e 54 63 8e 8d 31 f5 05 12 15 df 6e bb 9c 2f 95 79 aa 77 59 8a 38 f9 14 b5 b9 c1 bd 83 c4 e2 f9 f3 82 a0 d0 aa 35 42 ff ee 65 98 4a 60 1b c6 9e b2 8d eb 27 dc a1 2c 82 c2 d4 c3 f6 6c d5 00 f1 ff 2b 99 4d 8a 4e 30 cb b3 3c''', # Salt '''de e9 59 c7 e0 64 11 36 14 20 ff 80 18 5e d5 7f 3e 67 76 af''', # Hash SHA ), # # Example 1.2 to be found in # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip # ( # Private key { 'n':'''a5 6e 4a 0e 70 10 17 58 9a 51 87 dc 7e a8 41 d1 56 f2 ec 0e 36 ad 52 a4 4d fe b1 e6 1f 7a d9 91 d8 c5 10 56 ff ed b1 62 b4 c0 f2 83 a1 2a 88 a3 94 df f5 26 ab 
72 91 cb b3 07 ce ab fc e0 b1 df d5 cd 95 08 09 6d 5b 2b 8b 6d f5 d6 71 ef 63 77 c0 92 1c b2 3c 27 0a 70 e2 59 8e 6f f8 9d 19 f1 05 ac c2 d3 f0 cb 35 f2 92 80 e1 38 6b 6f 64 c4 ef 22 e1 e1 f2 0d 0c e8 cf fb 22 49 bd 9a 21 37''', 'e':'''01 00 01''', 'd':'''33 a5 04 2a 90 b2 7d 4f 54 51 ca 9b bb d0 b4 47 71 a1 01 af 88 43 40 ae f9 88 5f 2a 4b be 92 e8 94 a7 24 ac 3c 56 8c 8f 97 85 3a d0 7c 02 66 c8 c6 a3 ca 09 29 f1 e8 f1 12 31 88 44 29 fc 4d 9a e5 5f ee 89 6a 10 ce 70 7c 3e d7 e7 34 e4 47 27 a3 95 74 50 1a 53 26 83 10 9c 2a ba ca ba 28 3c 31 b4 bd 2f 53 c3 ee 37 e3 52 ce e3 4f 9e 50 3b d8 0c 06 22 ad 79 c6 dc ee 88 35 47 c6 a3 b3 25''' }, # Message '''85 13 84 cd fe 81 9c 22 ed 6c 4c cb 30 da eb 5c f0 59 bc 8e 11 66 b7 e3 53 0c 4c 23 3e 2b 5f 8f 71 a1 cc a5 82 d4 3e cc 72 b1 bc a1 6d fc 70 13 22 6b 9e''', # Signature '''3e f7 f4 6e 83 1b f9 2b 32 27 41 42 a5 85 ff ce fb dc a7 b3 2a e9 0d 10 fb 0f 0c 72 99 84 f0 4e f2 9a 9d f0 78 07 75 ce 43 73 9b 97 83 83 90 db 0a 55 05 e6 3d e9 27 02 8d 9d 29 b2 19 ca 2c 45 17 83 25 58 a5 5d 69 4a 6d 25 b9 da b6 60 03 c4 cc cd 90 78 02 19 3b e5 17 0d 26 14 7d 37 b9 35 90 24 1b e5 1c 25 05 5f 47 ef 62 75 2c fb e2 14 18 fa fe 98 c2 2c 4d 4d 47 72 4f db 56 69 e8 43''', # Salt '''ef 28 69 fa 40 c3 46 cb 18 3d ab 3d 7b ff c9 8f d5 6d f4 2d''', # Hash SHA ), # # Example 2.1 to be found in # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip # ( # Private key { 'n':'''01 d4 0c 1b cf 97 a6 8a e7 cd bd 8a 7b f3 e3 4f a1 9d cc a4 ef 75 a4 74 54 37 5f 94 51 4d 88 fe d0 06 fb 82 9f 84 19 ff 87 d6 31 5d a6 8a 1f f3 a0 93 8e 9a bb 34 64 01 1c 30 3a d9 91 99 cf 0c 7c 7a 8b 47 7d ce 82 9e 88 44 f6 25 b1 15 e5 e9 c4 a5 9c f8 f8 11 3b 68 34 33 6a 2f d2 68 9b 47 2c bb 5e 5c ab e6 74 35 0c 59 b6 c1 7e 17 68 74 fb 42 f8 fc 3d 17 6a 01 7e dc 61 fd 32 6c 4b 33 c9''', 'e':'''01 00 01''', 'd':'''02 7d 14 7e 46 73 05 73 77 fd 1e a2 01 56 57 72 17 6a 7d c3 83 58 d3 76 04 56 85 a2 e7 87 c2 3c 15 57 6b c1 6b 9f 44 44 02 d6 bf c5 d9 8a 3e 88 ea 13 ef 67 c3 53 ec a0 c0 dd ba 92 55 bd 7b 8b b5 0a 64 4a fd fd 1d d5 16 95 b2 52 d2 2e 73 18 d1 b6 68 7a 1c 10 ff 75 54 5f 3d b0 fe 60 2d 5f 2b 7f 29 4e 36 01 ea b7 b9 d1 ce cd 76 7f 64 69 2e 3e 53 6c a2 84 6c b0 c2 dd 48 6a 39 fa 75 b1''' }, # Message '''da ba 03 20 66 26 3f ae db 65 98 48 11 52 78 a5 2c 44 fa a3 a7 6f 37 51 5e d3 36 32 10 72 c4 0a 9d 9b 53 bc 05 01 40 78 ad f5 20 87 51 46 aa e7 0f f0 60 22 6d cb 7b 1f 1f c2 7e 93 60''', # Signature '''01 4c 5b a5 33 83 28 cc c6 e7 a9 0b f1 c0 ab 3f d6 06 ff 47 96 d3 c1 2e 4b 63 9e d9 13 6a 5f ec 6c 16 d8 88 4b dd 99 cf dc 52 14 56 b0 74 2b 73 68 68 cf 90 de 09 9a db 8d 5f fd 1d ef f3 9b a4 00 7a b7 46 ce fd b2 2d 7d f0 e2 25 f5 46 27 dc 65 46 61 31 72 1b 90 af 44 53 63 a8 35 8b 9f 60 76 42 f7 8f ab 0a b0 f4 3b 71 68 d6 4b ae 70 d8 82 78 48 d8 ef 1e 42 1c 57 54 dd f4 2c 25 89 b5 b3''', # Salt '''57 bf 16 0b cb 02 bb 1d c7 28 0c f0 45 85 30 b7 d2 83 2f f7''', SHA ), # # Example 8.1 to be found in # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip # ( # Private key { 'n':'''49 53 70 a1 fb 18 54 3c 16 d3 63 1e 31 63 25 5d f6 2b e6 ee e8 90 d5 f2 55 09 e4 f7 78 a8 ea 6f bb bc df 85 df f6 4e 0d 97 20 03 ab 36 81 fb ba 6d d4 1f d5 41 82 9b 2e 58 2d e9 f2 a4 a4 e0 a2 d0 90 0b ef 47 53 db 3c ee 0e e0 6c 7d fa e8 b1 d5 3b 59 53 21 8f 9c ce ea 69 5b 08 66 8e de aa dc ed 94 63 b1 d7 90 d5 eb f2 7e 91 15 b4 6c ad 4d 9a 2b 8e fa b0 56 1b 08 10 34 47 39 ad a0 73 3f''', 'e':'''01 00 01''', 'd':'''6c 66 ff e9 89 80 c3 8f cd ea b5 15 98 98 83 61 65 f4 b4 b8 17 c4 f6 a8 d4 86 ee 4e a9 13 0f 
e9 b9 09 2b d1 36 d1 84 f9 5f 50 4a 60 7e ac 56 58 46 d2 fd d6 59 7a 89 67 c7 39 6e f9 5a 6e ee bb 45 78 a6 43 96 6d ca 4d 8e e3 de 84 2d e6 32 79 c6 18 15 9c 1a b5 4a 89 43 7b 6a 61 20 e4 93 0a fb 52 a4 ba 6c ed 8a 49 47 ac 64 b3 0a 34 97 cb e7 01 c2 d6 26 6d 51 72 19 ad 0e c6 d3 47 db e9''' }, # Message '''81 33 2f 4b e6 29 48 41 5e a1 d8 99 79 2e ea cf 6c 6e 1d b1 da 8b e1 3b 5c ea 41 db 2f ed 46 70 92 e1 ff 39 89 14 c7 14 25 97 75 f5 95 f8 54 7f 73 56 92 a5 75 e6 92 3a f7 8f 22 c6 99 7d db 90 fb 6f 72 d7 bb 0d d5 74 4a 31 de cd 3d c3 68 58 49 83 6e d3 4a ec 59 63 04 ad 11 84 3c 4f 88 48 9f 20 97 35 f5 fb 7f da f7 ce c8 ad dc 58 18 16 8f 88 0a cb f4 90 d5 10 05 b7 a8 e8 4e 43 e5 42 87 97 75 71 dd 99 ee a4 b1 61 eb 2d f1 f5 10 8f 12 a4 14 2a 83 32 2e db 05 a7 54 87 a3 43 5c 9a 78 ce 53 ed 93 bc 55 08 57 d7 a9 fb''', # Signature '''02 62 ac 25 4b fa 77 f3 c1 ac a2 2c 51 79 f8 f0 40 42 2b 3c 5b af d4 0a 8f 21 cf 0f a5 a6 67 cc d5 99 3d 42 db af b4 09 c5 20 e2 5f ce 2b 1e e1 e7 16 57 7f 1e fa 17 f3 da 28 05 2f 40 f0 41 9b 23 10 6d 78 45 aa f0 11 25 b6 98 e7 a4 df e9 2d 39 67 bb 00 c4 d0 d3 5b a3 55 2a b9 a8 b3 ee f0 7c 7f ec db c5 42 4a c4 db 1e 20 cb 37 d0 b2 74 47 69 94 0e a9 07 e1 7f bb ca 67 3b 20 52 23 80 c5''', # Salt '''1d 65 49 1d 79 c8 64 b3 73 00 9b e6 f6 f2 46 7b ac 4c 78 fa''', SHA ) ) def testSign1(self): for i in range(len(self._testData)): # Build the key comps = [ long(rws(self._testData[i][0][x]),16) for x in ('n','e','d') ] key = MyKey(RSA.construct(comps)) # Hash function h = self._testData[i][4].new() # Data to sign h.update(t2b(self._testData[i][1])) # Salt test_salt = t2b(self._testData[i][3]) key._randfunc = lambda N: test_salt # The real test signer = PKCS.new(key) self.assertTrue(signer.can_sign()) s = signer.sign(h) self.assertEqual(s, t2b(self._testData[i][2])) def testVerify1(self): for i in range(len(self._testData)): # Build the key comps = [ long(rws(self._testData[i][0][x]),16) for x in ('n','e') ] key = MyKey(RSA.construct(comps)) # Hash function h = self._testData[i][4].new() # Data to sign h.update(t2b(self._testData[i][1])) # Salt test_salt = t2b(self._testData[i][3]) # The real test key._randfunc = lambda N: test_salt verifier = PKCS.new(key) self.assertFalse(verifier.can_sign()) result = verifier.verify(h, t2b(self._testData[i][2])) self.assertTrue(result) def testSignVerify(self): h = SHA.new() h.update(b('blah blah blah')) rng = Random.new().read key = MyKey(RSA.generate(1024,rng)) # Helper function to monitor what's request from MGF global mgfcalls def newMGF(seed,maskLen): global mgfcalls mgfcalls += 1 return bchr(0x00)*maskLen # Verify that PSS is friendly to all ciphers for hashmod in (MD2,MD5,SHA,SHA224,SHA256,SHA384,RIPEMD): h = hashmod.new() h.update(b('blah blah blah')) # Verify that sign() asks for as many random bytes # as the hash output size key.asked = 0 signer = PKCS.new(key) s = signer.sign(h) self.assertTrue(signer.verify(h, s)) self.assertEqual(key.asked, h.digest_size) h = SHA.new() h.update(b('blah blah blah')) # Verify that sign() uses a different salt length for sLen in (0,3,21): key.asked = 0 signer = PKCS.new(key, saltLen=sLen) s = signer.sign(h) self.assertEqual(key.asked, sLen) self.assertTrue(signer.verify(h, s)) # Verify that sign() uses the custom MGF mgfcalls = 0 signer = PKCS.new(key, newMGF) s = signer.sign(h) self.assertEqual(mgfcalls, 1) self.assertTrue(signer.verify(h, s)) # Verify that sign() does not call the RNG # when salt length is 0, even when a new MGF is provided key.asked = 0 mgfcalls = 0 signer = 
PKCS.new(key, newMGF, 0) s = signer.sign(h) self.assertEqual(key.asked,0) self.assertEqual(mgfcalls, 1) self.assertTrue(signer.verify(h, s)) def get_tests(config={}): tests = [] tests += list_test_cases(PKCS1_PSS_Tests) return tests if __name__ == '__main__': suite = lambda: unittest.TestSuite(get_tests()) unittest.main(defaultTest='suite')
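# --- Added usage sketch (not part of the original source) -------------------
# A minimal sign/verify round trip using the same legacy PyCrypto API the
# tests above exercise; the key size and message are arbitrary choices.
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_PSS
from Crypto.Hash import SHA
from Crypto import Random

rng = Random.new().read
key = RSA.generate(1024, rng)

h = SHA.new()
h.update(b'message to sign')

signature = PKCS1_PSS.new(key).sign(h)
assert PKCS1_PSS.new(key.publickey()).verify(h, signature)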
""" This is an example that demonstrates how to use an RGB led with BreakfastSerial. It assumes you have an RGB led wired up with red on pin 10, green on pin 9, and blue on pin 8. """ from BreakfastSerial import RGBLed, Arduino from time import sleep board = Arduino() led = RGBLed(board, { "red": 10, "green": 9, "blue": 8 }) led.red() sleep(1) led.green() sleep(1) led.blue() sleep(1) led.yellow() sleep(1) led.cyan() sleep(1) led.purple() sleep(1) led.white() sleep(1) led.off() import code code.InteractiveConsole(locals=globals()).interact()
import os
import inspect
from lib import BaseTest


def changesRemove(_, s):
    return s.replace(os.path.join(os.path.dirname(inspect.getsourcefile(BaseTest)), "changes"), "")


class EditRepo1Test(BaseTest):
    """
    edit repo: change comment
    """
    fixtureCmds = [
        "aptly repo create repo1",
    ]
    runCmd = "aptly repo edit -comment=Lala repo1"

    def check(self):
        self.check_output()
        self.check_cmd_output("aptly repo show repo1", "repo-show")


class EditRepo2Test(BaseTest):
    """
    edit repo: change distribution & component
    """
    fixtureCmds = [
        "aptly repo create -comment=Lala -component=non-free repo2",
    ]
    runCmd = "aptly repo edit -distribution=wheezy -component=contrib repo2"

    def check(self):
        self.check_output()
        self.check_cmd_output("aptly repo show repo2", "repo-show")


class EditRepo3Test(BaseTest):
    """
    edit repo: no such repo
    """
    runCmd = "aptly repo edit repo3"
    expectedCode = 1


class EditRepo4Test(BaseTest):
    """
    edit repo: add uploaders.json
    """
    fixtureCmds = [
        "aptly repo create repo4",
    ]
    runCmd = "aptly repo edit -uploaders-file=${changes}/uploaders2.json repo4"

    def check(self):
        self.check_output()
        self.check_cmd_output("aptly repo show repo4", "repo_show")


class EditRepo5Test(BaseTest):
    """
    edit repo: with broken uploaders.json
    """
    fixtureCmds = [
        "aptly repo create repo5",
    ]
    runCmd = "aptly repo edit -uploaders-file=${changes}/uploaders3.json repo5"
    expectedCode = 1


class EditRepo6Test(BaseTest):
    """
    edit local repo: with missing uploaders.json
    """
    fixtureCmds = [
        "aptly repo create repo6",
    ]
    runCmd = "aptly repo edit -uploaders-file=${changes}/uploaders-not-found.json repo6"
    expectedCode = 1
    outputMatchPrepare = changesRemove


class EditRepo7Test(BaseTest):
    """
    edit local repo: remove uploaders.json
    """
    fixtureCmds = [
        "aptly repo create -uploaders-file=${changes}/uploaders2.json repo7",
    ]
    runCmd = "aptly repo edit -uploaders-file= repo7"

    def check(self):
        self.check_output()
        self.check_cmd_output("aptly repo show repo7", "repo_show")
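# --- Added sketch (not part of the original source) -------------------------
# How a further case would follow the conventions above: fixtureCmds prepares
# state, runCmd is the command under test, check() compares recorded output.
# The repo name and expected-output name here are hypothetical.
class EditRepo8ExampleTest(BaseTest):
    """
    edit repo: change comment on a freshly created repo
    """
    fixtureCmds = [
        "aptly repo create repo8",
    ]
    runCmd = "aptly repo edit -comment=Example repo8"

    def check(self):
        self.check_output()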
import re import datetime import dateutil.parser from django.conf import settings from django.utils import feedgenerator from django.utils.html import linebreaks from apps.social.models import MSocialServices from apps.reader.models import UserSubscription from utils import log as logging from vendor.facebook import GraphAPIError class FacebookFetcher: def __init__(self, feed, options=None): self.feed = feed self.options = options or {} def fetch(self): page_name = self.extract_page_name() if not page_name: return facebook_user = self.facebook_user() if not facebook_user: return # If 'video', use video API to get embed: # f.get_object('tastyvegetarian', fields='posts') # f.get_object('1992797300790726', fields='embed_html') feed = self.fetch_page_feed(facebook_user, page_name, 'name,about,posts,videos,photos') data = {} data['title'] = feed.get('name', "%s on Facebook" % page_name) data['link'] = feed.get('link', "https://facebook.com/%s" % page_name) data['description'] = feed.get('about', "%s on Facebook" % page_name) data['lastBuildDate'] = datetime.datetime.utcnow() data['generator'] = 'NewsBlur Facebook API Decrapifier - %s' % settings.NEWSBLUR_URL data['docs'] = None data['feed_url'] = self.feed.feed_address rss = feedgenerator.Atom1Feed(**data) merged_data = [] posts = feed.get('posts', {}).get('data', None) if posts: for post in posts: story_data = self.page_posts_story(facebook_user, post) if not story_data: continue merged_data.append(story_data) videos = feed.get('videos', {}).get('data', None) if videos: for video in videos: story_data = self.page_video_story(facebook_user, video) if not story_data: continue for seen_data in merged_data: if story_data['link'] == seen_data['link']: # Video wins over posts (and attachments) seen_data['description'] = story_data['description'] seen_data['title'] = story_data['title'] break for story_data in merged_data: rss.add_item(**story_data) return rss.writeString('utf-8') def extract_page_name(self): page = None try: page_groups = re.search('facebook.com/(\w+)/?', self.feed.feed_address) if not page_groups: return page = page_groups.group(1) except IndexError: return return page def facebook_user(self): facebook_api = None social_services = None if self.options.get('requesting_user_id', None): social_services = MSocialServices.get_user(self.options.get('requesting_user_id')) facebook_api = social_services.facebook_api() if not facebook_api: logging.debug(u' ***> [%-30s] ~FRFacebook fetch failed: %s: No facebook API for %s' % (self.feed.log_title[:30], self.feed.feed_address, self.options)) return else: usersubs = UserSubscription.objects.filter(feed=self.feed) if not usersubs: logging.debug(u' ***> [%-30s] ~FRFacebook fetch failed: %s: No subscriptions' % (self.feed.log_title[:30], self.feed.feed_address)) return for sub in usersubs: social_services = MSocialServices.get_user(sub.user_id) if not social_services.facebook_uid: continue facebook_api = social_services.facebook_api() if not facebook_api: continue else: break if not facebook_api: logging.debug(u' ***> [%-30s] ~FRFacebook fetch failed: %s: No facebook API for %s' % (self.feed.log_title[:30], self.feed.feed_address, usersubs[0].user.username)) return return facebook_api def fetch_page_feed(self, facebook_user, page, fields): try: stories = facebook_user.get_object(page, fields=fields) except GraphAPIError, e: message = str(e).lower() if 'session has expired' in message: logging.debug(u' ***> [%-30s] ~FRFacebook page failed/expired, disconnecting facebook: %s: %s' % 
(self.feed.log_title[:30], self.feed.feed_address, e)) self.feed.save_feed_history(560, "Facebook Error: Expired token") return {} if not stories: return {} return stories def page_posts_story(self, facebook_user, page_story): categories = set() if 'message' not in page_story: # Probably a story shared on the page's timeline, not a published story return message = linebreaks(page_story['message']) created_date = page_story['created_time'] if isinstance(created_date, unicode): created_date = dateutil.parser.parse(created_date) fields = facebook_user.get_object(page_story['id'], fields='permalink_url,link,attachments') permalink = fields.get('link', fields['permalink_url']) attachments_html = "" if fields.get('attachments', None) and fields['attachments']['data']: for attachment in fields['attachments']['data']: if 'media' in attachment: attachments_html += "<img src=\"%s\" />" % attachment['media']['image']['src'] if attachment.get('subattachments', None): for subattachment in attachment['subattachments']['data']: attachments_html += "<img src=\"%s\" />" % subattachment['media']['image']['src'] content = """<div class="NB-facebook-rss"> <div class="NB-facebook-rss-message">%s</div> <div class="NB-facebook-rss-picture">%s</div> </div>""" % ( message, attachments_html ) story = { 'title': message, 'link': permalink, 'description': content, 'categories': list(categories), 'unique_id': "fb_post:%s" % page_story['id'], 'pubdate': created_date, } return story def page_video_story(self, facebook_user, page_story): categories = set() if 'description' not in page_story: return message = linebreaks(page_story['description']) created_date = page_story['updated_time'] if isinstance(created_date, unicode): created_date = dateutil.parser.parse(created_date) permalink = facebook_user.get_object(page_story['id'], fields='permalink_url')['permalink_url'] embed_html = facebook_user.get_object(page_story['id'], fields='embed_html') if permalink.startswith('/'): permalink = "https://www.facebook.com%s" % permalink content = """<div class="NB-facebook-rss"> <div class="NB-facebook-rss-message">%s</div> <div class="NB-facebook-rss-embed">%s</div> </div>""" % ( message, embed_html.get('embed_html', '') ) story = { 'title': page_story.get('story', message), 'link': permalink, 'description': content, 'categories': list(categories), 'unique_id': "fb_post:%s" % page_story['id'], 'pubdate': created_date, } return story def favicon_url(self): page_name = self.extract_page_name() facebook_user = self.facebook_user() if not facebook_user: logging.debug(u' ***> [%-30s] ~FRFacebook icon failed, disconnecting facebook: %s' % (self.feed.log_title[:30], self.feed.feed_address)) return try: picture_data = facebook_user.get_object(page_name, fields='picture') except GraphAPIError, e: message = str(e).lower() if 'session has expired' in message: logging.debug(u' ***> [%-30s] ~FRFacebook icon failed/expired, disconnecting facebook: %s: %s' % (self.feed.log_title[:30], self.feed.feed_address, e)) return if 'picture' in picture_data: return picture_data['picture']['data']['url']
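# --- Added illustration (not part of the original source) -------------------
# A standalone, stdlib-only version of the extract_page_name() logic above;
# the URL is a made-up example.  Note the dot is escaped here, whereas the
# original pattern ('facebook.com/(\w+)/?') leaves it as a wildcard.
import re


def toy_extract_page_name(feed_address):
    match = re.search(r'facebook\.com/(\w+)/?', feed_address)
    return match.group(1) if match else None


print(toy_extract_page_name("https://facebook.com/tastyvegetarian"))  # tastyvegetarian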
import io
import sys

isPython3 = sys.version_info >= (3, 0)


class Scribe:
    @staticmethod
    def read(path):
        with io.open(path, mode="rt", encoding="utf-8") as f:
            s = f.read()
            # go to beginning
            f.seek(0)
        return s

    @staticmethod
    def read_beginning(path, lines):
        with io.open(path, mode="rt", encoding="utf-8") as f:
            # NOTE: f.read(n) reads the first n *characters*, not n lines,
            # despite this parameter's name.
            s = f.read(lines)
            # go to beginning
            f.seek(0)
        return s

    @staticmethod
    def read_lines(path):
        with io.open(path, mode="rt", encoding="utf-8") as f:
            content = f.readlines()
        return content

    @staticmethod
    def write(contents, path):
        if isPython3:
            with open(path, mode="wt", encoding="utf-8") as f:
                # truncate previous contents
                f.truncate()
                f.write(contents)
        else:
            with io.open(path, mode="wt", encoding="utf-8") as f:
                # truncate previous contents
                f.truncate()
                f.write(contents.decode("utf8"))

    @staticmethod
    def write_lines(lines, path):
        if isPython3:
            with open(path, mode="wt", encoding="utf-8") as f:
                f.writelines([l + "\n" for l in lines])
        else:
            with io.open(path, mode="wt") as f:
                for line in lines:
                    f.writelines(line.decode("utf8") + "\n")

    @staticmethod
    def add_content(contents, path):
        if isPython3:
            with open(path, mode="a", encoding="utf-8") as f:
                f.writelines(contents)
        else:
            with io.open(path, mode="a") as f:
                f.writelines(contents.decode("utf8"))
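# --- Added usage sketch (not part of the original source) -------------------
# A write/read round trip under Python 3, assuming the Scribe class above is
# in scope; the temp-file name is arbitrary.
import os
import tempfile

tmp = os.path.join(tempfile.gettempdir(), "scribe_demo.txt")
Scribe.write(u"line one\nline two\n", tmp)
assert Scribe.read(tmp) == u"line one\nline two\n"
print(Scribe.read_lines(tmp))  # ['line one\n', 'line two\n']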
""" This script shows the categories on each page and lets you change them. For each page in the target wiki: * If the page contains no categories, you can specify a list of categories to add to the page. * If the page already contains one or more categories, you can specify a new list of categories to replace the current list of categories of the page. Usage: python pwb.py catall [start] If no starting name is provided, the bot starts at 'A'. Options: -onlynew : Only run on pages that do not yet have a category. """ from __future__ import absolute_import, unicode_literals __version__ = '$Id$' import pywikibot from pywikibot import i18n, textlib from pywikibot.bot import QuitKeyboardInterrupt def choosecats(pagetext): """Coose categories.""" chosen = [] done = False length = 1000 # TODO: → input_choice pywikibot.output("""Give the new categories, one per line. Empty line: if the first, don't change. Otherwise: Ready. -: I made a mistake, let me start over. ?: Give the text of the page with GUI. ??: Give the text of the page in console. xx: if the first, remove all categories and add no new. q: quit.""") while not done: choice = pywikibot.input(u"?") if choice == "": done = True elif choice == "-": chosen = choosecats(pagetext) done = True elif choice == "?": from pywikibot import editor as editarticle editor = editarticle.TextEditor() editor.edit(pagetext) elif choice == "??": pywikibot.output(pagetext[0:length]) length = length + 500 elif choice == "xx" and chosen == []: chosen = None done = True elif choice == "q": raise QuitKeyboardInterrupt else: chosen.append(choice) return chosen def make_categories(page, list, site=None): """Make categories.""" if site is None: site = pywikibot.Site() pllist = [] for p in list: cattitle = "%s:%s" % (site.namespaces.CATEGORY, p) pllist.append(pywikibot.Page(site, cattitle)) page.put_async(textlib.replaceCategoryLinks(page.get(), pllist, site=page.site), summary=i18n.twtranslate(site, 'catall-changing')) def main(*args): """ Process command line arguments and perform task. If args is an empty list, sys.argv is used. @param args: command line arguments @type args: list of unicode """ docorrections = True start = 'A' local_args = pywikibot.handle_args(args) for arg in local_args: if arg == '-onlynew': docorrections = False else: start = arg mysite = pywikibot.Site() for p in mysite.allpages(start=start): try: text = p.get() cats = p.categories() if not cats: pywikibot.output(u"========== %s ==========" % p.title()) pywikibot.output('No categories') pywikibot.output('-' * 40) newcats = choosecats(text) if newcats != [] and newcats is not None: make_categories(p, newcats, mysite) elif docorrections: pywikibot.output(u"========== %s ==========" % p.title()) for c in cats: pywikibot.output(c.title()) pywikibot.output('-' * 40) newcats = choosecats(text) if newcats is None: make_categories(p, [], mysite) elif newcats != []: make_categories(p, newcats, mysite) except pywikibot.IsRedirectPage: pywikibot.output(u'%s is a redirect' % p.title()) if __name__ == "__main__": try: main() except KeyboardInterrupt: pywikibot.output('\nQuitting program...')
from botocore.utils import is_json_value_header


class ShapeDocumenter(object):
    EVENT_NAME = ''

    def __init__(self, service_name, operation_name, event_emitter,
                 context=None):
        self._service_name = service_name
        self._operation_name = operation_name
        self._event_emitter = event_emitter
        self._context = context
        if context is None:
            self._context = {
                'special_shape_types': {}
            }

    def traverse_and_document_shape(self, section, shape, history,
                                    include=None, exclude=None, name=None,
                                    is_required=False):
        """Traverses and documents a shape

        Will take a self class and call its appropriate methods as a shape
        is traversed.

        :param section: The section to document.

        :param history: A list of the names of the shapes that have been
            traversed.

        :type include: Dictionary where keys are parameter names and
            values are the shapes of the parameter names.
        :param include: The parameter shapes to include in the documentation.

        :type exclude: List of the names of the parameters to exclude.
        :param exclude: The names of the parameters to exclude from
            documentation.

        :param name: The name of the shape.

        :param is_required: If the shape is a required member.
        """
        param_type = shape.type_name
        if shape.name in history:
            self.document_recursive_shape(section, shape, name=name)
        else:
            history.append(shape.name)
            is_top_level_param = (len(history) == 2)
            getattr(self, 'document_shape_type_%s' % param_type,
                    self.document_shape_default)(
                section, shape, history=history, name=name, include=include,
                exclude=exclude, is_top_level_param=is_top_level_param,
                is_required=is_required)
            if is_top_level_param:
                self._event_emitter.emit(
                    'docs.%s.%s.%s.%s' % (self.EVENT_NAME,
                                          self._service_name,
                                          self._operation_name,
                                          name),
                    section=section)
            at_overlying_method_section = (len(history) == 1)
            if at_overlying_method_section:
                self._event_emitter.emit(
                    'docs.%s.%s.%s.complete-section' % (self.EVENT_NAME,
                                                        self._service_name,
                                                        self._operation_name),
                    section=section)
            history.pop()

    def _get_special_py_default(self, shape):
        special_defaults = {
            'jsonvalue_header': '{...}|[...]|123|123.4|\'string\'|True|None',
            'streaming_input_shape': 'b\'bytes\'|file',
            'streaming_output_shape': 'StreamingBody()'
        }
        return self._get_value_for_special_type(shape, special_defaults)

    def _get_special_py_type_name(self, shape):
        special_type_names = {
            'jsonvalue_header': 'JSON serializable',
            'streaming_input_shape': 'bytes or seekable file-like object',
            'streaming_output_shape': ':class:`.StreamingBody`'
        }
        return self._get_value_for_special_type(shape, special_type_names)

    def _get_value_for_special_type(self, shape, special_type_map):
        if is_json_value_header(shape):
            return special_type_map['jsonvalue_header']
        for special_type, marked_shape in self._context[
                'special_shape_types'].items():
            if special_type in special_type_map:
                if shape == marked_shape:
                    return special_type_map[special_type]
        return None
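# --- Added illustration (not part of the original source) -------------------
# A toy version of the getattr-based dispatch used by
# traverse_and_document_shape() above: the handler name is derived from the
# shape's type name, with a default fallback.  All names here are
# hypothetical.
class ToyDocumenter:
    def document(self, type_name):
        handler = getattr(self, 'document_shape_type_%s' % type_name,
                          self.document_shape_default)
        return handler()

    def document_shape_type_string(self):
        return 'documenting a string shape'

    def document_shape_default(self):
        return 'documenting a generic shape'


toy = ToyDocumenter()
print(toy.document('string'))   # documenting a string shape
print(toy.document('integer'))  # documenting a generic shape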
from sys import version_info
if version_info >= (2, 6, 0):
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_allog', [dirname(__file__)])
        except ImportError:
            import _allog
            return _allog
        if fp is not None:
            try:
                _mod = imp.load_module('_allog', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _allog = swig_import_helper()
    del swig_import_helper
else:
    import _allog
del version_info
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.


def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)


def _swig_setattr(self, class_type, name, value):
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)


def _swig_getattr(self, class_type, name):
    if (name == "thisown"):
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    raise AttributeError(name)


def _swig_repr(self):
    try:
        strthis = "proxy of " + self.this.__repr__()
    except:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)

try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0


def debug(*args):
    return _allog.debug(*args)
debug = _allog.debug

def info(*args):
    return _allog.info(*args)
info = _allog.info

def warning(*args):
    return _allog.warning(*args)
warning = _allog.warning

def error(*args):
    return _allog.error(*args)
error = _allog.error

def fatal(*args):
    return _allog.fatal(*args)
fatal = _allog.fatal
import logging
import codecs
from optparse import OptionParser
from pyjade.utils import process
import os


def convert_file():
    support_compilers_list = ['django', 'jinja', 'underscore', 'mako', 'tornado']
    available_compilers = {}
    for i in support_compilers_list:
        try:
            compiler_class = __import__('pyjade.ext.%s' % i,
                                        fromlist=['pyjade']).Compiler
        except ImportError, e:
            logging.warning(e)
        else:
            available_compilers[i] = compiler_class

    usage = "usage: %prog [options] file [output]"
    parser = OptionParser(usage)
    parser.add_option("-o", "--output", dest="output",
                      help="Write output to FILE", metavar="FILE")
    parser.add_option("-c", "--compiler", dest="compiler",
                      choices=available_compilers.keys(), default='django',
                      type="choice",
                      help="COMPILER must be one of %s, default is django" %
                           ','.join(available_compilers.keys()))
    parser.add_option("-e", "--ext", dest="extension",
                      help="Set import/extends default file extension",
                      metavar="FILE")

    options, args = parser.parse_args()
    if len(args) < 1:
        print "Specify the input file as the first argument."
        exit()
    file_output = options.output or (args[1] if len(args) > 1 else None)
    compiler = options.compiler

    if options.extension:
        extension = '.%s' % options.extension
    elif options.output:
        extension = os.path.splitext(options.output)[1]
    else:
        extension = None

    if compiler in available_compilers:
        template = codecs.open(args[0], 'r', encoding='utf-8').read()
        output = process(template, compiler=available_compilers[compiler],
                         staticAttrs=True, extension=extension)
        if file_output:
            outfile = codecs.open(file_output, 'w', encoding='utf-8')
            outfile.write(output)
        else:
            print output
    else:
        raise Exception('You must have %s installed!' % compiler)


if __name__ == '__main__':
    convert_file()
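# --- Added usage sketch (not part of the original source) -------------------
# The library call this CLI wraps, invoked directly.  The compiler choice and
# expected output here are assumptions; the CLI above passes extra kwargs
# (staticAttrs, extension) that are omitted for brevity.
from pyjade.ext.jinja import Compiler
from pyjade.utils import process

print process(u'p Hello', compiler=Compiler)  # expected: <p>Hello</p>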
import os import unittest from lxml import etree from hovercraft.template import (Template, CSS_RESOURCE, JS_RESOURCE, JS_POSITION_BODY, JS_POSITION_HEADER) TEST_DATA = os.path.join(os.path.split(__file__)[0], 'test_data') class TemplateInfoTests(unittest.TestCase): """Tests that template information is correctly parsed""" def test_template_paths(self): # You can specify a folder or a cfg file and that's the same thing. template_info1 = Template(os.path.join(TEST_DATA, 'minimal')) template_info2 = Template(os.path.join(TEST_DATA, 'minimal', 'template.cfg')) self.assertEqual(etree.tostring(template_info1.xml_node()), etree.tostring(template_info2.xml_node())) def test_template_minimal(self): template_info = Template(os.path.join(TEST_DATA, 'minimal')) with open(os.path.join(TEST_DATA, 'minimal', 'template.xsl'), 'rb') as xslfile: xsl = xslfile.read() self.assertEqual(template_info.xsl, xsl) template_files = [each.filepath for each in template_info.resources] self.assertIn('js/impress.js', template_files) self.assertIn('js/hovercraft-minimal.js', template_files) css_files = list(each.filepath for each in template_info.resources if each.resource_type == CSS_RESOURCE) self.assertEqual(len(css_files), 0) self.assertEqual(template_info.doctype, b'<!DOCTYPE html>') def test_template_maximal(self): template_info = Template(os.path.join(TEST_DATA, 'maximal')) with open(os.path.join(TEST_DATA, 'maximal', 'template.xsl'), 'rb') as xslfile: xsl = xslfile.read() self.assertEqual(template_info.xsl, xsl) template_files = [each.filepath for each in template_info.resources] self.assertIn('images/python-logo-master-v3-TM.png', template_files) self.assertIn('js/impress.js', template_files) self.assertIn('js/impressConsole.js', template_files) self.assertIn('js/hovercraft.js', template_files) js_bodies = [each.filepath for each in template_info.resources if each.resource_type == JS_RESOURCE and each.extra_info == JS_POSITION_BODY] self.assertIn('js/impress.js', js_bodies) self.assertIn('js/impressConsole.js', js_bodies) self.assertIn('js/hovercraft.js', js_bodies) js_headers = [each.filepath for each in template_info.resources if each.resource_type == JS_RESOURCE and each.extra_info == JS_POSITION_HEADER] self.assertIn('js/dummy.js', js_headers) self.assertEqual(template_info.resources[0].filepath, 'css/style.css') self.assertEqual(template_info.resources[0].extra_info, 'all') self.assertEqual(template_info.resources[1].filepath, 'css/print.css') self.assertEqual(template_info.resources[1].extra_info, 'print') self.assertEqual(template_info.resources[2].filepath, 'css/impressConsole.css') self.assertEqual(template_info.resources[2].extra_info, 'screen,projection') self.assertEqual(template_info.doctype, b'<!DOCTYPE html SYSTEM "about:legacy-compat">') class TemplateInfoNodeTests(unittest.TestCase): """Tests that template information is correctly made into an xml nodes""" def test_minimal_template(self): template_info = Template(os.path.join(TEST_DATA, 'minimal')) node = template_info.xml_node() self.assertEqual(etree.tostring(node), ( b'<templateinfo><header/><body>' b'<js src="js/impress.js"/><js src="js/hovercraft-minimal.js"/>' b'</body></templateinfo>')) def test_maximal_template(self): template_info = Template(os.path.join(TEST_DATA, 'maximal')) node = template_info.xml_node() self.assertEqual(etree.tostring(node), ( b'<templateinfo><header>' b'<css href="css/style.css" media="all"/>' b'<css href="css/print.css" media="print"/>' b'<css href="css/impressConsole.css" media="screen,projection"/>' b'<js 
src="js/dummy.js"/></header>' b'<body><js src="js/impress.js"/><js src="js/impressConsole.js"/>' b'<js src="js/hovercraft.js"/>' b'</body></templateinfo>')) if __name__ == '__main__': unittest.main()
import operator import os import abc import functools import pyparsing as pp from mitmproxy.utils import strutils from mitmproxy.utils import human import typing # noqa from . import generators from . import exceptions class Settings: def __init__( self, is_client=False, staticdir=None, unconstrained_file_access=False, request_host=None, websocket_key=None, protocol=None, ): self.is_client = is_client self.staticdir = staticdir self.unconstrained_file_access = unconstrained_file_access self.request_host = request_host self.websocket_key = websocket_key # TODO: refactor this into the protocol self.protocol = protocol Sep = pp.Optional(pp.Literal(":")).suppress() v_integer = pp.Word(pp.nums)\ .setName("integer")\ .setParseAction(lambda toks: int(toks[0])) v_literal = pp.MatchFirst( [ pp.QuotedString( "\"", unquoteResults=True, multiline=True ), pp.QuotedString( "'", unquoteResults=True, multiline=True ), ] ) v_naked_literal = pp.MatchFirst( [ v_literal, pp.Word("".join(i for i in pp.printables if i not in ",:\n@\'\"")) ] ) class Token: """ A token in the specification language. Tokens are immutable. The token classes have no meaning in and of themselves, and are combined into Components and Actions to build the language. """ __metaclass__ = abc.ABCMeta @classmethod def expr(cls): # pragma: no cover """ A parse expression. """ return None @abc.abstractmethod def spec(self): # pragma: no cover """ A parseable specification for this token. """ return None @property def unique_name(self) -> typing.Optional[str]: """ Controls uniqueness constraints for tokens. No two tokens with the same name will be allowed. If no uniquness should be applied, this should be None. """ return self.__class__.__name__.lower() def resolve(self, settings_, msg_): """ Resolves this token to ready it for transmission. This means that the calculated offsets of actions are fixed. 
settings: a language.Settings instance msg: The containing message """ return self def __repr__(self): return self.spec() class _TokValueLiteral(Token): def __init__(self, val): self.val = strutils.escaped_str_to_bytes(val) def get_generator(self, settings_): return self.val def freeze(self, settings_): return self class TokValueLiteral(_TokValueLiteral): """ A literal with Python-style string escaping """ @classmethod def expr(cls): e = v_literal.copy() return e.setParseAction(cls.parseAction) @classmethod def parseAction(cls, x): v = cls(*x) return v def spec(self): inner = strutils.bytes_to_escaped_str(self.val) inner = inner.replace(r"'", r"\x27") return "'" + inner + "'" class TokValueNakedLiteral(_TokValueLiteral): @classmethod def expr(cls): e = v_naked_literal.copy() return e.setParseAction(lambda x: cls(*x)) def spec(self): return strutils.bytes_to_escaped_str(self.val, escape_single_quotes=True) class TokValueGenerate(Token): def __init__(self, usize, unit, datatype): if not unit: unit = "b" self.usize, self.unit, self.datatype = usize, unit, datatype def bytes(self): return self.usize * human.SIZE_UNITS[self.unit] def get_generator(self, settings_): return generators.RandomGenerator(self.datatype, self.bytes()) def freeze(self, settings): g = self.get_generator(settings) return TokValueLiteral(strutils.bytes_to_escaped_str(g[:], escape_single_quotes=True)) @classmethod def expr(cls): e = pp.Literal("@").suppress() + v_integer u = functools.reduce( operator.or_, [pp.Literal(i) for i in human.SIZE_UNITS.keys()] ).leaveWhitespace() e = e + pp.Optional(u, default=None) s = pp.Literal(",").suppress() s += functools.reduce( operator.or_, [pp.Literal(i) for i in generators.DATATYPES.keys()] ) e += pp.Optional(s, default="bytes") return e.setParseAction(lambda x: cls(*x)) def spec(self): s = "@%s" % self.usize if self.unit != "b": s += self.unit if self.datatype != "bytes": s += ",%s" % self.datatype return s class TokValueFile(Token): def __init__(self, path): self.path = str(path) @classmethod def expr(cls): e = pp.Literal("<").suppress() e = e + v_naked_literal return e.setParseAction(lambda x: cls(*x)) def freeze(self, settings_): return self def get_generator(self, settings): if not settings.staticdir: raise exceptions.FileAccessDenied("File access disabled.") s = os.path.expanduser(self.path) s = os.path.normpath( os.path.abspath(os.path.join(settings.staticdir, s)) ) uf = settings.unconstrained_file_access if not uf and not s.startswith(os.path.normpath(settings.staticdir)): raise exceptions.FileAccessDenied( "File access outside of configured directory" ) if not os.path.isfile(s): raise exceptions.FileAccessDenied("File not readable") return generators.FileGenerator(s) def spec(self): return "<'%s'" % self.path TokValue = pp.MatchFirst( [ TokValueGenerate.expr(), TokValueFile.expr(), TokValueLiteral.expr() ] ) TokNakedValue = pp.MatchFirst( [ TokValueGenerate.expr(), TokValueFile.expr(), TokValueLiteral.expr(), TokValueNakedLiteral.expr(), ] ) TokOffset = pp.MatchFirst( [ v_integer, pp.Literal("r"), pp.Literal("a") ] ) class _Component(Token): """ A value component of the primary specification of an message. Components produce byte values describing the bytes of the message. """ def values(self, settings): # pragma: no cover """ A sequence of values, which can either be strings or generators. """ pass def string(self, settings=None): """ A bytestring representation of the object. 
""" return b"".join(i[:] for i in self.values(settings or {})) class KeyValue(_Component): """ A key/value pair. cls.preamble: leader """ def __init__(self, key, value): self.key, self.value = key, value @classmethod def expr(cls): e = pp.Literal(cls.preamble).suppress() e += TokValue e += pp.Literal("=").suppress() e += TokValue return e.setParseAction(lambda x: cls(*x)) def spec(self): return "%s%s=%s" % (self.preamble, self.key.spec(), self.value.spec()) def freeze(self, settings): return self.__class__( self.key.freeze(settings), self.value.freeze(settings) ) class CaselessLiteral(_Component): """ A caseless token that can take only one value. """ def __init__(self, value): self.value = value @classmethod def expr(cls): spec = pp.CaselessLiteral(cls.TOK) spec = spec.setParseAction(lambda x: cls(*x)) return spec def values(self, settings): return self.TOK def spec(self): return self.TOK def freeze(self, settings_): return self class OptionsOrValue(_Component): """ Can be any of a specified set of options, or a value specifier. """ preamble = "" options = [] # type: typing.List[str] def __init__(self, value): # If it's a string, we were passed one of the options, so we lower-case # it to be canonical. The user can specify a different case by using a # string value literal. self.option_used = False if isinstance(value, str): for i in self.options: # Find the exact option value in a case-insensitive way if i.lower() == value.lower(): self.option_used = True value = TokValueLiteral(i) break self.value = value @classmethod def expr(cls): parts = [pp.CaselessLiteral(i) for i in cls.options] m = pp.MatchFirst(parts) spec = m | TokValue.copy() spec = spec.setParseAction(lambda x: cls(*x)) if cls.preamble: spec = pp.Literal(cls.preamble).suppress() + spec return spec def values(self, settings): return [ self.value.get_generator(settings) ] def spec(self): s = self.value.spec() if s[1:-1].lower() in self.options: s = s[1:-1].lower() return "%s%s" % (self.preamble, s) def freeze(self, settings): return self.__class__(self.value.freeze(settings)) class Integer(_Component): bounds = (None, None) # type: typing.Tuple[typing.Optional[int], typing.Optional[int]] preamble = "" def __init__(self, value): v = int(value) outofbounds = any([ self.bounds[0] is not None and v < self.bounds[0], self.bounds[1] is not None and v > self.bounds[1] ]) if outofbounds: raise exceptions.ParseException( "Integer value must be between %s and %s." % self.bounds, 0, 0 ) self.value = str(value).encode() @classmethod def expr(cls): e = v_integer.copy() if cls.preamble: e = pp.Literal(cls.preamble).suppress() + e return e.setParseAction(lambda x: cls(*x)) def values(self, settings): return [self.value] def spec(self): return "%s%s" % (self.preamble, self.value.decode()) def freeze(self, settings_): return self class Value(_Component): """ A value component lead by an optional preamble. """ preamble = "" def __init__(self, value): self.value = value @classmethod def expr(cls): e = (TokValue | TokNakedValue) if cls.preamble: e = pp.Literal(cls.preamble).suppress() + e return e.setParseAction(lambda x: cls(*x)) def values(self, settings): return [self.value.get_generator(settings)] def spec(self): return "%s%s" % (self.preamble, self.value.spec()) def freeze(self, settings): return self.__class__(self.value.freeze(settings)) class FixedLengthValue(Value): """ A value component lead by an optional preamble. 
""" preamble = "" length = None # type: typing.Optional[int] def __init__(self, value): Value.__init__(self, value) lenguess = None try: lenguess = len(value.get_generator(Settings())) except exceptions.RenderError: pass # This check will fail if we know the length upfront if lenguess is not None and lenguess != self.length: raise exceptions.RenderError( "Invalid value length: '%s' is %s bytes, should be %s." % ( self.spec(), lenguess, self.length ) ) def values(self, settings): ret = Value.values(self, settings) l = sum(len(i) for i in ret) # This check will fail if we don't know the length upfront - i.e. for # file inputs if l != self.length: raise exceptions.RenderError( "Invalid value length: '%s' is %s bytes, should be %s." % ( self.spec(), l, self.length ) ) return ret class Boolean(_Component): """ A boolean flag. name = true -name = false """ name = "" def __init__(self, value): self.value = value @classmethod def expr(cls): e = pp.Optional(pp.Literal("-"), default=True) e += pp.Literal(cls.name).suppress() def parse(s_, loc_, toks): val = True if toks[0] == "-": val = False return cls(val) return e.setParseAction(parse) def spec(self): return "%s%s" % ("-" if not self.value else "", self.name) class IntField(_Component): """ An integer field, where values can optionally specified by name. """ names = {} # type: typing.Dict[str, int] max = 16 preamble = "" def __init__(self, value): self.origvalue = value self.value = self.names.get(value, value) if self.value > self.max: raise exceptions.ParseException( "Value can't exceed %s" % self.max, 0, 0 ) @classmethod def expr(cls): parts = [pp.CaselessLiteral(i) for i in cls.names.keys()] m = pp.MatchFirst(parts) spec = m | v_integer.copy() spec = spec.setParseAction(lambda x: cls(*x)) if cls.preamble: spec = pp.Literal(cls.preamble).suppress() + spec return spec def values(self, settings): return [str(self.value)] def spec(self): return "%s%s" % (self.preamble, self.origvalue)
"""Test the fundrawtransaction RPC.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * def get_unspent(listunspent, amount): for utx in listunspent: if utx['amount'] == amount: return utx raise AssertionError('Could not find unspent with amount={}'.format(amount)) class RawTransactionsTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 4 self.setup_clean_chain = True def setup_network(self, split=False): self.setup_nodes() connect_nodes_bi(self.nodes, 0, 1) connect_nodes_bi(self.nodes, 1, 2) connect_nodes_bi(self.nodes, 0, 2) connect_nodes_bi(self.nodes, 0, 3) def run_test(self): min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee'] # This test is not meant to test fee estimation and we'd like # to be sure all txs are sent at a consistent desired feerate for node in self.nodes: node.settxfee(min_relay_tx_fee) # if the fee's positive delta is higher than this value tests will fail, # neg. delta always fail the tests. # The size of the signature of every input may be at most 2 bytes larger # than a minimum sized signature. # = 2 bytes * minRelayTxFeePerByte feeTolerance = 2 * min_relay_tx_fee/1000 self.nodes[2].generate(1) self.sync_all() self.nodes[0].generate(121) self.sync_all() # ensure that setting changePosition in fundraw with an exact match is handled properly rawmatch = self.nodes[2].createrawtransaction([], {self.nodes[2].getnewaddress():50}) rawmatch = self.nodes[2].fundrawtransaction(rawmatch, {"changePosition":1, "subtractFeeFromOutputs":[0]}) assert_equal(rawmatch["changepos"], -1) watchonly_address = self.nodes[0].getnewaddress() watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"] watchonly_amount = Decimal(200) self.nodes[3].importpubkey(watchonly_pubkey, "", True) watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount) self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0) self.nodes[0].generate(1) self.sync_all() ############### # simple test # ############### inputs = [ ] outputs = { self.nodes[0].getnewaddress() : 1.0 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert(len(dec_tx['vin']) > 0) #test that we have enough inputs ############################## # simple test with two coins # ############################## inputs = [ ] outputs = { self.nodes[0].getnewaddress() : 2.2 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert(len(dec_tx['vin']) > 0) #test if we have enough inputs ############################## # simple test with two coins # ############################## inputs = [ ] outputs = { self.nodes[0].getnewaddress() : 2.6 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert(len(dec_tx['vin']) > 0) 
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '') ################################ # simple test with two outputs # ################################ inputs = [ ] outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 for out in dec_tx['vout']: totalOut += out['value'] assert(len(dec_tx['vin']) > 0) assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '') ######################################################################### # test a fundrawtransaction with a VIN greater than the required amount # ######################################################################### utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}] outputs = { self.nodes[0].getnewaddress() : 1.0 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 for out in dec_tx['vout']: totalOut += out['value'] assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee ##################################################################### # test a fundrawtransaction with which will not get a change output # ##################################################################### utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}] outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee - feeTolerance } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 for out in dec_tx['vout']: totalOut += out['value'] assert_equal(rawtxfund['changepos'], -1) assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee #################################################### # test a fundrawtransaction with an invalid option # #################################################### utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ] outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) assert_raises_rpc_error(-3, "Unexpected key foo", self.nodes[2].fundrawtransaction, rawtx, {'foo':'bar'}) ############################################################ # test a fundrawtransaction with an invalid change address # ############################################################ utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ] outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) assert_raises_rpc_error(-5, "changeAddress must be a valid bitcoin address", 
self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':'foobar'}) ############################################################ # test a fundrawtransaction with a provided change address # ############################################################ utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ] outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) change = self.nodes[2].getnewaddress() assert_raises_rpc_error(-8, "changePosition out of bounds", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':change, 'changePosition':2}) rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 0}) dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) out = dec_tx['vout'][0] assert_equal(change, out['scriptPubKey']['addresses'][0]) ######################################################################### # test a fundrawtransaction with a VIN smaller than the required amount # ######################################################################### utx = get_unspent(self.nodes[2].listunspent(), 1) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}] outputs = { self.nodes[0].getnewaddress() : 1.0 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) # 4-byte version + 1-byte vin count + 36-byte prevout then script_len rawtx = rawtx[:82] + "0100" + rawtx[84:] dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 matchingOuts = 0 for i, out in enumerate(dec_tx['vout']): totalOut += out['value'] if out['scriptPubKey']['addresses'][0] in outputs: matchingOuts+=1 else: assert_equal(i, rawtxfund['changepos']) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex']) assert_equal(matchingOuts, 1) assert_equal(len(dec_tx['vout']), 2) ########################################### # test a fundrawtransaction with two VINs # ########################################### utx = get_unspent(self.nodes[2].listunspent(), 1) utx2 = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ] outputs = { self.nodes[0].getnewaddress() : 6.0 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 matchingOuts = 0 for out in dec_tx['vout']: totalOut += out['value'] if out['scriptPubKey']['addresses'][0] in outputs: matchingOuts+=1 assert_equal(matchingOuts, 1) assert_equal(len(dec_tx['vout']), 2) matchingIns = 0 for vinOut in dec_tx['vin']: for vinIn in inputs: if vinIn['txid'] == vinOut['txid']: matchingIns+=1 assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params ######################################################### # test a fundrawtransaction with two VINs and two vOUTs # ######################################################### utx = 
get_unspent(self.nodes[2].listunspent(), 1) utx2 = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ] outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 matchingOuts = 0 for out in dec_tx['vout']: totalOut += out['value'] if out['scriptPubKey']['addresses'][0] in outputs: matchingOuts+=1 assert_equal(matchingOuts, 2) assert_equal(len(dec_tx['vout']), 3) ############################################## # test a fundrawtransaction with invalid vin # ############################################## inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin! outputs = { self.nodes[0].getnewaddress() : 1.0} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx) ############################################################ #compare fee of a standard pubkeyhash transaction inputs = [] outputs = {self.nodes[1].getnewaddress():1.1} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) fundedTx = self.nodes[0].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1) signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert(feeDelta >= 0 and feeDelta <= feeTolerance) ############################################################ ############################################################ #compare fee of a standard pubkeyhash transaction with multiple outputs inputs = [] outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) fundedTx = self.nodes[0].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[0].sendmany("", outputs) signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert(feeDelta >= 0 and feeDelta <= feeTolerance) ############################################################ ############################################################ #compare fee of a 2of2 multisig p2sh transaction # create 2of2 addr addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[1].getnewaddress() addr1Obj = self.nodes[1].validateaddress(addr1) addr2Obj = self.nodes[1].validateaddress(addr2) mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) inputs = [] outputs = {mSigObj:1.1} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) fundedTx = self.nodes[0].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[0].sendtoaddress(mSigObj, 1.1) signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert(feeDelta >= 0 and feeDelta <= 
feeTolerance) ############################################################ ############################################################ #compare fee of a standard pubkeyhash transaction # create 4of5 addr addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[1].getnewaddress() addr3 = self.nodes[1].getnewaddress() addr4 = self.nodes[1].getnewaddress() addr5 = self.nodes[1].getnewaddress() addr1Obj = self.nodes[1].validateaddress(addr1) addr2Obj = self.nodes[1].validateaddress(addr2) addr3Obj = self.nodes[1].validateaddress(addr3) addr4Obj = self.nodes[1].validateaddress(addr4) addr5Obj = self.nodes[1].validateaddress(addr5) mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']]) inputs = [] outputs = {mSigObj:1.1} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) fundedTx = self.nodes[0].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[0].sendtoaddress(mSigObj, 1.1) signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert(feeDelta >= 0 and feeDelta <= feeTolerance) ############################################################ ############################################################ # spend a 2of2 multisig transaction over fundraw # create 2of2 addr addr1 = self.nodes[2].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[2].validateaddress(addr1) addr2Obj = self.nodes[2].validateaddress(addr2) mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # send 1.2 BTC to msig addr txId = self.nodes[0].sendtoaddress(mSigObj, 1.2) self.sync_all() self.nodes[1].generate(1) self.sync_all() oldBalance = self.nodes[1].getbalance() inputs = [] outputs = {self.nodes[1].getnewaddress():1.1} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) fundedTx = self.nodes[2].fundrawtransaction(rawtx) signedTx = self.nodes[2].signrawtransaction(fundedTx['hex']) txId = self.nodes[2].sendrawtransaction(signedTx['hex']) self.sync_all() self.nodes[1].generate(1) self.sync_all() # make sure funds are received at node1 assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance()) ############################################################ # locked wallet test self.stop_node(0) self.nodes[1].node_encrypt_wallet("test") self.stop_node(2) self.stop_node(3) self.start_nodes() # This test is not meant to test fee estimation and we'd like # to be sure all txs are sent at a consistent desired feerate for node in self.nodes: node.settxfee(min_relay_tx_fee) connect_nodes_bi(self.nodes,0,1) connect_nodes_bi(self.nodes,1,2) connect_nodes_bi(self.nodes,0,2) connect_nodes_bi(self.nodes,0,3) self.sync_all() # drain the keypool self.nodes[1].getnewaddress() self.nodes[1].getrawchangeaddress() inputs = [] outputs = {self.nodes[0].getnewaddress():1.1} rawtx = self.nodes[1].createrawtransaction(inputs, outputs) # fund a transaction that requires a new key for the change output # creating the key must be impossible because the wallet is locked assert_raises_rpc_error(-4, "Keypool ran out, please call keypoolrefill first", self.nodes[1].fundrawtransaction, rawtx) #refill the keypool self.nodes[1].walletpassphrase("test", 100) self.nodes[1].keypoolrefill(8) #need to refill the keypool to get an internal change address self.nodes[1].walletlock() assert_raises_rpc_error(-13, "walletpassphrase", self.nodes[1].sendtoaddress, 
self.nodes[0].getnewaddress(), 1.2) oldBalance = self.nodes[0].getbalance() inputs = [] outputs = {self.nodes[0].getnewaddress():1.1} rawtx = self.nodes[1].createrawtransaction(inputs, outputs) fundedTx = self.nodes[1].fundrawtransaction(rawtx) #now we need to unlock self.nodes[1].walletpassphrase("test", 600) signedTx = self.nodes[1].signrawtransaction(fundedTx['hex']) txId = self.nodes[1].sendrawtransaction(signedTx['hex']) self.nodes[1].generate(1) self.sync_all() # make sure funds are received at node1 assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance()) ############################################### # multiple (~19) inputs tx test | Compare fee # ############################################### #empty node1, send some small coins from node0 to node1 self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True) self.sync_all() self.nodes[0].generate(1) self.sync_all() for i in range(0,20): self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01) self.nodes[0].generate(1) self.sync_all() #fund a tx with ~20 small inputs inputs = [] outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04} rawtx = self.nodes[1].createrawtransaction(inputs, outputs) fundedTx = self.nodes[1].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[1].sendmany("", outputs) signedFee = self.nodes[1].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs ############################################# # multiple (~19) inputs tx test | sign/send # ############################################# #again, empty node1, send some small coins from node0 to node1 self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True) self.sync_all() self.nodes[0].generate(1) self.sync_all() for i in range(0,20): self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01) self.nodes[0].generate(1) self.sync_all() #fund a tx with ~20 small inputs oldBalance = self.nodes[0].getbalance() inputs = [] outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04} rawtx = self.nodes[1].createrawtransaction(inputs, outputs) fundedTx = self.nodes[1].fundrawtransaction(rawtx) fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex']) txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex']) self.sync_all() self.nodes[0].generate(1) self.sync_all() assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward ##################################################### # test fundrawtransaction with OP_RETURN and no vin # ##################################################### rawtx = "0100000000010000000000000000066a047465737400000000" dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(len(dec_tx['vin']), 0) assert_equal(len(dec_tx['vout']), 1) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert_greater_than(len(dec_tx['vin']), 0) # at least one vin assert_equal(len(dec_tx['vout']), 2) # one change output added ################################################## # test a fundrawtransaction using only watchonly # ################################################## inputs = [] outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) result = 
self.nodes[3].fundrawtransaction(rawtx, {'includeWatching': True }) res_dec = self.nodes[0].decoderawtransaction(result["hex"]) assert_equal(len(res_dec["vin"]), 1) assert_equal(res_dec["vin"][0]["txid"], watchonly_txid) assert("fee" in result.keys()) assert_greater_than(result["changepos"], -1) ############################################################### # test fundrawtransaction using the entirety of watched funds # ############################################################### inputs = [] outputs = {self.nodes[2].getnewaddress() : watchonly_amount} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) # Backward compatibility test (2nd param is includeWatching) result = self.nodes[3].fundrawtransaction(rawtx, True) res_dec = self.nodes[0].decoderawtransaction(result["hex"]) assert_equal(len(res_dec["vin"]), 2) assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid) assert_greater_than(result["fee"], 0) assert_greater_than(result["changepos"], -1) assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10) signedtx = self.nodes[3].signrawtransaction(result["hex"]) assert(not signedtx["complete"]) signedtx = self.nodes[0].signrawtransaction(signedtx["hex"]) assert(signedtx["complete"]) self.nodes[0].sendrawtransaction(signedtx["hex"]) self.nodes[0].generate(1) self.sync_all() ####################### # Test feeRate option # ####################### # Make sure there is exactly one input so coin selection can't skew the result assert_equal(len(self.nodes[3].listunspent(1)), 1) inputs = [] outputs = {self.nodes[3].getnewaddress() : 1} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) result = self.nodes[3].fundrawtransaction(rawtx) # uses min_relay_tx_fee (set by settxfee) result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee}) result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10*min_relay_tx_fee}) result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex']) assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate) assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate) ################################ # Test no address reuse occurs # ################################ result3 = self.nodes[3].fundrawtransaction(rawtx) res_dec = self.nodes[0].decoderawtransaction(result3["hex"]) changeaddress = "" for out in res_dec['vout']: if out['value'] > 1.0: changeaddress += out['scriptPubKey']['addresses'][0] assert(changeaddress != "") nextaddr = self.nodes[3].getnewaddress() # Now the change address key should be removed from the keypool assert(changeaddress != nextaddr) ###################################### # Test subtractFeeFromOutputs option # ###################################### # Make sure there is exactly one input so coin selection can't skew the result assert_equal(len(self.nodes[3].listunspent(1)), 1) inputs = [] outputs = {self.nodes[2].getnewaddress(): 1} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) result = [self.nodes[3].fundrawtransaction(rawtx), # uses min_relay_tx_fee (set by settxfee) self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": []}), # empty subtraction list self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0]}), # uses min_relay_tx_fee (set by settxfee) self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee}), self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee, "subtractFeeFromOutputs": [0]})] 
dec_tx = [self.nodes[3].decoderawtransaction(tx['hex']) for tx in result] output = [d['vout'][1 - r['changepos']]['value'] for d, r in zip(dec_tx, result)] change = [d['vout'][r['changepos']]['value'] for d, r in zip(dec_tx, result)] assert_equal(result[0]['fee'], result[1]['fee'], result[2]['fee']) assert_equal(result[3]['fee'], result[4]['fee']) assert_equal(change[0], change[1]) assert_equal(output[0], output[1]) assert_equal(output[0], output[2] + result[2]['fee']) assert_equal(change[0] + result[0]['fee'], change[2]) assert_equal(output[3], output[4] + result[4]['fee']) assert_equal(change[3] + result[3]['fee'], change[4]) inputs = [] outputs = {self.nodes[2].getnewaddress(): value for value in (1.0, 1.1, 1.2, 1.3)} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) result = [self.nodes[3].fundrawtransaction(rawtx), # split the fee between outputs 0, 2, and 3, but not output 1 self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0, 2, 3]})] dec_tx = [self.nodes[3].decoderawtransaction(result[0]['hex']), self.nodes[3].decoderawtransaction(result[1]['hex'])] # Nested list of non-change output amounts for each transaction output = [[out['value'] for i, out in enumerate(d['vout']) if i != r['changepos']] for d, r in zip(dec_tx, result)] # List of differences in output amounts between normal and subtractFee transactions share = [o0 - o1 for o0, o1 in zip(output[0], output[1])] # output 1 is the same in both transactions assert_equal(share[1], 0) # the other 3 outputs are smaller as a result of subtractFeeFromOutputs assert_greater_than(share[0], 0) assert_greater_than(share[2], 0) assert_greater_than(share[3], 0) # outputs 2 and 3 take the same share of the fee assert_equal(share[2], share[3]) # output 0 takes at least as much share of the fee, and no more than 2 satoshis more, than outputs 2 and 3 assert_greater_than_or_equal(share[0], share[2]) assert_greater_than_or_equal(share[2] + Decimal(2e-8), share[0]) # the fee is the same in both transactions assert_equal(result[0]['fee'], result[1]['fee']) # the total subtracted from the outputs is equal to the fee assert_equal(share[0] + share[2] + share[3], result[0]['fee']) if __name__ == '__main__': RawTransactionsTest().main()
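
For reference, the feeTolerance computed near the top of run_test works out to a very small amount. A quick check, assuming the default regtest relay fee of 0.00001 BTC/kB (an assumption about the test environment, not stated in the test itself):

from decimal import Decimal

min_relay_tx_fee = Decimal("0.00001")        # regtest default relayfee (assumed)
fee_tolerance = 2 * min_relay_tx_fee / 1000  # 2 extra signature bytes per input
assert fee_tolerance == Decimal("0.00000002")
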
""" pygments.styles.borland ~~~~~~~~~~~~~~~~~~~~~~~ Style similar to the style used in the Borland IDEs. :copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from pygments.style import Style from pygments.token import Keyword, Name, Comment, String, Error, \ Number, Operator, Generic, Whitespace class BorlandStyle(Style): """ Style similar to the style used in the borland IDEs. """ default_style = '' styles = { Whitespace: '#bbbbbb', Comment: 'italic #008800', Comment.Preproc: 'noitalic #008080', Comment.Special: 'noitalic bold', String: '#0000FF', String.Char: '#800080', Number: '#0000FF', Keyword: 'bold #000080', Operator.Word: 'bold', Name.Tag: 'bold #000080', Name.Attribute: '#FF0000', Generic.Heading: '#999999', Generic.Subheading: '#aaaaaa', Generic.Deleted: 'bg:#ffdddd #000000', Generic.Inserted: 'bg:#ddffdd #000000', Generic.Error: '#aa0000', Generic.Emph: 'italic', Generic.Strong: 'bold', Generic.Prompt: '#555555', Generic.Output: '#888888', Generic.Traceback: '#aa0000', Error: 'bg:#e3d2d2 #a61717' }
from django.conf.urls import patterns, url

from omeroweb.webstart import views

# Views are passed as callables, so no view-prefix string is needed;
# the previous 'django.views.generic.simple' prefix was dead weight.
urlpatterns = patterns(
    '',
    url(r'^$', views.index, name="webstart_index"),
    url(r'^jars/insight\.jnlp$', views.insight, name='webstart_insight'),
)
""" All the little functions that make life nicer in the Traits package. .. moduleauthor:: Mihai Andrei <mihai.andrei@codemart.ro> .. moduleauthor:: Lia Domide <lia.domide@codemart.ro> .. moduleauthor:: marmaduke <duke@eml.cc> """ import numpy import collections import inspect from tvb.basic.profile import TvbProfile ispublic = lambda key: key[0] is not '_' def str_class_name(thing, short_form=False): """ A helper function that tries to generate an informative name for its argument: when passed a class, return its name, when passed an object return a string representation of that value. """ # if thing is a class, it has attribute __name__ if hasattr(thing, '__name__'): cls = thing if short_form: return cls.__name__ return cls.__module__ + '.' + cls.__name__ else: # otherwise, it's an object and we return its __str__ return str(thing) def get(obj, key, default=None): """ get() is a general function allowing us to ignore whether we are getting from a dictionary or object. If obj is a dictionary, we return the value corresponding to key, otherwise we return the attribute on obj corresponding to key. In both cases, if key does not exist, default is returned. """ if type(obj) is dict: return obj.get(key, default) else: return getattr(obj, key) if hasattr(obj, key) else default def log_debug_array(log, array, array_name, owner=""): """ Simple access to debugging info on an array. """ if TvbProfile.current.TRAITS_CONFIGURATION.use_storage: return # Hide this logs in web-mode, with storage, because we have multiple storage exceptions if owner != "": name = ".".join((owner, array_name)) else: name = array_name if array is not None and hasattr(array, 'shape'): shape = str(array.shape) dtype = str(array.dtype) has_nan = str(numpy.isnan(array).any()) array_max = str(array.max()) array_min = str(array.min()) log.debug("%s shape: %s" % (name, shape)) log.debug("%s dtype: %s" % (name, dtype)) log.debug("%s has NaN: %s" % (name, has_nan)) log.debug("%s maximum: %s" % (name, array_max)) log.debug("%s minimum: %s" % (name, array_min)) else: log.debug("%s is None or not Array" % name) Args = collections.namedtuple('Args', 'pos kwd') class TypeRegister(list): """ TypeRegister is a smart list that can be queried to obtain selections of the classes inheriting from Traits classes. """ def subclasses(self, obj, avoid_subclasses=False): """ The subclasses method takes a class (or given instance object, will use the class of the instance), and returns a list of all options known to this TypeRegister that are direct subclasses of the class or have the class in their base class list. :param obj: Class or instance :param avoid_subclasses: When specified, subclasses are not retrieved, only current class. """ cls = obj if inspect.isclass(obj) else obj.__class__ if avoid_subclasses: return [cls] if hasattr(cls, '_base_classes'): bases = cls._base_classes else: bases = [] sublcasses = [opt for opt in self if ((issubclass(opt, cls) or cls in opt.__bases__) and not inspect.isabstract(opt) and opt.__name__ not in bases)] return sublcasses def multiline_math_directives_to_matjax(doc): """ Looks for multi-line sphinx math directives in the given rst string It converts them in html text that will be interpreted by mathjax The parsing is simplistic, not a rst parser. Wraps .. 
math :: body in \[\begin{split}\end{split}\] """ # doc = text | math BEGIN = r'\[\begin{split}' END = r'\end{split}\]' in_math = False # 2 state parser out_lines = [] indent = '' for line in doc.splitlines(): if not in_math: # math = indent directive math_body indent, sep, _ = line.partition('.. math::') if sep: out_lines.append(BEGIN) in_math = True else: out_lines.append(line) else: # math body is at least 1 space more indented than the directive, but we tolerate empty lines if line.startswith(indent + ' ') or line.strip() == '': out_lines.append(line) else: # this line is not properly indented, math block is over out_lines.append(END) out_lines.append(line) in_math = False if in_math: # close math tag out_lines.append(END) return '\n'.join(out_lines)
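
A short illustration of the conversion performed by multiline_math_directives_to_matjax; the input document is made up, and the expected output shape is described in the comments.

doc = "\n".join([
    "Some text.",
    ".. math::",
    "    a^2 + b^2 = c^2",
    "",
    "More text.",
])
print(multiline_math_directives_to_matjax(doc))
# The directive line is replaced by \[\begin{split}, the indented body and
# blank lines are kept verbatim, and \end{split}\] is emitted as soon as a
# non-indented line ends the block.
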
import os import time from . import test_util def test_add_file(): test_util.mkfile(1, 'a.md', 'add a file') test_util.verify_result() def test_add_file_t(): test_util.mkfile(2, 'l/m/n/test.md', 'add l/m/n/test.md') test_util.verify_result() def test_add_dir(): test_util.mkdir(1, 'ad') test_util.verify_result() def test_add_dir_t(): test_util.mkdir(2, 'tt/ee/st') test_util.verify_result() def test_modify_file(): test_util.modfile(1, 'a.md', 'modify a.md') test_util.verify_result() def test_rm_file(): test_util.rmfile(1, 'a.md') test_util.verify_result() def test_rm_dir(): test_util.rmdir(1, 'ad') test_util.verify_result() def test_rename_file(): test_util.mkfile(2, 'b.md', 'add b.md') time.sleep(1) test_util.move(2, 'b.md', 'b_bak.md') test_util.verify_result() def test_rename_dir(): test_util.mkdir(2, 'ab') time.sleep(1) test_util.move(2, 'ab', 'ab_bak') test_util.verify_result() def test_each(): test_util.mkdir(1, 'abc1') test_util.mkfile(1, 'abc1/c.md', 'add abc1/c.md') time.sleep(1) test_util.mkdir(2, 'bcd1') test_util.mkfile(2, 'bcd1/d.md', 'add bcd1/d.md') test_util.verify_result() def test_unsync_resync(): test_util.desync_cli1() test_util.rmdir(1, 'abc1') test_util.modfile(1, 'bcd1/d.md', 'modify bcd1/d.md to test unsync resync') test_util.sync_cli1() test_util.verify_result() if not os.path.exists(test_util.getpath(1, 'abc1')): assert False, 'dir abc1 should be recreated when resync' if len(os.listdir(test_util.getpath(1, 'bcd1'))) != 2: assert False, 'should generate conflict file for bcd1/d.md when resync' def test_modify_timestamp(): test_util.touch(1, 'bcd1/d.md') test_util.verify_result()
""" Import and export of snippets. """ import os try: import xml.etree.cElementTree as ET except ImportError: import xml.etree.ElementTree as ET from PyQt5.QtCore import QSize, Qt from PyQt5.QtGui import QKeySequence from PyQt5.QtWidgets import QMessageBox, QTreeWidget, QTreeWidgetItem import app import appinfo import qutil import userguide import widgets.dialog from . import model from . import snippets from . import builtin def save(names, filename): """Saves the named snippets to a file.""" root = ET.Element('snippets') root.text = '\n\n' root.tail = '\n' d = ET.ElementTree(root) comment = ET.Comment(_comment.format(appinfo=appinfo)) comment.tail = '\n\n' root.append(comment) for name in names: snippet = ET.Element('snippet') snippet.set('id', name) snippet.text = '\n' snippet.tail = '\n\n' title = ET.Element('title') title.text = snippets.title(name, False) title.tail = '\n' shortcuts = ET.Element('shortcuts') ss = model.shortcuts(name) if ss: shortcuts.text = '\n' for s in ss: shortcut = ET.Element('shortcut') shortcut.text = s.toString() shortcut.tail = '\n' shortcuts.append(shortcut) shortcuts.tail = '\n' body = ET.Element('body') body.text = snippets.text(name) body.tail = '\n' snippet.append(title) snippet.append(shortcuts) snippet.append(body) root.append(snippet) d.write(filename, "UTF-8") def load(filename, widget): """Loads snippets from a file, displaying them in a list. The user can then choose: - overwrite builtin snippets or not - overwrite own snippets with same title or not - select and view snippets contents. """ try: d = ET.parse(filename) elements = list(d.findall('snippet')) if not elements: raise ValueError(_("No snippets found.")) except Exception as e: QMessageBox.critical(widget, app.caption(_("Error")), _("Can't read from source:\n\n{url}\n\n{error}").format( url=filename, error=e)) return dlg = widgets.dialog.Dialog(widget) dlg.setWindowModality(Qt.WindowModal) dlg.setWindowTitle(app.caption(_("dialog title", "Import Snippets"))) tree = QTreeWidget(headerHidden=True, rootIsDecorated=False) dlg.setMainWidget(tree) userguide.addButton(dlg.buttonBox(), "snippet_import_export") allnames = frozenset(snippets.names()) builtins = frozenset(builtin.builtin_snippets) titles = dict((snippets.title(n), n) for n in allnames if n not in builtins) new = QTreeWidgetItem(tree, [_("New Snippets")]) updated = QTreeWidgetItem(tree, [_("Updated Snippets")]) unchanged = QTreeWidgetItem(tree, [_("Unchanged Snippets")]) new.setFlags(Qt.ItemIsEnabled) updated.setFlags(Qt.ItemIsEnabled) unchanged.setFlags(Qt.ItemIsEnabled) new.setExpanded(True) updated.setExpanded(True) items = [] for snip in elements: item = QTreeWidgetItem() item.body = snip.find('body').text item.title = snip.find('title').text item.shortcuts = list(e.text for e in snip.findall('shortcuts/shortcut')) title = item.title or snippets.maketitle(snippets.parse(item.body).text) item.setText(0, title) name = snip.get('id') name = name if name in builtins else None # determine if new, updated or unchanged if not name: name = titles.get(title) item.name = name if not name or name not in allnames: new.addChild(item) items.append(item) item.setFlags(Qt.ItemIsEnabled | Qt.ItemIsUserCheckable) item.setCheckState(0, Qt.Checked) elif name: if (item.body != snippets.text(name) or title != snippets.title(name) or (item.shortcuts and item.shortcuts != [s.toString() for s in model.shortcuts(name) or ()])): updated.addChild(item) items.append(item) item.setFlags(Qt.ItemIsEnabled | Qt.ItemIsUserCheckable) item.setCheckState(0, 
Qt.Checked) else: unchanged.addChild(item) item.setFlags(Qt.ItemIsEnabled) # count: for i in new, updated, unchanged: i.setText(0, i.text(0) + " ({0})".format(i.childCount())) for i in new, updated: if i.childCount(): i.setFlags(Qt.ItemIsEnabled | Qt.ItemIsUserCheckable) i.setCheckState(0, Qt.Checked) def changed(item): if item in (new, updated): for i in range(item.childCount()): c = item.child(i) c.setCheckState(0, item.checkState(0)) tree.itemChanged.connect(changed) importShortcuts = QTreeWidgetItem([_("Import Keyboard Shortcuts")]) if items: tree.addTopLevelItem(importShortcuts) importShortcuts.setFlags(Qt.ItemIsEnabled | Qt.ItemIsUserCheckable) importShortcuts.setCheckState(0, Qt.Checked) dlg.setMessage(_("Choose which snippets you want to import:")) else: dlg.setMessage(_("There are no new or updated snippets in the file.")) unchanged.setExpanded(True) tree.setWhatsThis(_( "<p>Here the snippets from {filename} are displayed.</p>\n" "<p>If there are new or updated snippets, you can select or deselect " "them one by one, or all at once, using the checkbox of the group. " "Then click OK to import all the selected snippets.</p>\n" "<p>Existing, unchanged snippets can't be imported.</p>\n" ).format(filename=os.path.basename(filename))) qutil.saveDialogSize(dlg, "snippettool/import/size", QSize(400, 300)) if not dlg.exec_() or not items: return ac = model.collection() m = model.model() with qutil.busyCursor(): for i in items: if i.checkState(0) == Qt.Checked: index = m.saveSnippet(i.name, i.body, i.title) if i.shortcuts and importShortcuts.checkState(0): shortcuts = list(map(QKeySequence.fromString, i.shortcuts)) ac.setShortcuts(m.name(index), shortcuts) widget.updateColumnSizes() _comment = """ Created by {appinfo.appname} {appinfo.version}. Every snippet is represented by: title: title text shortcuts: list of shortcut elements, every shortcut is a key sequence body: the snippet text The snippet id attribute can be the name of a builtin snippet or a random name like 'n123456'. In the latter case, the title is used to determine whether a snippet is new or updated. """
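
For reference, a minimal stdlib-only sketch of reading a file written by save() above; it uses only the element names that save() emits, and the file path is made up.

import xml.etree.ElementTree as ET

d = ET.parse("snippets.xml")  # path is an assumption
for snip in d.findall("snippet"):
    print(snip.get("id"), snip.find("title").text)
    for sc in snip.findall("shortcuts/shortcut"):
        print("  shortcut:", sc.text)
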
__doc__="""interfaces.py Representation of Predictive Threshold components. $Id: info.py,v 1.2 2010/12/14 20:45:46 jc Exp $""" __version__ = "$Revision: 1.4 $"[11:-2] from Products.Zuul.interfaces import IInfo, IFacade from Products.Zuul.interfaces.template import IThresholdInfo from Products.Zuul.form import schema from Products.Zuul.utils import ZuulMessageFactory as _t class IPredThresholdInfo(IThresholdInfo): """ Interfaces for Predictive Threshold """ escalateCount = schema.Int(title=_t(u'Escalate Count'), order=9) alpha = schema.Text(title=_t(u'Alpha'), order=10) beta = schema.Text(title=_t(u'Beta'), order=11) gamma = schema.Text(title=_t(u'Gamma'), order=12) rows = schema.Text(title=_t(u'Rows'), order=13) season = schema.Text(title=_t(u'Season'), order=14) window = schema.Text(title=_t(u'Window'), order=15) threshold = schema.Text(title=_t(u'Threshold'), order=16) delta = schema.Text(title=_t(u'Delta'), order=17) predcolor = schema.Text(title=_t(u'Prediction Color'), order=18) cbcolor = schema.Text(title=_t(u'Confidence Band Color'), order=19) tkcolor = schema.Text(title=_t(u'Tick Color'), order=20)
def ExOh(s):
    # Count "x" and "o" characters and report whether they balance.
    # (The parameter was renamed from `str`, which shadowed the builtin,
    # and the redundant `elif xcount != ocount` branch is now a plain else.)
    xcount = 0
    ocount = 0
    for c in s:
        if c == "x":
            xcount += 1
        if c == "o":
            ocount += 1
    if xcount == ocount:
        print "true"
    else:
        print "false"

ExOh(raw_input())
__author__="hechao" __date__ ="$2011-12-20 16:36:20$" import gc from xml.parsers import expat from hwclass import * class Device: def __init__(self, dev_xml): self.description = '' self.product = '' self.vendor = '' self.version = '' self.businfo = '' self.logicalname = '' self.date = '' self.serial = '' self.capacity = '' self.width = '' self.clock = '' self.slot = '' self.size = '' self.config = {} self.capability = [] self.attr = {} self.dev_type = {} self.pcid = {} self._parser = expat.ParserCreate() self._parser.buffer_size = 102400 self._parser.StartElementHandler = self.start_handler self._parser.CharacterDataHandler = self.data_handler self._parser.EndElementHandler = self.end_handler self._parser.returns_unicode = False fd = file(dev_xml) self._parser.ParseFile(fd) fd.close() def start_handler(self, tag, attrs): self.flag = tag if tag == "node": self.attr = attrs elif tag == "setting": self.config.setdefault(attrs["id"], attrs["value"]) elif tag == "capability": self.capability.append(attrs["id"]) def data_handler(self, data): if(data == '\n'): return if(data.isspace()): return if self.flag == "description": self.description = data.strip() elif self.flag == "product": self.product = data.strip() elif self.flag == "vendor": self.vendor = data.strip() elif self.flag == "businfo": self.businfo = data.strip() elif self.flag == "logicalname": self.logicalname = data.strip() elif self.flag == "version": self.version = data.strip() elif self.flag == "date": self.date = data.strip() elif self.flag == "serial": self.serial = data.strip() elif self.flag == "capacity": self.capacity = data.strip() elif self.flag == "width": self.width = data.strip() elif self.flag == "clock": self.clock = data.strip() elif self.flag == "slot": self.slot = data.strip() elif self.flag == "size": self.size = data.strip() def end_handler(self, tag): if tag == "node": if self.attr["class"] == "system": system = System(self.description, self.product, self.vendor, self.version, \ self.serial, self.width, self.config, self.capability) self.dev_type.setdefault((0, "system"), []).append(system) elif self.attr["id"].split(":")[0] == "cpu" and self.attr["class"] == "processor": cpu = Cpu(self.description, self.product, self.vendor, self.version, \ self.businfo, self.serial, self.slot, self.size, self.capacity, self.width, self.clock, self.config, self.capability) self.dev_type.setdefault((1, "cpu"), []).append(cpu) elif self.attr["id"].split(":")[0] == "cache" and self.attr["class"] == "memory": cache = Cache(self.description, self.product, self.vendor, self.version, self.slot, self.size) self.dev_type.setdefault((1, "cpu"), []).append(cache) elif (self.attr["id"] == "core" or self.attr["id"] == "board") and self.attr["class"] == "bus": motherboard = Motherboard(self.description, self.product, self.vendor, self.version, self.serial) self.dev_type.setdefault((2, "motherboard"), []).append(motherboard) elif self.attr["id"] == "firmware" and self.attr["class"] == "memory": bios = Bios(self.description, self.product, self.vendor, self.version, \ self.date, self.size, self.capability) self.dev_type.setdefault((2, "motherboard"), []).append(bios) elif self.attr["id"].split(":")[0] == "memory" and self.attr["class"] == "memory": memory = Memory(self.description, self.product, self.vendor, self.version, \ self.slot, self.size) self.dev_type.setdefault((3, "memory"), []).append(memory) elif self.attr["id"].split(":")[0] == "bank" and self.attr["class"] == "memory": bank = Bank(self.description, self.product, self.vendor, 
self.version, \ self.serial, self.slot, self.size, self.width, self.clock) self.dev_type.setdefault((3, "memory"), []).append(bank) elif self.attr["id"].split(":")[0] == "display" and self.attr["class"] == "display": display = Display(self.description, self.product, self.vendor, self.version, \ self.businfo, self.config, self.capability) self.dev_type.setdefault((4, "display"), []).append(display) self.pcid[display.pcid] = "display" if get_monitor(): monitor = Monitor("", "", "", "") self.dev_type.setdefault((5, "monitor"), [monitor])#.append(monitor) elif self.attr["id"].split(":")[0] == "disk" and self.attr["class"] == "disk": disk = Disk(self.description, self.product, self.vendor, self.version, \ self.businfo, self.logicalname, self.serial, self.size, self.config, self.capability) self.dev_type.setdefault((6, "disk"), []).append(disk) elif self.attr["id"].split(":")[0] == "cdrom" and self.attr["class"] == "disk": cdrom = Cdrom(self.description, self.product, self.vendor, self.version, \ self.businfo, self.logicalname, self.config, self.capability) self.dev_type.setdefault((7, "cdrom"), []).append(cdrom) elif self.attr["class"] == "storage" and self.attr["handle"]: storage = Storage(self.description, self.product, self.vendor, self.version, \ self.businfo, self.logicalname, self.serial, self.config, self.capability) self.dev_type.setdefault((8, "storage"), []).append(storage) elif (self.attr["class"] == "network") or (self.attr["id"].split(":")[0] == "bridge" \ and self.attr["class"] == "bridge"): network = Network(self.description, self.product, self.vendor, self.version, \ self.businfo, self.logicalname, self.serial, self.capacity, self.config, self.capability) self.dev_type.setdefault((9, "network"), []).append(network) self.pcid[network.pcid] = "network" elif self.attr["class"] == "multimedia": media = Multimedia(self.description, self.product, self.vendor, self.version, \ self.businfo, self.config, self.capability) self.dev_type.setdefault((10, "multimedia"), []).append(media) self.pcid[media.pcid] = "multimedia" elif self.attr["class"] == "input": imput = Imput(self.description, self.product, self.vendor, self.version, \ self.businfo, self.config, self.capability) self.dev_type.setdefault((11, "input"), []).append(imput) self.pcid[imput.pcid] = "input" elif self.attr["id"].split(":")[0] != "generic" and self.attr["class"] == "generic": generic = Generic(self.description, self.product, self.vendor, self.version, \ self.businfo, self.serial, self.config, self.capability) self.dev_type.setdefault((12, "generic"), []).append(generic) self.pcid[generic.pcid] = "generic" elif self.attr["id"].split(":")[0] != "communication" and self.attr["class"] == "communication": modem = Modem(self.description, self.product, self.vendor, self.version, \ self.businfo, self.serial, self.config, self.capability) self.dev_type.setdefault((12, "generic"), []).append(modem) elif self.attr["id"].split(":")[0] == "battery" and self.attr["class"] == "power": power = Power(self.description, self.product, self.vendor, self.version, \ self.slot, self.capacity, self.config) self.dev_type.setdefault((12, "generic"), []).append(power) self.clear() def clear(self): self.description = '' self.product = '' self.vendor = '' self.version = '' self.businfo = '' self.logicalname = '' self.date = '' self.serial = '' self.capacity = '' self.width = '' self.clock = '' self.slot = '' self.size = '' self.config = {} self.capability = [] self.attr = {} def close(self): del self._parser gc.collect()
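
A minimal usage sketch for the parser above (Python 2, to match the module's use of file()); the XML path is an assumption, typically produced with `lshw -xml > hw.xml` beforehand.

# Hypothetical input file; "hw.xml" is an assumption.
dev = Device("hw.xml")

# dev_type maps (sort_order, category) tuples to lists of hwclass objects,
# so sorting the keys yields a stable category order for reporting.
for (order, category), items in sorted(dev.dev_type.items()):
    print category, len(items)

dev.close()  # drop the expat parser and force a gc pass
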
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('monitoring', '0002_monitoring_update'), ] operations = [ migrations.RemoveField( model_name='requestevent', name='resources', ), migrations.AddField( model_name='requestevent', name='resources', field=models.ManyToManyField(help_text='List of resources affected', to='monitoring.MonitoredResource', null=True, blank=True), ), ]
from south.db import db from south.v2 import SchemaMigration class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Application.content_type' db.add_column('applications_application', 'content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'], null=True, blank=True), keep_default=False) # Adding field 'Application.object_id' db.add_column('applications_application', 'object_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True), keep_default=False) # Deleting field 'UserApplication.object_id' db.delete_column('applications_userapplication', 'object_id') # Deleting field 'UserApplication.content_type' db.delete_column('applications_userapplication', 'content_type_id') def backwards(self, orm): # Deleting field 'Application.content_type' db.delete_column('applications_application', 'content_type_id') # Deleting field 'Application.object_id' db.delete_column('applications_application', 'object_id') # Adding field 'UserApplication.object_id' db.add_column('applications_userapplication', 'object_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True), keep_default=False) # Adding field 'UserApplication.content_type' db.add_column('applications_userapplication', 'content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'], null=True, blank=True), keep_default=False) models = { 'applications.applicant': { 'Meta': {'object_name': 'Applicant'}, 'address': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'country': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}), 'department': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), 'fax': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Institute']", 'null': 'True', 'blank': 'True'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}), 'mobile': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}), 'position': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'postcode': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}), 'supervisor': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'telephone': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'max_length': '16', 'unique': 'True', 'null': 'True', 'blank': 'True'}) }, 'applications.application': { 'Meta': {'object_name': 'Application'}, 
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}), 'content_type_temp': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'app_temp_obj'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}), 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Person']", 'null': 'True', 'blank': 'True'}), 'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'expires': ('django.db.models.fields.DateTimeField', [], {}), 'header_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}), 'object_id_temp': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}), 'secret_token': ('django.db.models.fields.CharField', [], {'default': "'f0369b28f1adc73f2c0c351ed377febb0fa872d4'", 'unique': 'True', 'max_length': '64'}), 'state': ('django.db.models.fields.CharField', [], {'default': "'N'", 'max_length': '1'}), 'submitted_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}) }, 'applications.projectapplication': { 'Meta': {'object_name': 'ProjectApplication', '_ormbases': ['applications.Application']}, 'additional_req': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'application_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['applications.Application']", 'unique': 'True', 'primary_key': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Institute']"}), 'machine_categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['machines.MachineCategory']", 'null': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'user_applications': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['applications.UserApplication']", 'null': 'True', 'blank': 'True'}) }, 'applications.userapplication': { 'Meta': {'object_name': 'UserApplication', '_ormbases': ['applications.Application']}, 'application_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['applications.Application']", 'unique': 'True', 'primary_key': 'True'}), 'make_leader': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'needs_account': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['projects.Project']", 'null': 'True', 'blank': 'True'}) }, 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': 
('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'machines.machinecategory': { 'Meta': {'object_name': 'MachineCategory', 'db_table': "'machine_category'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'people.institute': { 'Meta': {'object_name': 'Institute', 'db_table': "'institute'"}, 'active_delegate': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'active_delegate'", 'null': 'True', 'to': "orm['people.Person']"}), 'delegate': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'delegate'", 'null': 'True', 'to': "orm['people.Person']"}), 'gid': ('django.db.models.fields.IntegerField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'sub_delegates': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'sub_delegates'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['people.Person']"}) }, 'people.person': { 'Meta': {'object_name': 'Person', 'db_table': "'person'"}, 'address': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'user_approver'", 
'null': 'True', 'to': "orm['people.Person']"}), 'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'country': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}), 'date_approved': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'date_deleted': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'user_deletor'", 'null': 'True', 'to': "orm['people.Person']"}), 'department': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'expires': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'fax': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Institute']"}), 'last_usage': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'mobile': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'position': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'postcode': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}), 'state': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}), 'supervisor': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'telephone': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'}), 'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}) }, 'projects.project': { 'Meta': {'object_name': 'Project', 'db_table': "'project'"}, 'additional_req': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'project_approver'", 'null': 'True', 'to': "orm['people.Person']"}), 'date_approved': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'date_deleted': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'project_deletor'", 'null': 'True', 'to': "orm['people.Person']"}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Institute']"}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'is_approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'is_expertise': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 
'last_usage': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'leaders': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'leaders'", 'symmetrical': 'False', 'to': "orm['people.Person']"}), 'machine_categories': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'projects'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['machines.MachineCategory']"}), 'machine_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['machines.MachineCategory']"}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'pid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}), 'start_date': ('django.db.models.fields.DateField', [], {}), 'users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['people.Person']", 'null': 'True', 'blank': 'True'}) } } complete_apps = ['applications']
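# --- Usage note (illustrative, not part of the migration) ---
# A minimal sketch of applying and reverting this South migration from the
# shell; "00XX" stands in for this migration's number, which comes from the
# file name on disk:
#
#   ./manage.py migrate applications          # applies forwards()
#   ./manage.py migrate applications 00XX     # migrates back to 00XX, running backwards()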
import sys, os, glob, re


def getAvgSingle(fileNameGetAvg):
    """calculates the average cache hits for a file"""
    #print "TESTING:: file:", fileNameGetAvg
    #this part is from counter_mikey.py
    try:
        traceFile = open(fileNameGetAvg, 'r')
        fileLines = traceFile.readlines()
        traceFile.close()
    except IOError:
        print "Warning: file " + fileNameGetAvg + " not found"
        exit()
    #Process file
    #count the lines, track each line in a list
    everyLine = []
    totalMisses = []
    totalAccesses = []
    myLine = fileLines[0].split()
    cLevel = len(myLine)-3
    #print "TESTING:: cLevel=", cLevel
    for i in range(0,cLevel,1):
        totalMisses.append(0)
        totalAccesses.append(0)
    if cLevel < 1 or cLevel > 3:
        print "FATAL: Expected 1, 2 or 3 cache levels"
        exit()
    idx = 1
    for myLine in fileLines:
        # tokenize the line and verify that we get the correct number of tokens
        myLine = myLine.split()
        if cLevel != len(myLine)-3:
            print "FATAL: expected " + str(cLevel) + " hit rates on line " + str(idx)
            exit()
        # ascribe each token to an aptly-named variable
        #blockid = long(myLine[0]) ###ASK MIKEY - needed?###
        #fpCount = long(myLine[1]) ###ASK MIKEY - needed?###
        memCount = long(myLine[2])
        inclRate = []
        for i in range(0,len(totalMisses),1):
            inclRate.append(float(myLine[i+3]))
            #print "myLine[", i+3, "]= ", myLine[i+3]
        # convert each inclusive hit rate to an exclusive rate
        exclRate = [inclRate[0]]
        for i in range(1,len(inclRate),1):
            thisRate = float(inclRate[i])
            prevRate = float(inclRate[i-1])
            if prevRate < 100.0:
                exclRate.append(100.0*float(thisRate - prevRate)/(100.0 - prevRate))
            else:
                exclRate.append(float(0.0))
        blockAccesses = []
        blockMisses = []
        blockAccesses.append(memCount)
        ## blockHits[n] stores the number of memory accesses that make it to cache level N
        for i in range(0,len(totalMisses)-1,1):
            blockMisses.append((blockAccesses[i]*(float(100.0)-exclRate[i]))/float(100.0))
            blockAccesses.append(blockMisses[i])
        blockMisses.append(blockAccesses[cLevel-1]*((100.0-exclRate[cLevel-1])/100.0))
        for i in range(0,len(totalMisses),1):
            totalMisses[i] += blockMisses[i]
            totalAccesses[i] += blockAccesses[i]
        idx += 1
    totalHits = 0
    cacheValues = []
    for i in range(0,len(totalMisses),1):
        levelHits = (totalAccesses[i] - totalMisses[i])
        totalHits += levelHits
        #assign values to tuple and return
        cacheValues.append((levelHits)/(totalAccesses[i])*100)
        cacheValues.append(100*totalHits/totalAccesses[0])
        #print "Cache " + str(i+1) + " average %= " + str((levelHits)/(totalAccesses[i])*100) + " incl(" + str(100*totalHits/totalAccesses[0]) + ")"
    #print "cacheValues:", cacheValues
    return cacheValues


def printAvgSingle(fileNamepAvg, printType):
    #print "Avg for file:", fileNamepAvg
    fileidx = fileNamepAvg.rfind("/")
    shortfileName = fileNamepAvg[fileidx+1:]
    #print "TESTING: FileName:", shortfileName
    # get the sysid# for printing later
    try:
        sysidname = shortfileName[shortfileName.index('.')+1:]
        taskidname = shortfileName[shortfileName.index('.')-4:shortfileName.index('.')]
        #print "TESTING: sysidname=", sysidname
    except ValueError:
        print "ERROR: invalid filename, no '.' found in filename:", shortfileName
        exit()
    except IndexError:
        #If file has '.' as last char, this could error
        print "ERROR: invalid location of '.' in filename, this shouldn't happen -", shortfileName
        exit()
    #lifted from counter_mikey.py
    try:
        traceFile = open(fileNamepAvg, 'r')
    except (IOError, NameError):
        print "ERROR: can't find that file: " + fileNamepAvg
        exit()
    #Process file
    #count the lines, track each line in a list
    everyLine = []
    fileLines = traceFile.readlines()
    traceFile.close()
    myLine = fileLines[0].split()
    cLevel = len(myLine)-3
    totalMisses = []
    totalAccesses = []
    for i in range(0,cLevel,1):
        totalMisses.append(0)
        totalAccesses.append(0)
    ####validate cLevel 4,5, or 6 is expected
    #print "TESTING: This file has", cLevel, "cache level(s)"
    ##print "Eachline has", len(myLines), "columns"
    if cLevel < 1 or cLevel > 3:
        print "ERROR: Expected 1, 2, or 3 cache levels"
        exit()
    #### create if, else for cLevel = 4,5,6
    idx = 1
    for myLine in fileLines:
        # tokenize the line and verify that we get the correct number of tokens
        myLine = myLine.split()
        if cLevel != len(myLine)-3:
            print "ERROR: expected " + str(cLevel) + " hit rates on line " + str(idx)
        # ascribe each token to an aptly-named variable
        blockid = long(myLine[0])
        fpCount = long(myLine[1])
        memCount = long(myLine[2])
        inclRate = []
        for i in range(0,len(totalMisses),1):
            inclRate.append(float(myLine[i+3]))
        # convert each inclusive hit rate to an exclusive rate
        exclRate = [inclRate[0]]
        for i in range(1,len(inclRate),1):
            thisRate = float(inclRate[i])
            prevRate = float(inclRate[i-1])
            if prevRate < 100.0:
                exclRate.append(100.0*float(thisRate - prevRate)/(100.0 - prevRate))
            else:
                exclRate.append(float(0.0))
        # print str(myLine) + ' -> ',
        # print str(blockid) + '\t' + str(fpCount) + '\t' + str(memCount),
        # for i in range(0,len(exclRate),1):
        #     print '\t' + str(exclRate[i]),
        # print ''
        blockAccesses = []
        blockMisses = []
        blockAccesses.append(memCount)
        # print "block L1 accesses: " + str(blockAccesses[0])
        ## blockHits[n] stores the number of memory accesses that make it to cache level N
        for i in range(0,len(totalMisses)-1,1):
            blockMisses.append((blockAccesses[i]*(float(100.0)-exclRate[i]))/float(100.0))
            blockAccesses.append(blockMisses[i])
        blockMisses.append(blockAccesses[cLevel-1]*((100.0-exclRate[cLevel-1])/100.0))
        for i in range(0,len(totalMisses),1):
            totalMisses[i] += blockMisses[i]
            totalAccesses[i] += blockAccesses[i]
        idx += 1
    print shortfileName,
    totalHits = 0
    for i in range(0,len(totalMisses),1):
        levelHits = (totalAccesses[i] - totalMisses[i])
        totalHits += levelHits
        #print "Cache " + str(i+1) + " average %= " + str((levelHits)/(totalAccesses[i])*100) + " incl(" + str(100*totalHits/totalAccesses[0]) + ")"
    print str(100*totalHits/totalAccesses[0]).ljust(13),
    print ""


def sort_nicely(l):
    """ Sort the given list in the way that humans expect. """
    def convert(text):
        if text.isdigit():
            return int(text)
        else:
            return text
    ##convert = lambda text: int(text) if text.isdigit() else text
    alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
    l.sort(key=alphanum_key)


def errorMsg():
    print
    print "Usage : ./AvgCacheCalc.py\n"
    print "required:"
    print "\t--app string; eg icepic,hycom,..."
    print "\t--dataset string; eg large, standard..."
print "\t--cpu_count int; eg 256,1024,...\n" print "One of these two are required:" print "\t--taskid int; eg 0001" print "\t--sysid int; 1, 2, or 3 chars - 75\n" print "optional" print "\t--dir string; eg /pmaclabs/ti10/ti10_icepic_standard_0128/processed_trace/ [default=.]" exit() diridx = -1 sysidx = -1 taskidx = -1 sysidindexerr = 0 taskidindexerr = 0 try: #check for sysid sysidx = sys.argv.index("--sysid") #print "past sysidx ValueError" #print "sysidx=", sysidx if sysidx != -1: sysid = sys.argv[sysidx+1] #print "sysid=", sysid except IndexError: print "TESTING: IndexError:No --sysid argument. ->pass" sysidindexerr = 1 pass except ValueError: #print "TESTING: ValueError --sysid ->pass" # sysid may not be needed, taskid maybe used pass try: # check for taskid taskidx = sys.argv.index("--taskid") task = sys.argv[taskidx+1] # pad task with 0s if needed while len(task) < 4: task = "0"+task #print "TESTING:task=", task #print "taskidx=", taskidx #print "taskid=", task except IndexError: print "TESTING: IndexError: No --taskid argument. ->pass" taskidindexerr = 1 pass except ValueError: #print "TESTING: ValueError --taskid ->pass" pass if sysidx == -1 and taskidx == -1: print "Either --sysid or --taskid required - neither used" errorMsg() if sysidx != -1 and taskidx != -1: print "Either --sysid or --taskid required - both used" errorMsg() if sysidx != -1 and sysidindexerr == 1: # we are using sysid and there was a no value after print "No --sysid value given, please provide argument\n" errorMsg() if taskidx != -1 and taskidindexerr == 1: # we are using taskid and there was a no value after print "No --taskid value given, please provide argument\n" errorMsg() try: diridx = sys.argv.index("--dir") except ValueError: #print"no dir->ok" pass # if --dir is not used, current dir willbe used try: if diridx == -1: dirRead = os.getcwd() #use currend dir if none given #print "TESTING: current dir used dir=", dirRead else: dirRead = sys.argv[diridx+1] #print "TESTING: input used ***WHAT ABOUT SLASH AT END*** dir=", dirRead #pad a '/' to the end of the directory if dirRead[-1] != '/': dirRead = dirRead + '/' except IndexError: print "No --dir value given, please provide argument\n" errorMsg() except ValueError: print "TESTING:Error with --dir argument, see usage below\n" errorMsg() try: #check for app appidx = sys.argv.index("--app") appname = sys.argv[appidx+1] #print "app=", appname except IndexError: print "No --app value given, please provide argument\n" errorMsg() except ValueError: print "Error with --app argument, see usage below\n" errorMsg() try: #check for dataset datasetidx = sys.argv.index("--dataset") datasetname = sys.argv[datasetidx+1] #print "dataset=", datasetname except IndexError: print "No --dataset value given, please provide argument\n" errorMsg() except ValueError: print "Error with --dataset argument, see usage below\n" errorMsg() try: #check for cpu_count cpuidx = sys.argv.index("--cpu_count") cpu = sys.argv[cpuidx+1] #print "cpu=", cpu #print "cpu type:", type(cpu) cpulen = len(cpu) if cpulen > 4: # error if more than 4 digits print "ERROR: cpu_count cannot be greater than 4 digits" exit() if cpulen < 4: # pad with 0 if less than 4 digits #print "TESTING: cpulen:", cpulen, "needs to be 4" while len(cpu) < 4: cpu = "0"+cpu cpulen = len(cpu) except IndexError: print "No --cpu_count value given, please provide argument\n" errorMsg() except ValueError: print "Error with --cpu_count argument, see usage below\n" errorMsg() fileName = appname+"_"+datasetname+"_"+cpu print print 
"Reading files from: ", dirRead if taskidx == -1: #use sysid print "Averaging for all files like "+fileName+"*.sysid"+sysid fileList = glob.glob(dirRead+fileName+"*.sysid"+sysid) elif sysidx == -1: #use taskid print "Averaging for all files like "+fileName+"_"+task+"*" #print dirRead+fileName+"_"+task+".*" fileList = glob.glob(dirRead+fileName+"_"+task+".*") else: print "ERROR: No one should get here either taskid or sysid should have been validated" errorMsg() sort_nicely(fileList) if len(fileList) == 0: print "ERROR: No files match input...exiting" exit() print "Number of files: ", len(fileList) if taskidx == -1: #use sysid dirAvg = [] for i in range(0, len(fileList)): dirAvg.append(getAvgSingle(fileList[i])) printAvgSingle(fileList[i],1) print "\n *** Averaged Hit rates ***" print fileName+"*.sysid"+sysid, numCache = len(dirAvg[0]) totalCache = [0,0,0,0,0,0] #print "TESTING:numcache for avg of files is:", numCache #print "TESTING:dirAvg[0]=", dirAvg[0] #print "TESTING:len(dirAvg[0])=", len(dirAvg[0]) #print "TESTING:len(dirAvg)=", len(dirAvg) #print "TESTING:numCache range= ", range(numCache) #calcute averages for the folder for i in range(len(dirAvg)): #if len(dirAvg[i]) > 4: #print "TESTING:dirAvg[",i,"][4]=", dirAvg[i][4] for j in range(numCache): #print "::j=",j,"dirAvg[",i,"][",j,"]= ", dirAvg[i][j] totalCache[j] = totalCache[j]+dirAvg[i][j] #print values of the cache for i in range(0, len(totalCache), 2): if totalCache[i+1] !=0: print totalCache[i+1]/len(dirAvg), #print "excl", totalCache[i]/len(dirAvg) #print "Cache " + str((i/2)+1) + " average %= " + str(totalCache[i]/len(dirAvg)) + " incl(" + str(totalCache[i+1]/len(dirAvg)) + ")" elif sysidx == -1: #use taskid for i in range(0, len(fileList)): printAvgSingle(fileList[i],2)
"""Solar analemma.""" from ._skyBase import RadianceSky from ..material.light import Light from ..geometry.source import Source from ladybug.epw import EPW from ladybug.sunpath import Sunpath import os try: from itertools import izip as zip writemode = 'wb' except ImportError: # python 3 writemode = 'w' class Analemma(RadianceSky): """Generate a radiance-based analemma. Use Analemma for solar access/sunlight hours studies. For annual daylight/radiation studies see AnalemmaReversed. Analemma consists of two files: 1. *.ann file which includes sun geometries and materials. 2. *.mod file includes list of modifiers that are included in *.ann file. """ def __init__(self, sun_vectors, sun_up_hours): """Radiance-based analemma. Args: sun_vectors: A list of sun vectors as (x, y, z). sun_up_hours: List of hours of the year that corresponds to sun_vectors. """ RadianceSky.__init__(self) vectors = sun_vectors or [] # reverse sun vectors self._sun_vectors = tuple(tuple(v) for v in vectors) self._sun_up_hours = sun_up_hours assert len(sun_up_hours) == len(vectors), \ ValueError( 'Length of vectors [%d] does not match the length of hours [%d]' % (len(vectors), len(sun_up_hours)) ) @classmethod def from_json(cls, inp): """Create an analemma from a dictionary.""" return cls(inp['sun_vectors'], inp['sun_up_hours']) @classmethod def from_location(cls, location, hoys=None, north=0, is_leap_year=False): """Generate a radiance-based analemma for a location. Args: location: A ladybug location. hoys: A list of hours of the year (default: range(8760)). north: North angle from Y direction (default: 0). is_leap_year: A boolean to indicate if hours are for a leap year (default: False). """ sun_vectors = [] sun_up_hours = [] hoys = hoys or range(8760) north = north or 0 sp = Sunpath.from_location(location, north) sp.is_leap_year = is_leap_year for hour in hoys: sun = sp.calculate_sun_from_hoy(hour) if sun.altitude < 0: continue sun_vectors.append(sun.sun_vector) sun_up_hours.append(hour) return cls(sun_vectors, sun_up_hours) @classmethod def from_location_sun_up_hours(cls, location, sun_up_hours, north=0, is_leap_year=False): """Generate a radiance-based analemma for a location. Args: location: A ladybug location. sun_up_hours: A list of hours of the year to be included in analemma. north: North angle from Y direction (default: 0). is_leap_year: A boolean to indicate if hours are for a leap year (default: False). """ sun_vectors = [] north = north or 0 sp = Sunpath.from_location(location, north) sp.is_leap_year = is_leap_year for hour in sun_up_hours: sun = sp.calculate_sun_from_hoy(hour) sun_vectors.append(sun.sun_vector) return cls(sun_vectors, sun_up_hours) @classmethod def from_wea(cls, wea, hoys=None, north=0, is_leap_year=False): """Generate a radiance-based analemma from a ladybug wea. NOTE: Only the location from wea will be used for creating analemma. For climate-based sun materix see SunMatrix class. Args: wea: A ladybug Wea. sun_up_hours: A list of hours of the year to be included in analemma. north: North angle from Y direction (default: 0). is_leap_year: A boolean to indicate if hours are for a leap year (default: False). """ return cls.from_location(wea.location, hoys, north, is_leap_year) @classmethod def from_wea_sun_up_hours(cls, wea, sun_up_hours, north=0, is_leap_year=False): """Generate a radiance-based analemma from a ladybug wea. NOTE: Only the location from wea will be used for creating analemma. For climate-based sun materix see SunMatrix class. Args: wea: A ladybug Wea. 
sun_up_hours: A list of hours of the year to be included in analemma. north: North angle from Y direction (default: 0). is_leap_year: A boolean to indicate if hours are for a leap year (default: False). """ return cls.from_location_sun_up_hours(wea.location, sun_up_hours, north, is_leap_year) @classmethod def from_epw_file(cls, epw_file, hoys=None, north=0, is_leap_year=False): """Create sun matrix from an epw file. NOTE: Only the location from epw file will be used for creating analemma. For climate-based sun materix see SunMatrix class. Args: epw_file: Full path to an epw file. hoys: A list of hours of the year (default: range(8760)). north: North angle from Y direction (default: 0). is_leap_year: A boolean to indicate if hours are for a leap year (default: False). """ return cls.from_location(EPW(epw_file).location, hoys, north, is_leap_year) @classmethod def from_epw_file_sun_up_hours(cls, epw_file, sun_up_hours, north=0, is_leap_year=False): """Create sun matrix from an epw file. NOTE: Only the location from epw file will be used for creating analemma. For climate-based sun materix see SunMatrix class. Args: epw_file: Full path to an epw file. sun_up_hours: A list of hours of the year to be included in analemma. north: North angle from Y direction (default: 0). is_leap_year: A boolean to indicate if hours are for a leap year (default: False). """ return cls.from_location_sun_up_hours(EPW(epw_file).location, sun_up_hours, north, is_leap_year) @property def isAnalemma(self): """Return True.""" return True @property def is_climate_based(self): """Return True if generated based on values from weather file.""" return False @property def analemma_file(self): """Analemma file name. Use this file to create the octree. """ return 'analemma.rad' @property def sunlist_file(self): """Sun list file name. Use this file as the list of modifiers in rcontrib. """ return 'analemma.mod' @property def sun_vectors(self): """Return list of sun vectors.""" return self._sun_vectors @property def sun_up_hours(self): """Return list of hours for sun vectors.""" return self._sun_up_hours def execute(self, working_dir): fp = os.path.join(working_dir, self.analemma_file) # analemma file (geo and mat) sfp = os.path.join(working_dir, self.sunlist_file) # modifier list with open(fp, writemode) as outf, open(sfp, writemode) as outm: for hoy, vector in zip(self.sun_up_hours, self.sun_vectors): # use minute of the year to name sun positions moy = int(round(hoy * 60)) mat = Light('sol_%06d' % moy, 1e6, 1e6, 1e6) sun = Source('sun_%06d' % moy, vector, 0.533, mat) outf.write(sun.to_rad_string(True).replace('\n', ' ') + '\n') outm.write('sol_%06d\n' % moy) def duplicate(self): """Duplicate this class.""" return Analemma(self.sun_vectors, self.sun_up_hours) def to_rad_string(self): """Get the radiance command line as a string.""" raise AttributeError( 'analemma does not have a single line command. Try execute method.' ) def to_json(self): """Convert analemma to a dictionary.""" return {'sun_vectors': self.sun_vectors, 'sun_up_hours': self.sun_up_hours} def ToString(self): """Overwrite .NET ToString method.""" return self.__repr__() def __repr__(self): """Analemma representation.""" return 'Analemma: #%d' % len(self.sun_vectors) class AnalemmaReversed(Analemma): """Generate a radiance-based analemma. Reversed Analemma reverses direction of input sun vectors. Use reversed Analemma for radiation and daylight studies. Analemma consists of two files: 1. *_reversed.ann file which includes sun geometries and materials. 2. 
*.mod file includes list of modifiers that are included in *_reversed.ann file. """ @property def analemma_file(self): """Analemma file name. Use this file to create the octree. """ return 'analemma_reversed.rad' def execute(self, working_dir): fp = os.path.join(working_dir, self.analemma_file) # analemma file (geo and mat) sfp = os.path.join(working_dir, self.sunlist_file) # modifier list with open(fp, writemode) as outf, open(sfp, writemode) as outm: for hoy, vector in zip(self.sun_up_hours, self.sun_vectors): # use minute of the year to name sun positions moy = int(round(hoy * 60)) # reverse sun vector r_vector = tuple(-1 * i for i in vector) mat = Light('sol_%06d' % moy, 1e6, 1e6, 1e6) sun = Source('sun_%06d' % moy, r_vector, 0.533, mat) outf.write(sun.to_rad_string(True).replace('\n', ' ') + '\n') outm.write('sol_%06d\n' % moy)
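# --- Usage sketch (illustrative, not part of the module) ---
# A minimal example of writing the analemma files for a location; the epw path
# and output folder are hypothetical, and the import path assumes this module
# lives in a radiance `sky` package:
#
#   from <package>.sky.analemma import Analemma
#
#   ana = Analemma.from_epw_file('./denver.epw', hoys=range(0, 8760, 24))
#   ana.execute('./analemma_files')   # writes analemma.rad and analemma.mod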
""" Read samples from a UHD device and write to file formatted as binary outputs single precision complex float values or complex short values (interleaved 16 bit signed short integers). """ from gnuradio import gr, eng_notation from gnuradio import uhd from gnuradio.eng_option import eng_option from optparse import OptionParser import sys n2s = eng_notation.num_to_str class rx_cfile_block(gr.top_block): def __init__(self, options, filename): gr.top_block.__init__(self) # Create a UHD device source if options.output_shorts: self._u = uhd.usrp_source(device_addr=options.address, io_type=uhd.io_type.COMPLEX_INT16, num_channels=1) self._sink = gr.file_sink(gr.sizeof_short*2, filename) else: self._u = uhd.usrp_source(device_addr=options.address, io_type=uhd.io_type.COMPLEX_FLOAT32, num_channels=1) self._sink = gr.file_sink(gr.sizeof_gr_complex, filename) # Set receiver sample rate self._u.set_samp_rate(options.samp_rate) # Set receive daughterboard gain if options.gain is None: g = self._u.get_gain_range() options.gain = float(g.start()+g.stop())/2 print "Using mid-point gain of", options.gain, "(", g.start(), "-", g.stop(), ")" self._u.set_gain(options.gain) # Set the antenna if(options.antenna): self._u.set_antenna(options.antenna, 0) # Set frequency (tune request takes lo_offset) if(options.lo_offset is not None): treq = uhd.tune_request(options.freq, options.lo_offset) else: treq = uhd.tune_request(options.freq) tr = self._u.set_center_freq(treq) if tr == None: sys.stderr.write('Failed to set center frequency\n') raise SystemExit, 1 # Create head block if needed and wire it up if options.nsamples is None: self.connect(self._u, self._sink) else: if options.output_shorts: self._head = gr.head(gr.sizeof_short*2, int(options.nsamples)) else: self._head = gr.head(gr.sizeof_gr_complex, int(options.nsamples)) self.connect(self._u, self._head, self._sink) input_rate = self._u.get_samp_rate() if options.verbose: print "Address:", options.address print "Rx gain:", options.gain print "Rx baseband frequency:", n2s(tr.actual_rf_freq) print "Rx DDC frequency:", n2s(tr.actual_dsp_freq) print "Rx Sample Rate:", n2s(input_rate) if options.nsamples is None: print "Receiving samples until Ctrl-C" else: print "Receving", n2s(options.nsamples), "samples" if options.output_shorts: print "Writing 16-bit complex shorts" else: print "Writing 32-bit complex floats" print "Output filename:", filename def get_options(): usage="%prog: [options] output_filename" parser = OptionParser(option_class=eng_option, usage=usage) parser.add_option("-a", "--address", type="string", default="addr=192.168.10.2", help="Address of UHD device, [default=%default]") parser.add_option("-A", "--antenna", type="string", default=None, help="select Rx Antenna where appropriate") parser.add_option("", "--samp-rate", type="eng_float", default=1e6, help="set sample rate (bandwidth) [default=%default]") parser.add_option("-f", "--freq", type="eng_float", default=None, help="set frequency to FREQ", metavar="FREQ") parser.add_option("-g", "--gain", type="eng_float", default=None, help="set gain in dB (default is midpoint)") parser.add_option( "-s","--output-shorts", action="store_true", default=False, help="output interleaved shorts instead of complex floats") parser.add_option("-N", "--nsamples", type="eng_float", default=None, help="number of samples to collect [default=+inf]") parser.add_option("-v", "--verbose", action="store_true", default=False, help="verbose output") parser.add_option("", "--lo-offset", type="eng_float", default=None, 
help="set daughterboard LO offset to OFFSET [default=hw default]") (options, args) = parser.parse_args () if len(args) != 1: parser.print_help() raise SystemExit, 1 if options.freq is None: parser.print_help() sys.stderr.write('You must specify the frequency with -f FREQ\n'); raise SystemExit, 1 return (options, args[0]) if __name__ == '__main__': (options, filename) = get_options() tb = rx_cfile_block(options, filename) try: tb.run() except KeyboardInterrupt: pass
from PyQt4.QtCore import Qt from PyQt4.QtGui import QColor from volumina.api import LazyflowSource, ColortableLayer, AlphaModulatedLayer from ilastik.applets.dataExport.dataExportGui import DataExportGui, DataExportLayerViewerGui from lazyflow.operators import OpMultiArraySlicer2 from ilastik.utility.exportingOperator import ExportingGui class ObjectClassificationDataExportGui( DataExportGui, ExportingGui ): """ A subclass of the generic data export gui that creates custom layer viewers. """ def __init__(self, *args, **kwargs): super(ObjectClassificationDataExportGui, self).__init__(*args, **kwargs) self._exporting_operator = None def set_exporting_operator(self, op): self._exporting_operator = op def get_exporting_operator(self, lane=0): return self._exporting_operator.getLane(lane) def createLayerViewer(self, opLane): return ObjectClassificationResultsViewer(self.parentApplet, opLane) def get_export_dialog_title(self): return "Export Object Information" @property def gui_applet(self): return self.parentApplet def get_raw_shape(self): return self.get_exporting_operator().RawImages.meta.shape def get_feature_names(self): return self.get_exporting_operator().ComputedFeatureNames([]).wait() def _initAppletDrawerUic(self): super(ObjectClassificationDataExportGui, self)._initAppletDrawerUic() from PyQt4.QtGui import QGroupBox, QPushButton, QVBoxLayout group = QGroupBox("Export Object Feature Table", self.drawer) group.setLayout(QVBoxLayout()) self.drawer.layout().addWidget(group) btn = QPushButton("Configure and export", group) btn.clicked.connect(self.show_export_dialog) group.layout().addWidget(btn) def _createDefault16ColorColorTable(): colors = [] # Transparent for the zero label colors.append(QColor(0,0,0,0)) # ilastik v0.5 colors colors.append( QColor( Qt.red ) ) colors.append( QColor( Qt.green ) ) colors.append( QColor( Qt.yellow ) ) colors.append( QColor( Qt.blue ) ) colors.append( QColor( Qt.magenta ) ) colors.append( QColor( Qt.darkYellow ) ) colors.append( QColor( Qt.lightGray ) ) # Additional colors colors.append( QColor(255, 105, 180) ) #hot pink colors.append( QColor(102, 205, 170) ) #dark aquamarine colors.append( QColor(165, 42, 42) ) #brown colors.append( QColor(0, 0, 128) ) #navy colors.append( QColor(255, 165, 0) ) #orange colors.append( QColor(173, 255, 47) ) #green-yellow colors.append( QColor(128,0, 128) ) #purple colors.append( QColor(240, 230, 140) ) #khaki assert len(colors) == 16 return [c.rgba() for c in colors] class ObjectClassificationResultsViewer(DataExportLayerViewerGui): _colorTable16 = _createDefault16ColorColorTable() def setupLayers(self): layers = [] opLane = self.topLevelOperatorView selection_names = opLane.SelectionNames.value selection = selection_names[ opLane.InputSelection.value ] # This code depends on a specific order for the export slots. # If those change, update this function! 
        assert selection in ['Object Predictions',
                             'Object Probabilities',
                             'Pixel Probabilities']

        if selection == "Object Predictions":
            fromDiskSlot = self.topLevelOperatorView.ImageOnDisk
            if fromDiskSlot.ready():
                exportLayer = ColortableLayer( LazyflowSource(fromDiskSlot),
                                               colorTable=self._colorTable16 )
                exportLayer.name = "Prediction - Exported"
                exportLayer.visible = True
                layers.append(exportLayer)

            previewSlot = self.topLevelOperatorView.ImageToExport
            if previewSlot.ready():
                previewLayer = ColortableLayer( LazyflowSource(previewSlot),
                                                colorTable=self._colorTable16 )
                previewLayer.name = "Prediction - Preview"
                previewLayer.visible = False
                layers.append(previewLayer)

        elif selection in ("Object Probabilities", "Pixel Probabilities"):
            # Both probability selections use identical layer setups.
            exportedLayers = self._initPredictionLayers(opLane.ImageOnDisk)
            for layer in exportedLayers:
                layer.visible = True
                layer.name = layer.name + " - Exported"
            layers += exportedLayers

            previewLayers = self._initPredictionLayers(opLane.ImageToExport)
            for layer in previewLayers:
                layer.visible = False
                layer.name = layer.name + " - Preview"
            layers += previewLayers
        else:
            assert False, "Unknown selection."

        rawSlot = self.topLevelOperatorView.RawData
        if rawSlot.ready():
            rawLayer = self.createStandardLayerFromSlot(rawSlot)
            rawLayer.name = "Raw Data"
            rawLayer.opacity = 1.0
            layers.append(rawLayer)

        return layers

    def _initPredictionLayers(self, predictionSlot):
        layers = []
        opLane = self.topLevelOperatorView

        # Use a slicer to provide a separate slot for each channel layer
        opSlicer = OpMultiArraySlicer2( parent=opLane.viewed_operator().parent )
        opSlicer.Input.connect( predictionSlot )
        opSlicer.AxisFlag.setValue('c')

        for channel, channelSlot in enumerate(opSlicer.Slices):
            if channelSlot.ready():
                drange = channelSlot.meta.drange or (0.0, 1.0)
                predictsrc = LazyflowSource(channelSlot)
                predictLayer = AlphaModulatedLayer( predictsrc,
                                                    tintColor=QColor.fromRgba(self._colorTable16[channel+1]),
                                                    # FIXME: This is weird. Why are range and normalize both set to the same thing?
                                                    range=drange,
                                                    normalize=drange )
                predictLayer.opacity = 1.0
                predictLayer.visible = True
                predictLayer.name = "Probability Channel #{}".format( channel+1 )
                layers.append(predictLayer)

        return layers
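# --- Note on the color table (illustrative, not part of the module) ---
# _createDefault16ColorColorTable() returns sixteen QRgb values; index 0 is
# fully transparent so the zero ("background") label stays invisible, and a
# ColortableLayer looks up each pixel's integer value in the list. A minimal
# sketch, assuming `slot` is a ready lazyflow slot of integer labels:
#
#   table = _createDefault16ColorColorTable()
#   layer = ColortableLayer(LazyflowSource(slot), colorTable=table)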
""" sphinx.environment.managers.toctree ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Toctree manager for sphinx.environment. :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. """ from six import iteritems from docutils import nodes from sphinx import addnodes from sphinx.util import url_re from sphinx.util.nodes import clean_astext, process_only_nodes from sphinx.transforms import SphinxContentsFilter from sphinx.environment.managers import EnvironmentManager class Toctree(EnvironmentManager): name = 'toctree' def __init__(self, env): super(Toctree, self).__init__(env) self.tocs = env.tocs self.toc_num_entries = env.toc_num_entries self.toc_secnumbers = env.toc_secnumbers self.toc_fignumbers = env.toc_fignumbers self.toctree_includes = env.toctree_includes self.files_to_rebuild = env.files_to_rebuild self.glob_toctrees = env.glob_toctrees self.numbered_toctrees = env.numbered_toctrees def clear_doc(self, docname): self.tocs.pop(docname, None) self.toc_secnumbers.pop(docname, None) self.toc_fignumbers.pop(docname, None) self.toc_num_entries.pop(docname, None) self.toctree_includes.pop(docname, None) self.glob_toctrees.discard(docname) self.numbered_toctrees.discard(docname) for subfn, fnset in list(self.files_to_rebuild.items()): fnset.discard(docname) if not fnset: del self.files_to_rebuild[subfn] def merge_other(self, docnames, other): for docname in docnames: self.tocs[docname] = other.tocs[docname] self.toc_num_entries[docname] = other.toc_num_entries[docname] if docname in other.toctree_includes: self.toctree_includes[docname] = other.toctree_includes[docname] if docname in other.glob_toctrees: self.glob_toctrees.add(docname) if docname in other.numbered_toctrees: self.numbered_toctrees.add(docname) for subfn, fnset in other.files_to_rebuild.items(): self.files_to_rebuild.setdefault(subfn, set()).update(fnset & docnames) def process_doc(self, docname, doctree): """Build a TOC from the doctree and store it in the inventory.""" numentries = [0] # nonlocal again... 
def traverse_in_section(node, cls): """Like traverse(), but stay within the same section.""" result = [] if isinstance(node, cls): result.append(node) for child in node.children: if isinstance(child, nodes.section): continue result.extend(traverse_in_section(child, cls)) return result def build_toc(node, depth=1): entries = [] for sectionnode in node: # find all toctree nodes in this section and add them # to the toc (just copying the toctree node which is then # resolved in self.get_and_resolve_doctree) if isinstance(sectionnode, addnodes.only): onlynode = addnodes.only(expr=sectionnode['expr']) blist = build_toc(sectionnode, depth) if blist: onlynode += blist.children entries.append(onlynode) continue if not isinstance(sectionnode, nodes.section): for toctreenode in traverse_in_section(sectionnode, addnodes.toctree): item = toctreenode.copy() entries.append(item) # important: do the inventory stuff self.note_toctree(docname, toctreenode) continue title = sectionnode[0] # copy the contents of the section title, but without references # and unnecessary stuff visitor = SphinxContentsFilter(doctree) title.walkabout(visitor) nodetext = visitor.get_entry_text() if not numentries[0]: # for the very first toc entry, don't add an anchor # as it is the file's title anyway anchorname = '' else: anchorname = '#' + sectionnode['ids'][0] numentries[0] += 1 # make these nodes: # list_item -> compact_paragraph -> reference reference = nodes.reference( '', '', internal=True, refuri=docname, anchorname=anchorname, *nodetext) para = addnodes.compact_paragraph('', '', reference) item = nodes.list_item('', para) sub_item = build_toc(sectionnode, depth + 1) item += sub_item entries.append(item) if entries: return nodes.bullet_list('', *entries) return [] toc = build_toc(doctree) if toc: self.tocs[docname] = toc else: self.tocs[docname] = nodes.bullet_list('') self.toc_num_entries[docname] = numentries[0] def note_toctree(self, docname, toctreenode): """Note a TOC tree directive in a document and gather information about file relations from it. 
""" if toctreenode['glob']: self.glob_toctrees.add(docname) if toctreenode.get('numbered'): self.numbered_toctrees.add(docname) includefiles = toctreenode['includefiles'] for includefile in includefiles: # note that if the included file is rebuilt, this one must be # too (since the TOC of the included file could have changed) self.files_to_rebuild.setdefault(includefile, set()).add(docname) self.toctree_includes.setdefault(docname, []).extend(includefiles) def get_toc_for(self, docname, builder): """Return a TOC nodetree -- for use on the same page only!""" tocdepth = self.env.metadata[docname].get('tocdepth', 0) try: toc = self.tocs[docname].deepcopy() self._toctree_prune(toc, 2, tocdepth) except KeyError: # the document does not exist anymore: return a dummy node that # renders to nothing return nodes.paragraph() process_only_nodes(toc, builder.tags, warn_node=self.env.warn_node) for node in toc.traverse(nodes.reference): node['refuri'] = node['anchorname'] or '#' return toc def get_toctree_for(self, docname, builder, collapse, **kwds): """Return the global TOC nodetree.""" doctree = self.env.get_doctree(self.env.config.master_doc) toctrees = [] if 'includehidden' not in kwds: kwds['includehidden'] = True if 'maxdepth' not in kwds: kwds['maxdepth'] = 0 kwds['collapse'] = collapse for toctreenode in doctree.traverse(addnodes.toctree): toctree = self.env.resolve_toctree(docname, builder, toctreenode, prune=True, **kwds) if toctree: toctrees.append(toctree) if not toctrees: return None result = toctrees[0] for toctree in toctrees[1:]: result.extend(toctree.children) return result def resolve_toctree(self, docname, builder, toctree, prune=True, maxdepth=0, titles_only=False, collapse=False, includehidden=False): """Resolve a *toctree* node into individual bullet lists with titles as items, returning None (if no containing titles are found) or a new node. If *prune* is True, the tree is pruned to *maxdepth*, or if that is 0, to the value of the *maxdepth* option on the *toctree* node. If *titles_only* is True, only toplevel document titles will be in the resulting tree. If *collapse* is True, all branches not containing docname will be collapsed. """ if toctree.get('hidden', False) and not includehidden: return None # For reading the following two helper function, it is useful to keep # in mind the node structure of a toctree (using HTML-like node names # for brevity): # # <ul> # <li> # <p><a></p> # <p><a></p> # ... # <ul> # ... # </ul> # </li> # </ul> # # The transformation is made in two passes in order to avoid # interactions between marking and pruning the tree (see bug #1046). 
toctree_ancestors = self.get_toctree_ancestors(docname) def _toctree_add_classes(node, depth): """Add 'toctree-l%d' and 'current' classes to the toctree.""" for subnode in node.children: if isinstance(subnode, (addnodes.compact_paragraph, nodes.list_item)): # for <p> and <li>, indicate the depth level and recurse subnode['classes'].append('toctree-l%d' % (depth-1)) _toctree_add_classes(subnode, depth) elif isinstance(subnode, nodes.bullet_list): # for <ul>, just recurse _toctree_add_classes(subnode, depth+1) elif isinstance(subnode, nodes.reference): # for <a>, identify which entries point to the current # document and therefore may not be collapsed if subnode['refuri'] == docname: if not subnode['anchorname']: # give the whole branch a 'current' class # (useful for styling it differently) branchnode = subnode while branchnode: branchnode['classes'].append('current') branchnode = branchnode.parent # mark the list_item as "on current page" if subnode.parent.parent.get('iscurrent'): # but only if it's not already done return while subnode: subnode['iscurrent'] = True subnode = subnode.parent def _entries_from_toctree(toctreenode, parents, separate=False, subtree=False): """Return TOC entries for a toctree node.""" refs = [(e[0], e[1]) for e in toctreenode['entries']] entries = [] for (title, ref) in refs: try: refdoc = None if url_re.match(ref): if title is None: title = ref reference = nodes.reference('', '', internal=False, refuri=ref, anchorname='', *[nodes.Text(title)]) para = addnodes.compact_paragraph('', '', reference) item = nodes.list_item('', para) toc = nodes.bullet_list('', item) elif ref == 'self': # 'self' refers to the document from which this # toctree originates ref = toctreenode['parent'] if not title: title = clean_astext(self.env.titles[ref]) reference = nodes.reference('', '', internal=True, refuri=ref, anchorname='', *[nodes.Text(title)]) para = addnodes.compact_paragraph('', '', reference) item = nodes.list_item('', para) # don't show subitems toc = nodes.bullet_list('', item) else: if ref in parents: self.env.warn(ref, 'circular toctree references ' 'detected, ignoring: %s <- %s' % (ref, ' <- '.join(parents))) continue refdoc = ref toc = self.tocs[ref].deepcopy() maxdepth = self.env.metadata[ref].get('tocdepth', 0) if ref not in toctree_ancestors or (prune and maxdepth > 0): self._toctree_prune(toc, 2, maxdepth, collapse) process_only_nodes(toc, builder.tags, warn_node=self.env.warn_node) if title and toc.children and len(toc.children) == 1: child = toc.children[0] for refnode in child.traverse(nodes.reference): if refnode['refuri'] == ref and \ not refnode['anchorname']: refnode.children = [nodes.Text(title)] if not toc.children: # empty toc means: no titles will show up in the toctree self.env.warn_node( 'toctree contains reference to document %r that ' 'doesn\'t have a title: no link will be generated' % ref, toctreenode) except KeyError: # this is raised if the included file does not exist self.env.warn_node( 'toctree contains reference to nonexisting document %r' % ref, toctreenode) else: # if titles_only is given, only keep the main title and # sub-toctrees if titles_only: # delete everything but the toplevel title(s) # and toctrees for toplevel in toc: # nodes with length 1 don't have any children anyway if len(toplevel) > 1: subtrees = toplevel.traverse(addnodes.toctree) if subtrees: toplevel[1][:] = subtrees else: toplevel.pop(1) # resolve all sub-toctrees for subtocnode in toc.traverse(addnodes.toctree): if not (subtocnode.get('hidden', False) and not 
includehidden): i = subtocnode.parent.index(subtocnode) + 1 for item in _entries_from_toctree( subtocnode, [refdoc] + parents, subtree=True): subtocnode.parent.insert(i, item) i += 1 subtocnode.parent.remove(subtocnode) if separate: entries.append(toc) else: entries.extend(toc.children) if not subtree and not separate: ret = nodes.bullet_list() ret += entries return [ret] return entries maxdepth = maxdepth or toctree.get('maxdepth', -1) if not titles_only and toctree.get('titlesonly', False): titles_only = True if not includehidden and toctree.get('includehidden', False): includehidden = True # NOTE: previously, this was separate=True, but that leads to artificial # separation when two or more toctree entries form a logical unit, so # separating mode is no longer used -- it's kept here for history's sake tocentries = _entries_from_toctree(toctree, [], separate=False) if not tocentries: return None newnode = addnodes.compact_paragraph('', '') caption = toctree.attributes.get('caption') if caption: caption_node = nodes.caption(caption, '', *[nodes.Text(caption)]) caption_node.line = toctree.line caption_node.source = toctree.source caption_node.rawsource = toctree['rawcaption'] if hasattr(toctree, 'uid'): # move uid to caption_node to translate it caption_node.uid = toctree.uid del toctree.uid newnode += caption_node newnode.extend(tocentries) newnode['toctree'] = True # prune the tree to maxdepth, also set toc depth and current classes _toctree_add_classes(newnode, 1) self._toctree_prune(newnode, 1, prune and maxdepth or 0, collapse) if len(newnode[-1]) == 0: # No titles found return None # set the target paths in the toctrees (they are not known at TOC # generation time) for refnode in newnode.traverse(nodes.reference): if not url_re.match(refnode['refuri']): refnode['refuri'] = builder.get_relative_uri( docname, refnode['refuri']) + refnode['anchorname'] return newnode def get_toctree_ancestors(self, docname): parent = {} for p, children in iteritems(self.toctree_includes): for child in children: parent[child] = p ancestors = [] d = docname while d in parent and d not in ancestors: ancestors.append(d) d = parent[d] return ancestors def _toctree_prune(self, node, depth, maxdepth, collapse=False): """Utility: Cut a TOC at a specified depth.""" for subnode in node.children[:]: if isinstance(subnode, (addnodes.compact_paragraph, nodes.list_item)): # for <p> and <li>, just recurse self._toctree_prune(subnode, depth, maxdepth, collapse) elif isinstance(subnode, nodes.bullet_list): # for <ul>, determine if the depth is too large or if the # entry is to be collapsed if maxdepth > 0 and depth > maxdepth: subnode.parent.replace(subnode, []) else: # cull sub-entries whose parents aren't 'current' if (collapse and depth > 1 and 'iscurrent' not in subnode.parent): subnode.parent.remove(subnode) else: # recurse on visible children self._toctree_prune(subnode, depth+1, maxdepth, collapse) def assign_section_numbers(self): """Assign a section number to each heading under a numbered toctree.""" # a list of all docnames whose section numbers changed rewrite_needed = [] assigned = set() old_secnumbers = self.toc_secnumbers self.toc_secnumbers = self.env.toc_secnumbers = {} def _walk_toc(node, secnums, depth, titlenode=None): # titlenode is the title of the document, it will get assigned a # secnumber too, so that it shows up in next/prev/parent rellinks for subnode in node.children: if isinstance(subnode, nodes.bullet_list): numstack.append(0) _walk_toc(subnode, secnums, depth-1, titlenode) numstack.pop() 
titlenode = None elif isinstance(subnode, nodes.list_item): _walk_toc(subnode, secnums, depth, titlenode) titlenode = None elif isinstance(subnode, addnodes.only): # at this stage we don't know yet which sections are going # to be included; just include all of them, even if it leads # to gaps in the numbering _walk_toc(subnode, secnums, depth, titlenode) titlenode = None elif isinstance(subnode, addnodes.compact_paragraph): numstack[-1] += 1 if depth > 0: number = tuple(numstack) else: number = None secnums[subnode[0]['anchorname']] = \ subnode[0]['secnumber'] = number if titlenode: titlenode['secnumber'] = number titlenode = None elif isinstance(subnode, addnodes.toctree): _walk_toctree(subnode, depth) def _walk_toctree(toctreenode, depth): if depth == 0: return for (title, ref) in toctreenode['entries']: if url_re.match(ref) or ref == 'self' or ref in assigned: # don't mess with those continue if ref in self.tocs: secnums = self.toc_secnumbers[ref] = {} assigned.add(ref) _walk_toc(self.tocs[ref], secnums, depth, self.env.titles.get(ref)) if secnums != old_secnumbers.get(ref): rewrite_needed.append(ref) for docname in self.numbered_toctrees: assigned.add(docname) doctree = self.env.get_doctree(docname) for toctreenode in doctree.traverse(addnodes.toctree): depth = toctreenode.get('numbered', 0) if depth: # every numbered toctree gets new numbering numstack = [0] _walk_toctree(toctreenode, depth) return rewrite_needed def assign_figure_numbers(self): """Assign a figure number to each figure under a numbered toctree.""" rewrite_needed = [] assigned = set() old_fignumbers = self.toc_fignumbers self.toc_fignumbers = self.env.toc_fignumbers = {} fignum_counter = {} def get_section_number(docname, section): anchorname = '#' + section['ids'][0] secnumbers = self.toc_secnumbers.get(docname, {}) if anchorname in secnumbers: secnum = secnumbers.get(anchorname) else: secnum = secnumbers.get('') return secnum or tuple() def get_next_fignumber(figtype, secnum): counter = fignum_counter.setdefault(figtype, {}) secnum = secnum[:self.env.config.numfig_secnum_depth] counter[secnum] = counter.get(secnum, 0) + 1 return secnum + (counter[secnum],) def register_fignumber(docname, secnum, figtype, fignode): self.toc_fignumbers.setdefault(docname, {}) fignumbers = self.toc_fignumbers[docname].setdefault(figtype, {}) figure_id = fignode['ids'][0] fignumbers[figure_id] = get_next_fignumber(figtype, secnum) def _walk_doctree(docname, doctree, secnum): for subnode in doctree.children: if isinstance(subnode, nodes.section): next_secnum = get_section_number(docname, subnode) if next_secnum: _walk_doctree(docname, subnode, next_secnum) else: _walk_doctree(docname, subnode, secnum) continue elif isinstance(subnode, addnodes.toctree): for title, subdocname in subnode['entries']: if url_re.match(subdocname) or subdocname == 'self': # don't mess with those continue _walk_doc(subdocname, secnum) continue figtype = self.env.domains['std'].get_figtype(subnode) if figtype and subnode['ids']: register_fignumber(docname, secnum, figtype, subnode) _walk_doctree(docname, subnode, secnum) def _walk_doc(docname, secnum): if docname not in assigned: assigned.add(docname) doctree = self.env.get_doctree(docname) _walk_doctree(docname, doctree, secnum) if self.env.config.numfig: _walk_doc(self.env.config.master_doc, tuple()) for docname, fignums in iteritems(self.toc_fignumbers): if fignums != old_fignumbers.get(docname): rewrite_needed.append(docname) return rewrite_needed
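# --- Worked example (illustrative, not part of the manager) ---
# get_toctree_ancestors() inverts toctree_includes into a child -> parent map
# and walks upward from the given document. With
#
#   toctree_includes = {'index': ['usage'], 'usage': ['api']}
#
# the parent map is {'usage': 'index', 'api': 'usage'}, so
# get_toctree_ancestors('api') returns ['api', 'usage'] (the starting document
# itself is included; 'index' is not, since it has no parent). resolve_toctree()
# uses this list to decide which branches may be pruned or collapsed.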
from django.template.defaultfilters import register
from appointment.constants import EVENT_STATUS, ALARM_STATUS, ALARM_METHOD


@register.filter(name='event_status')
def event_status(value):
    """Event Status Templatetag"""
    if not value:
        return ''
    STATUS = dict(EVENT_STATUS)
    try:
        return STATUS[value].encode('utf-8')
    except KeyError:
        return ''


@register.filter(name='alarm_status')
def alarm_status(value):
    """Alarm Status Templatetag"""
    if not value:
        return ''
    STATUS = dict(ALARM_STATUS)
    try:
        return STATUS[value].encode('utf-8')
    except KeyError:
        return ''


@register.filter(name='alarm_method')
def alarm_method(value):
    """Alarm Method Templatetag"""
    if not value:
        return ''
    METHOD = dict(ALARM_METHOD)
    try:
        return METHOD[value].encode('utf-8')
    except KeyError:
        return ''
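# --- Template usage (illustrative, not part of the module) ---
# A minimal sketch of applying these filters in a template; the variable names
# are hypothetical, and the {% load %} name depends on this module's file name
# under the app's templatetags/ package:
#
#   {% load appointment_tags %}
#   {{ event.status|event_status }}
#   {{ alarm.status|alarm_status }} ({{ alarm.method|alarm_method }})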
from datetime import datetime, date, timedelta from dateutil.relativedelta import relativedelta from openerp.osv import fields, osv from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as OE_DATEFORMAT class hr_employee(osv.Model): _inherit = 'hr.employee' def _get_contracts_list(self, employee): '''Return list of contracts in chronological order''' contracts = [] for c in employee.contract_ids: l = len(contracts) if l == 0: contracts.append(c) else: dCStart = datetime.strptime(c.date_start, OE_DATEFORMAT).date() i = l - 1 while i >= 0: dContractStart = datetime.strptime( contracts[i].date_start, OE_DATEFORMAT).date() if dContractStart < dCStart: contracts = contracts[:i + 1] + [c] + contracts[i + 1:] break elif i == 0: contracts = [c] + contracts i -= 1 return contracts def _get_days_in_month(self, d): last_date = d - timedelta(days=(d.day - 1)) + relativedelta( months= +1) + relativedelta(days= -1) return last_date.day def get_months_service_to_date(self, cr, uid, ids, dToday=None, context=None): '''Returns a dictionary of floats. The key is the employee id, and the value is number of months of employment.''' res = dict.fromkeys(ids, 0) if dToday == None: dToday = date.today() for ee in self.pool.get('hr.employee').browse(cr, uid, ids, context=context): delta = relativedelta(dToday, dToday) contracts = self._get_contracts_list(ee) if len(contracts) == 0: res[ee.id] = (0.0, False) continue dInitial = datetime.strptime( contracts[0].date_start, OE_DATEFORMAT).date() if ee.initial_employment_date: dFirstContract = dInitial dInitial = datetime.strptime( ee.initial_employment_date, '%Y-%m-%d').date() if dFirstContract < dInitial: raise osv.except_osv(_('Employment Date mismatch!'), _('The initial employment date cannot be after the first contract in the system.\nEmployee: %s', ee.name)) delta = relativedelta(dFirstContract, dInitial) for c in contracts: dStart = datetime.strptime(c.date_start, '%Y-%m-%d').date() if dStart >= dToday: continue # If the contract doesn't have an end date, use today's date # If the contract has finished consider the entire duration of # the contract, otherwise consider only the months in the # contract until today. 
# if c.date_end: dEnd = datetime.strptime(c.date_end, '%Y-%m-%d').date() else: dEnd = dToday if dEnd > dToday: dEnd = dToday delta += relativedelta(dEnd, dStart) # Set the number of months the employee has worked date_part = float(delta.days) / float( self._get_days_in_month(dInitial)) res[ee.id] = ( float((delta.years * 12) + delta.months) + date_part, dInitial) return res def _get_employed_months(self, cr, uid, ids, field_name, arg, context=None): res = dict.fromkeys(ids, 0.0) _res = self.get_months_service_to_date(cr, uid, ids, context=context) for k, v in _res.iteritems(): res[k] = v[0] return res def _search_amount(self, cr, uid, obj, name, args, context): ids = set() for cond in args: amount = cond[2] if isinstance(cond[2], (list, tuple)): if cond[1] in ['in', 'not in']: amount = tuple(cond[2]) else: continue else: if cond[1] in ['=like', 'like', 'not like', 'ilike', 'not ilike', 'in', 'not in', 'child_of']: continue cr.execute("select id from hr_employee having %s %%s" % (cond[1]), (amount,)) res_ids = set(id[0] for id in cr.fetchall()) ids = ids and (ids & res_ids) or res_ids if ids: return [('id', 'in', tuple(ids))] return [('id', '=', '0')] _columns = { 'initial_employment_date': fields.date('Initial Date of Employment', groups=False, help='Date of first employment if it was before the start of the first contract in the system.'), 'length_of_service': fields.function(_get_employed_months, type='float', method=True, groups=False, string='Length of Service'), }
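The service-length arithmetic above boils down to whole months from relativedelta plus leftover days expressed as a fraction of a month. A standalone sketch with made-up dates (the class divides by the length of dInitial's month, 31 for March here):

from datetime import date
from dateutil.relativedelta import relativedelta

d_initial = date(2012, 3, 15)  # hypothetical first day of employment
d_today = date(2013, 5, 20)
delta = relativedelta(d_today, d_initial)  # 1 year, 2 months, 5 days
months = (delta.years * 12) + delta.months + float(delta.days) / 31
print(round(months, 2))  # 14.16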
import datetime import sqlalchemy.orm.exc from nylas.logging import get_logger log = get_logger() from inbox.auth.oauth import OAuthAuthHandler from inbox.basicauth import OAuthError from inbox.models import Namespace from inbox.config import config from inbox.models.backends.outlook import OutlookAccount from inbox.models.backends.oauth import token_manager from inbox.util.url import url_concat PROVIDER = '_outlook' AUTH_HANDLER_CLS = '_OutlookAuthHandler' OAUTH_CLIENT_ID = config.get_required('MS_LIVE_OAUTH_CLIENT_ID') OAUTH_CLIENT_SECRET = config.get_required('MS_LIVE_OAUTH_CLIENT_SECRET') OAUTH_REDIRECT_URI = config.get_required('MS_LIVE_OAUTH_REDIRECT_URI') OAUTH_AUTHENTICATE_URL = 'https://login.live.com/oauth20_authorize.srf' OAUTH_ACCESS_TOKEN_URL = 'https://login.live.com/oauth20_token.srf' OAUTH_USER_INFO_URL = 'https://apis.live.net/v5.0/me' OAUTH_BASE_URL = 'https://apis.live.net/v5.0/' OAUTH_SCOPE = ' '.join([ 'wl.basic', # Read access for basic profile info + contacts 'wl.offline_access', # ability to read / update user's info at any time 'wl.emails', # Read access to user's email addresses 'wl.imap']) # R/W access to user's email using IMAP / SMTP class _OutlookAuthHandler(OAuthAuthHandler): OAUTH_CLIENT_ID = OAUTH_CLIENT_ID OAUTH_CLIENT_SECRET = OAUTH_CLIENT_SECRET OAUTH_REDIRECT_URI = OAUTH_REDIRECT_URI OAUTH_AUTHENTICATE_URL = OAUTH_AUTHENTICATE_URL OAUTH_ACCESS_TOKEN_URL = OAUTH_ACCESS_TOKEN_URL OAUTH_USER_INFO_URL = OAUTH_USER_INFO_URL OAUTH_BASE_URL = OAUTH_BASE_URL OAUTH_SCOPE = OAUTH_SCOPE def create_account(self, db_session, email_address, response): email_address = response.get('emails')['account'] try: account = db_session.query(OutlookAccount).filter_by( email_address=email_address).one() except sqlalchemy.orm.exc.NoResultFound: namespace = Namespace() account = OutlookAccount(namespace=namespace) account.refresh_token = response['refresh_token'] account.date = datetime.datetime.utcnow() tok = response.get('access_token') expires_in = response.get('expires_in') token_manager.cache_token(account, tok, expires_in) account.scope = response.get('scope') account.email_address = email_address account.o_id_token = response.get('user_id') account.o_id = response.get('id') account.name = response.get('name') account.gender = response.get('gender') account.link = response.get('link') account.locale = response.get('locale') # Unlike Gmail, Outlook doesn't return the client_id and secret here account.client_id = OAUTH_CLIENT_ID account.client_secret = OAUTH_CLIENT_SECRET # Ensure account has sync enabled. account.enable_sync() return account def validate_token(self, access_token): return self._get_user_info(access_token) def interactive_auth(self, email_address=None): url_args = {'redirect_uri': self.OAUTH_REDIRECT_URI, 'client_id': self.OAUTH_CLIENT_ID, 'response_type': 'code', 'scope': self.OAUTH_SCOPE, 'access_type': 'offline'} url = url_concat(self.OAUTH_AUTHENTICATE_URL, url_args) print ('Please visit the following url to allow access to this ' 'application. The response will provide ' 'code=[AUTHORIZATION_CODE]&lc=XXXX in the location. Paste the' ' AUTHORIZATION_CODE here:') print '\n{}'.format(url) while True: auth_code = raw_input('Enter authorization code: ').strip() try: auth_response = self._get_authenticated_user(auth_code) return auth_response except OAuthError: print '\nInvalid authorization code, try again...\n' auth_code = None
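For reference, the URL printed by interactive_auth() is just the authenticate endpoint with the query string appended; a self-contained approximation (the client id and redirect URI are placeholders, not real credentials):

from urllib.parse import urlencode  # urllib.urlencode on Python 2

args = {'redirect_uri': 'https://localhost/callback',  # placeholder
        'client_id': 'my-client-id',                   # placeholder
        'response_type': 'code',
        'scope': 'wl.basic wl.offline_access wl.emails wl.imap',
        'access_type': 'offline'}
print('https://login.live.com/oauth20_authorize.srf?' + urlencode(args))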
''' Copyright (C) 2005 Aaron Spike, aaron@ekips.org This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ''' import random, math, inkex, cubicsuperpath def randomize((x, y), rx, ry, norm): # Draw a displacement fraction in [0, 1] (half-normal when norm is set) # and scale it per axis so the point stays within the (rx, ry) ellipse. if norm: r = abs(random.normalvariate(0.0, 0.5)) else: r = random.uniform(0.0, 1.0) a = random.uniform(0.0, 2*math.pi) x += math.cos(a)*r*rx y += math.sin(a)*r*ry return [x, y] class RadiusRandomize(inkex.Effect): def __init__(self): inkex.Effect.__init__(self) self.OptionParser.add_option("--title") self.OptionParser.add_option("-x", "--radiusx", action="store", type="float", dest="radiusx", default=10.0, help="Randomly move nodes and handles within this radius, X") self.OptionParser.add_option("-y", "--radiusy", action="store", type="float", dest="radiusy", default=10.0, help="Randomly move nodes and handles within this radius, Y") self.OptionParser.add_option("-c", "--ctrl", action="store", type="inkbool", dest="ctrl", default=True, help="Randomize control points") self.OptionParser.add_option("-e", "--end", action="store", type="inkbool", dest="end", default=True, help="Randomize nodes") self.OptionParser.add_option("-n", "--norm", action="store", type="inkbool", dest="norm", default=True, help="Use normal distribution") def effect(self): for id, node in self.selected.iteritems(): if node.tag == inkex.addNS('path','svg'): d = node.get('d') p = cubicsuperpath.parsePath(d) for subpath in p: for csp in subpath: if self.options.end: delta=randomize([0,0], self.options.radiusx, self.options.radiusy, self.options.norm) csp[0][0]+=delta[0] csp[0][1]+=delta[1] csp[1][0]+=delta[0] csp[1][1]+=delta[1] csp[2][0]+=delta[0] csp[2][1]+=delta[1] if self.options.ctrl: csp[0]=randomize(csp[0], self.options.radiusx, self.options.radiusy, self.options.norm) csp[2]=randomize(csp[2], self.options.radiusx, self.options.radiusy, self.options.norm) node.set('d',cubicsuperpath.formatPath(p)) if __name__ == '__main__': e = RadiusRandomize() e.affect()
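A quick geometric check, runnable in the same Python 2 environment as the extension (it relies on the randomize() defined above): with a uniform draw the displacement lands inside the (rx, ry) ellipse.

import math
dx, dy = randomize((0.0, 0.0), 10.0, 5.0, False)
# Normalised distance <= 1 means the displaced point stayed inside the ellipse.
print math.hypot(dx / 10.0, dy / 5.0) <= 1.0 + 1e-9  # True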
import os from spack import * class Vasp(MakefilePackage): """ The Vienna Ab initio Simulation Package (VASP) is a computer program for atomic scale materials modelling, e.g. electronic structure calculations and quantum-mechanical molecular dynamics, from first principles. """ homepage = "https://vasp.at" url = "file://{0}/vasp.5.4.4.pl2.tgz".format(os.getcwd()) manual_download = True version('6.2.0', sha256='49e7ba351bd634bc5f5f67a8ef1e38e64e772857a1c02f602828898a84197e25') version('6.1.1', sha256='e37a4dfad09d3ad0410833bcd55af6b599179a085299026992c2d8e319bf6927') version('5.4.4.pl2', sha256='98f75fd75399a23d76d060a6155f4416b340a1704f256a00146f89024035bc8e') version('5.4.4', sha256='5bd2449462386f01e575f9adf629c08cb03a13142806ffb6a71309ca4431cfb3') resource(name='vaspsol', git='https://github.com/henniggroup/VASPsol.git', tag='V1.0', when='+vaspsol') variant('openmp', default=False, description='Enable openmp build') variant('scalapack', default=False, description='Enables build with SCALAPACK') variant('cuda', default=False, description='Enables running on Nvidia GPUs') variant('vaspsol', default=False, description='Enable VASPsol implicit solvation model\n' 'https://github.com/henniggroup/VASPsol') depends_on('rsync', type='build') depends_on('blas') depends_on('lapack') depends_on('fftw-api') depends_on('mpi', type=('build', 'link', 'run')) depends_on('scalapack', when='+scalapack') depends_on('cuda', when='+cuda') depends_on('qd', when='%nvhpc') conflicts('%gcc@:8', msg='GFortran before 9.x does not support all features needed to build VASP') conflicts('+vaspsol', when='+cuda', msg='+vaspsol only available for CPU') conflicts('+openmp', when='@:6.1.1', msg='openmp support started from 6.2') parallel = False def edit(self, spec, prefix): if '%gcc' in spec: if '+openmp' in spec: make_include = join_path('arch', 'makefile.include.linux_gnu_omp') else: make_include = join_path('arch', 'makefile.include.linux_gnu') elif '%nvhpc' in spec: make_include = join_path('arch', 'makefile.include.linux_pgi') filter_file('-pgc++libs', '-c++libs', make_include, string=True) filter_file('pgcc', spack_cc, make_include) filter_file('pgc++', spack_cxx, make_include, string=True) filter_file('pgfortran', spack_fc, make_include) filter_file('/opt/pgi/qd-2.3.17/install/include', spec['qd'].prefix.include, make_include) filter_file('/opt/pgi/qd-2.3.17/install/lib', spec['qd'].prefix.lib, make_include) elif '%aocc' in spec: if '+openmp' in spec: copy( join_path('arch', 'makefile.include.linux_gnu_omp'), join_path('arch', 'makefile.include.linux_aocc_omp') ) make_include = join_path('arch', 'makefile.include.linux_aocc_omp') else: copy( join_path('arch', 'makefile.include.linux_gnu'), join_path('arch', 'makefile.include.linux_aocc') ) make_include = join_path('arch', 'makefile.include.linux_aocc') filter_file( 'gcc', '{0} {1}'.format(spack_cc, '-Mfree'), make_include, string=True ) filter_file('g++', spack_cxx, make_include, string=True) filter_file('^CFLAGS_LIB[ ]{0,}=.*$', 'CFLAGS_LIB = -O3', make_include) filter_file('^FFLAGS_LIB[ ]{0,}=.*$', 'FFLAGS_LIB = -O2', make_include) filter_file('^OFLAG[ ]{0,}=.*$', 'OFLAG = -O3', make_include) filter_file('^FC[ ]{0,}=.*$', 'FC = {0}'.format(spec['mpi'].mpifc), make_include, string=True) filter_file('^FCL[ ]{0,}=.*$', 'FCL = {0}'.format(spec['mpi'].mpifc), make_include, string=True) else: if '+openmp' in spec: make_include = join_path('arch', 'makefile.include.linux_{0}_omp'. 
format(spec.compiler.name)) else: make_include = join_path('arch', 'makefile.include.linux_' + spec.compiler.name) os.rename(make_include, 'makefile.include') # This bunch of 'filter_file()' is to make these options settable # as environment variables filter_file('^CPP_OPTIONS[ ]{0,}=[ ]{0,}', 'CPP_OPTIONS ?= ', 'makefile.include') filter_file('^FFLAGS[ ]{0,}=[ ]{0,}', 'FFLAGS ?= ', 'makefile.include') filter_file('^LIBDIR[ ]{0,}=.*$', '', 'makefile.include') filter_file('^BLAS[ ]{0,}=.*$', 'BLAS ?=', 'makefile.include') filter_file('^LAPACK[ ]{0,}=.*$', 'LAPACK ?=', 'makefile.include') filter_file('^FFTW[ ]{0,}?=.*$', 'FFTW ?=', 'makefile.include') filter_file('^MPI_INC[ ]{0,}=.*$', 'MPI_INC ?=', 'makefile.include') filter_file('-DscaLAPACK.*$\n', '', 'makefile.include') filter_file('^SCALAPACK[ ]{0,}=.*$', 'SCALAPACK ?=', 'makefile.include') if '+cuda' in spec: filter_file('^OBJECTS_GPU[ ]{0,}=.*$', 'OBJECTS_GPU ?=', 'makefile.include') filter_file('^CPP_GPU[ ]{0,}=.*$', 'CPP_GPU ?=', 'makefile.include') filter_file('^CFLAGS[ ]{0,}=.*$', 'CFLAGS ?=', 'makefile.include') if '+vaspsol' in spec: copy('VASPsol/src/solvation.F', 'src/') def setup_build_environment(self, spack_env): spec = self.spec cpp_options = ['-DMPI -DMPI_BLOCK=8000', '-Duse_collective', '-DCACHE_SIZE=4000', '-Davoidalloc', '-Duse_bse_te', '-Dtbdyn', '-Duse_shmem'] if '%nvhpc' in self.spec: cpp_options.extend(['-DHOST=\\"LinuxPGI\\"', '-DPGI16', '-Dqd_emulate']) elif '%aocc' in self.spec: cpp_options.extend(['-DHOST=\\"LinuxGNU\\"', '-Dfock_dblbuf']) if '+openmp' in self.spec: cpp_options.extend(['-D_OPENMP']) else: cpp_options.append('-DHOST=\\"LinuxGNU\\"') if self.spec.satisfies('@6:'): cpp_options.append('-Dvasp6') cflags = ['-fPIC', '-DADD_'] fflags = [] if '%gcc' in spec or '%intel' in spec: fflags.append('-w') elif '%nvhpc' in spec: fflags.extend(['-Mnoupcase', '-Mbackslash', '-Mlarge_arrays']) elif '%aocc' in spec: fflags.extend(['-fno-fortran-main', '-Mbackslash', '-ffast-math']) spack_env.set('BLAS', spec['blas'].libs.ld_flags) spack_env.set('LAPACK', spec['lapack'].libs.ld_flags) spack_env.set('FFTW', spec['fftw-api'].prefix) spack_env.set('MPI_INC', spec['mpi'].prefix.include) if '%nvhpc' in spec: spack_env.set('QD', spec['qd'].prefix) if '+scalapack' in spec: cpp_options.append('-DscaLAPACK') spack_env.set('SCALAPACK', spec['scalapack'].libs.ld_flags) if '+cuda' in spec: cpp_gpu = ['-DCUDA_GPU', '-DRPROMU_CPROJ_OVERLAP', '-DCUFFT_MIN=28', '-DUSE_PINNED_MEMORY'] objects_gpu = ['fftmpiw.o', 'fftmpi_map.o', 'fft3dlib.o', 'fftw3d_gpu.o', 'fftmpiw_gpu.o'] cflags.extend(['-DGPUSHMEM=300', '-DHAVE_CUBLAS']) spack_env.set('CUDA_ROOT', spec['cuda'].prefix) spack_env.set('CPP_GPU', ' '.join(cpp_gpu)) spack_env.set('OBJECTS_GPU', ' '.join(objects_gpu)) if '+vaspsol' in spec: cpp_options.append('-Dsol_compat') if spec.satisfies('%gcc@10:'): fflags.append('-fallow-argument-mismatch') # Finally spack_env.set('CPP_OPTIONS', ' '.join(cpp_options)) spack_env.set('CFLAGS', ' '.join(cflags)) spack_env.set('FFLAGS', ' '.join(fflags)) def build(self, spec, prefix): if '+cuda' in self.spec: make('gpu', 'gpu_ncl') else: make('std', 'gam', 'ncl') def install(self, spec, prefix): install_tree('bin/', prefix.bin)
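The rewrite of 'VAR =' to 'VAR ?=' is what makes setup_build_environment() effective: in make, '?=' assigns only when the variable is not already set, so the values exported via spack_env.set() win over the makefile defaults. The regex shape used by those filter_file() calls, sketched standalone:

import re

line = 'BLAS = -L/usr/lib -lblas'
print(re.sub(r'^BLAS[ ]{0,}=.*$', 'BLAS ?=', line))  # -> 'BLAS ?='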
import sys from testrunner import run def testfunc(child): child.expect("All up, running the shell now") child.sendline("ifconfig") child.expect(r"Iface\s+(\d+)\s+HWaddr:") if __name__ == "__main__": sys.exit(run(testfunc, timeout=1, echo=False))
""" Unit tests for the class :class:`iris.fileformats.um._fast_load_structured_fields.FieldCollation`. """ from __future__ import (absolute_import, division, print_function) from six.moves import (filter, input, map, range, zip) # noqa import iris.tests as tests from iris._lazy_data import as_lazy_data from netcdftime import datetime import numpy as np from iris.fileformats.um._fast_load_structured_fields import FieldCollation import iris.fileformats.pp class Test___init__(tests.IrisTest): def test_no_fields(self): with self.assertRaises(AssertionError): FieldCollation([]) class Test_fields(tests.IrisTest): def test_preserve_members(self): fields = ('foo', 'bar', 'wibble') collation = FieldCollation(fields) self.assertEqual(collation.fields, fields) def _make_field(lbyr=None, lbyrd=None, lbft=None, blev=None, bhlev=None, data=None): header = [0] * 64 if lbyr is not None: header[0] = lbyr header[1] = 1 header[2] = 1 if lbyrd is not None: header[6] = lbyrd header[7] = 1 header[8] = 1 if lbft is not None: header[13] = lbft if blev is not None: header[51] = blev if bhlev is not None: header[53] = bhlev field = iris.fileformats.pp.PPField3(header) if data is not None: _data = _make_data(data) field.data = _data return field def _make_data(fill_value): shape = (10, 10) return as_lazy_data(np.ones(shape)*fill_value) class Test_data(tests.IrisTest): # Test order of the data attribute when fastest-varying element is changed. def test_t1_varies_faster(self): collation = FieldCollation( [_make_field(lbyr=2013, lbyrd=2000, data=0), _make_field(lbyr=2014, lbyrd=2000, data=1), _make_field(lbyr=2015, lbyrd=2000, data=2), _make_field(lbyr=2013, lbyrd=2001, data=3), _make_field(lbyr=2014, lbyrd=2001, data=4), _make_field(lbyr=2015, lbyrd=2001, data=5)]) result = collation.data[:, :, 0, 0] expected = [[0, 1, 2], [3, 4, 5]] self.assertArrayEqual(result, expected) def test_t2_varies_faster(self): collation = FieldCollation( [_make_field(lbyr=2013, lbyrd=2000, data=0), _make_field(lbyr=2013, lbyrd=2001, data=1), _make_field(lbyr=2013, lbyrd=2002, data=2), _make_field(lbyr=2014, lbyrd=2000, data=3), _make_field(lbyr=2014, lbyrd=2001, data=4), _make_field(lbyr=2014, lbyrd=2002, data=5)]) result = collation.data[:, :, 0, 0] expected = [[0, 1, 2], [3, 4, 5]] self.assertArrayEqual(result, expected) class Test_element_arrays_and_dims(tests.IrisTest): def test_single_field(self): field = _make_field(2013) collation = FieldCollation([field]) self.assertEqual(collation.element_arrays_and_dims, {}) def test_t1(self): collation = FieldCollation([_make_field(lbyr=2013), _make_field(lbyr=2014)]) result = collation.element_arrays_and_dims self.assertEqual(list(result.keys()), ['t1']) values, dims = result['t1'] self.assertArrayEqual(values, [datetime(2013, 1, 1), datetime(2014, 1, 1)]) self.assertEqual(dims, (0,)) def test_t1_and_t2(self): collation = FieldCollation([_make_field(lbyr=2013, lbyrd=2000), _make_field(lbyr=2014, lbyrd=2001), _make_field(lbyr=2015, lbyrd=2002)]) result = collation.element_arrays_and_dims self.assertEqual(set(result.keys()), set(['t1', 't2'])) values, dims = result['t1'] self.assertArrayEqual(values, [datetime(2013, 1, 1), datetime(2014, 1, 1), datetime(2015, 1, 1)]) self.assertEqual(dims, (0,)) values, dims = result['t2'] self.assertArrayEqual(values, [datetime(2000, 1, 1), datetime(2001, 1, 1), datetime(2002, 1, 1)]) self.assertEqual(dims, (0,)) def test_t1_and_t2_and_lbft(self): collation = FieldCollation([_make_field(lbyr=1, lbyrd=15, lbft=6), _make_field(lbyr=1, lbyrd=16, lbft=9), 
_make_field(lbyr=11, lbyrd=25, lbft=6), _make_field(lbyr=11, lbyrd=26, lbft=9)]) result = collation.element_arrays_and_dims self.assertEqual(set(result.keys()), set(['t1', 't2', 'lbft'])) values, dims = result['t1'] self.assertArrayEqual(values, [datetime(1, 1, 1), datetime(11, 1, 1)]) self.assertEqual(dims, (0,)) values, dims = result['t2'] self.assertArrayEqual(values, [[datetime(15, 1, 1), datetime(16, 1, 1)], [datetime(25, 1, 1), datetime(26, 1, 1)]]) self.assertEqual(dims, (0, 1)) values, dims = result['lbft'] self.assertArrayEqual(values, [6, 9]) self.assertEqual(dims, (1,)) def test_blev(self): collation = FieldCollation([_make_field(blev=1), _make_field(blev=2)]) result = collation.element_arrays_and_dims keys = set(['blev', 'brsvd1', 'brsvd2', 'brlev', 'bhrlev', 'lblev', 'bhlev']) self.assertEqual(set(result.keys()), keys) values, dims = result['blev'] self.assertArrayEqual(values, [1, 2]) self.assertEqual(dims, (0,)) def test_bhlev(self): collation = FieldCollation([_make_field(blev=0, bhlev=1), _make_field(blev=1, bhlev=2)]) result = collation.element_arrays_and_dims keys = set(['blev', 'brsvd1', 'brsvd2', 'brlev', 'bhrlev', 'lblev', 'bhlev']) self.assertEqual(set(result.keys()), keys) values, dims = result['bhlev'] self.assertArrayEqual(values, [1, 2]) self.assertEqual(dims, (0,)) class Test__time_comparable_int(tests.IrisTest): def test(self): # Define a list of date-time tuples, which should remain both all # distinct and in ascending order when converted... test_date_tuples = [ # Increment each component in turn to check that all are handled. (2004, 1, 1, 0, 0, 0), (2004, 1, 1, 0, 0, 1), (2004, 1, 1, 0, 1, 0), (2004, 1, 1, 1, 0, 0), (2004, 1, 2, 0, 0, 0), (2004, 2, 1, 0, 0, 0), # Go across 2004-02-29 leap-day, and on to "Feb 31 .. Mar 1". (2004, 2, 27, 0, 0, 0), (2004, 2, 28, 0, 0, 0), (2004, 2, 29, 0, 0, 0), (2004, 2, 30, 0, 0, 0), (2004, 2, 31, 0, 0, 0), (2004, 3, 1, 0, 0, 0), (2005, 1, 1, 0, 0, 0)] collation = FieldCollation(['foo', 'bar']) test_date_ints = [collation._time_comparable_int(*test_tuple) for test_tuple in test_date_tuples] # Check all values are distinct. self.assertEqual(len(test_date_ints), len(set(test_date_ints), )) # Check all values are in order. self.assertEqual(test_date_ints, sorted(test_date_ints)) if __name__ == "__main__": tests.main()
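The packing trick these tests rely on can be sketched independently of the class: give each date-time component a fixed-width slot so chronological order becomes plain integer order, even for synthetic dates such as "Feb 31". The two-digit widths below are an illustrative assumption, not the real implementation:

def time_comparable_int(year, month, day, hour, minute, second):
    result = 0
    for value in (year, month, day, hour, minute, second):
        result = result * 100 + value  # two decimal digits per component
    return result

assert (time_comparable_int(2004, 2, 31, 0, 0, 0) <
        time_comparable_int(2004, 3, 1, 0, 0, 0))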
import unittest from ctypes import * import _ctypes_test lib = CDLL(_ctypes_test.__file__) class StringPtrTestCase(unittest.TestCase): def test__POINTER_c_char(self): class X(Structure): _fields_ = [("str", POINTER(c_char))] x = X() # NULL pointer access self.assertRaises(ValueError, getattr, x.str, "contents") b = c_buffer(b"Hello, World") from sys import getrefcount as grc self.assertEqual(grc(b), 2) x.str = b self.assertEqual(grc(b), 3) # POINTER(c_char) and Python string is NOT compatible # POINTER(c_char) and c_buffer() is compatible for i in range(len(b)): self.assertEqual(b[i], x.str[i]) self.assertRaises(TypeError, setattr, x, "str", "Hello, World") def test__c_char_p(self): class X(Structure): _fields_ = [("str", c_char_p)] x = X() # c_char_p and Python string is compatible # c_char_p and c_buffer is NOT compatible self.assertEqual(x.str, None) x.str = b"Hello, World" self.assertEqual(x.str, b"Hello, World") b = c_buffer(b"Hello, World") self.assertRaises(TypeError, setattr, x, b"str", b) def test_functions(self): strchr = lib.my_strchr strchr.restype = c_char_p # c_char_p and Python string is compatible # c_char_p and c_buffer are now compatible strchr.argtypes = c_char_p, c_char self.assertEqual(strchr(b"abcdef", b"c"), b"cdef") self.assertEqual(strchr(c_buffer(b"abcdef"), b"c"), b"cdef") # POINTER(c_char) and Python string is NOT compatible # POINTER(c_char) and c_buffer() is compatible strchr.argtypes = POINTER(c_char), c_char buf = c_buffer(b"abcdef") self.assertEqual(strchr(buf, b"c"), b"cdef") self.assertEqual(strchr(b"abcdef", b"c"), b"cdef") # XXX These calls are dangerous, because the first argument # to strchr is no longer valid after the function returns! # So we must keep a reference to buf separately strchr.restype = POINTER(c_char) buf = c_buffer(b"abcdef") r = strchr(buf, b"c") x = r[0], r[1], r[2], r[3], r[4] self.assertEqual(x, (b"c", b"d", b"e", b"f", b"\000")) del buf # x1 will NOT be the same as x, usually: x1 = r[0], r[1], r[2], r[3], r[4] if __name__ == '__main__': unittest.main()
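The compatibility rules the test exercises, shown standalone with only documented ctypes calls (create_string_buffer is the modern name for c_buffer):

from ctypes import POINTER, c_char, c_char_p, cast, create_string_buffer

buf = create_string_buffer(b"Hello")  # writable buffer: valid for POINTER(c_char)
p = cast(buf, POINTER(c_char))
print(p[0])        # b'H'

s = c_char_p(b"Hello")                # immutable bytes: valid for c_char_p
print(s.value)     # b'Hello'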
"""Windows-specific implementation of process utilities. This file is only meant to be imported by process.py, not by end-users. """ from __future__ import print_function import os import sys import ctypes import msvcrt from ctypes import c_int, POINTER from ctypes.wintypes import LPCWSTR, HLOCAL from subprocess import STDOUT from ._process_common import read_no_interrupt, process_handler, arg_split as py_arg_split from . import py3compat from . import text from .encoding import DEFAULT_ENCODING class AvoidUNCPath(object): """A context manager to protect command execution from UNC paths. In the Win32 API, commands can't be invoked with the cwd being a UNC path. This context manager temporarily changes directory to the 'C:' drive on entering, and restores the original working directory on exit. The context manager returns the starting working directory *if* it made a change and None otherwise, so that users can apply the necessary adjustment to their system calls in the event of a change. Example ------- :: cmd = 'dir' with AvoidUNCPath() as path: if path is not None: cmd = '"pushd %s &&"%s' % (path, cmd) os.system(cmd) """ def __enter__(self): self.path = os.getcwdu() self.is_unc_path = self.path.startswith(r"\\") if self.is_unc_path: # change to c drive (as cmd.exe cannot handle UNC addresses) os.chdir("C:") return self.path else: # We return None to signal that there was no change in the working # directory return None def __exit__(self, exc_type, exc_value, traceback): if self.is_unc_path: os.chdir(self.path) def _find_cmd(cmd): """Find the full path to a .bat or .exe using the win32api module.""" try: from win32api import SearchPath except ImportError: raise ImportError('you need to have pywin32 installed for this to work') else: PATH = os.environ['PATH'] extensions = ['.exe', '.com', '.bat', '.py'] path = None for ext in extensions: try: path = SearchPath(PATH, cmd + ext)[0] except: pass if path is None: raise OSError("command %r not found" % cmd) else: return path def _system_body(p): """Callback for _system.""" enc = DEFAULT_ENCODING for line in read_no_interrupt(p.stdout).splitlines(): line = line.decode(enc, 'replace') print(line, file=sys.stdout) for line in read_no_interrupt(p.stderr).splitlines(): line = line.decode(enc, 'replace') print(line, file=sys.stderr) # Wait to finish for returncode return p.wait() def system(cmd): """Win32 version of os.system() that works with network shares. Note that this implementation returns None, as meant for use in IPython. Parameters ---------- cmd : str A command to be executed in the system shell. Returns ------- None : we explicitly do NOT return the subprocess status code, as this utility is meant to be used extensively in IPython, where any return value would trigger :func:`sys.displayhook` calls. """ # The controller provides interactivity with both # stdin and stdout #import _process_win32_controller #_process_win32_controller.system(cmd) with AvoidUNCPath() as path: if path is not None: cmd = '"pushd %s &&"%s' % (path, cmd) return process_handler(cmd, _system_body) def getoutput(cmd): """Return standard output of executing cmd in a shell. Accepts the same arguments as os.system(). Parameters ---------- cmd : str A command to be executed in the system shell. 
Returns ------- stdout : str """ with AvoidUNCPath() as path: if path is not None: cmd = '"pushd %s &&"%s' % (path, cmd) out = process_handler(cmd, lambda p: p.communicate()[0], STDOUT) if out is None: out = b'' return py3compat.bytes_to_str(out) try: CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW CommandLineToArgvW.argtypes = [LPCWSTR, POINTER(c_int)] CommandLineToArgvW.restype = POINTER(LPCWSTR) LocalFree = ctypes.windll.kernel32.LocalFree LocalFree.restype = HLOCAL LocalFree.argtypes = [HLOCAL] def arg_split(commandline, posix=False, strict=True): """Split a command line's arguments in a shell-like manner. This is a special version for windows that uses a ctypes call to CommandLineToArgvW to do the argv splitting. The posix parameter is ignored. If strict=False, process_common.arg_split(...strict=False) is used instead. """ # CommandLineToArgvW returns the path to the executable if called with an empty string. if commandline.strip() == "": return [] if not strict: # not really a cl-arg, fallback on _process_common return py_arg_split(commandline, posix=posix, strict=strict) argvn = c_int() result_pointer = CommandLineToArgvW(py3compat.cast_unicode(commandline.lstrip()), ctypes.byref(argvn)) result_array_type = LPCWSTR * argvn.value result = [arg for arg in result_array_type.from_address(ctypes.addressof(result_pointer.contents))] LocalFree(result_pointer) return result except AttributeError: arg_split = py_arg_split
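Hypothetical usage of arg_split() on Windows, where CommandLineToArgvW keeps double-quoted arguments intact (elsewhere the except branch above falls back to py_arg_split):

print(arg_split('python -c "print(1)" --name "John Doe"'))
# ['python', '-c', 'print(1)', '--name', 'John Doe']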
from oslo.config import cfg import requests from neutron.api.v2 import attributes as attr from neutron.common import exceptions as exc from neutron.db import portbindings_base from neutron.db import quota_db # noqa from neutron.extensions import external_net from neutron.extensions import portbindings from neutron.extensions import securitygroup from neutron import neutron_plugin_base_v2 from neutron.openstack.common import importutils from neutron.openstack.common import jsonutils as json from neutron.openstack.common import log as logging from simplejson import JSONDecodeError LOG = logging.getLogger(__name__) vnc_opts = [ cfg.StrOpt('api_server_ip', default='127.0.0.1', help='IP address to connect to VNC controller'), cfg.StrOpt('api_server_port', default='8082', help='Port to connect to VNC controller'), cfg.DictOpt('contrail_extensions', default={}, help='Enable Contrail extensions (policy, ipam)'), ] class ContrailNotFoundError(exc.NotFound): message = '%(msg)s' class ContrailConflictError(exc.Conflict): message = '%(msg)s' class ContrailBadRequestError(exc.BadRequest): message = '%(msg)s' class ContrailServiceUnavailableError(exc.ServiceUnavailable): message = '%(msg)s' class ContrailNotAuthorizedError(exc.NotAuthorized): message = '%(msg)s' class InvalidContrailExtensionError(exc.ServiceUnavailable): message = _("Invalid Contrail Extension: %(ext_name)s %(ext_class)s") CONTRAIL_EXCEPTION_MAP = { requests.codes.not_found: ContrailNotFoundError, requests.codes.conflict: ContrailConflictError, requests.codes.bad_request: ContrailBadRequestError, requests.codes.service_unavailable: ContrailServiceUnavailableError, requests.codes.unauthorized: ContrailNotAuthorizedError, requests.codes.request_timeout: ContrailServiceUnavailableError, } class NeutronPluginContrailCoreV2(neutron_plugin_base_v2.NeutronPluginBaseV2, securitygroup.SecurityGroupPluginBase, portbindings_base.PortBindingBaseMixin, external_net.External_net): supported_extension_aliases = ["security-group", "router", "port-security", "binding", "agent", "quotas", "external-net", "allowed-address-pairs", "extra_dhcp_opt"] PLUGIN_URL_PREFIX = '/neutron' __native_bulk_support = False # patch VIF_TYPES portbindings.__dict__['VIF_TYPE_VROUTER'] = 'vrouter' portbindings.VIF_TYPES.append(portbindings.VIF_TYPE_VROUTER) def _parse_class_args(self): """Parse the contrailplugin.ini file. OpenContrail supports extensions such as ipam and policy; these extensions can be configured in the plugin configuration file as shown below. The plugin then loads the specified extensions.
contrail_extensions=ipam:<classpath>,policy:<classpath> """ contrail_extensions = cfg.CONF.APISERVER.contrail_extensions # If multiple class specified for same extension, last one will win # according to DictOpt behavior for ext_name, ext_class in contrail_extensions.items(): try: if not ext_class: LOG.error(_('Malformed contrail extension...')) continue self.supported_extension_aliases.append(ext_name) ext_class = importutils.import_class(ext_class) ext_instance = ext_class() ext_instance.set_core(self) for method in dir(ext_instance): for prefix in ['get', 'update', 'delete', 'create']: if method.startswith('%s_' % prefix): setattr(self, method, ext_instance.__getattribute__(method)) except Exception: LOG.exception(_("Contrail Backend Error")) # Converting contrail backend error to Neutron Exception raise InvalidContrailExtensionError( ext_name=ext_name, ext_class=ext_class) #keystone self._authn_token = None if cfg.CONF.auth_strategy == 'keystone': kcfg = cfg.CONF.keystone_authtoken body = '{"auth":{"passwordCredentials":{' body += ' "username": "%s",' % (kcfg.admin_user) body += ' "password": "%s"},' % (kcfg.admin_password) body += ' "tenantName":"%s"}}' % (kcfg.admin_tenant_name) self._authn_body = body self._authn_token = cfg.CONF.keystone_authtoken.admin_token self._keystone_url = "%s://%s:%s%s" % ( cfg.CONF.keystone_authtoken.auth_protocol, cfg.CONF.keystone_authtoken.auth_host, cfg.CONF.keystone_authtoken.auth_port, "/v2.0/tokens") def __init__(self): super(NeutronPluginContrailCoreV2, self).__init__() portbindings_base.register_port_dict_function() cfg.CONF.register_opts(vnc_opts, 'APISERVER') self._parse_class_args() def _get_base_binding_dict(self): binding = { portbindings.VIF_TYPE: portbindings.VIF_TYPE_VROUTER, portbindings.VIF_DETAILS: { # TODO(praneetb): Replace with new VIF security details portbindings.CAP_PORT_FILTER: 'security-group' in self.supported_extension_aliases } } return binding def get_agents(self, context, filters=None, fields=None): # This method is implemented so that horizon is happy return [] def _request_api_server(self, url, data=None, headers=None): # Attempt to post to Api-Server response = requests.post(url, data=data, headers=headers) if (response.status_code == requests.codes.unauthorized): # Get token from keystone and save it for next request response = requests.post(self._keystone_url, data=self._authn_body, headers={'Content-type': 'application/json'}) if (response.status_code == requests.codes.ok): # plan is to re-issue original request with new token auth_headers = headers or {} authn_content = json.loads(response.text) self._authn_token = authn_content['access']['token']['id'] auth_headers['X-AUTH-TOKEN'] = self._authn_token response = self._request_api_server(url, data, auth_headers) else: raise RuntimeError('Authentication Failure') return response def _request_api_server_authn(self, url, data=None, headers=None): authn_headers = headers or {} if self._authn_token is not None: authn_headers['X-AUTH-TOKEN'] = self._authn_token response = self._request_api_server(url, data, headers=authn_headers) return response def _relay_request(self, url_path, data=None): """Send received request to api server.""" url = "http://%s:%s%s" % (cfg.CONF.APISERVER.api_server_ip, cfg.CONF.APISERVER.api_server_port, url_path) return self._request_api_server_authn( url, data=data, headers={'Content-type': 'application/json'}) def _request_backend(self, context, data_dict, obj_name, action): context_dict = self._encode_context(context, action, obj_name) data = 
json.dumps({'context': context_dict, 'data': data_dict}) url_path = "%s/%s" % (self.PLUGIN_URL_PREFIX, obj_name) response = self._relay_request(url_path, data=data) try: return response.status_code, response.json() except JSONDecodeError: return response.status_code, response.content def _encode_context(self, context, operation, apitype): cdict = {'user_id': getattr(context, 'user_id', ''), 'is_admin': getattr(context, 'is_admin', False), 'operation': operation, 'type': apitype, 'tenant_id': getattr(context, 'tenant_id', None)} if context.roles: cdict['roles'] = context.roles if context.tenant: cdict['tenant'] = context.tenant return cdict def _encode_resource(self, resource_id=None, resource=None, fields=None, filters=None): resource_dict = {} if resource_id: resource_dict['id'] = resource_id if resource: resource_dict['resource'] = resource resource_dict['filters'] = filters resource_dict['fields'] = fields return resource_dict def _prune(self, resource_dict, fields): if fields: return dict(((key, item) for key, item in resource_dict.items() if key in fields)) return resource_dict def _transform_response(self, status_code, info=None, obj_name=None, fields=None): if status_code == requests.codes.ok: if not isinstance(info, list): return self._prune(info, fields) else: return [self._prune(items, fields) for items in info] self._raise_contrail_error(status_code, info, obj_name) def _raise_contrail_error(self, status_code, info, obj_name): if status_code == requests.codes.bad_request: raise ContrailBadRequestError( msg=info['message'], resource=obj_name) error_class = CONTRAIL_EXCEPTION_MAP[status_code] raise error_class(msg=info['message']) def _create_resource(self, res_type, context, res_data): """Create a resource in API server. This method encodes neutron model, and sends it to the contrail api server. """ for key, value in res_data[res_type].items(): if value == attr.ATTR_NOT_SPECIFIED: del res_data[res_type][key] res_dict = self._encode_resource(resource=res_data[res_type]) status_code, res_info = self._request_backend(context, res_dict, res_type, 'CREATE') res_dicts = self._transform_response(status_code, info=res_info, obj_name=res_type) LOG.debug("create_%(res_type)s(): %(res_dicts)s", {'res_type': res_type, 'res_dicts': res_dicts}) return res_dicts def _get_resource(self, res_type, context, id, fields): """Get a resource from API server. This method gets a resource from the contrail api server """ res_dict = self._encode_resource(resource_id=id, fields=fields) status_code, res_info = self._request_backend(context, res_dict, res_type, 'READ') res_dicts = self._transform_response(status_code, info=res_info, fields=fields, obj_name=res_type) LOG.debug("get_%(res_type)s(): %(res_dicts)s", {'res_type': res_type, 'res_dicts': res_dicts}) return res_dicts def _update_resource(self, res_type, context, id, res_data): """Update a resource in API server. 
This method updates a resource in the contrail api server """ res_dict = self._encode_resource(resource_id=id, resource=res_data[res_type]) status_code, res_info = self._request_backend(context, res_dict, res_type, 'UPDATE') res_dicts = self._transform_response(status_code, info=res_info, obj_name=res_type) LOG.debug("update_%(res_type)s(): %(res_dicts)s", {'res_type': res_type, 'res_dicts': res_dicts}) return res_dicts def _delete_resource(self, res_type, context, id): """Delete a resource in API server. This method deletes a resource in the contrail api server """ res_dict = self._encode_resource(resource_id=id) LOG.debug("delete_%(res_type)s(): %(id)s", {'res_type': res_type, 'id': id}) status_code, res_info = self._request_backend(context, res_dict, res_type, 'DELETE') if status_code != requests.codes.ok: self._raise_contrail_error(status_code, info=res_info, obj_name=res_type) def _list_resource(self, res_type, context, filters, fields): res_dict = self._encode_resource(filters=filters, fields=fields) status_code, res_info = self._request_backend(context, res_dict, res_type, 'READALL') res_dicts = self._transform_response(status_code, info=res_info, fields=fields, obj_name=res_type) LOG.debug( "get_%(res_type)s(): filters: %(filters)r data: %(res_dicts)r", {'res_type': res_type, 'filters': filters, 'res_dicts': res_dicts}) return res_dicts def _count_resource(self, res_type, context, filters): res_dict = self._encode_resource(filters=filters) status_code, res_count = self._request_backend(context, res_dict, res_type, 'READCOUNT') LOG.debug("get_%(res_type)s_count(): %(res_count)r", {'res_type': res_type, 'res_count': res_count}) return res_count def _get_network(self, context, id, fields=None): return self._get_resource('network', context, id, fields) def create_network(self, context, network): """Creates a new Virtual Network.""" return self._create_resource('network', context, network) def get_network(self, context, network_id, fields=None): """Get the attributes of a particular Virtual Network.""" return self._get_network(context, network_id, fields) def update_network(self, context, network_id, network): """Updates the attributes of a particular Virtual Network.""" return self._update_resource('network', context, network_id, network) def delete_network(self, context, network_id): """Deletes a Virtual Network. Deletes the network with the specified network identifier belonging to the specified tenant.
""" self._delete_resource('network', context, network_id) def get_networks(self, context, filters=None, fields=None): """Get the list of Virtual Networks.""" return self._list_resource('network', context, filters, fields) def get_networks_count(self, context, filters=None): """Get the count of Virtual Network.""" networks_count = self._count_resource('network', context, filters) return networks_count['count'] def create_subnet(self, context, subnet): """Creates a new subnet, and assigns it a symbolic name.""" if subnet['subnet']['gateway_ip'] is None: subnet['subnet']['gateway_ip'] = '0.0.0.0' if subnet['subnet']['host_routes'] != attr.ATTR_NOT_SPECIFIED: if (len(subnet['subnet']['host_routes']) > cfg.CONF.max_subnet_host_routes): raise exc.HostRoutesExhausted(subnet_id=subnet[ 'subnet'].get('id', _('new subnet')), quota=cfg.CONF.max_subnet_host_routes) subnet_created = self._create_resource('subnet', context, subnet) return self._make_subnet_dict(subnet_created) def _make_subnet_dict(self, subnet): if 'gateway_ip' in subnet and subnet['gateway_ip'] == '0.0.0.0': subnet['gateway_ip'] = None return subnet def _get_subnet(self, context, subnet_id, fields=None): subnet = self._get_resource('subnet', context, subnet_id, fields) return self._make_subnet_dict(subnet) def get_subnet(self, context, subnet_id, fields=None): """Get the attributes of a particular subnet.""" return self._get_subnet(context, subnet_id, fields) def update_subnet(self, context, subnet_id, subnet): """Updates the attributes of a particular subnet.""" subnet = self._update_resource('subnet', context, subnet_id, subnet) return self._make_subnet_dict(subnet) def delete_subnet(self, context, subnet_id): """ Deletes the subnet with the specified subnet identifier belonging to the specified tenant. """ self._delete_resource('subnet', context, subnet_id) def get_subnets(self, context, filters=None, fields=None): """Get the list of subnets.""" return [self._make_subnet_dict(s) for s in self._list_resource( 'subnet', context, filters, fields)] def get_subnets_count(self, context, filters=None): """Get the count of subnets.""" subnets_count = self._count_resource('subnet', context, filters) return subnets_count['count'] def _extend_port_dict_security_group(self, port_res, port_db): # Security group bindings will be retrieved from the sqlalchemy # model. As they're loaded eagerly with ports because of the # joined load they will not cause an extra query. 
port_res[securitygroup.SECURITYGROUPS] = port_db.get( 'security_groups', []) or [] return port_res def _make_port_dict(self, port): return port def _get_port(self, context, id, fields=None): port = self._get_resource('port', context, id, fields) return self._make_port_dict(port) def _update_ips_for_port(self, context, network_id, port_id, original_ips, new_ips): """Add or remove IPs from the port.""" # These ips are still on the port and haven't been removed prev_ips = [] # the new_ips contain all of the fixed_ips that are to be updated if len(new_ips) > cfg.CONF.max_fixed_ips_per_port: msg = _('Exceeded maximum amount of fixed ips per port') raise exc.InvalidInput(error_message=msg) # Remove all of the intersecting elements for original_ip in original_ips[:]: for new_ip in new_ips[:]: if ('ip_address' in new_ip and original_ip['ip_address'] == new_ip['ip_address']): original_ips.remove(original_ip) new_ips.remove(new_ip) prev_ips.append(original_ip) return new_ips, prev_ips def create_port(self, context, port): """Creates a port on the specified Virtual Network.""" port = self._create_resource('port', context, port) return self._make_port_dict(port) def get_port(self, context, port_id, fields=None): """Get the attributes of a particular port.""" return self._get_port(context, port_id, fields) def update_port(self, context, port_id, port): """Updates a port. Updates the attributes of a port on the specified Virtual Network. """ if 'fixed_ips' in port['port']: original = self._get_port(context, port_id) added_ips, prev_ips = self._update_ips_for_port( context, original['network_id'], port_id, original['fixed_ips'], port['port']['fixed_ips']) port['port']['fixed_ips'] = prev_ips + added_ips port = self._update_resource('port', context, port_id, port) return self._make_port_dict(port) def delete_port(self, context, port_id): """Deletes a port. Deletes a port on a specified Virtual Network, if the port contains a remote interface attachment, the remote interface is first un-plugged and then the port is deleted. """ self._delete_resource('port', context, port_id) def get_ports(self, context, filters=None, fields=None): """Get all ports. Retrieves all port identifiers belonging to the specified Virtual Network with the specified filter. """ return [self._make_port_dict(p) for p in self._list_resource('port', context, filters, fields)] def get_ports_count(self, context, filters=None): """Get the count of ports.""" ports_count = self._count_resource('port', context, filters) return ports_count['count'] # Router API handlers def create_router(self, context, router): """Creates a router. Creates a new Logical Router, and assigns it a symbolic name.
""" return self._create_resource('router', context, router) def get_router(self, context, router_id, fields=None): """Get the attributes of a router.""" return self._get_resource('router', context, router_id, fields) def update_router(self, context, router_id, router): """Updates the attributes of a router.""" return self._update_resource('router', context, router_id, router) def delete_router(self, context, router_id): """Deletes a router.""" self._delete_resource('router', context, router_id) def get_routers(self, context, filters=None, fields=None): """Retrieves all router identifiers.""" return self._list_resource('router', context, filters, fields) def get_routers_count(self, context, filters=None): """Get the count of routers.""" routers_count = self._count_resource('router', context, filters) return routers_count['count'] def add_router_interface(self, context, router_id, interface_info): """Add interface to a router.""" if not interface_info: msg = _("Either subnet_id or port_id must be specified") raise exc.BadRequest(resource='router', msg=msg) if 'port_id' in interface_info: if 'subnet_id' in interface_info: msg = _("Cannot specify both subnet-id and port-id") raise exc.BadRequest(resource='router', msg=msg) res_dict = self._encode_resource(resource_id=router_id, resource=interface_info) status_code, res_info = self._request_backend(context, res_dict, 'router', 'ADDINTERFACE') if status_code != requests.codes.ok: self._raise_contrail_error(status_code, info=res_info, obj_name='add_router_interface') return res_info def remove_router_interface(self, context, router_id, interface_info): """Delete interface from a router.""" if not interface_info: msg = _("Either subnet_id or port_id must be specified") raise exc.BadRequest(resource='router', msg=msg) res_dict = self._encode_resource(resource_id=router_id, resource=interface_info) status_code, res_info = self._request_backend(context, res_dict, 'router', 'DELINTERFACE') if status_code != requests.codes.ok: self._raise_contrail_error(status_code, info=res_info, obj_name='remove_router_interface') return res_info # Floating IP API handlers def create_floatingip(self, context, floatingip): """Creates a floating IP.""" return self._create_resource('floatingip', context, floatingip) def update_floatingip(self, context, fip_id, floatingip): """Updates the attributes of a floating IP.""" return self._update_resource('floatingip', context, fip_id, floatingip) def get_floatingip(self, context, fip_id, fields=None): """Get the attributes of a floating ip.""" return self._get_resource('floatingip', context, fip_id, fields) def delete_floatingip(self, context, fip_id): """Deletes a floating IP.""" self._delete_resource('floatingip', context, fip_id) def get_floatingips(self, context, filters=None, fields=None): """Retrieves all floating ips identifiers.""" return self._list_resource('floatingip', context, filters, fields) def get_floatingips_count(self, context, filters=None): """Get the count of floating IPs.""" fips_count = self._count_resource('floatingip', context, filters) return fips_count['count'] # Security Group handlers def create_security_group(self, context, security_group): """Creates a Security Group.""" return self._create_resource('security_group', context, security_group) def get_security_group(self, context, sg_id, fields=None, tenant_id=None): """Get the attributes of a security group.""" return self._get_resource('security_group', context, sg_id, fields) def update_security_group(self, context, sg_id, security_group): 
"""Updates the attributes of a security group.""" return self._update_resource('security_group', context, sg_id, security_group) def delete_security_group(self, context, sg_id): """Deletes a security group.""" self._delete_resource('security_group', context, sg_id) def get_security_groups(self, context, filters=None, fields=None, sorts=None, limit=None, marker=None, page_reverse=False): """Retrieves all security group identifiers.""" return self._list_resource('security_group', context, filters, fields) def get_security_groups_count(self, context, filters=None): return 0 def get_security_group_rules_count(self, context, filters=None): return 0 def create_security_group_rule(self, context, security_group_rule): """Creates a security group rule.""" return self._create_resource('security_group_rule', context, security_group_rule) def delete_security_group_rule(self, context, sg_rule_id): """Deletes a security group rule.""" self._delete_resource('security_group_rule', context, sg_rule_id) def get_security_group_rule(self, context, sg_rule_id, fields=None): """Get the attributes of a security group rule.""" return self._get_resource('security_group_rule', context, sg_rule_id, fields) def get_security_group_rules(self, context, filters=None, fields=None, sorts=None, limit=None, marker=None, page_reverse=False): """Retrieves all security group rules.""" return self._list_resource('security_group_rule', context, filters, fields)
"""Tests for tensorflow.ops.tf.gather.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import gradients_impl from tensorflow.python.platform import test _TEST_TYPES = (dtypes.int64, dtypes.float32, dtypes.complex64, dtypes.complex128) class GatherTest(test.TestCase): def _buildParams(self, data, dtype): data = data.astype(dtype.as_numpy_dtype) # For complex types, add an index-dependent imaginary component so we can # tell we got the right value. if dtype.is_complex: return data + 10j * data return data def testScalar1D(self): with self.cached_session(use_gpu=True): data = np.array([0, 1, 2, 3, 7, 5]) for dtype in _TEST_TYPES: for indices in 4, [1, 2, 2, 4, 5]: params_np = self._buildParams(data, dtype) params = constant_op.constant(params_np) indices_tf = constant_op.constant(indices) gather_t = array_ops.gather(params, indices_tf) gather_val = gather_t.eval() np_val = params_np[indices] self.assertAllEqual(np_val, gather_val) self.assertEqual(np_val.shape, gather_t.get_shape()) def testScalar2D(self): with self.session(use_gpu=True): data = np.array([[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11], [12, 13, 14]]) for dtype in _TEST_TYPES: for axis in range(data.ndim): params_np = self._buildParams(data, dtype) params = constant_op.constant(params_np) indices = constant_op.constant(2) gather_t = array_ops.gather(params, indices, axis=axis) gather_val = gather_t.eval() self.assertAllEqual(np.take(params_np, 2, axis=axis), gather_val) expected_shape = data.shape[:axis] + data.shape[axis + 1:] self.assertEqual(expected_shape, gather_t.get_shape()) def testSimpleTwoD32(self): with self.session(use_gpu=True): data = np.array([[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11], [12, 13, 14]]) for dtype in _TEST_TYPES: for axis in range(data.ndim): params_np = self._buildParams(data, dtype) params = constant_op.constant(params_np) # The indices must be in bounds for any axis. indices = constant_op.constant([0, 1, 0, 2]) gather_t = array_ops.gather(params, indices, axis=axis) gather_val = gather_t.eval() self.assertAllEqual(np.take(params_np, [0, 1, 0, 2], axis=axis), gather_val) expected_shape = data.shape[:axis] + (4,) + data.shape[axis + 1:] self.assertEqual(expected_shape, gather_t.get_shape()) def testHigherRank(self): # We check that scalar and empty indices shapes work as well shape = (2, 1, 3, 2) for indices_shape in (), (0,), (2, 0), (2, 3): for dtype in _TEST_TYPES: for axis in range(len(shape)): params = self._buildParams(np.random.randn(*shape), dtype) indices = np.random.randint(shape[axis], size=indices_shape) with self.cached_session(use_gpu=True) as sess: tf_params = constant_op.constant(params) tf_indices = constant_op.constant(indices) # Check that both positive and negative indices for axis work. 
tf_axis = constant_op.constant(axis) tf_negative_axis = constant_op.constant(-len(shape) + axis) gather = array_ops.gather(tf_params, tf_indices, axis=tf_axis) gather_negative_axis = array_ops.gather( tf_params, tf_indices, axis=tf_negative_axis) gather_value, gather_negative_axis_value = sess.run( [gather, gather_negative_axis]) gather_np = np.take(params, indices, axis) self.assertAllEqual(gather_np, gather_value) self.assertAllEqual(gather_np, gather_negative_axis_value) expected_shape = (params.shape[:axis] + indices.shape + params.shape[axis + 1:]) self.assertEqual(expected_shape, gather.shape) self.assertEqual(expected_shape, gather_negative_axis.shape) # Test gradients gather_grad = np.random.randn( *gather.get_shape().as_list()).astype(dtype.as_numpy_dtype) if dtype.is_complex: gather_grad -= 1j * gather_grad params_grad, indices_grad, axis_grad = gradients_impl.gradients( gather, [tf_params, tf_indices, tf_axis], gather_grad) self.assertEqual(indices_grad, None) self.assertEqual(axis_grad, None) if dtype.is_integer: self.assertEqual(params_grad, None) continue # For axis 0, we are able to create an efficient IndexedSlices for # the gradient. if axis == 0: self.assertEqual(type(params_grad), ops.IndexedSlices) params_grad = ops.convert_to_tensor(params_grad) correct_params_grad = np.zeros(shape).astype(dtype.as_numpy_dtype) outer_dims = axis inner_dims = len(shape) - axis - 1 gather_grad = gather_grad.reshape( shape[:axis] + (indices.size,) + shape[axis + 1:]) for source_index, dest_index in enumerate(indices.flat): dest_slice = ((slice(None),) * outer_dims + (dest_index,) + (slice(None),) * inner_dims) source_slice = ((slice(None),) * outer_dims + (source_index,) + (slice(None),) * inner_dims) correct_params_grad[dest_slice] += gather_grad[source_slice] self.assertAllClose(correct_params_grad, params_grad.eval(), atol=2e-6, rtol=2e-6) def testString(self): params = np.array([[b"asdf", b"zxcv"], [b"qwer", b"uiop"]]) with self.cached_session(): self.assertAllEqual([b"qwer", b"uiop"], array_ops.gather(params, 1, axis=0).eval()) self.assertAllEqual([b"asdf", b"qwer"], array_ops.gather(params, 0, axis=1).eval()) def testUInt32AndUInt64(self): for unsigned_type in (dtypes.uint32, dtypes.uint64): params = self._buildParams( np.array([[1, 2, 3], [7, 8, 9]]), unsigned_type) with self.cached_session(): self.assertAllEqual([7, 8, 9], array_ops.gather(params, 1, axis=0).eval()) self.assertAllEqual([1, 7], array_ops.gather(params, 0, axis=1).eval()) def testUnknownIndices(self): params = constant_op.constant([[0, 1, 2]]) indices = array_ops.placeholder(dtypes.int32) gather_t = array_ops.gather(params, indices) self.assertEqual(None, gather_t.get_shape()) def testUnknownAxis(self): params = constant_op.constant([[0, 1, 2]]) indices = constant_op.constant([[0, 0], [0, 0]]) axis = array_ops.placeholder(dtypes.int32) gather_t = array_ops.gather(params, indices, axis=axis) # Rank 2 params with rank 2 indices results in a rank 3 shape. self.assertEqual([None, None, None], gather_t.shape.as_list()) # If indices is also unknown the result rank is unknown. 
indices = array_ops.placeholder(dtypes.int32) gather_t = array_ops.gather(params, indices, axis=axis) self.assertEqual(None, gather_t.shape) def testBadIndicesCPU(self): with self.session(use_gpu=False): params = [[0, 1, 2], [3, 4, 5]] with self.assertRaisesOpError(r"indices\[0,0\] = 7 is not in \[0, 2\)"): array_ops.gather(params, [[7]], axis=0).eval() with self.assertRaisesOpError(r"indices\[0,0\] = 7 is not in \[0, 3\)"): array_ops.gather(params, [[7]], axis=1).eval() def _disabledTestBadIndicesGPU(self): # TODO disabled due to different behavior on GPU and CPU # On GPU the bad indices do not raise error but fetch 0 values if not test.is_gpu_available(): return with self.session(use_gpu=True): params = [[0, 1, 2], [3, 4, 5]] with self.assertRaisesOpError(r"indices\[0,0\] = 7 is not in \[0, 2\)"): array_ops.gather(params, [[7]], axis=0).eval() with self.assertRaisesOpError(r"indices\[0,0\] = 7 is not in \[0, 3\)"): array_ops.gather(params, [[7]], axis=1).eval() def testBadAxis(self): with self.session(use_gpu=True): params = [0, 1, 2] params_ph = array_ops.placeholder(dtypes.int32) indices = 0 for bad_axis in (1, 2, -2): # Shape inference can validate axis for known params rank. with self.assertRaisesWithPredicateMatch( ValueError, "Shape must be at least rank . but is rank 1"): array_ops.gather(params, indices, axis=bad_axis) # If params rank is unknown, an op error occurs. with self.assertRaisesOpError( r"Expected axis in the range \[-1, 1\), but got %s" % bad_axis): array_ops.gather(params_ph, indices, axis=bad_axis).eval( feed_dict={params_ph: params}) def testEmptySlices(self): with self.session(use_gpu=True): for dtype in _TEST_TYPES: for itype in np.int32, np.int64: # Leading axis gather. params = np.zeros((7, 0, 0), dtype=dtype.as_numpy_dtype) indices = np.array([3, 4], dtype=itype) gather = array_ops.gather(params, indices, axis=0) self.assertAllEqual(gather.eval(), np.zeros((2, 0, 0))) # Middle axis gather. params = np.zeros((0, 7, 0), dtype=dtype.as_numpy_dtype) gather = array_ops.gather(params, indices, axis=1) self.assertAllEqual(gather.eval(), np.zeros((0, 2, 0))) # Trailing axis gather. params = np.zeros((0, 0, 7), dtype=dtype.as_numpy_dtype) gather = array_ops.gather(params, indices, axis=2) self.assertAllEqual(gather.eval(), np.zeros((0, 0, 2))) if __name__ == "__main__": test.main()
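A NumPy-only restatement of the axis semantics these tests assert: array_ops.gather(params, indices, axis=a) agrees with np.take(params, indices, axis=a).

import numpy as np

params = np.array([[0, 1, 2],
                   [3, 4, 5]])
print(np.take(params, [0, 0, 1], axis=0))  # rows 0, 0, 1 -> shape (3, 3)
print(np.take(params, [2, 0], axis=1))     # columns 2 and 0 -> shape (2, 2)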
import unittest from enum import Enum from airflow.utils.weekday import WeekDay class TestWeekDay(unittest.TestCase): def test_weekday_enum_length(self): assert len(WeekDay) == 7 def test_weekday_name_value(self): weekdays = "MONDAY TUESDAY WEDNESDAY THURSDAY FRIDAY SATURDAY SUNDAY" weekdays = weekdays.split() for i, weekday in enumerate(weekdays, start=1): weekday_enum = WeekDay(i) assert weekday_enum == i assert int(weekday_enum) == i assert weekday_enum.name == weekday assert weekday_enum in WeekDay assert 0 < weekday_enum < 8 assert isinstance(weekday_enum, WeekDay) assert isinstance(weekday_enum, int) assert isinstance(weekday_enum, Enum)
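# A minimal sketch (assumption: airflow.utils.weekday.WeekDay behaves like the
# IntEnum below, which is exactly what the assertions above rely on -- integer
# values 1..7 keyed MONDAY..SUNDAY).
from enum import IntEnum


class WeekDaySketch(IntEnum):
    MONDAY = 1
    TUESDAY = 2
    WEDNESDAY = 3
    THURSDAY = 4
    FRIDAY = 5
    SATURDAY = 6
    SUNDAY = 7


assert WeekDaySketch.MONDAY == 1
assert WeekDaySketch(7).name == "SUNDAY"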
from tempest import clients from tempest.common.utils.data_utils import rand_name import tempest.test class BaseIdentityAdminTest(tempest.test.BaseTestCase): @classmethod def setUpClass(cls): super(BaseIdentityAdminTest, cls).setUpClass() os = clients.AdminManager(interface=cls._interface) cls.client = os.identity_client cls.token_client = os.token_client cls.endpoints_client = os.endpoints_client cls.v3_client = os.identity_v3_client cls.service_client = os.service_client cls.policy_client = os.policy_client cls.v3_token = os.token_v3_client cls.creds_client = os.credentials_client if not cls.client.has_admin_extensions(): raise cls.skipException("Admin extensions disabled") cls.data = DataGenerator(cls.client) cls.v3data = DataGenerator(cls.v3_client) os = clients.Manager(interface=cls._interface) cls.non_admin_client = os.identity_client cls.v3_non_admin_client = os.identity_v3_client @classmethod def tearDownClass(cls): cls.data.teardown_all() cls.v3data.teardown_all() super(BaseIdentityAdminTest, cls).tearDownClass() def disable_user(self, user_name): user = self.get_user_by_name(user_name) self.client.enable_disable_user(user['id'], False) def disable_tenant(self, tenant_name): tenant = self.get_tenant_by_name(tenant_name) self.client.update_tenant(tenant['id'], enabled=False) def get_user_by_name(self, name): _, users = self.client.get_users() user = [u for u in users if u['name'] == name] if len(user) > 0: return user[0] def get_tenant_by_name(self, name): _, tenants = self.client.list_tenants() tenant = [t for t in tenants if t['name'] == name] if len(tenant) > 0: return tenant[0] def get_role_by_name(self, name): _, roles = self.client.list_roles() role = [r for r in roles if r['name'] == name] if len(role) > 0: return role[0] class DataGenerator(object): def __init__(self, client): self.client = client self.users = [] self.tenants = [] self.roles = [] self.role_name = None self.v3_users = [] self.projects = [] self.v3_roles = [] def setup_test_user(self): """Set up a test user.""" self.setup_test_tenant() self.test_user = rand_name('test_user_') self.test_password = rand_name('pass_') self.test_email = self.test_user + '@testmail.tm' resp, self.user = self.client.create_user(self.test_user, self.test_password, self.tenant['id'], self.test_email) self.users.append(self.user) def setup_test_tenant(self): """Set up a test tenant.""" self.test_tenant = rand_name('test_tenant_') self.test_description = rand_name('desc_') resp, self.tenant = self.client.create_tenant( name=self.test_tenant, description=self.test_description) self.tenants.append(self.tenant) def setup_test_role(self): """Set up a test role.""" self.test_role = rand_name('role') resp, self.role = self.client.create_role(self.test_role) self.roles.append(self.role) def setup_test_v3_user(self): """Set up a test v3 user.""" self.setup_test_project() self.test_user = rand_name('test_user_') self.test_password = rand_name('pass_') self.test_email = self.test_user + '@testmail.tm' resp, self.v3_user = self.client.create_user(self.test_user, self.test_password, self.project['id'], self.test_email) self.v3_users.append(self.v3_user) def setup_test_project(self): """Set up a test project.""" self.test_project = rand_name('test_project_') self.test_description = rand_name('desc_') resp, self.project = self.client.create_project( name=self.test_project, description=self.test_description) self.projects.append(self.project) def setup_test_v3_role(self): """Set up a test v3 role.""" self.test_role = rand_name('role') resp, 
self.v3_role = self.client.create_role(self.test_role) self.v3_roles.append(self.v3_role) def teardown_all(self): for user in self.users: self.client.delete_user(user['id']) for tenant in self.tenants: self.client.delete_tenant(tenant['id']) for role in self.roles: self.client.delete_role(role['id']) for v3_user in self.v3_users: self.client.delete_user(v3_user['id']) for v3_project in self.projects: self.client.delete_project(v3_project['id']) for v3_role in self.v3_roles: self.client.delete_role(v3_role['id'])
"""Support for NX584 alarm control panels.""" import logging from nx584 import client import requests import voluptuous as vol import homeassistant.components.alarm_control_panel as alarm from homeassistant.components.alarm_control_panel import PLATFORM_SCHEMA from homeassistant.components.alarm_control_panel.const import ( SUPPORT_ALARM_ARM_AWAY, SUPPORT_ALARM_ARM_HOME, ) from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PORT, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_HOST = "localhost" DEFAULT_NAME = "NX584" DEFAULT_PORT = 5007 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the NX584 platform.""" name = config.get(CONF_NAME) host = config.get(CONF_HOST) port = config.get(CONF_PORT) url = f"http://{host}:{port}" try: add_entities([NX584Alarm(hass, url, name)]) except requests.exceptions.ConnectionError as ex: _LOGGER.error("Unable to connect to NX584: %s", str(ex)) return class NX584Alarm(alarm.AlarmControlPanel): """Representation of a NX584-based alarm panel.""" def __init__(self, hass, url, name): """Init the nx584 alarm panel.""" self._hass = hass self._name = name self._url = url self._alarm = client.Client(self._url) # Do an initial list operation so that we will try to actually # talk to the API and trigger a requests exception for setup_platform() # to catch self._alarm.list_zones() self._state = None @property def name(self): """Return the name of the device.""" return self._name @property def code_format(self): """Return one or more digits/characters.""" return alarm.FORMAT_NUMBER @property def state(self): """Return the state of the device.""" return self._state @property def supported_features(self) -> int: """Return the list of supported features.""" return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY def update(self): """Process new events from panel.""" try: part = self._alarm.list_partitions()[0] zones = self._alarm.list_zones() except requests.exceptions.ConnectionError as ex: _LOGGER.error( "Unable to connect to %(host)s: %(reason)s", dict(host=self._url, reason=ex), ) self._state = None zones = [] except IndexError: _LOGGER.error("NX584 reports no partitions") self._state = None zones = [] bypassed = False for zone in zones: if zone["bypassed"]: _LOGGER.debug( "Zone %(zone)s is bypassed, assuming HOME", dict(zone=zone["number"]), ) bypassed = True break if not part["armed"]: self._state = STATE_ALARM_DISARMED elif bypassed: self._state = STATE_ALARM_ARMED_HOME else: self._state = STATE_ALARM_ARMED_AWAY for flag in part["condition_flags"]: if flag == "Siren on": self._state = STATE_ALARM_TRIGGERED def alarm_disarm(self, code=None): """Send disarm command.""" self._alarm.disarm(code) def alarm_arm_home(self, code=None): """Send arm home command.""" self._alarm.arm("stay") def alarm_arm_away(self, code=None): """Send arm away command.""" self._alarm.arm("exit")
import sys


def tokens(nodes):
    """Print ``nodes`` evenly spaced tokens over the range [0, 2**127 - 1)."""
    for i in range(nodes):
        # Floor division keeps tokens exact integers on Python 3;
        # plain ``/`` would silently produce (lossy) floats there.
        print(i * (2 ** 127 - 1) // nodes)


tokens(int(sys.argv[1]))
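# Usage sketch (hypothetical helper, not part of the script above): the same
# even spacing over the token range, returned as a list instead of printed.
RING_MAX = 2 ** 127 - 1


def ring_tokens(nodes):
    return [i * RING_MAX // nodes for i in range(nodes)]


assert ring_tokens(2) == [0, RING_MAX // 2]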
""" Tests for swift.common.compressing_file_reader """ import unittest import cStringIO from slogging.compressing_file_reader import CompressingFileReader class TestCompressingFileReader(unittest.TestCase): def test_read(self): plain = 'obj\ndata' s = cStringIO.StringIO(plain) expected = '\x1f\x8b\x08\x00\x00\x00\x00\x00\x02\xff\xcaO\xca\xe2JI,'\ 'I\x04\x00\x00\x00\xff\xff\x03\x00P(\xa8\x1f\x08\x00\x00'\ '\x00' x = CompressingFileReader(s) compressed = ''.join(iter(lambda: x.read(), '')) self.assertEquals(compressed, expected) self.assertEquals(x.read(), '')
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. from clr import AddReference AddReference("System") AddReference("QuantConnect.Algorithm") AddReference("QuantConnect.Common") AddReference("QuantConnect.Indicators") from System import * from QuantConnect import * from QuantConnect.Indicators import * from QuantConnect.Data import * from QuantConnect.Data.Market import * from QuantConnect.Data.Custom import * from QuantConnect.Algorithm import * from QuantConnect.Python import PythonQuandl class CustomDataIndicatorExtensionsAlgorithm(QCAlgorithm): # Initialize the data and resolution you require for your strategy def Initialize(self): self.SetStartDate(2014,1,1) self.SetEndDate(2018,1,1) self.SetCash(25000) self.vix = 'CBOE/VIX' self.vxv = 'CBOE/VXV' # Define the symbol and "type" of our generic data self.AddData(QuandlVix, self.vix, Resolution.Daily) self.AddData(Quandl, self.vxv, Resolution.Daily) # Set up default Indicators, these are just 'identities' of the closing price self.vix_sma = self.SMA(self.vix, 1, Resolution.Daily) self.vxv_sma = self.SMA(self.vxv, 1, Resolution.Daily) # This will create a new indicator whose value is smaVXV / smaVIX self.ratio = IndicatorExtensions.Over(self.vxv_sma, self.vix_sma) # Plot indicators each time they update using the PlotIndicator function self.PlotIndicator("Ratio", self.ratio) self.PlotIndicator("Data", self.vix_sma, self.vxv_sma) # OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here. def OnData(self, data): # Wait for all indicators to fully initialize if not (self.vix_sma.IsReady and self.vxv_sma.IsReady and self.ratio.IsReady): return if not self.Portfolio.Invested and self.ratio.Current.Value > 1: self.MarketOrder(self.vix, 100) elif self.ratio.Current.Value < 1: self.Liquidate() class QuandlVix(PythonQuandl): def __init__(self): self.ValueColumnName = "VIX Close"
"""AFF4 object representing client stats.""" from grr.lib import aff4 from grr.lib import rdfvalue from grr.lib.aff4_objects import standard class ClientStats(standard.VFSDirectory): """A container for all client statistics.""" class SchemaCls(standard.VFSDirectory.SchemaCls): STATS = aff4.Attribute("aff4:stats", rdfvalue.ClientStats, "Client Stats.", "Client stats")
""" WSGI config for mysite project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/ """ import os, sys from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "evewspace.settings") application = get_wsgi_application()
from autobahn.wamp.types import CallResult
from autobahn.twisted.wamp import ApplicationSession


class Component(ApplicationSession):
    """
    Application component that provides procedures which
    return complex results.
    """

    def onConnect(self):
        self.join("realm1")

    def onJoin(self, details):

        def add_complex(a, ai, b, bi):
            # Keyword-based multi-value result: delivered to the caller
            # in CallResult.kwresults.
            return CallResult(c=a + b, ci=ai + bi)

        self.register(add_complex, 'com.myapp.add_complex')

        def split_name(fullname):
            forename, surname = fullname.split()
            # Positional multi-value result: delivered to the caller
            # in CallResult.results.
            return CallResult(forename, surname)

        self.register(split_name, 'com.myapp.split_name')
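# A minimal caller-side sketch (assumed counterpart, not part of the component
# above): multi-valued WAMP results arrive as autobahn.wamp.types.CallResult,
# with positional values in ``.results`` and keyword values in ``.kwresults``.
from twisted.internet.defer import inlineCallbacks
from autobahn.twisted.wamp import ApplicationSession


class Caller(ApplicationSession):

    @inlineCallbacks
    def onJoin(self, details):
        res = yield self.call('com.myapp.add_complex', 2, 3, 4, 5)
        print("Complex sum: {} + {}i".format(res.kwresults['c'],
                                             res.kwresults['ci']))

        res = yield self.call('com.myapp.split_name', 'Homer Simpson')
        print("Forename: {}, Surname: {}".format(*res.results))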
import collections import copy import sys import uuid import eventlet import mock from oslo_config import cfg import oslo_messaging import testtools from neutron.agent.common import config from neutron.agent.dhcp import agent as dhcp_agent from neutron.agent.dhcp import config as dhcp_config from neutron.agent import dhcp_agent as entry from neutron.agent.linux import dhcp from neutron.agent.linux import interface from neutron.common import config as common_config from neutron.common import constants as const from neutron.common import exceptions from neutron.common import utils from neutron import context from neutron.tests import base HOSTNAME = 'hostname' dev_man = dhcp.DeviceManager rpc_api = dhcp_agent.DhcpPluginApi DEVICE_MANAGER = '%s.%s' % (dev_man.__module__, dev_man.__name__) DHCP_PLUGIN = '%s.%s' % (rpc_api.__module__, rpc_api.__name__) fake_tenant_id = 'aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa' fake_subnet1_allocation_pools = dhcp.DictModel(dict(id='', start='172.9.9.2', end='172.9.9.254')) fake_subnet1 = dhcp.DictModel(dict(id='bbbbbbbb-bbbb-bbbb-bbbbbbbbbbbb', network_id='12345678-1234-5678-1234567890ab', cidr='172.9.9.0/24', enable_dhcp=True, name='', tenant_id=fake_tenant_id, gateway_ip='172.9.9.1', host_routes=[], dns_nameservers=[], ip_version=4, ipv6_ra_mode=None, ipv6_address_mode=None, allocation_pools=fake_subnet1_allocation_pools)) fake_subnet2_allocation_pools = dhcp.DictModel(dict(id='', start='172.9.8.2', end='172.9.8.254')) fake_subnet2 = dhcp.DictModel(dict(id='dddddddd-dddd-dddd-dddddddddddd', network_id='12345678-1234-5678-1234567890ab', cidr='172.9.8.0/24', enable_dhcp=False, name='', tenant_id=fake_tenant_id, gateway_ip='172.9.8.1', host_routes=[], dns_nameservers=[], ip_version=4, allocation_pools=fake_subnet2_allocation_pools)) fake_subnet3 = dhcp.DictModel(dict(id='bbbbbbbb-1111-2222-bbbbbbbbbbbb', network_id='12345678-1234-5678-1234567890ab', cidr='192.168.1.1/24', enable_dhcp=True)) fake_ipv6_subnet = dhcp.DictModel(dict(id='bbbbbbbb-1111-2222-bbbbbbbbbbbb', network_id='12345678-1234-5678-1234567890ab', cidr='2001:0db8::0/64', enable_dhcp=True, tenant_id=fake_tenant_id, gateway_ip='2001:0db8::1', ip_version=6, ipv6_ra_mode='slaac', ipv6_address_mode=None)) fake_meta_subnet = dhcp.DictModel(dict(id='bbbbbbbb-1111-2222-bbbbbbbbbbbb', network_id='12345678-1234-5678-1234567890ab', cidr='169.254.169.252/30', gateway_ip='169.254.169.253', enable_dhcp=True)) fake_fixed_ip1 = dhcp.DictModel(dict(id='', subnet_id=fake_subnet1.id, ip_address='172.9.9.9')) fake_fixed_ip2 = dhcp.DictModel(dict(id='', subnet_id=fake_subnet1.id, ip_address='172.9.9.10')) fake_fixed_ipv6 = dhcp.DictModel(dict(id='', subnet_id=fake_ipv6_subnet.id, ip_address='2001:db8::a8bb:ccff:fedd:ee99')) fake_meta_fixed_ip = dhcp.DictModel(dict(id='', subnet=fake_meta_subnet, ip_address='169.254.169.254')) fake_allocation_pool_subnet1 = dhcp.DictModel(dict(id='', start='172.9.9.2', end='172.9.9.254')) fake_port1 = dhcp.DictModel(dict(id='12345678-1234-aaaa-1234567890ab', device_id='dhcp-12345678-1234-aaaa-1234567890ab', device_owner='', allocation_pools=fake_subnet1_allocation_pools, mac_address='aa:bb:cc:dd:ee:ff', network_id='12345678-1234-5678-1234567890ab', fixed_ips=[fake_fixed_ip1])) fake_dhcp_port = dhcp.DictModel(dict(id='12345678-1234-aaaa-123456789022', device_id='dhcp-12345678-1234-aaaa-123456789022', device_owner='network:dhcp', allocation_pools=fake_subnet1_allocation_pools, mac_address='aa:bb:cc:dd:ee:22', network_id='12345678-1234-5678-1234567890ab', fixed_ips=[fake_fixed_ip2])) fake_port2 = 
dhcp.DictModel(dict(id='12345678-1234-aaaa-123456789000', device_id='dhcp-12345678-1234-aaaa-123456789000', device_owner='', mac_address='aa:bb:cc:dd:ee:99', network_id='12345678-1234-5678-1234567890ab', fixed_ips=[fake_fixed_ip2])) fake_ipv6_port = dhcp.DictModel(dict(id='12345678-1234-aaaa-123456789000', device_owner='', mac_address='aa:bb:cc:dd:ee:99', network_id='12345678-1234-5678-1234567890ab', fixed_ips=[fake_fixed_ipv6])) fake_meta_port = dhcp.DictModel(dict(id='12345678-1234-aaaa-1234567890ab', mac_address='aa:bb:cc:dd:ee:ff', network_id='12345678-1234-5678-1234567890ab', device_owner=const.DEVICE_OWNER_ROUTER_INTF, device_id='forzanapoli', fixed_ips=[fake_meta_fixed_ip])) fake_meta_dvr_port = dhcp.DictModel(fake_meta_port.copy()) fake_meta_dvr_port.device_owner = const.DEVICE_OWNER_DVR_INTERFACE fake_dist_port = dhcp.DictModel(dict(id='12345678-1234-aaaa-1234567890ab', mac_address='aa:bb:cc:dd:ee:ff', network_id='12345678-1234-5678-1234567890ab', device_owner=const.DEVICE_OWNER_DVR_INTERFACE, device_id='forzanapoli', fixed_ips=[fake_meta_fixed_ip])) FAKE_NETWORK_UUID = '12345678-1234-5678-1234567890ab' FAKE_NETWORK_DHCP_NS = "qdhcp-%s" % FAKE_NETWORK_UUID fake_network = dhcp.NetModel(True, dict(id=FAKE_NETWORK_UUID, tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', admin_state_up=True, subnets=[fake_subnet1, fake_subnet2], ports=[fake_port1])) fake_network_ipv6 = dhcp.NetModel(True, dict( id='12345678-1234-5678-1234567890ab', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', admin_state_up=True, subnets=[fake_ipv6_subnet], ports=[fake_ipv6_port])) fake_network_ipv6_ipv4 = dhcp.NetModel(True, dict( id='12345678-1234-5678-1234567890ab', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', admin_state_up=True, subnets=[fake_ipv6_subnet, fake_subnet1], ports=[fake_port1])) isolated_network = dhcp.NetModel( True, dict( id='12345678-1234-5678-1234567890ab', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', admin_state_up=True, subnets=[fake_subnet1], ports=[fake_port1])) nonisolated_dist_network = dhcp.NetModel( True, dict( id='12345678-1234-5678-1234567890ab', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', admin_state_up=True, subnets=[fake_subnet1], ports=[fake_port1, fake_port2])) empty_network = dhcp.NetModel( True, dict( id='12345678-1234-5678-1234567890ab', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', admin_state_up=True, subnets=[fake_subnet1], ports=[])) fake_meta_network = dhcp.NetModel( True, dict(id='12345678-1234-5678-1234567890ab', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', admin_state_up=True, subnets=[fake_meta_subnet], ports=[fake_meta_port])) fake_meta_dvr_network = dhcp.NetModel(True, fake_meta_network.copy()) fake_meta_dvr_network.ports = [fake_meta_dvr_port] fake_dist_network = dhcp.NetModel( True, dict(id='12345678-1234-5678-1234567890ab', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', admin_state_up=True, subnets=[fake_meta_subnet], ports=[fake_meta_port, fake_dist_port])) fake_down_network = dhcp.NetModel( True, dict(id='12345678-dddd-dddd-1234567890ab', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', admin_state_up=False, subnets=[], ports=[])) class TestDhcpAgent(base.BaseTestCase): def setUp(self): super(TestDhcpAgent, self).setUp() entry.register_options(cfg.CONF) cfg.CONF.set_override('interface_driver', 'neutron.agent.linux.interface.NullDriver') # disable setting up periodic state reporting cfg.CONF.set_override('report_interval', 0, 'AGENT') self.driver_cls_p = mock.patch( 'neutron.agent.dhcp.agent.importutils.import_class') self.driver = mock.Mock(name='driver') 
self.driver.existing_dhcp_networks.return_value = [] self.driver_cls = self.driver_cls_p.start() self.driver_cls.return_value = self.driver self.mock_makedirs_p = mock.patch("os.makedirs") self.mock_makedirs = self.mock_makedirs_p.start() def test_init_host(self): dhcp = dhcp_agent.DhcpAgent(HOSTNAME) with mock.patch.object(dhcp, 'sync_state') as sync_state: dhcp.init_host() sync_state.assert_called_once_with() def test_dhcp_agent_manager(self): state_rpc_str = 'neutron.agent.rpc.PluginReportStateAPI' # sync_state is needed for this test cfg.CONF.set_override('report_interval', 1, 'AGENT') with mock.patch.object(dhcp_agent.DhcpAgentWithStateReport, 'sync_state', autospec=True) as mock_sync_state: with mock.patch.object(dhcp_agent.DhcpAgentWithStateReport, 'periodic_resync', autospec=True) as mock_periodic_resync: with mock.patch(state_rpc_str) as state_rpc: with mock.patch.object(sys, 'argv') as sys_argv: sys_argv.return_value = [ 'dhcp', '--config-file', base.etcdir('neutron.conf')] cfg.CONF.register_opts(dhcp_config.DHCP_AGENT_OPTS) config.register_interface_driver_opts_helper(cfg.CONF) config.register_agent_state_opts_helper(cfg.CONF) cfg.CONF.register_opts(interface.OPTS) common_config.init(sys.argv[1:]) agent_mgr = dhcp_agent.DhcpAgentWithStateReport( 'testhost') eventlet.greenthread.sleep(1) agent_mgr.after_start() mock_sync_state.assert_called_once_with(agent_mgr) mock_periodic_resync.assert_called_once_with(agent_mgr) state_rpc.assert_has_calls( [mock.call(mock.ANY), mock.call().report_state(mock.ANY, mock.ANY, mock.ANY)]) def test_dhcp_agent_main_agent_manager(self): logging_str = 'neutron.agent.common.config.setup_logging' launcher_str = 'oslo_service.service.ServiceLauncher' with mock.patch(logging_str): with mock.patch.object(sys, 'argv') as sys_argv: with mock.patch(launcher_str) as launcher: sys_argv.return_value = ['dhcp', '--config-file', base.etcdir('neutron.conf')] entry.main() launcher.assert_has_calls( [mock.call(cfg.CONF), mock.call().launch_service(mock.ANY), mock.call().wait()]) def test_run_completes_single_pass(self): with mock.patch(DEVICE_MANAGER): dhcp = dhcp_agent.DhcpAgent(HOSTNAME) attrs_to_mock = dict( [(a, mock.DEFAULT) for a in ['sync_state', 'periodic_resync']]) with mock.patch.multiple(dhcp, **attrs_to_mock) as mocks: dhcp.run() mocks['sync_state'].assert_called_once_with() mocks['periodic_resync'].assert_called_once_with() def test_call_driver(self): network = mock.Mock() network.id = '1' dhcp = dhcp_agent.DhcpAgent(cfg.CONF) self.assertTrue(dhcp.call_driver('foo', network)) self.driver.assert_called_once_with(cfg.CONF, mock.ANY, mock.ANY, mock.ANY, mock.ANY) def _test_call_driver_failure(self, exc=None, trace_level='exception', expected_sync=True): network = mock.Mock() network.id = '1' self.driver.return_value.foo.side_effect = exc or Exception with mock.patch.object(dhcp_agent.LOG, trace_level) as log: dhcp = dhcp_agent.DhcpAgent(HOSTNAME) with mock.patch.object(dhcp, 'schedule_resync') as schedule_resync: self.assertIsNone(dhcp.call_driver('foo', network)) self.driver.assert_called_once_with(cfg.CONF, mock.ANY, mock.ANY, mock.ANY, mock.ANY) self.assertEqual(log.call_count, 1) self.assertEqual(expected_sync, schedule_resync.called) def test_call_driver_ip_address_generation_failure(self): error = oslo_messaging.RemoteError( exc_type='IpAddressGenerationFailure') self._test_call_driver_failure(exc=error, expected_sync=False) def test_call_driver_failure(self): self._test_call_driver_failure() def test_call_driver_remote_error_net_not_found(self): 
self._test_call_driver_failure( exc=oslo_messaging.RemoteError(exc_type='NetworkNotFound'), trace_level='warning') def test_call_driver_network_not_found(self): self._test_call_driver_failure( exc=exceptions.NetworkNotFound(net_id='1'), trace_level='warning') def test_call_driver_conflict(self): self._test_call_driver_failure( exc=exceptions.Conflict(), trace_level='warning', expected_sync=False) def _test_sync_state_helper(self, known_net_ids, active_net_ids): active_networks = set(mock.Mock(id=netid) for netid in active_net_ids) with mock.patch(DHCP_PLUGIN) as plug: mock_plugin = mock.Mock() mock_plugin.get_active_networks_info.return_value = active_networks plug.return_value = mock_plugin dhcp = dhcp_agent.DhcpAgent(HOSTNAME) attrs_to_mock = dict([(a, mock.DEFAULT) for a in ['disable_dhcp_helper', 'cache', 'safe_configure_dhcp_for_network']]) with mock.patch.multiple(dhcp, **attrs_to_mock) as mocks: mocks['cache'].get_network_ids.return_value = known_net_ids dhcp.sync_state() diff = set(known_net_ids) - set(active_net_ids) exp_disable = [mock.call(net_id) for net_id in diff] mocks['cache'].assert_has_calls([mock.call.get_network_ids()]) mocks['disable_dhcp_helper'].assert_has_calls(exp_disable) def test_sync_state_initial(self): self._test_sync_state_helper([], ['a']) def test_sync_state_same(self): self._test_sync_state_helper(['a'], ['a']) def test_sync_state_disabled_net(self): self._test_sync_state_helper(['b'], ['a']) def test_sync_state_waitall(self): with mock.patch.object(dhcp_agent.eventlet.GreenPool, 'waitall') as w: active_net_ids = ['1', '2', '3', '4', '5'] known_net_ids = ['1', '2', '3', '4', '5'] self._test_sync_state_helper(known_net_ids, active_net_ids) w.assert_called_once_with() def test_sync_state_plugin_error(self): with mock.patch(DHCP_PLUGIN) as plug: mock_plugin = mock.Mock() mock_plugin.get_active_networks_info.side_effect = Exception plug.return_value = mock_plugin with mock.patch.object(dhcp_agent.LOG, 'exception') as log: dhcp = dhcp_agent.DhcpAgent(HOSTNAME) with mock.patch.object(dhcp, 'schedule_resync') as schedule_resync: dhcp.sync_state() self.assertTrue(log.called) self.assertTrue(schedule_resync.called) def test_periodic_resync(self): dhcp = dhcp_agent.DhcpAgent(HOSTNAME) with mock.patch.object(dhcp_agent.eventlet, 'spawn') as spawn: dhcp.periodic_resync() spawn.assert_called_once_with(dhcp._periodic_resync_helper) def test_periodic_resync_helper(self): with mock.patch.object(dhcp_agent.eventlet, 'sleep') as sleep: dhcp = dhcp_agent.DhcpAgent(HOSTNAME) resync_reasons = collections.OrderedDict( (('a', 'reason1'), ('b', 'reason2'))) dhcp.needs_resync_reasons = resync_reasons with mock.patch.object(dhcp, 'sync_state') as sync_state: sync_state.side_effect = RuntimeError with testtools.ExpectedException(RuntimeError): dhcp._periodic_resync_helper() sync_state.assert_called_once_with(resync_reasons.keys()) sleep.assert_called_once_with(dhcp.conf.resync_interval) self.assertEqual(len(dhcp.needs_resync_reasons), 0) def test_populate_cache_on_start_without_active_networks_support(self): # emul dhcp driver that doesn't support retrieving of active networks self.driver.existing_dhcp_networks.side_effect = NotImplementedError with mock.patch.object(dhcp_agent.LOG, 'debug') as log: dhcp = dhcp_agent.DhcpAgent(HOSTNAME) self.driver.existing_dhcp_networks.assert_called_once_with( dhcp.conf, ) self.assertFalse(dhcp.cache.get_network_ids()) self.assertTrue(log.called) def test_populate_cache_on_start(self): networks = ['aaa', 'bbb'] 
self.driver.existing_dhcp_networks.return_value = networks dhcp = dhcp_agent.DhcpAgent(HOSTNAME) self.driver.existing_dhcp_networks.assert_called_once_with( dhcp.conf, ) self.assertEqual(set(networks), set(dhcp.cache.get_network_ids())) def test_none_interface_driver(self): cfg.CONF.set_override('interface_driver', None) self.assertRaises(SystemExit, dhcp.DeviceManager, cfg.CONF, None) def test_nonexistent_interface_driver(self): # Temporarily turn off mock, so could use the real import_class # to import interface_driver. self.driver_cls_p.stop() self.addCleanup(self.driver_cls_p.start) cfg.CONF.set_override('interface_driver', 'foo.bar') self.assertRaises(SystemExit, dhcp.DeviceManager, cfg.CONF, None) class TestLogArgs(base.BaseTestCase): def test_log_args_without_log_dir_and_file(self): conf_dict = {'debug': True, 'verbose': False, 'log_dir': None, 'log_file': None, 'use_syslog': True, 'syslog_log_facility': 'LOG_USER'} conf = dhcp.DictModel(conf_dict) expected_args = ['--debug', '--use-syslog', '--syslog-log-facility=LOG_USER'] args = config.get_log_args(conf, 'log_file_name') self.assertEqual(expected_args, args) def test_log_args_without_log_file(self): conf_dict = {'debug': True, 'verbose': True, 'log_dir': '/etc/tests', 'log_file': None, 'use_syslog': False, 'syslog_log_facility': 'LOG_USER'} conf = dhcp.DictModel(conf_dict) expected_args = ['--debug', '--verbose', '--log-file=log_file_name', '--log-dir=/etc/tests'] args = config.get_log_args(conf, 'log_file_name') self.assertEqual(expected_args, args) def test_log_args_with_log_dir_and_file(self): conf_dict = {'debug': True, 'verbose': False, 'log_dir': '/etc/tests', 'log_file': 'tests/filelog', 'use_syslog': False, 'syslog_log_facility': 'LOG_USER'} conf = dhcp.DictModel(conf_dict) expected_args = ['--debug', '--log-file=log_file_name', '--log-dir=/etc/tests/tests'] args = config.get_log_args(conf, 'log_file_name') self.assertEqual(expected_args, args) def test_log_args_without_log_dir(self): conf_dict = {'debug': True, 'verbose': False, 'log_file': 'tests/filelog', 'log_dir': None, 'use_syslog': False, 'syslog_log_facility': 'LOG_USER'} conf = dhcp.DictModel(conf_dict) expected_args = ['--debug', '--log-file=log_file_name', '--log-dir=tests'] args = config.get_log_args(conf, 'log_file_name') self.assertEqual(expected_args, args) def test_log_args_with_filelog_and_syslog(self): conf_dict = {'debug': True, 'verbose': True, 'log_file': 'tests/filelog', 'log_dir': '/etc/tests', 'use_syslog': True, 'syslog_log_facility': 'LOG_USER'} conf = dhcp.DictModel(conf_dict) expected_args = ['--debug', '--verbose', '--log-file=log_file_name', '--log-dir=/etc/tests/tests'] args = config.get_log_args(conf, 'log_file_name') self.assertEqual(expected_args, args) class TestDhcpAgentEventHandler(base.BaseTestCase): def setUp(self): super(TestDhcpAgentEventHandler, self).setUp() config.register_interface_driver_opts_helper(cfg.CONF) cfg.CONF.set_override('interface_driver', 'neutron.agent.linux.interface.NullDriver') entry.register_options(cfg.CONF) # register all dhcp cfg options self.plugin_p = mock.patch(DHCP_PLUGIN) plugin_cls = self.plugin_p.start() self.plugin = mock.Mock() plugin_cls.return_value = self.plugin self.cache_p = mock.patch('neutron.agent.dhcp.agent.NetworkCache') cache_cls = self.cache_p.start() self.cache = mock.Mock() cache_cls.return_value = self.cache self.mock_makedirs_p = mock.patch("os.makedirs") self.mock_makedirs = self.mock_makedirs_p.start() self.mock_init_p = mock.patch('neutron.agent.dhcp.agent.' 
'DhcpAgent._populate_networks_cache') self.mock_init = self.mock_init_p.start() self.dhcp = dhcp_agent.DhcpAgent(HOSTNAME) self.call_driver_p = mock.patch.object(self.dhcp, 'call_driver') self.call_driver = self.call_driver_p.start() self.schedule_resync_p = mock.patch.object(self.dhcp, 'schedule_resync') self.schedule_resync = self.schedule_resync_p.start() self.external_process_p = mock.patch( 'neutron.agent.linux.external_process.ProcessManager' ) self.external_process = self.external_process_p.start() def _process_manager_constructor_call(self, ns=FAKE_NETWORK_DHCP_NS): return mock.call(conf=cfg.CONF, uuid=FAKE_NETWORK_UUID, namespace=ns, default_cmd_callback=mock.ANY) def _enable_dhcp_helper(self, network, enable_isolated_metadata=False, is_isolated_network=False): self.dhcp._process_monitor = mock.Mock() if enable_isolated_metadata: cfg.CONF.set_override('enable_isolated_metadata', True) self.plugin.get_network_info.return_value = network self.dhcp.enable_dhcp_helper(network.id) self.plugin.assert_has_calls([ mock.call.get_network_info(network.id)]) self.call_driver.assert_called_once_with('enable', network) self.cache.assert_has_calls([mock.call.put(network)]) if is_isolated_network: self.external_process.assert_has_calls([ self._process_manager_constructor_call(), mock.call().enable() ]) else: self.assertFalse(self.external_process.call_count) def test_enable_dhcp_helper_enable_metadata_isolated_network(self): self._enable_dhcp_helper(isolated_network, enable_isolated_metadata=True, is_isolated_network=True) def test_enable_dhcp_helper_enable_metadata_no_gateway(self): isolated_network_no_gateway = copy.deepcopy(isolated_network) isolated_network_no_gateway.subnets[0].gateway_ip = None self._enable_dhcp_helper(isolated_network_no_gateway, enable_isolated_metadata=True, is_isolated_network=True) def test_enable_dhcp_helper_enable_metadata_nonisolated_network(self): nonisolated_network = copy.deepcopy(isolated_network) nonisolated_network.ports[0].device_owner = ( const.DEVICE_OWNER_ROUTER_INTF) nonisolated_network.ports[0].fixed_ips[0].ip_address = '172.9.9.1' self._enable_dhcp_helper(nonisolated_network, enable_isolated_metadata=True, is_isolated_network=False) def test_enable_dhcp_helper_enable_metadata_nonisolated_dist_network(self): nonisolated_dist_network.ports[0].device_owner = ( const.DEVICE_OWNER_ROUTER_INTF) nonisolated_dist_network.ports[0].fixed_ips[0].ip_address = '172.9.9.1' nonisolated_dist_network.ports[1].device_owner = ( const.DEVICE_OWNER_DVR_INTERFACE) nonisolated_dist_network.ports[1].fixed_ips[0].ip_address = '172.9.9.1' self._enable_dhcp_helper(nonisolated_dist_network, enable_isolated_metadata=True, is_isolated_network=False) def test_enable_dhcp_helper_enable_metadata_empty_network(self): self._enable_dhcp_helper(empty_network, enable_isolated_metadata=True, is_isolated_network=True) def test_enable_dhcp_helper_enable_metadata_ipv6_ipv4_network(self): self._enable_dhcp_helper(fake_network_ipv6_ipv4, enable_isolated_metadata=True, is_isolated_network=True) def test_enable_dhcp_helper_driver_failure_ipv6_ipv4_network(self): self.plugin.get_network_info.return_value = fake_network_ipv6_ipv4 self.call_driver.return_value = False cfg.CONF.set_override('enable_isolated_metadata', True) with mock.patch.object( self.dhcp, 'enable_isolated_metadata_proxy') as enable_metadata: self.dhcp.enable_dhcp_helper(fake_network_ipv6_ipv4.id) self.plugin.assert_has_calls( [mock.call.get_network_info(fake_network_ipv6_ipv4.id)]) self.call_driver.assert_called_once_with('enable', 
fake_network_ipv6_ipv4) self.assertFalse(self.cache.called) self.assertFalse(enable_metadata.called) self.assertFalse(self.external_process.called) def test_enable_dhcp_helper(self): self._enable_dhcp_helper(fake_network) def test_enable_dhcp_helper_ipv6_network(self): self._enable_dhcp_helper(fake_network_ipv6) def test_enable_dhcp_helper_down_network(self): self.plugin.get_network_info.return_value = fake_down_network self.dhcp.enable_dhcp_helper(fake_down_network.id) self.plugin.assert_has_calls( [mock.call.get_network_info(fake_down_network.id)]) self.assertFalse(self.call_driver.called) self.assertFalse(self.cache.called) self.assertFalse(self.external_process.called) def test_enable_dhcp_helper_network_none(self): self.plugin.get_network_info.return_value = None with mock.patch.object(dhcp_agent.LOG, 'warn') as log: self.dhcp.enable_dhcp_helper('fake_id') self.plugin.assert_has_calls( [mock.call.get_network_info('fake_id')]) self.assertFalse(self.call_driver.called) self.assertTrue(log.called) self.assertFalse(self.dhcp.schedule_resync.called) def test_enable_dhcp_helper_exception_during_rpc(self): self.plugin.get_network_info.side_effect = Exception with mock.patch.object(dhcp_agent.LOG, 'exception') as log: self.dhcp.enable_dhcp_helper(fake_network.id) self.plugin.assert_has_calls( [mock.call.get_network_info(fake_network.id)]) self.assertFalse(self.call_driver.called) self.assertTrue(log.called) self.assertTrue(self.schedule_resync.called) self.assertFalse(self.cache.called) self.assertFalse(self.external_process.called) def test_enable_dhcp_helper_driver_failure(self): self.plugin.get_network_info.return_value = fake_network self.call_driver.return_value = False self.dhcp.enable_dhcp_helper(fake_network.id) self.plugin.assert_has_calls( [mock.call.get_network_info(fake_network.id)]) self.call_driver.assert_called_once_with('enable', fake_network) self.assertFalse(self.cache.called) self.assertFalse(self.external_process.called) def _disable_dhcp_helper_known_network(self, isolated_metadata=False): if isolated_metadata: cfg.CONF.set_override('enable_isolated_metadata', True) self.cache.get_network_by_id.return_value = fake_network self.dhcp.disable_dhcp_helper(fake_network.id) self.cache.assert_has_calls( [mock.call.get_network_by_id(fake_network.id)]) self.call_driver.assert_called_once_with('disable', fake_network) if isolated_metadata: self.external_process.assert_has_calls([ self._process_manager_constructor_call(ns=None), mock.call().disable()]) else: self.assertFalse(self.external_process.call_count) def test_disable_dhcp_helper_known_network_isolated_metadata(self): self._disable_dhcp_helper_known_network(isolated_metadata=True) def test_disable_dhcp_helper_known_network(self): self._disable_dhcp_helper_known_network() def test_disable_dhcp_helper_unknown_network(self): self.cache.get_network_by_id.return_value = None self.dhcp.disable_dhcp_helper('abcdef') self.cache.assert_has_calls( [mock.call.get_network_by_id('abcdef')]) self.assertEqual(0, self.call_driver.call_count) self.assertFalse(self.external_process.called) def _disable_dhcp_helper_driver_failure(self, isolated_metadata=False): if isolated_metadata: cfg.CONF.set_override('enable_isolated_metadata', True) self.cache.get_network_by_id.return_value = fake_network self.call_driver.return_value = False self.dhcp.disable_dhcp_helper(fake_network.id) self.cache.assert_has_calls( [mock.call.get_network_by_id(fake_network.id)]) self.call_driver.assert_called_once_with('disable', fake_network) 
self.cache.assert_has_calls( [mock.call.get_network_by_id(fake_network.id)]) if isolated_metadata: self.external_process.assert_has_calls([ self._process_manager_constructor_call(ns=None), mock.call().disable() ]) else: self.assertFalse(self.external_process.call_count) def test_disable_dhcp_helper_driver_failure_isolated_metadata(self): self._disable_dhcp_helper_driver_failure(isolated_metadata=True) def test_disable_dhcp_helper_driver_failure(self): self._disable_dhcp_helper_driver_failure() def test_enable_isolated_metadata_proxy(self): self.dhcp._process_monitor = mock.Mock() self.dhcp.enable_isolated_metadata_proxy(fake_network) self.external_process.assert_has_calls([ self._process_manager_constructor_call(), mock.call().enable() ]) def test_disable_isolated_metadata_proxy(self): method_path = ('neutron.agent.metadata.driver.MetadataDriver' '.destroy_monitored_metadata_proxy') with mock.patch(method_path) as destroy: self.dhcp.disable_isolated_metadata_proxy(fake_network) destroy.assert_called_once_with(self.dhcp._process_monitor, fake_network.id, cfg.CONF) def _test_metadata_network(self, network): cfg.CONF.set_override('enable_metadata_network', True) cfg.CONF.set_override('debug', True) cfg.CONF.set_override('verbose', False) cfg.CONF.set_override('log_file', 'test.log') method_path = ('neutron.agent.metadata.driver.MetadataDriver' '.spawn_monitored_metadata_proxy') with mock.patch(method_path) as spawn: self.dhcp.enable_isolated_metadata_proxy(network) spawn.assert_called_once_with(self.dhcp._process_monitor, network.namespace, dhcp.METADATA_PORT, cfg.CONF, router_id='forzanapoli') def test_enable_isolated_metadata_proxy_with_metadata_network(self): self._test_metadata_network(fake_meta_network) def test_enable_isolated_metadata_proxy_with_metadata_network_dvr(self): self._test_metadata_network(fake_meta_dvr_network) def test_enable_isolated_metadata_proxy_with_dist_network(self): self._test_metadata_network(fake_dist_network) def test_network_create_end(self): payload = dict(network=dict(id=fake_network.id)) with mock.patch.object(self.dhcp, 'enable_dhcp_helper') as enable: self.dhcp.network_create_end(None, payload) enable.assert_called_once_with(fake_network.id) def test_network_update_end_admin_state_up(self): payload = dict(network=dict(id=fake_network.id, admin_state_up=True)) with mock.patch.object(self.dhcp, 'enable_dhcp_helper') as enable: self.dhcp.network_update_end(None, payload) enable.assert_called_once_with(fake_network.id) def test_network_update_end_admin_state_down(self): payload = dict(network=dict(id=fake_network.id, admin_state_up=False)) with mock.patch.object(self.dhcp, 'disable_dhcp_helper') as disable: self.dhcp.network_update_end(None, payload) disable.assert_called_once_with(fake_network.id) def test_network_delete_end(self): payload = dict(network_id=fake_network.id) with mock.patch.object(self.dhcp, 'disable_dhcp_helper') as disable: self.dhcp.network_delete_end(None, payload) disable.assert_called_once_with(fake_network.id) def test_refresh_dhcp_helper_no_dhcp_enabled_networks(self): network = dhcp.NetModel(True, dict(id='net-id', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', admin_state_up=True, subnets=[], ports=[])) self.cache.get_network_by_id.return_value = network self.plugin.get_network_info.return_value = network with mock.patch.object(self.dhcp, 'disable_dhcp_helper') as disable: self.dhcp.refresh_dhcp_helper(network.id) disable.assert_called_once_with(network.id) self.assertFalse(self.cache.called) self.assertFalse(self.call_driver.called) 
self.cache.assert_has_calls( [mock.call.get_network_by_id('net-id')]) def test_refresh_dhcp_helper_exception_during_rpc(self): network = dhcp.NetModel(True, dict(id='net-id', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', admin_state_up=True, subnets=[], ports=[])) self.cache.get_network_by_id.return_value = network self.plugin.get_network_info.side_effect = Exception with mock.patch.object(dhcp_agent.LOG, 'exception') as log: self.dhcp.refresh_dhcp_helper(network.id) self.assertFalse(self.call_driver.called) self.cache.assert_has_calls( [mock.call.get_network_by_id('net-id')]) self.assertTrue(log.called) self.assertTrue(self.dhcp.schedule_resync.called) def test_subnet_update_end(self): payload = dict(subnet=dict(network_id=fake_network.id)) self.cache.get_network_by_id.return_value = fake_network self.plugin.get_network_info.return_value = fake_network self.dhcp.subnet_update_end(None, payload) self.cache.assert_has_calls([mock.call.put(fake_network)]) self.call_driver.assert_called_once_with('reload_allocations', fake_network) def test_subnet_update_end_restart(self): new_state = dhcp.NetModel(True, dict(id=fake_network.id, tenant_id=fake_network.tenant_id, admin_state_up=True, subnets=[fake_subnet1, fake_subnet3], ports=[fake_port1])) payload = dict(subnet=dict(network_id=fake_network.id)) self.cache.get_network_by_id.return_value = fake_network self.plugin.get_network_info.return_value = new_state self.dhcp.subnet_update_end(None, payload) self.cache.assert_has_calls([mock.call.put(new_state)]) self.call_driver.assert_called_once_with('restart', new_state) def test_subnet_update_end_delete_payload(self): prev_state = dhcp.NetModel(True, dict(id=fake_network.id, tenant_id=fake_network.tenant_id, admin_state_up=True, subnets=[fake_subnet1, fake_subnet3], ports=[fake_port1])) payload = dict(subnet_id=fake_subnet1.id) self.cache.get_network_by_subnet_id.return_value = prev_state self.cache.get_network_by_id.return_value = prev_state self.plugin.get_network_info.return_value = fake_network self.dhcp.subnet_delete_end(None, payload) self.cache.assert_has_calls([ mock.call.get_network_by_subnet_id( 'bbbbbbbb-bbbb-bbbb-bbbbbbbbbbbb'), mock.call.get_network_by_id('12345678-1234-5678-1234567890ab'), mock.call.put(fake_network)]) self.call_driver.assert_called_once_with('restart', fake_network) def test_port_update_end(self): payload = dict(port=fake_port2) self.cache.get_network_by_id.return_value = fake_network self.cache.get_port_by_id.return_value = fake_port2 self.dhcp.port_update_end(None, payload) self.cache.assert_has_calls( [mock.call.get_network_by_id(fake_port2.network_id), mock.call.put_port(mock.ANY)]) self.call_driver.assert_called_once_with('reload_allocations', fake_network) def test_port_update_change_ip_on_port(self): payload = dict(port=fake_port1) self.cache.get_network_by_id.return_value = fake_network updated_fake_port1 = copy.deepcopy(fake_port1) updated_fake_port1.fixed_ips[0].ip_address = '172.9.9.99' self.cache.get_port_by_id.return_value = updated_fake_port1 self.dhcp.port_update_end(None, payload) self.cache.assert_has_calls( [mock.call.get_network_by_id(fake_port1.network_id), mock.call.put_port(mock.ANY)]) self.call_driver.assert_has_calls( [mock.call.call_driver('reload_allocations', fake_network)]) def test_port_update_change_ip_on_dhcp_agents_port(self): self.cache.get_network_by_id.return_value = fake_network self.cache.get_port_by_id.return_value = fake_port1 payload = dict(port=copy.deepcopy(fake_port1)) device_id = utils.get_dhcp_agent_device_id( 
payload['port']['network_id'], self.dhcp.conf.host) payload['port']['fixed_ips'][0]['ip_address'] = '172.9.9.99' payload['port']['device_id'] = device_id self.dhcp.port_update_end(None, payload) self.call_driver.assert_has_calls( [mock.call.call_driver('restart', fake_network)]) def test_port_update_on_dhcp_agents_port_no_ip_change(self): self.cache.get_network_by_id.return_value = fake_network self.cache.get_port_by_id.return_value = fake_port1 payload = dict(port=fake_port1) device_id = utils.get_dhcp_agent_device_id( payload['port']['network_id'], self.dhcp.conf.host) payload['port']['device_id'] = device_id self.dhcp.port_update_end(None, payload) self.call_driver.assert_has_calls( [mock.call.call_driver('reload_allocations', fake_network)]) def test_port_delete_end(self): payload = dict(port_id=fake_port2.id) self.cache.get_network_by_id.return_value = fake_network self.cache.get_port_by_id.return_value = fake_port2 self.dhcp.port_delete_end(None, payload) self.cache.assert_has_calls( [mock.call.get_port_by_id(fake_port2.id), mock.call.get_network_by_id(fake_network.id), mock.call.remove_port(fake_port2)]) self.call_driver.assert_has_calls( [mock.call.call_driver('reload_allocations', fake_network)]) def test_port_delete_end_unknown_port(self): payload = dict(port_id='unknown') self.cache.get_port_by_id.return_value = None self.dhcp.port_delete_end(None, payload) self.cache.assert_has_calls([mock.call.get_port_by_id('unknown')]) self.assertEqual(self.call_driver.call_count, 0) class TestDhcpPluginApiProxy(base.BaseTestCase): def _test_dhcp_api(self, method, **kwargs): ctxt = context.get_admin_context() proxy = dhcp_agent.DhcpPluginApi('foo', ctxt, None, host='foo') with mock.patch.object(proxy.client, 'call') as rpc_mock,\ mock.patch.object(proxy.client, 'prepare') as prepare_mock: prepare_mock.return_value = proxy.client rpc_mock.return_value = kwargs.pop('return_value', []) prepare_args = {} if 'version' in kwargs: prepare_args['version'] = kwargs.pop('version') retval = getattr(proxy, method)(**kwargs) self.assertEqual(retval, rpc_mock.return_value) prepare_mock.assert_called_once_with(**prepare_args) kwargs['host'] = proxy.host rpc_mock.assert_called_once_with(ctxt, method, **kwargs) def test_get_active_networks_info(self): self._test_dhcp_api('get_active_networks_info', version='1.1') def test_get_network_info(self): self._test_dhcp_api('get_network_info', network_id='fake_id', return_value=None) def test_create_dhcp_port(self): self._test_dhcp_api('create_dhcp_port', port='fake_port', return_value=None, version='1.1') def test_update_dhcp_port(self): self._test_dhcp_api('update_dhcp_port', port_id='fake_id', port='fake_port', return_value=None, version='1.1') def test_release_dhcp_port(self): self._test_dhcp_api('release_dhcp_port', network_id='fake_id', device_id='fake_id_2') def test_release_port_fixed_ip(self): self._test_dhcp_api('release_port_fixed_ip', network_id='fake_id', device_id='fake_id_2', subnet_id='fake_id_3') class TestNetworkCache(base.BaseTestCase): def test_put_network(self): nc = dhcp_agent.NetworkCache() nc.put(fake_network) self.assertEqual(nc.cache, {fake_network.id: fake_network}) self.assertEqual(nc.subnet_lookup, {fake_subnet1.id: fake_network.id, fake_subnet2.id: fake_network.id}) self.assertEqual(nc.port_lookup, {fake_port1.id: fake_network.id}) def test_put_network_existing(self): prev_network_info = mock.Mock() nc = dhcp_agent.NetworkCache() with mock.patch.object(nc, 'remove') as remove: nc.cache[fake_network.id] = prev_network_info 
nc.put(fake_network) remove.assert_called_once_with(prev_network_info) self.assertEqual(nc.cache, {fake_network.id: fake_network}) self.assertEqual(nc.subnet_lookup, {fake_subnet1.id: fake_network.id, fake_subnet2.id: fake_network.id}) self.assertEqual(nc.port_lookup, {fake_port1.id: fake_network.id}) def test_remove_network(self): nc = dhcp_agent.NetworkCache() nc.cache = {fake_network.id: fake_network} nc.subnet_lookup = {fake_subnet1.id: fake_network.id, fake_subnet2.id: fake_network.id} nc.port_lookup = {fake_port1.id: fake_network.id} nc.remove(fake_network) self.assertEqual(len(nc.cache), 0) self.assertEqual(len(nc.subnet_lookup), 0) self.assertEqual(len(nc.port_lookup), 0) def test_get_network_by_id(self): nc = dhcp_agent.NetworkCache() nc.put(fake_network) self.assertEqual(nc.get_network_by_id(fake_network.id), fake_network) def test_get_network_ids(self): nc = dhcp_agent.NetworkCache() nc.put(fake_network) self.assertEqual(list(nc.get_network_ids()), [fake_network.id]) def test_get_network_by_subnet_id(self): nc = dhcp_agent.NetworkCache() nc.put(fake_network) self.assertEqual(nc.get_network_by_subnet_id(fake_subnet1.id), fake_network) def test_get_network_by_port_id(self): nc = dhcp_agent.NetworkCache() nc.put(fake_network) self.assertEqual(nc.get_network_by_port_id(fake_port1.id), fake_network) def test_put_port(self): fake_net = dhcp.NetModel( True, dict(id='12345678-1234-5678-1234567890ab', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', subnets=[fake_subnet1], ports=[fake_port1])) nc = dhcp_agent.NetworkCache() nc.put(fake_net) nc.put_port(fake_port2) self.assertEqual(len(nc.port_lookup), 2) self.assertIn(fake_port2, fake_net.ports) def test_put_port_existing(self): fake_net = dhcp.NetModel( True, dict(id='12345678-1234-5678-1234567890ab', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', subnets=[fake_subnet1], ports=[fake_port1, fake_port2])) nc = dhcp_agent.NetworkCache() nc.put(fake_net) nc.put_port(fake_port2) self.assertEqual(len(nc.port_lookup), 2) self.assertIn(fake_port2, fake_net.ports) def test_remove_port_existing(self): fake_net = dhcp.NetModel( True, dict(id='12345678-1234-5678-1234567890ab', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa', subnets=[fake_subnet1], ports=[fake_port1, fake_port2])) nc = dhcp_agent.NetworkCache() nc.put(fake_net) nc.remove_port(fake_port2) self.assertEqual(len(nc.port_lookup), 1) self.assertNotIn(fake_port2, fake_net.ports) def test_get_port_by_id(self): nc = dhcp_agent.NetworkCache() nc.put(fake_network) self.assertEqual(nc.get_port_by_id(fake_port1.id), fake_port1) class FakePort1(object): id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' class FakeV4Subnet(object): id = 'dddddddd-dddd-dddd-dddd-dddddddddddd' ip_version = 4 cidr = '192.168.0.0/24' gateway_ip = '192.168.0.1' enable_dhcp = True class FakeV4SubnetNoGateway(object): id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' ip_version = 4 cidr = '192.168.1.0/24' gateway_ip = None enable_dhcp = True class FakeV4Network(object): id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' subnets = [FakeV4Subnet()] ports = [FakePort1()] namespace = 'qdhcp-aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' class FakeV4NetworkNoSubnet(object): id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' subnets = [] ports = [] class FakeV4NetworkNoGateway(object): id = 'cccccccc-cccc-cccc-cccc-cccccccccccc' subnets = [FakeV4SubnetNoGateway()] ports = [FakePort1()] class TestDeviceManager(base.BaseTestCase): def setUp(self): super(TestDeviceManager, self).setUp() config.register_interface_driver_opts_helper(cfg.CONF) 
config.register_use_namespaces_opts_helper(cfg.CONF) cfg.CONF.register_opts(dhcp_config.DHCP_AGENT_OPTS) cfg.CONF.set_override('interface_driver', 'neutron.agent.linux.interface.NullDriver') cfg.CONF.set_override('use_namespaces', True) cfg.CONF.set_override('enable_isolated_metadata', True) self.ensure_device_is_ready_p = mock.patch( 'neutron.agent.linux.ip_lib.ensure_device_is_ready') self.ensure_device_is_ready = (self.ensure_device_is_ready_p.start()) self.dvr_cls_p = mock.patch('neutron.agent.linux.interface.NullDriver') self.iproute_cls_p = mock.patch('neutron.agent.linux.' 'ip_lib.IpRouteCommand') driver_cls = self.dvr_cls_p.start() iproute_cls = self.iproute_cls_p.start() self.mock_driver = mock.MagicMock() self.mock_driver.DEV_NAME_LEN = ( interface.LinuxInterfaceDriver.DEV_NAME_LEN) self.mock_driver.use_gateway_ips = False self.mock_iproute = mock.MagicMock() driver_cls.return_value = self.mock_driver iproute_cls.return_value = self.mock_iproute iptables_cls_p = mock.patch( 'neutron.agent.linux.iptables_manager.IptablesManager') iptables_cls = iptables_cls_p.start() self.iptables_inst = mock.Mock() iptables_cls.return_value = self.iptables_inst self.mangle_inst = mock.Mock() self.iptables_inst.ipv4 = {'mangle': self.mangle_inst} def _test_setup_helper(self, device_is_ready, net=None, port=None): net = net or fake_network port = port or fake_port1 plugin = mock.Mock() plugin.create_dhcp_port.return_value = port or fake_port1 self.ensure_device_is_ready.return_value = device_is_ready self.mock_driver.get_device_name.return_value = 'tap12345678-12' dh = dhcp.DeviceManager(cfg.CONF, plugin) dh._set_default_route = mock.Mock() interface_name = dh.setup(net) self.assertEqual(interface_name, 'tap12345678-12') plugin.assert_has_calls([ mock.call.create_dhcp_port( {'port': {'name': '', 'admin_state_up': True, 'network_id': net.id, 'tenant_id': net.tenant_id, 'fixed_ips': [{'subnet_id': port.fixed_ips[0].subnet_id}], 'device_id': mock.ANY}})]) if port == fake_ipv6_port: expected_ips = ['169.254.169.254/16'] else: expected_ips = ['172.9.9.9/24', '169.254.169.254/16'] expected = [ mock.call.get_device_name(port), mock.call.init_l3( 'tap12345678-12', expected_ips, namespace=net.namespace)] if not device_is_ready: expected.insert(1, mock.call.plug(net.id, port.id, 'tap12345678-12', 'aa:bb:cc:dd:ee:ff', namespace=net.namespace)) self.mock_driver.assert_has_calls(expected) dh._set_default_route.assert_called_once_with(net, 'tap12345678-12') def test_setup(self): cfg.CONF.set_override('enable_metadata_network', False) self._test_setup_helper(False) cfg.CONF.set_override('enable_metadata_network', True) self._test_setup_helper(False) def test_setup_calls_fill_dhcp_udp_checksums(self): self._test_setup_helper(False) rule = ('-p udp --dport %d -j CHECKSUM --checksum-fill' % const.DHCP_RESPONSE_PORT) expected = [mock.call.add_rule('POSTROUTING', rule)] self.mangle_inst.assert_has_calls(expected) def test_setup_create_dhcp_port(self): plugin = mock.Mock() net = copy.deepcopy(fake_network) plugin.create_dhcp_port.return_value = fake_dhcp_port dh = dhcp.DeviceManager(cfg.CONF, plugin) dh.setup(net) plugin.assert_has_calls([ mock.call.create_dhcp_port( {'port': {'name': '', 'admin_state_up': True, 'network_id': net.id, 'tenant_id': net.tenant_id, 'fixed_ips': [{'subnet_id': fake_dhcp_port.fixed_ips[0].subnet_id}], 'device_id': mock.ANY}})]) self.assertIn(fake_dhcp_port, net.ports) def test_setup_ipv6(self): self._test_setup_helper(True, net=fake_network_ipv6, port=fake_ipv6_port) def 
test_setup_device_is_ready(self): self._test_setup_helper(True) def test_create_dhcp_port_raise_conflict(self): plugin = mock.Mock() dh = dhcp.DeviceManager(cfg.CONF, plugin) plugin.create_dhcp_port.return_value = None self.assertRaises(exceptions.Conflict, dh.setup_dhcp_port, fake_network) def test_create_dhcp_port_create_new(self): plugin = mock.Mock() dh = dhcp.DeviceManager(cfg.CONF, plugin) plugin.create_dhcp_port.return_value = fake_network.ports[0] dh.setup_dhcp_port(fake_network) plugin.assert_has_calls([ mock.call.create_dhcp_port( {'port': {'name': '', 'admin_state_up': True, 'network_id': fake_network.id, 'tenant_id': fake_network.tenant_id, 'fixed_ips': [{'subnet_id': fake_fixed_ip1.subnet_id}], 'device_id': mock.ANY}})]) def test_create_dhcp_port_update_add_subnet(self): plugin = mock.Mock() dh = dhcp.DeviceManager(cfg.CONF, plugin) fake_network_copy = copy.deepcopy(fake_network) fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network) fake_network_copy.subnets[1].enable_dhcp = True plugin.update_dhcp_port.return_value = fake_network.ports[0] dh.setup_dhcp_port(fake_network_copy) port_body = {'port': { 'network_id': fake_network.id, 'fixed_ips': [{'subnet_id': fake_fixed_ip1.subnet_id, 'ip_address': fake_fixed_ip1.ip_address}, {'subnet_id': fake_subnet2.id}]}} plugin.assert_has_calls([ mock.call.update_dhcp_port(fake_network_copy.ports[0].id, port_body)]) def test_update_dhcp_port_raises_conflict(self): plugin = mock.Mock() dh = dhcp.DeviceManager(cfg.CONF, plugin) fake_network_copy = copy.deepcopy(fake_network) fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network) fake_network_copy.subnets[1].enable_dhcp = True plugin.update_dhcp_port.return_value = None self.assertRaises(exceptions.Conflict, dh.setup_dhcp_port, fake_network_copy) def test_create_dhcp_port_no_update_or_create(self): plugin = mock.Mock() dh = dhcp.DeviceManager(cfg.CONF, plugin) fake_network_copy = copy.deepcopy(fake_network) fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network) dh.setup_dhcp_port(fake_network_copy) self.assertFalse(plugin.setup_dhcp_port.called) self.assertFalse(plugin.update_dhcp_port.called) def test_setup_dhcp_port_with_non_enable_dhcp_subnet(self): plugin = mock.Mock() dh = dhcp.DeviceManager(cfg.CONF, plugin) fake_network_copy = copy.deepcopy(fake_network) fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network) plugin.update_dhcp_port.return_value = fake_port1 self.assertEqual(fake_subnet1.id, dh.setup_dhcp_port(fake_network_copy).fixed_ips[0].subnet_id) def test_destroy(self): fake_net = dhcp.NetModel( True, dict(id=FAKE_NETWORK_UUID, tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa')) with mock.patch('neutron.agent.linux.interface.NullDriver') as dvr_cls: mock_driver = mock.MagicMock() mock_driver.get_device_name.return_value = 'tap12345678-12' dvr_cls.return_value = mock_driver plugin = mock.Mock() dh = dhcp.DeviceManager(cfg.CONF, plugin) dh.destroy(fake_net, 'tap12345678-12') dvr_cls.assert_called_once_with(cfg.CONF) mock_driver.assert_has_calls( [mock.call.unplug('tap12345678-12', namespace='qdhcp-' + fake_net.id)]) plugin.assert_has_calls( [mock.call.release_dhcp_port(fake_net.id, mock.ANY)]) def test_get_interface_name(self): fake_net = dhcp.NetModel( True, dict(id='12345678-1234-5678-1234567890ab', tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa')) fake_port = dhcp.DictModel( dict(id='12345678-1234-aaaa-1234567890ab', mac_address='aa:bb:cc:dd:ee:ff')) with mock.patch('neutron.agent.linux.interface.NullDriver') as dvr_cls: 
            mock_driver = mock.MagicMock()
            mock_driver.get_device_name.return_value = 'tap12345678-12'
            dvr_cls.return_value = mock_driver

            plugin = mock.Mock()
            dh = dhcp.DeviceManager(cfg.CONF, plugin)
            dh.get_interface_name(fake_net, fake_port)

            dvr_cls.assert_called_once_with(cfg.CONF)
            mock_driver.assert_has_calls(
                [mock.call.get_device_name(fake_port)])

            self.assertEqual(len(plugin.mock_calls), 0)

    def test_get_device_id(self):
        fake_net = dhcp.NetModel(
            True,
            dict(id='12345678-1234-5678-1234567890ab',
                 tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'))
        expected = ('dhcp1ae5f96c-c527-5079-82ea-371a01645457-12345678-1234-'
                    '5678-1234567890ab')

        with mock.patch('uuid.uuid5') as uuid5:
            uuid5.return_value = '1ae5f96c-c527-5079-82ea-371a01645457'

            dh = dhcp.DeviceManager(cfg.CONF, None)
            self.assertEqual(dh.get_device_id(fake_net), expected)
            uuid5.assert_called_once_with(uuid.NAMESPACE_DNS, cfg.CONF.host)

    def test_update(self):
        # Try with namespaces and no metadata network
        cfg.CONF.set_override('use_namespaces', True)
        cfg.CONF.set_override('enable_metadata_network', False)
        dh = dhcp.DeviceManager(cfg.CONF, None)
        dh._set_default_route = mock.Mock()
        network = mock.Mock()

        dh.update(network, 'ns-12345678-12')

        dh._set_default_route.assert_called_once_with(network,
                                                      'ns-12345678-12')

        # No namespaces, shouldn't set default route.
        cfg.CONF.set_override('use_namespaces', False)
        cfg.CONF.set_override('enable_metadata_network', False)
        dh = dhcp.DeviceManager(cfg.CONF, None)
        dh._set_default_route = mock.Mock()

        dh.update(FakeV4Network(), 'tap12345678-12')

        self.assertFalse(dh._set_default_route.called)

        # Metadata network enabled with namespaces: the default route is
        # still set on the DHCP interface.
        cfg.CONF.set_override('use_namespaces', True)
        cfg.CONF.set_override('enable_metadata_network', True)
        dh = dhcp.DeviceManager(cfg.CONF, None)
        dh._set_default_route = mock.Mock()

        dh.update(FakeV4Network(), 'ns-12345678-12')

        self.assertTrue(dh._set_default_route.called)

        # For completeness
        cfg.CONF.set_override('use_namespaces', False)
        cfg.CONF.set_override('enable_metadata_network', True)
        dh = dhcp.DeviceManager(cfg.CONF, None)
        dh._set_default_route = mock.Mock()

        dh.update(FakeV4Network(), 'ns-12345678-12')

        self.assertFalse(dh._set_default_route.called)

    def test_set_default_route(self):
        dh = dhcp.DeviceManager(cfg.CONF, None)
        with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
            device = mock.Mock()
            mock_IPDevice.return_value = device
            device.route.get_gateway.return_value = None
            # Basic one subnet with gateway.
network = FakeV4Network() dh._set_default_route(network, 'tap-name') self.assertEqual(device.route.get_gateway.call_count, 1) self.assertFalse(device.route.delete_gateway.called) device.route.add_gateway.assert_called_once_with('192.168.0.1') def test_set_default_route_no_subnet(self): dh = dhcp.DeviceManager(cfg.CONF, None) with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice: device = mock.Mock() mock_IPDevice.return_value = device device.route.get_gateway.return_value = None network = FakeV4NetworkNoSubnet() network.namespace = 'qdhcp-1234' dh._set_default_route(network, 'tap-name') self.assertEqual(device.route.get_gateway.call_count, 1) self.assertFalse(device.route.delete_gateway.called) self.assertFalse(device.route.add_gateway.called) def test_set_default_route_no_subnet_delete_gateway(self): dh = dhcp.DeviceManager(cfg.CONF, None) with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice: device = mock.Mock() mock_IPDevice.return_value = device device.route.get_gateway.return_value = dict(gateway='192.168.0.1') network = FakeV4NetworkNoSubnet() network.namespace = 'qdhcp-1234' dh._set_default_route(network, 'tap-name') self.assertEqual(device.route.get_gateway.call_count, 1) device.route.delete_gateway.assert_called_once_with('192.168.0.1') self.assertFalse(device.route.add_gateway.called) def test_set_default_route_no_gateway(self): dh = dhcp.DeviceManager(cfg.CONF, None) with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice: device = mock.Mock() mock_IPDevice.return_value = device device.route.get_gateway.return_value = dict(gateway='192.168.0.1') network = FakeV4NetworkNoGateway() network.namespace = 'qdhcp-1234' dh._set_default_route(network, 'tap-name') self.assertEqual(device.route.get_gateway.call_count, 1) device.route.delete_gateway.assert_called_once_with('192.168.0.1') self.assertFalse(device.route.add_gateway.called) def test_set_default_route_do_nothing(self): dh = dhcp.DeviceManager(cfg.CONF, None) with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice: device = mock.Mock() mock_IPDevice.return_value = device device.route.get_gateway.return_value = dict(gateway='192.168.0.1') network = FakeV4Network() dh._set_default_route(network, 'tap-name') self.assertEqual(device.route.get_gateway.call_count, 1) self.assertFalse(device.route.delete_gateway.called) self.assertFalse(device.route.add_gateway.called) def test_set_default_route_change_gateway(self): dh = dhcp.DeviceManager(cfg.CONF, None) with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice: device = mock.Mock() mock_IPDevice.return_value = device device.route.get_gateway.return_value = dict(gateway='192.168.0.2') network = FakeV4Network() dh._set_default_route(network, 'tap-name') self.assertEqual(device.route.get_gateway.call_count, 1) self.assertFalse(device.route.delete_gateway.called) device.route.add_gateway.assert_called_once_with('192.168.0.1') def test_set_default_route_two_subnets(self): # Try two subnets. Should set gateway from the first. 
dh = dhcp.DeviceManager(cfg.CONF, None) with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice: device = mock.Mock() mock_IPDevice.return_value = device device.route.get_gateway.return_value = None network = FakeV4Network() subnet2 = FakeV4Subnet() subnet2.gateway_ip = '192.168.1.1' network.subnets = [subnet2, FakeV4Subnet()] dh._set_default_route(network, 'tap-name') self.assertEqual(device.route.get_gateway.call_count, 1) self.assertFalse(device.route.delete_gateway.called) device.route.add_gateway.assert_called_once_with('192.168.1.1') class TestDictModel(base.BaseTestCase): def test_basic_dict(self): d = dict(a=1, b=2) m = dhcp.DictModel(d) self.assertEqual(m.a, 1) self.assertEqual(m.b, 2) def test_dict_has_sub_dict(self): d = dict(a=dict(b=2)) m = dhcp.DictModel(d) self.assertEqual(m.a.b, 2) def test_dict_contains_list(self): d = dict(a=[1, 2]) m = dhcp.DictModel(d) self.assertEqual(m.a, [1, 2]) def test_dict_contains_list_of_dicts(self): d = dict(a=[dict(b=2), dict(c=3)]) m = dhcp.DictModel(d) self.assertEqual(m.a[0].b, 2) self.assertEqual(m.a[1].c, 3) class TestNetModel(base.BaseTestCase): def test_ns_name(self): network = dhcp.NetModel(True, {'id': 'foo'}) self.assertEqual(network.namespace, 'qdhcp-foo') def test_ns_name_false_namespace(self): network = dhcp.NetModel(False, {'id': 'foo'}) self.assertIsNone(network.namespace) def test_ns_name_none_namespace(self): network = dhcp.NetModel(None, {'id': 'foo'}) self.assertIsNone(network.namespace)
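# --------------------------------------------------------------------------
# Illustrative sketch (not part of the test suite above): a minimal
# re-implementation of the attribute-wrapping behaviour that TestDictModel
# exercises, so the expected semantics can be seen in isolation.
# MiniDictModel is a hypothetical stand-in for dhcp.DictModel.
# --------------------------------------------------------------------------
class MiniDictModel(object):
    """Wrap a dict (recursively) so its keys are reachable as attributes."""

    def __init__(self, d):
        for key, value in d.items():
            if isinstance(value, dict):
                value = MiniDictModel(value)
            elif isinstance(value, list):
                value = [MiniDictModel(item) if isinstance(item, dict)
                         else item for item in value]
            setattr(self, key, value)


m = MiniDictModel(dict(a=dict(b=2), c=[dict(d=3), 4]))
assert m.a.b == 2 and m.c[0].d == 3 and m.c[1] == 4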
from Experiment import Experiment # Main experiment class from pUtil import tolog # Logging method that sends text to the pilot log from pUtil import readpar # Used to read values from the schedconfig DB (queuedata) from pUtil import isAnalysisJob # Is the current job a user analysis job or a production job? from pUtil import verifyReleaseString # To verify the release string (move to Experiment later) from pUtil import timedCommand # Standard time-out function from PilotErrors import PilotErrors # Error codes from ATLASExperiment import ATLASExperiment import os import re import commands from glob import glob class NordugridATLASExperiment(ATLASExperiment): # private data members __experiment = "Nordugrid-ATLAS" __instance = None __warning = "" __analysisJob = False __job = None # Required methods def __init__(self): """ Default initialization """ # e.g. self.__errorLabel = errorLabel pass def __new__(cls, *args, **kwargs): """ Override the __new__ method to make the class a singleton """ if not cls.__instance: cls.__instance = super(ATLASExperiment, cls).__new__(cls, *args, **kwargs) return cls.__instance def getExperiment(self): """ Return a string with the experiment name """ return self.__experiment def setParameters(self, *args, **kwargs): """ Set any internally needed variables """ # set initial values self.__job = kwargs.get('job', None) if self.__job: self.__analysisJob = isAnalysisJob(self.__job.trf) else: self.__warning = "setParameters found no job object" def getJobExecutionCommandObsolete(self, job, jobSite, pilot_initdir): """ Define and test the command(s) that will be used to execute the payload """ # Input tuple: (method is called from RunJob*) # job: Job object # jobSite: Site object # pilot_initdir: launch directory of pilot.py # # Return tuple: # pilot_error_code, pilot_error_diagnostics, job_execution_command, special_setup_command, JEM, cmtconfig # where # pilot_error_code : self.__error.<PILOT ERROR CODE as defined in PilotErrors class> (value should be 0 for successful setup) # pilot_error_diagnostics: any output from problematic command or explanatory error diagnostics # job_execution_command : command to execute payload, e.g. cmd = "source <path>/setup.sh; <path>/python trf.py [options]" # special_setup_command : any special setup command that can be insterted into job_execution_command and is sent to stage-in/out methods # JEM : Job Execution Monitor activation state (default value "NO", meaning JEM is not to be used. See JEMstub.py) # cmtconfig : cmtconfig symbol from the job def or schedconfig, e.g. "x86_64-slc5-gcc43-opt" [NOT USED IN THIS CLASS] pilotErrorDiag = "" cmd = "" special_setup_cmd = "" pysiteroot = "" siteroot = "" JEM = "NO" cmtconfig = "" # Is it's an analysis job or not? analysisJob = isAnalysisJob(job.trf) # Set the INDS env variable (used by runAthena) if analysisJob: self.setINDS(job.realDatasetsIn) # Command used to download runAthena or runGen wgetCommand = 'wget' # special setup for NG status, pilotErrorDiag, cmd = self.setupNordugridTrf(job, analysisJob, wgetCommand, pilot_initdir) if status != 0: return status, pilotErrorDiag, "", special_setup_cmd, JEM, cmtconfig # add FRONTIER debugging and RUCIO env variables cmd = self.addEnvVars2Cmd(cmd, job.jobId, job.taskID, job.processingType, jobSite.sitename, analysisJob) if readpar('cloud') == "DE": # Should JEM be used? metaOut = {} try: import sys from JEMstub import updateRunCommand4JEM # If JEM should be used, the command will get updated by the JEMstub automatically. 
cmd = updateRunCommand4JEM(cmd, job, jobSite, tolog, metaOut=metaOut) except: # On failure, cmd stays the same tolog("Failed to update run command for JEM - will run unmonitored.") # Is JEM to be used? if metaOut.has_key("JEMactive"): JEM = metaOut["JEMactive"] tolog("Use JEM: %s (dictionary = %s)" % (JEM, str(metaOut))) elif '--enable-jem' in cmd: tolog("!!WARNING!!1111!! JEM can currently only be used on certain sites in DE") # Pipe stdout/err for payload to files cmd += " 1>%s 2>%s" % (job.stdout, job.stderr) tolog("\nCommand to run the job is: \n%s" % (cmd)) tolog("ATLAS_PYTHON_PILOT = %s" % (os.environ['ATLAS_PYTHON_PILOT'])) if special_setup_cmd != "": tolog("Special setup command: %s" % (special_setup_cmd)) return 0, pilotErrorDiag, cmd, special_setup_cmd, JEM, cmtconfig def willDoFileLookups(self): """ Should (LFC) file lookups be done by the pilot or not? """ return False def willDoFileRegistration(self): """ Should (LFC) file registration be done by the pilot or not? """ return False # Additional optional methods def setupNordugridTrf(self, job, analysisJob, wgetCommand, pilot_initdir): """ perform the Nordugrid trf setup """ error = PilotErrors() pilotErrorDiag = "" cmd = "" # assume that the runtime script has already been created if not os.environ.has_key('RUNTIME_CONFIG_DIR'): pilotErrorDiag = "Environment variable not set: RUNTIME_CONFIG_DIR" tolog("!!FAILED!!3000!! %s" % (pilotErrorDiag)) return error.ERR_SETUPFAILURE, pilotErrorDiag, "" runtime_script = "%s/APPS/HEP/ATLAS-%s" % (os.environ['RUNTIME_CONFIG_DIR'], job.release) if os.path.exists(runtime_script): cmd = ". %s 1" % (runtime_script) if analysisJob: # try to download the analysis trf status, pilotErrorDiag, trfName = self.getAnalysisTrf(wgetCommand, job.trf, pilot_initdir) if status != 0: return status, pilotErrorDiag, "" trfName = "./" + trfName else: trfName = job.trf cmd += '; export ATLAS_RELEASE=%s;export AtlasVersion=%s;export AtlasPatchVersion=%s' % (job.homePackage.split('/')[-1],job.homePackage.split('/')[-1],job.homePackage.split('/')[-1]) cmd += "; %s %s" % (trfName, job.jobPars) elif verifyReleaseString(job.release) == "NULL": if analysisJob: # try to download the analysis trf status, pilotErrorDiag, trfName = self.getAnalysisTrf(wgetCommand, job.trf, pilot_initdir) if status != 0: return status, pilotErrorDiag, "" trfName = "./" + trfName else: trfName = job.trf cmd = "%s %s" % (trfName, job.jobPars) else: pilotErrorDiag = "Could not locate runtime script: %s" % (runtime_script) tolog("!!FAILED!!3000!! %s" % (pilotErrorDiag)) return error.ERR_SETUPFAILURE, pilotErrorDiag, "" # correct for multi-core if necessary (especially important in case coreCount=1 to limit parallel make) cmd = self.addMAKEFLAGS(job.coreCount, "") + cmd return 0, pilotErrorDiag, cmd def getWarning(self): """ Return any warning message passed to __warning """ return self.__warning def getReleaseObsolete(self, release): """ Return a list of the software release id's """ # Assuming 'release' is a string that separates release id's with '\n' # Used in the case of payload using multiple steps with different release versions # E.g. 
release = "19.0.0\n19.1.0" -> ['19.0.0', '19.1.0'] if os.environ.has_key('Nordugrid_pilot') and os.environ.has_key('ATLAS_RELEASE'): return os.environ['ATLAS_RELEASE'].split(",") else: return release.split("\n") def checkSpecialEnvVars(self, sitename): """ Check special environment variables """ # Set a special env variable that will be used to identify Nordugrid in other pilot classes os.environ['Nordugrid_pilot'] = "" # Call the method from the parent class ec = super(NordugridATLASExperiment, self).checkSpecialEnvVars(sitename) return ec # Optional def shouldExecuteUtility(self): """ Determine where a memory utility monitor should be executed """ # The RunJob class has the possibility to execute a memory utility monitor that can track the memory usage # of the payload. The monitor is executed if this method returns True. The monitor is expected to produce # a summary JSON file whose name is defined by the getMemoryMonitorJSONFilename() method. The contents of # this file (ie. the full JSON dictionary) will be added to the jobMetrics at the end of the job (see # PandaServerClient class). return True # Optional def getUtilityJSONFilename(self): """ Return the filename of the memory monitor JSON file """ # For explanation, see shouldExecuteUtility() return "memory_monitor_summary.json" def getSetupPath(self, job_command, trf): """ Get the setup path from the job execution command """ setup = "" # Trim the trf if necessary (i.e. remove any paths which are present in buildJob jobs) trf = self.trimTrfName(trf) # Take care of special cases, e.g. trf="buildJob-.." but job_command="..; ./buildJob-.." special_case = "./%s" % (trf) if special_case in job_command: trf = special_case # Strip the setup command at the location of the trf name l = job_command.find(trf) if l > 0: setup = job_command[:l] # Make sure to remove any unwanted white spaces as well return setup.strip() def trimTrfName(self, trfName): """ Remove any unwanted strings from the trfName """ if "/" in trfName: trfName = os.path.basename(trfName) return trfName def updateSetupPathWithReleaseAndCmtconfig(self, setup_path, release, alt_release, patched_release, alt_patched_release, cmtconfig, alt_cmtconfig): """ Update the setup path with an alternative release, pathched release and cmtconfig """ # This method can be used to modify a setup path with an alternative release, patched release and cmtconfig # E.g. this can be used by a tool that might want to fall back to a preferred setup # Correct the release info if "-" in release: # the cmtconfig is appended, e.g. release='17.2.7-X86_64-SLC5-GCC43-OPT' cmtconfig = release[release.find('-')+1:] release = release[:release.find('-')] # Update the patched release with a tmp string if patched_release != "" and patched_release in setup_path: setup_path = setup_path.replace(patched_release, '__PATCHED_RELEASE__') # Update the release if release in setup_path: setup_path = setup_path.replace(release, alt_release) # Update the patched release if '__PATCHED_RELEASE__' in setup_path: setup_path = setup_path.replace('__PATCHED_RELEASE__', alt_patched_release) # Update the cmtconfig if cmtconfig != "" and cmtconfig in setup_path: setup_path = setup_path.replace(cmtconfig, alt_cmtconfig.upper()) return setup_path # Optional def getUtilityCommand(self, **argdict): """ Prepare a utility command string """ # This method can be used to prepare a setup string for an optional utility tool, e.g. a memory monitor, # that will be executed by the pilot in parallel with the payload. 
# The pilot will look for an output JSON file (summary.json) and will extract pre-determined fields # from it and report them with the job updates. Currently the pilot expects to find fields related # to memory information. pid = argdict.get('pid', 0) summary = self.getUtilityJSONFilename() workdir = argdict.get('workdir', '.') interval = 60 default_release = "21.0.22" #"21.0.18" #"21.0.17" #"20.7.5" #"20.1.5" # default_patch_release = "20.7.5.8" #"20.1.5.2" #"20.1.4.1" # default_cmtconfig = "x86_64-slc6-gcc49-opt" default_cmtconfig = "x86_64-slc6-gcc62-opt" # default_swbase = "%s/atlas.cern.ch/repo/sw/software" % (self.getCVMFSPath()) default_swbase = "%s/atlas.cern.ch/repo" % (self.getCVMFSPath()) default_setup = self.getModernASetup() + " Athena," + default_release + " --platform " + default_cmtconfig tolog("Will use default (fallback) setup for MemoryMonitor") cmd = default_setup # Now add the MemoryMonitor command cmd += "; MemoryMonitor --pid %d --filename %s --json-summary %s --interval %d" % (pid, self.getUtilityOutputFilename(), summary, interval) cmd = "cd " + workdir + ";" + cmd return cmd if __name__ == "__main__": print "Implement test cases here"
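# Illustrative expectations for the setup-path helpers above (a sketch for
# the placeholder test cases; the values follow directly from the code, the
# command string itself is hypothetical):
#
#   exp = NordugridATLASExperiment()
#   exp.trimTrfName("/some/path/buildJob-00-00-03")  # 'buildJob-00-00-03'
#   exp.getSetupPath("source setup.sh; ./runAthena -a xyz", "runAthena")
#                                                    # 'source setup.sh;'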
''' Saves relevant data fed back from TwitterStream etc next to its PID and timestamp ready for analysis Needs to do limited analysis to work out which keywords in the tweet stream correspond to which programme ''' from datetime import datetime import os import string import time as time2 from time import time from Axon.Ipc import producerFinished from Axon.Ipc import shutdownMicroprocess from Axon.ThreadedComponent import threadedcomponent import MySQLdb import _mysql_exceptions import cjson from dateutil.parser import parse class DataCollector(threadedcomponent): Inboxes = { "inbox" : "Receives data in the format [tweetjson,[pid,pid]]", "control" : "" } Outboxes = { "outbox" : "", "signal" : "" } def __init__(self,dbuser,dbpass): super(DataCollector, self).__init__() self.dbuser = dbuser self.dbpass = dbpass def finished(self): while self.dataReady("control"): msg = self.recv("control") if isinstance(msg, producerFinished) or isinstance(msg, shutdownMicroprocess): self.send(msg, "signal") return True return False def dbConnect(self): db = MySQLdb.connect(user=self.dbuser,passwd=self.dbpass,db="twitter_bookmarks",use_unicode=True,charset="utf8") cursor = db.cursor() return cursor def main(self): cursor = self.dbConnect() while not self.finished(): twitdata = list() # Collect all current received tweet JSON and their related PIDs into a twitdata list while self.dataReady("inbox"): pids = list() data = self.recv("inbox") for pid in data[1]: pids.append(pid) twitdata.append([data[0],pids]) if len(twitdata) > 0: # Process the received twitdata for tweet in twitdata: tweet[0] = tweet[0].replace("\\/","/") # Fix slashes in links: This may need moving further down the line - ideally it would be handled by cjson if tweet[0] != "\r\n": # If \r\n is received, this is just a keep alive signal from Twitter every 30 secs # At this point, each 'tweet' contains tweetdata, and a list of possible pids newdata = cjson.decode(tweet[0]) if newdata.has_key('delete') or newdata.has_key('scrub_geo') or newdata.has_key('limit'): # Keep a record of all requests from Twitter for deletions, location removal etc # As yet none of these have been received, but this code will store them if they are received to enable debugging filepath = "contentDebug.txt" if os.path.exists(filepath): file = open(filepath, 'r') filecontents = file.read() else: filecontents = "" file = open(filepath, 'w') file.write(filecontents + "\n" + str(datetime.utcnow()) + " " + cjson.encode(newdata)) file.close() else: # This is a real tweet tweetid = newdata['id'] print "New tweet! @" + newdata['user']['screen_name'] + ": " + newdata['text'] for pid in tweet[1]: # Cycle through possible pids, grabbing that pid's keywords from the DB # Then, check this tweet against the keywords and save to DB where appropriate (there may be more than one location) cursor.execute("""SELECT keyword,type FROM keywords WHERE pid = %s""",(pid)) data = cursor.fetchall() for row in data: # Some keywords are stored with a ^. 
These must be split, and the tweet checked to see if it has both keywords, but not necessarily next to each other keywords = row[0].split("^") if len(keywords) == 2: if string.lower(keywords[0]) in string.lower(newdata['text']) and string.lower(keywords[1]) in string.lower(newdata['text']): cursor.execute("""SELECT timestamp,timediff FROM programmes WHERE pid = %s ORDER BY timestamp DESC""",(pid)) progdata = cursor.fetchone() if progdata != None: # Ensure the user hasn't already tweeted the same text # Also ensure they haven't tweeted in the past 10 seconds timestamp = time2.mktime(parse(newdata['created_at']).timetuple()) cursor.execute("""SELECT * FROM rawdata WHERE (pid = %s AND text = %s AND user = %s) OR (pid = %s AND user = %s AND timestamp >= %s AND timestamp < %s)""",(pid,newdata['text'],newdata['user']['screen_name'],pid,newdata['user']['screen_name'],timestamp-10,timestamp+10)) if cursor.fetchone() == None: print ("Storing tweet for pid " + pid) # Work out where this tweet really occurred in the programme using timestamps and DVB bridge data progposition = timestamp - (progdata[0] - progdata[1]) cursor.execute("""INSERT INTO rawdata (tweet_id,pid,timestamp,text,user,programme_position) VALUES (%s,%s,%s,%s,%s,%s)""", (tweetid,pid,timestamp,newdata['text'],newdata['user']['screen_name'],progposition)) break # Break out of this loop and back to check the same tweet against the next programme else: print ("Duplicate tweet from user - ignoring") if string.lower(row[0]) in string.lower(newdata['text']): cursor.execute("""SELECT timestamp,timediff FROM programmes WHERE pid = %s ORDER BY timestamp DESC""",(pid)) progdata = cursor.fetchone() if progdata != None: # Ensure the user hasn't already tweeted the same text for this programme # Also ensure they haven't tweeted in the past 10 seconds timestamp = time2.mktime(parse(newdata['created_at']).timetuple()) cursor.execute("""SELECT * FROM rawdata WHERE (pid = %s AND text = %s AND user = %s) OR (pid = %s AND user = %s AND timestamp >= %s AND timestamp < %s)""",(pid,newdata['text'],newdata['user']['screen_name'],pid,newdata['user']['screen_name'],timestamp-10,timestamp+10)) if cursor.fetchone() == None: print ("Storing tweet for pid " + pid) # Work out where this tweet really occurred in the programme using timestamps and DVB bridge data progposition = timestamp - (progdata[0] - progdata[1]) cursor.execute("""INSERT INTO rawdata (tweet_id,pid,timestamp,text,user,programme_position) VALUES (%s,%s,%s,%s,%s,%s)""", (tweetid,pid,timestamp,newdata['text'],newdata['user']['screen_name'],progposition)) break # Break out of this loop and back to check the same tweet against the next programme else: print ("Duplicate tweet from user - ignoring") else: print "Blank line received from Twitter - no new data" print ("Done!") # new line to break up display else: time2.sleep(0.1) ''' The raw data collector differs from the plain data collector in that it stores the raw JSON containers for tweets next to their unique IDs, but with no relation to PIDs This is run concurrent to the other data collector, so the two won't necessarily run at the same rate and could be out of sync This possible lack of sync must be handled later ''' class RawDataCollector(threadedcomponent): Inboxes = { "inbox" : "Receives data in the format [tweetjson,[pid,pid]]", "control" : "" } Outboxes = { "outbox" : "", "signal" : "" } def __init__(self,dbuser,dbpass): super(RawDataCollector, self).__init__() self.dbuser = dbuser self.dbpass = dbpass def finished(self): while 
self.dataReady("control"): msg = self.recv("control") if isinstance(msg, producerFinished) or isinstance(msg, shutdownMicroprocess): self.send(msg, "signal") return True return False def dbConnect(self): db = MySQLdb.connect(user=self.dbuser,passwd=self.dbpass,db="twitter_bookmarks",use_unicode=True,charset="utf8") cursor = db.cursor() return cursor def main(self): cursor = self.dbConnect() while not self.finished(): twitdata = list() # As in the data collector, create a list of all tweets currently received while self.dataReady("inbox"): data = self.recv("inbox") twitdata.append(data[0]) if len(twitdata) > 0: # Cycle through the tweets, fixing their URLs as before, and storing them if they aren't a status message for tweet in twitdata: tweet = tweet.replace("\\/","/") # This may need moving further down the line - ideally it would be handled by cjson if tweet != "\r\n": newdata = cjson.decode(tweet) if newdata.has_key('delete') or newdata.has_key('scrub_geo') or newdata.has_key('limit'): # It is assumed here that the original data collector has handled the Twitter status message print "Discarding tweet instruction - captured by other component" else: tweetid = newdata['id'] # Capture exactly when this tweet was stored tweetstamp = time() tweetsecs = int(tweetstamp) # Include the fractions of seconds portion of the timestamp in a separate field tweetfrac = tweetstamp - tweetsecs # We only have a 16000 VARCHAR field to use in MySQL (through choice) - this should be enough, but if not, the tweet will be written out to file if len(tweet) < 16000: try: cursor.execute("""INSERT INTO rawtweets (tweet_id,tweet_json,tweet_stored_seconds,tweet_stored_fraction) VALUES (%s,%s,%s,%s)""", (tweetid,tweet,tweetsecs,tweetfrac)) except _mysql_exceptions.IntegrityError, e: # Handle the possibility for Twitter having sent us a duplicate print "Duplicate tweet ID:", str(e) else: print "Discarding tweet - length limit exceeded" tweetcontents = "" homedir = os.path.expanduser("~") if os.path.exists(homedir + "/oversizedtweets.conf"): try: file = open(homedir + "/oversizedtweets.conf",'r') tweetcontents = file.read() file.close() except IOError, e: print ("Failed to load oversized tweet cache - it will be overwritten") try: file = open(homedir + "/oversizedtweets.conf",'w') tweetcontents = tweetcontents + tweet file.write(tweetcontents) file.close() except IOError, e: print ("Failed to save oversized tweet cache") else: time2.sleep(0.1)
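# --------------------------------------------------------------------------
# Illustrative sketch (not part of the components above): the keyword test
# that DataCollector.main() applies to each tweet, factored out as a plain
# function. Keywords stored with a '^' require both halves to appear
# somewhere in the tweet text, not necessarily adjacent; plain keywords are
# simple case-insensitive substring matches.
# --------------------------------------------------------------------------
def keyword_matches(keyword, tweet_text):
    text = tweet_text.lower()
    parts = keyword.split("^")
    if len(parts) == 2:
        return parts[0].lower() in text and parts[1].lower() in text
    return keyword.lower() in text


assert keyword_matches("doctor^who", "Watching Doctor strange Who tonight")
assert not keyword_matches("doctor^who", "Watching the Doctor tonight")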
""" ========================================================================== Illustration of prior and posterior Gaussian process for different kernels ========================================================================== This example illustrates the prior and posterior of a :class:`~sklearn.gaussian_process.GaussianProcessRegressor` with different kernels. Mean, standard deviation, and 5 samples are shown for both prior and posterior distributions. Here, we only give some illustration. To know more about kernels' formulation, refer to the :ref:`User Guide <gp_kernels>`. """ print(__doc__) import matplotlib.pyplot as plt import numpy as np def plot_gpr_samples(gpr_model, n_samples, ax): """Plot samples drawn from the Gaussian process model. If the Gaussian process model is not trained then the drawn samples are drawn from the prior distribution. Otherwise, the samples are drawn from the posterior distribution. Be aware that a sample here corresponds to a function. Parameters ---------- gpr_model : `GaussianProcessRegressor` A :class:`~sklearn.gaussian_process.GaussianProcessRegressor` model. n_samples : int The number of samples to draw from the Gaussian process distribution. ax : matplotlib axis The matplotlib axis where to plot the samples. """ x = np.linspace(0, 5, 100) X = x.reshape(-1, 1) y_mean, y_std = gpr_model.predict(X, return_std=True) y_samples = gpr_model.sample_y(X, n_samples) y_mean, y_std = gpr_model.predict(X, return_std=True) y_samples = gpr_model.sample_y(X, n_samples) for idx, single_prior in enumerate(y_samples.T): ax.plot( x, single_prior, linestyle="--", alpha=0.7, label=f"Sampled function #{idx + 1}", ) ax.plot(x, y_mean, color="black", label="Mean") ax.fill_between( x, y_mean - y_std, y_mean + y_std, alpha=0.1, color="black", label=r"$\pm$ 1 std. 
dev.", ) ax.set_xlabel("x") ax.set_ylabel("y") ax.set_ylim([-3, 3]) rng = np.random.RandomState(4) X_train = rng.uniform(0, 5, 10).reshape(-1, 1) y_train = np.sin((X_train[:, 0] - 2.5) ** 2) n_samples = 5 from sklearn.gaussian_process import GaussianProcessRegressor from sklearn.gaussian_process.kernels import RBF kernel = 1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0)) gpr = GaussianProcessRegressor(kernel=kernel, random_state=0) fig, axs = plt.subplots(nrows=2, sharex=True, sharey=True, figsize=(10, 8)) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[0]) axs[0].set_title("Samples from prior distribution") gpr.fit(X_train, y_train) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[1]) axs[1].scatter(X_train[:, 0], y_train, color="red", zorder=10, label="Observations") axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left") axs[1].set_title("Samples from posterior distribution") fig.suptitle("Radial Basis Function kernel", fontsize=18) plt.tight_layout() print(f"Kernel parameters before fit:\n{kernel})") print( f"Kernel parameters after fit: \n{gpr.kernel_} \n" f"Log-likelihood: {gpr.log_marginal_likelihood(gpr.kernel_.theta):.3f}" ) from sklearn.gaussian_process.kernels import RationalQuadratic kernel = 1.0 * RationalQuadratic(length_scale=1.0, alpha=0.1, alpha_bounds=(1e-5, 1e15)) gpr = GaussianProcessRegressor(kernel=kernel, random_state=0) fig, axs = plt.subplots(nrows=2, sharex=True, sharey=True, figsize=(10, 8)) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[0]) axs[0].set_title("Samples from prior distribution") gpr.fit(X_train, y_train) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[1]) axs[1].scatter(X_train[:, 0], y_train, color="red", zorder=10, label="Observations") axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left") axs[1].set_title("Samples from posterior distribution") fig.suptitle("Rational Quadratic kernel", fontsize=18) plt.tight_layout() print(f"Kernel parameters before fit:\n{kernel})") print( f"Kernel parameters after fit: \n{gpr.kernel_} \n" f"Log-likelihood: {gpr.log_marginal_likelihood(gpr.kernel_.theta):.3f}" ) from sklearn.gaussian_process.kernels import ExpSineSquared kernel = 1.0 * ExpSineSquared( length_scale=1.0, periodicity=3.0, length_scale_bounds=(0.1, 10.0), periodicity_bounds=(1.0, 10.0), ) gpr = GaussianProcessRegressor(kernel=kernel, random_state=0) fig, axs = plt.subplots(nrows=2, sharex=True, sharey=True, figsize=(10, 8)) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[0]) axs[0].set_title("Samples from prior distribution") gpr.fit(X_train, y_train) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[1]) axs[1].scatter(X_train[:, 0], y_train, color="red", zorder=10, label="Observations") axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left") axs[1].set_title("Samples from posterior distribution") fig.suptitle("Periodic kernel", fontsize=18) plt.tight_layout() print(f"Kernel parameters before fit:\n{kernel})") print( f"Kernel parameters after fit: \n{gpr.kernel_} \n" f"Log-likelihood: {gpr.log_marginal_likelihood(gpr.kernel_.theta):.3f}" ) from sklearn.gaussian_process.kernels import ConstantKernel, DotProduct kernel = ConstantKernel(0.1, (0.01, 10.0)) * ( DotProduct(sigma_0=1.0, sigma_0_bounds=(0.1, 10.0)) ** 2 ) gpr = GaussianProcessRegressor(kernel=kernel, random_state=0) fig, axs = plt.subplots(nrows=2, sharex=True, sharey=True, figsize=(10, 8)) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[0]) axs[0].set_title("Samples from prior distribution") gpr.fit(X_train, y_train) plot_gpr_samples(gpr, 
n_samples=n_samples, ax=axs[1]) axs[1].scatter(X_train[:, 0], y_train, color="red", zorder=10, label="Observations") axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left") axs[1].set_title("Samples from posterior distribution") fig.suptitle("Dot product kernel", fontsize=18) plt.tight_layout() print(f"Kernel parameters before fit:\n{kernel})") print( f"Kernel parameters after fit: \n{gpr.kernel_} \n" f"Log-likelihood: {gpr.log_marginal_likelihood(gpr.kernel_.theta):.3f}" ) from sklearn.gaussian_process.kernels import Matern kernel = 1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-1, 10.0), nu=1.5) gpr = GaussianProcessRegressor(kernel=kernel, random_state=0) fig, axs = plt.subplots(nrows=2, sharex=True, sharey=True, figsize=(10, 8)) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[0]) axs[0].set_title("Samples from prior distribution") gpr.fit(X_train, y_train) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[1]) axs[1].scatter(X_train[:, 0], y_train, color="red", zorder=10, label="Observations") axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left") axs[1].set_title("Samples from posterior distribution") fig.suptitle("Mattern kernel", fontsize=18) plt.tight_layout() print(f"Kernel parameters before fit:\n{kernel})") print( f"Kernel parameters after fit: \n{gpr.kernel_} \n" f"Log-likelihood: {gpr.log_marginal_likelihood(gpr.kernel_.theta):.3f}" )
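# When this example runs as a plain script (outside sphinx-gallery or an
# interactive backend), the figures built above are only rendered after an
# explicit call:
plt.show()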
test = {
  'name': '',
  'points': 1,
  'suites': [
    {
      'cases': [
        {
          'code': r"""
          >>> type(imdb_by_year) == tables.Table
          True
          >>> imdb_by_year.column('Title').take(range(3))
          array(['The Kid (1921)', 'The Gold Rush (1925)', 'The General (1926)'],
                dtype='<U75')
          """,
          'hidden': False,
          'locked': False
        },
      ],
      'scored': True,
      'setup': '',
      'teardown': '',
      'type': 'doctest'
    }
  ]
}
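# Sketch (an assumption about the grading flow, not part of the OK grader
# itself): each 'code' entry above is plain doctest text, so a case can be
# exercised locally with the standard library, provided the grading
# environment's names (tables, imdb_by_year) are defined in globals():
#
#     import doctest
#     case = test['suites'][0]['cases'][0]
#     dt = doctest.DocTestParser().get_doctest(
#         case['code'], globals(), 'case', None, 0)
#     doctest.DocTestRunner(verbose=True).run(dt)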
from __future__ import unicode_literals
from random import randint

from .. import Provider as AddressProvider


class Provider(AddressProvider):
    address_formats = ['{{street_address}}, {{city}}, {{postcode}}']
    building_number_formats = ['#', '##', '###']
    city_formats = ['{{city_prefix}} {{first_name}}']
    street_address_formats = ['{{street_name}}, {{building_number}}']
    street_name_formats = [
        '{{street_prefix}} {{last_name}}',
        '{{last_name}} {{street_suffix}}',
    ]

    city_prefixes = ['місто', 'село', 'селище', 'хутір']

    countries = [
        'Австралія', 'Австрія', 'Азербайджан', 'Албанія', 'Алжир',
        'Ангола', 'Андорра', 'Антигуа і Барбуда', 'Аргентина', 'Афганістан',
        'Багамські Острови', 'Бангладеш', 'Барбадос', 'Бахрейн', 'Беліз',
        'Бельгія', 'Бенін', 'Білорусь', 'Болгарія', 'Болівія',
        'Боснія і Герцеговина', 'Ботсвана', 'Бразилія', 'Бруней',
        'Буркіна-Фасо', 'Бурунді', 'Бутан', 'Вануату', 'Ватикан',
        'Велика Британія', 'Венесуела', 'В\'єтнам', 'Вірменія', 'Габон',
        'Гаїті', 'Гаяна', 'Гамбія', 'Гана', 'Гватемала',
        'Гвінея', 'Гвінея-Бісау', 'Гондурас', 'Гренада', 'Греція',
        'Грузія', 'Данія', 'Джибуті', 'Домініка', 'Домініканська Республіка',
        'Еквадор', 'Екваторіальна Гвінея', 'Еритрея', 'Естонія', 'Ефіопія',
        'Єгипет', 'Ємен', 'Замбія', 'Західна Сахара', 'Зімбабве',
        'Ізраїль', 'Індія', 'Індонезія', 'Ірак', 'Іран',
        'Ірландія', 'Ісландія', 'Іспанія', 'Італія', 'Йорданія',
        'Кабо-Верде', 'Казахстан', 'Камбоджа', 'Камерун', 'Канада',
        'Катар', 'Кенія', 'Киргизстан', 'КНР', 'Кіпр',
        'Кірибаті', 'Колумбія', 'Коморські Острови', 'Конго', 'ДР Конго',
        'Південна Корея', 'Північна Корея', 'Косово', 'Коста-Рика',
        'Кот-д\'Івуар', 'Куба', 'Кувейт', 'Лаос', 'Латвія',
        'Лесото', 'Литва', 'Ліберія', 'Ліван', 'Лівія',
        'Ліхтенштейн', 'Люксембург', 'Маврикій', 'Мавританія', 'Мадагаскар',
        'Республіка Македонія', 'Малаві', 'Малайзія', 'Малі', 'Мальдіви',
        'Мальта', 'Марокко', 'Маршаллові Острови', 'Мексика',
        'Федеративні Штати Мікронезії', 'Мозамбік', 'Молдова', 'Монако',
        'Монголія', 'М\'янма', 'Намібія', 'Науру', 'Непал',
        'Нігер', 'Нігерія', 'Нідерланди', 'Нікарагуа', 'Німеччина',
        'Нова Зеландія', 'Норвегія', 'ОАЕ', 'Оман', 'Пакистан',
        'Палау', 'Палестинська держава', 'Панама', 'Папуа Нова Гвінея',
        'ПАР', 'Парагвай', 'Перу', 'Південний Судан', 'Польща',
        'Португалія', 'Росія', 'Руанда', 'Румунія', 'Сальвадор',
        'Самоа', 'Сан-Марино', 'Сан-Томе і Принсіпі', 'Саудівська Аравія',
        'Свазіленд', 'Сейшельські Острови', 'Сенегал',
        'Сент-Вінсент і Гренадини', 'Сент-Кіттс і Невіс', 'Сент-Люсія',
        'Сербія', 'Сінгапур', 'Сирія', 'Словаччина', 'Словенія',
        'Соломонові Острови', 'Сомалі', 'Судан', 'Суринам', 'Східний Тимор',
        'США', 'Сьєрра-Леоне', 'Таджикистан', 'Таїланд', 'Тайвань',
        'Танзанія', 'Того', 'Тонга', 'Тринідад і Тобаго', 'Тувалу',
        'Туніс', 'Туреччина', 'Туркменістан', 'Уганда', 'Угорщина',
        'Узбекистан', 'Україна', 'Уругвай', 'Фіджі', 'Філіппіни',
        'Фінляндія', 'Франція', 'Хорватія',
        'Центральноафриканська Республіка', 'Чад', 'Чехія', 'Чилі',
        'Чорногорія', 'Швейцарія', 'Швеція', 'Шрі-Ланка', 'Ямайка', 'Японія'
    ]

    street_prefixes = [
        'вулиця', 'проспект', 'майдан', 'набережна', 'бульвар', 'провулок'
    ]
    street_suffixes = ['узвіз']

    @classmethod
    def city_prefix(cls):
        return cls.random_element(cls.city_prefixes)

    @classmethod
    def postcode(cls):
        """The code consists of five digits (01000-99999)"""
        # randint() is inclusive on both ends, so the bounds are 9 and 9999
        # to keep the code at exactly five digits.
        return '{}{}'.format(randint(0, 9), randint(1000, 9999))

    @classmethod
    def street_prefix(cls):
        return cls.random_element(cls.street_prefixes)
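# Illustrative usage (a sketch; assumes this provider is registered under the
# 'uk_UA' locale in the faker package layout, and that names come from the
# matching person provider; on older faker versions use Factory.create):
#
#     from faker import Faker
#     fake = Faker('uk_UA')
#     fake.postcode()   # e.g. '01034' -- always five digits
#     fake.address()    # e.g. 'вулиця Шевченко, 12, місто Олена, 01034'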
from oscar_vat_moss import fields

from oscar.apps.address.abstract_models import AbstractShippingAddress
from oscar.apps.address.abstract_models import AbstractBillingAddress


class ShippingAddress(AbstractShippingAddress):
    vatin = fields.vatin()


class BillingAddress(AbstractBillingAddress):
    vatin = fields.vatin()


from oscar.apps.order.models import *  # noqa
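# Note on ordering (oscar's model-customisation convention): the wildcard
# import of the stock order models must stay *below* the customised address
# models, so that oscar's dynamic model loading picks up the overridden
# classes instead of its defaults.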
from __future__ import absolute_import from digits.utils import subclass, override, constants from digits.extensions.data.interface import DataIngestionInterface from .forms import DatasetForm, InferenceForm import numpy as np import os TEMPLATE = "templates/template.html" INFERENCE_TEMPLATE = "templates/inference_template.html" @subclass class DataIngestion(DataIngestionInterface): """ A data ingestion extension for an image gradient dataset """ def __init__(self, is_inference_db=False, **kwargs): super(DataIngestion, self).__init__(**kwargs) self.userdata['is_inference_db'] = is_inference_db # Used to calculate the gradients later self.yy, self.xx = np.mgrid[:self.image_height, :self.image_width].astype('float') @override def encode_entry(self, entry): xslope, yslope = entry label = np.array([xslope, yslope]) a = xslope * 255 / self.image_width b = yslope * 255 / self.image_height image = a * (self.xx - self.image_width/2) + b * (self.yy - self.image_height/2) + 127.5 image = image.astype('uint8') # convert to 3D tensors image = image[np.newaxis, ...] label = label[np.newaxis, np.newaxis, ...] return image, label @staticmethod @override def get_category(): return "Images" @staticmethod @override def get_id(): return "image-gradients" @staticmethod @override def get_dataset_form(): return DatasetForm() @staticmethod @override def get_dataset_template(form): """ parameters: - form: form returned by get_dataset_form(). This may be populated with values if the job was cloned return: - (template, context) tuple - template is a Jinja template to use for rendering dataset creation options - context is a dictionary of context variables to use for rendering the form """ extension_dir = os.path.dirname(os.path.abspath(__file__)) template = open(os.path.join(extension_dir, TEMPLATE), "r").read() context = {'form': form} return (template, context) @override def get_inference_form(self): return InferenceForm() @staticmethod @override def get_inference_template(form): extension_dir = os.path.dirname(os.path.abspath(__file__)) template = open(os.path.join(extension_dir, INFERENCE_TEMPLATE), "r").read() context = {'form': form} return (template, context) @staticmethod @override def get_title(): return "Gradients" @override def itemize_entries(self, stage): count = 0 if self.userdata['is_inference_db']: if stage == constants.TEST_DB: if self.test_image_count: count = self.test_image_count else: return [(self.gradient_x, self.gradient_y)] else: if stage == constants.TRAIN_DB: count = self.train_image_count elif stage == constants.VAL_DB: count = self.val_image_count elif stage == constants.TEST_DB: count = self.test_image_count return [np.random.random_sample(2) - 0.5 for i in xrange(count)] if count > 0 else []
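# --------------------------------------------------------------------------
# Illustrative sketch (not part of the extension above): the gradient image
# that encode_entry() builds for one (xslope, yslope) entry, reproduced as
# standalone numpy code. The slope values below are hypothetical examples
# from the [-0.5, 0.5) range that itemize_entries() samples.
# --------------------------------------------------------------------------
import numpy as np

image_height, image_width = 32, 32
xslope, yslope = 0.25, -0.1

yy, xx = np.mgrid[:image_height, :image_width].astype('float')
a = xslope * 255 / image_width
b = yslope * 255 / image_height
# A plane tilted by (a, b) around mid-grey (127.5), as in encode_entry().
image = a * (xx - image_width / 2) + b * (yy - image_height / 2) + 127.5
image = image.astype('uint8')
assert image.shape == (image_height, image_width)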
import datetime import sys import time from typing import Any, Union from pyshark.packet.fields import LayerFieldsContainer, LayerField from pyshark.packet.packet import Packet as RawPacket from pktverify.addrs import EthAddr, ExtAddr, Ipv6Addr from pktverify.bytes import Bytes from pktverify.consts import VALID_LAYER_NAMES from pktverify.null_field import nullField def _auto(v: Union[LayerFieldsContainer, LayerField]): """parse the layer field automatically according to its format""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 or v.get_default_value() is not None, v.fields dv = v.get_default_value() rv = v.raw_value if dv.startswith('0x'): return int(dv, 16) try: if dv == rv: return int(dv) elif int(dv) == int(rv, 16): return int(dv) except (ValueError, TypeError): pass if rv is None: try: return int(dv) except (ValueError, TypeError): pass if ':' in dv and '::' not in dv and dv.replace(':', '') == rv: # '88:00', '8800' return int(rv, 16) # timestamp: 'Jan 1, 1970 08:00:00.000000000 CST', '0000000000000000' # convert to seconds from 1970, ignore the nanosecond for now since # there are integer seconds applied in the test cases try: time_str = datetime.datetime.strptime(dv, "%b %d, %Y %H:%M:%S.%f000 %Z") time_in_sec = time.mktime(time_str.utctimetuple()) return int(time_in_sec) except (ValueError, TypeError): pass try: int(rv, 16) return int(dv) except Exception: pass raise ValueError((v, v.get_default_value(), v.raw_value)) def _payload(v: Union[LayerFieldsContainer, LayerField]) -> bytearray: """parse the layer field as a bytearray""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 hex_value = v.raw_value assert len(hex_value) % 2 == 0 s = bytearray() for i in range(0, len(hex_value), 2): s.append(int(hex_value[i:i + 2], 16)) return s def _hex(v: Union[LayerFieldsContainer, LayerField]) -> int: """parse the layer field as a hex string""" # split v into octets and reverse the order assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return int(v.get_default_value(), 16) def _raw_hex(v: Union[LayerFieldsContainer, LayerField]) -> int: """parse the layer field as a raw hex string""" # split v into octets and reverse the order assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 iv = v.hex_value try: int(v.get_default_value()) assert int(v.get_default_value()) == iv, (v.get_default_value(), v.raw_value) except ValueError: pass try: int(v.get_default_value(), 16) assert int(v.get_default_value(), 16) == iv, (v.get_default_value(), v.raw_value) except ValueError: pass return iv def _raw_hex_rev(v: Union[LayerFieldsContainer, LayerField]) -> int: """parse the layer field as a reversed raw hex string""" # split v into octets and reverse the order assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 rv = v.raw_value octets = [rv[i:i + 2] for i in range(0, len(rv), 2)] iv = int(''.join(reversed(octets)), 16) try: int(v.get_default_value()) assert int(v.get_default_value()) == iv, (v.get_default_value(), v.raw_value) except ValueError: pass try: int(v.get_default_value(), 16) assert int(v.get_default_value(), 16) == iv, (v.get_default_value(), v.raw_value) except ValueError: pass return iv def _dec(v: Union[LayerFieldsContainer, LayerField]) -> int: """parse the layer field as a decimal""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return int(v.get_default_value()) def _float(v: Union[LayerFieldsContainer, LayerField]) -> float: """parse the layer field as a float""" assert not 
isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return float(v.get_default_value()) def _str(v: Union[LayerFieldsContainer, LayerField]) -> str: """parse the layer field as a string""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return str(v.get_default_value()) def _bytes(v: Union[LayerFieldsContainer, LayerField]) -> Bytes: """parse the layer field as raw bytes""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return Bytes(v.raw_value) def _ext_addr(v: Union[LayerFieldsContainer, LayerField]) -> ExtAddr: """parse the layer field as an extended address""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return ExtAddr(v.get_default_value()) def _ipv6_addr(v: Union[LayerFieldsContainer, LayerField]) -> Ipv6Addr: """parse the layer field as an IPv6 address""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return Ipv6Addr(v.get_default_value()) def _eth_addr(v: Union[LayerFieldsContainer, LayerField]) -> EthAddr: """parse the layer field as an Ethernet MAC address""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1, v.fields return EthAddr(v.get_default_value()) def _routerid_set(v: Union[LayerFieldsContainer, LayerField]) -> set: """parse the layer field as a set of router ids Notes: the router ID mask in wireshark is a hexadecimal string separated by ':' """ assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 rid_set = set() try: ridmask = str(v.get_default_value()) assert isinstance(ridmask, str), ridmask ridmask_int = int(ridmask.replace(':', ''), base=16) count = 0 while ridmask_int: count += 1 if ridmask_int & 1: rid_set.add(64 - count) ridmask_int = ridmask_int >> 1 except ValueError: pass return rid_set class _first(object): """parse the first layer field""" def __init__(self, sub_parse): self._sub_parse = sub_parse def __call__(self, v: Union[LayerFieldsContainer, LayerField]): return self._sub_parse(v.fields[0]) class _list(object): """parse all layer fields into a list""" def __init__(self, sub_parse): self._sub_parse = sub_parse def __call__(self, v: Union[LayerFieldsContainer, LayerField]): return [self._sub_parse(f) for f in v.fields] _LAYER_FIELDS = { # WPAN 'wpan.fcf': _raw_hex_rev, 'wpan.cmd': _auto, 'wpan.security': _auto, 'wpan.frame_type': _auto, 'wpan.pending': _auto, 'wpan.ack_request': _auto, 'wpan.pan_id_compression': _auto, 'wpan.seqno_suppression': _auto, 'wpan.ie_present': _auto, 'wpan.dst_addr_mode': _auto, 'wpan.version': _auto, 'wpan.src_addr_mode': _auto, 'wpan.dst_pan': _auto, 'wpan.seq_no': _auto, 'wpan.src16': _auto, 'wpan.dst16': _auto, 'wpan.src64': _ext_addr, 'wpan.dst64': _ext_addr, 'wpan.fcs': _raw_hex_rev, 'wpan.fcs_ok': _auto, 'wpan.frame_length': _dec, 'wpan.key_number': _auto, 'wpan.aux_sec.sec_suite': _auto, 'wpan.aux_sec.security_control_field': _auto, 'wpan.aux_sec.sec_level': _auto, 'wpan.aux_sec.key_id_mode': _auto, 'wpan.aux_sec.frame_counter_suppression': _auto, 'wpan.aux_sec.asn_in_nonce': _auto, 'wpan.aux_sec.reserved': _auto, 'wpan.aux_sec.frame_counter': _auto, 'wpan.aux_sec.key_source': _auto, 'wpan.aux_sec.key_index': _auto, 'wpan.aux_sec.hdr': _str, 'wpan.mic': _auto, 'wpan.channel': _auto, 'wpan.header_ie.id': _list(_auto), 'wpan.header_ie.csl.period': _auto, 'wpan.payload_ie.vendor.oui': _auto, # MLE 'mle.cmd': _auto, 'mle.sec_suite': _hex, 'mle.tlv.type': _list(_dec), 'mle.tlv.len': _list(_dec), 'mle.tlv.mode.receiver_on_idle': _auto, 'mle.tlv.mode.reserved1': _auto, 'mle.tlv.mode.reserved2': _auto, 
'mle.tlv.mode.device_type_bit': _auto, 'mle.tlv.mode.network_data': _auto, 'mle.tlv.challenge': _bytes, 'mle.tlv.scan_mask.r': _auto, 'mle.tlv.scan_mask.e': _auto, 'mle.tlv.version': _auto, 'mle.tlv.source_addr': _auto, 'mle.tlv.active_tstamp': _auto, 'mle.tlv.pending_tstamp': _auto, 'mle.tlv.leader_data.partition_id': _auto, 'mle.tlv.leader_data.weighting': _auto, 'mle.tlv.leader_data.data_version': _auto, 'mle.tlv.leader_data.stable_data_version': _auto, 'mle.tlv.leader_data.router_id': _auto, 'mle.tlv.route64.nbr_out': _list(_auto), 'mle.tlv.route64.nbr_in': _list(_auto), 'mle.tlv.route64.id_seq': _auto, 'mle.tlv.route64.id_mask': _routerid_set, 'mle.tlv.route64.cost': _list(_auto), 'mle.tlv.response': _bytes, 'mle.tlv.mle_frm_cntr': _auto, 'mle.tlv.ll_frm_cntr': _auto, 'mle.tlv.link_margin': _auto, 'mle.tlv.conn.sed_dgram_cnt': _auto, 'mle.tlv.conn.sed_buf_size': _auto, 'mle.tlv.conn.lq3': _auto, 'mle.tlv.conn.lq2': _auto, 'mle.tlv.conn.lq1': _auto, 'mle.tlv.conn.leader_cost': _auto, 'mle.tlv.conn.id_seq': _auto, 'mle.tlv.conn.flags.pp': _auto, 'mle.tlv.conn.active_rtrs': _auto, 'mle.tlv.timeout': _auto, 'mle.tlv.addr16': _auto, 'mle.tlv.channel': _auto, 'mle.tlv.addr_reg_iid': _list(_auto), 'mle.tlv.link_enh_ack_flags': _auto, 'mle.tlv.link_forward_series': _list(_auto), 'mle.tlv.link_sub_tlv': _auto, 'mle.tlv.link_status_sub_tlv': _auto, 'mle.tlv.query_id': _auto, 'mle.tlv.metric_type_id_flags.type': _list(_hex), 'mle.tlv.metric_type_id_flags.metric': _list(_hex), 'mle.tlv.metric_type_id_flags.l': _list(_hex), 'mle.tlv.link_requested_type_id_flags': _bytes, # IP 'ip.version': _auto, 'ip.src': _str, 'ip.src_host': _str, 'ip.dst': _str, 'ip.dst_host': _str, 'ip.ttl': _auto, 'ip.proto': _auto, 'ip.len': _auto, 'ip.id': _auto, 'ip.host': _list(_str), 'ip.hdr_len': _dec, 'ip.frag_offset': _auto, 'ip.flags.rb': _auto, 'ip.flags.mf': _auto, 'ip.flags.df': _auto, 'ip.dsfield.ecn': _auto, 'ip.dsfield.dscp': _auto, 'ip.checksum.status': _auto, 'ip.addr': _list(_str), 'ip.options.routeralert': _bytes, 'ip.opt.type.number': _auto, 'ip.opt.type.copy': _auto, 'ip.opt.type.class': _auto, 'ip.opt.ra': _auto, 'ip.opt.len': _auto, # UDP 'udp.stream': _auto, 'udp.srcport': _auto, 'udp.dstport': _auto, 'udp.length': _auto, 'udp.port': _list(_dec), 'udp.checksum.status': _auto, # IPv6 'ipv6.version': _auto, 'ipv6.src': _ipv6_addr, 'ipv6.src_host': _ipv6_addr, 'ipv6.dst': _ipv6_addr, 'ipv6.dst_host': _ipv6_addr, 'ipv6.addr': _list(_ipv6_addr), 'ipv6.tclass.dscp': _auto, 'ipv6.tclass.ecn': _auto, 'ipv6.flow': _auto, 'ipv6.hlim': _auto, 'ipv6.nxt': _auto, 'ipv6.hopopts.len': _auto, 'ipv6.hopopts.nxt': _auto, 'ipv6.hopopts.len_oct': _dec, 'ipv6.host': _list(_ipv6_addr), 'ipv6.plen': _auto, 'ipv6.opt.type.rest': _list(_auto), 'ipv6.opt.type.change': _list(_auto), 'ipv6.opt.type.action': _list(_auto), 'ipv6.opt.router_alert': _auto, 'ipv6.opt.padn': _str, 'ipv6.opt.length': _list(_auto), 'ipv6.opt.mpl.seed_id': _bytes, 'ipv6.opt.mpl.sequence': _auto, 'ipv6.opt.mpl.flag.v': _auto, 'ipv6.opt.mpl.flag.s': _auto, 'ipv6.opt.mpl.flag.rsv': _auto, 'ipv6.opt.mpl.flag.m': _auto, # Eth 'eth.src': _eth_addr, 'eth.src_resolved': _eth_addr, 'eth.dst': _eth_addr, 'eth.dst_resolved': _eth_addr, 'eth.type': _auto, 'eth.addr': _list(_eth_addr), 'eth.addr_resolved': _list(_eth_addr), 'eth.ig': _list(_auto), 'eth.lg': _list(_auto), # 6LOWPAN '6lowpan.src': _ipv6_addr, '6lowpan.dst': _ipv6_addr, '6lowpan.udp.src': _auto, '6lowpan.udp.dst': _auto, '6lowpan.udp.checksum': _auto, 
'6lowpan.frag.offset': _auto, '6lowpan.frag.tag': _auto, '6lowpan.frag.size': _auto, '6lowpan.pattern': _list(_auto), '6lowpan.hops': _auto, '6lowpan.padding': _auto, '6lowpan.next': _auto, '6lowpan.flow': _auto, '6lowpan.ecn': _auto, '6lowpan.iphc.tf': _auto, '6lowpan.iphc.m': _auto, '6lowpan.iphc.nh': _auto, '6lowpan.iphc.hlim': _auto, '6lowpan.iphc.cid': _auto, '6lowpan.iphc.sac': _auto, '6lowpan.iphc.sam': _auto, '6lowpan.iphc.dac': _auto, '6lowpan.iphc.dam': _auto, '6lowpan.iphc.sci': _auto, '6lowpan.iphc.dci': _auto, '6lowpan.iphc.sctx.prefix': _ipv6_addr, '6lowpan.iphc.dctx.prefix': _ipv6_addr, '6lowpan.mesh.v': _auto, '6lowpan.nhc.pattern': _list(_auto), '6lowpan.nhc.udp.checksum': _auto, '6lowpan.nhc.udp.ports': _auto, '6lowpan.nhc.ext.nh': _auto, '6lowpan.nhc.ext.length': _auto, '6lowpan.nhc.ext.eid': _auto, '6lowpan.reassembled.length': _auto, '6lowpan.fragments': _str, '6lowpan.fragment.count': _auto, '6lowpan.mesh.orig16': _auto, '6lowpan.mesh.hops8': _auto, '6lowpan.mesh.hops': _auto, '6lowpan.mesh.f': _auto, '6lowpan.mesh.dest16': _auto, # ICMPv6 'icmpv6.type': _first(_auto), 'icmpv6.code': _first(_auto), 'icmpv6.checksum': _first(_auto), 'icmpv6.reserved': _raw_hex, 'icmpv6.resptime': _float, 'icmpv6.resp_to': _auto, 'icmpv6.mldr.nb_mcast_records': _auto, 'icmpv6.nd.ra.cur_hop_limit': _auto, 'icmpv6.nd.ns.target_address': _ipv6_addr, 'icmpv6.nd.na.target_address': _ipv6_addr, 'icmpv6.nd.na.flag.s': _auto, 'icmpv6.nd.na.flag.o': _auto, 'icmpv6.nd.na.flag.r': _auto, 'icmpv6.nd.na.flag.rsv': _auto, 'icmpv6.mldr.mar.record_type': _list(_auto), 'icmpv6.mldr.mar.aux_data_len': _list(_auto), 'icmpv6.mldr.mar.nb_sources': _list(_auto), 'icmpv6.mldr.mar.multicast_address': _list(_ipv6_addr), 'icmpv6.opt.type': _list(_auto), 'icmpv6.opt.nonce': _bytes, 'icmpv6.opt.linkaddr': _eth_addr, 'icmpv6.opt.src_linkaddr': _eth_addr, 'icmpv6.opt.target_linkaddr': _eth_addr, 'icmpv6.opt.route_lifetime': _auto, 'icmpv6.opt.route_info.flag.route_preference': _auto, 'icmpv6.opt.route_info.flag.reserved': _auto, 'icmpv6.opt.prefix.valid_lifetime': _auto, 'icmpv6.opt.prefix.preferred_lifetime': _auto, 'icmpv6.opt.prefix.length': _list(_auto), 'icmpv6.opt.prefix.flag.reserved': _auto, 'icmpv6.opt.prefix.flag.r': _auto, 'icmpv6.opt.prefix.flag.l': _auto, 'icmpv6.opt.prefix.flag.a': _auto, 'icmpv6.opt.length': _list(_auto), 'icmpv6.opt.reserved': _str, 'icmpv6.nd.ra.router_lifetime': _auto, 'icmpv6.nd.ra.retrans_timer': _auto, 'icmpv6.nd.ra.reachable_time': _auto, 'icmpv6.nd.ra.flag.rsv': _auto, 'icmpv6.nd.ra.flag.prf': _auto, 'icmpv6.nd.ra.flag.p': _auto, 'icmpv6.nd.ra.flag.o': _auto, 'icmpv6.nd.ra.flag.m': _auto, 'icmpv6.nd.ra.flag.h': _auto, 'icmpv6.echo.sequence_number': _auto, 'icmpv6.echo.identifier': _auto, 'icmpv6.data.len': _auto, # COAP 'coap.code': _auto, 'coap.version': _auto, 'coap.type': _auto, 'coap.mid': _auto, 'coap.token_len': _auto, 'coap.token': _auto, 'coap.opt.uri_path': _list(_str), 'coap.opt.name': _list(_str), 'coap.opt.length': _list(_auto), 'coap.opt.uri_path_recon': _str, 'coap.payload': _payload, 'coap.payload_length': _auto, 'coap.payload_desc': _str, 'coap.opt.end_marker': _auto, 'coap.opt.desc': _list(_str), 'coap.opt.delta': _list(_auto), 'coap.response_to': _auto, 'coap.response_time': _float, # COAP TLVS 'coap.tlv.type': _list(_auto), 'coap.tlv.status': _auto, 'coap.tlv.target_eid': _ipv6_addr, 'coap.tlv.ml_eid': _ext_addr, 'coap.tlv.last_transaction_time': _auto, 'coap.tlv.rloc16': _auto, 'coap.tlv.net_name': _str, 'coap.tlv.ext_mac_addr': _ext_addr, 
'coap.tlv.router_mask_assigned': _auto, 'coap.tlv.router_mask_id_seq': _auto, # dtls 'dtls.handshake.type': _list(_auto), 'dtls.handshake.cookie': _auto, 'dtls.record.content_type': _list(_auto), 'dtls.alert_message.desc': _auto, # thread beacon 'thread_bcn.protocol': _auto, 'thread_bcn.version': _auto, 'thread_bcn.network_name': _str, 'thread_bcn.epid': _ext_addr, # thread_address 'thread_address.tlv.len': _list(_auto), 'thread_address.tlv.type': _list(_auto), 'thread_address.tlv.status': _auto, 'thread_address.tlv.target_eid': _ipv6_addr, 'thread_address.tlv.ext_mac_addr': _ext_addr, 'thread_address.tlv.router_mask_id_seq': _auto, 'thread_address.tlv.router_mask_assigned': _bytes, 'thread_address.tlv.rloc16': _hex, 'thread_address.tlv.target_eid': _ipv6_addr, 'thread_address.tlv.ml_eid': _ext_addr, # thread bl 'thread_bl.tlv.type': _list(_auto), 'thread_bl.tlv.len': _list(_auto), 'thread_bl.tlv.target_eid': _ipv6_addr, 'thread_bl.tlv.ml_eid': _ext_addr, 'thread_bl.tlv.last_transaction_time': _auto, 'thread_bl.tlv.timeout': _auto, # THEAD NM 'thread_nm.tlv.type': _list(_auto), 'thread_nm.tlv.ml_eid': _ext_addr, 'thread_nm.tlv.target_eid': _ipv6_addr, 'thread_nm.tlv.status': _auto, 'thread_nm.tlv.timeout': _auto, # thread_meshcop is not a real layer 'thread_meshcop.len_size_mismatch': _str, 'thread_meshcop.tlv.type': _list(_auto), 'thread_meshcop.tlv.len8': _list(_auto), 'thread_meshcop.tlv.net_name': _list(_str), # from thread_bl 'thread_meshcop.tlv.commissioner_id': _str, 'thread_meshcop.tlv.commissioner_sess_id': _auto, # from mle "thread_meshcop.tlv.channel_page": _auto, # from ble "thread_meshcop.tlv.channel": _list(_auto), # from ble "thread_meshcop.tlv.chan_mask": _str, # from ble 'thread_meshcop.tlv.chan_mask_page': _auto, 'thread_meshcop.tlv.chan_mask_len': _auto, 'thread_meshcop.tlv.chan_mask_mask': _bytes, 'thread_meshcop.tlv.discovery_req_ver': _auto, 'thread_meshcop.tlv.discovery_rsp_ver': _auto, 'thread_meshcop.tlv.discovery_rsp_n': _auto, 'thread_meshcop.tlv.energy_list': _list(_auto), 'thread_meshcop.tlv.pan_id': _list(_auto), 'thread_meshcop.tlv.xpan_id': _bytes, 'thread_meshcop.tlv.ml_prefix': _bytes, 'thread_meshcop.tlv.master_key': _bytes, 'thread_meshcop.tlv.pskc': _bytes, 'thread_meshcop.tlv.sec_policy_rot': _auto, 'thread_meshcop.tlv.sec_policy_o': _auto, 'thread_meshcop.tlv.sec_policy_n': _auto, 'thread_meshcop.tlv.sec_policy_r': _auto, 'thread_meshcop.tlv.sec_policy_c': _auto, 'thread_meshcop.tlv.sec_policy_b': _auto, 'thread_meshcop.tlv.state': _auto, 'thread_meshcop.tlv.steering_data': _bytes, 'thread_meshcop.tlv.unknown': _bytes, 'thread_meshcop.tlv.udp_port': _list(_auto), 'thread_meshcop.tlv.ba_locator': _auto, 'thread_meshcop.tlv.jr_locator': _auto, 'thread_meshcop.tlv.active_tstamp': _auto, 'thread_meshcop.tlv.pending_tstamp': _auto, 'thread_meshcop.tlv.delay_timer': _auto, 'thread_meshcop.tlv.ipv6_addr': _list(_ipv6_addr), # THREAD NWD 'thread_nwd.tlv.type': _list(_auto), 'thread_nwd.tlv.len': _list(_auto), 'thread_nwd.tlv.stable': _list(_auto), 'thread_nwd.tlv.service.t': _auto, 'thread_nwd.tlv.service.s_id': _auto, 'thread_nwd.tlv.service.s_data_len': _auto, 'thread_nwd.tlv.service.s_data.seqno': _auto, 'thread_nwd.tlv.service.s_data.rrdelay': _auto, 'thread_nwd.tlv.service.s_data.mlrtimeout': _auto, 'thread_nwd.tlv.server_16': _list(_auto), 'thread_nwd.tlv.border_router_16': _list(_auto), 'thread_nwd.tlv.sub_tlvs': _list(_str), # TODO: support thread_nwd.tlv.prefix.length and thread_nwd.tlv.prefix.domain_id 'thread_nwd.tlv.prefix': _list(_ipv6_addr), 
    'thread_nwd.tlv.border_router.pref': _auto,
    'thread_nwd.tlv.border_router.flag.s': _list(_auto),
    'thread_nwd.tlv.border_router.flag.r': _list(_auto),
    'thread_nwd.tlv.border_router.flag.p': _list(_auto),
    'thread_nwd.tlv.border_router.flag.o': _list(_auto),
    'thread_nwd.tlv.border_router.flag.n': _list(_auto),
    'thread_nwd.tlv.border_router.flag.dp': _list(_auto),
    'thread_nwd.tlv.border_router.flag.d': _list(_auto),
    'thread_nwd.tlv.border_router.flag.c': _list(_auto),
    'thread_nwd.tlv.6co.flag.reserved': _auto,
    'thread_nwd.tlv.6co.flag.cid': _auto,
    'thread_nwd.tlv.6co.flag.c': _list(_auto),
    'thread_nwd.tlv.6co.context_length': _auto,
    # Thread Diagnostic
    'thread_diagnostic.tlv.type': _list(_auto),
    'thread_diagnostic.tlv.len8': _list(_auto),
    'thread_diagnostic.tlv.general': _list(_str),
    # DNS
    'dns.resp.ttl': _auto,
    'dns.flags.response': _auto,
}

_layer_containers = set()

for key in _LAYER_FIELDS:
    assert key.strip() == key and ' ' not in key, key
    secs = key.split('.')
    assert len(secs) >= 2
    assert secs[0] in VALID_LAYER_NAMES, secs[0]
    for i in range(len(secs) - 2):
        path = secs[0] + '.' + '.'.join(secs[1:i + 2])
        assert path not in _LAYER_FIELDS, '%s cannot be both field and path' % path
        _layer_containers.add(path)


def is_layer_field(uri: str) -> bool:
    """
    Returns whether the URI is a valid layer field.

    :param uri: The layer field URI.
    """
    return uri in _LAYER_FIELDS


def is_layer_field_container(uri: str) -> bool:
    """
    Returns whether the URI is a valid layer field container.

    :param uri: The layer field container URI.
    """
    return uri in _layer_containers


def get_layer_field(packet: RawPacket, field_uri: str) -> Any:
    """
    Get a given layer field from the packet.

    :param packet: The packet.
    :param field_uri: The layer field URI.
    :return: The specified layer field.
    """
    assert isinstance(packet, RawPacket)
    secs = field_uri.split('.')
    layer_depth = 0
    layer_name = secs[0]
    if layer_name.endswith('inner'):
        layer_name = layer_name[:-len('inner')]
        field_uri = '.'.join([layer_name] + secs[1:])
        layer_depth = 1

    if is_layer_field(field_uri):
        candidate_layers = _get_candidate_layers(packet, layer_name)
        for layers in candidate_layers:
            if layer_depth >= len(layers):
                continue
            layer = layers[layer_depth]
            v = layer.get_field(field_uri)
            if v is not None:
                try:
                    v = _LAYER_FIELDS[field_uri](v)
                    print("[%s = %r] " % (field_uri, v), file=sys.stderr)
                    return v
                except Exception as ex:
                    raise ValueError('cannot parse field %s = %r' %
                                     (field_uri, (v.get_default_value(), v.raw_value))) from ex

        print("[%s = %s] " % (field_uri, "null"), file=sys.stderr)
        return nullField
    elif is_layer_field_container(field_uri):
        from pktverify.layer_fields_container import LayerFieldsContainer
        return LayerFieldsContainer(packet, field_uri)
    else:
        raise NotImplementedError('Field %s is not valid, please add it to `_LAYER_FIELDS`' % field_uri)


def check_layer_field_exists(packet, field_uri):
    """
    Check if a given layer field URI exists in the packet.

    :param packet: The packet to check.
    :param field_uri: The layer field URI.
    :return: Whether the layer field URI exists in the packet.
""" assert isinstance(packet, RawPacket) secs = field_uri.split('.') layer_name = secs[0] if not is_layer_field(field_uri) and not is_layer_field_container(field_uri): raise NotImplementedError('%s is neither a field or field container' % field_uri) candidate_layers = _get_candidate_layers(packet, layer_name) for layers in candidate_layers: for layer in layers: for k, v in layer._all_fields.items(): if k == field_uri or k.startswith(field_uri + '.'): return True return False def _get_candidate_layers(packet, layer_name): if layer_name == 'thread_meshcop': candidate_layer_names = ['thread_meshcop', 'mle', 'coap', 'thread_bl', 'thread_nm'] elif layer_name == 'thread_nwd': candidate_layer_names = ['mle', 'thread_address', 'thread_diagnostic'] elif layer_name == 'wpan': candidate_layer_names = ['wpan', 'mle'] elif layer_name == 'ip': candidate_layer_names = ['ip', 'ipv6'] elif layer_name == 'thread_bcn': candidate_layer_names = ['thread_bcn'] else: candidate_layer_names = [layer_name] layers = [] for ln in candidate_layer_names: if hasattr(packet, ln): layers.append(packet.get_multiple_layers(ln)) return layers
from __future__ import unicode_literals import warnings from django.core.checks import Error, Warning as DjangoWarning from django.db import models from django.db.models.fields.related import ForeignObject from django.test import ignore_warnings from django.test.testcases import SimpleTestCase, skipIfDBFeature from django.test.utils import isolate_apps, override_settings from django.utils import six from django.utils.deprecation import RemovedInDjango20Warning from django.utils.version import get_docs_version @isolate_apps('invalid_models_tests') class RelativeFieldTests(SimpleTestCase): def test_valid_foreign_key_without_accessor(self): class Target(models.Model): # There would be a clash if Model.field installed an accessor. model = models.IntegerField() class Model(models.Model): field = models.ForeignKey(Target, models.CASCADE, related_name='+') field = Model._meta.get_field('field') errors = field.check() self.assertEqual(errors, []) @ignore_warnings(category=RemovedInDjango20Warning) def test_valid_foreign_key_without_on_delete(self): class Target(models.Model): model = models.IntegerField() class Model(models.Model): field = models.ForeignKey(Target, related_name='+') def test_foreign_key_without_on_delete_warning(self): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter('always') # prevent warnings from appearing as errors class Target(models.Model): model = models.IntegerField() class Model(models.Model): field = models.ForeignKey(Target, related_name='+') self.assertEqual(len(warns), 1) self.assertEqual( str(warns[0].message), 'on_delete will be a required arg for ForeignKey in Django ' '2.0. Set it to models.CASCADE on models and in existing ' 'migrations if you want to maintain the current default ' 'behavior. See https://docs.djangoproject.com/en/%s/ref/models/fields/' '#django.db.models.ForeignKey.on_delete' % get_docs_version(), ) def test_foreign_key_to_field_as_arg(self): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter('always') # prevent warnings from appearing as errors class Target(models.Model): model = models.IntegerField() class Model(models.Model): field = models.ForeignKey(Target, 'id') self.assertEqual(len(warns), 1) self.assertEqual( str(warns[0].message), "The signature for ForeignKey will change in Django 2.0. " "Pass to_field='id' as a kwarg instead of as an arg." ) def test_one_to_one_field_without_on_delete_warning(self): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter('always') # prevent warnings from appearing as errors class Target(models.Model): model = models.IntegerField() class Model(models.Model): field = models.OneToOneField(Target, related_name='+') self.assertEqual(len(warns), 1) self.assertEqual( str(warns[0].message), 'on_delete will be a required arg for OneToOneField in Django ' '2.0. Set it to models.CASCADE on models and in existing ' 'migrations if you want to maintain the current default ' 'behavior. See https://docs.djangoproject.com/en/%s/ref/models/fields/' '#django.db.models.ForeignKey.on_delete' % get_docs_version(), ) def test_one_to_one_field_to_field_as_arg(self): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter('always') # prevent warnings from appearing as errors class Target(models.Model): model = models.IntegerField() class Model(models.Model): field = models.OneToOneField(Target, 'id') self.assertEqual(len(warns), 1) self.assertEqual( str(warns[0].message), "The signature for OneToOneField will change in Django 2.0. 
" "Pass to_field='id' as a kwarg instead of as an arg." ) def test_foreign_key_to_missing_model(self): # Model names are resolved when a model is being created, so we cannot # test relative fields in isolation and we need to attach them to a # model. class Model(models.Model): foreign_key = models.ForeignKey('Rel1', models.CASCADE) field = Model._meta.get_field('foreign_key') errors = field.check() expected = [ Error( ("Field defines a relation with model 'Rel1', " "which is either not installed, or is abstract."), hint=None, obj=field, id='fields.E300', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') def test_foreign_key_to_isolate_apps_model(self): """ #25723 - Referenced model registration lookup should be run against the field's model registry. """ class OtherModel(models.Model): pass class Model(models.Model): foreign_key = models.ForeignKey('OtherModel', models.CASCADE) field = Model._meta.get_field('foreign_key') self.assertEqual(field.check(from_model=Model), []) def test_many_to_many_to_missing_model(self): class Model(models.Model): m2m = models.ManyToManyField("Rel2") field = Model._meta.get_field('m2m') errors = field.check(from_model=Model) expected = [ Error( ("Field defines a relation with model 'Rel2', " "which is either not installed, or is abstract."), hint=None, obj=field, id='fields.E300', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') def test_many_to_many_to_isolate_apps_model(self): """ #25723 - Referenced model registration lookup should be run against the field's model registry. """ class OtherModel(models.Model): pass class Model(models.Model): m2m = models.ManyToManyField('OtherModel') field = Model._meta.get_field('m2m') self.assertEqual(field.check(from_model=Model), []) def test_many_to_many_with_useless_options(self): class Model(models.Model): name = models.CharField(max_length=20) class ModelM2M(models.Model): m2m = models.ManyToManyField(Model, null=True, validators=['']) errors = ModelM2M.check() field = ModelM2M._meta.get_field('m2m') expected = [ DjangoWarning( 'null has no effect on ManyToManyField.', hint=None, obj=field, id='fields.W340', ) ] expected.append( DjangoWarning( 'ManyToManyField does not support validators.', hint=None, obj=field, id='fields.W341', ) ) self.assertEqual(errors, expected) def test_ambiguous_relationship_model(self): class Person(models.Model): pass class Group(models.Model): field = models.ManyToManyField('Person', through="AmbiguousRelationship", related_name='tertiary') class AmbiguousRelationship(models.Model): # Too much foreign keys to Person. first_person = models.ForeignKey(Person, models.CASCADE, related_name="first") second_person = models.ForeignKey(Person, models.CASCADE, related_name="second") second_model = models.ForeignKey(Group, models.CASCADE) field = Group._meta.get_field('field') errors = field.check(from_model=Group) expected = [ Error( ("The model is used as an intermediate model by " "'invalid_models_tests.Group.field', but it has more than one " "foreign key to 'Person', which is ambiguous. 
You must specify " "which foreign key Django should use via the through_fields " "keyword argument."), hint=('If you want to create a recursive relationship, use ' 'ForeignKey("self", symmetrical=False, ' 'through="AmbiguousRelationship").'), obj=field, id='fields.E335', ), ] self.assertEqual(errors, expected) def test_relationship_model_with_foreign_key_to_wrong_model(self): class WrongModel(models.Model): pass class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField('Person', through="InvalidRelationship") class InvalidRelationship(models.Model): person = models.ForeignKey(Person, models.CASCADE) wrong_foreign_key = models.ForeignKey(WrongModel, models.CASCADE) # The last foreign key should point to Group model. field = Group._meta.get_field('members') errors = field.check(from_model=Group) expected = [ Error( ("The model is used as an intermediate model by " "'invalid_models_tests.Group.members', but it does not " "have a foreign key to 'Group' or 'Person'."), hint=None, obj=InvalidRelationship, id='fields.E336', ), ] self.assertEqual(errors, expected) def test_relationship_model_missing_foreign_key(self): class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField('Person', through="InvalidRelationship") class InvalidRelationship(models.Model): group = models.ForeignKey(Group, models.CASCADE) # No foreign key to Person field = Group._meta.get_field('members') errors = field.check(from_model=Group) expected = [ Error( ("The model is used as an intermediate model by " "'invalid_models_tests.Group.members', but it does not have " "a foreign key to 'Group' or 'Person'."), hint=None, obj=InvalidRelationship, id='fields.E336', ), ] self.assertEqual(errors, expected) def test_missing_relationship_model(self): class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField('Person', through="MissingM2MModel") field = Group._meta.get_field('members') errors = field.check(from_model=Group) expected = [ Error( ("Field specifies a many-to-many relation through model " "'MissingM2MModel', which has not been installed."), hint=None, obj=field, id='fields.E331', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') def test_many_to_many_through_isolate_apps_model(self): """ #25723 - Through model registration lookup should be run against the field's model registry. """ class GroupMember(models.Model): person = models.ForeignKey('Person', models.CASCADE) group = models.ForeignKey('Group', models.CASCADE) class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField('Person', through='GroupMember') field = Group._meta.get_field('members') self.assertEqual(field.check(from_model=Group), []) def test_symmetrical_self_referential_field(self): class Person(models.Model): # Implicit symmetrical=False. 
friends = models.ManyToManyField('self', through="Relationship") class Relationship(models.Model): first = models.ForeignKey(Person, models.CASCADE, related_name="rel_from_set") second = models.ForeignKey(Person, models.CASCADE, related_name="rel_to_set") field = Person._meta.get_field('friends') errors = field.check(from_model=Person) expected = [ Error( 'Many-to-many fields with intermediate tables must not be symmetrical.', hint=None, obj=field, id='fields.E332', ), ] self.assertEqual(errors, expected) def test_too_many_foreign_keys_in_self_referential_model(self): class Person(models.Model): friends = models.ManyToManyField('self', through="InvalidRelationship", symmetrical=False) class InvalidRelationship(models.Model): first = models.ForeignKey(Person, models.CASCADE, related_name="rel_from_set_2") second = models.ForeignKey(Person, models.CASCADE, related_name="rel_to_set_2") third = models.ForeignKey(Person, models.CASCADE, related_name="too_many_by_far") field = Person._meta.get_field('friends') errors = field.check(from_model=Person) expected = [ Error( ("The model is used as an intermediate model by " "'invalid_models_tests.Person.friends', but it has more than two " "foreign keys to 'Person', which is ambiguous. You must specify " "which two foreign keys Django should use via the through_fields " "keyword argument."), hint='Use through_fields to specify which two foreign keys Django should use.', obj=InvalidRelationship, id='fields.E333', ), ] self.assertEqual(errors, expected) def test_symmetric_self_reference_with_intermediate_table(self): class Person(models.Model): # Explicit symmetrical=True. friends = models.ManyToManyField('self', through="Relationship", symmetrical=True) class Relationship(models.Model): first = models.ForeignKey(Person, models.CASCADE, related_name="rel_from_set") second = models.ForeignKey(Person, models.CASCADE, related_name="rel_to_set") field = Person._meta.get_field('friends') errors = field.check(from_model=Person) expected = [ Error( 'Many-to-many fields with intermediate tables must not be symmetrical.', hint=None, obj=field, id='fields.E332', ), ] self.assertEqual(errors, expected) def test_symmetric_self_reference_with_intermediate_table_and_through_fields(self): """ Using through_fields in a m2m with an intermediate model shouldn't mask its incompatibility with symmetry. """ class Person(models.Model): # Explicit symmetrical=True. 
friends = models.ManyToManyField('self', symmetrical=True, through="Relationship", through_fields=('first', 'second')) class Relationship(models.Model): first = models.ForeignKey(Person, models.CASCADE, related_name="rel_from_set") second = models.ForeignKey(Person, models.CASCADE, related_name="rel_to_set") referee = models.ForeignKey(Person, models.CASCADE, related_name="referred") field = Person._meta.get_field('friends') errors = field.check(from_model=Person) expected = [ Error( 'Many-to-many fields with intermediate tables must not be symmetrical.', hint=None, obj=field, id='fields.E332', ), ] self.assertEqual(errors, expected) def test_foreign_key_to_abstract_model(self): class AbstractModel(models.Model): class Meta: abstract = True class Model(models.Model): rel_string_foreign_key = models.ForeignKey('AbstractModel', models.CASCADE) rel_class_foreign_key = models.ForeignKey(AbstractModel, models.CASCADE) fields = [ Model._meta.get_field('rel_string_foreign_key'), Model._meta.get_field('rel_class_foreign_key'), ] expected_error = Error( "Field defines a relation with model 'AbstractModel', " "which is either not installed, or is abstract.", id='fields.E300', ) for field in fields: expected_error.obj = field errors = field.check() self.assertEqual(errors, [expected_error]) def test_m2m_to_abstract_model(self): class AbstractModel(models.Model): class Meta: abstract = True class Model(models.Model): rel_string_m2m = models.ManyToManyField('AbstractModel') rel_class_m2m = models.ManyToManyField(AbstractModel) fields = [ Model._meta.get_field('rel_string_m2m'), Model._meta.get_field('rel_class_m2m'), ] expected_error = Error( "Field defines a relation with model 'AbstractModel', " "which is either not installed, or is abstract.", id='fields.E300', ) for field in fields: expected_error.obj = field errors = field.check(from_model=Model) self.assertEqual(errors, [expected_error]) def test_unique_m2m(self): class Person(models.Model): name = models.CharField(max_length=5) class Group(models.Model): members = models.ManyToManyField('Person', unique=True) field = Group._meta.get_field('members') errors = field.check(from_model=Group) expected = [ Error( 'ManyToManyFields cannot be unique.', hint=None, obj=field, id='fields.E330', ), ] self.assertEqual(errors, expected) def test_foreign_key_to_non_unique_field(self): class Target(models.Model): bad = models.IntegerField() # No unique=True class Model(models.Model): foreign_key = models.ForeignKey('Target', models.CASCADE, to_field='bad') field = Model._meta.get_field('foreign_key') errors = field.check() expected = [ Error( "'Target.bad' must set unique=True because it is referenced by a foreign key.", hint=None, obj=field, id='fields.E311', ), ] self.assertEqual(errors, expected) def test_foreign_key_to_non_unique_field_under_explicit_model(self): class Target(models.Model): bad = models.IntegerField() class Model(models.Model): field = models.ForeignKey(Target, models.CASCADE, to_field='bad') field = Model._meta.get_field('field') errors = field.check() expected = [ Error( "'Target.bad' must set unique=True because it is referenced by a foreign key.", hint=None, obj=field, id='fields.E311', ), ] self.assertEqual(errors, expected) def test_foreign_object_to_non_unique_fields(self): class Person(models.Model): # Note that both fields are not unique. 
country_id = models.IntegerField() city_id = models.IntegerField() class MMembership(models.Model): person_country_id = models.IntegerField() person_city_id = models.IntegerField() person = models.ForeignObject(Person, on_delete=models.CASCADE, from_fields=['person_country_id', 'person_city_id'], to_fields=['country_id', 'city_id']) field = MMembership._meta.get_field('person') errors = field.check() expected = [ Error( "No subset of the fields 'country_id', 'city_id' on model 'Person' is unique.", hint=( "Add unique=True on any of those fields or add at least " "a subset of them to a unique_together constraint." ), obj=field, id='fields.E310', ) ] self.assertEqual(errors, expected) def test_on_delete_set_null_on_non_nullable_field(self): class Person(models.Model): pass class Model(models.Model): foreign_key = models.ForeignKey('Person', models.SET_NULL) field = Model._meta.get_field('foreign_key') errors = field.check() expected = [ Error( 'Field specifies on_delete=SET_NULL, but cannot be null.', hint='Set null=True argument on the field, or change the on_delete rule.', obj=field, id='fields.E320', ), ] self.assertEqual(errors, expected) def test_on_delete_set_default_without_default_value(self): class Person(models.Model): pass class Model(models.Model): foreign_key = models.ForeignKey('Person', models.SET_DEFAULT) field = Model._meta.get_field('foreign_key') errors = field.check() expected = [ Error( 'Field specifies on_delete=SET_DEFAULT, but has no default value.', hint='Set a default value, or change the on_delete rule.', obj=field, id='fields.E321', ), ] self.assertEqual(errors, expected) @skipIfDBFeature('interprets_empty_strings_as_nulls') def test_nullable_primary_key(self): class Model(models.Model): field = models.IntegerField(primary_key=True, null=True) field = Model._meta.get_field('field') errors = field.check() expected = [ Error( 'Primary keys must not have null=True.', hint='Set null=False on the field, or remove primary_key=True argument.', obj=field, id='fields.E007', ), ] self.assertEqual(errors, expected) def test_not_swapped_model(self): class SwappableModel(models.Model): # A model that can be, but isn't swapped out. References to this # model should *not* raise any validation error. 
class Meta: swappable = 'TEST_SWAPPABLE_MODEL' class Model(models.Model): explicit_fk = models.ForeignKey(SwappableModel, models.CASCADE, related_name='explicit_fk') implicit_fk = models.ForeignKey('invalid_models_tests.SwappableModel', models.CASCADE, related_name='implicit_fk') explicit_m2m = models.ManyToManyField(SwappableModel, related_name='explicit_m2m') implicit_m2m = models.ManyToManyField( 'invalid_models_tests.SwappableModel', related_name='implicit_m2m') explicit_fk = Model._meta.get_field('explicit_fk') self.assertEqual(explicit_fk.check(), []) implicit_fk = Model._meta.get_field('implicit_fk') self.assertEqual(implicit_fk.check(), []) explicit_m2m = Model._meta.get_field('explicit_m2m') self.assertEqual(explicit_m2m.check(from_model=Model), []) implicit_m2m = Model._meta.get_field('implicit_m2m') self.assertEqual(implicit_m2m.check(from_model=Model), []) @override_settings(TEST_SWAPPED_MODEL='invalid_models_tests.Replacement') def test_referencing_to_swapped_model(self): class Replacement(models.Model): pass class SwappedModel(models.Model): class Meta: swappable = 'TEST_SWAPPED_MODEL' class Model(models.Model): explicit_fk = models.ForeignKey(SwappedModel, models.CASCADE, related_name='explicit_fk') implicit_fk = models.ForeignKey('invalid_models_tests.SwappedModel', models.CASCADE, related_name='implicit_fk') explicit_m2m = models.ManyToManyField(SwappedModel, related_name='explicit_m2m') implicit_m2m = models.ManyToManyField( 'invalid_models_tests.SwappedModel', related_name='implicit_m2m') fields = [ Model._meta.get_field('explicit_fk'), Model._meta.get_field('implicit_fk'), Model._meta.get_field('explicit_m2m'), Model._meta.get_field('implicit_m2m'), ] expected_error = Error( ("Field defines a relation with the model " "'invalid_models_tests.SwappedModel', which has been swapped out."), hint="Update the relation to point at 'settings.TEST_SWAPPED_MODEL'.", id='fields.E301', ) for field in fields: expected_error.obj = field errors = field.check(from_model=Model) self.assertEqual(errors, [expected_error]) def test_related_field_has_invalid_related_name(self): digit = 0 illegal_non_alphanumeric = '!' whitespace = '\t' invalid_related_names = [ '%s_begins_with_digit' % digit, '%s_begins_with_illegal_non_alphanumeric' % illegal_non_alphanumeric, '%s_begins_with_whitespace' % whitespace, 'contains_%s_illegal_non_alphanumeric' % illegal_non_alphanumeric, 'contains_%s_whitespace' % whitespace, 'ends_with_with_illegal_non_alphanumeric_%s' % illegal_non_alphanumeric, 'ends_with_whitespace_%s' % whitespace, 'with', # a Python keyword 'related_name\n', '', ] # Python 2 crashes on non-ASCII strings. 
        if six.PY3:
            invalid_related_names.append(',')

        class Parent(models.Model):
            pass

        for invalid_related_name in invalid_related_names:
            Child = type(str('Child_%s') % str(invalid_related_name), (models.Model,), {
                'parent': models.ForeignKey('Parent', models.CASCADE, related_name=invalid_related_name),
                '__module__': Parent.__module__,
            })

            field = Child._meta.get_field('parent')
            errors = Child.check()
            expected = [
                Error(
                    "The name '%s' is invalid related_name for field Child_%s.parent"
                    % (invalid_related_name, invalid_related_name),
                    hint="Related name must be a valid Python identifier or end with a '+'",
                    obj=field,
                    id='fields.E306',
                ),
            ]
            self.assertEqual(errors, expected)

    def test_related_field_has_valid_related_name(self):
        lowercase = 'a'
        uppercase = 'A'
        digit = 0

        related_names = [
            '%s_starts_with_lowercase' % lowercase,
            '%s_starts_with_uppercase' % uppercase,
            '_starts_with_underscore',
            'contains_%s_digit' % digit,
            'ends_with_plus+',
            '_',
            '_+',
            '+',
        ]
        # Python 2 crashes on non-ASCII strings.
        if six.PY3:
            related_names.extend(['試', '試驗+'])

        class Parent(models.Model):
            pass

        for related_name in related_names:
            Child = type(str('Child_%s') % str(related_name), (models.Model,), {
                'parent': models.ForeignKey('Parent', models.CASCADE, related_name=related_name),
                '__module__': Parent.__module__,
            })
            errors = Child.check()
            self.assertFalse(errors)


@isolate_apps('invalid_models_tests')
class AccessorClashTests(SimpleTestCase):

    def test_fk_to_integer(self):
        self._test_accessor_clash(
            target=models.IntegerField(),
            relative=models.ForeignKey('Target', models.CASCADE))

    def test_fk_to_fk(self):
        self._test_accessor_clash(
            target=models.ForeignKey('Another', models.CASCADE),
            relative=models.ForeignKey('Target', models.CASCADE))

    def test_fk_to_m2m(self):
        self._test_accessor_clash(
            target=models.ManyToManyField('Another'),
            relative=models.ForeignKey('Target', models.CASCADE))

    def test_m2m_to_integer(self):
        self._test_accessor_clash(
            target=models.IntegerField(),
            relative=models.ManyToManyField('Target'))

    def test_m2m_to_fk(self):
        self._test_accessor_clash(
            target=models.ForeignKey('Another', models.CASCADE),
            relative=models.ManyToManyField('Target'))

    def test_m2m_to_m2m(self):
        self._test_accessor_clash(
            target=models.ManyToManyField('Another'),
            relative=models.ManyToManyField('Target'))

    def _test_accessor_clash(self, target, relative):
        class Another(models.Model):
            pass

        class Target(models.Model):
            model_set = target

        class Model(models.Model):
            rel = relative

        errors = Model.check()
        expected = [
            Error(
                "Reverse accessor for 'Model.rel' clashes with field name 'Target.model_set'.",
                hint=("Rename field 'Target.model_set', or add/change "
                      "a related_name argument to the definition "
                      "for field 'Model.rel'."),
                obj=Model._meta.get_field('rel'),
                id='fields.E302',
            ),
        ]
        self.assertEqual(errors, expected)

    def test_clash_between_accessors(self):
        class Target(models.Model):
            pass

        class Model(models.Model):
            foreign = models.ForeignKey(Target, models.CASCADE)
            m2m = models.ManyToManyField(Target)

        errors = Model.check()
        expected = [
            Error(
                "Reverse accessor for 'Model.foreign' clashes with reverse accessor for 'Model.m2m'.",
                hint=("Add or change a related_name argument to the definition "
                      "for 'Model.foreign' or 'Model.m2m'."),
                obj=Model._meta.get_field('foreign'),
                id='fields.E304',
            ),
            Error(
                "Reverse accessor for 'Model.m2m' clashes with reverse accessor for 'Model.foreign'.",
                hint=("Add or change a related_name argument to the definition "
                      "for 'Model.m2m' or 'Model.foreign'."),
                obj=Model._meta.get_field('m2m'),
                id='fields.E304',
            ),
        ]
self.assertEqual(errors, expected) def test_m2m_to_m2m_with_inheritance(self): """ Ref #22047. """ class Target(models.Model): pass class Model(models.Model): children = models.ManyToManyField('Child', related_name="m2m_clash", related_query_name="no_clash") class Parent(models.Model): m2m_clash = models.ManyToManyField('Target') class Child(Parent): pass errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.children' clashes with field name 'Child.m2m_clash'.", hint=("Rename field 'Child.m2m_clash', or add/change " "a related_name argument to the definition " "for field 'Model.children'."), obj=Model._meta.get_field('children'), id='fields.E302', ) ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') class ReverseQueryNameClashTests(SimpleTestCase): def test_fk_to_integer(self): self._test_reverse_query_name_clash( target=models.IntegerField(), relative=models.ForeignKey('Target', models.CASCADE)) def test_fk_to_fk(self): self._test_reverse_query_name_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ForeignKey('Target', models.CASCADE)) def test_fk_to_m2m(self): self._test_reverse_query_name_clash( target=models.ManyToManyField('Another'), relative=models.ForeignKey('Target', models.CASCADE)) def test_m2m_to_integer(self): self._test_reverse_query_name_clash( target=models.IntegerField(), relative=models.ManyToManyField('Target')) def test_m2m_to_fk(self): self._test_reverse_query_name_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ManyToManyField('Target')) def test_m2m_to_m2m(self): self._test_reverse_query_name_clash( target=models.ManyToManyField('Another'), relative=models.ManyToManyField('Target')) def _test_reverse_query_name_clash(self, target, relative): class Another(models.Model): pass class Target(models.Model): model = target class Model(models.Model): rel = relative errors = Model.check() expected = [ Error( "Reverse query name for 'Model.rel' clashes with field name 'Target.model'.", hint=("Rename field 'Target.model', or add/change " "a related_name argument to the definition " "for field 'Model.rel'."), obj=Model._meta.get_field('rel'), id='fields.E303', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') class ExplicitRelatedNameClashTests(SimpleTestCase): def test_fk_to_integer(self): self._test_explicit_related_name_clash( target=models.IntegerField(), relative=models.ForeignKey('Target', models.CASCADE, related_name='clash')) def test_fk_to_fk(self): self._test_explicit_related_name_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ForeignKey('Target', models.CASCADE, related_name='clash')) def test_fk_to_m2m(self): self._test_explicit_related_name_clash( target=models.ManyToManyField('Another'), relative=models.ForeignKey('Target', models.CASCADE, related_name='clash')) def test_m2m_to_integer(self): self._test_explicit_related_name_clash( target=models.IntegerField(), relative=models.ManyToManyField('Target', related_name='clash')) def test_m2m_to_fk(self): self._test_explicit_related_name_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ManyToManyField('Target', related_name='clash')) def test_m2m_to_m2m(self): self._test_explicit_related_name_clash( target=models.ManyToManyField('Another'), relative=models.ManyToManyField('Target', related_name='clash')) def _test_explicit_related_name_clash(self, target, relative): class Another(models.Model): pass class Target(models.Model): clash = target 
class Model(models.Model): rel = relative errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.rel' clashes with field name 'Target.clash'.", hint=("Rename field 'Target.clash', or add/change " "a related_name argument to the definition " "for field 'Model.rel'."), obj=Model._meta.get_field('rel'), id='fields.E302', ), Error( "Reverse query name for 'Model.rel' clashes with field name 'Target.clash'.", hint=("Rename field 'Target.clash', or add/change " "a related_name argument to the definition " "for field 'Model.rel'."), obj=Model._meta.get_field('rel'), id='fields.E303', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') class ExplicitRelatedQueryNameClashTests(SimpleTestCase): def test_fk_to_integer(self): self._test_explicit_related_query_name_clash( target=models.IntegerField(), relative=models.ForeignKey('Target', models.CASCADE, related_query_name='clash')) def test_fk_to_fk(self): self._test_explicit_related_query_name_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ForeignKey('Target', models.CASCADE, related_query_name='clash')) def test_fk_to_m2m(self): self._test_explicit_related_query_name_clash( target=models.ManyToManyField('Another'), relative=models.ForeignKey('Target', models.CASCADE, related_query_name='clash')) def test_m2m_to_integer(self): self._test_explicit_related_query_name_clash( target=models.IntegerField(), relative=models.ManyToManyField('Target', related_query_name='clash')) def test_m2m_to_fk(self): self._test_explicit_related_query_name_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ManyToManyField('Target', related_query_name='clash')) def test_m2m_to_m2m(self): self._test_explicit_related_query_name_clash( target=models.ManyToManyField('Another'), relative=models.ManyToManyField('Target', related_query_name='clash')) def _test_explicit_related_query_name_clash(self, target, relative): class Another(models.Model): pass class Target(models.Model): clash = target class Model(models.Model): rel = relative errors = Model.check() expected = [ Error( "Reverse query name for 'Model.rel' clashes with field name 'Target.clash'.", hint=("Rename field 'Target.clash', or add/change a related_name " "argument to the definition for field 'Model.rel'."), obj=Model._meta.get_field('rel'), id='fields.E303', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') class SelfReferentialM2MClashTests(SimpleTestCase): def test_clash_between_accessors(self): class Model(models.Model): first_m2m = models.ManyToManyField('self', symmetrical=False) second_m2m = models.ManyToManyField('self', symmetrical=False) errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.first_m2m' clashes with reverse accessor for 'Model.second_m2m'.", hint=("Add or change a related_name argument to the definition " "for 'Model.first_m2m' or 'Model.second_m2m'."), obj=Model._meta.get_field('first_m2m'), id='fields.E304', ), Error( "Reverse accessor for 'Model.second_m2m' clashes with reverse accessor for 'Model.first_m2m'.", hint=("Add or change a related_name argument to the definition " "for 'Model.second_m2m' or 'Model.first_m2m'."), obj=Model._meta.get_field('second_m2m'), id='fields.E304', ), ] self.assertEqual(errors, expected) def test_accessor_clash(self): class Model(models.Model): model_set = models.ManyToManyField("self", symmetrical=False) errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.model_set' clashes with field name 
'Model.model_set'.", hint=("Rename field 'Model.model_set', or add/change " "a related_name argument to the definition " "for field 'Model.model_set'."), obj=Model._meta.get_field('model_set'), id='fields.E302', ), ] self.assertEqual(errors, expected) def test_reverse_query_name_clash(self): class Model(models.Model): model = models.ManyToManyField("self", symmetrical=False) errors = Model.check() expected = [ Error( "Reverse query name for 'Model.model' clashes with field name 'Model.model'.", hint=("Rename field 'Model.model', or add/change a related_name " "argument to the definition for field 'Model.model'."), obj=Model._meta.get_field('model'), id='fields.E303', ), ] self.assertEqual(errors, expected) def test_clash_under_explicit_related_name(self): class Model(models.Model): clash = models.IntegerField() m2m = models.ManyToManyField("self", symmetrical=False, related_name='clash') errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.m2m' clashes with field name 'Model.clash'.", hint=("Rename field 'Model.clash', or add/change a related_name " "argument to the definition for field 'Model.m2m'."), obj=Model._meta.get_field('m2m'), id='fields.E302', ), Error( "Reverse query name for 'Model.m2m' clashes with field name 'Model.clash'.", hint=("Rename field 'Model.clash', or add/change a related_name " "argument to the definition for field 'Model.m2m'."), obj=Model._meta.get_field('m2m'), id='fields.E303', ), ] self.assertEqual(errors, expected) def test_valid_model(self): class Model(models.Model): first = models.ManyToManyField("self", symmetrical=False, related_name='first_accessor') second = models.ManyToManyField("self", symmetrical=False, related_name='second_accessor') errors = Model.check() self.assertEqual(errors, []) @isolate_apps('invalid_models_tests') class SelfReferentialFKClashTests(SimpleTestCase): def test_accessor_clash(self): class Model(models.Model): model_set = models.ForeignKey("Model", models.CASCADE) errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.model_set' clashes with field name 'Model.model_set'.", hint=("Rename field 'Model.model_set', or add/change " "a related_name argument to the definition " "for field 'Model.model_set'."), obj=Model._meta.get_field('model_set'), id='fields.E302', ), ] self.assertEqual(errors, expected) def test_reverse_query_name_clash(self): class Model(models.Model): model = models.ForeignKey("Model", models.CASCADE) errors = Model.check() expected = [ Error( "Reverse query name for 'Model.model' clashes with field name 'Model.model'.", hint=("Rename field 'Model.model', or add/change " "a related_name argument to the definition " "for field 'Model.model'."), obj=Model._meta.get_field('model'), id='fields.E303', ), ] self.assertEqual(errors, expected) def test_clash_under_explicit_related_name(self): class Model(models.Model): clash = models.CharField(max_length=10) foreign = models.ForeignKey("Model", models.CASCADE, related_name='clash') errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.foreign' clashes with field name 'Model.clash'.", hint=("Rename field 'Model.clash', or add/change " "a related_name argument to the definition " "for field 'Model.foreign'."), obj=Model._meta.get_field('foreign'), id='fields.E302', ), Error( "Reverse query name for 'Model.foreign' clashes with field name 'Model.clash'.", hint=("Rename field 'Model.clash', or add/change " "a related_name argument to the definition " "for field 'Model.foreign'."), obj=Model._meta.get_field('foreign'), 
id='fields.E303', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') class ComplexClashTests(SimpleTestCase): # New tests should not be included here, because this is a single, # self-contained sanity check, not a test of everything. def test_complex_clash(self): class Target(models.Model): tgt_safe = models.CharField(max_length=10) clash = models.CharField(max_length=10) model = models.CharField(max_length=10) clash1_set = models.CharField(max_length=10) class Model(models.Model): src_safe = models.CharField(max_length=10) foreign_1 = models.ForeignKey(Target, models.CASCADE, related_name='id') foreign_2 = models.ForeignKey(Target, models.CASCADE, related_name='src_safe') m2m_1 = models.ManyToManyField(Target, related_name='id') m2m_2 = models.ManyToManyField(Target, related_name='src_safe') errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.foreign_1' clashes with field name 'Target.id'.", hint=("Rename field 'Target.id', or add/change a related_name " "argument to the definition for field 'Model.foreign_1'."), obj=Model._meta.get_field('foreign_1'), id='fields.E302', ), Error( "Reverse query name for 'Model.foreign_1' clashes with field name 'Target.id'.", hint=("Rename field 'Target.id', or add/change a related_name " "argument to the definition for field 'Model.foreign_1'."), obj=Model._meta.get_field('foreign_1'), id='fields.E303', ), Error( "Reverse accessor for 'Model.foreign_1' clashes with reverse accessor for 'Model.m2m_1'.", hint=("Add or change a related_name argument to " "the definition for 'Model.foreign_1' or 'Model.m2m_1'."), obj=Model._meta.get_field('foreign_1'), id='fields.E304', ), Error( "Reverse query name for 'Model.foreign_1' clashes with reverse query name for 'Model.m2m_1'.", hint=("Add or change a related_name argument to " "the definition for 'Model.foreign_1' or 'Model.m2m_1'."), obj=Model._meta.get_field('foreign_1'), id='fields.E305', ), Error( "Reverse accessor for 'Model.foreign_2' clashes with reverse accessor for 'Model.m2m_2'.", hint=("Add or change a related_name argument " "to the definition for 'Model.foreign_2' or 'Model.m2m_2'."), obj=Model._meta.get_field('foreign_2'), id='fields.E304', ), Error( "Reverse query name for 'Model.foreign_2' clashes with reverse query name for 'Model.m2m_2'.", hint=("Add or change a related_name argument to " "the definition for 'Model.foreign_2' or 'Model.m2m_2'."), obj=Model._meta.get_field('foreign_2'), id='fields.E305', ), Error( "Reverse accessor for 'Model.m2m_1' clashes with field name 'Target.id'.", hint=("Rename field 'Target.id', or add/change a related_name " "argument to the definition for field 'Model.m2m_1'."), obj=Model._meta.get_field('m2m_1'), id='fields.E302', ), Error( "Reverse query name for 'Model.m2m_1' clashes with field name 'Target.id'.", hint=("Rename field 'Target.id', or add/change a related_name " "argument to the definition for field 'Model.m2m_1'."), obj=Model._meta.get_field('m2m_1'), id='fields.E303', ), Error( "Reverse accessor for 'Model.m2m_1' clashes with reverse accessor for 'Model.foreign_1'.", hint=("Add or change a related_name argument to the definition " "for 'Model.m2m_1' or 'Model.foreign_1'."), obj=Model._meta.get_field('m2m_1'), id='fields.E304', ), Error( "Reverse query name for 'Model.m2m_1' clashes with reverse query name for 'Model.foreign_1'.", hint=("Add or change a related_name argument to " "the definition for 'Model.m2m_1' or 'Model.foreign_1'."), obj=Model._meta.get_field('m2m_1'), id='fields.E305', ), Error( 
"Reverse accessor for 'Model.m2m_2' clashes with reverse accessor for 'Model.foreign_2'.", hint=("Add or change a related_name argument to the definition " "for 'Model.m2m_2' or 'Model.foreign_2'."), obj=Model._meta.get_field('m2m_2'), id='fields.E304', ), Error( "Reverse query name for 'Model.m2m_2' clashes with reverse query name for 'Model.foreign_2'.", hint=("Add or change a related_name argument to the definition " "for 'Model.m2m_2' or 'Model.foreign_2'."), obj=Model._meta.get_field('m2m_2'), id='fields.E305', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') class M2mThroughFieldsTests(SimpleTestCase): def test_m2m_field_argument_validation(self): """ Tests that ManyToManyField accepts the ``through_fields`` kwarg only if an intermediary table is specified. """ class Fan(models.Model): pass with self.assertRaisesMessage(ValueError, 'Cannot specify through_fields without a through model'): models.ManyToManyField(Fan, through_fields=('f1', 'f2')) def test_invalid_order(self): """ Tests that mixing up the order of link fields to ManyToManyField.through_fields triggers validation errors. """ class Fan(models.Model): pass class Event(models.Model): invitees = models.ManyToManyField(Fan, through='Invitation', through_fields=('invitee', 'event')) class Invitation(models.Model): event = models.ForeignKey(Event, models.CASCADE) invitee = models.ForeignKey(Fan, models.CASCADE) inviter = models.ForeignKey(Fan, models.CASCADE, related_name='+') field = Event._meta.get_field('invitees') errors = field.check(from_model=Event) expected = [ Error( ("'Invitation.invitee' is not a foreign key to 'Event'."), hint="Did you mean one of the following foreign keys to 'Event': event?", obj=field, id='fields.E339'), Error( ("'Invitation.event' is not a foreign key to 'Fan'."), hint="Did you mean one of the following foreign keys to 'Fan': invitee, inviter?", obj=field, id='fields.E339'), ] self.assertEqual(expected, errors) def test_invalid_field(self): """ Tests that providing invalid field names to ManyToManyField.through_fields triggers validation errors. """ class Fan(models.Model): pass class Event(models.Model): invitees = models.ManyToManyField( Fan, through='Invitation', through_fields=('invalid_field_1', 'invalid_field_2'), ) class Invitation(models.Model): event = models.ForeignKey(Event, models.CASCADE) invitee = models.ForeignKey(Fan, models.CASCADE) inviter = models.ForeignKey(Fan, models.CASCADE, related_name='+') field = Event._meta.get_field('invitees') errors = field.check(from_model=Event) expected = [ Error( "The intermediary model 'invalid_models_tests.Invitation' has no field 'invalid_field_1'.", hint="Did you mean one of the following foreign keys to 'Event': event?", obj=field, id='fields.E338'), Error( "The intermediary model 'invalid_models_tests.Invitation' has no field 'invalid_field_2'.", hint="Did you mean one of the following foreign keys to 'Fan': invitee, inviter?", obj=field, id='fields.E338'), ] self.assertEqual(expected, errors) def test_explicit_field_names(self): """ Tests that if ``through_fields`` kwarg is given, it must specify both link fields of the intermediary table. 
""" class Fan(models.Model): pass class Event(models.Model): invitees = models.ManyToManyField(Fan, through='Invitation', through_fields=(None, 'invitee')) class Invitation(models.Model): event = models.ForeignKey(Event, models.CASCADE) invitee = models.ForeignKey(Fan, models.CASCADE) inviter = models.ForeignKey(Fan, models.CASCADE, related_name='+') field = Event._meta.get_field('invitees') errors = field.check(from_model=Event) expected = [ Error( "Field specifies 'through_fields' but does not provide the names " "of the two link fields that should be used for the relation " "through model 'invalid_models_tests.Invitation'.", hint=("Make sure you specify 'through_fields' as " "through_fields=('field1', 'field2')"), obj=field, id='fields.E337')] self.assertEqual(expected, errors) def test_superset_foreign_object(self): class Parent(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() c = models.PositiveIntegerField() class Meta: unique_together = (('a', 'b', 'c'),) class Child(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() value = models.CharField(max_length=255) parent = ForeignObject( Parent, on_delete=models.SET_NULL, from_fields=('a', 'b'), to_fields=('a', 'b'), related_name='children', ) field = Child._meta.get_field('parent') errors = field.check(from_model=Child) expected = [ Error( "No subset of the fields 'a', 'b' on model 'Parent' is unique.", hint=( "Add unique=True on any of those fields or add at least " "a subset of them to a unique_together constraint." ), obj=field, id='fields.E310', ), ] self.assertEqual(expected, errors) def test_intersection_foreign_object(self): class Parent(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() c = models.PositiveIntegerField() d = models.PositiveIntegerField() class Meta: unique_together = (('a', 'b', 'c'),) class Child(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() d = models.PositiveIntegerField() value = models.CharField(max_length=255) parent = ForeignObject( Parent, on_delete=models.SET_NULL, from_fields=('a', 'b', 'd'), to_fields=('a', 'b', 'd'), related_name='children', ) field = Child._meta.get_field('parent') errors = field.check(from_model=Child) expected = [ Error( "No subset of the fields 'a', 'b', 'd' on model 'Parent' is unique.", hint=( "Add unique=True on any of those fields or add at least " "a subset of them to a unique_together constraint." ), obj=field, id='fields.E310', ), ] self.assertEqual(expected, errors)
""" exec_command Implements exec_command function that is (almost) equivalent to commands.getstatusoutput function but on NT, DOS systems the returned status is actually correct (though, the returned status values may be different by a factor). In addition, exec_command takes keyword arguments for (re-)defining environment variables. Provides functions: exec_command --- execute command in a specified directory and in the modified environment. find_executable --- locate a command using info from environment variable PATH. Equivalent to posix `which` command. Author: Pearu Peterson <pearu@cens.ioc.ee> Created: 11 January 2003 Requires: Python 2.x Successfully tested on: ======== ============ ================================================= os.name sys.platform comments ======== ============ ================================================= posix linux2 Debian (sid) Linux, Python 2.1.3+, 2.2.3+, 2.3.3 PyCrust 0.9.3, Idle 1.0.2 posix linux2 Red Hat 9 Linux, Python 2.1.3, 2.2.2, 2.3.2 posix sunos5 SunOS 5.9, Python 2.2, 2.3.2 posix darwin Darwin 7.2.0, Python 2.3 nt win32 Windows Me Python 2.3(EE), Idle 1.0, PyCrust 0.7.2 Python 2.1.1 Idle 0.8 nt win32 Windows 98, Python 2.1.1. Idle 0.8 nt win32 Cygwin 98-4.10, Python 2.1.1(MSC) - echo tests fail i.e. redefining environment variables may not work. FIXED: don't use cygwin echo! Comment: also `cmd /c echo` will not work but redefining environment variables do work. posix cygwin Cygwin 98-4.10, Python 2.3.3(cygming special) nt win32 Windows XP, Python 2.3.3 ======== ============ ================================================= Known bugs: * Tests, that send messages to stderr, fail when executed from MSYS prompt because the messages are lost at some point. """ from __future__ import division, absolute_import, print_function __all__ = ['exec_command', 'find_executable'] import os import sys import subprocess import locale import warnings from numpy.distutils.misc_util import is_sequence, make_temp_file from numpy.distutils import log def filepath_from_subprocess_output(output): """ Convert `bytes` in the encoding used by a subprocess into a filesystem-appropriate `str`. Inherited from `exec_command`, and possibly incorrect. """ mylocale = locale.getpreferredencoding(False) if mylocale is None: mylocale = 'ascii' output = output.decode(mylocale, errors='replace') output = output.replace('\r\n', '\n') # Another historical oddity if output[-1:] == '\n': output = output[:-1] # stdio uses bytes in python 2, so to avoid issues, we simply # remove all non-ascii characters if sys.version_info < (3, 0): output = output.encode('ascii', errors='replace') return output def forward_bytes_to_stdout(val): """ Forward bytes from a subprocess call to the console, without attempting to decode them. The assumption is that the subprocess call already returned bytes in a suitable encoding. 
""" if sys.version_info.major < 3: # python 2 has binary output anyway sys.stdout.write(val) elif hasattr(sys.stdout, 'buffer'): # use the underlying binary output if there is one sys.stdout.buffer.write(val) elif hasattr(sys.stdout, 'encoding'): # round-trip the encoding if necessary sys.stdout.write(val.decode(sys.stdout.encoding)) else: # make a best-guess at the encoding sys.stdout.write(val.decode('utf8', errors='replace')) def temp_file_name(): # 2019-01-30, 1.17 warnings.warn('temp_file_name is deprecated since NumPy v1.17, use ' 'tempfile.mkstemp instead', DeprecationWarning, stacklevel=1) fo, name = make_temp_file() fo.close() return name def get_pythonexe(): pythonexe = sys.executable if os.name in ['nt', 'dos']: fdir, fn = os.path.split(pythonexe) fn = fn.upper().replace('PYTHONW', 'PYTHON') pythonexe = os.path.join(fdir, fn) assert os.path.isfile(pythonexe), '%r is not a file' % (pythonexe,) return pythonexe def find_executable(exe, path=None, _cache={}): """Return full path of a executable or None. Symbolic links are not followed. """ key = exe, path try: return _cache[key] except KeyError: pass log.debug('find_executable(%r)' % exe) orig_exe = exe if path is None: path = os.environ.get('PATH', os.defpath) if os.name=='posix': realpath = os.path.realpath else: realpath = lambda a:a if exe.startswith('"'): exe = exe[1:-1] suffixes = [''] if os.name in ['nt', 'dos', 'os2']: fn, ext = os.path.splitext(exe) extra_suffixes = ['.exe', '.com', '.bat'] if ext.lower() not in extra_suffixes: suffixes = extra_suffixes if os.path.isabs(exe): paths = [''] else: paths = [ os.path.abspath(p) for p in path.split(os.pathsep) ] for path in paths: fn = os.path.join(path, exe) for s in suffixes: f_ext = fn+s if not os.path.islink(f_ext): f_ext = realpath(f_ext) if os.path.isfile(f_ext) and os.access(f_ext, os.X_OK): log.info('Found executable %s' % f_ext) _cache[key] = f_ext return f_ext log.warn('Could not locate executable %s' % orig_exe) return None def _preserve_environment( names ): log.debug('_preserve_environment(%r)' % (names)) env = {name: os.environ.get(name) for name in names} return env def _update_environment( **env ): log.debug('_update_environment(...)') for name, value in env.items(): os.environ[name] = value or '' def exec_command(command, execute_in='', use_shell=None, use_tee=None, _with_python = 1, **env ): """ Return (status,output) of executed command. .. deprecated:: 1.17 Use subprocess.Popen instead Parameters ---------- command : str A concatenated string of executable and arguments. execute_in : str Before running command ``cd execute_in`` and after ``cd -``. use_shell : {bool, None}, optional If True, execute ``sh -c command``. Default None (True) use_tee : {bool, None}, optional If True use tee. Default None (True) Returns ------- res : str Both stdout and stderr messages. Notes ----- On NT, DOS systems the returned status is correct for external commands. Wild cards will not work for non-posix systems or when use_shell=0. 
""" # 2019-01-30, 1.17 warnings.warn('exec_command is deprecated since NumPy v1.17, use ' 'subprocess.Popen instead', DeprecationWarning, stacklevel=1) log.debug('exec_command(%r,%s)' % (command,\ ','.join(['%s=%r'%kv for kv in env.items()]))) if use_tee is None: use_tee = os.name=='posix' if use_shell is None: use_shell = os.name=='posix' execute_in = os.path.abspath(execute_in) oldcwd = os.path.abspath(os.getcwd()) if __name__[-12:] == 'exec_command': exec_dir = os.path.dirname(os.path.abspath(__file__)) elif os.path.isfile('exec_command.py'): exec_dir = os.path.abspath('.') else: exec_dir = os.path.abspath(sys.argv[0]) if os.path.isfile(exec_dir): exec_dir = os.path.dirname(exec_dir) if oldcwd!=execute_in: os.chdir(execute_in) log.debug('New cwd: %s' % execute_in) else: log.debug('Retaining cwd: %s' % oldcwd) oldenv = _preserve_environment( list(env.keys()) ) _update_environment( **env ) try: st = _exec_command(command, use_shell=use_shell, use_tee=use_tee, **env) finally: if oldcwd!=execute_in: os.chdir(oldcwd) log.debug('Restored cwd to %s' % oldcwd) _update_environment(**oldenv) return st def _exec_command(command, use_shell=None, use_tee = None, **env): """ Internal workhorse for exec_command(). """ if use_shell is None: use_shell = os.name=='posix' if use_tee is None: use_tee = os.name=='posix' if os.name == 'posix' and use_shell: # On POSIX, subprocess always uses /bin/sh, override sh = os.environ.get('SHELL', '/bin/sh') if is_sequence(command): command = [sh, '-c', ' '.join(command)] else: command = [sh, '-c', command] use_shell = False elif os.name == 'nt' and is_sequence(command): # On Windows, join the string for CreateProcess() ourselves as # subprocess does it a bit differently command = ' '.join(_quote_arg(arg) for arg in command) # Inherit environment by default env = env or None try: # universal_newlines is set to False so that communicate() # will return bytes. We need to decode the output ourselves # so that Python will not raise a UnicodeDecodeError when # it encounters an invalid character; rather, we simply replace it proc = subprocess.Popen(command, shell=use_shell, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=False) except EnvironmentError: # Return 127, as os.spawn*() and /bin/sh do return 127, '' text, err = proc.communicate() mylocale = locale.getpreferredencoding(False) if mylocale is None: mylocale = 'ascii' text = text.decode(mylocale, errors='replace') text = text.replace('\r\n', '\n') # Another historical oddity if text[-1:] == '\n': text = text[:-1] # stdio uses bytes in python 2, so to avoid issues, we simply # remove all non-ascii characters if sys.version_info < (3, 0): text = text.encode('ascii', errors='replace') if use_tee and text: print(text) return proc.returncode, text def _quote_arg(arg): """ Quote the argument for safe use in a shell command line. """ # If there is a quote in the string, assume relevants parts of the # string are already quoted (e.g. '-I"C:\\Program Files\\..."') if '"' not in arg and ' ' in arg: return '"%s"' % arg return arg
""" wakatime.dependencies.haxe ~~~~~~~~~~~~~~~~~~~~~~~~~~ Parse dependencies from Haxe code. :copyright: (c) 2018 Alan Hamlett. :license: BSD, see LICENSE for more details. """ from . import TokenParser class HaxeParser(TokenParser): exclude = [ r'^haxe$', ] state = None def parse(self): for index, token, content in self.tokens: self._process_token(token, content) return self.dependencies def _process_token(self, token, content): if self.partial(token) == 'Namespace': self._process_namespace(token, content) elif self.partial(token) == 'Text': self._process_text(token, content) else: self._process_other(token, content) def _process_namespace(self, token, content): if self.state == 'import': self.append(self._format(content)) self.state = None else: self.state = content def _process_text(self, token, content): pass def _process_other(self, token, content): self.state = None def _format(self, content): return content.strip()
from __future__ import print_function
from __future__ import absolute_import

import contextlib
import logging
import os

import py_utils
from py_utils import binary_manager
from py_utils import cloud_storage
from py_utils import dependency_util

import dependency_manager
from dependency_manager import base_config

from devil import devil_env

from telemetry.core import exceptions
from telemetry.core import util


TELEMETRY_PROJECT_CONFIG = os.path.join(
    util.GetTelemetryDir(), 'telemetry', 'binary_dependencies.json')

CHROME_BINARY_CONFIG = os.path.join(util.GetCatapultDir(), 'common',
                                    'py_utils', 'py_utils',
                                    'chrome_binaries.json')

SUPPORTED_DEP_PLATFORMS = (
    'linux_aarch64', 'linux_x86_64', 'linux_armv7l', 'linux_mips',
    'mac_x86_64', 'mac_arm64',
    'win_x86', 'win_AMD64',
    'android_arm64-v8a', 'android_armeabi-v7a', 'android_arm',
    'android_x64', 'android_x86'
)

PLATFORMS_TO_DOWNLOAD_FOLDER_MAP = {
    'linux_aarch64': 'bin/linux/aarch64',
    'linux_x86_64': 'bin/linux/x86_64',
    'linux_armv7l': 'bin/linux/armv7l',
    'linux_mips': 'bin/linux/mips',
    'mac_x86_64': 'bin/mac/x86_64',
    'mac_arm64': 'bin/mac/arm64',
    'win_x86': 'bin/win/x86',
    'win_AMD64': 'bin/win/AMD64',
    'android_arm64-v8a': 'bin/android/arm64-v8a',
    'android_armeabi-v7a': 'bin/android/armeabi-v7a',
    'android_arm': 'bin/android/arm',
    'android_x64': 'bin/android/x64',
    'android_x86': 'bin/android/x86',
}

NoPathFoundError = dependency_manager.NoPathFoundError
CloudStorageError = dependency_manager.CloudStorageError

_binary_manager = None
_installed_helpers = set()

TELEMETRY_BINARY_BASE_CS_FOLDER = 'binary_dependencies'
TELEMETRY_BINARY_CS_BUCKET = cloud_storage.PUBLIC_BUCKET


def NeedsInit():
  return not _binary_manager


def InitDependencyManager(client_configs):
  if GetBinaryManager():
    raise exceptions.InitializationError(
        'Trying to re-initialize the binary manager with config %s'
        % client_configs)
  configs = []
  if client_configs:
    configs += client_configs
  configs += [TELEMETRY_PROJECT_CONFIG, CHROME_BINARY_CONFIG]
  SetBinaryManager(binary_manager.BinaryManager(configs))

  devil_env.config.Initialize()


@contextlib.contextmanager
def TemporarilyReplaceBinaryManager(manager):
  old_manager = GetBinaryManager()
  try:
    SetBinaryManager(manager)
    yield
  finally:
    SetBinaryManager(old_manager)


def GetBinaryManager():
  return _binary_manager


def SetBinaryManager(manager):
  global _binary_manager  # pylint: disable=global-statement
  _binary_manager = manager


def _IsChromeOSLocalMode(os_name):
  """Determines if we're running telemetry on a Chrome OS device.

  Used to differentiate local mode (telemetry running on the CrOS DUT) from
  remote mode (running telemetry on another platform that communicates with
  the CrOS DUT over SSH).
  """
  return os_name == 'chromeos' and py_utils.GetHostOsName() == 'chromeos'


def FetchPath(binary_name, os_name, arch, os_version=None):
  """ Return a path to the appropriate executable for <binary_name>,
  downloading from cloud storage if needed, or None if it cannot be found.
  """
  if GetBinaryManager() is None:
    raise exceptions.InitializationError(
        'Called FetchPath with uninitialized binary manager.')
  return GetBinaryManager().FetchPath(
      binary_name, 'linux' if _IsChromeOSLocalMode(os_name) else os_name,
      arch, os_version)


def LocalPath(binary_name, os_name, arch, os_version=None):
  """ Return a local path to the given binary name, or None if an executable
  cannot be found. Will not download the executable.
  """
  if GetBinaryManager() is None:
    raise exceptions.InitializationError(
        'Called LocalPath with uninitialized binary manager.')
  return GetBinaryManager().LocalPath(binary_name, os_name, arch, os_version)


def FetchBinaryDependencies(
    platform, client_configs, fetch_reference_chrome_binary):
  """ Fetch all binary dependencies for the given |platform|.

  Note: we don't fetch browser binaries by default because the size of the
  binary is about 2Gb, and it requires cloud storage permission to the
  chrome-telemetry bucket.

  Args:
    platform: an instance of telemetry.core.platform
    client_configs: A list of paths (string) to dependencies json files.
    fetch_reference_chrome_binary: whether to fetch reference chrome binary
      for the given platform.
  """
  configs = [
      dependency_manager.BaseConfig(TELEMETRY_PROJECT_CONFIG),
  ]
  dep_manager = dependency_manager.DependencyManager(configs)
  os_name = platform.GetOSName()
  # If we're running directly on a Chrome OS device, fetch the binaries for
  # linux instead, which should be compatible with CrOS. Otherwise, if we're
  # running remotely on CrOS, fetch the binaries for the host platform like
  # we do with android below.
  if _IsChromeOSLocalMode(os_name):
    os_name = 'linux'
  target_platform = '%s_%s' % (os_name, platform.GetArchName())
  dep_manager.PrefetchPaths(target_platform)

  host_platform = None
  fetch_devil_deps = False
  if os_name in ('android', 'chromeos'):
    host_platform = '%s_%s' % (
        py_utils.GetHostOsName(), py_utils.GetHostArchName())
    dep_manager.PrefetchPaths(host_platform)
    if os_name == 'android':
      if host_platform == 'linux_x86_64':
        fetch_devil_deps = True
      else:
        logging.error('Devil only supports 64 bit linux as a host platform. '
                      'Android tests may fail.')
  if fetch_reference_chrome_binary:
    _FetchReferenceBrowserBinary(platform)
  # For now, handle client config separately because the BUILD.gn & .isolate
  # of telemetry tests in chromium src failed to include the files specified
  # in its client config.
  # (https://github.com/catapult-project/catapult/issues/2192)
  # For now this is ok because the client configs usually don't include cloud
  # storage infos.
  # TODO(crbug.com/1111556): remove the logic of swallowing exception once the
  # issue is fixed on Chromium side.
  if client_configs:
    manager = dependency_manager.DependencyManager(
        list(dependency_manager.BaseConfig(c) for c in client_configs))
    try:
      manager.PrefetchPaths(target_platform)
      if host_platform is not None:
        manager.PrefetchPaths(host_platform)
    except dependency_manager.NoPathFoundError as e:
      logging.error('Error when trying to prefetch paths for %s: %s',
                    target_platform, e)

  if fetch_devil_deps:
    devil_env.config.Initialize()
    devil_env.config.PrefetchPaths(arch=platform.GetArchName())
    devil_env.config.PrefetchPaths()


def ReinstallAndroidHelperIfNeeded(binary_name, install_path, device):
  """ Install a binary helper to a specific location.

  Args:
    binary_name: (str) The name of the binary from binary_dependencies.json
    install_path: (str) The path to install the binary at
    device: (device_utils.DeviceUtils) a device to install the helper to

  Raises:
    Exception: When the binary could not be fetched or could not be pushed to
      the device.
  """
  if (device.serial, install_path) in _installed_helpers:
    return
  host_path = FetchPath(binary_name, 'android', device.GetABI())
  if not host_path:
    raise Exception(
        '%s binary could not be fetched as %s' % (binary_name, host_path))
  device.PushChangedFiles([(host_path, install_path)])
  device.RunShellCommand(['chmod', '777', install_path], check_return=True)
  _installed_helpers.add((device.serial, install_path))


def _FetchReferenceBrowserBinary(platform):
  os_name = platform.GetOSName()
  if _IsChromeOSLocalMode(os_name):
    os_name = 'linux'
  arch_name = platform.GetArchName()
  manager = binary_manager.BinaryManager([CHROME_BINARY_CONFIG])
  if os_name == 'android':
    os_version = dependency_util.GetChromeApkOsVersion(
        platform.GetOSVersionName())
    manager.FetchPath(
        'chrome_stable', os_name, arch_name, os_version)
  else:
    manager.FetchPath(
        'chrome_stable', os_name, arch_name)


def UpdateDependency(dependency, dep_local_path, version,
                     os_name=None, arch_name=None):
  config = os.path.join(
      util.GetTelemetryDir(), 'telemetry', 'binary_dependencies.json')

  if not os_name:
    assert not arch_name, 'arch_name is specified but not os_name'
    os_name = py_utils.GetHostOsName()
    arch_name = py_utils.GetHostArchName()
  else:
    assert arch_name, 'os_name is specified but not arch_name'

  dep_platform = '%s_%s' % (os_name, arch_name)

  c = base_config.BaseConfig(config, writable=True)
  try:
    old_version = c.GetVersion(dependency, dep_platform)
    print('Updating from version: {}'.format(old_version))
  except ValueError:
    raise RuntimeError(
        ('binary_dependencies.json entry for %s missing or invalid; please '
         'add it first! (need download_path and path_within_archive)')
        % dep_platform)

  if dep_local_path:
    c.AddCloudStorageDependencyUpdateJob(
        dependency, dep_platform, dep_local_path, version=version,
        execute_job=True)
""" Oracle database backend for Django. Requires cx_Oracle: http://cx-oracle.sourceforge.net/ """ from __future__ import unicode_literals import decimal import re import sys import warnings def _setup_environment(environ): import platform # Cygwin requires some special voodoo to set the environment variables # properly so that Oracle will see them. if platform.system().upper().startswith('CYGWIN'): try: import ctypes except ImportError as e: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("Error loading ctypes: %s; " "the Oracle backend requires ctypes to " "operate correctly under Cygwin." % e) kernel32 = ctypes.CDLL('kernel32') for name, value in environ: kernel32.SetEnvironmentVariableA(name, value) else: import os os.environ.update(environ) _setup_environment([ # Oracle takes client-side character set encoding from the environment. ('NLS_LANG', '.UTF8'), # This prevents unicode from getting mangled by getting encoded into the # potentially non-unicode database character set. ('ORA_NCHAR_LITERAL_REPLACE', 'TRUE'), ]) try: import cx_Oracle as Database except ImportError as e: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("Error loading cx_Oracle module: %s" % e) try: import pytz except ImportError: pytz = None from django.db import utils from django.db.backends import * from django.db.backends.oracle.client import DatabaseClient from django.db.backends.oracle.creation import DatabaseCreation from django.db.backends.oracle.introspection import DatabaseIntrospection from django.utils.encoding import force_bytes, force_text DatabaseError = Database.DatabaseError IntegrityError = Database.IntegrityError if int(Database.version.split('.', 1)[0]) >= 5 and \ (int(Database.version.split('.', 2)[1]) >= 1 or not hasattr(Database, 'UNICODE')): convert_unicode = force_text else: convert_unicode = force_bytes class DatabaseFeatures(BaseDatabaseFeatures): empty_fetchmany_value = () needs_datetime_string_cast = False interprets_empty_strings_as_nulls = True uses_savepoints = True has_select_for_update = True has_select_for_update_nowait = True can_return_id_from_insert = True allow_sliced_subqueries = False supports_subqueries_in_group_by = False supports_transactions = True supports_timezones = False has_zoneinfo_database = pytz is not None supports_bitwise_or = False can_defer_constraint_checks = True ignores_nulls_in_unique_constraints = False has_bulk_insert = True supports_tablespaces = True supports_sequence_reset = False atomic_transactions = False class DatabaseOperations(BaseDatabaseOperations): compiler_module = "django.db.backends.oracle.compiler" def autoinc_sql(self, table, column): # To simulate auto-incrementing primary keys in Oracle, we have to # create a sequence and a trigger. 
sq_name = self._get_sequence_name(table) tr_name = self._get_trigger_name(table) tbl_name = self.quote_name(table) col_name = self.quote_name(column) sequence_sql = """ DECLARE i INTEGER; BEGIN SELECT COUNT(*) INTO i FROM USER_CATALOG WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE'; IF i = 0 THEN EXECUTE IMMEDIATE 'CREATE SEQUENCE "%(sq_name)s"'; END IF; END; /""" % locals() trigger_sql = """ CREATE OR REPLACE TRIGGER "%(tr_name)s" BEFORE INSERT ON %(tbl_name)s FOR EACH ROW WHEN (new.%(col_name)s IS NULL) BEGIN SELECT "%(sq_name)s".nextval INTO :new.%(col_name)s FROM dual; END; /""" % locals() return sequence_sql, trigger_sql def cache_key_culling_sql(self): return """ SELECT cache_key FROM (SELECT cache_key, rank() OVER (ORDER BY cache_key) AS rank FROM %s) WHERE rank = %%s + 1 """ def date_extract_sql(self, lookup_type, field_name): if lookup_type == 'week_day': # TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday. return "TO_CHAR(%s, 'D')" % field_name else: # http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions050.htm return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name) def date_interval_sql(self, sql, connector, timedelta): """ Implements the interval functionality for expressions format for Oracle: (datefield + INTERVAL '3 00:03:20.000000' DAY(1) TO SECOND(6)) """ minutes, seconds = divmod(timedelta.seconds, 60) hours, minutes = divmod(minutes, 60) days = str(timedelta.days) day_precision = len(days) fmt = "(%s %s INTERVAL '%s %02d:%02d:%02d.%06d' DAY(%d) TO SECOND(6))" return fmt % (sql, connector, days, hours, minutes, seconds, timedelta.microseconds, day_precision) def date_trunc_sql(self, lookup_type, field_name): # http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions230.htm#i1002084 if lookup_type in ('year', 'month'): return "TRUNC(%s, '%s')" % (field_name, lookup_type.upper()) else: return "TRUNC(%s)" % field_name # Oracle crashes with "ORA-03113: end-of-file on communication channel" # if the time zone name is passed in parameter. Use interpolation instead. # https://groups.google.com/forum/#!msg/django-developers/zwQju7hbG78/9l934yelwfsJ # This regexp matches all time zone names from the zoneinfo database. _tzname_re = re.compile(r'^[\w/:+-]+$') def _convert_field_to_tz(self, field_name, tzname): if not self._tzname_re.match(tzname): raise ValueError("Invalid time zone name: %s" % tzname) # Convert from UTC to local time, returning TIMESTAMP WITH TIME ZONE. result = "(FROM_TZ(%s, '0:00') AT TIME ZONE '%s')" % (field_name, tzname) # Extracting from a TIMESTAMP WITH TIME ZONE ignore the time zone. # Convert to a DATETIME, which is called DATE by Oracle. There's no # built-in function to do that; the easiest is to go through a string. result = "TO_CHAR(%s, 'YYYY-MM-DD HH24:MI:SS')" % result result = "TO_DATE(%s, 'YYYY-MM-DD HH24:MI:SS')" % result # Re-convert to a TIMESTAMP because EXTRACT only handles the date part # on DATE values, even though they actually store the time part. return "CAST(%s AS TIMESTAMP)" % result def datetime_extract_sql(self, lookup_type, field_name, tzname): if settings.USE_TZ: field_name = self._convert_field_to_tz(field_name, tzname) if lookup_type == 'week_day': # TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday. 
sql = "TO_CHAR(%s, 'D')" % field_name else: # http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions050.htm sql = "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name) return sql, [] def datetime_trunc_sql(self, lookup_type, field_name, tzname): if settings.USE_TZ: field_name = self._convert_field_to_tz(field_name, tzname) # http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions230.htm#i1002084 if lookup_type in ('year', 'month'): sql = "TRUNC(%s, '%s')" % (field_name, lookup_type.upper()) elif lookup_type == 'day': sql = "TRUNC(%s)" % field_name elif lookup_type == 'hour': sql = "TRUNC(%s, 'HH24')" % field_name elif lookup_type == 'minute': sql = "TRUNC(%s, 'MI')" % field_name else: sql = field_name # Cast to DATE removes sub-second precision. return sql, [] def convert_values(self, value, field): if isinstance(value, Database.LOB): value = value.read() if field and field.get_internal_type() == 'TextField': value = force_text(value) # Oracle stores empty strings as null. We need to undo this in # order to adhere to the Django convention of using the empty # string instead of null, but only if the field accepts the # empty string. if value is None and field and field.empty_strings_allowed: value = '' # Convert 1 or 0 to True or False elif value in (1, 0) and field and field.get_internal_type() in ('BooleanField', 'NullBooleanField'): value = bool(value) # Force floats to the correct type elif value is not None and field and field.get_internal_type() == 'FloatField': value = float(value) # Convert floats to decimals elif value is not None and field and field.get_internal_type() == 'DecimalField': value = util.typecast_decimal(field.format_number(value)) # cx_Oracle always returns datetime.datetime objects for # DATE and TIMESTAMP columns, but Django wants to see a # python datetime.date, .time, or .datetime. We use the type # of the Field to determine which to cast to, but it's not # always available. # As a workaround, we cast to date if all the time-related # values are 0, or to time if the date is 1/1/1900. # This could be cleaned a bit by adding a method to the Field # classes to normalize values from the database (the to_python # method is used for validation and isn't what we want here). elif isinstance(value, Database.Timestamp): if field and field.get_internal_type() == 'DateTimeField': pass elif field and field.get_internal_type() == 'DateField': value = value.date() elif field and field.get_internal_type() == 'TimeField' or (value.year == 1900 and value.month == value.day == 1): value = value.time() elif value.hour == value.minute == value.second == value.microsecond == 0: value = value.date() return value def deferrable_sql(self): return " DEFERRABLE INITIALLY DEFERRED" def drop_sequence_sql(self, table): return "DROP SEQUENCE %s;" % self.quote_name(self._get_sequence_name(table)) def fetch_returned_insert_id(self, cursor): return int(cursor._insert_id_var.getvalue()) def field_cast_sql(self, db_type, internal_type): if db_type and db_type.endswith('LOB'): return "DBMS_LOB.SUBSTR(%s)" else: return "%s" def last_executed_query(self, cursor, sql, params): # http://cx-oracle.sourceforge.net/html/cursor.html#Cursor.statement # The DB API definition does not define this attribute. statement = cursor.statement if statement and six.PY2 and not isinstance(statement, unicode): statement = statement.decode('utf-8') # Unlike Psycopg's `query` and MySQLdb`'s `_last_executed`, CxOracle's # `statement` doesn't contain the query parameters. refs #20010. 
return super(DatabaseOperations, self).last_executed_query(cursor, statement, params) def last_insert_id(self, cursor, table_name, pk_name): sq_name = self._get_sequence_name(table_name) cursor.execute('SELECT "%s".currval FROM dual' % sq_name) return cursor.fetchone()[0] def lookup_cast(self, lookup_type): if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'): return "UPPER(%s)" return "%s" def max_in_list_size(self): return 1000 def max_name_length(self): return 30 def prep_for_iexact_query(self, x): return x def process_clob(self, value): if value is None: return '' return force_text(value.read()) def quote_name(self, name): # SQL92 requires delimited (quoted) names to be case-sensitive. When # not quoted, Oracle has case-insensitive behavior for identifiers, but # always defaults to uppercase. # We simplify things by making Oracle identifiers always uppercase. if not name.startswith('"') and not name.endswith('"'): name = '"%s"' % util.truncate_name(name.upper(), self.max_name_length()) # Oracle puts the query text into a (query % args) construct, so % signs # in names need to be escaped. The '%%' will be collapsed back to '%' at # that stage so we aren't really making the name longer here. name = name.replace('%','%%') return name.upper() def random_function_sql(self): return "DBMS_RANDOM.RANDOM" def regex_lookup_9(self, lookup_type): raise NotImplementedError("Regexes are not supported in Oracle before version 10g.") def regex_lookup_10(self, lookup_type): if lookup_type == 'regex': match_option = "'c'" else: match_option = "'i'" return 'REGEXP_LIKE(%%s, %%s, %s)' % match_option def regex_lookup(self, lookup_type): # If regex_lookup is called before it's been initialized, then create # a cursor to initialize it and recur. self.connection.cursor() return self.connection.ops.regex_lookup(lookup_type) def return_insert_id(self): return "RETURNING %s INTO %%s", (InsertIdVar(),) def savepoint_create_sql(self, sid): return convert_unicode("SAVEPOINT " + self.quote_name(sid)) def savepoint_rollback_sql(self, sid): return convert_unicode("ROLLBACK TO SAVEPOINT " + self.quote_name(sid)) def sql_flush(self, style, tables, sequences, allow_cascade=False): # Return a list of 'TRUNCATE x;', 'TRUNCATE y;', # 'TRUNCATE z;'... style SQL statements if tables: # Oracle does support TRUNCATE, but it seems to get us into # FK referential trouble, whereas DELETE FROM table works. sql = ['%s %s %s;' % ( style.SQL_KEYWORD('DELETE'), style.SQL_KEYWORD('FROM'), style.SQL_FIELD(self.quote_name(table)) ) for table in tables] # Since we've just deleted all the rows, running our sequence # ALTER code will reset the sequence to 0. 
sql.extend(self.sequence_reset_by_name_sql(style, sequences)) return sql else: return [] def sequence_reset_by_name_sql(self, style, sequences): sql = [] for sequence_info in sequences: sequence_name = self._get_sequence_name(sequence_info['table']) table_name = self.quote_name(sequence_info['table']) column_name = self.quote_name(sequence_info['column'] or 'id') query = _get_sequence_reset_sql() % {'sequence': sequence_name, 'table': table_name, 'column': column_name} sql.append(query) return sql def sequence_reset_sql(self, style, model_list): from django.db import models output = [] query = _get_sequence_reset_sql() for model in model_list: for f in model._meta.local_fields: if isinstance(f, models.AutoField): table_name = self.quote_name(model._meta.db_table) sequence_name = self._get_sequence_name(model._meta.db_table) column_name = self.quote_name(f.column) output.append(query % {'sequence': sequence_name, 'table': table_name, 'column': column_name}) # Only one AutoField is allowed per model, so don't # continue to loop break for f in model._meta.many_to_many: if not f.rel.through: table_name = self.quote_name(f.m2m_db_table()) sequence_name = self._get_sequence_name(f.m2m_db_table()) column_name = self.quote_name('id') output.append(query % {'sequence': sequence_name, 'table': table_name, 'column': column_name}) return output def start_transaction_sql(self): return '' def tablespace_sql(self, tablespace, inline=False): if inline: return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace) else: return "TABLESPACE %s" % self.quote_name(tablespace) def value_to_db_datetime(self, value): if value is None: return None # Oracle doesn't support tz-aware datetimes if timezone.is_aware(value): if settings.USE_TZ: value = value.astimezone(timezone.utc).replace(tzinfo=None) else: raise ValueError("Oracle backend does not support timezone-aware datetimes when USE_TZ is False.") return six.text_type(value) def value_to_db_time(self, value): if value is None: return None if isinstance(value, six.string_types): return datetime.datetime.strptime(value, '%H:%M:%S') # Oracle doesn't support tz-aware times if timezone.is_aware(value): raise ValueError("Oracle backend does not support timezone-aware times.") return datetime.datetime(1900, 1, 1, value.hour, value.minute, value.second, value.microsecond) def year_lookup_bounds_for_date_field(self, value): first = '%s-01-01' second = '%s-12-31' return [first % value, second % value] def year_lookup_bounds_for_datetime_field(self, value): # The default implementation uses datetime objects for the bounds. # This must be overridden here, to use a formatted date (string) as # 'second' instead -- cx_Oracle chops the fraction-of-second part # off of datetime objects, leaving almost an entire second out of # the year under the default implementation. 
bounds = super(DatabaseOperations, self).year_lookup_bounds_for_datetime_field(value) if settings.USE_TZ: bounds = [b.astimezone(timezone.utc).replace(tzinfo=None) for b in bounds] return [b.isoformat(b' ') for b in bounds] def combine_expression(self, connector, sub_expressions): "Oracle requires special cases for %% and & operators in query expressions" if connector == '%%': return 'MOD(%s)' % ','.join(sub_expressions) elif connector == '&': return 'BITAND(%s)' % ','.join(sub_expressions) elif connector == '|': raise NotImplementedError("Bit-wise or is not supported in Oracle.") return super(DatabaseOperations, self).combine_expression(connector, sub_expressions) def _get_sequence_name(self, table): name_length = self.max_name_length() - 3 return '%s_SQ' % util.truncate_name(table, name_length).upper() def _get_trigger_name(self, table): name_length = self.max_name_length() - 3 return '%s_TR' % util.truncate_name(table, name_length).upper() def bulk_insert_sql(self, fields, num_values): items_sql = "SELECT %s FROM DUAL" % ", ".join(["%s"] * len(fields)) return " UNION ALL ".join([items_sql] * num_values) class _UninitializedOperatorsDescriptor(object): def __get__(self, instance, owner): # If connection.operators is looked up before a connection has been # created, transparently initialize connection.operators to avert an # AttributeError. if instance is None: raise AttributeError("operators not available as class attribute") # Creating a cursor will initialize the operators. instance.cursor().close() return instance.__dict__['operators'] class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'oracle' operators = _UninitializedOperatorsDescriptor() _standard_operators = { 'exact': '= %s', 'iexact': '= UPPER(%s)', 'contains': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'icontains': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', 'startswith': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'endswith': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'istartswith': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'iendswith': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", } _likec_operators = _standard_operators.copy() _likec_operators.update({ 'contains': "LIKEC %s ESCAPE '\\'", 'icontains': "LIKEC UPPER(%s) ESCAPE '\\'", 'startswith': "LIKEC %s ESCAPE '\\'", 'endswith': "LIKEC %s ESCAPE '\\'", 'istartswith': "LIKEC UPPER(%s) ESCAPE '\\'", 'iendswith': "LIKEC UPPER(%s) ESCAPE '\\'", }) Database = Database def __init__(self, *args, **kwargs): super(DatabaseWrapper, self).__init__(*args, **kwargs) self.features = DatabaseFeatures(self) use_returning_into = self.settings_dict["OPTIONS"].get('use_returning_into', True) self.features.can_return_id_from_insert = use_returning_into self.ops = DatabaseOperations(self) self.client = DatabaseClient(self) self.creation = DatabaseCreation(self) self.introspection = DatabaseIntrospection(self) self.validation = BaseDatabaseValidation(self) def _connect_string(self): settings_dict = self.settings_dict if not settings_dict['HOST'].strip(): settings_dict['HOST'] = 'localhost' if settings_dict['PORT'].strip(): dsn = Database.makedsn(settings_dict['HOST'], int(settings_dict['PORT']), settings_dict['NAME']) else: dsn = settings_dict['NAME'] return "%s/%s@%s" % (settings_dict['USER'], settings_dict['PASSWORD'], dsn) 
def get_connection_params(self): conn_params = self.settings_dict['OPTIONS'].copy() if 'use_returning_into' in conn_params: del conn_params['use_returning_into'] return conn_params def get_new_connection(self, conn_params): conn_string = convert_unicode(self._connect_string()) return Database.connect(conn_string, **conn_params) def init_connection_state(self): cursor = self.create_cursor() # Set the territory first. The territory overrides NLS_DATE_FORMAT # and NLS_TIMESTAMP_FORMAT to the territory default. When all of # these are set in single statement it isn't clear what is supposed # to happen. cursor.execute("ALTER SESSION SET NLS_TERRITORY = 'AMERICA'") # Set oracle date to ansi date format. This only needs to execute # once when we create a new connection. We also set the Territory # to 'AMERICA' which forces Sunday to evaluate to a '1' in # TO_CHAR(). cursor.execute( "ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS'" " NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'" + (" TIME_ZONE = 'UTC'" if settings.USE_TZ else '')) cursor.close() if 'operators' not in self.__dict__: # Ticket #14149: Check whether our LIKE implementation will # work for this connection or we need to fall back on LIKEC. # This check is performed only once per DatabaseWrapper # instance per thread, since subsequent connections will use # the same settings. cursor = self.create_cursor() try: cursor.execute("SELECT 1 FROM DUAL WHERE DUMMY %s" % self._standard_operators['contains'], ['X']) except DatabaseError: self.operators = self._likec_operators else: self.operators = self._standard_operators cursor.close() # There's no way for the DatabaseOperations class to know the # currently active Oracle version, so we do some setups here. # TODO: Multi-db support will need a better solution (a way to # communicate the current version). if self.oracle_version is not None and self.oracle_version <= 9: self.ops.regex_lookup = self.ops.regex_lookup_9 else: self.ops.regex_lookup = self.ops.regex_lookup_10 try: self.connection.stmtcachesize = 20 except: # Django docs specify cx_Oracle version 4.3.1 or higher, but # stmtcachesize is available only in 4.3.2 and up. pass def create_cursor(self): return FormatStylePlaceholderCursor(self.connection) def _commit(self): if self.connection is not None: try: return self.connection.commit() except Database.DatabaseError as e: # cx_Oracle 5.0.4 raises a cx_Oracle.DatabaseError exception # with the following attributes and values: # code = 2091 # message = 'ORA-02091: transaction rolled back # 'ORA-02291: integrity constraint (TEST_DJANGOTEST.SYS # _C00102056) violated - parent key not found' # We convert that particular case to our IntegrityError exception x = e.args[0] if hasattr(x, 'code') and hasattr(x, 'message') \ and x.code == 2091 and 'ORA-02291' in x.message: six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) raise # Oracle doesn't support savepoint commits. Ignore them. def _savepoint_commit(self, sid): pass def _set_autocommit(self, autocommit): with self.wrap_database_errors: self.connection.autocommit = autocommit def check_constraints(self, table_names=None): """ To check constraints, we set constraints to immediate. Then, when, we're done we must ensure they are returned to deferred. 
""" self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE') self.cursor().execute('SET CONSTRAINTS ALL DEFERRED') def is_usable(self): try: if hasattr(self.connection, 'ping'): # Oracle 10g R2 and higher self.connection.ping() else: # Use a cx_Oracle cursor directly, bypassing Django's utilities. self.connection.cursor().execute("SELECT 1 FROM DUAL") except Database.Error: return False else: return True @cached_property def oracle_version(self): with self.temporary_connection(): version = self.connection.version try: return int(version.split('.')[0]) except ValueError: return None class OracleParam(object): """ Wrapper object for formatting parameters for Oracle. If the string representation of the value is large enough (greater than 4000 characters) the input size needs to be set as CLOB. Alternatively, if the parameter has an `input_size` attribute, then the value of the `input_size` attribute will be used instead. Otherwise, no input size will be set for the parameter when executing the query. """ def __init__(self, param, cursor, strings_only=False): # With raw SQL queries, datetimes can reach this function # without being converted by DateTimeField.get_db_prep_value. if settings.USE_TZ and isinstance(param, datetime.datetime): if timezone.is_naive(param): warnings.warn("Oracle received a naive datetime (%s)" " while time zone support is active." % param, RuntimeWarning) default_timezone = timezone.get_default_timezone() param = timezone.make_aware(param, default_timezone) param = param.astimezone(timezone.utc).replace(tzinfo=None) # Oracle doesn't recognize True and False correctly in Python 3. # The conversion done below works both in 2 and 3. if param is True: param = "1" elif param is False: param = "0" if hasattr(param, 'bind_parameter'): self.force_bytes = param.bind_parameter(cursor) elif isinstance(param, six.memoryview): self.force_bytes = param else: self.force_bytes = convert_unicode(param, cursor.charset, strings_only) if hasattr(param, 'input_size'): # If parameter has `input_size` attribute, use that. self.input_size = param.input_size elif isinstance(param, six.string_types) and len(param) > 4000: # Mark any string param greater than 4000 characters as a CLOB. self.input_size = Database.CLOB else: self.input_size = None class VariableWrapper(object): """ An adapter class for cursor variables that prevents the wrapped object from being converted into a string when used to instanciate an OracleParam. This can be used generally for any other object that should be passed into Cursor.execute as-is. """ def __init__(self, var): self.var = var def bind_parameter(self, cursor): return self.var def __getattr__(self, key): return getattr(self.var, key) def __setattr__(self, key, value): if key == 'var': self.__dict__[key] = value else: setattr(self.var, key, value) class InsertIdVar(object): """ A late-binding cursor variable that can be passed to Cursor.execute as a parameter, in order to receive the id of the row created by an insert statement. """ def bind_parameter(self, cursor): param = cursor.cursor.var(Database.NUMBER) cursor._insert_id_var = param return param class FormatStylePlaceholderCursor(object): """ Django uses "format" (e.g. '%s') style placeholders, but Oracle uses ":var" style. This fixes it -- but note that if you want to use a literal "%s" in a query, you'll need to use "%%s". We also do automatic conversion between Unicode on the Python side and UTF-8 -- for talking to Oracle -- in here. 
""" charset = 'utf-8' def __init__(self, connection): self.cursor = connection.cursor() # Necessary to retrieve decimal values without rounding error. self.cursor.numbersAsStrings = True # Default arraysize of 1 is highly sub-optimal. self.cursor.arraysize = 100 def _format_params(self, params): try: return dict((k,OracleParam(v, self, True)) for k,v in params.items()) except AttributeError: return tuple([OracleParam(p, self, True) for p in params]) def _guess_input_sizes(self, params_list): # Try dict handling; if that fails, treat as sequence if hasattr(params_list[0], 'keys'): sizes = {} for params in params_list: for k, value in params.items(): if value.input_size: sizes[k] = value.input_size self.setinputsizes(**sizes) else: # It's not a list of dicts; it's a list of sequences sizes = [None] * len(params_list[0]) for params in params_list: for i, value in enumerate(params): if value.input_size: sizes[i] = value.input_size self.setinputsizes(*sizes) def _param_generator(self, params): # Try dict handling; if that fails, treat as sequence if hasattr(params, 'items'): return dict((k, v.force_bytes) for k,v in params.items()) else: return [p.force_bytes for p in params] def _fix_for_params(self, query, params): # cx_Oracle wants no trailing ';' for SQL statements. For PL/SQL, it # it does want a trailing ';' but not a trailing '/'. However, these # characters must be included in the original query in case the query # is being passed to SQL*Plus. if query.endswith(';') or query.endswith('/'): query = query[:-1] if params is None: params = [] query = convert_unicode(query, self.charset) elif hasattr(params, 'keys'): # Handle params as dict args = dict((k, ":%s"%k) for k in params.keys()) query = convert_unicode(query % args, self.charset) else: # Handle params as sequence args = [(':arg%d' % i) for i in range(len(params))] query = convert_unicode(query % tuple(args), self.charset) return query, self._format_params(params) def execute(self, query, params=None): query, params = self._fix_for_params(query, params) self._guess_input_sizes([params]) try: return self.cursor.execute(query, self._param_generator(params)) except Database.DatabaseError as e: # cx_Oracle <= 4.4.0 wrongly raises a DatabaseError for ORA-01400. if hasattr(e.args[0], 'code') and e.args[0].code == 1400 and not isinstance(e, IntegrityError): six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) raise def executemany(self, query, params=None): if not params: # No params given, nothing to do return None # uniform treatment for sequences and iterables params_iter = iter(params) query, firstparams = self._fix_for_params(query, next(params_iter)) # we build a list of formatted params; as we're going to traverse it # more than once, we can't make it lazy by using a generator formatted = [firstparams]+[self._format_params(p) for p in params_iter] self._guess_input_sizes(formatted) try: return self.cursor.executemany(query, [self._param_generator(p) for p in formatted]) except Database.DatabaseError as e: # cx_Oracle <= 4.4.0 wrongly raises a DatabaseError for ORA-01400. 
if hasattr(e.args[0], 'code') and e.args[0].code == 1400 and not isinstance(e, IntegrityError): six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) raise def fetchone(self): row = self.cursor.fetchone() if row is None: return row return _rowfactory(row, self.cursor) def fetchmany(self, size=None): if size is None: size = self.arraysize return tuple([_rowfactory(r, self.cursor) for r in self.cursor.fetchmany(size)]) def fetchall(self): return tuple([_rowfactory(r, self.cursor) for r in self.cursor.fetchall()]) def var(self, *args): return VariableWrapper(self.cursor.var(*args)) def arrayvar(self, *args): return VariableWrapper(self.cursor.arrayvar(*args)) def __getattr__(self, attr): if attr in self.__dict__: return self.__dict__[attr] else: return getattr(self.cursor, attr) def __iter__(self): return CursorIterator(self.cursor) class CursorIterator(six.Iterator): """Cursor iterator wrapper that invokes our custom row factory.""" def __init__(self, cursor): self.cursor = cursor self.iter = iter(cursor) def __iter__(self): return self def __next__(self): return _rowfactory(next(self.iter), self.cursor) def _rowfactory(row, cursor): # Cast numeric values as the appropriate Python type based upon the # cursor description, and convert strings to unicode. casted = [] for value, desc in zip(row, cursor.description): if value is not None and desc[1] is Database.NUMBER: precision, scale = desc[4:6] if scale == -127: if precision == 0: # NUMBER column: decimal-precision floating point # This will normally be an integer from a sequence, # but it could be a decimal value. if '.' in value: value = decimal.Decimal(value) else: value = int(value) else: # FLOAT column: binary-precision floating point. # This comes from FloatField columns. value = float(value) elif precision > 0: # NUMBER(p,s) column: decimal-precision fixed point. # This comes from IntField and DecimalField columns. if scale == 0: value = int(value) else: value = decimal.Decimal(value) elif '.' in value: # No type information. This normally comes from a # mathematical expression in the SELECT list. Guess int # or Decimal based on whether it has a decimal point. value = decimal.Decimal(value) else: value = int(value) # datetimes are returned as TIMESTAMP, except the results # of "dates" queries, which are returned as DATETIME. elif desc[1] in (Database.TIMESTAMP, Database.DATETIME): # Confirm that dt is naive before overwriting its tzinfo. if settings.USE_TZ and value is not None and timezone.is_naive(value): value = value.replace(tzinfo=timezone.utc) elif desc[1] in (Database.STRING, Database.FIXED_CHAR, Database.LONG_STRING): value = to_unicode(value) casted.append(value) return tuple(casted) def to_unicode(s): """ Convert strings to Unicode objects (and return all other data types unchanged). """ if isinstance(s, six.string_types): return force_text(s) return s def _get_sequence_reset_sql(): # TODO: colorize this SQL code with style.SQL_KEYWORD(), etc. return """ DECLARE table_value integer; seq_value integer; BEGIN SELECT NVL(MAX(%(column)s), 0) INTO table_value FROM %(table)s; SELECT NVL(last_number - cache_size, 0) INTO seq_value FROM user_sequences WHERE sequence_name = '%(sequence)s'; WHILE table_value > seq_value LOOP SELECT "%(sequence)s".nextval INTO seq_value FROM dual; END LOOP; END; /"""
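# ---------------------------------------------------------------------------
# A standalone sketch (not Django API) of the placeholder rewrite that
# FormatStylePlaceholderCursor._fix_for_params() performs above for sequence
# params: Django's format-style '%s' parameters are interpolated away into the
# ':arg0', ':arg1', ... bind names that cx_Oracle expects. Re-keying the values
# into a dict for named binding is one possible way to pair them up; Django
# itself passes the sequence positionally.
def to_oracle_placeholders(query, params):
    names = tuple(':arg%d' % i for i in range(len(params)))
    return query % names, dict(zip((n[1:] for n in names), params))


# to_oracle_placeholders('SELECT * FROM t WHERE a = %s AND b = %s', (1, 2))
# -> ('SELECT * FROM t WHERE a = :arg0 AND b = :arg1', {'arg0': 1, 'arg1': 2})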
""" lantz.drivers.andor.ccd ~~~~~~~~~~~~~~~~~~~~~~~ Low level driver wrapping library for CCD and Intensified CCD cameras. Only functions for iXon EMCCD cameras were tested. Only tested in Windows OS. The driver was written for the single-camera scenario. If more than one camera is present, some 'read_once=True' should be erased but it shouldn't be necessary to make any more changes. Sources:: - Andor SDK 2.96 Manual :copyright: 2015 by Lantz Authors, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ import numpy as np import ctypes as ct from collections import namedtuple from lantz import Driver, Feat, Action, DictFeat from lantz.errors import InstrumentError from lantz.foreign import LibraryDriver from lantz import Q_ degC = Q_(1, 'degC') us = Q_(1, 'us') MHz = Q_(1, 'MHz') seg = Q_(1, 's') _ERRORS = { 20002: 'DRV_SUCCESS', 20003: 'DRV_VXDNOTINSTALLED', 20004: 'DRV_ERROR_SCAN', 20005: 'DRV_ERROR_CHECK_SUM', 20006: 'DRV_ERROR_FILELOAD', 20007: 'DRV_UNKNOWN_FUNCTION', 20008: 'DRV_ERROR_VXD_INIT', 20009: 'DRV_ERROR_ADDRESS', 20010: 'DRV_ERROR_PAGELOCK', 20011: 'DRV_ERROR_PAGE_UNLOCK', 20012: 'DRV_ERROR_BOARDTEST', 20013: 'Unable to communicate with card.', 20014: 'DRV_ERROR_UP_FIFO', 20015: 'DRV_ERROR_PATTERN', 20017: 'DRV_ACQUISITION_ERRORS', 20018: 'Computer unable to read the data via the ISA slot at the required rate.', 20019: 'DRV_ACQ_DOWNFIFO_FULL', 20020: 'RV_PROC_UNKNOWN_INSTRUCTION', 20021: 'DRV_ILLEGAL_OP_CODE', 20022: 'Unable to meet Kinetic cycle time.', 20023: 'Unable to meet Accumulate cycle time.', 20024: 'No acquisition has taken place', 20026: 'Overflow of the spool buffer.', 20027: 'DRV_SPOOLSETUPERROR', 20033: 'DRV_TEMPERATURE_CODES', 20034: 'Temperature is OFF.', 20035: 'Temperature reached but not stabilized.', 20036: 'Temperature has stabilized at set point.', 20037: 'Temperature has not reached set point.', 20038: 'DRV_TEMPERATURE_OUT_RANGE', 20039: 'DRV_TEMPERATURE_NOT_SUPPORTED', 20040: 'Temperature had stabilized but has since drifted.', 20049: 'DRV_GENERAL_ERRORS', 20050: 'DRV_INVALID_AUX', 20051: 'DRV_COF_NOTLOADED', 20052: 'DRV_FPGAPROG', 20053: 'DRV_FLEXERROR', 20054: 'DRV_GPIBERROR', 20064: 'DRV_DATATYPE', 20065: 'DRV_DRIVER_ERRORS', 20066: 'Invalid parameter 1', 20067: 'Invalid parameter 2', 20068: 'Invalid parameter 3', 20069: 'Invalid parameter 4', 20070: 'DRV_INIERROR', 20071: 'DRV_COFERROR', 20072: 'Acquisition in progress', 20073: 'The system is not currently acquiring', 20074: 'DRV_TEMPCYCLE', 20075: 'System not initialized', 20076: 'DRV_P5INVALID', 20077: 'DRV_P6INVALID', 20078: 'Not a valid mode', 20079: 'DRV_INVALID_FILTER', 20080: 'DRV_I2CERRORS', 20081: 'DRV_DRV_I2CDEVNOTFOUND', 20082: 'DRV_I2CTIMEOUT', 20083: 'DRV_P7INVALID', 20089: 'DRV_USBERROR', 20090: 'DRV_IOCERROR', 20091: 'DRV_VRMVERSIONERROR', 20093: 'DRV_USB_INTERRUPT_ENDPOINT_ERROR', 20094: 'DRV_RANDOM_TRACK_ERROR', 20095: 'DRV_INVALID_TRIGGER_MODE', 20096: 'DRV_LOAD_FIRMWARE_ERROR', 20097: 'DRV_DIVIDE_BY_ZERO_ERROR', 20098: 'DRV_INVALID_RINGEXPOSURES', 20099: 'DRV_BINNING_ERROR', 20990: 'No camera present', 20991: 'Feature not supported on this camera.', 20992: 'Feature is not available at the moment.', 20115: 'DRV_ERROR_MAP', 20116: 'DRV_ERROR_UNMAP', 20117: 'DRV_ERROR_MDL', 20118: 'DRV_ERROR_UNMDL', 20119: 'DRV_ERROR_BUFFSIZE', 20121: 'DRV_ERROR_NOHANDLE', 20130: 'DRV_GATING_NOT_AVAILABLE', 20131: 'DRV_FPGA_VOLTAGE_ERROR', 20100: 'DRV_INVALID_AMPLIFIER', 20101: 'DRV_INVALID_COUNTCONVERT_MODE' } class CCD(LibraryDriver): LIBRARY_NAME = 'atmcd64d.dll' def 
__init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.cameraIndex = ct.c_int(0) def _patch_functions(self): internal = self.lib.internal internal.GetCameraSerialNumber.argtypes = [ct.pointer(ct.c_uint)] internal.Filter_SetAveragingFactor.argtypes = [ct.c_int] internal.Filter_SetThreshold.argtypes = ct.c_float internal.Filter_GetThreshold.argtypes = ct.c_float def _return_handler(self, func_name, ret_value): excl_func = ['GetTemperatureF', 'IsCountConvertModeAvailable', 'IsAmplifierAvailable', 'IsTriggerModeAvailable'] if ret_value != 20002 and func_name not in excl_func: raise InstrumentError('{}'.format(_ERRORS[ret_value])) return ret_value def initialize(self): """ This function will initialize the Andor SDK System. As part of the initialization procedure on some cameras (i.e. Classic, iStar and earlier iXion) the DLL will need access to a DETECTOR.INI which contains information relating to the detector head, number pixels, readout speeds etc. If your system has multiple cameras then see the section Controlling multiple cameras. """ self.lib.Initialize() self.triggers = {'Internal': 0, 'External': 1, 'External Start': 6, 'External Exposure': 7, 'External FVB EM': 9, 'Software Trigger': 10, 'External Charge Shifting': 12} self.savetypes = {'Signed16bits': 1, 'Signed32bits': 2, 'Float': 3} # Initial values self.readout_packing_state = False self.readout_packing = self.readout_packing_state self.readout_mode_mode = 'Image' self.readout_mode = self.readout_mode_mode self.photon_counting_mode_state = False self.photon_counting_mode = self.photon_counting_mode_state self.frame_transfer_mode_state = False self.frame_transfer_mode = self.frame_transfer_mode_state self.fan_mode_index = 'onfull' self.fan_mode = self.fan_mode_index self.EM_gain_mode_index = 'RealGain' self.EM_gain_mode = self.EM_gain_mode_index self.cooled_on_shutdown_value = False self.cooled_on_shutdown = self.cooled_on_shutdown_value self.baseline_offset_value = 100 self.baseline_offset = self.baseline_offset_value self.adv_trigger_mode_state = True self.adv_trigger_mode = self.adv_trigger_mode_state self.acq_mode = 'Single Scan' self.acquisition_mode = self.acq_mode self.amp_typ = 0 self.horiz_shift_speed_index = 0 self.horiz_shift_speed = self.horiz_shift_speed_index self.vert_shift_speed_index = 0 self.vert_shift_speed = self.vert_shift_speed_index self.preamp_index = 0 self.preamp = self.preamp_index self.temperature_sp = 0 * degC self.temperature_setpoint = self.temperature_sp self.auxout = np.zeros(4, dtype=bool) for i in np.arange(1, 5): self.out_aux_port[i] = False self.trigger_mode_index = 'Internal' self.trigger_mode = self.trigger_mode_index def finalize(self): """Finalize Library. Concluding function. """ if self.status != 'Camera is idle, waiting for instructions.': self.abort_acquisition() self.cooler_on = False self.free_int_mem() self.lib.ShutDown() ### SYSTEM INFORMATION @Feat(read_once=True) def ncameras(self): """This function returns the total number of Andor cameras currently installed. It is possible to call this function before any of the cameras are initialized. """ n = ct.c_long() self.lib.GetAvailableCameras(ct.pointer(n)) return n.value def camera_handle(self, index): """This function returns the handle for the camera specified by cameraIndex. When multiple Andor cameras are installed the handle of each camera must be retrieved in order to select a camera using the SetCurrentCamera function. The number of cameras can be obtained using the GetAvailableCameras function. 
:param index: index of any of the installed cameras. Valid values: 0 to NumberCameras-1 where NumberCameras is the value returned by the GetAvailableCameras function. """ index = ct.c_long(index) handle = ct.c_long() self.lib.GetCameraHandle(index, ct.pointer(handle)) return handle.value @Feat() def current_camera(self): """When multiple Andor cameras are installed this function allows the user to select which camera is currently active. Once a camera has been selected the other functions can be called as normal but they will only apply to the selected camera. If only 1 camera is installed calling this function is not required since that camera will be selected by default. """ n = ct.c_long() # current camera handler self.lib.GetCurrentCamera(ct.pointer(n)) return n.value @current_camera.setter def current_camera(self, value): value = ct.c_long(value) self.lib.SetCurrentCamera(value.value) # needs camera handler @Feat(read_once=True) def idn(self): """Identification of the device """ hname = (ct.c_char * 100)() self.lib.GetHeadModel(ct.pointer(hname)) hname = str(hname.value)[2:-1] sn = ct.c_uint() self.lib.GetCameraSerialNumber(ct.pointer(sn)) return 'Andor ' + hname + ', serial number ' + str(sn.value) @Feat(read_once=True) def hardware_version(self): pcb, decode = ct.c_uint(), ct.c_uint() dummy1, dummy2 = ct.c_uint(), ct.c_uint() firmware_ver, firmware_build = ct.c_uint(), ct.c_uint() self.lib.GetHardwareVersion(ct.pointer(pcb), ct.pointer(decode), ct.pointer(dummy1), ct.pointer(dummy2), ct.pointer(firmware_ver), ct.pointer(firmware_build)) results = namedtuple('hardware_versions', 'PCB Flex10K CameraFirmware CameraFirmwareBuild') return results(pcb.value, decode.value, firmware_ver.value, firmware_build.value) @Feat(read_once=True) def software_version(self): eprom, coffile, vxdrev = ct.c_uint(), ct.c_uint(), ct.c_uint() vxdver, dllrev, dllver = ct.c_uint(), ct.c_uint(), ct.c_uint() self.lib.GetSoftwareVersion(ct.pointer(eprom), ct.pointer(coffile), ct.pointer(vxdrev), ct.pointer(vxdver), ct.pointer(dllrev), ct.pointer(dllver)) results = namedtuple('software_versions', 'EPROM COF DriverRev DriverVer DLLRev DLLVer') return results(eprom.value, coffile.value, vxdrev.value, vxdver.value, dllrev.value, dllver.value) # TODO: Make sense of this: @Feat(read_once=True) def capabilities(self): """This function will fill in an AndorCapabilities structure with the capabilities associated with the connected camera. Individual capabilities are determined by examining certain bits and combinations of bits in the member variables of the AndorCapabilites structure. """ class Capabilities(ct.Structure): _fields_ = [("Size", ct.c_ulong), ("AcqModes", ct.c_ulong), ("ReadModes", ct.c_ulong), ("FTReadModes", ct.c_ulong), ("TriggerModes", ct.c_ulong), ("CameraType", ct.c_ulong), ("PixelModes", ct.c_ulong), ("SetFunctions", ct.c_ulong), ("GetFunctions", ct.c_ulong), ("Features", ct.c_ulong), ("PCICard", ct.c_ulong), ("EMGainCapability", ct.c_ulong)] stru = Capabilities() stru.Size = ct.sizeof(stru) self.lib.GetCapabilities(ct.pointer(stru)) return stru @Feat(read_once=True) def controller_card(self): """This function will retrieve the type of PCI controller card included in your system. This function is not applicable for USB systems. The maximum number of characters that can be returned from this function is 10. 
""" model = ct.c_wchar_p() self.lib.GetControllerCardModel(ct.pointer(model)) return model.value @Feat(read_once=True) def count_convert_wavelength_range(self): """This function returns the valid wavelength range available in Count Convert mode.""" mini = ct.c_float() maxi = ct.c_float() self.lib.GetCountConvertWavelengthRange(ct.pointer(mini), ct.pointer(maxi)) return (mini.value, maxi.value) @Feat(read_once=True) def detector_shape(self): xp, yp = ct.c_int(), ct.c_int() self.lib.GetDetector(ct.pointer(xp), ct.pointer(yp)) return (xp.value, yp.value) @Feat(read_once=True) def px_size(self): """This function returns the dimension of the pixels in the detector in microns. """ xp, yp = ct.c_float(), ct.c_float() self.lib.GetPixelSize(ct.pointer(xp), ct.pointer(yp)) return (xp.value, yp.value) def QE(self, wl): """Returns the percentage QE for a particular head model at a user specified wavelength. """ hname = (ct.c_char * 100)() self.lib.GetHeadModel(ct.pointer(hname)) wl = ct.c_float(wl) qe = ct.c_float() self.lib.GetQE(ct.pointer(hname), wl, ct.c_uint(0), ct.pointer(qe)) return qe.value def sensitivity(self, ad, amp, i, pa): """This function returns the sensitivity for a particular speed. """ sens = ct.c_float() ad, amp, i, pa = ct.c_int(ad), ct.c_int(amp), ct.c_int(i), ct.c_int(pa) self.lib.GetSensitivity(ad, amp, i, pa, ct.pointer(sens)) return sens.value def count_convert_available(self, mode): """This function checks if the hardware and current settings permit the use of the specified Count Convert mode. """ mode = ct.c_int(mode) ans = self.lib.IsCountConvertModeAvailable(mode) if ans == 20002: return True else: return False ### SHUTTER # I couldn't find a better way to do this... sorry @Action() def shutter(self, typ, mode, ext_closing, ext_opening, ext_mode): """This function expands the control offered by SetShutter to allow an external shutter and internal shutter to be controlled independently (only available on some cameras – please consult your Camera User Guide). The typ parameter allows the user to control the TTL signal output to an external shutter. The opening and closing times specify the length of time required to open and close the shutter (this information is required for calculating acquisition timings – see SHUTTER TRANSFER TIME). The mode and extmode parameters control the behaviour of the internal and external shutters. To have an external shutter open and close automatically in an experiment, set the mode parameter to “Open” and set the extmode parameter to “Auto”. To have an internal shutter open and close automatically in an experiment, set the extmode parameter to “Open” and set the mode parameter to “Auto”. To not use any shutter in the experiment, set both shutter modes to permanently open. :param typ: 0 (or 1) Output TTL low (or high) signal to open shutter. :param mode: Internal shutter: 0 Fully Auto, 1 Permanently Open, 2 Permanently Closed, 4 Open for FVB series, 5 Open for any series. :param ext_closing: Time shutter takes to close (milliseconds) :param ext_opening: Time shutter takes to open (milliseconds) :param ext_mode: External shutter: 0 Fully Auto, 1 Permanently Open, 2 Permanently Closed, 4 Open for FVB series, 5 Open for any series. """ self.lib.SetShutterEx(ct.c_int(typ), ct.c_int(mode), ct.c_int(ext_closing), ct.c_int(ext_opening), ct.c_int(ext_mode)) @Feat(read_once=True) def shutter_min_times(self): """ This function will return the minimum opening and closing times in milliseconds for the shutter on the current camera. 
""" otime, ctime = ct.c_int(), ct.c_int() self.lib.GetShutterMinTimes(ct.pointer(ctime), ct.pointer(otime)) return (otime.value, ctime.value) @Feat(read_once=True) def has_mechanical_shutter(self): state = ct.c_int() self.lib.IsInternalMechanicalShutter(ct.pointer(state)) return bool(state.value) ### TEMPERATURE @Feat(read_once=True, units='degC') def min_temperature(self): """This function returns the valid range of temperatures in centigrads to which the detector can be cooled. """ mini, maxi = ct.c_int(), ct.c_int() self.lib.GetTemperatureRange(ct.pointer(mini), ct.pointer(maxi)) return mini.value @Feat(read_once=True, units='degC') def max_temperature(self): """This function returns the valid range of temperatures in centigrads to which the detector can be cooled. """ mini, maxi = ct.c_int(), ct.c_int() self.lib.GetTemperatureRange(ct.pointer(mini), ct.pointer(maxi)) return maxi.value @Feat() def temperature_status(self): """This function returns the temperature of the detector to the nearest degree. It also gives the status of cooling process. """ temp = ct.c_float() ans = self.lib.GetTemperatureF(ct.pointer(temp)) return _ERRORS[ans] @Feat(units='degC') def temperature(self): """This function returns the temperature of the detector to the nearest degree. It also gives the status of cooling process. """ temp = ct.c_float() self.lib.GetTemperatureF(ct.pointer(temp)) return temp.value @Feat(units='degC') def temperature_setpoint(self): return self.temperature_sp @temperature_setpoint.setter def temperature_setpoint(self, value): self.temperature_sp = value value = ct.c_int(int(value)) self.lib.SetTemperature(value) @Feat(values={True: 1, False: 0}) def cooler_on(self): state = ct.c_int() self.lib.IsCoolerOn(ct.pointer(state)) return state.value @cooler_on.setter def cooler_on(self, value): if value: self.lib.CoolerON() else: self.lib.CoolerOFF() @Feat(values={True: 1, False: 0}) def cooled_on_shutdown(self): """This function determines whether the cooler is switched off when the camera is shut down. """ return self.cooled_on_shutdown_value @cooled_on_shutdown.setter def cooled_on_shutdown(self, state): ans = self.lib.SetCoolerMode(ct.c_int(state)) if ans == 20002: self.cooled_on_shutdown_value = state @Feat(values={'onfull': 0, 'onlow': 1, 'off': 2}) def fan_mode(self): """Allows the user to control the mode of the camera fan. If the system is cooled, the fan should only be turned off for short periods of time. During this time the body of the camera will warm up which could compromise cooling capabilities. If the camera body reaches too high a temperature, depends on camera, the buzzer will sound. If this happens, turn off the external power supply and allow the system to stabilize before continuing. """ return self.fan_mode_index @fan_mode.setter def fan_mode(self, mode): ans = self.lib.SetFanMode(ct.c_int(mode)) if ans == 20002: self.fan_mode_index = mode ### FILTERS @Feat() def averaging_factor(self): """Averaging factor to be used with the recursive filter. For information on the various data averaging filters available see DATA AVERAGING FILTERS in the Special Guides section of the manual. """ af = ct.c_uint() self.lib.Filter_GetAveragingFactor(ct.pointer(af)) return af.value @averaging_factor.setter def averaging_factor(self, value): self.lib.Filter_SetAveragingFactor(ct.c_uint(value)) @Feat() def averaging_frame_count(self): """Number of frames to be used when using the frame averaging filter. 
""" fc = ct.c_uint() self.lib.Filter_GetAveragingFrameCount(ct.pointer(fc)) return fc.value @averaging_frame_count.setter def averaging_frame_count(self, value): self.lib.Filter_SetAveragingFrameCount(ct.c_uint(value)) @Feat(values={'NAF': 0, 'RAF': 5, 'FAF': 6}) def averaging_mode(self): """Current averaging mode. Valid options are: 0 – No Averaging Filter 5 – Recursive Averaging Filter 6 – Frame Averaging Filter """ i = ct.c_int() self.lib.Filter_GetDataAveragingMode(ct.pointer(i)) return i.value @averaging_mode.setter def averaging_mode(self, value): self.lib.Filter_SetDataAveragingMode(ct.c_int(value)) @Feat(values={'NF': 0, 'MF': 1, 'LAF': 2, 'IRF': 3, 'NTF': 4}) def noise_filter_mode(self): """Set the Noise Filter to use; For information on the various spurious noise filters available see SPURIOUS NOISE FILTERS in the Special Guides section of the manual. Valid options are: 0 – No Averaging Filter 1 – Median Filter 2 – Level Above Filter 3 – Interquartile Range Filter 4 – Noise Threshold Filter """ i = ct.c_uint() self.lib.Filter_GetMode(ct.pointer(i)) return i.value @noise_filter_mode.setter def noise_filter_mode(self, value): self.lib.Filter_SetMode(ct.c_uint(value)) @Feat() def filter_threshold(self): """Sets the threshold value for the Noise Filter. For information on the various spurious noise filters available see SPURIOUS NOISE FILTERS in the Special Guides section of the manual. Valid values are: 0 – 65535 for Level Above filte 0 – 10 for all other filters. """ f = ct.c_float() self.lib.Filter_GetThreshold(ct.pointer(f)) return f.value @filter_threshold.setter def filter_threshold(self, value): self.lib.Filter_SetThreshold(ct.c_float(value)) @Feat(values={True: 2, False: 0}) def cr_filter_enabled(self): """This function will set the state of the cosmic ray filter mode for future acquisitions. If the filter mode is on, consecutive scans in an accumulation will be compared and any cosmic ray-like features that are only present in one scan will be replaced with a scaled version of the corresponding pixel value in the correct scan. """ i = ct.c_int() self.lib.GetFilterMode(ct.pointer(i)) return i.value @cr_filter_enabled.setter def cr_filter_enabled(self, value): self.lib.SetFilterMode(ct.c_int(value)) ### PHOTON COUNTING MODE @Feat(values={True: 1, False: 0}) # FIXME: untested def photon_counting_mode(self): """This function activates the photon counting option. """ return self.photon_counting_mode_state @photon_counting_mode.setter def photon_counting_mode(self, state): ans = self.lib.SetPhotonCounting(ct.c_int(state)) if ans == 20002: self.photon_counting_mode_state = state @Feat(read_once=True) def n_photon_counting_div(self): """Available in some systems is photon counting mode. This function gets the number of photon counting divisions available. The functions SetPhotonCounting and SetPhotonCountingThreshold can be used to specify which of these divisions is to be used. """ inti = ct.c_ulong() self.lib.GetNumberPhotonCountingDivisions(ct.pointer(inti)) return inti.value @Action() # untested def set_photon_counting_divs(self, n, thres): """This function sets the thresholds for the photon counting option. """ thres = ct.c_long(thres) self.lib.SetPhotonCountingDivisions(ct.c_ulong(n), ct.pointer(thres)) @Action() def set_photon_counting_thres(self, mini, maxi): """This function sets the minimum and maximum threshold in counts (1-65535) for the photon counting option. 
""" self.lib.SetPhotonCountingThreshold(ct.c_long(mini), ct.c_long(maxi)) ### FAST KINETICS MODE @Feat(units='s') def FK_exposure_time(self): """This function will return the current “valid” exposure time for a fast kinetics acquisition. This function should be used after all the acquisitions settings have been set, i.e. SetFastKinetics and SetFKVShiftSpeed. The value returned is the actual time used in subsequent acquisitions. """ f = ct.c_float() self.lib.GetFKExposureTime(ct.pointer(f)) return f.value ### ACQUISITION HANDLING @Feat(values={'Single Scan': 1, 'Accumulate': 2, 'Kinetics': 3, 'Fast Kinetics': 4, 'Run till abort': 5}) def acquisition_mode(self): """This function will set the acquisition mode to be used on the next StartAcquisition. NOTE: In Mode 5 the system uses a “Run Till Abort” acquisition mode. In Mode 5 only, the camera continually acquires data until the AbortAcquisition function is called. By using the SetDriverEvent function you will be notified as each acquisition is completed. """ return self.acq_mode @acquisition_mode.setter def acquisition_mode(self, mode): ans = self.lib.SetAcquisitionMode(ct.c_int(mode)) if ans == 20002: self.acq_mode = mode @Action() def prepare_acquisition(self): """This function reads the current acquisition setup and allocates and configures any memory that will be used during the acquisition. The function call is not required as it will be called automatically by the StartAcquisition function if it has not already been called externally. However for long kinetic series acquisitions the time to allocate and configure any memory can be quite long which can result in a long delay between calling StartAcquisition and the acquisition actually commencing. For iDus, there is an additional delay caused by the camera being set-up with any new acquisition parameters. Calling PrepareAcquisition first will reduce this delay in the StartAcquisition call. """ self.lib.PrepareAcquisition() @Action() def start_acquisition(self): """This function starts an acquisition. The status of the acquisition can be monitored via GetStatus(). """ self.lib.StartAcquisition() @Action() def abort_acquisition(self): """This function aborts the current acquisition if one is active """ self.lib.AbortAcquisition() @Action() def wait_for_acquisition(self): """WaitForAcquisition can be called after an acquisition is started using StartAcquisition to put the calling thread to sleep until an Acquisition Event occurs. This can be used as a simple alternative to the functionality provided by the SetDriverEvent function, as all Event creation and handling is performed internally by the SDK library. Like the SetDriverEvent functionality it will use less processor resources than continuously polling with the GetStatus function. If you wish to restart the calling thread without waiting for an Acquisition event, call the function CancelWait. An Acquisition Event occurs each time a new image is acquired during an Accumulation, Kinetic Series or Run-Till-Abort acquisition or at the end of a Single Scan Acquisition. If a second event occurs before the first one has been acknowledged, the first one will be ignored. Care should be taken in this case, as you may have to use CancelWait to exit the function. """ self.lib.WaitForAcquisition() @Action() def cancel_wait(self): """This function restarts a thread which is sleeping within the WaitForAcquisition function. The sleeping thread will return from WaitForAcquisition with a value not equal to DRV_SUCCESS. 
""" self.lib.CancelWait() @Feat() def acquisition_progress(self): """This function will return information on the progress of the current acquisition. It can be called at any time but is best used in conjunction with SetDriverEvent. The values returned show the number of completed scans in the current acquisition. If 0 is returned for both accum and series then either: - No acquisition is currently running - The acquisition has just completed - The very first scan of an acquisition has just started and not yet completed. GetStatus can be used to confirm if the first scan has just started, returning DRV_ACQUIRING, otherwise it will return DRV_IDLE. For example, if accum=2 and series=3 then the acquisition has completed 3 in the series and 2 accumulations in the 4 scan of the series """ acc = ct.c_long() series = ct.c_long() self.lib.GetAcquisitionProgress(ct.pointer(acc), ct.pointer(series)) return acc.value, series.value @Feat() def status(self): """This function will return the current status of the Andor SDK system. This function should be called before an acquisition is started to ensure that it is IDLE and during an acquisition to monitor the process. """ st = ct.c_int() self.lib.GetStatus(ct.pointer(st)) if st.value == 20073: return 'Camera is idle, waiting for instructions.' elif st.value == 20074: return 'Camera is executing the temperature cycle.' elif st.value == 20072: return 'Acquisition in progress.' elif st.value == 20023: return 'Unable to meet accumulate cycle time.' elif st.value == 20022: return 'Unable to meet kinetic cycle time.' elif st.value == 20013: return 'Unable to communicate with card.' elif st.value == 20018: return ('Computer unable to read the data via the ISA slot at the ' 'required rate.') elif st.value == 20026: return 'Overflow of the spool buffer.' @Feat() def n_exposures_in_ring(self): """Gets the number of exposures in the ring at this moment.""" n = ct.c_int() self.lib.GetNumberRingExposureTimes(ct.pointer(n)) return n.value @Feat() def buffer_size(self): """This function will return the maximum number of images the circular buffer can store based on the current acquisition settings. """ n = ct.c_long() self.lib.GetSizeOfCircularBuffer(ct.pointer(n)) return n.value @Feat(values={True: 1, False: 0}) def exposing(self): """This function will return if the system is exposing or not. The status of the firepulse will be returned. NOTE This is only supported by the CCI23 card. """ i = ct.c_int() self.lib.GetCameraEventStatus(ct.pointer(i)) return i.value @Feat() def n_images_acquired(self): """This function will return the total number of images acquired since the current acquisition started. If the camera is idle the value returned is the number of images acquired during the last acquisition. """ n = ct.c_long() self.lib.GetTotalNumberImagesAcquired(ct.pointer(n)) return n.value @Action() def set_image(self, shape=None, binned=(1, 1), p_0=(1, 1)): """This function will set the horizontal and vertical binning to be used when taking a full resolution image. :param hbin: number of pixels to bin horizontally. :param vbin: number of pixels to bin vertically. :param hstart: Start column (inclusive). :param hend: End column (inclusive). :param vstart: Start row (inclusive). :param vend: End row (inclusive). 
""" if shape is None: shape = self.detector_shape (hbin, vbin) = binned (hstart, vstart) = p_0 (hend, vend) = (p_0[0] + shape[0] - 1, p_0[1] + shape[1] - 1) self.lib.SetImage(ct.c_int(hbin), ct.c_int(vbin), ct.c_int(hstart), ct.c_int(hend), ct.c_int(vstart), ct.c_int(vend)) @Feat(values={'FVB': 0, 'Multi-Track': 1, 'Random-Track': 2, 'Single-Track': 3, 'Image': 4}) def readout_mode(self): """This function will set the readout mode to be used on the subsequent acquisitions. """ return self.readout_mode_mode @readout_mode.setter def readout_mode(self, mode): ans = self.lib.SetReadMode(ct.c_int(mode)) if ans == 20002: self.readout_mode_mode = mode @Feat(values={True: 1, False: 0}) def readout_packing(self): """This function will configure whether data is packed into the readout register to improve frame rates for sub-images. Note: It is important to ensure that no light falls outside of the sub-image area otherwise the acquired data will be corrupted. Only currently available on iXon+ and iXon3. """ return self.readout_packing_state @readout_packing.setter def readout_packing(self, state): ans = self.lib.SetReadoutRegisterPacking(ct.c_int(state)) if ans == 20002: self.readout_packing_state = state ### DATA HANDLING @Feat(read_once=True) def min_image_length(self): """This function will return the minimum number of pixels that can be read out from the chip at each exposure. This minimum value arises due the way in which the chip is read out and will limit the possible sub image dimensions and binning sizes that can be applied. """ # Will contain the minimum number of super pixels on return. px = ct.c_int() self.lib.GetMinimumImageLength(ct.pointer(px)) return px.value @Action() def free_int_mem(self): """The FreeInternalMemory function will deallocate any memory used internally to store the previously acquired data. Note that once this function has been called, data from last acquisition cannot be retrived. """ self.lib.FreeInternalMemory() def acquired_data(self, shape): """This function will return the data from the last acquisition. The data are returned as long integers (32-bit signed integers). The “array” must be large enough to hold the complete data set. """ size = np.array(shape).prod() arr = np.ascontiguousarray(np.zeros(size, dtype=np.int32)) self.lib.GetAcquiredData(arr.ctypes.data_as(ct.POINTER(ct.c_int32)), ct.c_ulong(size)) arr = arr.reshape(shape) return arr def acquired_data16(self, shape): """16-bit version of the GetAcquiredData function. The “array” must be large enough to hold the complete data set. """ size = np.array(shape).prod() arr = np.ascontiguousarray(np.zeros(size, dtype=np.int16)) self.lib.GetAcquiredData16(arr.ctypes.data_as(ct.POINTER(ct.c_int16)), ct.c_ulong(size)) return arr.reshape(shape) def oldest_image(self, shape): """This function will update the data array with the oldest image in the circular buffer. Once the oldest image has been retrieved it no longer is available. The data are returned as long integers (32-bit signed integers). The "array" must be exactly the same size as the full image. """ size = np.array(shape).prod() array = np.ascontiguousarray(np.zeros(size, dtype=np.int32)) self.lib.GetOldestImage(array.ctypes.data_as(ct.POINTER(ct.c_int32)), ct.c_ulong(size)) return array.reshape(shape) def oldest_image16(self, shape): """16-bit version of the GetOldestImage function. 
""" size = np.array(shape).prod() array = np.ascontiguousarray(np.zeros(size, dtype=np.int16)) self.lib.GetOldestImage16(array.ctypes.data_as(ct.POINTER(ct.c_int16)), ct.c_ulong(size)) return array.reshape(shape) def most_recent_image(self, shape): """This function will update the data array with the most recently acquired image in any acquisition mode. The data are returned as long integers (32-bit signed integers). The "array" must be exactly the same size as the complete image. """ size = np.array(shape).prod() arr = np.ascontiguousarray(np.zeros(size, dtype=np.int32)) self.lib.GetMostRecentImage(arr.ctypes.data_as(ct.POINTER(ct.c_int32)), ct.c_ulong(size)) return arr.reshape(shape) def most_recent_image16(self, shape): """16-bit version of the GetMostRecentImage function. """ size = np.array(shape).prod() arr = np.ascontiguousarray(np.zeros(size, dtype=np.int16)) pt = ct.POINTER(ct.c_int16) self.lib.GetMostRecentImage16(arr.ctypes.data_as(pt), ct.c_ulong(size)) return arr.reshape(shape) def images(self, first, last, shape, validfirst, validlast): """This function will update the data array with the specified series of images from the circular buffer. If the specified series is out of range (i.e. the images have been overwritten or have not yet been acquired) then an error will be returned. :param first: index of first image in buffer to retrieve. :param flast: index of last image in buffer to retrieve. :param farr: pointer to data storage allocated by the user. :param size: total number of pixels. :param fvalidfirst: index of the first valid image. :param fvalidlast: index of the last valid image. """ size = shape[0] * shape[1] * (1 + last - first) array = np.ascontiguousarray(np.zeros(size, dtype=np.int32)) self.lib.GetImages(ct.c_long(first), ct.c_long(last), array.ctypes.data_as(ct.POINTER(ct.c_int32)), ct.c_ulong(size), ct.pointer(ct.c_long(validfirst)), ct.pointer(ct.c_long(validlast))) return array.reshape(-1, shape[0], shape[1]) def images16(self, first, last, shape, validfirst, validlast): """16-bit version of the GetImages function. """ size = shape[0] * shape[1] * (1 + last - first) array = np.ascontiguousarray(np.zeros(size, dtype=np.int16)) self.lib.GetImages16(ct.c_long(first), ct.c_long(last), array.ctypes.data_as(ct.POINTER(ct.c_int16)), ct.c_ulong(size), ct.pointer(ct.c_long(validfirst)), ct.pointer(ct.c_long(validlast))) return array.reshape(-1, shape[0], shape[1]) @Feat() def new_images_index(self): """This function will return information on the number of new images (i.e. images which have not yet been retrieved) in the circular buffer. This information can be used with GetImages to retrieve a series of the latest images. If any images are overwritten in the circular buffer they can no longer be retrieved and the information returned will treat overwritten images as having been retrieved. """ first = ct.c_long() last = ct.c_long() self.lib.GetNumberNewImages(ct.pointer(first), ct.pointer(last)) return (first.value, last.value) @Feat() # TODO: test this def available_images_index(self): """This function will return information on the number of available images in the circular buffer. This information can be used with GetImages to retrieve a series of images. If any images are overwritten in the circular buffer they no longer can be retrieved and the information returned will treat overwritten images as not available. 
""" first = ct.c_long() last = ct.c_long() self.lib.GetNumberAvailableImages(ct.pointer(first), ct.pointer(last)) return (first.value, last.value) def set_dma_parameters(self, n_max_images, s_per_dma): """In order to facilitate high image readout rates the controller card may wait for multiple images to be acquired before notifying the SDK that new data is available. Without this facility, there is a chance that hardware interrupts may be lost as the operating system does not have enough time to respond to each interrupt. The drawback to this is that you will not get the data for an image until all images for that interrupt have been acquired. There are 3 settings involved in determining how many images will be acquired for each notification (DMA Interrupt) of the controller card and they are as follows: 1. The size of the DMA buffer gives an upper limit on the number of images that can be stored within it and is usually set to the size of one full image when installing the software. This will usually mean that if you acquire full frames there will never be more than one image per DMA. 2. A second setting that is used is the minimum amount of time (SecondsPerDMA) that should expire between interrupts. This can be used to give an indication of the reponsiveness of the operating system to interrupts. Decreasing this value will allow more interrupts per second and should only be done for faster pcs. The default value is 0.03s (30ms), finding the optimal value for your pc can only be done through experimentation. 3. The third setting is an overide to the number of images calculated using the previous settings. If the number of images per dma is calculated to be greater than MaxImagesPerDMA then it will be reduced to MaxImagesPerDMA. This can be used to, for example, ensure that there is never more than 1 image per DMA by setting MaxImagesPerDMA to 1. Setting MaxImagesPerDMA to zero removes this limit. Care should be taken when modifying these parameters as missed interrupts may prevent the acquisition from completing. """ self.lib.SetDMAParameters(ct.c_int(n_max_images), ct.c_float(s_per_dma)) @Feat() def max_images_per_dma(self): """This function will return the maximum number of images that can be transferred during a single DMA transaction. """ n = ct.c_ulong() self.lib.GetImagesPerDMA(ct.pointer(n)) return n.value @Action() def save_raw(self, filename, typ): """This function saves the last acquisition as a raw data file. See self.savetypes for the file type keys. """ self.lib.SaveAsRaw(ct.c_char_p(str.encode(filename)), ct.c_int(self.savetypes[typ])) ### EXPOSURE SETTINGS @Feat() def acquisition_timings(self): """This function will return the current “valid” acquisition timing information. This function should be used after all the acquisitions settings have been set, e.g. SetExposureTime, SetKineticCycleTime and SetReadMode etc. The values returned are the actual times used in subsequent acquisitions. This function is required as it is possible to set the exposure time to 20ms, accumulate cycle time to 30ms and then set the readout mode to full image. As it can take 250ms to read out an image it is not possible to have a cycle time of 30ms. All data is measured in seconds. 
""" exp = ct.c_float() accum = ct.c_float() kine = ct.c_float() self.lib.GetAcquisitionTimings(ct.pointer(exp), ct.pointer(accum), ct.pointer(kine)) return exp.value * seg, accum.value * seg, kine.value * seg @Action() def set_exposure_time(self, time): """This function will set the exposure time to the nearest valid value not less than the given value, in seconds. The actual exposure time used is obtained by GetAcquisitionTimings. Please refer to SECTION 5 – ACQUISITION MODES for further information. """ try: time.magnitude except AttributeError: time = time * seg self.lib.SetExposureTime(ct.c_float(time.magnitude)) @Action() def set_accum_time(self, time): """This function will set the accumulation cycle time to the nearest valid value not less than the given value. The actual cycle time used is obtained by GetAcquisitionTimings. Please refer to SECTION 5 – ACQUISITION MODES for further information. """ try: time.magnitude except AttributeError: time = time * seg self.lib.SetAccumulationCycleTime(ct.c_float(time.magnitude)) @Action() def set_kinetic_cycle_time(self, time): """This function will set the kinetic cycle time to the nearest valid value not less than the given value. The actual time used is obtained by GetAcquisitionTimings. . Please refer to SECTION 5 – ACQUISITION MODES for further information. float time: the kinetic cycle time in seconds. """ try: time.magnitude except AttributeError: time = time * seg self.lib.SetKineticCycleTime(ct.c_float(time.magnitude)) @Action() def set_n_kinetics(self, n): """This function will set the number of scans (possibly accumulated scans) to be taken during a single acquisition sequence. This will only take effect if the acquisition mode is Kinetic Series. """ self.lib.SetNumberKinetics(ct.c_int(n)) @Action() def set_n_accum(self, n): """This function will set the number of scans accumulated in memory. This will only take effect if the acquisition mode is either Accumulate or Kinetic Series. """ self.lib.SetNumberAccumulations(ct.c_int(n)) @Feat(units='s') def keep_clean_time(self): """This function will return the time to perform a keep clean cycle. This function should be used after all the acquisitions settings have been set, e.g. SetExposureTime, SetKineticCycleTime and SetReadMode etc. The value returned is the actual times used in subsequent acquisitions. """ time = ct.c_float() self.lib.GetKeepCleanTime(ct.pointer(time)) return time.value @Feat(units='s') def readout_time(self): """This function will return the time to readout data from a sensor. This function should be used after all the acquisitions settings have been set, e.g. SetExposureTime, SetKineticCycleTime and SetReadMode etc. The value returned is the actual times used in subsequent acquisitions. """ time = ct.c_float() self.lib.GetReadOutTime(ct.pointer(time)) return time.value @Feat(read_once=True, units='s') def max_exposure(self): """This function will return the maximum Exposure Time in seconds that is settable by the SetExposureTime function. """ exp = ct.c_float() self.lib.GetMaximumExposure(ct.pointer(exp)) return exp.value @Feat(read_once=True) def n_max_nexposure(self): """This function will return the maximum number of exposures that can be configured in the SetRingExposureTimes SDK function. """ n = ct.c_int() self.lib.GetMaximumNumberRingExposureTimes(ct.pointer(n)) return n.value def true_exposure_times(self, n): # FIXME: bit order? something """This function will return the actual exposure times that the camera will use. 
        There may be differences between requested exposures and the actual
        exposures.

        :param n: number of exposure times requested.
        """
        # The SDK writes 32-bit floats, so the array must be float32 to
        # match the ct.c_float pointer below.
        times = np.ascontiguousarray(np.zeros(n, dtype=np.float32))
        outtimes = times.ctypes.data_as(ct.POINTER(ct.c_float))
        self.lib.GetAdjustedRingExposureTimes(ct.c_int(n), outtimes)
        return times

    def exposure_times(self, value):
        n = ct.c_int(len(value))
        value = np.ascontiguousarray(value.astype(np.float32))
        outvalue = value.ctypes.data_as(ct.POINTER(ct.c_float))
        self.lib.SetRingExposureTimes(n, outvalue)

    @Feat(values={True: 1, False: 0})
    def frame_transfer_mode(self):
        """This function will set whether an acquisition will readout in
        Frame Transfer Mode. If the acquisition mode is Single Scan or Fast
        Kinetics this call will have no effect.
        """
        return self.frame_transfer_mode_state

    @frame_transfer_mode.setter
    def frame_transfer_mode(self, state):
        ans = self.lib.SetFrameTransferMode(ct.c_int(state))
        if ans == 20002:
            self.frame_transfer_mode_state = state

    ### AMPLIFIERS, GAIN, SPEEDS

    @Feat(read_once=True)
    def n_preamps(self):
        """Available in some systems are a number of pre amp gains that can
        be applied to the data as it is read out. This function gets the
        number of these pre amp gains available. The functions GetPreAmpGain
        and SetPreAmpGain can be used to specify which of these gains is to
        be used.
        """
        n = ct.c_int()
        self.lib.GetNumberPreAmpGains(ct.pointer(n))
        return n.value

    def preamp_available(self, channel, amp, index, preamp):
        """This function checks that the AD channel exists, and that the
        amplifier, speed and gain are available for the AD channel.
        """
        channel = ct.c_int(channel)
        amp = ct.c_int(amp)
        index = ct.c_int(index)
        preamp = ct.c_int(preamp)
        status = ct.c_int()
        self.lib.IsPreAmpGainAvailable(channel, amp, index, preamp,
                                       ct.pointer(status))
        return bool(status.value)

    def preamp_descr(self, index):
        """This function will return a string with a pre amp gain
        description. The pre amp gain is selected using the index. The SDK
        has a string associated with each of its pre amp gains. The maximum
        number of characters needed to store the pre amp gain descriptions
        is 30. The user has to specify the number of characters they wish to
        have returned to them from this function.
        """
        index = ct.c_int(index)
        descr = (ct.c_char * 30)()
        leng = ct.c_int(30)
        # GetPreAmpGainText is the pre amp counterpart of GetAmpDesc.
        self.lib.GetPreAmpGainText(index, ct.pointer(descr), leng)
        return str(descr.value)[2:-1]

    def true_preamp(self, index):
        """For those systems that provide a number of pre amp gains to apply
        to the data as it is read out; this function retrieves the amount of
        gain that is stored for a particular index. The number of gains
        available can be obtained by calling the GetNumberPreAmpGains
        function and a specific Gain can be selected using the function
        SetPreAmpGain.
        """
        index = ct.c_int(index)
        gain = ct.c_float()
        self.lib.GetPreAmpGain(index, ct.pointer(gain))
        return gain.value

    @Feat()
    def preamp(self):
        """This function will set the pre amp gain to be used for subsequent
        acquisitions. The actual gain factor that will be applied can be
        found through a call to the GetPreAmpGain function. The number of
        Pre Amp Gains available is found by calling the GetNumberPreAmpGains
        function.
        """
        return self.preamp_index

    @preamp.setter
    def preamp(self, index):
        self.preamp_index = index
        self.lib.SetPreAmpGain(ct.c_int(index))

    @Feat(values={True: 1, False: 0})
    def EM_advanced_enabled(self):
        """This function turns on and off access to higher EM gain levels
        within the SDK. Typically, optimal signal to noise ratio and dynamic
        range is achieved between x1 to x300 EM Gain.
Higher gains of > x300 are recommended for single photon counting only. Before using higher levels, you should ensure that light levels do not exceed the regime of tens of photons per pixel, otherwise accelerated ageing of the sensor can occur. This is set to False upon initialization of the camera. """ state = ct.c_int() self.lib.GetEMAdvanced(ct.pointer(state)) return state.value @EM_advanced_enabled.setter def EM_advanced_enabled(self, value): self.lib.SetEMAdvanced(ct.c_int(value)) @Feat(values={'DAC255': 0, 'DAC4095': 1, 'Linear': 2, 'RealGain': 3}) def EM_gain_mode(self): """Set the EM Gain mode to one of the following possible settings. Mode 0: The EM Gain is controlled by DAC settings in the range 0-255. Default mode. 1: The EM Gain is controlled by DAC settings in the range 0-4095. 2: Linear mode. 3: Real EM gain """ return self.EM_gain_mode_index @EM_gain_mode.setter def EM_gain_mode(self, mode): ans = self.lib.SetEMGainMode(ct.c_int(mode)) if ans == 20002: self.EM_gain_mode_index = mode @Feat() def EM_gain(self): """Allows the user to change the gain value. The valid range for the gain depends on what gain mode the camera is operating in. See SetEMGainMode to set the mode and GetEMGainRange to get the valid range to work with. To access higher gain values (>x300) see SetEMAdvanced. """ gain = ct.c_int() self.lib.GetEMCCDGain(ct.pointer(gain)) return gain.value @EM_gain.setter def EM_gain(self, value): self.lib.SetEMCCDGain(ct.c_int(value)) @Feat() def EM_gain_range(self): """Returns the minimum and maximum values of the current selected EM Gain mode and temperature of the sensor. """ mini, maxi = ct.c_int(), ct.c_int() self.lib.GetEMGainRange(ct.pointer(mini), ct.pointer(maxi)) return (mini.value, maxi.value) @Feat(read_once=True) def n_ad_channels(self): n = ct.c_int() self.lib.GetNumberADChannels(ct.pointer(n)) return n.value @Feat(read_once=True) def n_amps(self): n = ct.c_int() self.lib.GetNumberAmp(ct.pointer(n)) return n.value def amp_available(self, iamp): """This function checks if the hardware and current settings permit the use of the specified amplifier.""" ans = self.lib.IsAmplifierAvailable(ct.c_int(iamp)) if ans == 20002: return True else: return False def amp_descr(self, index): """This function will return a string with an amplifier description. The amplifier is selected using the index. The SDK has a string associated with each of its amplifiers. The maximum number of characters needed to store the amplifier descriptions is 21. The user has to specify the number of characters they wish to have returned to them from this function. """ index = ct.c_int(index) descr = (ct.c_char * 21)() leng = ct.c_int(21) self.lib.GetAmpDesc(index, ct.pointer(descr), leng) return str(descr.value)[2:-1] def readout_flipped(self, iamp): """On cameras with multiple amplifiers the frame readout may be flipped. This function can be used to determine if this is the case. """ flipped = ct.c_int() self.lib.IsReadoutFlippedByAmplifier(ct.c_int(iamp), ct.pointer(flipped)) return bool(flipped.value) def amp_max_hspeed(self, index): """This function will return the maximum available horizontal shift speed for the amplifier selected by the index parameter. """ hspeed = ct.c_float() self.lib.GetAmpMaxSpeed(ct.c_int(index), ct.pointer(hspeed)) return hspeed.value def n_horiz_shift_speeds(self, channel=0, typ=None): """As your Andor SDK system is capable of operating at more than one horizontal shift speed this function will return the actual number of speeds available. 
        :param channel: the AD channel.
        :param typ: output amplification.
            0 electron multiplication.
            1 conventional.
        """
        if typ is None:
            typ = self.amp_typ
        n = ct.c_int()
        self.lib.GetNumberHSSpeeds(ct.c_int(channel), ct.c_int(typ),
                                   ct.pointer(n))
        return n.value

    def true_horiz_shift_speed(self, index=0, typ=None, ad=0):
        """As your Andor system is capable of operating at more than one
        horizontal shift speed this function will return the actual speeds
        available. The value returned is in MHz.
        GetHSSpeed(int channel, int typ, int index, float* speed)

        :param typ: output amplification.
            0 electron multiplication/Conventional(clara)
            1 conventional/Extended NIR Mode(clara).
        :param index: speed required, 0 to NumberSpeeds-1, where
            NumberSpeeds is the value returned in the first parameter after
            a call to GetNumberHSSpeeds().
        :param ad: the AD channel.
        """
        if typ is None:
            typ = self.amp_typ
        speed = ct.c_float()
        self.lib.GetHSSpeed(ct.c_int(ad), ct.c_int(typ), ct.c_int(index),
                            ct.pointer(speed))
        return speed.value * MHz

    @Feat()
    def horiz_shift_speed(self):
        return self.horiz_shift_speed_index

    @horiz_shift_speed.setter
    def horiz_shift_speed(self, index):
        """This function will set the speed at which the pixels are shifted
        into the output node during the readout phase of an acquisition.
        Typically your camera will be capable of operating at several
        horizontal shift speeds. To get the actual speed that an index
        corresponds to use the GetHSSpeed function. The output amplification
        type is taken from self.amp_typ.

        :param index: the horizontal speed to be used,
            0 to GetNumberHSSpeeds() - 1
        """
        ans = self.lib.SetHSSpeed(ct.c_int(self.amp_typ), ct.c_int(index))
        if ans == 20002:
            self.horiz_shift_speed_index = index

    @Feat()
    def fastest_recommended_vsspeed(self):
        """As your Andor SDK system may be capable of operating at more than
        one vertical shift speed this function will return the fastest
        recommended speed available. The very high readout speeds may
        require an increase in the amplitude of the Vertical Clock Voltage
        using SetVSAmplitude. This function returns the fastest speed which
        does not require the Vertical Clock Voltage to be adjusted. The
        values returned are the vertical shift speed index and the actual
        speed in microseconds per pixel shift.
        """
        inti, f2 = ct.c_int(), ct.c_float()
        self.lib.GetFastestRecommendedVSSpeed(ct.pointer(inti),
                                              ct.pointer(f2))
        return (inti.value, f2.value)

    @Feat(read_once=True)
    def n_vert_clock_amps(self):
        """This function will normally return the number of vertical clock
        voltage amplitudes that the camera has.
        """
        n = ct.c_int()
        self.lib.GetNumberVSAmplitudes(ct.pointer(n))
        return n.value

    def vert_amp_index(self, string):
        """This function is used to get the index of the Vertical Clock
        Amplitude that corresponds to the string passed in.

        :param string: "Normal", "+1", "+2", "+3", "+4"
        """
        index = ct.c_int()
        string = ct.c_char_p(str.encode(string))
        self.lib.GetVSAmplitudeFromString(string, ct.pointer(index))
        return index.value

    def vert_amp_string(self, index):
        """This function is used to get the Vertical Clock Amplitude string
        that corresponds to the index passed in.

        :param index: index of VS amplitude required,
            valid values 0 to GetNumberVSAmplitudes() - 1
        """
        index = ct.c_int(index)
        string = (ct.c_char * 6)()
        self.lib.GetVSAmplitudeString(index, ct.pointer(string))
        return str(string.value)[2:-1]

    def true_vert_amp(self, index):
        """This function is used to get the value of the Vertical Clock
        Amplitude found at the index passed in.
:param index: Index of VS amplitude required Valid values 0 to GetNumberVSAmplitudes() - 1 """ index = ct.c_int(index) amp = ct.c_int() self.lib.GetVSAmplitudeValue(index, ct.pointer(amp)) return amp.value @Action() def set_vert_clock(self, index): """If you choose a high readout speed (a low readout time), then you should also consider increasing the amplitude of the Vertical Clock Voltage. There are five levels of amplitude available for you to choose from: - Normal, +1, +2, +3, +4 Exercise caution when increasing the amplitude of the vertical clock voltage, since higher clocking voltages may result in increased clock-induced charge (noise) in your signal. In general, only the very highest vertical clocking speeds are likely to benefit from an increased vertical clock voltage amplitude. """ self.lib.SetVSAmplitude(ct.c_int(index)) @Feat(read_once=True) def n_vert_shift_speeds(self): """As your Andor system may be capable of operating at more than one vertical shift speed this function will return the actual number of speeds available. """ n = ct.c_int() self.lib.GetNumberVSSpeeds(ct.pointer(n)) return n.value def true_vert_shift_speed(self, index=0): """As your Andor SDK system may be capable of operating at more than one vertical shift speed this function will return the actual speeds available. The value returned is in microseconds. """ speed = ct.c_float() self.lib.GetVSSpeed(ct.c_int(index), ct.pointer(speed)) return speed.value * us @Feat() def vert_shift_speed(self): return self.vert_shift_speed_index @vert_shift_speed.setter def vert_shift_speed(self, index): """This function will set the vertical speed to be used for subsequent acquisitions. """ self.vert_shift_speed_index = index self.lib.SetVSSpeed(ct.c_int(index)) ### BASELINE @Feat(values={True: 1, False: 0}) def baseline_clamp(self): """This function returns the status of the baseline clamp functionality. With this feature enabled the baseline level of each scan in a kinetic series will be more consistent across the sequence. """ i = ct.c_int() self.lib.GetBaselineClamp(ct.pointer(i)) return i.value @baseline_clamp.setter def baseline_clamp(self, value): value = ct.c_int(value) self.lib.SetBaselineClamp(value) @Feat(limits=(-1000, 1100, 100)) def baseline_offset(self): """This function allows the user to move the baseline level by the amount selected. For example “+100” will add approximately 100 counts to the default baseline value. The value entered should be a multiple of 100 between -1000 and +1000 inclusively. """ return self.baseline_offset_value @baseline_offset.setter def baseline_offset(self, value): ans = self.lib.SetBaselineOffset(ct.c_int(value)) if ans == 20002: self.baseline_offset_value = value ### BIT DEPTH def bit_depth(self, ch): """This function will retrieve the size in bits of the dynamic range for any available AD channel. """ ch = ct.c_int(ch) depth = ct.c_uint() self.lib.GetBitDepth(ch, ct.pointer(depth)) return depth.value ### TRIGGER @Feat(values={True: 1, False: 0}) def adv_trigger_mode(self): """This function will set the state for the iCam functionality that some cameras are capable of. There may be some cases where we wish to prevent the software using the new functionality and just do it the way it was previously done. 
""" return self.adv_trigger_mode_state @adv_trigger_mode.setter def adv_trigger_mode(self, state): ans = self.lib.SetAdvancedTriggerModeState(ct.c_int(state)) if ans == 20002: self.adv_trigger_mode_state = state def trigger_mode_available(self, modestr): """This function checks if the hardware and current settings permit the use of the specified trigger mode. """ index = self.triggers[modestr] ans = self.lib.IsTriggerModeAvailable(ct.c_int(index)) if ans == 20002: return True else: return False @Feat(values={'Internal': 0, 'External': 1, 'External Start': 6, 'External Exposure': 7, 'External FVB EM': 9, 'Software Trigger': 10, 'External Charge Shifting': 12}) def trigger_mode(self): """This function will set the trigger mode that the camera will operate in. """ return self.trigger_mode_index @trigger_mode.setter def trigger_mode(self, mode): ans = self.lib.SetTriggerMode(ct.c_int(mode)) if ans == 20002: self.trigger_mode_index = mode @Action() def send_software_trigger(self): """This function sends an event to the camera to take an acquisition when in Software Trigger mode. Not all cameras have this mode available to them. To check if your camera can operate in this mode check the GetCapabilities function for the Trigger Mode AC_TRIGGERMODE_CONTINUOUS. If this mode is physically possible and other settings are suitable (IsTriggerModeAvailable) and the camera is acquiring then this command will take an acquisition. NOTES: The settings of the camera must be as follows: - ReadOut mode is full image - RunMode is Run Till Abort - TriggerMode is 10 """ self.lib.SendSoftwareTrigger() @Action() def trigger_level(self, value): """This function sets the trigger voltage which the system will use. """ self.lib.SetTriggerLevel(ct.c_float(value)) ### AUXPORT @DictFeat(values={True: not(0), False: 0}, keys=list(range(1, 5))) def in_aux_port(self, port): """This function returns the state of the TTL Auxiliary Input Port on the Andor plug-in card. """ port = ct.c_int(port) state = ct.c_int() self.lib.InAuxPort(port, ct.pointer(state)) return state.value @DictFeat(values={True: 1, False: 0}, keys=list(range(1, 5))) def out_aux_port(self, port): """This function sets the TTL Auxiliary Output port (P) on the Andor plug-in card to either ON/HIGH or OFF/LOW. """ return self.auxout[port - 1] @out_aux_port.setter def out_aux_port(self, port, state): self.auxout[port - 1] = bool(state) port = ct.c_int(port) state = ct.c_int(state) self.lib.OutAuxPort(port, ct.pointer(state)) def is_implemented(self, strcommand): """Checks if command is implemented. """ result = ct.c_bool() command = ct.c_wchar_p(strcommand) self.lib.AT_IsImplemented(self.AT_H, command, ct.addressof(result)) return result.value def is_writable(self, strcommand): """Checks if command is writable. """ result = ct.c_bool() command = ct.c_wchar_p(strcommand) self.lib.AT_IsWritable(self.AT_H, command, ct.addressof(result)) return result.value def queuebuffer(self, bufptr, value): """Put buffer in queue. """ value = ct.c_int(value) self.lib.AT_QueueBuffer(self.AT_H, ct.byref(bufptr), value) def waitbuffer(self, ptr, bufsize): """Wait for next buffer ready. """ timeout = ct.c_int(20000) self.lib.AT_WaitBuffer(self.AT_H, ct.byref(ptr), ct.byref(bufsize), timeout) def command(self, strcommand): """Run command. """ command = ct.c_wchar_p(strcommand) self.lib.AT_Command(self.AT_H, command) def getint(self, strcommand): """Run command and get Int return value. 
""" result = ct.c_longlong() command = ct.c_wchar_p(strcommand) self.lib.AT_GetInt(self.AT_H, command, ct.addressof(result)) return result.value def setint(self, strcommand, value): """SetInt function. """ command = ct.c_wchar_p(strcommand) value = ct.c_longlong(value) self.lib.AT_SetInt(self.AT_H, command, value) def getfloat(self, strcommand): """Run command and get Int return value. """ result = ct.c_double() command = ct.c_wchar_p(strcommand) self.lib.AT_GetFloat(self.AT_H, command, ct.addressof(result)) return result.value def setfloat(self, strcommand, value): """Set command with Float value parameter. """ command = ct.c_wchar_p(strcommand) value = ct.c_double(value) self.lib.AT_SetFloat(self.AT_H, command, value) def getbool(self, strcommand): """Run command and get Bool return value. """ result = ct.c_bool() command = ct.c_wchar_p(strcommand) self.lib.AT_GetBool(self.AT_H, command, ct.addressof(result)) return result.value def setbool(self, strcommand, value): """Set command with Bool value parameter. """ command = ct.c_wchar_p(strcommand) value = ct.c_bool(value) self.lib.AT_SetBool(self.AT_H, command, value) def getenumerated(self, strcommand): """Run command and set Enumerated return value. """ result = ct.c_int() command = ct.c_wchar_p(strcommand) self.lib.AT_GetEnumerated(self.AT_H, command, ct.addressof(result)) def setenumerated(self, strcommand, value): """Set command with Enumerated value parameter. """ command = ct.c_wchar_p(strcommand) value = ct.c_bool(value) self.lib.AT_SetEnumerated(self.AT_H, command, value) def setenumstring(self, strcommand, item): """Set command with EnumeratedString value parameter. """ command = ct.c_wchar_p(strcommand) item = ct.c_wchar_p(item) self.lib.AT_SetEnumString(self.AT_H, command, item) def flush(self): self.lib.AT_Flush(self.AT_H) if __name__ == '__main__': from matplotlib import pyplot as plt from lantz import Q_ import time degC = Q_(1, 'degC') us = Q_(1, 'us') MHz = Q_(1, 'MHz') s = Q_(1, 's') with CCD() as andor: print(andor.idn) andor.free_int_mem() # Acquisition settings andor.readout_mode = 'Image' andor.set_image() andor.acquisition_mode = 'Run till abort' andor.set_exposure_time(0.03 * s) andor.trigger_mode = 'Internal' andor.amp_typ = 0 andor.horiz_shift_speed = 0 andor.vert_shift_speed = 0 andor.shutter(0, 0, 0, 0, 0) # Acquisition andor.start_acquisition() time.sleep(2) data = andor.most_recent_image(shape=andor.detector_shape) andor.abort_acquisition() plt.imshow(data, cmap='gray', interpolation='None') plt.colorbar() plt.show() print(data.min(), data.max(), data.mean())
import datetime import httplib2 import itertools import json from django.conf import settings from django.db import connection, transaction from django.db.models import Sum, Max import commonware.log from apiclient.discovery import build from celeryutils import task from oauth2client.client import OAuth2Credentials import amo import amo.search from addons.models import Addon, AddonUser from bandwagon.models import Collection from lib.es.utils import get_indices from reviews.models import Review from stats.models import Contribution from users.models import UserProfile from versions.models import Version from mkt.constants.regions import REGIONS_CHOICES_SLUG from mkt.monolith.models import MonolithRecord from mkt.webapps.models import Webapp from . import search from .models import (AddonCollectionCount, CollectionCount, CollectionStats, DownloadCount, ThemeUserCount, UpdateCount) log = commonware.log.getLogger('z.task') @task def addon_total_contributions(*addons, **kw): "Updates the total contributions for a given addon." log.info('[%s@%s] Updating total contributions.' % (len(addons), addon_total_contributions.rate_limit)) # Only count uuid=None; those are verified transactions. stats = (Contribution.objects.filter(addon__in=addons, uuid=None) .values_list('addon').annotate(Sum('amount'))) for addon, total in stats: Addon.objects.filter(id=addon).update(total_contributions=total) @task def update_addons_collections_downloads(data, **kw): log.info("[%s] Updating addons+collections download totals." % (len(data))) cursor = connection.cursor() q = ("UPDATE addons_collections SET downloads=%s WHERE addon_id=%s " "AND collection_id=%s;" * len(data)) cursor.execute(q, list(itertools.chain.from_iterable( [var['sum'], var['addon'], var['collection']] for var in data))) transaction.commit_unless_managed() @task def update_collections_total(data, **kw): log.info("[%s] Updating collections' download totals." % (len(data))) for var in data: (Collection.objects.filter(pk=var['collection_id']) .update(downloads=var['sum'])) def get_profile_id(service, domain): """ Fetch the profile ID for the given domain. """ accounts = service.management().accounts().list().execute() account_ids = [a['id'] for a in accounts.get('items', ())] for account_id in account_ids: webproperties = service.management().webproperties().list( accountId=account_id).execute() webproperty_ids = [p['id'] for p in webproperties.get('items', ())] for webproperty_id in webproperty_ids: profiles = service.management().profiles().list( accountId=account_id, webPropertyId=webproperty_id).execute() for p in profiles.get('items', ()): # sometimes GA includes "http://", sometimes it doesn't. 
                if '://' in p['websiteUrl']:
                    name = p['websiteUrl'].partition('://')[-1]
                else:
                    name = p['websiteUrl']

                if name == domain:
                    return p['id']


@task
def update_google_analytics(date, **kw):
    creds_data = getattr(settings, 'GOOGLE_ANALYTICS_CREDENTIALS', None)
    if not creds_data:
        log.critical('Failed to update global stats: '
                     'GOOGLE_ANALYTICS_CREDENTIALS not set')
        return

    creds = OAuth2Credentials(
        *[creds_data[k] for k in
          ('access_token', 'client_id', 'client_secret',
           'refresh_token', 'token_expiry', 'token_uri',
           'user_agent')])
    h = httplib2.Http()
    creds.authorize(h)
    service = build('analytics', 'v3', http=h)

    domain = (getattr(settings, 'GOOGLE_ANALYTICS_DOMAIN', None)
              or settings.DOMAIN)

    profile_id = get_profile_id(service, domain)
    if profile_id is None:
        log.critical('Failed to update global stats: could not access a'
                     ' Google Analytics profile for ' + domain)
        return

    datestr = date.strftime('%Y-%m-%d')
    try:
        data = service.data().ga().get(ids='ga:' + profile_id,
                                       start_date=datestr,
                                       end_date=datestr,
                                       metrics='ga:visits').execute()
        # Storing this under the webtrends stat name so it goes on the
        # same graph as the old webtrends data.
        p = ['webtrends_DailyVisitors', data['rows'][0][0], date]
    except Exception, e:
        log.critical('Fetching stats data for %s from Google Analytics '
                     'failed: %s' % (domain, e))
        return

    try:
        cursor = connection.cursor()
        cursor.execute('REPLACE INTO global_stats (name, count, date) '
                       'values (%s, %s, %s)', p)
        transaction.commit_unless_managed()
    except Exception, e:
        log.critical('Failed to update global stats: (%s): %s' % (p, e))
        return

    log.debug('Committed global stats details: (%s) has (%s) for (%s)'
              % tuple(p))


@task
def update_global_totals(job, date, **kw):
    log.info('Updating global statistics totals (%s) for (%s)'
             % (job, date))

    jobs = _get_daily_jobs(date)
    jobs.update(_get_metrics_jobs(date))

    num = jobs[job]()

    q = """REPLACE INTO global_stats (`name`, `count`, `date`)
           VALUES (%s, %s, %s)"""
    p = [job, num or 0, date]

    try:
        cursor = connection.cursor()
        cursor.execute(q, p)
        transaction.commit_unless_managed()
    except Exception, e:
        log.critical('Failed to update global stats: (%s): %s' % (p, e))

    log.debug('Committed global stats details: (%s) has (%s) for (%s)'
              % tuple(p))


def _get_daily_jobs(date=None):
    """Return a dictionary of statistics queries.

    If a date is specified and applies to the job it will be used.
    Otherwise the date will default to today().
    """
    if not date:
        date = datetime.date.today()

    # Passing through a datetime would not generate an error,
    # but would pass and give incorrect values.
    if isinstance(date, datetime.datetime):
        raise ValueError('This requires a valid date, not a datetime')

    # Testing on lte created date doesn't get you today's date; you need to
    # test on less than the next date. That's because 2012-1-1 becomes
    # 2012-1-1 00:00.
    next_date = date + datetime.timedelta(days=1)

    date_str = date.strftime('%Y-%m-%d')
    extra = dict(where=['DATE(created)=%s'], params=[date_str])

    # If you're editing these, note that you are returning a function! This
    # cheesy hackery was done so that we could pass the queries to celery
    # lazily and not hammer the db with a ton of these all at once.
    stats = {
        # Add-on Downloads
        'addon_total_downloads': lambda: DownloadCount.objects.filter(
            date__lt=next_date).aggregate(sum=Sum('count'))['sum'],
        'addon_downloads_new': lambda: DownloadCount.objects.filter(
            date=date).aggregate(sum=Sum('count'))['sum'],

        # Add-on counts
        'addon_count_new': Addon.objects.extra(**extra).count,

        # Version counts
        'version_count_new': Version.objects.extra(**extra).count,

        # User counts
        'user_count_total': UserProfile.objects.filter(
            created__lt=next_date).count,
        'user_count_new': UserProfile.objects.extra(**extra).count,

        # Review counts
        'review_count_total': Review.objects.filter(created__lte=date,
                                                    editorreview=0).count,
        'review_count_new': Review.objects.filter(editorreview=0).extra(
            **extra).count,

        # Collection counts
        'collection_count_total': Collection.objects.filter(
            created__lt=next_date).count,
        'collection_count_new': Collection.objects.extra(**extra).count,
        'collection_count_autopublishers': Collection.objects.filter(
            created__lt=next_date, type=amo.COLLECTION_SYNCHRONIZED).count,

        'collection_addon_downloads': (lambda:
            AddonCollectionCount.objects.filter(date__lte=date).aggregate(
                sum=Sum('count'))['sum']),
    }

    # If we're processing today's stats, we'll do some extras. We don't do
    # these for re-processed stats because they change over time (eg. add-ons
    # move from sandbox -> public).
    if date == datetime.date.today():
        stats.update({
            'addon_count_experimental': Addon.objects.filter(
                created__lte=date, status=amo.STATUS_UNREVIEWED,
                disabled_by_user=0).count,
            'addon_count_nominated': Addon.objects.filter(
                created__lte=date, status=amo.STATUS_NOMINATED,
                disabled_by_user=0).count,
            'addon_count_public': Addon.objects.filter(
                created__lte=date, status=amo.STATUS_PUBLIC,
                disabled_by_user=0).count,
            'addon_count_pending': Version.objects.filter(
                created__lte=date, files__status=amo.STATUS_PENDING).count,

            'collection_count_private': Collection.objects.filter(
                created__lte=date, listed=0).count,
            'collection_count_public': Collection.objects.filter(
                created__lte=date, listed=1).count,
            'collection_count_editorspicks': Collection.objects.filter(
                created__lte=date, type=amo.COLLECTION_FEATURED).count,
            'collection_count_normal': Collection.objects.filter(
                created__lte=date, type=amo.COLLECTION_NORMAL).count,
        })

    return stats


def _get_metrics_jobs(date=None):
    """Return a dictionary of statistics queries.

    If a date is specified and applies to the job it will be used.
    Otherwise the date will default to the last date metrics put something
    in the db.
    """
    if not date:
        date = UpdateCount.objects.aggregate(max=Max('date'))['max']

    # If you're editing these, note that you are returning a function!
    stats = {
        'addon_total_updatepings': lambda: UpdateCount.objects.filter(
            date=date).aggregate(sum=Sum('count'))['sum'],
        'collector_updatepings': lambda: UpdateCount.objects.get(
            addon=11950, date=date).count,
    }

    return stats


@task
def index_update_counts(ids, **kw):
    index = kw.pop('index', None)
    indices = get_indices(index)

    es = amo.search.get_es()
    qs = UpdateCount.objects.filter(id__in=ids)
    if qs:
        log.info('Indexing %s updates for %s.'
% (qs.count(), qs[0].date)) try: for update in qs: key = '%s-%s' % (update.addon_id, update.date) data = search.extract_update_count(update) for index in indices: UpdateCount.index(data, bulk=True, id=key, index=index) es.flush_bulk(forced=True) except Exception, exc: index_update_counts.retry(args=[ids], exc=exc, **kw) raise @task def index_download_counts(ids, **kw): index = kw.pop('index', None) indices = get_indices(index) es = amo.search.get_es() qs = DownloadCount.objects.filter(id__in=ids) if qs: log.info('Indexing %s downloads for %s.' % (qs.count(), qs[0].date)) try: for dl in qs: key = '%s-%s' % (dl.addon_id, dl.date) data = search.extract_download_count(dl) for index in indices: DownloadCount.index(data, bulk=True, id=key, index=index) es.flush_bulk(forced=True) except Exception, exc: index_download_counts.retry(args=[ids], exc=exc) raise @task def index_collection_counts(ids, **kw): index = kw.pop('index', None) indices = get_indices(index) es = amo.search.get_es() qs = CollectionCount.objects.filter(collection__in=ids) if qs: log.info('Indexing %s addon collection counts: %s' % (qs.count(), qs[0].date)) try: for collection_count in qs: collection = collection_count.collection_id key = '%s-%s' % (collection, collection_count.date) filters = dict(collection=collection, date=collection_count.date) data = search.extract_addon_collection( collection_count, AddonCollectionCount.objects.filter(**filters), CollectionStats.objects.filter(**filters)) for index in indices: CollectionCount.index(data, bulk=True, id=key, index=index) es.flush_bulk(forced=True) except Exception, exc: index_collection_counts.retry(args=[ids], exc=exc) raise @task def index_theme_user_counts(ids, **kw): index = kw.pop('index', None) indices = get_indices(index) es = amo.search.get_es() qs = ThemeUserCount.objects.filter(id__in=ids) if qs: log.info('Indexing %s theme user counts for %s.' % (qs.count(), qs[0].date)) try: for user_count in qs: key = '%s-%s' % (user_count.addon_id, user_count.date) data = search.extract_theme_user_count(user_count) for index in indices: ThemeUserCount.index(data, bulk=True, id=key, index=index) es.flush_bulk(forced=True) except Exception, exc: index_theme_user_counts.retry(args=[ids], exc=exc) raise @task def update_monolith_stats(metric, date, **kw): log.info('Updating monolith statistics (%s) for (%s)' % (metric, date)) jobs = _get_monolith_jobs(date)[metric] for job in jobs: try: # Only record if count is greater than zero. count = job['count']() if count: value = {'count': count} if 'dimensions' in job: value.update(job['dimensions']) MonolithRecord.objects.create(recorded=date, key=metric, value=json.dumps(value)) log.debug('Monolith stats details: (%s) has (%s) for (%s). ' 'Value: %s' % (metric, count, date, value)) except Exception as e: log.critical('Update of monolith table failed: (%s): %s' % ([metric, date], e)) def _get_monolith_jobs(date=None): """ Return a dict of Monolith based statistics queries. The dict is of the form:: {'<metric_name>': [{'count': <callable>, 'dimensions': <dimensions>}]} Where `dimensions` is an optional dict of dimensions we expect to filter on via Monolith. If a date is specified and applies to the job it will be used. Otherwise the date will default to today(). """ if not date: date = datetime.date.today() # If we have a datetime make it a date so H/M/S isn't used. if isinstance(date, datetime.datetime): date = date.date() next_date = date + datetime.timedelta(days=1) stats = { # Marketplace reviews. 
'apps_review_count_new': [{ 'count': Review.objects.filter( created__range=(date, next_date), editorreview=0, addon__type=amo.ADDON_WEBAPP).count, }], # New users 'mmo_user_count_total': [{ 'count': UserProfile.objects.filter( created__lt=next_date, source=amo.LOGIN_SOURCE_MMO_BROWSERID).count, }], 'mmo_user_count_new': [{ 'count': UserProfile.objects.filter( created__range=(date, next_date), source=amo.LOGIN_SOURCE_MMO_BROWSERID).count, }], # New developers. 'mmo_developer_count_total': [{ 'count': AddonUser.objects.filter( addon__type=amo.ADDON_WEBAPP).values('user').distinct().count, }], # App counts. 'apps_count_new': [{ 'count': Webapp.objects.filter( created__range=(date, next_date)).count, }], } # Add various "Apps Added" for all the dimensions we need. apps = Webapp.objects.filter(created__range=(date, next_date)) package_counts = [] premium_counts = [] # privileged==packaged for our consideration. package_types = amo.ADDON_WEBAPP_TYPES.copy() package_types.pop(amo.ADDON_WEBAPP_PRIVILEGED) for region_slug, region in REGIONS_CHOICES_SLUG: # Apps added by package type and region. for package_type in package_types.values(): package_counts.append({ 'count': apps.filter( is_packaged=package_type == 'packaged').exclude( addonexcludedregion__region=region.id).count, 'dimensions': {'region': region_slug, 'package_type': package_type}, }) # Apps added by premium type and region. for premium_type, pt_name in amo.ADDON_PREMIUM_API.items(): premium_counts.append({ 'count': apps.filter( premium_type=premium_type).exclude( addonexcludedregion__region=region.id).count, 'dimensions': {'region': region_slug, 'premium_type': pt_name}, }) stats.update({'apps_added_by_package_type': package_counts}) stats.update({'apps_added_by_premium_type': premium_counts}) # Add various "Apps Available" for all the dimensions we need. apps = Webapp.objects.filter(status=amo.STATUS_PUBLIC, disabled_by_user=False) package_counts = [] premium_counts = [] for region_slug, region in REGIONS_CHOICES_SLUG: # Apps available by package type and region. for package_type in package_types.values(): package_counts.append({ 'count': apps.filter( is_packaged=package_type == 'packaged').exclude( addonexcludedregion__region=region.id).count, 'dimensions': {'region': region_slug, 'package_type': package_type}, }) # Apps available by premium type and region. for premium_type, pt_name in amo.ADDON_PREMIUM_API.items(): premium_counts.append({ 'count': apps.filter( premium_type=premium_type).exclude( addonexcludedregion__region=region.id).count, 'dimensions': {'region': region_slug, 'premium_type': pt_name}, }) stats.update({'apps_available_by_package_type': package_counts}) stats.update({'apps_available_by_premium_type': premium_counts}) return stats
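# A small illustration (not part of the original module) of the
# lazy-callable convention used by _get_daily_jobs() and
# _get_monolith_jobs() above: every value in the returned dicts is a
# zero-argument callable, so no query runs until a task invokes it. The
# date below is an arbitrary example.
#
#     jobs = _get_daily_jobs(datetime.date(2013, 1, 1))
#     new_addons = jobs['addon_count_new']()  # the DB query runs here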
import hashlib import logging import os from django.conf import settings from django.core.files.storage import default_storage as storage from django.db import transaction from PIL import Image from olympia import amo from olympia.addons.models import ( Addon, attach_tags, attach_translations, AppSupport, CompatOverride, IncompatibleVersions, Persona, Preview) from olympia.addons.indexers import AddonIndexer from olympia.amo.celery import task from olympia.amo.decorators import set_modified_on, write from olympia.amo.helpers import user_media_path from olympia.amo.storage_utils import rm_stored_dir from olympia.amo.utils import cache_ns_key, ImageCheck, LocalFileStorage from olympia.editors.models import RereviewQueueTheme from olympia.lib.es.utils import index_objects from olympia.versions.models import Version from . import cron # noqa log = logging.getLogger('z.task') @task @write def version_changed(addon_id, **kw): update_last_updated(addon_id) update_appsupport([addon_id]) def update_last_updated(addon_id): queries = Addon._last_updated_queries() try: addon = Addon.objects.get(pk=addon_id) except Addon.DoesNotExist: log.info('[1@None] Updating last updated for %s failed, no addon found' % addon_id) return log.info('[1@None] Updating last updated for %s.' % addon_id) if addon.is_persona(): q = 'personas' elif addon.status == amo.STATUS_PUBLIC: q = 'public' else: q = 'exp' qs = queries[q].filter(pk=addon_id).using('default') res = qs.values_list('id', 'last_updated') if res: pk, t = res[0] Addon.objects.filter(pk=pk).update(last_updated=t) @write def update_appsupport(ids): log.info("[%s@None] Updating appsupport for %s." % (len(ids), ids)) addons = Addon.objects.no_cache().filter(id__in=ids).no_transforms() support = [] for addon in addons: for app, appver in addon.compatible_apps.items(): if appver is None: # Fake support for all version ranges. min_, max_ = 0, 999999999999999999 else: min_, max_ = appver.min.version_int, appver.max.version_int support.append(AppSupport(addon=addon, app=app.id, min=min_, max=max_)) if not support: return with transaction.atomic(): AppSupport.objects.filter(addon__id__in=ids).delete() AppSupport.objects.bulk_create(support) # All our updates were sql, so invalidate manually. Addon.objects.invalidate(*addons) @task def delete_preview_files(id, **kw): log.info('[1@None] Removing preview with id of %s.' % id) p = Preview(id=id) for f in (p.thumbnail_path, p.image_path): try: storage.delete(f) except Exception, e: log.error('Error deleting preview file (%s): %s' % (f, e)) @task(acks_late=True) def index_addons(ids, **kw): log.info('Indexing addons %s-%s. [%s]' % (ids[0], ids[-1], len(ids))) transforms = (attach_tags, attach_translations) index_objects(ids, Addon, AddonIndexer.extract_document, kw.pop('index', None), transforms, Addon.unfiltered) @task def unindex_addons(ids, **kw): for addon in ids: log.info('Removing addon [%s] from search index.' % addon) Addon.unindex(addon) @task def delete_persona_image(dst, **kw): log.info('[1@None] Deleting persona image: %s.' % dst) if not dst.startswith(user_media_path('addons')): log.error("Someone tried deleting something they shouldn't: %s" % dst) return try: storage.delete(dst) except Exception, e: log.error('Error deleting persona image: %s' % e) @set_modified_on def create_persona_preview_images(src, full_dst, **kw): """ Creates a 680x100 thumbnail used for the Persona preview and a 32x32 thumbnail used for search suggestions/detail pages. 
""" log.info('[1@None] Resizing persona images: %s' % full_dst) preview, full = amo.PERSONA_IMAGE_SIZES['header'] preview_w, preview_h = preview orig_w, orig_h = full with storage.open(src) as fp: i_orig = i = Image.open(fp) # Crop image from the right. i = i.crop((orig_w - (preview_w * 2), 0, orig_w, orig_h)) # Resize preview. i = i.resize(preview, Image.ANTIALIAS) i.load() with storage.open(full_dst[0], 'wb') as fp: i.save(fp, 'png') _, icon_size = amo.PERSONA_IMAGE_SIZES['icon'] icon_w, icon_h = icon_size # Resize icon. i = i_orig i.load() i = i.crop((orig_w - (preview_h * 2), 0, orig_w, orig_h)) i = i.resize(icon_size, Image.ANTIALIAS) i.load() with storage.open(full_dst[1], 'wb') as fp: i.save(fp, 'png') return True @set_modified_on def save_persona_image(src, full_dst, **kw): """Creates a PNG of a Persona header/footer image.""" log.info('[1@None] Saving persona image: %s' % full_dst) img = ImageCheck(storage.open(src)) if not img.is_image(): log.error('Not an image: %s' % src, exc_info=True) return with storage.open(src, 'rb') as fp: i = Image.open(fp) with storage.open(full_dst, 'wb') as fp: i.save(fp, 'png') return True @task def update_incompatible_appversions(data, **kw): """Updates the incompatible_versions table for this version.""" log.info('Updating incompatible_versions for %s versions.' % len(data)) addon_ids = set() for version_id in data: # This is here to handle both post_save and post_delete hooks. IncompatibleVersions.objects.filter(version=version_id).delete() try: version = Version.objects.get(pk=version_id) except Version.DoesNotExist: log.info('Version ID [%d] not found. Incompatible versions were ' 'cleared.' % version_id) return addon_ids.add(version.addon_id) try: compat = CompatOverride.objects.get(addon=version.addon) except CompatOverride.DoesNotExist: log.info('Compat override for addon with version ID [%d] not ' 'found. Incompatible versions were cleared.' % version_id) return app_ranges = [] ranges = compat.collapsed_ranges() for range in ranges: if range.min == '0' and range.max == '*': # Wildcard range, add all app ranges app_ranges.extend(range.apps) else: # Since we can't rely on add-on version numbers, get the min # and max ID values and find versions whose ID is within those # ranges, being careful with wildcards. min_id = max_id = None if range.min == '0': versions = (Version.objects.filter(addon=version.addon_id) .order_by('id') .values_list('id', flat=True)[:1]) if versions: min_id = versions[0] else: try: min_id = Version.objects.get(addon=version.addon_id, version=range.min).id except Version.DoesNotExist: pass if range.max == '*': versions = (Version.objects.filter(addon=version.addon_id) .order_by('-id') .values_list('id', flat=True)[:1]) if versions: max_id = versions[0] else: try: max_id = Version.objects.get(addon=version.addon_id, version=range.max).id except Version.DoesNotExist: pass if min_id and max_id: if min_id <= version.id <= max_id: app_ranges.extend(range.apps) for app_range in app_ranges: IncompatibleVersions.objects.create(version=version, app=app_range.app.id, min_app_version=app_range.min, max_app_version=app_range.max) log.info('Added incompatible version for version ID [%d]: ' 'app:%d, %s -> %s' % (version_id, app_range.app.id, app_range.min, app_range.max)) # Increment namespace cache of compat versions. 
for addon_id in addon_ids: cache_ns_key('d2c-versions:%s' % addon_id, increment=True) def make_checksum(header_path, footer_path): ls = LocalFileStorage() footer = footer_path and ls._open(footer_path).read() or '' raw_checksum = ls._open(header_path).read() + footer return hashlib.sha224(raw_checksum).hexdigest() def theme_checksum(theme, **kw): theme.checksum = make_checksum(theme.header_path, theme.footer_path) dupe_personas = Persona.objects.filter(checksum=theme.checksum) if dupe_personas.exists(): theme.dupe_persona = dupe_personas[0] theme.save() def rereviewqueuetheme_checksum(rqt, **kw): """Check for possible duplicate theme images.""" dupe_personas = Persona.objects.filter( checksum=make_checksum(rqt.header_path or rqt.theme.header_path, rqt.footer_path or rqt.theme.footer_path)) if dupe_personas.exists(): rqt.dupe_persona = dupe_personas[0] rqt.save() @task @write def save_theme(header, footer, addon, **kw): """Save theme image and calculates checksum after theme save.""" dst_root = os.path.join(user_media_path('addons'), str(addon.id)) header = os.path.join(settings.TMP_PATH, 'persona_header', header) header_dst = os.path.join(dst_root, 'header.png') if footer: footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer) footer_dst = os.path.join(dst_root, 'footer.png') try: save_persona_image(src=header, full_dst=header_dst) if footer: save_persona_image(src=footer, full_dst=footer_dst) create_persona_preview_images( src=header, full_dst=[os.path.join(dst_root, 'preview.png'), os.path.join(dst_root, 'icon.png')], set_modified_on=[addon]) theme_checksum(addon.persona) except IOError: addon.delete() raise @task @write def save_theme_reupload(header, footer, addon, **kw): header_dst = None footer_dst = None dst_root = os.path.join(user_media_path('addons'), str(addon.id)) try: if header: header = os.path.join(settings.TMP_PATH, 'persona_header', header) header_dst = os.path.join(dst_root, 'pending_header.png') save_persona_image(src=header, full_dst=header_dst) if footer: footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer) footer_dst = os.path.join(dst_root, 'pending_footer.png') save_persona_image(src=footer, full_dst=footer_dst) except IOError as e: log.error(str(e)) raise if header_dst or footer_dst: theme = addon.persona header = 'pending_header.png' if header_dst else theme.header # Theme footer is optional, but can't be None. footer = theme.footer or '' if footer_dst: footer = 'pending_footer.png' # Store pending header and/or footer file paths for review. RereviewQueueTheme.objects.filter(theme=theme).delete() rqt = RereviewQueueTheme(theme=theme, header=header, footer=footer) rereviewqueuetheme_checksum(rqt=rqt) rqt.save() @task @write def calc_checksum(theme_id, **kw): """For migration 596.""" lfs = LocalFileStorage() theme = Persona.objects.get(id=theme_id) header = theme.header_path footer = theme.footer_path # Delete invalid themes that are not images (e.g. PDF, EXE). try: Image.open(header) Image.open(footer) except IOError: log.info('Deleting invalid theme [%s] (header: %s) (footer: %s)' % (theme.addon.id, header, footer)) theme.addon.delete() theme.delete() rm_stored_dir(header.replace('header.png', ''), storage=lfs) return # Calculate checksum and save. try: theme.checksum = make_checksum(header, footer) theme.save() except IOError as e: log.error(str(e))
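# Hedged sketch: the dupe-detection idea behind make_checksum() and
# theme_checksum() above, using plain local files instead of
# LocalFileStorage. The paths are hypothetical placeholders.
import hashlib

def toy_theme_checksum(header_path, footer_path=None):
    footer = open(footer_path, 'rb').read() if footer_path else b''
    raw = open(header_path, 'rb').read() + footer
    # Same digest family as make_checksum: SHA-224 over header + footer bytes.
    return hashlib.sha224(raw).hexdigest()

# Two themes with byte-identical images hash identically, which is what
# lets theme_checksum() flag one as a duplicate of the other.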
from __future__ import absolute_import from sentry.models import Activity from sentry.testutils import APITestCase class GroupNoteTest(APITestCase): def test_simple(self): group = self.group activity = Activity.objects.create( group=group, project=group.project, type=Activity.NOTE, user=self.user, data={'text': 'hello world'}, ) self.login_as(user=self.user) url = '/api/0/issues/{}/comments/'.format(group.id) response = self.client.get(url, format='json') assert response.status_code == 200, response.content assert len(response.data) == 1 assert response.data[0]['id'] == str(activity.id) class GroupNoteCreateTest(APITestCase): def test_simple(self): group = self.group self.login_as(user=self.user) url = '/api/0/issues/{}/comments/'.format(group.id) response = self.client.post(url, format='json') assert response.status_code == 400 response = self.client.post(url, format='json', data={ 'text': 'hello world', }) assert response.status_code == 201, response.content activity = Activity.objects.get(id=response.data['id']) assert activity.user == self.user assert activity.group == group assert activity.data == {'text': 'hello world'} response = self.client.post(url, format='json', data={ 'text': 'hello world', }) assert response.status_code == 400, response.content
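# Hedged sketch of the comments endpoint exercised by the tests above,
# driven with `requests`. The host, issue id, and (omitted) auth are
# placeholders; only the URL shape and status codes come from the tests.
import requests

BASE = 'http://sentry.example.com'  # hypothetical host
url = '%s/api/0/issues/%s/comments/' % (BASE, 1)

resp = requests.post(url, json={'text': 'hello world'})
# 201 on first creation; 400 for a missing body or a duplicate note.
assert resp.status_code in (201, 400)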
from __future__ import absolute_import from celery.utils.dispatch.saferef import safe_ref from celery.tests.utils import Case class Class1(object): def x(self): pass def fun(obj): pass class Class2(object): def __call__(self, obj): pass class SaferefTests(Case): def setUp(self): ts = [] ss = [] for x in xrange(5000): t = Class1() ts.append(t) s = safe_ref(t.x, self._closure) ss.append(s) ts.append(fun) ss.append(safe_ref(fun, self._closure)) for x in xrange(30): t = Class2() ts.append(t) s = safe_ref(t, self._closure) ss.append(s) self.ts = ts self.ss = ss self.closureCount = 0 def tearDown(self): del self.ts del self.ss def testIn(self): """Test the "in" operator for safe references (cmp)""" for t in self.ts[:50]: self.assertTrue(safe_ref(t.x) in self.ss) def testValid(self): """Test that the references are valid (return instance methods)""" for s in self.ss: self.assertTrue(s()) def testShortCircuit(self): """Test that creation short-circuits to reuse existing references""" sd = {} for s in self.ss: sd[s] = 1 for t in self.ts: if hasattr(t, 'x'): self.assertIn(safe_ref(t.x), sd) else: self.assertIn(safe_ref(t), sd) def testRepresentation(self): """Test that the reference object's representation works XXX Doesn't currently check the results, just that no error is raised """ repr(self.ss[-1]) def _closure(self, ref): """Dumb utility mechanism to increment deletion counter""" self.closureCount += 1
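# Hedged sketch: what safe_ref() provides over a strong reference. The
# target stays weakly referenced, so dropping its owner invalidates the
# safe reference (which is what the _closure callback above is told about).
from celery.utils.dispatch.saferef import safe_ref

class Owner(object):
    def method(self):
        return 'alive'

owner = Owner()
ref = safe_ref(owner.method)
assert ref()() == 'alive'  # dereference the weak ref, then call the method
del owner                  # on CPython the owner is collected immediately...
assert ref() is None       # ...and the safe reference dereferences to None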
from twisted.internet.defer import inlineCallbacks, returnValue

from vumi.connectors import (
    BaseConnector, ReceiveInboundConnector, ReceiveOutboundConnector,
    IgnoreMessage)
from vumi.tests.utils import LogCatcher
from vumi.worker import BaseWorker
from vumi.message import TransportUserMessage
from vumi.middleware.tests.utils import RecordingMiddleware
from vumi.tests.helpers import VumiTestCase, MessageHelper, WorkerHelper


class DummyWorker(BaseWorker):
    def setup_connectors(self):
        pass

    def setup_worker(self):
        pass

    def teardown_worker(self):
        pass


class BaseConnectorTestCase(VumiTestCase):

    connector_class = None

    def setUp(self):
        self.msg_helper = self.add_helper(MessageHelper())
        self.worker_helper = self.add_helper(WorkerHelper())

    @inlineCallbacks
    def mk_connector(self, worker=None, connector_name=None,
                     prefetch_count=None, middlewares=None, setup=False):
        if worker is None:
            worker = yield self.worker_helper.get_worker(DummyWorker, {})
        if connector_name is None:
            connector_name = "dummy_connector"
        connector = self.connector_class(worker, connector_name,
                                         prefetch_count=prefetch_count,
                                         middlewares=middlewares)
        if setup:
            yield connector.setup()
        returnValue(connector)

    @inlineCallbacks
    def mk_consumer(self, *args, **kwargs):
        conn = yield self.mk_connector(*args, **kwargs)
        consumer = yield conn._setup_consumer('inbound', TransportUserMessage,
                                              lambda msg: None)
        returnValue((conn, consumer))


class TestBaseConnector(BaseConnectorTestCase):

    connector_class = BaseConnector

    @inlineCallbacks
    def test_creation(self):
        conn = yield self.mk_connector(connector_name="foo")
        self.assertEqual(conn.name, "foo")
        self.assertTrue(isinstance(conn.worker, BaseWorker))

    @inlineCallbacks
    def test_middlewares_consume(self):
        worker = yield self.worker_helper.get_worker(DummyWorker, {})
        middlewares = [RecordingMiddleware(
            str(i), {'consume_priority': 0, 'publish_priority': 0}, worker)
            for i in range(3)]
        conn, consumer = yield self.mk_consumer(
            worker=worker, connector_name='foo', middlewares=middlewares)
        consumer.unpause()
        msgs = []
        conn._set_default_endpoint_handler('inbound', msgs.append)
        msg = self.msg_helper.make_inbound("inbound")
        yield self.worker_helper.dispatch_inbound(msg, 'foo')
        record = msgs[0].payload.pop('record')
        self.assertEqual(record,
                         [(str(i), 'inbound', 'foo') for i in range(3)])

    @inlineCallbacks
    def test_middlewares_publish(self):
        worker = yield self.worker_helper.get_worker(DummyWorker, {})
        middlewares = [RecordingMiddleware(
            str(i), {'consume_priority': 0, 'publish_priority': 0}, worker)
            for i in range(3)]
        conn = yield self.mk_connector(
            worker=worker, connector_name='foo', middlewares=middlewares)
        yield conn._setup_publisher('outbound')
        msg = self.msg_helper.make_outbound("outbound")
        yield conn._publish_message('outbound', msg, 'dummy_endpoint')
        msgs = self.worker_helper.get_dispatched_outbound('foo')
        record = msgs[0].payload.pop('record')
        self.assertEqual(record,
                         [[str(i), 'outbound', 'foo']
                          for i in range(2, -1, -1)])

    @inlineCallbacks
    def test_prefetch_count(self):
        conn, consumer = yield self.mk_consumer(prefetch_count=10)
        self.assertEqual(consumer.channel.qos_prefetch_count, 10)

    @inlineCallbacks
    def test_setup_raises(self):
        conn = yield self.mk_connector()
        self.assertRaises(NotImplementedError, conn.setup)

    @inlineCallbacks
    def test_teardown(self):
        conn, consumer = yield self.mk_consumer()
        self.assertTrue(consumer.keep_consuming)
        yield conn.teardown()
        self.assertFalse(consumer.keep_consuming)

    @inlineCallbacks
    def test_paused(self):
        conn, consumer = yield self.mk_consumer()
        consumer.pause()
self.assertTrue(conn.paused) consumer.unpause() self.assertFalse(conn.paused) @inlineCallbacks def test_pause(self): conn, consumer = yield self.mk_consumer() consumer.unpause() self.assertFalse(consumer.paused) conn.pause() self.assertTrue(consumer.paused) @inlineCallbacks def test_unpause(self): conn, consumer = yield self.mk_consumer() consumer.pause() self.assertTrue(consumer.paused) conn.unpause() self.assertFalse(consumer.paused) @inlineCallbacks def test_setup_publisher(self): conn = yield self.mk_connector(connector_name='foo') publisher = yield conn._setup_publisher('outbound') self.assertEqual(publisher.routing_key, 'foo.outbound') @inlineCallbacks def test_setup_consumer(self): conn, consumer = yield self.mk_consumer(connector_name='foo') self.assertTrue(consumer.paused) self.assertEqual(consumer.routing_key, 'foo.inbound') self.assertEqual(consumer.message_class, TransportUserMessage) @inlineCallbacks def test_set_endpoint_handler(self): conn, consumer = yield self.mk_consumer(connector_name='foo') consumer.unpause() msgs = [] conn._set_endpoint_handler('inbound', msgs.append, 'dummy_endpoint') msg = self.msg_helper.make_inbound("inbound") msg.set_routing_endpoint('dummy_endpoint') yield self.worker_helper.dispatch_inbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_set_none_endpoint_handler(self): conn, consumer = yield self.mk_consumer(connector_name='foo') consumer.unpause() msgs = [] conn._set_endpoint_handler('inbound', msgs.append, None) msg = self.msg_helper.make_inbound("inbound") yield self.worker_helper.dispatch_inbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_set_default_endpoint_handler(self): conn, consumer = yield self.mk_consumer(connector_name='foo') consumer.unpause() msgs = [] conn._set_default_endpoint_handler('inbound', msgs.append) msg = self.msg_helper.make_inbound("inbound") yield self.worker_helper.dispatch_inbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_publish_message_with_endpoint(self): conn = yield self.mk_connector(connector_name='foo') yield conn._setup_publisher('outbound') msg = self.msg_helper.make_outbound("outbound") yield conn._publish_message('outbound', msg, 'dummy_endpoint') msgs = self.worker_helper.get_dispatched_outbound('foo') self.assertEqual(msgs, [msg]) class TestReceiveInboundConnector(BaseConnectorTestCase): connector_class = ReceiveInboundConnector @inlineCallbacks def test_setup(self): conn = yield self.mk_connector(connector_name='foo') yield conn.setup() conn.unpause() with LogCatcher() as lc: msg = self.msg_helper.make_inbound("inbound") yield self.worker_helper.dispatch_inbound(msg, 'foo') [msg_log] = lc.messages() self.assertTrue(msg_log.startswith("No inbound handler for 'foo'")) with LogCatcher() as lc: event = self.msg_helper.make_ack() yield self.worker_helper.dispatch_event(event, 'foo') [event_log] = lc.messages() self.assertTrue(event_log.startswith("No event handler for 'foo'")) msg = self.msg_helper.make_outbound("outbound") yield conn.publish_outbound(msg) msgs = self.worker_helper.get_dispatched_outbound('foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_default_inbound_handler(self): conn = yield self.mk_connector(connector_name='foo', setup=True) with LogCatcher() as lc: conn.default_inbound_handler( self.msg_helper.make_inbound("inbound")) [log] = lc.messages() self.assertTrue(log.startswith("No inbound handler for 'foo'")) @inlineCallbacks def test_default_event_handler(self): conn = yield 
self.mk_connector(connector_name='foo', setup=True) with LogCatcher() as lc: conn.default_event_handler(self.msg_helper.make_ack()) [log] = lc.messages() self.assertTrue(log.startswith("No event handler for 'foo'")) @inlineCallbacks def test_set_inbound_handler(self): msgs = [] conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_inbound_handler(msgs.append) msg = self.msg_helper.make_inbound("inbound") yield self.worker_helper.dispatch_inbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_set_default_inbound_handler(self): msgs = [] conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_default_inbound_handler(msgs.append) msg = self.msg_helper.make_inbound("inbound") yield self.worker_helper.dispatch_inbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_set_event_handler(self): msgs = [] conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_event_handler(msgs.append) msg = self.msg_helper.make_ack() yield self.worker_helper.dispatch_event(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_set_default_event_handler(self): msgs = [] conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_default_event_handler(msgs.append) msg = self.msg_helper.make_ack() yield self.worker_helper.dispatch_event(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_publish_outbound(self): conn = yield self.mk_connector(connector_name='foo', setup=True) msg = self.msg_helper.make_outbound("outbound") yield conn.publish_outbound(msg) msgs = self.worker_helper.get_dispatched_outbound('foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_inbound_handler_ignore_message(self): def im_handler(msg): raise IgnoreMessage() conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_default_inbound_handler(im_handler) msg = self.msg_helper.make_inbound("inbound") with LogCatcher() as lc: yield self.worker_helper.dispatch_inbound(msg, 'foo') [log] = lc.messages() self.assertTrue(log.startswith( "Ignoring msg due to IgnoreMessage(): <Message")) class TestReceiveOutboundConnector(BaseConnectorTestCase): connector_class = ReceiveOutboundConnector @inlineCallbacks def test_setup(self): conn = yield self.mk_connector(connector_name='foo') yield conn.setup() conn.unpause() with LogCatcher() as lc: msg = self.msg_helper.make_outbound("outbound") yield self.worker_helper.dispatch_outbound(msg, 'foo') [log] = lc.messages() self.assertTrue(log.startswith("No outbound handler for 'foo'")) msg = self.msg_helper.make_inbound("inbound") yield conn.publish_inbound(msg) msgs = self.worker_helper.get_dispatched_inbound('foo') self.assertEqual(msgs, [msg]) msg = self.msg_helper.make_ack() yield conn.publish_event(msg) msgs = self.worker_helper.get_dispatched_events('foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_default_outbound_handler(self): conn = yield self.mk_connector(connector_name='foo', setup=True) with LogCatcher() as lc: conn.default_outbound_handler( self.msg_helper.make_outbound("outbound")) [log] = lc.messages() self.assertTrue(log.startswith("No outbound handler for 'foo'")) @inlineCallbacks def test_set_outbound_handler(self): msgs = [] conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_outbound_handler(msgs.append) msg = self.msg_helper.make_outbound("outbound") yield self.worker_helper.dispatch_outbound(msg, 'foo') 
self.assertEqual(msgs, [msg]) @inlineCallbacks def test_set_default_outbound_handler(self): msgs = [] conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_default_outbound_handler(msgs.append) msg = self.msg_helper.make_outbound("outbound") yield self.worker_helper.dispatch_outbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_publish_inbound(self): conn = yield self.mk_connector(connector_name='foo', setup=True) msg = self.msg_helper.make_inbound("inbound") yield conn.publish_inbound(msg) msgs = self.worker_helper.get_dispatched_inbound('foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_publish_event(self): conn = yield self.mk_connector(connector_name='foo', setup=True) msg = self.msg_helper.make_ack() yield conn.publish_event(msg) msgs = self.worker_helper.get_dispatched_events('foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_outbound_handler_nack_message(self): def im_handler(msg): raise IgnoreMessage() conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_default_outbound_handler(im_handler) msg = self.msg_helper.make_inbound("inbound") with LogCatcher() as lc: yield self.worker_helper.dispatch_outbound(msg, 'foo') [log] = lc.messages() self.assertTrue(log.startswith( "Ignoring msg (with NACK) due to IgnoreMessage(): <Message")) [event] = self.worker_helper.get_dispatched_events('foo') self.assertEqual(event['event_type'], 'nack')
import binascii import hashlib import sys def computeAuxpow (block, target, ok): """ Build an auxpow object (serialised as hex string) that solves (ok = True) or doesn't solve (ok = False) the block. """ # Start by building the merge-mining coinbase. The merkle tree # consists only of the block hash as root. coinbase = "fabe" + binascii.hexlify ("m" * 2) coinbase += block coinbase += "01000000" + ("00" * 4) # Construct "vector" of transaction inputs. vin = "01" vin += ("00" * 32) + ("ff" * 4) vin += ("%02x" % (len (coinbase) / 2)) + coinbase vin += ("ff" * 4) # Build up the full coinbase transaction. It consists only # of the input and has no outputs. tx = "01000000" + vin + "00" + ("00" * 4) txHash = doubleHashHex (tx) # Construct the parent block header. It need not be valid, just good # enough for auxpow purposes. header = "01000000" header += "00" * 32 header += reverseHex (txHash) header += "00" * 4 header += "00" * 4 header += "00" * 4 # Mine the block. (header, blockhash) = mineBlock (header, target, ok) # Build the MerkleTx part of the auxpow. auxpow = tx auxpow += blockhash auxpow += "00" auxpow += "00" * 4 # Extend to full auxpow. auxpow += "00" auxpow += "00" * 4 auxpow += header return auxpow def mineBlock (header, target, ok): """ Given a block header, update the nonce until it is ok (or not) for the given target. """ data = bytearray (binascii.unhexlify (header)) while True: assert data[79] < 255 data[79] += 1 hexData = binascii.hexlify (data) blockhash = doubleHashHex (hexData) if (ok and blockhash < target) or ((not ok) and blockhash > target): break return (hexData, blockhash) def doubleHashHex (data): """ Perform Crowncoin's Double-SHA256 hash on the given hex string. """ hasher = hashlib.sha256 () hasher.update (binascii.unhexlify (data)) data = hasher.digest () hasher = hashlib.sha256 () hasher.update (data) return reverseHex (hasher.hexdigest ()) def reverseHex (data): """ Flip byte order in the given data (hex string). """ b = bytearray (binascii.unhexlify (data)) b.reverse () return binascii.hexlify (b) if len (sys.argv) != 4: print "Usage: solveauxpow.py HASH _TARGET OK" sys.exit () blockHash = sys.argv[1] revTarget = sys.argv[2] ok = sys.argv[3] if ok not in ["true", "false"]: print "expected 'true' or 'false' as OK value" sys.exit () target = reverseHex (revTarget) ok = (ok == "true") res = computeAuxpow (blockHash, target, ok) print res
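# Hedged sanity checks for the helpers above (assumes they are run in, or
# imported from, this module). Only structural properties are asserted:
# doubleHashHex returns a 64-char hex digest, and reverseHex round-trips.
assert reverseHex(reverseHex('deadbeef')) == 'deadbeef'
assert len(doubleHashHex('')) == 64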
'''Trains two recurrent neural networks based upon a story and a question. The resulting merged vector is then queried to answer a range of bAbI tasks. The results are comparable to those for an LSTM model provided in Weston et al.: "Towards AI-Complete Question Answering: A Set of Prerequisite Toy Tasks" http://arxiv.org/abs/1502.05698 Task Number | FB LSTM Baseline | Keras QA --- | --- | --- QA1 - Single Supporting Fact | 50 | 100.0 QA2 - Two Supporting Facts | 20 | 50.0 QA3 - Three Supporting Facts | 20 | 20.5 QA4 - Two Arg. Relations | 61 | 62.9 QA5 - Three Arg. Relations | 70 | 61.9 QA6 - Yes/No Questions | 48 | 50.7 QA7 - Counting | 49 | 78.9 QA8 - Lists/Sets | 45 | 77.2 QA9 - Simple Negation | 64 | 64.0 QA10 - Indefinite Knowledge | 44 | 47.7 QA11 - Basic Coreference | 72 | 74.9 QA12 - Conjunction | 74 | 76.4 QA13 - Compound Coreference | 94 | 94.4 QA14 - Time Reasoning | 27 | 34.8 QA15 - Basic Deduction | 21 | 32.4 QA16 - Basic Induction | 23 | 50.6 QA17 - Positional Reasoning | 51 | 49.1 QA18 - Size Reasoning | 52 | 90.8 QA19 - Path Finding | 8 | 9.0 QA20 - Agent's Motivations | 91 | 90.7 For the resources related to the bAbI project, refer to: https://research.facebook.com/researchers/1543934539189348 Notes: - With default word, sentence, and query vector sizes, the GRU model achieves: - 100% test accuracy on QA1 in 20 epochs (2 seconds per epoch on CPU) - 50% test accuracy on QA2 in 20 epochs (16 seconds per epoch on CPU) In comparison, the Facebook paper achieves 50% and 20% for the LSTM baseline. - The task does not traditionally parse the question separately. This likely improves accuracy and is a good example of merging two RNNs. - The word vector embeddings are not shared between the story and question RNNs. - See how the accuracy changes given 10,000 training samples (en-10k) instead of only 1000. 1000 was used in order to be comparable to the original paper. - Experiment with GRU, LSTM, and JZS1-3 as they give subtly different results. - The length and noise (i.e. 'useless' story components) impact the ability for LSTMs / GRUs to provide the correct answer. Given only the supporting facts, these RNNs can achieve 100% accuracy on many tasks. Memory networks and neural networks that use attentional processes can efficiently search through this noise to find the relevant statements, improving performance substantially. This becomes especially obvious on QA2 and QA3, both far longer than QA1. ''' from __future__ import print_function from functools import reduce import re import tarfile import numpy as np np.random.seed(1337) # for reproducibility from keras.utils.data_utils import get_file from keras.layers.embeddings import Embedding from keras.layers import Dense, Merge, Dropout, RepeatVector from keras.layers import recurrent from keras.models import Sequential from keras.preprocessing.sequence import pad_sequences def tokenize(sent): '''Return the tokens of a sentence including punctuation. >>> tokenize('Bob dropped the apple. Where is the apple?') ['Bob', 'dropped', 'the', 'apple', '.', 'Where', 'is', 'the', 'apple', '?'] ''' return [x.strip() for x in re.split('(\W+)?', sent) if x.strip()] def parse_stories(lines, only_supporting=False): '''Parse stories provided in the bAbi tasks format If only_supporting is true, only the sentences that support the answer are kept. 
''' data = [] story = [] for line in lines: line = line.decode('utf-8').strip() nid, line = line.split(' ', 1) nid = int(nid) if nid == 1: story = [] if '\t' in line: q, a, supporting = line.split('\t') q = tokenize(q) substory = None if only_supporting: # Only select the related substory supporting = map(int, supporting.split()) substory = [story[i - 1] for i in supporting] else: # Provide all the substories substory = [x for x in story if x] data.append((substory, q, a)) story.append('') else: sent = tokenize(line) story.append(sent) return data def get_stories(f, only_supporting=False, max_length=None): '''Given a file name, read the file, retrieve the stories, and then convert the sentences into a single story. If max_length is supplied, any stories longer than max_length tokens will be discarded. ''' data = parse_stories(f.readlines(), only_supporting=only_supporting) flatten = lambda data: reduce(lambda x, y: x + y, data) data = [(flatten(story), q, answer) for story, q, answer in data if not max_length or len(flatten(story)) < max_length] return data def vectorize_stories(data, word_idx, story_maxlen, query_maxlen): X = [] Xq = [] Y = [] for story, query, answer in data: x = [word_idx[w] for w in story] xq = [word_idx[w] for w in query] y = np.zeros(len(word_idx) + 1) # let's not forget that index 0 is reserved y[word_idx[answer]] = 1 X.append(x) Xq.append(xq) Y.append(y) return pad_sequences(X, maxlen=story_maxlen), pad_sequences(Xq, maxlen=query_maxlen), np.array(Y) RNN = recurrent.LSTM EMBED_HIDDEN_SIZE = 50 SENT_HIDDEN_SIZE = 100 QUERY_HIDDEN_SIZE = 100 BATCH_SIZE = 32 EPOCHS = 40 print('RNN / Embed / Sent / Query = {}, {}, {}, {}'.format(RNN, EMBED_HIDDEN_SIZE, SENT_HIDDEN_SIZE, QUERY_HIDDEN_SIZE)) try: path = get_file('babi-tasks-v1-2.tar.gz', origin='http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz') except: print('Error downloading dataset, please download it manually:\n' '$ wget http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz\n' '$ mv tasks_1-20_v1-2.tar.gz ~/.keras/datasets/babi-tasks-v1-2.tar.gz') raise tar = tarfile.open(path) challenge = 'tasks_1-20_v1-2/en/qa2_two-supporting-facts_{}.txt' train = get_stories(tar.extractfile(challenge.format('train'))) test = get_stories(tar.extractfile(challenge.format('test'))) vocab = sorted(reduce(lambda x, y: x | y, (set(story + q + [answer]) for story, q, answer in train + test))) vocab_size = len(vocab) + 1 word_idx = dict((c, i + 1) for i, c in enumerate(vocab)) story_maxlen = max(map(len, (x for x, _, _ in train + test))) query_maxlen = max(map(len, (x for _, x, _ in train + test))) X, Xq, Y = vectorize_stories(train, word_idx, story_maxlen, query_maxlen) tX, tXq, tY = vectorize_stories(test, word_idx, story_maxlen, query_maxlen) print('vocab = {}'.format(vocab)) print('X.shape = {}'.format(X.shape)) print('Xq.shape = {}'.format(Xq.shape)) print('Y.shape = {}'.format(Y.shape)) print('story_maxlen, query_maxlen = {}, {}'.format(story_maxlen, query_maxlen)) print('Build model...') sentrnn = Sequential() sentrnn.add(Embedding(vocab_size, EMBED_HIDDEN_SIZE, input_length=story_maxlen)) sentrnn.add(Dropout(0.3)) qrnn = Sequential() qrnn.add(Embedding(vocab_size, EMBED_HIDDEN_SIZE, input_length=query_maxlen)) qrnn.add(Dropout(0.3)) qrnn.add(RNN(EMBED_HIDDEN_SIZE, return_sequences=False)) qrnn.add(RepeatVector(story_maxlen)) model = Sequential() model.add(Merge([sentrnn, qrnn], mode='sum')) model.add(RNN(EMBED_HIDDEN_SIZE, return_sequences=False)) model.add(Dropout(0.3)) 
model.add(Dense(vocab_size, activation='softmax')) model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy']) print('Training') model.fit([X, Xq], Y, batch_size=BATCH_SIZE, nb_epoch=EPOCHS, validation_split=0.05) loss, acc = model.evaluate([tX, tXq], tY, batch_size=BATCH_SIZE) print('Test loss / test accuracy = {:.4f} / {:.4f}'.format(loss, acc))
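# Hedged follow-on (same Keras 0.x-era API as above): once trained, the
# merged model can answer a single vectorized story/question pair. The
# reverse index is built here because word_idx only maps word -> id.
idx_word = dict((i, w) for w, i in word_idx.items())
preds = model.predict([tX[:1], tXq[:1]], batch_size=1)
print('predicted answer:', idx_word.get(int(np.argmax(preds[0])), '?'))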
version_info = (1, 9, 'dev0') __version__ = '.'.join(map(str, version_info))
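# For reference, the join above yields the version string '1.9.dev0'.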
from __future__ import print_function

__title__ = 'pif.utils'
__author__ = 'Artur Barseghyan'
__copyright__ = 'Copyright (c) 2013 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('ensure_autodiscover', 'list_checkers', 'get_public_ip')

from pif.base import registry
from pif.discover import autodiscover


def ensure_autodiscover():
    """
    Ensures the IP checkers are discovered.
    """
    if not registry._registry:
        autodiscover()


def list_checkers():
    """
    Lists available checkers.

    :return list:
    """
    return registry._registry.keys()


def get_public_ip(preferred_checker=None, verbose=False):
    """
    Gets the public IP using one of the registered checker services.

    :param str preferred_checker: Checker UID. If given, only the preferred
        checker is used.
    :param bool verbose: If set to True, debug info is printed.
    :return str:
    """
    ensure_autodiscover()

    # If a preferred checker is given, use it exclusively.
    if preferred_checker:
        ip_checker_cls = registry.get(preferred_checker)
        if not ip_checker_cls:
            return False
        ip_checker = ip_checker_cls(verbose=verbose)
        ip = ip_checker.get_public_ip()
        if verbose:
            print('provider: ', ip_checker_cls)
        return ip

    # Otherwise, try all registered checkers until one returns an IP.
    for ip_checker_name, ip_checker_cls in registry._registry.items():
        ip_checker = ip_checker_cls(verbose=verbose)
        try:
            ip = ip_checker.get_public_ip()
            if ip:
                if verbose:
                    print('provider: ', ip_checker_cls)
                return ip
        except Exception as e:
            if verbose:
                print(e)

    return False
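# Hedged usage sketch for the helpers above; which checkers are available
# depends entirely on the pif checker plugins discovered at runtime.
if __name__ == '__main__':
    print('checkers: ', list(list_checkers()))
    ip = get_public_ip(verbose=True)
    print('public IP: ', ip if ip else 'lookup failed')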
from abc import abstractmethod import sys, abc if sys.version_info >= (3, 4): ABC = abc.ABC else: ABC = abc.ABCMeta('ABC', (), {}) import numpy as np from enum import Enum class Env(ABC): class Terminate(Enum): Null = 0 Fail = 1 Succ = 2 def __init__(self, args, enable_draw): self.enable_draw = enable_draw return
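# Hedged usage note: Terminate is a plain Enum, so episode status can be
# compared and inspected directly (member values taken from the class above).
status = Env.Terminate.Succ
assert status is not Env.Terminate.Fail
assert status.value == 2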
VERSION = "1.0.0b3"
def uniquer(seq, idfun=None): if idfun is None: def idfun(x): return x seen = {} result = [] for item in seq: marker = idfun(item) if marker in seen: continue seen[marker] = 1 result.append(item) return result
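# Hedged examples for uniquer(): the default identity keying, then a key
# function that deduplicates case-insensitively (first occurrence wins).
assert uniquer([1, 2, 2, 3, 1]) == [1, 2, 3]
assert uniquer(['Ape', 'ape', 'Bee'], idfun=str.lower) == ['Ape', 'Bee']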
from gettext import gettext as _ import traceback from pulp.client.commands.repo.sync_publish import StatusRenderer from pulp.client.extensions.core import COLOR_FAILURE from pulp_puppet.common import constants from pulp_puppet.common.publish_progress import PublishProgressReport from pulp_puppet.common.sync_progress import SyncProgressReport class PuppetStatusRenderer(StatusRenderer): def __init__(self, context): super(PuppetStatusRenderer, self).__init__(context) # Sync Steps self.sync_metadata_last_state = constants.STATE_NOT_STARTED self.sync_modules_last_state = constants.STATE_NOT_STARTED # Publish Steps self.publish_modules_last_state = constants.STATE_NOT_STARTED self.publish_metadata_last_state = constants.STATE_NOT_STARTED self.publish_http_last_state = constants.STATE_NOT_STARTED self.publish_https_last_state = constants.STATE_NOT_STARTED # UI Widgets self.sync_metadata_bar = self.prompt.create_progress_bar() self.sync_modules_bar = self.prompt.create_progress_bar() self.publish_modules_bar = self.prompt.create_progress_bar() self.publish_metadata_spinner = self.prompt.create_spinner() def display_report(self, progress_report): # Sync Steps if constants.IMPORTER_ID in progress_report: sync_report = SyncProgressReport.from_progress_dict(progress_report[constants.IMPORTER_ID]) self._display_sync_metadata_step(sync_report) self._display_sync_modules_step(sync_report) # Publish Steps if constants.DISTRIBUTOR_ID in progress_report: publish_report = PublishProgressReport.from_progress_dict(progress_report[constants.DISTRIBUTOR_ID]) self._display_publish_modules_step(publish_report) self._display_publish_metadata_step(publish_report) self._display_publish_http_https_step(publish_report) def _display_sync_metadata_step(self, sync_report): # Do nothing if it hasn't started yet or has already finished if sync_report.metadata_state == constants.STATE_NOT_STARTED or \ self.sync_metadata_last_state in constants.COMPLETE_STATES: return # Only render this on the first non-not-started state if self.sync_metadata_last_state == constants.STATE_NOT_STARTED: self.prompt.write(_('Downloading metadata...'), tag='download-metadata') # Same behavior for running or success if sync_report.metadata_state in (constants.STATE_RUNNING, constants.STATE_SUCCESS): items_done = sync_report.metadata_query_finished_count items_total = sync_report.metadata_query_total_count item_type = _('Metadata Query') self._render_itemized_in_progress_state(items_done, items_total, item_type, self.sync_metadata_bar, sync_report.metadata_state) # The only state left to handle is if it failed else: self.prompt.render_failure_message(_('... 
failed')) self.prompt.render_spacer() self._render_error(sync_report.metadata_error_message, sync_report.metadata_exception, sync_report.metadata_traceback) # Before finishing update the state self.sync_metadata_last_state = sync_report.metadata_state def _display_sync_modules_step(self, sync_report): # Do nothing if it hasn't started yet or has already finished if sync_report.modules_state == constants.STATE_NOT_STARTED or \ self.sync_modules_last_state in constants.COMPLETE_STATES: return # Only render this on the first non-not-started state if self.sync_modules_last_state == constants.STATE_NOT_STARTED: self.prompt.write(_('Downloading new modules...'), tag='downloading') # Same behavior for running or success if sync_report.modules_state in (constants.STATE_RUNNING, constants.STATE_SUCCESS): items_done = sync_report.modules_finished_count + sync_report.modules_error_count items_total = sync_report.modules_total_count item_type = _('Module') self._render_itemized_in_progress_state(items_done, items_total, item_type, self.sync_modules_bar, sync_report.modules_state) # The only state left to handle is if it failed else: self.prompt.render_failure_message(_('... failed')) self.prompt.render_spacer() self._render_error(sync_report.modules_error_message, sync_report.modules_exception, sync_report.modules_traceback) # Regardless of success or failure, display any individual module errors # if the new state is complete if sync_report.modules_state in constants.COMPLETE_STATES: self._render_module_errors(sync_report.modules_individual_errors) # Before finishing update the state self.sync_modules_last_state = sync_report.modules_state def _display_publish_modules_step(self, publish_report): # Do nothing if it hasn't started yet or has already finished if publish_report.modules_state == constants.STATE_NOT_STARTED or \ self.publish_modules_last_state in constants.COMPLETE_STATES: return # Only render this on the first non-not-started state if self.publish_modules_last_state == constants.STATE_NOT_STARTED: self.prompt.write(_('Publishing modules...'), tag='publishing') # Same behavior for running or success if publish_report.modules_state in (constants.STATE_RUNNING, constants.STATE_SUCCESS): items_done = publish_report.modules_finished_count + publish_report.modules_error_count items_total = publish_report.modules_total_count item_type = _('Module') self._render_itemized_in_progress_state(items_done, items_total, item_type, self.publish_modules_bar, publish_report.modules_state) # The only state left to handle is if it failed else: self.prompt.render_failure_message(_('... 
failed'))
            self.prompt.render_spacer()
            self._render_error(publish_report.modules_error_message,
                               publish_report.modules_exception,
                               publish_report.modules_traceback)

        # Regardless of success or failure, display any individual module
        # errors if the new state is complete
        if publish_report.modules_state in constants.COMPLETE_STATES:
            self._render_module_errors(publish_report.modules_individual_errors)

        # Before finishing, update the state
        self.publish_modules_last_state = publish_report.modules_state

    def _display_publish_metadata_step(self, publish_report):
        # Do nothing if it hasn't started yet or has already finished
        if publish_report.metadata_state == constants.STATE_NOT_STARTED or \
           self.publish_metadata_last_state in constants.COMPLETE_STATES:
            return

        # Only render this on the first non-not-started state
        if self.publish_metadata_last_state == constants.STATE_NOT_STARTED:
            self.prompt.write(_('Generating repository metadata...'),
                              tag='generating')

        if publish_report.metadata_state == constants.STATE_RUNNING:
            self.publish_metadata_spinner.next()

        elif publish_report.metadata_state == constants.STATE_SUCCESS:
            self.publish_metadata_spinner.next(finished=True)
            self.prompt.write(_('... completed'), tag='completed')
            self.prompt.render_spacer()

        elif publish_report.metadata_state == constants.STATE_FAILED:
            self.publish_metadata_spinner.next(finished=True)
            self.prompt.render_failure_message(_('... failed'))
            self.prompt.render_spacer()
            self._render_error(publish_report.metadata_error_message,
                               publish_report.metadata_exception,
                               publish_report.metadata_traceback)

        self.publish_metadata_last_state = publish_report.metadata_state

    def _display_publish_http_https_step(self, publish_report):
        # -- HTTP --------
        if publish_report.publish_http != constants.STATE_NOT_STARTED and \
           self.publish_http_last_state not in constants.COMPLETE_STATES:
            self.prompt.write(_('Publishing repository over HTTP...'))

            if publish_report.publish_http == constants.STATE_SUCCESS:
                self.prompt.write(_('... completed'), tag='http-completed')
            elif publish_report.publish_http == constants.STATE_SKIPPED:
                self.prompt.write(_('... skipped'), tag='http-skipped')
            else:
                self.prompt.write(_('... unknown'), tag='http-unknown')

            self.publish_http_last_state = publish_report.publish_http
            self.prompt.render_spacer()

        # -- HTTPS --------
        if publish_report.publish_https != constants.STATE_NOT_STARTED and \
           self.publish_https_last_state not in constants.COMPLETE_STATES:
            self.prompt.write(_('Publishing repository over HTTPS...'))

            if publish_report.publish_https == constants.STATE_SUCCESS:
                self.prompt.write(_('... completed'), tag='https-completed')
            elif publish_report.publish_https == constants.STATE_SKIPPED:
                self.prompt.write(_('... skipped'), tag='https-skipped')
            else:
                self.prompt.write(_('... unknown'), tag='https-unknown')

            self.publish_https_last_state = publish_report.publish_https

    def _render_itemized_in_progress_state(self, items_done, items_total,
                                           type_name, progress_bar,
                                           current_state):
        """
        This is a pretty ugly way of reusing similar code between the publish
        steps for packages and distributions. There might be a cleaner way
        but I was having trouble updating the correct state variable and
        frankly I'm out of time. Feel free to fix this if you are inspired.
        """

        # For the progress bar to work, we can't write anything after it until
        # we're completely finished with it. Assemble the download summary into
        # a string and let the progress bar render it.
        message_data = {
            'name': type_name.title(),
            'items_done': items_done,
            'items_total': items_total,
        }
        template = _('%(name)s: %(items_done)s/%(items_total)s items')
        bar_message = template % message_data

        # If there's nothing to download in this step, flag the bar as complete
        if items_total == 0:
            items_total = items_done = 1

        progress_bar.render(items_done, items_total, message=bar_message)

        if current_state == constants.STATE_SUCCESS:
            self.prompt.write(_('... completed'))
            self.prompt.render_spacer()

    def _render_module_errors(self, individual_errors):
        """
        :param individual_errors: list of dicts, each with keys 'module',
               'exception', and 'traceback'.
        :type  individual_errors: list
        """
        if individual_errors:
            # TODO: read this from config
            display_error_count = 20
            self.prompt.render_failure_message(
                _('Could not import the following modules:'))
            for module_error in individual_errors[:display_error_count]:
                msg = _('  %(module)s: %(error)s')
                msg = msg % {'module': module_error['module'],
                             'error': module_error['exception']}
                self.prompt.write(msg, color=COLOR_FAILURE)
            self.prompt.render_spacer()

    def _render_error(self, error_message, exception, traceback):
        msg = _('The following error was encountered during the previous '
                'step. More information can be found by passing the -v flag '
                'one or more times.')
        self.prompt.render_failure_message(msg)
        self.prompt.render_spacer()
        self.prompt.render_failure_message('  %s' % error_message)

        self.context.logger.error(error_message)
        self.context.logger.error(exception)
        self.context.logger.error(traceback)
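# Hedged illustration of the bar message assembled in
# _render_itemized_in_progress_state() above, outside of any prompt widget:
_template = '%(name)s: %(items_done)s/%(items_total)s items'
print(_template % {'name': 'Module', 'items_done': 3, 'items_total': 10})
# -> Module: 3/10 items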
import curses import sys FROMWHO = "Thomas Gellekum <tg@FreeBSD.org>" def set_color(win, color): if curses.has_colors(): n = color + 1 curses.init_pair(n, color, my_bg) win.attroff(curses.A_COLOR) win.attron(curses.color_pair(n)) def unset_color(win): if curses.has_colors(): win.attrset(curses.color_pair(0)) def look_out(msecs): curses.napms(msecs) if stdscr.getch() != -1: curses.beep() sys.exit(0) def boxit(): for y in range(0, 20): stdscr.addch(y, 7, ord('|')) for x in range(8, 80): stdscr.addch(19, x, ord('_')) for x in range(0, 80): stdscr.addch(22, x, ord('_')) return def seas(): stdscr.addch(4, 1, ord('S')) stdscr.addch(6, 1, ord('E')) stdscr.addch(8, 1, ord('A')) stdscr.addch(10, 1, ord('S')) stdscr.addch(12, 1, ord('O')) stdscr.addch(14, 1, ord('N')) stdscr.addch(16, 1, ord("'")) stdscr.addch(18, 1, ord('S')) return def greet(): stdscr.addch(3, 5, ord('G')) stdscr.addch(5, 5, ord('R')) stdscr.addch(7, 5, ord('E')) stdscr.addch(9, 5, ord('E')) stdscr.addch(11, 5, ord('T')) stdscr.addch(13, 5, ord('I')) stdscr.addch(15, 5, ord('N')) stdscr.addch(17, 5, ord('G')) stdscr.addch(19, 5, ord('S')) return def fromwho(): stdscr.addstr(21, 13, FROMWHO) return def tree(): set_color(treescrn, curses.COLOR_GREEN) treescrn.addch(1, 11, ord('/')) treescrn.addch(2, 11, ord('/')) treescrn.addch(3, 10, ord('/')) treescrn.addch(4, 9, ord('/')) treescrn.addch(5, 9, ord('/')) treescrn.addch(6, 8, ord('/')) treescrn.addch(7, 7, ord('/')) treescrn.addch(8, 6, ord('/')) treescrn.addch(9, 6, ord('/')) treescrn.addch(10, 5, ord('/')) treescrn.addch(11, 3, ord('/')) treescrn.addch(12, 2, ord('/')) treescrn.addch(1, 13, ord('\\')) treescrn.addch(2, 13, ord('\\')) treescrn.addch(3, 14, ord('\\')) treescrn.addch(4, 15, ord('\\')) treescrn.addch(5, 15, ord('\\')) treescrn.addch(6, 16, ord('\\')) treescrn.addch(7, 17, ord('\\')) treescrn.addch(8, 18, ord('\\')) treescrn.addch(9, 18, ord('\\')) treescrn.addch(10, 19, ord('\\')) treescrn.addch(11, 21, ord('\\')) treescrn.addch(12, 22, ord('\\')) treescrn.addch(4, 10, ord('_')) treescrn.addch(4, 14, ord('_')) treescrn.addch(8, 7, ord('_')) treescrn.addch(8, 17, ord('_')) treescrn.addstr(13, 0, "//////////// \\\\\\\\\\\\\\\\\\\\\\\\") treescrn.addstr(14, 11, "| |") treescrn.addstr(15, 11, "|_|") unset_color(treescrn) treescrn.refresh() w_del_msg.refresh() return def balls(): treescrn.overlay(treescrn2) set_color(treescrn2, curses.COLOR_BLUE) treescrn2.addch(3, 9, ord('@')) treescrn2.addch(3, 15, ord('@')) treescrn2.addch(4, 8, ord('@')) treescrn2.addch(4, 16, ord('@')) treescrn2.addch(5, 7, ord('@')) treescrn2.addch(5, 17, ord('@')) treescrn2.addch(7, 6, ord('@')) treescrn2.addch(7, 18, ord('@')) treescrn2.addch(8, 5, ord('@')) treescrn2.addch(8, 19, ord('@')) treescrn2.addch(10, 4, ord('@')) treescrn2.addch(10, 20, ord('@')) treescrn2.addch(11, 2, ord('@')) treescrn2.addch(11, 22, ord('@')) treescrn2.addch(12, 1, ord('@')) treescrn2.addch(12, 23, ord('@')) unset_color(treescrn2) treescrn2.refresh() w_del_msg.refresh() return def star(): treescrn2.attrset(curses.A_BOLD | curses.A_BLINK) set_color(treescrn2, curses.COLOR_YELLOW) treescrn2.addch(0, 12, ord('*')) treescrn2.standend() unset_color(treescrn2) treescrn2.refresh() w_del_msg.refresh() return def strng1(): treescrn2.attrset(curses.A_BOLD | curses.A_BLINK) set_color(treescrn2, curses.COLOR_WHITE) treescrn2.addch(3, 13, ord('\'')) treescrn2.addch(3, 12, ord(':')) treescrn2.addch(3, 11, ord('.')) treescrn2.attroff(curses.A_BOLD | curses.A_BLINK) unset_color(treescrn2) treescrn2.refresh() w_del_msg.refresh() return 
def strng2(): treescrn2.attrset(curses.A_BOLD | curses.A_BLINK) set_color(treescrn2, curses.COLOR_WHITE) treescrn2.addch(5, 14, ord('\'')) treescrn2.addch(5, 13, ord(':')) treescrn2.addch(5, 12, ord('.')) treescrn2.addch(5, 11, ord(',')) treescrn2.addch(6, 10, ord('\'')) treescrn2.addch(6, 9, ord(':')) treescrn2.attroff(curses.A_BOLD | curses.A_BLINK) unset_color(treescrn2) treescrn2.refresh() w_del_msg.refresh() return def strng3(): treescrn2.attrset(curses.A_BOLD | curses.A_BLINK) set_color(treescrn2, curses.COLOR_WHITE) treescrn2.addch(7, 16, ord('\'')) treescrn2.addch(7, 15, ord(':')) treescrn2.addch(7, 14, ord('.')) treescrn2.addch(7, 13, ord(',')) treescrn2.addch(8, 12, ord('\'')) treescrn2.addch(8, 11, ord(':')) treescrn2.addch(8, 10, ord('.')) treescrn2.addch(8, 9, ord(',')) treescrn2.attroff(curses.A_BOLD | curses.A_BLINK) unset_color(treescrn2) treescrn2.refresh() w_del_msg.refresh() return def strng4(): treescrn2.attrset(curses.A_BOLD | curses.A_BLINK) set_color(treescrn2, curses.COLOR_WHITE) treescrn2.addch(9, 17, ord('\'')) treescrn2.addch(9, 16, ord(':')) treescrn2.addch(9, 15, ord('.')) treescrn2.addch(9, 14, ord(',')) treescrn2.addch(10, 13, ord('\'')) treescrn2.addch(10, 12, ord(':')) treescrn2.addch(10, 11, ord('.')) treescrn2.addch(10, 10, ord(',')) treescrn2.addch(11, 9, ord('\'')) treescrn2.addch(11, 8, ord(':')) treescrn2.addch(11, 7, ord('.')) treescrn2.addch(11, 6, ord(',')) treescrn2.addch(12, 5, ord('\'')) treescrn2.attroff(curses.A_BOLD | curses.A_BLINK) unset_color(treescrn2) treescrn2.refresh() w_del_msg.refresh() return def strng5(): treescrn2.attrset(curses.A_BOLD | curses.A_BLINK) set_color(treescrn2, curses.COLOR_WHITE) treescrn2.addch(11, 19, ord('\'')) treescrn2.addch(11, 18, ord(':')) treescrn2.addch(11, 17, ord('.')) treescrn2.addch(11, 16, ord(',')) treescrn2.addch(12, 15, ord('\'')) treescrn2.addch(12, 14, ord(':')) treescrn2.addch(12, 13, ord('.')) treescrn2.addch(12, 12, ord(',')) treescrn2.attroff(curses.A_BOLD | curses.A_BLINK) unset_color(treescrn2) # save a fully lit tree treescrn2.overlay(treescrn) treescrn2.refresh() w_del_msg.refresh() return def blinkit(): treescrn8.touchwin() for cycle in range(0, 5): if cycle == 0: treescrn3.overlay(treescrn8) treescrn8.refresh() w_del_msg.refresh() break elif cycle == 1: treescrn4.overlay(treescrn8) treescrn8.refresh() w_del_msg.refresh() break elif cycle == 2: treescrn5.overlay(treescrn8) treescrn8.refresh() w_del_msg.refresh() break elif cycle == 3: treescrn6.overlay(treescrn8) treescrn8.refresh() w_del_msg.refresh() break elif cycle == 4: treescrn7.overlay(treescrn8) treescrn8.refresh() w_del_msg.refresh() break treescrn8.touchwin() # ALL ON treescrn.overlay(treescrn8) treescrn8.refresh() w_del_msg.refresh() return def deer_step(win, y, x): win.mvwin(y, x) win.refresh() w_del_msg.refresh() look_out(5) def reindeer(): y_pos = 0 for x_pos in range(70, 62, -1): if x_pos < 66: y_pos = 1 for looper in range(0, 4): dotdeer0.addch(y_pos, x_pos, ord('.')) dotdeer0.refresh() w_del_msg.refresh() dotdeer0.erase() dotdeer0.refresh() w_del_msg.refresh() look_out(50) y_pos = 2 for x_pos in range(x_pos - 1, 50, -1): for looper in range(0, 4): if x_pos < 56: y_pos = 3 try: stardeer0.addch(y_pos, x_pos, ord('*')) except curses.error: pass stardeer0.refresh() w_del_msg.refresh() stardeer0.erase() stardeer0.refresh() w_del_msg.refresh() else: dotdeer0.addch(y_pos, x_pos, ord('*')) dotdeer0.refresh() w_del_msg.refresh() dotdeer0.erase() dotdeer0.refresh() w_del_msg.refresh() x_pos = 58 for y_pos in range(2, 5): 
lildeer0.touchwin() lildeer0.refresh() w_del_msg.refresh() for looper in range(0, 4): deer_step(lildeer3, y_pos, x_pos) deer_step(lildeer2, y_pos, x_pos) deer_step(lildeer1, y_pos, x_pos) deer_step(lildeer2, y_pos, x_pos) deer_step(lildeer3, y_pos, x_pos) lildeer0.touchwin() lildeer0.refresh() w_del_msg.refresh() x_pos -= 2 x_pos = 35 for y_pos in range(5, 10): middeer0.touchwin() middeer0.refresh() w_del_msg.refresh() for looper in range(0, 2): deer_step(middeer3, y_pos, x_pos) deer_step(middeer2, y_pos, x_pos) deer_step(middeer1, y_pos, x_pos) deer_step(middeer2, y_pos, x_pos) deer_step(middeer3, y_pos, x_pos) middeer0.touchwin() middeer0.refresh() w_del_msg.refresh() x_pos -= 3 look_out(300) y_pos = 1 for x_pos in range(8, 16): deer_step(bigdeer4, y_pos, x_pos) deer_step(bigdeer3, y_pos, x_pos) deer_step(bigdeer2, y_pos, x_pos) deer_step(bigdeer1, y_pos, x_pos) deer_step(bigdeer2, y_pos, x_pos) deer_step(bigdeer3, y_pos, x_pos) deer_step(bigdeer4, y_pos, x_pos) deer_step(bigdeer0, y_pos, x_pos) x_pos -= 1 for looper in range(0, 6): deer_step(lookdeer4, y_pos, x_pos) deer_step(lookdeer3, y_pos, x_pos) deer_step(lookdeer2, y_pos, x_pos) deer_step(lookdeer1, y_pos, x_pos) deer_step(lookdeer2, y_pos, x_pos) deer_step(lookdeer3, y_pos, x_pos) deer_step(lookdeer4, y_pos, x_pos) deer_step(lookdeer0, y_pos, x_pos) for y_pos in range(y_pos, 10): for looper in range(0, 2): deer_step(bigdeer4, y_pos, x_pos) deer_step(bigdeer3, y_pos, x_pos) deer_step(bigdeer2, y_pos, x_pos) deer_step(bigdeer1, y_pos, x_pos) deer_step(bigdeer2, y_pos, x_pos) deer_step(bigdeer3, y_pos, x_pos) deer_step(bigdeer4, y_pos, x_pos) deer_step(bigdeer0, y_pos, x_pos) y_pos -= 1 deer_step(lookdeer3, y_pos, x_pos) return def main(win): global stdscr stdscr = win global my_bg, y_pos, x_pos global treescrn, treescrn2, treescrn3, treescrn4 global treescrn5, treescrn6, treescrn7, treescrn8 global dotdeer0, stardeer0 global lildeer0, lildeer1, lildeer2, lildeer3 global middeer0, middeer1, middeer2, middeer3 global bigdeer0, bigdeer1, bigdeer2, bigdeer3, bigdeer4 global lookdeer0, lookdeer1, lookdeer2, lookdeer3, lookdeer4 global w_holiday, w_del_msg my_bg = curses.COLOR_BLACK # curses.curs_set(0) treescrn = curses.newwin(16, 27, 3, 53) treescrn2 = curses.newwin(16, 27, 3, 53) treescrn3 = curses.newwin(16, 27, 3, 53) treescrn4 = curses.newwin(16, 27, 3, 53) treescrn5 = curses.newwin(16, 27, 3, 53) treescrn6 = curses.newwin(16, 27, 3, 53) treescrn7 = curses.newwin(16, 27, 3, 53) treescrn8 = curses.newwin(16, 27, 3, 53) dotdeer0 = curses.newwin(3, 71, 0, 8) stardeer0 = curses.newwin(4, 56, 0, 8) lildeer0 = curses.newwin(7, 53, 0, 8) lildeer1 = curses.newwin(2, 4, 0, 0) lildeer2 = curses.newwin(2, 4, 0, 0) lildeer3 = curses.newwin(2, 4, 0, 0) middeer0 = curses.newwin(15, 42, 0, 8) middeer1 = curses.newwin(3, 7, 0, 0) middeer2 = curses.newwin(3, 7, 0, 0) middeer3 = curses.newwin(3, 7, 0, 0) bigdeer0 = curses.newwin(10, 23, 0, 0) bigdeer1 = curses.newwin(10, 23, 0, 0) bigdeer2 = curses.newwin(10, 23, 0, 0) bigdeer3 = curses.newwin(10, 23, 0, 0) bigdeer4 = curses.newwin(10, 23, 0, 0) lookdeer0 = curses.newwin(10, 25, 0, 0) lookdeer1 = curses.newwin(10, 25, 0, 0) lookdeer2 = curses.newwin(10, 25, 0, 0) lookdeer3 = curses.newwin(10, 25, 0, 0) lookdeer4 = curses.newwin(10, 25, 0, 0) w_holiday = curses.newwin(1, 27, 3, 27) w_del_msg = curses.newwin(1, 20, 23, 60) try: w_del_msg.addstr(0, 0, "Hit any key to quit") except curses.error: pass try: w_holiday.addstr(0, 0, "H A P P Y H O L I D A Y S") except curses.error: pass # set up the windows 
for our various reindeer lildeer1.addch(0, 0, ord('V')) lildeer1.addch(1, 0, ord('@')) lildeer1.addch(1, 1, ord('<')) lildeer1.addch(1, 2, ord('>')) try: lildeer1.addch(1, 3, ord('~')) except curses.error: pass lildeer2.addch(0, 0, ord('V')) lildeer2.addch(1, 0, ord('@')) lildeer2.addch(1, 1, ord('|')) lildeer2.addch(1, 2, ord('|')) try: lildeer2.addch(1, 3, ord('~')) except curses.error: pass lildeer3.addch(0, 0, ord('V')) lildeer3.addch(1, 0, ord('@')) lildeer3.addch(1, 1, ord('>')) lildeer3.addch(1, 2, ord('<')) try: lildeer2.addch(1, 3, ord('~')) # XXX except curses.error: pass middeer1.addch(0, 2, ord('y')) middeer1.addch(0, 3, ord('y')) middeer1.addch(1, 2, ord('0')) middeer1.addch(1, 3, ord('(')) middeer1.addch(1, 4, ord('=')) middeer1.addch(1, 5, ord(')')) middeer1.addch(1, 6, ord('~')) middeer1.addch(2, 3, ord('\\')) middeer1.addch(2, 5, ord('/')) middeer2.addch(0, 2, ord('y')) middeer2.addch(0, 3, ord('y')) middeer2.addch(1, 2, ord('0')) middeer2.addch(1, 3, ord('(')) middeer2.addch(1, 4, ord('=')) middeer2.addch(1, 5, ord(')')) middeer2.addch(1, 6, ord('~')) middeer2.addch(2, 3, ord('|')) middeer2.addch(2, 5, ord('|')) middeer3.addch(0, 2, ord('y')) middeer3.addch(0, 3, ord('y')) middeer3.addch(1, 2, ord('0')) middeer3.addch(1, 3, ord('(')) middeer3.addch(1, 4, ord('=')) middeer3.addch(1, 5, ord(')')) middeer3.addch(1, 6, ord('~')) middeer3.addch(2, 3, ord('/')) middeer3.addch(2, 5, ord('\\')) bigdeer1.addch(0, 17, ord('\\')) bigdeer1.addch(0, 18, ord('/')) bigdeer1.addch(0, 19, ord('\\')) bigdeer1.addch(0, 20, ord('/')) bigdeer1.addch(1, 18, ord('\\')) bigdeer1.addch(1, 20, ord('/')) bigdeer1.addch(2, 19, ord('|')) bigdeer1.addch(2, 20, ord('_')) bigdeer1.addch(3, 18, ord('/')) bigdeer1.addch(3, 19, ord('^')) bigdeer1.addch(3, 20, ord('0')) bigdeer1.addch(3, 21, ord('\\')) bigdeer1.addch(4, 17, ord('/')) bigdeer1.addch(4, 18, ord('/')) bigdeer1.addch(4, 19, ord('\\')) bigdeer1.addch(4, 22, ord('\\')) bigdeer1.addstr(5, 7, "^~~~~~~~~// ~~U") bigdeer1.addstr(6, 7, "( \\_____( /") # )) bigdeer1.addstr(7, 8, "( ) /") bigdeer1.addstr(8, 9, "\\\\ /") bigdeer1.addstr(9, 11, "\\>/>") bigdeer2.addch(0, 17, ord('\\')) bigdeer2.addch(0, 18, ord('/')) bigdeer2.addch(0, 19, ord('\\')) bigdeer2.addch(0, 20, ord('/')) bigdeer2.addch(1, 18, ord('\\')) bigdeer2.addch(1, 20, ord('/')) bigdeer2.addch(2, 19, ord('|')) bigdeer2.addch(2, 20, ord('_')) bigdeer2.addch(3, 18, ord('/')) bigdeer2.addch(3, 19, ord('^')) bigdeer2.addch(3, 20, ord('0')) bigdeer2.addch(3, 21, ord('\\')) bigdeer2.addch(4, 17, ord('/')) bigdeer2.addch(4, 18, ord('/')) bigdeer2.addch(4, 19, ord('\\')) bigdeer2.addch(4, 22, ord('\\')) bigdeer2.addstr(5, 7, "^~~~~~~~~// ~~U") bigdeer2.addstr(6, 7, "(( )____( /") # )) bigdeer2.addstr(7, 7, "( / |") bigdeer2.addstr(8, 8, "\\/ |") bigdeer2.addstr(9, 9, "|> |>") bigdeer3.addch(0, 17, ord('\\')) bigdeer3.addch(0, 18, ord('/')) bigdeer3.addch(0, 19, ord('\\')) bigdeer3.addch(0, 20, ord('/')) bigdeer3.addch(1, 18, ord('\\')) bigdeer3.addch(1, 20, ord('/')) bigdeer3.addch(2, 19, ord('|')) bigdeer3.addch(2, 20, ord('_')) bigdeer3.addch(3, 18, ord('/')) bigdeer3.addch(3, 19, ord('^')) bigdeer3.addch(3, 20, ord('0')) bigdeer3.addch(3, 21, ord('\\')) bigdeer3.addch(4, 17, ord('/')) bigdeer3.addch(4, 18, ord('/')) bigdeer3.addch(4, 19, ord('\\')) bigdeer3.addch(4, 22, ord('\\')) bigdeer3.addstr(5, 7, "^~~~~~~~~// ~~U") bigdeer3.addstr(6, 6, "( ()_____( /") # )) bigdeer3.addstr(7, 6, "/ / /") bigdeer3.addstr(8, 5, "|/ \\") bigdeer3.addstr(9, 5, "/> \\>") bigdeer4.addch(0, 17, ord('\\')) 
bigdeer4.addch(0, 18, ord('/')) bigdeer4.addch(0, 19, ord('\\')) bigdeer4.addch(0, 20, ord('/')) bigdeer4.addch(1, 18, ord('\\')) bigdeer4.addch(1, 20, ord('/')) bigdeer4.addch(2, 19, ord('|')) bigdeer4.addch(2, 20, ord('_')) bigdeer4.addch(3, 18, ord('/')) bigdeer4.addch(3, 19, ord('^')) bigdeer4.addch(3, 20, ord('0')) bigdeer4.addch(3, 21, ord('\\')) bigdeer4.addch(4, 17, ord('/')) bigdeer4.addch(4, 18, ord('/')) bigdeer4.addch(4, 19, ord('\\')) bigdeer4.addch(4, 22, ord('\\')) bigdeer4.addstr(5, 7, "^~~~~~~~~// ~~U") bigdeer4.addstr(6, 6, "( )______( /") # ) bigdeer4.addstr(7, 5, "(/ \\") # ) bigdeer4.addstr(8, 0, "v___= ----^") lookdeer1.addstr(0, 16, "\\/ \\/") lookdeer1.addstr(1, 17, "\\Y/ \\Y/") lookdeer1.addstr(2, 19, "\\=/") lookdeer1.addstr(3, 17, "^\\o o/^") lookdeer1.addstr(4, 17, "//( )") lookdeer1.addstr(5, 7, "^~~~~~~~~// \\O/") lookdeer1.addstr(6, 7, "( \\_____( /") # )) lookdeer1.addstr(7, 8, "( ) /") lookdeer1.addstr(8, 9, "\\\\ /") lookdeer1.addstr(9, 11, "\\>/>") lookdeer2.addstr(0, 16, "\\/ \\/") lookdeer2.addstr(1, 17, "\\Y/ \\Y/") lookdeer2.addstr(2, 19, "\\=/") lookdeer2.addstr(3, 17, "^\\o o/^") lookdeer2.addstr(4, 17, "//( )") lookdeer2.addstr(5, 7, "^~~~~~~~~// \\O/") lookdeer2.addstr(6, 7, "(( )____( /") # )) lookdeer2.addstr(7, 7, "( / |") lookdeer2.addstr(8, 8, "\\/ |") lookdeer2.addstr(9, 9, "|> |>") lookdeer3.addstr(0, 16, "\\/ \\/") lookdeer3.addstr(1, 17, "\\Y/ \\Y/") lookdeer3.addstr(2, 19, "\\=/") lookdeer3.addstr(3, 17, "^\\o o/^") lookdeer3.addstr(4, 17, "//( )") lookdeer3.addstr(5, 7, "^~~~~~~~~// \\O/") lookdeer3.addstr(6, 6, "( ()_____( /") # )) lookdeer3.addstr(7, 6, "/ / /") lookdeer3.addstr(8, 5, "|/ \\") lookdeer3.addstr(9, 5, "/> \\>") lookdeer4.addstr(0, 16, "\\/ \\/") lookdeer4.addstr(1, 17, "\\Y/ \\Y/") lookdeer4.addstr(2, 19, "\\=/") lookdeer4.addstr(3, 17, "^\\o o/^") lookdeer4.addstr(4, 17, "//( )") lookdeer4.addstr(5, 7, "^~~~~~~~~// \\O/") lookdeer4.addstr(6, 6, "( )______( /") # ) lookdeer4.addstr(7, 5, "(/ \\") # ) lookdeer4.addstr(8, 0, "v___= ----^") ############################################### curses.cbreak() stdscr.nodelay(1) while 1: stdscr.clear() treescrn.erase() w_del_msg.touchwin() treescrn.touchwin() treescrn2.erase() treescrn2.touchwin() treescrn8.erase() treescrn8.touchwin() stdscr.refresh() look_out(150) boxit() stdscr.refresh() look_out(150) seas() stdscr.refresh() greet() stdscr.refresh() look_out(150) fromwho() stdscr.refresh() look_out(150) tree() look_out(150) balls() look_out(150) star() look_out(150) strng1() strng2() strng3() strng4() strng5() # set up the windows for our blinking trees # # treescrn3 treescrn.overlay(treescrn3) # balls treescrn3.addch(4, 18, ord(' ')) treescrn3.addch(7, 6, ord(' ')) treescrn3.addch(8, 19, ord(' ')) treescrn3.addch(11, 22, ord(' ')) # star treescrn3.addch(0, 12, ord('*')) # strng1 treescrn3.addch(3, 11, ord(' ')) # strng2 treescrn3.addch(5, 13, ord(' ')) treescrn3.addch(6, 10, ord(' ')) # strng3 treescrn3.addch(7, 16, ord(' ')) treescrn3.addch(7, 14, ord(' ')) # strng4 treescrn3.addch(10, 13, ord(' ')) treescrn3.addch(10, 10, ord(' ')) treescrn3.addch(11, 8, ord(' ')) # strng5 treescrn3.addch(11, 18, ord(' ')) treescrn3.addch(12, 13, ord(' ')) # treescrn4 treescrn.overlay(treescrn4) # balls treescrn4.addch(3, 9, ord(' ')) treescrn4.addch(4, 16, ord(' ')) treescrn4.addch(7, 6, ord(' ')) treescrn4.addch(8, 19, ord(' ')) treescrn4.addch(11, 2, ord(' ')) treescrn4.addch(12, 23, ord(' ')) # star treescrn4.standout() treescrn4.addch(0, 12, ord('*')) treescrn4.standend() # strng1 
treescrn4.addch(3, 13, ord(' ')) # strng2 # strng3 treescrn4.addch(7, 15, ord(' ')) treescrn4.addch(8, 11, ord(' ')) # strng4 treescrn4.addch(9, 16, ord(' ')) treescrn4.addch(10, 12, ord(' ')) treescrn4.addch(11, 8, ord(' ')) # strng5 treescrn4.addch(11, 18, ord(' ')) treescrn4.addch(12, 14, ord(' ')) # treescrn5 treescrn.overlay(treescrn5) # balls treescrn5.addch(3, 15, ord(' ')) treescrn5.addch(10, 20, ord(' ')) treescrn5.addch(12, 1, ord(' ')) # star treescrn5.addch(0, 12, ord(' ')) # strng1 treescrn5.addch(3, 11, ord(' ')) # strng2 treescrn5.addch(5, 12, ord(' ')) # strng3 treescrn5.addch(7, 14, ord(' ')) treescrn5.addch(8, 10, ord(' ')) # strng4 treescrn5.addch(9, 15, ord(' ')) treescrn5.addch(10, 11, ord(' ')) treescrn5.addch(11, 7, ord(' ')) # strng5 treescrn5.addch(11, 17, ord(' ')) treescrn5.addch(12, 13, ord(' ')) # treescrn6 treescrn.overlay(treescrn6) # balls treescrn6.addch(6, 7, ord(' ')) treescrn6.addch(7, 18, ord(' ')) treescrn6.addch(10, 4, ord(' ')) treescrn6.addch(11, 23, ord(' ')) # star treescrn6.standout() treescrn6.addch(0, 12, ord('*')) treescrn6.standend() # strng1 # strng2 treescrn6.addch(5, 11, ord(' ')) # strng3 treescrn6.addch(7, 13, ord(' ')) treescrn6.addch(8, 9, ord(' ')) # strng4 treescrn6.addch(9, 14, ord(' ')) treescrn6.addch(10, 10, ord(' ')) treescrn6.addch(11, 6, ord(' ')) # strng5 treescrn6.addch(11, 16, ord(' ')) treescrn6.addch(12, 12, ord(' ')) # treescrn7 treescrn.overlay(treescrn7) # balls treescrn7.addch(3, 15, ord(' ')) treescrn7.addch(6, 7, ord(' ')) treescrn7.addch(7, 18, ord(' ')) treescrn7.addch(10, 4, ord(' ')) treescrn7.addch(11, 22, ord(' ')) # star treescrn7.addch(0, 12, ord('*')) # strng1 treescrn7.addch(3, 12, ord(' ')) # strng2 treescrn7.addch(5, 13, ord(' ')) treescrn7.addch(6, 9, ord(' ')) # strng3 treescrn7.addch(7, 15, ord(' ')) treescrn7.addch(8, 11, ord(' ')) # strng4 treescrn7.addch(9, 16, ord(' ')) treescrn7.addch(10, 12, ord(' ')) treescrn7.addch(11, 8, ord(' ')) # strng5 treescrn7.addch(11, 18, ord(' ')) treescrn7.addch(12, 14, ord(' ')) look_out(150) reindeer() w_holiday.touchwin() w_holiday.refresh() w_del_msg.refresh() look_out(500) for i in range(0, 20): blinkit() curses.wrapper(main)
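# ---------------------------------------------------------------------------
# A minimal sketch of the animation pattern the demo above relies on, using
# only the standard-library curses module: frames are prepared once in
# off-screen windows, touchwin()/refresh() blits them to the terminal,
# napms() paces the loop (as look_out() does above), and a non-blocking
# getch() lets any key stop the show. Run it with curses.wrapper(_spinner).
import curses

def _spinner(stdscr):
    stdscr.nodelay(1)                  # make getch() non-blocking
    try:
        curses.curs_set(0)             # hide the cursor where supported
    except curses.error:
        pass
    frames = []
    for ch in ('-', '\\', '|', '/'):   # four one-character "frames"
        win = curses.newwin(1, 2, 2, 10)
        win.addch(0, 0, ord(ch))
        frames.append(win)
    while 1:
        for frame in frames:
            frame.touchwin()           # mark every cell of the window dirty
            frame.refresh()            # blit this frame over the previous one
            curses.napms(150)
            if stdscr.getch() != -1:   # any key quits
                return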
""" ReflectionBlock can render and process ReflectionIdevices as XHTML """ import logging from exe.webui.block import Block from exe.webui import common from exe.webui.element import TextAreaElement log = logging.getLogger(__name__) class ReflectionBlock(Block): """ ReflectionBlock can render and process ReflectionIdevices as XHTML """ def __init__(self, parent, idevice): """ Initialize a new Block object """ Block.__init__(self, parent, idevice) self.activityInstruc = idevice.activityInstruc self.answerInstruc = idevice.answerInstruc # to compensate for the strange unpickling timing when objects are # loaded from an elp, ensure that proper idevices are set: if idevice.activityTextArea.idevice is None: idevice.activityTextArea.idevice = idevice if idevice.answerTextArea.idevice is None: idevice.answerTextArea.idevice = idevice self.activityElement = TextAreaElement(idevice.activityTextArea) self.answerElement = TextAreaElement(idevice.answerTextArea) self.previewing = False # In view or preview render if not hasattr(self.idevice,'undo'): self.idevice.undo = True def process(self, request): """ Process the request arguments from the web server """ Block.process(self, request) is_cancel = common.requestHasCancel(request) if not is_cancel: self.activityElement.process(request) self.answerElement.process(request) if "title"+self.id in request.args: self.idevice.title = request.args["title"+self.id][0] def renderEdit(self, style): """ Returns an XHTML string with the form element for editing this block """ html = "<div class=\"iDevice\"><br/>\n" html += common.textInput("title"+self.id, self.idevice.title) html += self.activityElement.renderEdit() html += self.answerElement.renderEdit() html += "<br/>" + self.renderEditButtons() html += "</div>\n" return html def renderPreview(self, style): """ Remembers if we're previewing or not, then implicitly calls self.renderViewContent (via Block.renderPreview) """ self.previewing = True return Block.renderPreview(self, style) def renderView(self, style): """ Remembers if we're previewing or not, then implicitly calls self.renderViewContent (via Block.renderPreview) """ self.previewing = False return Block.renderView(self, style) def renderViewContent(self): """ Returns an XHTML string for this block """ if self.previewing: html = self.activityElement.renderPreview() feedback = self.answerElement.renderPreview() else: html = self.activityElement.renderView() feedback = self.answerElement.renderView() #added lernmodule.net html += '<div class="Reflection" id="Reflection%s">' % (self.id) html += '<textarea id="ReflectionText%s" class="ReflectionText" name="ReflectionText%s"' % (self.id, self.id) html += ' rows=5 style="width:99%"></textarea></div>' html += common.feedbackBlock(self.id,feedback) return html from exe.engine.reflectionidevice import ReflectionIdevice from exe.webui.blockfactory import g_blockFactory g_blockFactory.registerBlockType(ReflectionBlock, ReflectionIdevice)
"""Tests for CMFNotification installation ad uninstallation. $Id: testInstallation.py 65679 2008-05-25 23:45:26Z dbaty $ """ from zope.component import getUtility from zope.component import getMultiAdapter from AccessControl.PermissionRole import rolesForPermissionOn from plone.portlets.interfaces import IPortletManager from plone.portlets.interfaces import IPortletAssignmentMapping from Products.CMFCore.utils import getToolByName from Products.CMFNotification.config import LAYER_NAME from Products.CMFNotification.config import PORTLET_NAME from Products.CMFNotification.NotificationTool import ID as TOOL_ID from Products.CMFNotification.permissions import SUBSCRIBE_PERMISSION from Products.CMFNotification.tests.plonetestbrowser import Browser from Products.CMFNotification.tests.base import CMFNotificationTestCase class TestInstallation(CMFNotificationTestCase): """Make sure that the product is properly installed.""" def afterSetUp(self): pass def testToolIsThere(self): portal = self.portal tool = getToolByName(self.portal, TOOL_ID) self.failUnless(tool is not None) def testSkinLayerIsThere(self): stool = getToolByName(self.portal, 'portal_skins') for skin, layers in stool._getSelections().items(): layers = layers.split(',') self.failUnless(LAYER_NAME in layers) self.failUnless(LAYER_NAME in stool.objectIds()) def testPortletCanBeAdded(self): base_url = self.portal.absolute_url() for name in ('plone.leftcolumn', 'plone.rightcolumn'): manager = getUtility(IPortletManager, name=name, context=self.portal) titles = [p.title for p in manager.getAddablePortletTypes()] self.failUnless(PORTLET_NAME in titles) manager = getUtility(IPortletManager, name='plone.rightcolumn', context=self.portal) right_portlets = getMultiAdapter((self.portal, manager), IPortletAssignmentMapping, context=self.portal) right_portlets = right_portlets.keys() self.failUnless(PORTLET_NAME in right_portlets) def testPermissionHasBeenSet(self): roles = set(rolesForPermissionOn(SUBSCRIBE_PERMISSION, self.portal)) self.failUnlessEqual(roles, set(('Manager', 'Member'))) def testConfigletHasBeenAdded(self): cptool = getToolByName(self.portal, 'portal_controlpanel') configlets = [c.getId() for c in cptool.listActions()] self.failUnless('cmfnotification_configuration' in configlets) class TestUnInstallation(CMFNotificationTestCase): """Test that the product has been properly uninstalled.""" def afterSetUp(self): """Uninstall the product before running each test.""" qtool = getToolByName(self.portal, 'portal_quickinstaller') self.setRoles(['Manager']) qtool.uninstallProducts(['CMFNotification']) def testToolIsNotThere(self): tool = getToolByName(self.portal, TOOL_ID, None) self.failUnless(tool is None) def testSkinLayerIsNotThere(self): stool = getToolByName(self.portal, 'portal_skins') for skin, layers in stool._getSelections().items(): layers = layers.split(',') self.failUnless (LAYER_NAME not in layers) self.failUnless(LAYER_NAME not in stool.objectIds()) def testPortletDoNoExist(self): base_url = self.portal.absolute_url() for name in ('plone.leftcolumn', 'plone.rightcolumn'): manager = getUtility(IPortletManager, name=name, context=self.portal) titles = [p.title for p in manager.getAddablePortletTypes()] self.failUnless(PORTLET_NAME not in titles) manager = getUtility(IPortletManager, name='plone.rightcolumn', context=self.portal) right_portlets = getMultiAdapter((self.portal, manager), IPortletAssignmentMapping, context=self.portal) right_portlets = right_portlets.keys() self.failUnless(PORTLET_NAME not in right_portlets) def 
testConfigletDoNotExist(self): cptool = getToolByName(self.portal, 'portal_controlpanel') configlets = [c.getId() for c in cptool.listActions()] self.failUnless('cmfnotification_configuration' not in configlets) def test_suite(): from unittest import TestSuite, makeSuite suite = TestSuite() suite.addTest(makeSuite(TestInstallation)) suite.addTest(makeSuite(TestUnInstallation)) return suite
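# A small sketch of how a test_suite() hook like the one above is consumed
# outside the Zope/Plone test runner; illustrative only, since the tests
# themselves still need a Plone sandbox to execute.
import unittest

def run_suite():
    unittest.TextTestRunner(verbosity=2).run(test_suite())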
from openstack.tests.unit import base

from openstack.orchestration.v1 import resource


FAKE_ID = '32e39358-2422-4ad0-a1b5-dd60696bf564'
FAKE_NAME = 'test_stack'

FAKE = {
    'links': [{
        'href': 'http://res_link',
        'rel': 'self'
    }, {
        'href': 'http://stack_link',
        'rel': 'stack'
    }],
    'logical_resource_id': 'the_resource',
    'name': 'the_resource',
    'physical_resource_id': '9f38ab5a-37c8-4e40-9702-ce27fc5f6954',
    'required_by': [],
    'resource_type': 'OS::Heat::FakeResource',
    'status': 'CREATE_COMPLETE',
    'status_reason': 'state changed',
    'updated_time': '2015-03-09T12:15:57.233772',
}


class TestResource(base.TestCase):

    def test_basic(self):
        sot = resource.Resource()
        self.assertEqual('resource', sot.resource_key)
        self.assertEqual('resources', sot.resources_key)
        self.assertEqual('/stacks/%(stack_name)s/%(stack_id)s/resources',
                         sot.base_path)
        self.assertFalse(sot.allow_create)
        self.assertFalse(sot.allow_retrieve)
        self.assertFalse(sot.allow_commit)
        self.assertFalse(sot.allow_delete)
        self.assertTrue(sot.allow_list)

    def test_make_it(self):
        sot = resource.Resource(**FAKE)
        self.assertEqual(FAKE['links'], sot.links)
        self.assertEqual(FAKE['logical_resource_id'], sot.logical_resource_id)
        self.assertEqual(FAKE['name'], sot.name)
        self.assertEqual(FAKE['physical_resource_id'],
                         sot.physical_resource_id)
        self.assertEqual(FAKE['required_by'], sot.required_by)
        self.assertEqual(FAKE['resource_type'], sot.resource_type)
        self.assertEqual(FAKE['status'], sot.status)
        self.assertEqual(FAKE['status_reason'], sot.status_reason)
        self.assertEqual(FAKE['updated_time'], sot.updated_at)
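# A small sketch of how the base_path template above gets expanded: the SDK
# fills the URI parameters with %-style mapping keys, so listing the
# resources of a stack requests the path below (values reuse the fakes).
path = '/stacks/%(stack_name)s/%(stack_id)s/resources' % {
    'stack_name': FAKE_NAME,
    'stack_id': FAKE_ID,
}
assert path == ('/stacks/test_stack/'
                '32e39358-2422-4ad0-a1b5-dd60696bf564/resources')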
""" Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com> This file is part of RockStor. RockStor is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. RockStor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ from django.db import models from storageadmin.models import Appliance class UpdateSubscription(models.Model): """name of the channel. eg: stable""" name = models.CharField(max_length=64, unique=True) """detailed description or a longer name""" description = models.CharField(max_length=128) """url of the repo""" url = models.CharField(max_length=512) appliance = models.ForeignKey(Appliance) password = models.CharField(max_length=64, null=True) """status of subscription: active, inactive, expired etc..""" status = models.CharField(max_length=64) class Meta: app_label = 'storageadmin'
""" Tests for waffle utils features. """ import crum import ddt from django.test import TestCase from django.test.client import RequestFactory from edx_django_utils.cache import RequestCache from mock import patch from opaque_keys.edx.keys import CourseKey from waffle.testutils import override_flag from .. import CourseWaffleFlag, WaffleFlagNamespace, WaffleSwitchNamespace, WaffleSwitch from ..models import WaffleFlagCourseOverrideModel @ddt.ddt class TestCourseWaffleFlag(TestCase): """ Tests the CourseWaffleFlag. """ NAMESPACE_NAME = "test_namespace" FLAG_NAME = "test_flag" NAMESPACED_FLAG_NAME = NAMESPACE_NAME + "." + FLAG_NAME TEST_COURSE_KEY = CourseKey.from_string("edX/DemoX/Demo_Course") TEST_COURSE_2_KEY = CourseKey.from_string("edX/DemoX/Demo_Course_2") TEST_NAMESPACE = WaffleFlagNamespace(NAMESPACE_NAME) TEST_COURSE_FLAG = CourseWaffleFlag(TEST_NAMESPACE, FLAG_NAME) def setUp(self): super(TestCourseWaffleFlag, self).setUp() request = RequestFactory().request() self.addCleanup(crum.set_current_request, None) crum.set_current_request(request) RequestCache.clear_all_namespaces() @ddt.data( {'course_override': WaffleFlagCourseOverrideModel.ALL_CHOICES.on, 'waffle_enabled': False, 'result': True}, {'course_override': WaffleFlagCourseOverrideModel.ALL_CHOICES.off, 'waffle_enabled': True, 'result': False}, {'course_override': WaffleFlagCourseOverrideModel.ALL_CHOICES.unset, 'waffle_enabled': True, 'result': True}, {'course_override': WaffleFlagCourseOverrideModel.ALL_CHOICES.unset, 'waffle_enabled': False, 'result': False}, ) def test_course_waffle_flag(self, data): """ Tests various combinations of a flag being set in waffle and overridden for a course. """ with patch.object(WaffleFlagCourseOverrideModel, 'override_value', return_value=data['course_override']): with override_flag(self.NAMESPACED_FLAG_NAME, active=data['waffle_enabled']): # check twice to test that the result is properly cached self.assertEqual(self.TEST_COURSE_FLAG.is_enabled(self.TEST_COURSE_KEY), data['result']) self.assertEqual(self.TEST_COURSE_FLAG.is_enabled(self.TEST_COURSE_KEY), data['result']) # result is cached, so override check should happen once WaffleFlagCourseOverrideModel.override_value.assert_called_once_with( self.NAMESPACED_FLAG_NAME, self.TEST_COURSE_KEY ) # check flag for a second course if data['course_override'] == WaffleFlagCourseOverrideModel.ALL_CHOICES.unset: # When course override wasn't set for the first course, the second course will get the same # cached value from waffle. self.assertEqual(self.TEST_COURSE_FLAG.is_enabled(self.TEST_COURSE_2_KEY), data['waffle_enabled']) else: # When course override was set for the first course, it should not apply to the second # course which should get the default value of False. self.assertEqual(self.TEST_COURSE_FLAG.is_enabled(self.TEST_COURSE_2_KEY), False) @ddt.data( {'flag_undefined_default': None, 'result': False}, {'flag_undefined_default': False, 'result': False}, {'flag_undefined_default': True, 'result': True}, ) def test_undefined_waffle_flag(self, data): """ Test flag with various defaults provided for undefined waffle flags. 
""" test_course_flag = CourseWaffleFlag( self.TEST_NAMESPACE, self.FLAG_NAME, flag_undefined_default=data['flag_undefined_default'] ) with patch.object( WaffleFlagCourseOverrideModel, 'override_value', return_value=WaffleFlagCourseOverrideModel.ALL_CHOICES.unset ): # check twice to test that the result is properly cached self.assertEqual(test_course_flag.is_enabled(self.TEST_COURSE_KEY), data['result']) self.assertEqual(test_course_flag.is_enabled(self.TEST_COURSE_KEY), data['result']) # result is cached, so override check should happen once WaffleFlagCourseOverrideModel.override_value.assert_called_once_with( self.NAMESPACED_FLAG_NAME, self.TEST_COURSE_KEY ) @ddt.data( {'flag_undefined_default': None, 'result': False}, {'flag_undefined_default': False, 'result': False}, {'flag_undefined_default': True, 'result': True}, ) def test_without_request(self, data): """ Test the flag behavior when outside a request context. """ crum.set_current_request(None) test_course_flag = CourseWaffleFlag( self.TEST_NAMESPACE, self.FLAG_NAME, flag_undefined_default=data['flag_undefined_default'] ) self.assertEqual(test_course_flag.is_enabled(self.TEST_COURSE_KEY), data['result']) class TestWaffleSwitch(TestCase): """ Tests the WaffleSwitch. """ NAMESPACE_NAME = "test_namespace" WAFFLE_SWITCH_NAME = "test_switch_name" TEST_NAMESPACE = WaffleSwitchNamespace(NAMESPACE_NAME) WAFFLE_SWITCH = WaffleSwitch(TEST_NAMESPACE, WAFFLE_SWITCH_NAME) def test_namespaced_switch_name(self): """ Verify namespaced_switch_name returns the correct namespace switch name """ expected = self.NAMESPACE_NAME + "." + self.WAFFLE_SWITCH_NAME actual = self.WAFFLE_SWITCH.namespaced_switch_name self.assertEqual(actual, expected)
from datetime import datetime import uuid from werkzeug.exceptions import Forbidden import logging import openerp from openerp import api, tools from openerp import SUPERUSER_ID from openerp.addons.website.models.website import slug from openerp.exceptions import Warning from openerp.osv import osv, fields from openerp.tools import html2plaintext from openerp.tools.translate import _ _logger = logging.getLogger(__name__) class KarmaError(Forbidden): """ Karma-related error, used for forum and posts. """ pass class Forum(osv.Model): """TDE TODO: set karma values for actions dynamic for a given forum""" _name = 'forum.forum' _description = 'Forums' _inherit = ['mail.thread', 'website.seo.metadata'] def init(self, cr): """ Add forum uuid for user email validation. """ forum_uuids = self.pool['ir.config_parameter'].search(cr, SUPERUSER_ID, [('key', '=', 'website_forum.uuid')]) if not forum_uuids: self.pool['ir.config_parameter'].set_param(cr, SUPERUSER_ID, 'website_forum.uuid', str(uuid.uuid4()), ['base.group_system']) _columns = { 'name': fields.char('Name', required=True, translate=True), 'faq': fields.html('Guidelines'), 'description': fields.html('Description'), # karma generation 'karma_gen_question_new': fields.integer('Asking a question'), 'karma_gen_question_upvote': fields.integer('Question upvoted'), 'karma_gen_question_downvote': fields.integer('Question downvoted'), 'karma_gen_answer_upvote': fields.integer('Answer upvoted'), 'karma_gen_answer_downvote': fields.integer('Answer downvoted'), 'karma_gen_answer_accept': fields.integer('Accepting an answer'), 'karma_gen_answer_accepted': fields.integer('Answer accepted'), 'karma_gen_answer_flagged': fields.integer('Answer flagged'), # karma-based actions 'karma_ask': fields.integer('Ask a question'), 'karma_answer': fields.integer('Answer a question'), 'karma_edit_own': fields.integer('Edit its own posts'), 'karma_edit_all': fields.integer('Edit all posts'), 'karma_close_own': fields.integer('Close its own posts'), 'karma_close_all': fields.integer('Close all posts'), 'karma_unlink_own': fields.integer('Delete its own posts'), 'karma_unlink_all': fields.integer('Delete all posts'), 'karma_upvote': fields.integer('Upvote'), 'karma_downvote': fields.integer('Downvote'), 'karma_answer_accept_own': fields.integer('Accept an answer on its own questions'), 'karma_answer_accept_all': fields.integer('Accept an answer to all questions'), 'karma_editor_link_files': fields.integer('Linking files (Editor)'), 'karma_editor_clickable_link': fields.integer('Clickable links (Editor)'), 'karma_comment_own': fields.integer('Comment its own posts'), 'karma_comment_all': fields.integer('Comment all posts'), 'karma_comment_convert_own': fields.integer('Convert its own answers to comments and vice versa'), 'karma_comment_convert_all': fields.integer('Convert all answers to comments and vice versa'), 'karma_comment_unlink_own': fields.integer('Unlink its own comments'), 'karma_comment_unlink_all': fields.integer('Unlink all comments'), 'karma_retag': fields.integer('Change question tags'), 'karma_flag': fields.integer('Flag a post as offensive'), } def _get_default_faq(self, cr, uid, context=None): fname = openerp.modules.get_module_resource('website_forum', 'data', 'forum_default_faq.html') with open(fname, 'r') as f: return f.read() return False _defaults = { 'description': 'This community is for professionals and enthusiasts of our products and services.', 'faq': _get_default_faq, 'karma_gen_question_new': 0, # set to null for anti spam protection 
'karma_gen_question_upvote': 5, 'karma_gen_question_downvote': -2, 'karma_gen_answer_upvote': 10, 'karma_gen_answer_downvote': -2, 'karma_gen_answer_accept': 2, 'karma_gen_answer_accepted': 15, 'karma_gen_answer_flagged': -100, 'karma_ask': 3, # set to not null for anti spam protection 'karma_answer': 3, # set to not null for anti spam protection 'karma_edit_own': 1, 'karma_edit_all': 300, 'karma_close_own': 100, 'karma_close_all': 500, 'karma_unlink_own': 500, 'karma_unlink_all': 1000, 'karma_upvote': 5, 'karma_downvote': 50, 'karma_answer_accept_own': 20, 'karma_answer_accept_all': 500, 'karma_editor_link_files': 20, 'karma_editor_clickable_link': 20, 'karma_comment_own': 3, 'karma_comment_all': 5, 'karma_comment_convert_own': 50, 'karma_comment_convert_all': 500, 'karma_comment_unlink_own': 50, 'karma_comment_unlink_all': 500, 'karma_retag': 75, 'karma_flag': 500, } def create(self, cr, uid, values, context=None): if context is None: context = {} create_context = dict(context, mail_create_nolog=True) return super(Forum, self).create(cr, uid, values, context=create_context) class Post(osv.Model): _name = 'forum.post' _description = 'Forum Post' _inherit = ['mail.thread', 'website.seo.metadata'] _order = "is_correct DESC, vote_count DESC, write_date DESC" def _get_user_vote(self, cr, uid, ids, field_name, arg, context): res = dict.fromkeys(ids, 0) vote_ids = self.pool['forum.post.vote'].search(cr, uid, [('post_id', 'in', ids), ('user_id', '=', uid)], context=context) for vote in self.pool['forum.post.vote'].browse(cr, uid, vote_ids, context=context): res[vote.post_id.id] = vote.vote return res def _get_vote_count(self, cr, uid, ids, field_name, arg, context): res = dict.fromkeys(ids, 0) for post in self.browse(cr, uid, ids, context=context): for vote in post.vote_ids: res[post.id] += int(vote.vote) return res def _get_post_from_vote(self, cr, uid, ids, context=None): result = {} for vote in self.pool['forum.post.vote'].browse(cr, uid, ids, context=context): result[vote.post_id.id] = True return result.keys() def _get_user_favourite(self, cr, uid, ids, field_name, arg, context): res = dict.fromkeys(ids, False) for post in self.browse(cr, uid, ids, context=context): if uid in [f.id for f in post.favourite_ids]: res[post.id] = True return res def _get_favorite_count(self, cr, uid, ids, field_name, arg, context): res = dict.fromkeys(ids, 0) for post in self.browse(cr, uid, ids, context=context): res[post.id] += len(post.favourite_ids) return res def _get_post_from_hierarchy(self, cr, uid, ids, context=None): post_ids = set(ids) for post in self.browse(cr, SUPERUSER_ID, ids, context=context): if post.parent_id: post_ids.add(post.parent_id.id) return list(post_ids) def _get_child_count(self, cr, uid, ids, field_name=False, arg={}, context=None): res = dict.fromkeys(ids, 0) for post in self.browse(cr, uid, ids, context=context): if post.parent_id: res[post.parent_id.id] = len(post.parent_id.child_ids) else: res[post.id] = len(post.child_ids) return res def _get_uid_answered(self, cr, uid, ids, field_name, arg, context=None): res = dict.fromkeys(ids, False) for post in self.browse(cr, uid, ids, context=context): res[post.id] = any(answer.create_uid.id == uid for answer in post.child_ids) return res def _get_has_validated_answer(self, cr, uid, ids, field_name, arg, context=None): res = dict.fromkeys(ids, False) ans_ids = self.search(cr, uid, [('parent_id', 'in', ids), ('is_correct', '=', True)], context=context) for answer in self.browse(cr, uid, ans_ids, context=context): res[answer.parent_id.id] 
= True return res def _is_self_reply(self, cr, uid, ids, field_name, arg, context=None): res = dict.fromkeys(ids, False) for post in self.browse(cr, uid, ids, context=context): res[post.id] = post.parent_id and post.parent_id.create_uid == post.create_uid or False return res def _get_post_karma_rights(self, cr, uid, ids, field_name, arg, context=None): user = self.pool['res.users'].browse(cr, uid, uid, context=context) res = dict.fromkeys(ids, False) for post in self.browse(cr, uid, ids, context=context): res[post.id] = { 'karma_ask': post.forum_id.karma_ask, 'karma_answer': post.forum_id.karma_answer, 'karma_accept': post.parent_id and post.parent_id.create_uid.id == uid and post.forum_id.karma_answer_accept_own or post.forum_id.karma_answer_accept_all, 'karma_edit': post.create_uid.id == uid and post.forum_id.karma_edit_own or post.forum_id.karma_edit_all, 'karma_close': post.create_uid.id == uid and post.forum_id.karma_close_own or post.forum_id.karma_close_all, 'karma_unlink': post.create_uid.id == uid and post.forum_id.karma_unlink_own or post.forum_id.karma_unlink_all, 'karma_upvote': post.forum_id.karma_upvote, 'karma_downvote': post.forum_id.karma_downvote, 'karma_comment': post.create_uid.id == uid and post.forum_id.karma_comment_own or post.forum_id.karma_comment_all, 'karma_comment_convert': post.create_uid.id == uid and post.forum_id.karma_comment_convert_own or post.forum_id.karma_comment_convert_all, } res[post.id].update({ 'can_ask': uid == SUPERUSER_ID or user.karma >= res[post.id]['karma_ask'], 'can_answer': uid == SUPERUSER_ID or user.karma >= res[post.id]['karma_answer'], 'can_accept': uid == SUPERUSER_ID or user.karma >= res[post.id]['karma_accept'], 'can_edit': uid == SUPERUSER_ID or user.karma >= res[post.id]['karma_edit'], 'can_close': uid == SUPERUSER_ID or user.karma >= res[post.id]['karma_close'], 'can_unlink': uid == SUPERUSER_ID or user.karma >= res[post.id]['karma_unlink'], 'can_upvote': uid == SUPERUSER_ID or user.karma >= res[post.id]['karma_upvote'], 'can_downvote': uid == SUPERUSER_ID or user.karma >= res[post.id]['karma_downvote'], 'can_comment': uid == SUPERUSER_ID or user.karma >= res[post.id]['karma_comment'], 'can_comment_convert': uid == SUPERUSER_ID or user.karma >= res[post.id]['karma_comment_convert'], }) return res _columns = { 'name': fields.char('Title'), 'forum_id': fields.many2one('forum.forum', 'Forum', required=True), 'content': fields.html('Content'), 'tag_ids': fields.many2many('forum.tag', 'forum_tag_rel', 'forum_id', 'forum_tag_id', 'Tags'), 'state': fields.selection([('active', 'Active'), ('close', 'Close'), ('offensive', 'Offensive')], 'Status'), 'views': fields.integer('Number of Views'), 'active': fields.boolean('Active'), 'is_correct': fields.boolean('Valid Answer', help='Correct Answer or Answer on this question accepted.'), 'website_message_ids': fields.one2many( 'mail.message', 'res_id', domain=lambda self: [ '&', ('model', '=', self._name), ('type', 'in', ['email', 'comment']) ], string='Post Messages', help="Comments on forum post", ), # history 'create_date': fields.datetime('Asked on', select=True, readonly=True), 'create_uid': fields.many2one('res.users', 'Created by', select=True, readonly=True), 'write_date': fields.datetime('Update on', select=True, readonly=True), 'write_uid': fields.many2one('res.users', 'Updated by', select=True, readonly=True), # vote fields 'vote_ids': fields.one2many('forum.post.vote', 'post_id', 'Votes'), 'user_vote': fields.function(_get_user_vote, string='My Vote', type='integer'), 'vote_count': 
fields.function( _get_vote_count, string="Votes", type='integer', store={ 'forum.post': (lambda self, cr, uid, ids, c={}: ids, ['vote_ids'], 10), 'forum.post.vote': (_get_post_from_vote, [], 10), }), # favorite fields 'favourite_ids': fields.many2many('res.users', string='Favourite'), 'user_favourite': fields.function(_get_user_favourite, string="My Favourite", type='boolean'), 'favourite_count': fields.function( _get_favorite_count, string='Favorite Count', type='integer', store={ 'forum.post': (lambda self, cr, uid, ids, c={}: ids, ['favourite_ids'], 10), }), # hierarchy 'parent_id': fields.many2one('forum.post', 'Question', ondelete='cascade'), 'self_reply': fields.function( _is_self_reply, 'Reply to own question', type='boolean', store={ 'forum.post': (lambda self, cr, uid, ids, c={}: ids, ['parent_id', 'create_uid'], 10), }), 'child_ids': fields.one2many('forum.post', 'parent_id', 'Answers'), 'child_count': fields.function( _get_child_count, string="Answers", type='integer', store={ 'forum.post': (_get_post_from_hierarchy, ['parent_id', 'child_ids'], 10), }), 'uid_has_answered': fields.function( _get_uid_answered, string='Has Answered', type='boolean', ), 'has_validated_answer': fields.function( _get_has_validated_answer, string='Has a Validated Answered', type='boolean', store={ 'forum.post': (_get_post_from_hierarchy, ['parent_id', 'child_ids', 'is_correct'], 10), } ), # closing 'closed_reason_id': fields.many2one('forum.post.reason', 'Reason'), 'closed_uid': fields.many2one('res.users', 'Closed by', select=1), 'closed_date': fields.datetime('Closed on', readonly=True), # karma 'karma_ask': fields.function(_get_post_karma_rights, string='Karma to ask', type='integer', multi='_get_post_karma_rights'), 'karma_answer': fields.function(_get_post_karma_rights, string='Karma to answer', type='integer', multi='_get_post_karma_rights'), 'karma_accept': fields.function(_get_post_karma_rights, string='Karma to accept this answer', type='integer', multi='_get_post_karma_rights'), 'karma_edit': fields.function(_get_post_karma_rights, string='Karma to edit', type='integer', multi='_get_post_karma_rights'), 'karma_close': fields.function(_get_post_karma_rights, string='Karma to close', type='integer', multi='_get_post_karma_rights'), 'karma_unlink': fields.function(_get_post_karma_rights, string='Karma to unlink', type='integer', multi='_get_post_karma_rights'), 'karma_upvote': fields.function(_get_post_karma_rights, string='Karma to upvote', type='integer', multi='_get_post_karma_rights'), 'karma_downvote': fields.function(_get_post_karma_rights, string='Karma to downvote', type='integer', multi='_get_post_karma_rights'), 'karma_comment': fields.function(_get_post_karma_rights, string='Karma to comment', type='integer', multi='_get_post_karma_rights'), 'karma_comment_convert': fields.function(_get_post_karma_rights, string='karma to convert as a comment', type='integer', multi='_get_post_karma_rights'), # access rights 'can_ask': fields.function(_get_post_karma_rights, string='Can Ask', type='boolean', multi='_get_post_karma_rights'), 'can_answer': fields.function(_get_post_karma_rights, string='Can Answer', type='boolean', multi='_get_post_karma_rights'), 'can_accept': fields.function(_get_post_karma_rights, string='Can Accept', type='boolean', multi='_get_post_karma_rights'), 'can_edit': fields.function(_get_post_karma_rights, string='Can Edit', type='boolean', multi='_get_post_karma_rights'), 'can_close': fields.function(_get_post_karma_rights, string='Can Close', type='boolean', 
multi='_get_post_karma_rights'), 'can_unlink': fields.function(_get_post_karma_rights, string='Can Unlink', type='boolean', multi='_get_post_karma_rights'), 'can_upvote': fields.function(_get_post_karma_rights, string='Can Upvote', type='boolean', multi='_get_post_karma_rights'), 'can_downvote': fields.function(_get_post_karma_rights, string='Can Downvote', type='boolean', multi='_get_post_karma_rights'), 'can_comment': fields.function(_get_post_karma_rights, string='Can Comment', type='boolean', multi='_get_post_karma_rights'), 'can_comment_convert': fields.function(_get_post_karma_rights, string='Can Convert to Comment', type='boolean', multi='_get_post_karma_rights'), } _defaults = { 'state': 'active', 'views': 0, 'active': True, 'vote_ids': list(), 'favourite_ids': list(), 'child_ids': list(), } def create(self, cr, uid, vals, context=None): if context is None: context = {} create_context = dict(context, mail_create_nolog=True) post_id = super(Post, self).create(cr, uid, vals, context=create_context) post = self.browse(cr, uid, post_id, context=context) # karma-based access if not post.parent_id and not post.can_ask: raise KarmaError('Not enough karma to create a new question') elif post.parent_id and not post.can_answer: raise KarmaError('Not enough karma to answer to a question') # messaging and chatter base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url') if post.parent_id: body = _( '<p>A new answer for <i>%s</i> has been posted. <a href="%s/forum/%s/question/%s">Click here to access the post.</a></p>' % (post.parent_id.name, base_url, slug(post.parent_id.forum_id), slug(post.parent_id)) ) self.message_post(cr, uid, post.parent_id.id, subject=_('Re: %s') % post.parent_id.name, body=body, subtype='website_forum.mt_answer_new', context=context) else: body = _( '<p>A new question <i>%s</i> has been asked on %s. 
<a href="%s/forum/%s/question/%s">Click here to access the question.</a></p>' % (post.name, post.forum_id.name, base_url, slug(post.forum_id), slug(post)) ) self.message_post(cr, uid, post_id, subject=post.name, body=body, subtype='website_forum.mt_question_new', context=context) self.pool['res.users'].add_karma(cr, SUPERUSER_ID, [uid], post.forum_id.karma_gen_question_new, context=context) return post_id def write(self, cr, uid, ids, vals, context=None): posts = self.browse(cr, uid, ids, context=context) if 'state' in vals: if vals['state'] in ['active', 'close'] and any(not post.can_close for post in posts): raise KarmaError('Not enough karma to close or reopen a post.') if 'active' in vals: if any(not post.can_unlink for post in posts): raise KarmaError('Not enough karma to delete or reactivate a post') if 'is_correct' in vals: if any(not post.can_accept for post in posts): raise KarmaError('Not enough karma to accept or refuse an answer') # update karma except for self-acceptance mult = 1 if vals['is_correct'] else -1 for post in self.browse(cr, uid, ids, context=context): if vals['is_correct'] != post.is_correct and post.create_uid.id != uid: self.pool['res.users'].add_karma(cr, SUPERUSER_ID, [post.create_uid.id], post.forum_id.karma_gen_answer_accepted * mult, context=context) self.pool['res.users'].add_karma(cr, SUPERUSER_ID, [uid], post.forum_id.karma_gen_answer_accept * mult, context=context) if any(key not in ['state', 'active', 'is_correct', 'closed_uid', 'closed_date', 'closed_reason_id'] for key in vals.keys()) and any(not post.can_edit for post in posts): raise KarmaError('Not enough karma to edit a post.') res = super(Post, self).write(cr, uid, ids, vals, context=context) # if post content modify, notify followers if 'content' in vals or 'name' in vals: for post in posts: if post.parent_id: body, subtype = _('Answer Edited'), 'website_forum.mt_answer_edit' obj_id = post.parent_id.id else: body, subtype = _('Question Edited'), 'website_forum.mt_question_edit' obj_id = post.id self.message_post(cr, uid, obj_id, body=body, subtype=subtype, context=context) return res def reopen(self, cr, uid, ids, context=None): if any(post.parent_id or post.state != 'close' for post in self.browse(cr, uid, ids, context=context)): return False reason_offensive = self.pool['ir.model.data'].xmlid_to_res_id(cr, uid, 'website_forum.reason_7') reason_spam = self.pool['ir.model.data'].xmlid_to_res_id(cr, uid, 'website_forum.reason_8') for post in self.browse(cr, uid, ids, context=context): if post.closed_reason_id.id in (reason_offensive, reason_spam): _logger.info('Upvoting user <%s>, reopening spam/offensive question', post.create_uid) # TODO: in master, consider making this a tunable karma parameter self.pool['res.users'].add_karma(cr, SUPERUSER_ID, [post.create_uid.id], post.forum_id.karma_gen_question_downvote * -5, context=context) self.pool['forum.post'].write(cr, SUPERUSER_ID, ids, {'state': 'active'}, context=context) def close(self, cr, uid, ids, reason_id, context=None): if any(post.parent_id for post in self.browse(cr, uid, ids, context=context)): return False reason_offensive = self.pool['ir.model.data'].xmlid_to_res_id(cr, uid, 'website_forum.reason_7') reason_spam = self.pool['ir.model.data'].xmlid_to_res_id(cr, uid, 'website_forum.reason_8') if reason_id in (reason_offensive, reason_spam): for post in self.browse(cr, uid, ids, context=context): _logger.info('Downvoting user <%s> for posting spam/offensive contents', post.create_uid) # TODO: in master, consider making this a tunable 
karma parameter self.pool['res.users'].add_karma(cr, SUPERUSER_ID, [post.create_uid.id], post.forum_id.karma_gen_question_downvote * 5, context=context) self.pool['forum.post'].write(cr, uid, ids, { 'state': 'close', 'closed_uid': uid, 'closed_date': datetime.today().strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT), 'closed_reason_id': reason_id, }, context=context) def unlink(self, cr, uid, ids, context=None): posts = self.browse(cr, uid, ids, context=context) if any(not post.can_unlink for post in posts): raise KarmaError('Not enough karma to unlink a post') # if unlinking an answer with accepted answer: remove provided karma for post in posts: if post.is_correct: self.pool['res.users'].add_karma(cr, SUPERUSER_ID, [post.create_uid.id], post.forum_id.karma_gen_answer_accepted * -1, context=context) self.pool['res.users'].add_karma(cr, SUPERUSER_ID, [uid], post.forum_id.karma_gen_answer_accept * -1, context=context) return super(Post, self).unlink(cr, uid, ids, context=context) def vote(self, cr, uid, ids, upvote=True, context=None): Vote = self.pool['forum.post.vote'] vote_ids = Vote.search(cr, uid, [('post_id', 'in', ids), ('user_id', '=', uid)], context=context) new_vote = '1' if upvote else '-1' voted_forum_ids = set() if vote_ids: for vote in Vote.browse(cr, uid, vote_ids, context=context): if upvote: new_vote = '0' if vote.vote == '-1' else '1' else: new_vote = '0' if vote.vote == '1' else '-1' Vote.write(cr, uid, vote_ids, {'vote': new_vote}, context=context) voted_forum_ids.add(vote.post_id.id) for post_id in set(ids) - voted_forum_ids: for post_id in ids: Vote.create(cr, uid, {'post_id': post_id, 'vote': new_vote}, context=context) return {'vote_count': self._get_vote_count(cr, uid, ids, None, None, context=context)[ids[0]], 'user_vote': new_vote} def convert_answer_to_comment(self, cr, uid, id, context=None): """ Tools to convert an answer (forum.post) to a comment (mail.message). The original post is unlinked and a new comment is posted on the question using the post create_uid as the comment's author. """ post = self.browse(cr, SUPERUSER_ID, id, context=context) if not post.parent_id: return False # karma-based action check: use the post field that computed own/all value if not post.can_comment_convert: raise KarmaError('Not enough karma to convert an answer to a comment') # post the message question = post.parent_id values = { 'author_id': post.create_uid.partner_id.id, 'body': html2plaintext(post.content), 'type': 'comment', 'subtype': 'mail.mt_comment', 'date': post.create_date, } message_id = self.pool['forum.post'].message_post( cr, uid, question.id, context=dict(context, mail_create_nosubcribe=True), **values) # unlink the original answer, using SUPERUSER_ID to avoid karma issues self.pool['forum.post'].unlink(cr, SUPERUSER_ID, [post.id], context=context) return message_id def convert_comment_to_answer(self, cr, uid, message_id, default=None, context=None): """ Tool to convert a comment (mail.message) into an answer (forum.post). The original comment is unlinked and a new answer from the comment's author is created. Nothing is done if the comment's author already answered the question. 
""" comment = self.pool['mail.message'].browse(cr, SUPERUSER_ID, message_id, context=context) post = self.pool['forum.post'].browse(cr, uid, comment.res_id, context=context) user = self.pool['res.users'].browse(cr, uid, uid, context=context) if not comment.author_id or not comment.author_id.user_ids: # only comment posted by users can be converted return False # karma-based action check: must check the message's author to know if own / all karma_convert = comment.author_id.id == user.partner_id.id and post.forum_id.karma_comment_convert_own or post.forum_id.karma_comment_convert_all can_convert = uid == SUPERUSER_ID or user.karma >= karma_convert if not can_convert: raise KarmaError('Not enough karma to convert a comment to an answer') # check the message's author has not already an answer question = post.parent_id if post.parent_id else post post_create_uid = comment.author_id.user_ids[0] if any(answer.create_uid.id == post_create_uid.id for answer in question.child_ids): return False # create the new post post_values = { 'forum_id': question.forum_id.id, 'content': comment.body, 'parent_id': question.id, } # done with the author user to have create_uid correctly set new_post_id = self.pool['forum.post'].create(cr, post_create_uid.id, post_values, context=context) # delete comment self.pool['mail.message'].unlink(cr, SUPERUSER_ID, [comment.id], context=context) return new_post_id def unlink_comment(self, cr, uid, id, message_id, context=None): comment = self.pool['mail.message'].browse(cr, SUPERUSER_ID, message_id, context=context) post = self.pool['forum.post'].browse(cr, uid, id, context=context) user = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context) if not comment.model == 'forum.post' or not comment.res_id == id: return False # karma-based action check: must check the message's author to know if own or all karma_unlink = comment.author_id.id == user.partner_id.id and post.forum_id.karma_comment_unlink_own or post.forum_id.karma_comment_unlink_all can_unlink = uid == SUPERUSER_ID or user.karma >= karma_unlink if not can_unlink: raise KarmaError('Not enough karma to unlink a comment') return self.pool['mail.message'].unlink(cr, SUPERUSER_ID, [message_id], context=context) def set_viewed(self, cr, uid, ids, context=None): cr.execute("""UPDATE forum_post SET views = views+1 WHERE id IN %s""", (tuple(ids),)) return True def _get_access_link(self, cr, uid, mail, partner, context=None): post = self.pool['forum.post'].browse(cr, uid, mail.res_id, context=context) res_id = post.parent_id and "%s#answer-%s" % (post.parent_id.id, post.id) or post.id return "/forum/%s/question/%s" % (post.forum_id.id, res_id) @api.cr_uid_ids_context def message_post(self, cr, uid, thread_id, type='notification', subtype=None, context=None, **kwargs): if thread_id and type == 'comment': # user comments have a restriction on karma if isinstance(thread_id, (list, tuple)): post_id = thread_id[0] else: post_id = thread_id post = self.browse(cr, uid, post_id, context=context) if not post.can_comment: raise KarmaError('Not enough karma to comment') return super(Post, self).message_post(cr, uid, thread_id, type=type, subtype=subtype, context=context, **kwargs) class PostReason(osv.Model): _name = "forum.post.reason" _description = "Post Closing Reason" _order = 'name' _columns = { 'name': fields.char('Post Reason', required=True, translate=True), } class Vote(osv.Model): _name = 'forum.post.vote' _description = 'Vote' _columns = { 'post_id': fields.many2one('forum.post', 'Post', ondelete='cascade', 
required=True), 'user_id': fields.many2one('res.users', 'User', required=True), 'vote': fields.selection([('1', '1'), ('-1', '-1'), ('0', '0')], 'Vote', required=True), 'create_date': fields.datetime('Create Date', select=True, readonly=True), # TODO master: store these two 'forum_id': fields.related('post_id', 'forum_id', type='many2one', relation='forum.forum', string='Forum'), 'recipient_id': fields.related('post_id', 'create_uid', type='many2one', relation='res.users', string='To', help="The user receiving the vote"), } _defaults = { 'user_id': lambda self, cr, uid, ctx: uid, 'vote': lambda *args: '1', } def _get_karma_value(self, old_vote, new_vote, up_karma, down_karma): _karma_upd = { '-1': {'-1': 0, '0': -1 * down_karma, '1': -1 * down_karma + up_karma}, '0': {'-1': 1 * down_karma, '0': 0, '1': up_karma}, '1': {'-1': -1 * up_karma + down_karma, '0': -1 * up_karma, '1': 0} } return _karma_upd[old_vote][new_vote] def create(self, cr, uid, vals, context=None): vote_id = super(Vote, self).create(cr, uid, vals, context=context) vote = self.browse(cr, uid, vote_id, context=context) # own post check if vote.user_id.id == vote.post_id.create_uid.id: raise Warning('Not allowed to vote for its own post') # karma check if vote.vote == '1' and not vote.post_id.can_upvote: raise KarmaError('Not enough karma to upvote.') elif vote.vote == '-1' and not vote.post_id.can_downvote: raise KarmaError('Not enough karma to downvote.') # karma update if vote.post_id.parent_id: karma_value = self._get_karma_value('0', vote.vote, vote.forum_id.karma_gen_answer_upvote, vote.forum_id.karma_gen_answer_downvote) else: karma_value = self._get_karma_value('0', vote.vote, vote.forum_id.karma_gen_question_upvote, vote.forum_id.karma_gen_question_downvote) self.pool['res.users'].add_karma(cr, SUPERUSER_ID, [vote.recipient_id.id], karma_value, context=context) return vote_id def write(self, cr, uid, ids, values, context=None): if 'vote' in values: for vote in self.browse(cr, uid, ids, context=context): # own post check if vote.user_id.id == vote.post_id.create_uid.id: raise Warning('Not allowed to vote for its own post') # karma check if (values['vote'] == '1' or vote.vote == '-1' and values['vote'] == '0') and not vote.post_id.can_upvote: raise KarmaError('Not enough karma to upvote.') elif (values['vote'] == '-1' or vote.vote == '1' and values['vote'] == '0') and not vote.post_id.can_downvote: raise KarmaError('Not enough karma to downvote.') # karma update if vote.post_id.parent_id: karma_value = self._get_karma_value(vote.vote, values['vote'], vote.forum_id.karma_gen_answer_upvote, vote.forum_id.karma_gen_answer_downvote) else: karma_value = self._get_karma_value(vote.vote, values['vote'], vote.forum_id.karma_gen_question_upvote, vote.forum_id.karma_gen_question_downvote) self.pool['res.users'].add_karma(cr, SUPERUSER_ID, [vote.recipient_id.id], karma_value, context=context) res = super(Vote, self).write(cr, uid, ids, values, context=context) return res class Tags(osv.Model): _name = "forum.tag" _description = "Tag" _inherit = ['website.seo.metadata'] def _get_posts_count(self, cr, uid, ids, field_name, arg, context=None): return dict((tag_id, self.pool['forum.post'].search_count(cr, uid, [('tag_ids', 'in', tag_id)], context=context)) for tag_id in ids) def _get_tag_from_post(self, cr, uid, ids, context=None): return list(set( [tag.id for post in self.pool['forum.post'].browse(cr, SUPERUSER_ID, ids, context=context) for tag in post.tag_ids] )) _columns = { 'name': fields.char('Name', required=True), 'forum_id': 
fields.many2one('forum.forum', 'Forum', required=True), 'post_ids': fields.many2many('forum.post', 'forum_tag_rel', 'tag_id', 'post_id', 'Posts'), 'posts_count': fields.function( _get_posts_count, type='integer', string="Number of Posts", store={ 'forum.post': (_get_tag_from_post, ['tag_ids'], 10), } ), 'create_uid': fields.many2one('res.users', 'Created by', readonly=True), }
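# A standalone, worked copy of the _get_karma_value transition table above:
# the karma delta depends on both the old and the new vote, so flipping a
# downvote into an upvote refunds the downvote penalty and grants the
# upvote bonus in one step.
def karma_delta(old_vote, new_vote, up_karma, down_karma):
    table = {
        '-1': {'-1': 0, '0': -1 * down_karma, '1': -1 * down_karma + up_karma},
        '0': {'-1': 1 * down_karma, '0': 0, '1': up_karma},
        '1': {'-1': -1 * up_karma + down_karma, '0': -1 * up_karma, '1': 0},
    }
    return table[old_vote][new_vote]

# with the default karma_gen_answer_upvote = 10, karma_gen_answer_downvote = -2:
assert karma_delta('0', '1', 10, -2) == 10    # new upvote
assert karma_delta('1', '0', 10, -2) == -10   # upvote retracted
assert karma_delta('-1', '1', 10, -2) == 12   # downvote flipped to an upvote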
from __future__ import absolute_import

from django.core.management import call_command

from teams.models import Team, TeamMember, Workflow
from widget.rpc import Rpc


def refresh_obj(m):
    return m.__class__._default_manager.get(pk=m.pk)


def reset_solr():
    # cause the default site to load
    from haystack import backend
    sb = backend.SearchBackend()
    sb.clear()
    call_command('update_index')

rpc = Rpc()
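# A tiny usage sketch: refresh_obj() re-reads an instance from the database
# after out-of-band changes (this helper predates Django 1.8's
# Model.refresh_from_db). The slug below is hypothetical.
# team = Team.objects.get(slug='my-team')
# Team.objects.filter(pk=team.pk).update(name='Renamed')
# team = refresh_obj(team)   # team.name is now 'Renamed'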
""" Created on Tue Sep 22 09:47:41 2015 @author: thomas.douenne """ from __future__ import division import statsmodels.formula.api as smf from openfisca_france_indirect_taxation.examples.utils_example import simulate_df_calee_by_grosposte if __name__ == '__main__': import logging log = logging.getLogger(__name__) import sys logging.basicConfig(level = logging.INFO, stream = sys.stdout) simulated_variables = [ 'pondmen', 'revtot', 'rev_disp_loyerimput', 'depenses_carburants', 'depenses_essence', 'depenses_diesel', 'strate', 'nenfants', 'nadultes', 'situacj', 'situapr', 'niveau_vie_decile' ] for year in [2005]: data_for_reg = simulate_df_calee_by_grosposte(simulated_variables = simulated_variables, year = year) # In 2005 3 people consume fuel while their rev_disp_loyerimput is 0. Creates inf number in part_carburants data_for_reg = data_for_reg[data_for_reg['rev_disp_loyerimput'] > 0] data_for_reg['rev_disp_loyerimput_2'] = data_for_reg['rev_disp_loyerimput'] ** 2 data_for_reg['part_carburants'] = data_for_reg['depenses_carburants'] / data_for_reg['rev_disp_loyerimput'] data_for_reg['part_diesel'] = data_for_reg['depenses_diesel'] / data_for_reg['rev_disp_loyerimput'] data_for_reg['part_essence'] = data_for_reg['depenses_essence'] / data_for_reg['rev_disp_loyerimput'] data_for_reg['rural'] = 0 data_for_reg['petite_villes'] = 0 data_for_reg['villes_moyennes'] = 0 data_for_reg['grandes_villes'] = 0 data_for_reg['agglo_paris'] = 0 data_for_reg.loc[data_for_reg['strate'] == 0, 'rural'] = 1 data_for_reg.loc[data_for_reg['strate'] == 1, 'petite_villes'] = 1 data_for_reg.loc[data_for_reg['strate'] == 2, 'villes_moyennes'] = 1 data_for_reg.loc[data_for_reg['strate'] == 3, 'grandes_villes'] = 1 data_for_reg.loc[data_for_reg['strate'] == 4, 'agglo_paris'] = 1 deciles = ['decile_1', 'decile_2', 'decile_3', 'decile_4', 'decile_5', 'decile_6', 'decile_7', 'decile_8', 'decile_9', 'decile_10'] for decile in deciles: data_for_reg[decile] = 0 number = decile.replace('decile_', '') data_for_reg.loc[data_for_reg['niveau_vie_decile'] == int(number), decile] = 1 # Situation vis-à-vis de l'emploi : # Travaille : emploi, stage, étudiant # Autres : chômeurs, retraités, personnes au foyer, autres data_for_reg['cj_travaille'] = 0 data_for_reg['pr_travaille'] = 0 data_for_reg.loc[data_for_reg['situacj'] < 4, 'cj_travaille'] = 1 data_for_reg.loc[data_for_reg['situacj'] == 0, 'cj_travaille'] = 0 data_for_reg.loc[data_for_reg['situapr'] < 4, 'pr_travaille'] = 1 data_for_reg['travaille'] = data_for_reg['cj_travaille'] + data_for_reg['pr_travaille'] regression_carburants = smf.ols(formula = 'part_carburants ~ \ decile_1 + decile_2 + decile_3 + decile_4 + decile_5 + decile_6 + decile_7 + decile_8 + decile_9 + \ rural + petite_villes + grandes_villes + agglo_paris + \ nenfants + nadultes + travaille', data = data_for_reg).fit() print regression_carburants.summary() regression_diesel = smf.ols(formula = 'part_diesel ~ \ decile_1 + decile_2 + decile_3 + decile_4 + decile_5 + decile_6 + decile_7 + decile_8 + decile_9 + \ rural + petite_villes + grandes_villes + agglo_paris + \ nenfants + nadultes + travaille', data = data_for_reg).fit() print regression_diesel.summary() regression_essence = smf.ols(formula = 'part_essence ~ \ decile_1 + decile_2 + decile_3 + decile_4 + decile_5 + decile_6 + decile_7 + decile_8 + decile_9 + \ rural + petite_villes + grandes_villes + agglo_paris + \ nenfants + nadultes + travaille', data = data_for_reg).fit() print regression_essence.summary()