repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
jcpowermac/ansible | lib/ansible/modules/cloud/amazon/route53.py | 20 | 24080 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: route53
version_added: "1.3"
short_description: add or delete entries in Amazons Route53 DNS service
description:
- Creates and deletes DNS records in Amazons Route53 service
options:
state:
description:
- Specifies the state of the resource record. As of Ansible 2.4, the I(command) option has been changed
to I(state) as default and the choices 'present' and 'absent' have been added, but I(command) still works as well.
required: true
aliases: [ 'command' ]
choices: [ 'present', 'absent', 'get', 'create', 'delete' ]
zone:
description:
- The DNS zone to modify
required: true
hosted_zone_id:
description:
- The Hosted Zone ID of the DNS zone to modify
required: false
version_added: "2.0"
default: null
record:
description:
- The full DNS record to create or delete
required: true
ttl:
description:
- The TTL to give the new record
required: false
default: 3600 (one hour)
type:
description:
- The type of DNS record to create
required: true
choices: [ 'A', 'CNAME', 'MX', 'AAAA', 'TXT', 'PTR', 'SRV', 'SPF', 'CAA', 'NS', 'SOA' ]
alias:
description:
- Indicates if this is an alias record.
required: false
version_added: "1.9"
default: False
choices: [ 'True', 'False' ]
alias_hosted_zone_id:
description:
- The hosted zone identifier.
required: false
version_added: "1.9"
default: null
alias_evaluate_target_health:
description:
- Whether or not to evaluate an alias target health. Useful for aliases to Elastic Load Balancers.
required: false
version_added: "2.1"
default: false
value:
description:
- The new value when creating a DNS record. YAML lists or multiple comma-spaced values are allowed for non-alias records.
- When deleting a record all values for the record must be specified or Route53 will not delete it.
required: false
default: null
overwrite:
description:
- Whether an existing record should be overwritten on create if values do not match
required: false
default: null
retry_interval:
description:
- In the case that route53 is still servicing a prior request, this module will wait and try again after this many seconds. If you have many
domain names, the default of 500 seconds may be too long.
required: false
default: 500
private_zone:
description:
- If set to true, the private zone matching the requested name within the domain will be used if there are both public and private zones.
The default is to use the public zone.
required: false
default: false
version_added: "1.9"
identifier:
description:
- Have to be specified for Weighted, latency-based and failover resource record sets only. An identifier
that differentiates among multiple resource record sets that have the
same combination of DNS name and type.
required: false
default: null
version_added: "2.0"
weight:
description:
- Weighted resource record sets only. Among resource record sets that
have the same combination of DNS name and type, a value that
determines what portion of traffic for the current resource record set
is routed to the associated location.
required: false
default: null
version_added: "2.0"
region:
description:
- Latency-based resource record sets only Among resource record sets
that have the same combination of DNS name and type, a value that
determines which region this should be associated with for the
latency-based routing
required: false
default: null
version_added: "2.0"
health_check:
description:
- Health check to associate with this record
required: false
default: null
version_added: "2.0"
failover:
description:
- Failover resource record sets only. Whether this is the primary or
secondary resource record set. Allowed values are PRIMARY and SECONDARY
required: false
default: null
version_added: "2.0"
vpc_id:
description:
- "When used in conjunction with private_zone: true, this will only modify records in the private hosted zone attached to this VPC."
- This allows you to have multiple private hosted zones, all with the same name, attached to different VPCs.
required: false
default: null
version_added: "2.0"
wait:
description:
- Wait until the changes have been replicated to all Amazon Route 53 DNS servers.
required: false
default: no
version_added: "2.1"
wait_timeout:
description:
- How long to wait for the changes to be replicated, in seconds.
required: false
default: 300
version_added: "2.1"
author:
- "Bruce Pennypacker (@bpennypacker)"
- "Mike Buzzetti <mike.buzzetti@gmail.com>"
extends_documentation_fragment: aws
'''
RETURN = '''
nameservers:
description: nameservers associated with the zone
returned: when state is 'get'
type: list
sample:
- ns-1036.awsdns-00.org.
- ns-516.awsdns-00.net.
- ns-1504.awsdns-00.co.uk.
- ns-1.awsdns-00.com.
set:
description: info specific to the resource record
returned: when state is 'get'
type: complex
contains:
alias:
description: whether this is an alias
returned: always
type: bool
sample: false
failover:
description: ""
returned: always
type: NoneType
sample: null
health_check:
description: health_check associated with this record
returned: always
type: NoneType
sample: null
identifier:
description: ""
returned: always
type: NoneType
sample: null
record:
description: domain name for the record set
returned: always
type: string
sample: new.foo.com.
region:
description: ""
returned: always
type:
sample:
ttl:
description: resource record cache TTL
returned: always
type: string
sample: '3600'
type:
description: record set type
returned: always
type: string
sample: A
value:
description: value
returned: always
type: string
sample: 52.43.18.27
values:
description: values
returned: always
type: list
sample:
- 52.43.18.27
weight:
description: weight of the record
returned: always
type: string
sample: '3'
zone:
description: zone this record set belongs to
returned: always
type: string
sample: foo.bar.com.
'''
EXAMPLES = '''
# Add new.foo.com as an A record with 3 IPs and wait until the changes have been replicated
- route53:
state: present
zone: foo.com
record: new.foo.com
type: A
ttl: 7200
value: 1.1.1.1,2.2.2.2,3.3.3.3
wait: yes
# Update new.foo.com as an A record with a list of 3 IPs and wait until the changes have been replicated
- route53:
state: present
zone: foo.com
record: new.foo.com
type: A
ttl: 7200
value:
- 1.1.1.1
- 2.2.2.2
- 3.3.3.3
wait: yes
# Retrieve the details for new.foo.com
- route53:
state: get
zone: foo.com
record: new.foo.com
type: A
register: rec
# Delete new.foo.com A record using the results from the get command
- route53:
state: absent
zone: foo.com
record: "{{ rec.set.record }}"
ttl: "{{ rec.set.ttl }}"
type: "{{ rec.set.type }}"
value: "{{ rec.set.value }}"
# Add an AAAA record. Note that because there are colons in the value
# that the IPv6 address must be quoted. Also shows using the old form command=create.
- route53:
command: create
zone: foo.com
record: localhost.foo.com
type: AAAA
ttl: 7200
value: "::1"
# Add a SRV record with multiple fields for a service on port 22222
# For more information on SRV records see:
# https://en.wikipedia.org/wiki/SRV_record
- route53:
state: present
zone: foo.com
record: "_example-service._tcp.foo.com"
type: SRV
value: "0 0 22222 host1.foo.com,0 0 22222 host2.foo.com"
# Add a TXT record. Note that TXT and SPF records must be surrounded
# by quotes when sent to Route 53:
- route53:
state: present
zone: foo.com
record: localhost.foo.com
type: TXT
ttl: 7200
value: '"bar"'
# Add an alias record that points to an Amazon ELB:
- route53:
state: present
zone: foo.com
record: elb.foo.com
type: A
value: "{{ elb_dns_name }}"
alias: True
alias_hosted_zone_id: "{{ elb_zone_id }}"
# Retrieve the details for elb.foo.com
- route53:
state: get
zone: foo.com
record: elb.foo.com
type: A
register: rec
# Delete an alias record using the results from the get command
- route53:
state: absent
zone: foo.com
record: "{{ rec.set.record }}"
ttl: "{{ rec.set.ttl }}"
type: "{{ rec.set.type }}"
value: "{{ rec.set.value }}"
alias: True
alias_hosted_zone_id: "{{ rec.set.alias_hosted_zone_id }}"
# Add an alias record that points to an Amazon ELB and evaluates it health:
- route53:
state: present
zone: foo.com
record: elb.foo.com
type: A
value: "{{ elb_dns_name }}"
alias: True
alias_hosted_zone_id: "{{ elb_zone_id }}"
alias_evaluate_target_health: True
# Add an AAAA record with Hosted Zone ID.
- route53:
state: present
zone: foo.com
hosted_zone_id: Z2AABBCCDDEEFF
record: localhost.foo.com
type: AAAA
ttl: 7200
value: "::1"
# Use a routing policy to distribute traffic:
- route53:
state: present
zone: foo.com
record: www.foo.com
type: CNAME
value: host1.foo.com
ttl: 30
# Routing policy
identifier: "host1@www"
weight: 100
health_check: "d994b780-3150-49fd-9205-356abdd42e75"
# Add a CAA record (RFC 6844):
- route53:
state: present
zone: example.com
record: example.com
type: CAA
value:
- 0 issue "ca.example.net"
- 0 issuewild ";"
- 0 iodef "mailto:security@example.com"
'''
import time
import distutils.version
try:
import boto
import boto.ec2
from boto import route53
from boto.route53 import Route53Connection
from boto.route53.record import Record, ResourceRecordSets
from boto.route53.status import Status
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import ec2_argument_spec, get_aws_connection_info
MINIMUM_BOTO_VERSION = '2.28.0'
WAIT_RETRY_SLEEP = 5  # how many seconds to wait between propagation status polls


class TimeoutError(Exception):
    """Raised when waiting for record-change propagation exceeds wait_timeout.

    NOTE(review): this shadows the Python 3 builtin ``TimeoutError``; the
    module-level name is kept as-is because callers catch it by this name.
    """
    pass
def get_zone_by_name(conn, module, zone_name, want_private, zone_id, want_vpc_id):
    """Find a hosted zone by name or by zone id.

    :param conn: boto Route53Connection used for the lookups
    :param module: AnsibleModule instance (used only for boolean coercion)
    :param zone_name: fully-qualified zone name (trailing dot) to match
    :param want_private: only match zones whose PrivateZone flag equals this
    :param zone_id: if given, match on hosted zone id instead of name
    :param want_vpc_id: if truthy, additionally require the private zone to
        be attached to this VPC (disambiguates same-named private zones)
    :return: the matching boto zone object, or None if no zone matches
    """
    for zone in invoke_with_throttling_retries(conn.get_zones):
        # only save this zone id if the private status of the zone matches
        # the private_zone_in boolean specified in the params
        private_zone = module.boolean(zone.config.get('PrivateZone', False))
        # Either the name matches (and no explicit id was requested) or the
        # id matches after stripping the '/hostedzone/' prefix boto returns.
        if private_zone == want_private and ((zone.name == zone_name and zone_id is None) or zone.id.replace('/hostedzone/', '') == zone_id):
            if want_vpc_id:
                # NOTE: These details aren't available in other boto methods, hence the necessary
                # extra API call
                hosted_zone = invoke_with_throttling_retries(conn.get_hosted_zone, zone.id)
                zone_details = hosted_zone['GetHostedZoneResponse']
                # this is to deal with this boto bug: https://github.com/boto/boto/pull/2882
                if isinstance(zone_details['VPCs'], dict):
                    # Buggy boto: single VPC returned as a dict
                    if zone_details['VPCs']['VPC']['VPCId'] == want_vpc_id:
                        return zone
                else:  # Forward compatibility for when boto fixes that bug
                    if want_vpc_id in [v['VPCId'] for v in zone_details['VPCs']]:
                        return zone
            else:
                return zone
    return None
def commit(changes, retry_interval, wait, wait_timeout):
    """Commit changes, but retry PriorRequestNotComplete errors.

    :param changes: boto ResourceRecordSets holding the pending change
    :param retry_interval: seconds to sleep between PriorRequestNotComplete
        retries (up to 10 attempts)
    :param wait: if truthy, block until Route53 reports the change INSYNC
    :param wait_timeout: max seconds to wait for INSYNC before raising
    :raises TimeoutError: when propagation does not finish within wait_timeout
    :return: the raw ChangeResourceRecordSets response dict from boto
    """
    result = None
    retry = 10
    while True:
        try:
            retry -= 1
            result = changes.commit()
            break
        except boto.route53.exception.DNSServerError as e:
            # Extract the error code from the raw XML body; boto does not
            # expose it as a structured attribute on this exception.
            code = e.body.split("<Code>")[1]
            code = code.split("</Code>")[0]
            if code != 'PriorRequestNotComplete' or retry < 0:
                raise e
            time.sleep(float(retry_interval))
    if wait:
        timeout_time = time.time() + wait_timeout
        connection = changes.connection
        change = result['ChangeResourceRecordSetsResponse']['ChangeInfo']
        status = Status(connection, change)
        # Poll until Route53 reports the change replicated to all DNS servers.
        while status.status != 'INSYNC' and time.time() < timeout_time:
            time.sleep(WAIT_RETRY_SLEEP)
            status.update()
        if time.time() >= timeout_time:
            raise TimeoutError()
    return result
# Shamelessly copied over from https://git.io/vgmDG
IGNORE_CODE = 'Throttling'
MAX_RETRIES = 5


def invoke_with_throttling_retries(function_ref, *argv, **kwargs):
    """Call *function_ref* and transparently retry AWS 'Throttling' errors.

    Retries up to MAX_RETRIES times with exponential backoff (5, 10, 20,
    40, 80 seconds); any other BotoServerError — or exhausting the retry
    budget — re-raises the original exception.
    """
    attempt = 0
    while True:
        try:
            return function_ref(*argv, **kwargs)
        except boto.exception.BotoServerError as err:
            throttled = (err.code == IGNORE_CODE)
            if not throttled or attempt == MAX_RETRIES:
                raise err
            time.sleep(5 * (2 ** attempt))
            attempt += 1
def main():
    """Ansible entry point: create, delete or fetch a Route53 record set."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        state=dict(aliases=['command'], choices=['present', 'absent', 'get', 'create', 'delete'], required=True),
        zone=dict(required=True),
        hosted_zone_id=dict(required=False, default=None),
        record=dict(required=True),
        ttl=dict(required=False, type='int', default=3600),
        type=dict(choices=['A', 'CNAME', 'MX', 'AAAA', 'TXT', 'PTR', 'SRV', 'SPF', 'CAA', 'NS', 'SOA'], required=True),
        alias=dict(required=False, type='bool'),
        alias_hosted_zone_id=dict(required=False),
        alias_evaluate_target_health=dict(required=False, type='bool', default=False),
        value=dict(required=False, type='list'),
        overwrite=dict(required=False, type='bool'),
        retry_interval=dict(required=False, default=500),
        private_zone=dict(required=False, type='bool', default=False),
        identifier=dict(required=False, default=None),
        weight=dict(required=False, type='int'),
        region=dict(required=False),
        health_check=dict(required=False),
        failover=dict(required=False, choices=['PRIMARY', 'SECONDARY']),
        vpc_id=dict(required=False),
        wait=dict(required=False, type='bool', default=False),
        wait_timeout=dict(required=False, type='int', default=300),
    ))
    # state=present, absent, create, delete THEN value is required
    required_if = [('state', 'present', ['value']), ('state', 'create', ['value'])]
    required_if.extend([('state', 'absent', ['value']), ('state', 'delete', ['value'])])
    # If alias is True then you must specify alias_hosted_zone as well
    required_together = [['alias', 'alias_hosted_zone_id']]
    # failover, region, and weight are mutually exclusive
    mutually_exclusive = [('failover', 'region', 'weight')]
    module = AnsibleModule(argument_spec=argument_spec, required_together=required_together, required_if=required_if,
                           mutually_exclusive=mutually_exclusive)

    # Fail early if boto is absent or too old for the Route53 calls below.
    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')
    if distutils.version.StrictVersion(boto.__version__) < distutils.version.StrictVersion(MINIMUM_BOTO_VERSION):
        module.fail_json(msg='Found boto in version %s, but >= %s is required' % (boto.__version__, MINIMUM_BOTO_VERSION))

    # Map the (possibly legacy) state names onto the three internal commands.
    # The choices= restriction above guarantees one branch always matches.
    if module.params['state'] in ('present', 'create'):
        command_in = 'create'
    elif module.params['state'] in ('absent', 'delete'):
        command_in = 'delete'
    elif module.params['state'] == 'get':
        command_in = 'get'

    # Unpack and normalize parameters.
    zone_in = module.params.get('zone').lower()
    hosted_zone_id_in = module.params.get('hosted_zone_id')
    ttl_in = module.params.get('ttl')
    record_in = module.params.get('record').lower()
    type_in = module.params.get('type')
    value_in = module.params.get('value') or []
    alias_in = module.params.get('alias')
    alias_hosted_zone_id_in = module.params.get('alias_hosted_zone_id')
    alias_evaluate_target_health_in = module.params.get('alias_evaluate_target_health')
    retry_interval_in = module.params.get('retry_interval')
    # Supplying a vpc_id implies a private zone lookup.
    if module.params['vpc_id'] is not None:
        private_zone_in = True
    else:
        private_zone_in = module.params.get('private_zone')
    identifier_in = module.params.get('identifier')
    weight_in = module.params.get('weight')
    region_in = module.params.get('region')
    health_check_in = module.params.get('health_check')
    failover_in = module.params.get('failover')
    vpc_id_in = module.params.get('vpc_id')
    wait_in = module.params.get('wait')
    wait_timeout_in = module.params.get('wait_timeout')

    region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)

    # Route53 stores fully-qualified names; ensure the trailing dot.
    if zone_in[-1:] != '.':
        zone_in += "."
    if record_in[-1:] != '.':
        record_in += "."

    # Cross-parameter validation that AnsibleModule cannot express directly.
    if command_in == 'create' or command_in == 'delete':
        if alias_in and len(value_in) != 1:
            module.fail_json(msg="parameter 'value' must contain a single dns name for alias records")
        if (weight_in is not None or region_in is not None or failover_in is not None) and identifier_in is None:
            module.fail_json(msg="If you specify failover, region or weight you must also specify identifier")
        if (weight_in is None and region_in is None and failover_in is None) and identifier_in is not None:
            module.fail_json(msg="You have specified identifier which makes sense only if you specify one of: weight, region or failover.")

    # connect to the route53 endpoint
    try:
        conn = Route53Connection(**aws_connect_kwargs)
    except boto.exception.BotoServerError as e:
        module.fail_json(msg=e.error_message)

    # Find the named zone ID
    zone = get_zone_by_name(conn, module, zone_in, private_zone_in, hosted_zone_id_in, vpc_id_in)
    # Verify that the requested zone is already defined in Route53
    if zone is None:
        errmsg = "Zone %s does not exist in Route53" % zone_in
        module.fail_json(msg=errmsg)

    record = {}
    found_record = False

    # Build the record set we want to exist (or want deleted) so it can be
    # compared XML-for-XML against what Route53 currently holds.
    wanted_rset = Record(name=record_in, type=type_in, ttl=ttl_in,
                         identifier=identifier_in, weight=weight_in,
                         region=region_in, health_check=health_check_in,
                         failover=failover_in)
    for v in value_in:
        if alias_in:
            wanted_rset.set_alias(alias_hosted_zone_id_in, v, alias_evaluate_target_health_in)
        else:
            wanted_rset.add_value(v)

    sets = invoke_with_throttling_retries(conn.get_all_rrsets, zone.id, name=record_in,
                                          type=type_in, identifier=identifier_in)
    sets_iter = iter(sets)
    while True:
        try:
            # Advancing the iterator may trigger a paginated API call, so it
            # goes through the throttling-retry wrapper too.
            rset = invoke_with_throttling_retries(next, sets_iter)
        except StopIteration:
            break
        # Due to a bug in either AWS or Boto, "special" characters are returned as octals, preventing round
        # tripping of things like * and @.
        decoded_name = rset.name.replace(r'\052', '*')
        decoded_name = decoded_name.replace(r'\100', '@')
        # Need to save this changes in rset, because of comparing rset.to_xml() == wanted_rset.to_xml() in next block
        rset.name = decoded_name

        if identifier_in is not None:
            identifier_in = str(identifier_in)

        if rset.type == type_in and decoded_name.lower() == record_in.lower() and rset.identifier == identifier_in:
            found_record = True
            record['zone'] = zone_in
            record['type'] = rset.type
            record['record'] = decoded_name
            record['ttl'] = rset.ttl
            record['value'] = ','.join(sorted(rset.resource_records))
            record['values'] = sorted(rset.resource_records)
            if hosted_zone_id_in:
                record['hosted_zone_id'] = hosted_zone_id_in
            record['identifier'] = rset.identifier
            record['weight'] = rset.weight
            record['region'] = rset.region
            record['failover'] = rset.failover
            record['health_check'] = rset.health_check
            # NOTE(review): duplicate of the hosted_zone_id guard above;
            # kept as-is to avoid any behavior change in this pass.
            if hosted_zone_id_in:
                record['hosted_zone_id'] = hosted_zone_id_in
            if rset.alias_dns_name:
                # Alias records: 'value' reports the alias target instead of
                # the resource records set above.
                record['alias'] = True
                record['value'] = rset.alias_dns_name
                record['values'] = [rset.alias_dns_name]
                record['alias_hosted_zone_id'] = rset.alias_hosted_zone_id
                record['alias_evaluate_target_health'] = rset.alias_evaluate_target_health
            else:
                record['alias'] = False
                record['value'] = ','.join(sorted(rset.resource_records))
                record['values'] = sorted(rset.resource_records)
            # Desired state already present: idempotent no-op.
            if command_in == 'create' and rset.to_xml() == wanted_rset.to_xml():
                module.exit_json(changed=False)
        # We need to look only at the first rrset returned by the above call,
        # so break here. The returned elements begin with the one matching our
        # requested name, type, and identifier, if such an element exists,
        # followed by all others that come after it in alphabetical order.
        # Therefore, if the first set does not match, no subsequent set will
        # match either.
        break

    if command_in == 'get':
        if type_in == 'NS':
            ns = record.get('values', [])
        else:
            # Retrieve name servers associated to the zone.
            z = invoke_with_throttling_retries(conn.get_zone, zone_in)
            ns = invoke_with_throttling_retries(z.get_nameservers)
        module.exit_json(changed=False, set=record, nameservers=ns)

    if command_in == 'delete' and not found_record:
        module.exit_json(changed=False)

    changes = ResourceRecordSets(conn, zone.id)
    if command_in == 'create' or command_in == 'delete':
        if command_in == 'create' and found_record:
            # A different record already exists: only proceed (via UPSERT)
            # when the user explicitly asked to overwrite.
            if not module.params['overwrite']:
                module.fail_json(msg="Record already exists with different value. Set 'overwrite' to replace it")
            command = 'UPSERT'
        else:
            command = command_in.upper()
        changes.add_change_record(command, wanted_rset)

    try:
        result = invoke_with_throttling_retries(commit, changes, retry_interval_in, wait_in, wait_timeout_in)
    except boto.route53.exception.DNSServerError as e:
        # Pull the human-readable message out of the raw XML error body.
        txt = e.body.split("<Message>")[1]
        txt = txt.split("</Message>")[0]
        if "but it already exists" in txt:
            module.exit_json(changed=False)
        else:
            module.fail_json(msg=txt)
    except TimeoutError:
        module.fail_json(msg='Timeout waiting for changes to replicate')

    module.exit_json(changed=True)
if __name__ == '__main__':
main()
| gpl-3.0 |
Comunitea/OCB | addons/account/wizard/account_chart.py | 271 | 5191 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_chart(osv.osv_memory):
    """
    Wizard for opening the Chart of Accounts, optionally filtered by a
    fiscal year / period range and by target move state.
    """
    _name = "account.chart"
    _description = "Account chart"
    _columns = {
        # Fiscal year filter; empty means all open fiscal years.
        'fiscalyear': fields.many2one('account.fiscalyear',
                                      'Fiscal year',
                                      help='Keep empty for all open fiscal years'),
        'period_from': fields.many2one('account.period', 'Start period'),
        'period_to': fields.many2one('account.period', 'End period'),
        'target_move': fields.selection([('posted', 'All Posted Entries'),
                                         ('all', 'All Entries'),
                                         ], 'Target Moves', required=True),
    }

    def _get_fiscalyear(self, cr, uid, context=None):
        """Return default Fiscalyear value"""
        return self.pool.get('account.fiscalyear').find(cr, uid, context=context)

    def onchange_fiscalyear(self, cr, uid, ids, fiscalyear_id=False, context=None):
        """Recompute the default period range when the fiscal year changes.

        Returns an onchange dict setting period_from to the fiscal year's
        first period and period_to to its latest already-started period;
        both are cleared when no fiscal year is selected.
        """
        res = {}
        if fiscalyear_id:
            start_period = end_period = False
            # One query, two sub-selects: the fiscal year's first period
            # (opening/special periods preferred) and its most recent period
            # that has already started.
            cr.execute('''
                SELECT * FROM (SELECT p.id
                               FROM account_period p
                               LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
                               WHERE f.id = %s
                               ORDER BY p.date_start ASC, p.special DESC
                               LIMIT 1) AS period_start
                UNION ALL
                SELECT * FROM (SELECT p.id
                               FROM account_period p
                               LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
                               WHERE f.id = %s
                               AND p.date_start < NOW()
                               ORDER BY p.date_stop DESC
                               LIMIT 1) AS period_stop''', (fiscalyear_id, fiscalyear_id))
            periods = [i[0] for i in cr.fetchall()]
            if periods:
                start_period = periods[0]
                if len(periods) > 1:
                    end_period = periods[1]
            res['value'] = {'period_from': start_period, 'period_to': end_period}
        else:
            res['value'] = {'period_from': False, 'period_to': False}
        return res

    def account_chart_open_window(self, cr, uid, ids, context=None):
        """
        Opens chart of Accounts
        @param cr: the current row, from the database cursor,
        @param uid: the current user's ID for security checks,
        @param ids: List of account chart's IDs
        @return: dictionary of Open account chart window on given fiscalyear and all Entries or posted entries
        """
        mod_obj = self.pool.get('ir.model.data')
        act_obj = self.pool.get('ir.actions.act_window')
        period_obj = self.pool.get('account.period')
        fy_obj = self.pool.get('account.fiscalyear')
        if context is None:
            context = {}
        data = self.read(cr, uid, ids, context=context)[0]
        # Load the stock "account tree" window action as a template.
        result = mod_obj.get_object_reference(cr, uid, 'account', 'action_account_tree')
        id = result and result[1] or False
        result = act_obj.read(cr, uid, [id], context=context)[0]
        fiscalyear_id = data.get('fiscalyear', False) and data['fiscalyear'][0] or False
        result['periods'] = []
        if data['period_from'] and data['period_to']:
            period_from = data.get('period_from', False) and data['period_from'][0] or False
            period_to = data.get('period_to', False) and data['period_to'][0] or False
            result['periods'] = period_obj.build_ctx_periods(cr, uid, period_from, period_to)
        # The action context drives the filtering applied by the tree view.
        result['context'] = str({'fiscalyear': fiscalyear_id, 'periods': result['periods'],
                                 'state': data['target_move']})
        if fiscalyear_id:
            # Suffix the window title with the fiscal year code for clarity.
            result['name'] += ':' + fy_obj.read(cr, uid, [fiscalyear_id], context=context)[0]['code']
        return result

    _defaults = {
        'target_move': 'posted',
        'fiscalyear': _get_fiscalyear,
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
fangxingli/hue | desktop/core/ext-py/pysaml2-2.4.0/src/saml2/response.py | 31 | 38757 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import calendar
import logging
from saml2.samlp import STATUS_VERSION_MISMATCH
from saml2.samlp import STATUS_AUTHN_FAILED
from saml2.samlp import STATUS_INVALID_ATTR_NAME_OR_VALUE
from saml2.samlp import STATUS_INVALID_NAMEID_POLICY
from saml2.samlp import STATUS_NO_AUTHN_CONTEXT
from saml2.samlp import STATUS_NO_AVAILABLE_IDP
from saml2.samlp import STATUS_NO_PASSIVE
from saml2.samlp import STATUS_NO_SUPPORTED_IDP
from saml2.samlp import STATUS_PARTIAL_LOGOUT
from saml2.samlp import STATUS_PROXY_COUNT_EXCEEDED
from saml2.samlp import STATUS_REQUEST_DENIED
from saml2.samlp import STATUS_REQUEST_UNSUPPORTED
from saml2.samlp import STATUS_REQUEST_VERSION_DEPRECATED
from saml2.samlp import STATUS_REQUEST_VERSION_TOO_HIGH
from saml2.samlp import STATUS_REQUEST_VERSION_TOO_LOW
from saml2.samlp import STATUS_RESOURCE_NOT_RECOGNIZED
from saml2.samlp import STATUS_TOO_MANY_RESPONSES
from saml2.samlp import STATUS_UNKNOWN_ATTR_PROFILE
from saml2.samlp import STATUS_UNKNOWN_PRINCIPAL
from saml2.samlp import STATUS_UNSUPPORTED_BINDING
from saml2.samlp import STATUS_RESPONDER
import xmldsig as ds
import xmlenc as xenc
from saml2 import samlp
from saml2 import class_name
from saml2 import saml
from saml2 import extension_elements_to_elements
from saml2 import SAMLError
from saml2 import time_util
from saml2.s_utils import RequestVersionTooLow
from saml2.s_utils import RequestVersionTooHigh
from saml2.saml import attribute_from_string, XSI_TYPE
from saml2.saml import SCM_BEARER
from saml2.saml import SCM_HOLDER_OF_KEY
from saml2.saml import SCM_SENDER_VOUCHES
from saml2.saml import encrypted_attribute_from_string
from saml2.sigver import security_context
from saml2.sigver import SignatureError
from saml2.sigver import signed
from saml2.attribute_converter import to_local
from saml2.time_util import str_to_time, later_than
from saml2.validate import validate_on_or_after
from saml2.validate import validate_before
from saml2.validate import valid_instance
from saml2.validate import valid_address
from saml2.validate import NotValid
logger = logging.getLogger(__name__)
# ---------------------------------------------------------------------------
class IncorrectlySigned(SAMLError):
    """Raised when a response fails signature verification."""
    pass


class DecryptionFailed(SAMLError):
    """Raised when an encrypted element cannot be decrypted."""
    pass


class VerificationError(SAMLError):
    """Raised when a response fails a non-signature validity check."""
    pass


class StatusError(SAMLError):
    """Base class for errors mapped from SAML second-level status codes."""
    pass


class UnsolicitedResponse(SAMLError):
    """Raised for a response that matches no outstanding request."""
    pass


# One StatusError subclass per SAML 2.0 second-level status code, so
# callers can catch the specific failure they care about.

class StatusVersionMismatch(StatusError):
    pass


class StatusAuthnFailed(StatusError):
    pass


class StatusInvalidAttrNameOrValue(StatusError):
    pass


class StatusInvalidNameidPolicy(StatusError):
    pass


class StatusNoAuthnContext(StatusError):
    pass


class StatusNoAvailableIdp(StatusError):
    pass


class StatusNoPassive(StatusError):
    pass


class StatusNoSupportedIdp(StatusError):
    pass


class StatusPartialLogout(StatusError):
    pass


class StatusProxyCountExceeded(StatusError):
    pass


class StatusRequestDenied(StatusError):
    pass


class StatusRequestUnsupported(StatusError):
    pass


class StatusRequestVersionDeprecated(StatusError):
    pass


class StatusRequestVersionTooHigh(StatusError):
    pass


class StatusRequestVersionTooLow(StatusError):
    pass


class StatusResourceNotRecognized(StatusError):
    pass


class StatusTooManyResponses(StatusError):
    pass


class StatusUnknownAttrProfile(StatusError):
    pass


class StatusUnknownPrincipal(StatusError):
    pass


class StatusUnsupportedBinding(StatusError):
    pass


class StatusResponder(StatusError):
    pass


# Lookup table: SAML status-code URN -> exception class raised by
# StatusResponse.status_ok() when that code is returned by the IdP.
STATUSCODE2EXCEPTION = {
    STATUS_VERSION_MISMATCH: StatusVersionMismatch,
    STATUS_AUTHN_FAILED: StatusAuthnFailed,
    STATUS_INVALID_ATTR_NAME_OR_VALUE: StatusInvalidAttrNameOrValue,
    STATUS_INVALID_NAMEID_POLICY: StatusInvalidNameidPolicy,
    STATUS_NO_AUTHN_CONTEXT: StatusNoAuthnContext,
    STATUS_NO_AVAILABLE_IDP: StatusNoAvailableIdp,
    STATUS_NO_PASSIVE: StatusNoPassive,
    STATUS_NO_SUPPORTED_IDP: StatusNoSupportedIdp,
    STATUS_PARTIAL_LOGOUT: StatusPartialLogout,
    STATUS_PROXY_COUNT_EXCEEDED: StatusProxyCountExceeded,
    STATUS_REQUEST_DENIED: StatusRequestDenied,
    STATUS_REQUEST_UNSUPPORTED: StatusRequestUnsupported,
    STATUS_REQUEST_VERSION_DEPRECATED: StatusRequestVersionDeprecated,
    STATUS_REQUEST_VERSION_TOO_HIGH: StatusRequestVersionTooHigh,
    STATUS_REQUEST_VERSION_TOO_LOW: StatusRequestVersionTooLow,
    STATUS_RESOURCE_NOT_RECOGNIZED: StatusResourceNotRecognized,
    STATUS_TOO_MANY_RESPONSES: StatusTooManyResponses,
    STATUS_UNKNOWN_ATTR_PROFILE: StatusUnknownAttrProfile,
    STATUS_UNKNOWN_PRINCIPAL: StatusUnknownPrincipal,
    STATUS_UNSUPPORTED_BINDING: StatusUnsupportedBinding,
    STATUS_RESPONDER: StatusResponder,
}
# ---------------------------------------------------------------------------
def _dummy(_):
    # No-op placeholder callable: accepts one argument and returns None.
    return None
def for_me(conditions, myself):
    """Return True if *myself* is among the intended audiences.

    An assertion with no AudienceRestriction elements is valid for any
    relying party, so the absence of restrictions counts as a match.
    """
    restrictions = conditions.audience_restriction
    if not restrictions:  # No audience restriction
        return True
    for restriction in restrictions:
        audiences = restriction.audience or []
        if any(audience.text.strip() == myself for audience in audiences):
            return True
    return False
def authn_response(conf, return_addrs, outstanding_queries=None, timeslack=0,
                   asynchop=True, allow_unsolicited=False,
                   want_assertions_signed=False):
    """Factory: build an AuthnResponse parser bound to the SP configuration.

    When *timeslack* is falsy, it falls back to the configuration's
    accepted_time_diff (or 0 when that is unset/non-numeric).
    """
    security = security_context(conf)
    slack = timeslack
    if not slack:
        try:
            slack = int(conf.accepted_time_diff)
        except TypeError:
            slack = 0
    return AuthnResponse(security, conf.attribute_converters, conf.entityid,
                         return_addrs, outstanding_queries, slack,
                         asynchop=asynchop,
                         allow_unsolicited=allow_unsolicited,
                         want_assertions_signed=want_assertions_signed)
# comes in over SOAP so synchronous
def attribute_response(conf, return_addrs, timeslack=0, asynchop=False,
                       test=False):
    """Factory: build an AttributeResponse parser (synchronous SOAP binding).

    When *timeslack* is falsy, it falls back to the configuration's
    accepted_time_diff (or 0 when that is unset/non-numeric).
    """
    security = security_context(conf)
    slack = timeslack
    if not slack:
        try:
            slack = int(conf.accepted_time_diff)
        except TypeError:
            slack = 0
    return AttributeResponse(security, conf.attribute_converters, conf.entityid,
                             return_addrs, slack, asynchop=asynchop,
                             test=test)
class StatusResponse(object):
    """Base class for parsing and verifying SAML2 status responses.

    Handles the work all response types share: signature checking,
    schema validation, SAML status-code handling, issue-instant
    freshness checks and in-response-to matching.
    """
    msgtype = "status_response"
    def __init__(self, sec_context, return_addrs=None, timeslack=0,
                 request_id=0, asynchop=True):
        # sec_context: security context used for signature verification.
        self.sec = sec_context
        # Addresses (ACS endpoints) the response may legitimately target.
        self.return_addrs = return_addrs
        # Allowed clock skew (seconds) when checking time conditions.
        self.timeslack = timeslack
        # ID of the request this response is expected to answer (0 = any).
        self.request_id = request_id
        self.xmlstr = ""
        self.origxml = ""
        self.name_id = None
        self.response = None
        self.not_on_or_after = 0
        self.in_response_to = None
        # Default verification routine; subclasses swap in message-specific
        # variants (e.g. correctly_signed_logout_response).
        self.signature_check = self.sec.correctly_signed_response
        self.require_signature = False
        self.require_response_signature = False
        self.not_signed = False
        # True when the response arrives over an asynchronous (front-channel)
        # binding, in which case destination checking applies.
        self.asynchop = asynchop
    def _clear(self):
        # Reset parse results after a validation failure.
        self.xmlstr = ""
        self.name_id = None
        self.response = None
        self.not_on_or_after = 0
    def _postamble(self):
        """Common tail of the loading process: bail on missing/unsigned
        response, schema-validate it, and remember in_response_to."""
        if not self.response:
            logger.error("Response was not correctly signed")
            if self.xmlstr:
                logger.info(self.xmlstr)
            raise IncorrectlySigned()
        logger.debug("response: %s" % (self.response,))
        try:
            valid_instance(self.response)
        except NotValid as exc:
            logger.error("Not valid response: %s" % exc.args[0])
            self._clear()
            return self
        self.in_response_to = self.response.in_response_to
        return self
    def load_instance(self, instance):
        """Load an already-parsed response instance, verifying its
        signature when one is present."""
        if signed(instance):
            # This will check signature on Assertion which is the default
            try:
                self.response = self.sec.check_signature(instance)
            except SignatureError:
                # The response as a whole might be signed or not
                self.response = self.sec.check_signature(
                    instance, samlp.NAMESPACE + ":Response")
        else:
            self.not_signed = True
            self.response = instance
        return self._postamble()
    def _loads(self, xmldata, decode=True, origxml=None):
        """Parse *xmldata*, running the configured signature check.

        :param origxml: the untouched original document; needed because
            signature verification must run on the exact received bytes.
        """
        # own copy
        self.xmlstr = xmldata[:]
        logger.debug("xmlstr: %s" % (self.xmlstr,))
        if origxml:
            self.origxml = origxml
        else:
            self.origxml = self.xmlstr
        try:
            self.response = self.signature_check(
                xmldata, origdoc=origxml, must=self.require_signature,
                require_response_signature=self.require_response_signature)
        except TypeError:
            raise
        except SignatureError:
            raise
        except Exception as excp:
            logger.exception("EXCEPTION: %s", excp)
            raise
        #print "<", self.response
        return self._postamble()
    def status_ok(self):
        """Return True if the SAML Status reports success; otherwise raise
        the exception mapped from the (possibly nested) status code."""
        if self.response.status:
            status = self.response.status
            logger.info("status: %s" % (status,))
            if status.status_code.value != samlp.STATUS_SUCCESS:
                logger.info("Not successful operation: %s" % status)
                if status.status_code.status_code:
                    # Second-level status code carries the specific error.
                    excep = STATUSCODE2EXCEPTION[
                        status.status_code.status_code.value]
                else:
                    excep = StatusError
                if status.status_message:
                    msg = status.status_message.text
                else:
                    try:
                        msg = status.status_code.status_code.value
                    except Exception:
                        msg = "Unknown error"
                raise excep(
                    "%s from %s" % (msg, status.status_code.value,))
        return True
    def issue_instant_ok(self):
        """ Check that the response was issued at a reasonable time """
        # Accept anything issued within one day either way, widened by
        # the configured timeslack.
        upper = time_util.shift_time(time_util.time_in_a_while(days=1),
                                     self.timeslack).timetuple()
        lower = time_util.shift_time(time_util.time_a_while_ago(days=1),
                                     -self.timeslack).timetuple()
        # print "issue_instant: %s" % self.response.issue_instant
        # print "%s < x < %s" % (lower, upper)
        issued_at = str_to_time(self.response.issue_instant)
        return lower < issued_at < upper
    def _verify(self):
        """Run the common verification steps; return self on success,
        None on a non-exceptional mismatch (wrong request id/destination)."""
        if self.request_id and self.in_response_to and \
                self.in_response_to != self.request_id:
            logger.error("Not the id I expected: %s != %s" % (
                self.in_response_to, self.request_id))
            return None
        try:
            assert self.response.version == "2.0"
        except AssertionError:
            _ver = float(self.response.version)
            if _ver < 2.0:
                raise RequestVersionTooLow()
            else:
                raise RequestVersionTooHigh()
        if self.asynchop:
            # Front-channel responses must have been sent to one of our
            # registered return addresses.
            if self.response.destination and \
                    self.response.destination not in self.return_addrs:
                logger.error("%s not in %s" % (self.response.destination,
                                               self.return_addrs))
                return None
        assert self.issue_instant_ok()
        assert self.status_ok()
        return self
    def loads(self, xmldata, decode=True, origxml=None):
        return self._loads(xmldata, decode, origxml)
    def verify(self, key_file=""):
        # AssertionErrors from _verify are reported as a failed verification
        # (None) rather than propagated.
        try:
            return self._verify()
        except AssertionError:
            logger.exception("verify")
            return None
    def update(self, mold):
        # Copy the parse results of another (already loaded) response object.
        self.xmlstr = mold.xmlstr
        self.in_response_to = mold.in_response_to
        self.response = mold.response
    def issuer(self):
        """Return the entity ID of the issuing party."""
        return self.response.issuer.text.strip()
class LogoutResponse(StatusResponse):
    """Status response for single logout; verified with the
    logout-specific signature routine."""
    msgtype = "logout_response"
    def __init__(self, sec_context, return_addrs=None, timeslack=0,
                 asynchop=True):
        super(LogoutResponse, self).__init__(
            sec_context, return_addrs=return_addrs, timeslack=timeslack,
            asynchop=asynchop)
        self.signature_check = self.sec.correctly_signed_logout_response
class NameIDMappingResponse(StatusResponse):
    """Status response for NameID mapping requests."""
    msgtype = "name_id_mapping_response"
    def __init__(self, sec_context, return_addrs=None, timeslack=0,
                 request_id=0, asynchop=True):
        super(NameIDMappingResponse, self).__init__(
            sec_context, return_addrs, timeslack, request_id, asynchop)
        # Use the NameID-mapping specific verification routine.
        checker = self.sec.correctly_signed_name_id_mapping_response
        self.signature_check = checker
class ManageNameIDResponse(StatusResponse):
    """Status response for manage-NameID requests."""
    msgtype = "manage_name_id_response"
    def __init__(self, sec_context, return_addrs=None, timeslack=0,
                 request_id=0, asynchop=True):
        super(ManageNameIDResponse, self).__init__(
            sec_context, return_addrs, timeslack, request_id, asynchop)
        self.signature_check = self.sec.correctly_signed_manage_name_id_response
# ----------------------------------------------------------------------------
class AuthnResponse(StatusResponse):
    """ This is where all the profile compliance is checked.
    This one does saml2int compliance.

    Verifies assertions (signatures, conditions, subject confirmation),
    decrypts encrypted assertions/attributes, and extracts the identity
    (attribute-value assertions) plus the subject NameID.
    """
    msgtype = "authn_response"
    def __init__(self, sec_context, attribute_converters, entity_id,
                 return_addrs=None, outstanding_queries=None,
                 timeslack=0, asynchop=True, allow_unsolicited=False,
                 test=False, allow_unknown_attributes=False,
                 want_assertions_signed=False, want_response_signed=False,
                 **kwargs):
        StatusResponse.__init__(self, sec_context, return_addrs, timeslack,
                                asynchop=asynchop)
        self.entity_id = entity_id
        self.attribute_converters = attribute_converters
        # outstanding_queries maps request IDs to the URL the user came
        # from, so a response can be tied back to its triggering request.
        if outstanding_queries:
            self.outstanding_queries = outstanding_queries
        else:
            self.outstanding_queries = {}
        self.context = "AuthnReq"
        self.came_from = None
        self.ava = None
        self.assertion = None
        self.assertions = []
        self.session_not_on_or_after = 0
        self.allow_unsolicited = allow_unsolicited
        self.require_signature = want_assertions_signed
        self.require_response_signature = want_response_signed
        self.test = test
        self.allow_unknown_attributes = allow_unknown_attributes
        #
        try:
            self.extension_schema = kwargs["extension_schema"]
        except KeyError:
            self.extension_schema = {}
    def check_subject_confirmation_in_response_to(self, irp):
        """Return True if every SubjectConfirmationData.in_response_to in
        every assertion equals *irp*."""
        for assertion in self.response.assertion:
            for _sc in assertion.subject.subject_confirmation:
                try:
                    assert _sc.subject_confirmation_data.in_response_to == irp
                except AssertionError:
                    return False
        return True
    def loads(self, xmldata, decode=True, origxml=None):
        """Parse the response and, for asynchronous bindings, verify it
        answers an outstanding request (unless unsolicited responses are
        explicitly allowed)."""
        self._loads(xmldata, decode, origxml)
        if self.asynchop:
            if self.in_response_to in self.outstanding_queries:
                self.came_from = self.outstanding_queries[self.in_response_to]
                #del self.outstanding_queries[self.in_response_to]
                try:
                    if not self.check_subject_confirmation_in_response_to(
                            self.in_response_to):
                        logger.exception(
                            "Unsolicited response %s" % self.in_response_to)
                        raise UnsolicitedResponse(
                            "Unsolicited response: %s" % self.in_response_to)
                except AttributeError:
                    # Missing subject/subject confirmation data — cannot
                    # cross-check, accept as-is.
                    pass
            elif self.allow_unsolicited:
                pass
            else:
                logger.exception(
                    "Unsolicited response %s" % self.in_response_to)
                raise UnsolicitedResponse(
                    "Unsolicited response: %s" % self.in_response_to)
        return self
    def clear(self):
        # Reset all per-response state.
        self._clear()
        self.came_from = None
        self.ava = None
        self.assertion = None
    def authn_statement_ok(self, optional=False):
        """Check the single AuthnStatement; record SessionNotOnOrAfter as
        an epoch timestamp when present and still valid."""
        try:
            # the assertion MUST contain one AuthNStatement
            assert len(self.assertion.authn_statement) == 1
        except AssertionError:
            if optional:
                return True
            else:
                logger.error("No AuthnStatement")
                raise
        authn_statement = self.assertion.authn_statement[0]
        if authn_statement.session_not_on_or_after:
            if validate_on_or_after(authn_statement.session_not_on_or_after,
                                    self.timeslack):
                self.session_not_on_or_after = calendar.timegm(
                    time_util.str_to_time(
                        authn_statement.session_not_on_or_after))
            else:
                return False
        return True
        # check authn_statement.session_index
    def condition_ok(self, lax=False):
        """Validate the assertion's <saml:Conditions>: time window,
        audience restriction and any extension conditions."""
        if self.test:
            lax = True
        # The Identity Provider MUST include a <saml:Conditions> element
        assert self.assertion.conditions
        conditions = self.assertion.conditions
        logger.debug("conditions: %s" % conditions)
        # if no sub-elements or elements are supplied, then the
        # assertion is considered to be valid.
        if not conditions.keyswv():
            return True
        # if both are present NotBefore must be earlier than NotOnOrAfter
        if conditions.not_before and conditions.not_on_or_after:
            if not later_than(conditions.not_on_or_after,
                              conditions.not_before):
                return False
        try:
            if conditions.not_on_or_after:
                self.not_on_or_after = validate_on_or_after(
                    conditions.not_on_or_after, self.timeslack)
            if conditions.not_before:
                validate_before(conditions.not_before, self.timeslack)
        except Exception as excp:
            logger.error("Exception on conditions: %s" % (excp,))
            if not lax:
                raise
            else:
                self.not_on_or_after = 0
        if not self.allow_unsolicited:
            # Audience restriction: the assertion must be addressed to us.
            if not for_me(conditions, self.entity_id):
                if not lax:
                    raise Exception("Not for me!!!")
        if conditions.condition:  # extra conditions
            # Only extension conditions with a known xsi:type are accepted.
            for cond in conditions.condition:
                try:
                    if cond.extension_attributes[
                            XSI_TYPE] in self.extension_schema:
                        pass
                    else:
                        raise Exception("Unknown condition")
                except KeyError:
                    raise Exception("Missing xsi:type specification")
        return True
    def decrypt_attributes(self, attribute_statement):
        """
        Decrypts possible encrypted attributes and adds the decrypts to the
        list of attributes.
        :param attribute_statement: A SAML.AttributeStatement which might
            contain both encrypted attributes and attributes.
        """
        # _node_name = [
        # "urn:oasis:names:tc:SAML:2.0:assertion:EncryptedData",
        # "urn:oasis:names:tc:SAML:2.0:assertion:EncryptedAttribute"]
        for encattr in attribute_statement.encrypted_attribute:
            if not encattr.encrypted_key:
                _decr = self.sec.decrypt(encattr.encrypted_data)
                _attr = attribute_from_string(_decr)
                attribute_statement.attribute.append(_attr)
            else:
                _decr = self.sec.decrypt(encattr)
                enc_attr = encrypted_attribute_from_string(_decr)
                attrlist = enc_attr.extensions_as_elements("Attribute", saml)
                attribute_statement.attribute.extend(attrlist)
    def get_identity(self):
        """ The assertion can contain zero or one attributeStatements

        :return: dict of local attribute name -> values (empty when no
            attribute statement is present).
        """
        if not self.assertion.attribute_statement:
            logger.error("Missing Attribute Statement")
            ava = {}
        else:
            assert len(self.assertion.attribute_statement) == 1
            _attr_statem = self.assertion.attribute_statement[0]
            logger.debug("Attribute Statement: %s" % (_attr_statem,))
            for aconv in self.attribute_converters:
                logger.debug("Converts name format: %s" % (aconv.name_format,))
            self.decrypt_attributes(_attr_statem)
            ava = to_local(self.attribute_converters, _attr_statem,
                           self.allow_unknown_attributes)
        return ava
    def _bearer_confirmed(self, data):
        """Verify a bearer-method SubjectConfirmationData: address, time
        window and (for async bindings) the in_response_to linkage."""
        if not data:
            return False
        if data.address:
            if not valid_address(data.address):
                return False
                # verify that I got it from the correct sender
        # These two will raise exception if untrue
        validate_on_or_after(data.not_on_or_after, self.timeslack)
        validate_before(data.not_before, self.timeslack)
        # not_before must be < not_on_or_after
        if not later_than(data.not_on_or_after, data.not_before):
            return False
        if self.asynchop and self.came_from is None:
            if data.in_response_to:
                if data.in_response_to in self.outstanding_queries:
                    self.came_from = self.outstanding_queries[
                        data.in_response_to]
                    #del self.outstanding_queries[data.in_response_to]
                elif self.allow_unsolicited:
                    pass
                else:
                    # This is where I don't allow unsolicited reponses
                    # Either in_response_to == None or has a value I don't
                    # recognize
                    logger.debug("in response to: '%s'" % data.in_response_to)
                    logger.info("outstanding queries: %s" % (
                        self.outstanding_queries.keys(),))
                    raise Exception(
                        "Combination of session id and requestURI I don't "
                        "recall")
        return True
    def _holder_of_key_confirmed(self, data):
        # Holder-of-key confirmation only requires that a ds:KeyInfo
        # element is present among the confirmation data's extensions.
        if not data:
            return False
        has_keyinfo = False
        for element in extension_elements_to_elements(data,
                                                      [samlp, saml, xenc, ds]):
            if isinstance(element, ds.KeyInfo):
                has_keyinfo = True
        return has_keyinfo
    def get_subject(self):
        """ The assertion must contain a Subject

        Filters out subject confirmations that fail verification, then
        extracts (decrypting if necessary) and returns the NameID.
        """
        assert self.assertion.subject
        subject = self.assertion.subject
        subjconf = []
        for subject_confirmation in subject.subject_confirmation:
            _data = subject_confirmation.subject_confirmation_data
            if subject_confirmation.method == SCM_BEARER:
                if not self._bearer_confirmed(_data):
                    continue
            elif subject_confirmation.method == SCM_HOLDER_OF_KEY:
                if not self._holder_of_key_confirmed(_data):
                    continue
            elif subject_confirmation.method == SCM_SENDER_VOUCHES:
                pass
            else:
                raise ValueError("Unknown subject confirmation method: %s" % (
                    subject_confirmation.method,))
            subjconf.append(subject_confirmation)
        if not subjconf:
            raise VerificationError("No valid subject confirmation")
        subject.subject_confirmation = subjconf
        # The subject must contain a name_id
        try:
            assert subject.name_id
            self.name_id = subject.name_id
        except AssertionError:
            if subject.encrypted_id:
                # decrypt encrypted ID
                _name_id_str = self.sec.decrypt(
                    subject.encrypted_id.encrypted_data.to_string())
                _name_id = saml.name_id_from_string(_name_id_str)
                self.name_id = _name_id
            else:
                raise VerificationError("Missing NameID")
        logger.info("Subject NameID: %s" % self.name_id)
        return self.name_id
    def _assertion(self, assertion, verified=False):
        """
        Check the assertion
        :param assertion:
        :param verified: True if the signature was already checked (e.g.
            during decryption of an encrypted assertion).
        :return: True/False depending on if the assertion is sane or not
        """
        if not hasattr(assertion, 'signature') or not assertion.signature:
            logger.debug("unsigned")
            if self.require_signature:
                raise SignatureError("Signature missing for assertion")
        else:
            logger.debug("signed")
            if not verified:
                try:
                    self.sec.check_signature(assertion, class_name(assertion),
                                             self.xmlstr)
                except Exception as exc:
                    logger.error("correctly_signed_response: %s" % exc)
                    raise
        self.assertion = assertion
        logger.debug("assertion context: %s" % (self.context,))
        logger.debug("assertion keys: %s" % (assertion.keyswv()))
        logger.debug("outstanding_queries: %s" % (self.outstanding_queries,))
        #if self.context == "AuthnReq" or self.context == "AttrQuery":
        if self.context == "AuthnReq":
            self.authn_statement_ok()
        # elif self.context == "AttrQuery":
        #     self.authn_statement_ok(True)
        if not self.condition_ok():
            raise VerificationError("Condition not OK")
        logger.debug("--- Getting Identity ---")
        if self.context == "AuthnReq" or self.context == "AttrQuery":
            self.ava = self.get_identity()
            logger.debug("--- AVA: %s" % (self.ava,))
        try:
            self.get_subject()
            if self.asynchop:
                if self.allow_unsolicited:
                    pass
                elif self.came_from is None:
                    raise VerificationError("Came from")
            return True
        except Exception:
            logger.exception("get subject")
            raise
    def decrypt_assertions(self, encrypted_assertions, decr_txt):
        """Extract assertions from decrypted text, verifying each
        assertion's signature against the decrypted document."""
        res = []
        for encrypted_assertion in encrypted_assertions:
            if encrypted_assertion.extension_elements:
                assertions = extension_elements_to_elements(
                    encrypted_assertion.extension_elements, [saml, samlp])
                for assertion in assertions:
                    if assertion.signature:
                        if not self.sec.check_signature(
                                assertion, origdoc=decr_txt,
                                node_name=class_name(assertion)):
                            logger.error(
                                "Failed to verify signature on '%s'" % assertion)
                            raise SignatureError()
                    res.append(assertion)
        return res
    def parse_assertion(self, key_file=""):
        """Decrypt (if needed) and validate all assertions; on success
        self.assertion holds the first validated assertion."""
        if self.context == "AuthnQuery":
            # can contain one or more assertions
            pass
        else:  # This is a saml2int limitation
            try:
                assert len(self.response.assertion) == 1 or \
                    len(self.response.encrypted_assertion) == 1
            except AssertionError:
                raise Exception("No assertion part")
        res = []
        if self.response.encrypted_assertion:
            logger.debug("***Encrypted assertion/-s***")
            decr_text = self.sec.decrypt(self.xmlstr, key_file)
            resp = samlp.response_from_string(decr_text)
            res = self.decrypt_assertions(resp.encrypted_assertion, decr_text)
            if self.response.assertion:
                self.response.assertion.extend(res)
            else:
                self.response.assertion = res
            self.response.encrypted_assertion = []
            self.xmlstr = decr_text
        if self.response.assertion:
            logger.debug("***Unencrypted assertion***")
            for assertion in self.response.assertion:
                # Assertions that came out of decryption (in `res`) were
                # already signature-checked there.
                if not self._assertion(assertion, assertion in res):
                    return False
                else:
                    self.assertions.append(assertion)
            self.assertion = self.assertions[0]
        return True
    def verify(self, key_file=""):
        """ Verify that the assertion is syntactically correct and
        the signature is correct if present.
        :param key_file: If not the default key file should be used this is it.
        """
        try:
            res = self._verify()
        except AssertionError as err:
            logger.error("Verification error on the response: %s" % err)
            raise
        else:
            if res is None:
                return None
        if not isinstance(self.response, samlp.Response):
            return self
        if self.parse_assertion(key_file):
            return self
        else:
            logger.error("Could not parse the assertion")
            return None
    def session_id(self):
        """ Returns the SessionID of the response """
        return self.response.in_response_to
    def id(self):
        """ Return the ID of the response """
        return self.response.id
    def authn_info(self):
        """Return a list of (authn-context-class, authenticating
        authorities) tuples collected from the authn statements."""
        res = []
        for astat in self.assertion.authn_statement:
            context = astat.authn_context
            if context:
                try:
                    aclass = context.authn_context_class_ref.text
                except AttributeError:
                    aclass = ""
                try:
                    authn_auth = [a.text for a in
                                  context.authenticating_authority]
                except AttributeError:
                    authn_auth = []
                res.append((aclass, authn_auth))
        return res
    def authz_decision_info(self):
        """Map each authz decision statement by its decision value."""
        res = {"permit": [], "deny": [], "indeterminate": []}
        for adstat in self.assertion.authz_decision_statement:
            # one of 'Permit', 'Deny', 'Indeterminate'
            res[adstat.decision.text.lower()] = adstat
        return res
    def session_info(self):
        """ Returns a predefined set of information gleened from the
        response.
        :returns: Dictionary with information
        """
        if self.session_not_on_or_after > 0:
            nooa = self.session_not_on_or_after
        else:
            nooa = self.not_on_or_after
        if self.context == "AuthzQuery":
            return {"name_id": self.name_id, "came_from": self.came_from,
                    "issuer": self.issuer(), "not_on_or_after": nooa,
                    "authz_decision_info": self.authz_decision_info()}
        else:
            return {"ava": self.ava, "name_id": self.name_id,
                    "came_from": self.came_from, "issuer": self.issuer(),
                    "not_on_or_after": nooa, "authn_info": self.authn_info()}
    def __str__(self):
        return "%s" % self.xmlstr
    def verify_attesting_entity(self, address):
        """
        Assumes one assertion. At least one address specification has to be
        correct.
        :param address: IP address of attesting entity
        :return: True/False
        """
        correct = 0
        for subject_conf in self.assertion.subject.subject_confirmation:
            if subject_conf.subject_confirmation_data is None:
                correct += 1  # In reality undefined
            elif subject_conf.subject_confirmation_data.address:
                if subject_conf.subject_confirmation_data.address == address:
                    correct += 1
            else:
                correct += 1
        if correct:
            return True
        else:
            return False
class AuthnQueryResponse(AuthnResponse):
    """Response to an AuthnQuery; may contain multiple assertions and
    skips condition checking."""
    msgtype = "authn_query_response"
    def __init__(self, sec_context, attribute_converters, entity_id,
                 return_addrs=None, timeslack=0, asynchop=False, test=False):
        AuthnResponse.__init__(self, sec_context, attribute_converters,
                               entity_id, return_addrs, timeslack=timeslack,
                               asynchop=asynchop, test=test)
        # NOTE(review): these three are already set by AuthnResponse.__init__;
        # the reassignments look redundant — confirm before removing.
        self.entity_id = entity_id
        self.attribute_converters = attribute_converters
        self.assertion = None
        self.context = "AuthnQuery"
    def condition_ok(self, lax=False):  # Should I care about conditions ?
        return True
class AttributeResponse(AuthnResponse):
    """Response to an AttributeQuery (SOAP/synchronous)."""
    msgtype = "attribute_response"
    def __init__(self, sec_context, attribute_converters, entity_id,
                 return_addrs=None, timeslack=0, asynchop=False, test=False):
        AuthnResponse.__init__(self, sec_context, attribute_converters,
                               entity_id, return_addrs, timeslack=timeslack,
                               asynchop=asynchop, test=test)
        # NOTE(review): entity_id/attribute_converters/assertion already set
        # by AuthnResponse.__init__ — reassignments appear redundant.
        self.entity_id = entity_id
        self.attribute_converters = attribute_converters
        self.assertion = None
        self.context = "AttrQuery"
class AuthzResponse(AuthnResponse):
    """ A successful response will be in the form of assertions containing
    authorization decision statements."""
    msgtype = "authz_decision_response"
    def __init__(self, sec_context, attribute_converters, entity_id,
                 return_addrs=None, timeslack=0, asynchop=False):
        AuthnResponse.__init__(self, sec_context, attribute_converters,
                               entity_id, return_addrs, timeslack=timeslack,
                               asynchop=asynchop)
        # NOTE(review): entity_id/attribute_converters/assertion already set
        # by AuthnResponse.__init__ — reassignments appear redundant.
        self.entity_id = entity_id
        self.attribute_converters = attribute_converters
        self.assertion = None
        self.context = "AuthzQuery"
class ArtifactResponse(AuthnResponse):
    """Response to an ArtifactResolve request."""
    msgtype = "artifact_response"
    def __init__(self, sec_context, attribute_converters, entity_id,
                 return_addrs=None, timeslack=0, asynchop=False, test=False):
        AuthnResponse.__init__(self, sec_context, attribute_converters,
                               entity_id, return_addrs, timeslack=timeslack,
                               asynchop=asynchop, test=test)
        # NOTE(review): entity_id/attribute_converters/assertion already set
        # by AuthnResponse.__init__ — reassignments appear redundant.
        self.entity_id = entity_id
        self.attribute_converters = attribute_converters
        self.assertion = None
        self.context = "ArtifactResolve"
def response_factory(xmlstr, conf, return_addrs=None, outstanding_queries=None,
                     timeslack=0, decode=True, request_id=0, origxml=None,
                     asynchop=True, allow_unsolicited=False,
                     want_assertions_signed=False):
    """Parse *xmlstr* and return the most specific response object:
    an AuthnResponse when assertions are present, a LogoutResponse when
    the generic parse raises TypeError, otherwise a plain StatusResponse.
    """
    sec_context = security_context(conf)
    if not timeslack:
        try:
            timeslack = int(conf.accepted_time_diff)
        except TypeError:
            timeslack = 0
    attribute_converters = conf.attribute_converters
    entity_id = conf.entityid
    extension_schema = conf.extension_schema
    response = StatusResponse(sec_context, return_addrs, timeslack, request_id,
                              asynchop)
    try:
        response.loads(xmlstr, decode, origxml)
        if response.response.assertion or response.response.encrypted_assertion:
            # Assertions present => treat as an authentication response.
            authnresp = AuthnResponse(sec_context, attribute_converters,
                                      entity_id, return_addrs,
                                      outstanding_queries, timeslack, asynchop,
                                      allow_unsolicited,
                                      extension_schema=extension_schema,
                                      want_assertions_signed=want_assertions_signed)
            authnresp.update(response)
            return authnresp
    except TypeError:
        # The generic signature check raised TypeError: retry parsing the
        # document as a logout response with its specific check.
        response.signature_check = sec_context.correctly_signed_logout_response
        response.loads(xmlstr, decode, origxml)
        logoutresp = LogoutResponse(sec_context, return_addrs, timeslack,
                                    asynchop=asynchop)
        logoutresp.update(response)
        return logoutresp
    # No assertions and not a logout response: plain status response.
    return response
# ===========================================================================
# A class of it's own
class AssertionIDResponse(object):
    """Response to an assertion-ID request. Stand-alone class (not a
    StatusResponse subclass) because the payload is a bare assertion
    rather than a samlp:Response."""
    msgtype = "assertion_id_response"
    def __init__(self, sec_context, attribute_converters, timeslack=0,
                 **kwargs):
        self.sec = sec_context
        self.timeslack = timeslack
        self.xmlstr = ""
        self.origxml = ""
        self.name_id = ""
        self.response = None
        self.not_signed = False
        self.attribute_converters = attribute_converters
        self.assertion = None
        self.context = "AssertionIdResponse"
        self.signature_check = self.sec.correctly_signed_assertion_id_response
    def loads(self, xmldata, decode=True, origxml=None):
        """Parse *xmldata*; the verified document doubles as the assertion."""
        # own copy
        self.xmlstr = xmldata[:]
        logger.debug("xmlstr: %s" % (self.xmlstr,))
        self.origxml = origxml
        try:
            self.response = self.signature_check(xmldata, origdoc=origxml)
            self.assertion = self.response
        except TypeError:
            raise
        except SignatureError:
            raise
        except Exception as excp:
            logger.exception("EXCEPTION: %s", excp)
            raise
        #print "<", self.response
        return self._postamble()
    def verify(self, key_file=""):
        """Schema-validate the response; raise NotValid on failure."""
        try:
            valid_instance(self.response)
        except NotValid as exc:
            logger.error("Not valid response: %s" % exc.args[0])
            raise
        return self
    def _postamble(self):
        # A missing response at this point means the signature check failed.
        if not self.response:
            logger.error("Response was not correctly signed")
            if self.xmlstr:
                logger.info(self.xmlstr)
            raise IncorrectlySigned()
        logger.debug("response: %s" % (self.response,))
        return self
| apache-2.0 |
nicolargo/intellij-community | python/lib/Lib/site-packages/django/template/smartif.py | 331 | 6261 | """
Parser and utilities for the smart 'if' tag
"""
import operator
# Using a simple top down parser, as described here:
# http://effbot.org/zone/simple-top-down-parsing.htm.
# 'led' = left denotation
# 'nud' = null denotation
# 'bp' = binding power (left = lbp, right = rbp)
class TokenBase(object):
    """
    Common behaviour for parser tokens (operators and literals): useful
    repr/display output, plus default nud/led hooks that raise syntax
    errors when a token appears in a position it does not support.
    """
    id = None              # node/token type name
    value = None           # used by literals
    first = second = None  # used by tree nodes
    def nud(self, parser):
        # Null denotation - called in prefix context
        message = "Not expecting '%s' in this position in if tag." % self.id
        raise parser.error_class(message)
    def led(self, left, parser):
        # Left denotation - called in infix context
        message = "Not expecting '%s' as infix operator in if tag." % self.id
        raise parser.error_class(message)
    def display(self):
        """
        Returns what to display in error messages for this node
        """
        return self.id
    def __repr__(self):
        parts = (self.id, self.first, self.second)
        shown = " ".join(str(part) for part in parts if part is not None)
        return "(%s)" % shown
def infix(bp, func):
    """
    Build and return an infix-operator token class with binding power *bp*.

    Instances evaluate themselves by delegating to *func*, which receives
    the template context plus the left and right sub-nodes.
    """
    class Operator(TokenBase):
        lbp = bp
        def led(self, left, parser):
            # Infix position: keep the already-parsed left operand, then
            # parse the right-hand side using our own binding power.
            self.first = left
            rhs = parser.expression(bp)
            self.second = rhs
            return self
        def eval(self, context):
            try:
                return func(context, self.first, self.second)
            except Exception:
                # Templates shouldn't throw exceptions when rendering. We are
                # most likely to get exceptions for things like {% if foo in bar
                # %} where 'bar' does not support 'in', so default to False
                return False
    return Operator
def prefix(bp, func):
    """
    Build and return a prefix-operator token class with binding power *bp*.

    Instances evaluate themselves by delegating to *func*, which receives
    the template context plus the single operand node.
    """
    class Operator(TokenBase):
        lbp = bp
        def nud(self, parser):
            # Prefix position: parse the operand that follows us.
            operand = parser.expression(bp)
            self.first = operand
            self.second = None
            return self
        def eval(self, context):
            try:
                return func(context, self.first)
            except Exception:
                # Mirror infix(): rendering must never raise, so fall
                # back to False on any evaluation error.
                return False
    return Operator
# Operator precedence follows Python.
# NB - we can get slightly more accurate syntax error messages by not using the
# same object for '==' and '='.
# We defer variable evaluation to the lambda to ensure that terms are
# lazily evaluated using Python's boolean parsing logic.
OPERATORS = {
    'or': infix(6, lambda context, x, y: x.eval(context) or y.eval(context)),
    'and': infix(7, lambda context, x, y: x.eval(context) and y.eval(context)),
    'not': prefix(8, lambda context, x: not x.eval(context)),
    'in': infix(9, lambda context, x, y: x.eval(context) in y.eval(context)),
    'not in': infix(9, lambda context, x, y: x.eval(context) not in y.eval(context)),
    '=': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
    '==': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
    '!=': infix(10, lambda context, x, y: x.eval(context) != y.eval(context)),
    '>': infix(10, lambda context, x, y: x.eval(context) > y.eval(context)),
    '>=': infix(10, lambda context, x, y: x.eval(context) >= y.eval(context)),
    '<': infix(10, lambda context, x, y: x.eval(context) < y.eval(context)),
    '<=': infix(10, lambda context, x, y: x.eval(context) <= y.eval(context)),
}
# Assign 'id' to each:
# (the id is the operator's spelling, used by display()/__repr__ in errors)
for key, op in OPERATORS.items():
    op.id = key
class Literal(TokenBase):
    """
    A basic self-resolvable object similar to a Django template variable.
    """
    # IfParser uses Literal in create_var, but TemplateIfParser overrides
    # create_var so that a proper implementation that actually resolves
    # variables, filters etc is used.
    id = "literal"
    lbp = 0
    def __init__(self, value):
        self.value = value
    def nud(self, parser):
        # A literal in prefix position simply stands for itself.
        return self
    def eval(self, context):
        # The context is ignored; the wrapped value is returned as-is.
        return self.value
    def display(self):
        return repr(self.value)
    def __repr__(self):
        return "({0} {1!r})".format(self.id, self.value)
class EndToken(TokenBase):
    # Sentinel marking the end of the token stream; binding power 0 stops
    # the expression() loop.
    lbp = 0
    def nud(self, parser):
        raise parser.error_class("Unexpected end of expression in if tag.")
# Replace the class with a single shared instance — the parser only ever
# needs one end-of-stream sentinel, compared with `is`.
EndToken = EndToken()
class IfParser(object):
    """Top-down (Pratt) operator-precedence parser over a token list.

    Tokens that are not operators become Literals via create_var();
    subclasses override create_var/error_class for template integration.
    """
    error_class = ValueError
    def __init__(self, tokens):
        # pre-pass necessary to turn 'not','in' into single token
        l = len(tokens)
        mapped_tokens = []
        i = 0
        while i < l:
            token = tokens[i]
            if token == "not" and i + 1 < l and tokens[i+1] == "in":
                token = "not in"
                i += 1  # skip 'in'
            mapped_tokens.append(self.translate_token(token))
            i += 1
        self.tokens = mapped_tokens
        self.pos = 0
        self.current_token = self.next()
    def translate_token(self, token):
        # TypeError covers unhashable tokens that cannot be dict keys.
        try:
            op = OPERATORS[token]
        except (KeyError, TypeError):
            return self.create_var(token)
        else:
            return op()
    def next(self):
        # Advance through the token list, yielding the EndToken sentinel
        # once exhausted.
        if self.pos >= len(self.tokens):
            return EndToken
        else:
            retval = self.tokens[self.pos]
            self.pos += 1
            return retval
    def parse(self):
        """Parse the whole token stream and return the root node."""
        retval = self.expression()
        # Check that we have exhausted all the tokens
        if self.current_token is not EndToken:
            raise self.error_class("Unused '%s' at end of if expression." %
                                   self.current_token.display())
        return retval
    def expression(self, rbp=0):
        """Core Pratt loop: bind operators to the left operand while their
        binding power exceeds *rbp*."""
        t = self.current_token
        self.current_token = self.next()
        left = t.nud(self)
        while rbp < self.current_token.lbp:
            t = self.current_token
            self.current_token = self.next()
            left = t.led(left, self)
        return left
    def create_var(self, value):
        # Hook point: TemplateIfParser returns real template variables here.
        return Literal(value)
| apache-2.0 |
hmflash/Cura | cura/CuraActions.py | 4 | 5131 | # Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from PyQt5.QtCore import QObject, QUrl
from PyQt5.QtGui import QDesktopServices
from UM.FlameProfiler import pyqtSlot
from UM.Event import CallFunctionEvent
from UM.Application import Application
from UM.Math.Vector import Vector
from UM.Scene.Selection import Selection
from UM.Scene.Iterator.BreadthFirstIterator import BreadthFirstIterator
from UM.Operations.GroupedOperation import GroupedOperation
from UM.Operations.RemoveSceneNodeOperation import RemoveSceneNodeOperation
from UM.Operations.SetTransformOperation import SetTransformOperation
from cura.SetParentOperation import SetParentOperation
from cura.MultiplyObjectsJob import MultiplyObjectsJob
from cura.Settings.SetObjectExtruderOperation import SetObjectExtruderOperation
from cura.Settings.ExtruderManager import ExtruderManager
class CuraActions(QObject):
    """QML-exposed slots for common scene actions: opening help pages and
    manipulating the current selection (center, multiply, delete,
    per-object extruder assignment)."""
    def __init__(self, parent = None):
        super().__init__(parent)
    @pyqtSlot()
    def openDocumentation(self):
        # Starting a web browser from a signal handler connected to a menu will crash on windows.
        # So instead, defer the call to the next run of the event loop, since that does work.
        # Note that weirdly enough, only signal handlers that open a web browser fail like that.
        event = CallFunctionEvent(self._openUrl, [QUrl("http://ultimaker.com/en/support/software")], {})
        Application.getInstance().functionEvent(event)
    @pyqtSlot()
    def openBugReportPage(self):
        # Deferred via the event loop for the same reason as openDocumentation.
        event = CallFunctionEvent(self._openUrl, [QUrl("http://github.com/Ultimaker/Cura/issues")], {})
        Application.getInstance().functionEvent(event)
    ## Center all objects in the selection
    @pyqtSlot()
    def centerSelection(self) -> None:
        # All moves are grouped into one undoable operation.
        operation = GroupedOperation()
        for node in Selection.getAllSelectedObjects():
            # Walk up to the outermost group so grouped objects move as one.
            current_node = node
            while current_node.getParent() and current_node.getParent().callDecoration("isGroup"):
                current_node = current_node.getParent()
            # Vector() is the origin, i.e. the center of the build plate.
            center_operation = SetTransformOperation(current_node, Vector())
            operation.addOperation(center_operation)
        operation.push()
    ## Multiply all objects in the selection
    #
    #   \param count The number of times to multiply the selection.
    @pyqtSlot(int)
    def multiplySelection(self, count: int) -> None:
        # The job runs asynchronously; 8 is the placement padding (mm?)
        # passed to MultiplyObjectsJob — confirm its unit in the job class.
        job = MultiplyObjectsJob(Selection.getAllSelectedObjects(), count, 8)
        job.start()
    ## Delete all selected objects.
    @pyqtSlot()
    def deleteSelection(self) -> None:
        if not Application.getInstance().getController().getToolsEnabled():
            return
        removed_group_nodes = []
        op = GroupedOperation()
        nodes = Selection.getAllSelectedObjects()
        for node in nodes:
            op.addOperation(RemoveSceneNodeOperation(node))
            # If deleting a node leaves its group with a single child,
            # dissolve the group: reparent the child and remove the group.
            group_node = node.getParent()
            if group_node and group_node.callDecoration("isGroup") and group_node not in removed_group_nodes:
                remaining_nodes_in_group = list(set(group_node.getChildren()) - set(nodes))
                if len(remaining_nodes_in_group) == 1:
                    removed_group_nodes.append(group_node)
                    op.addOperation(SetParentOperation(remaining_nodes_in_group[0], group_node.getParent()))
                    op.addOperation(RemoveSceneNodeOperation(group_node))
        op.push()
    ## Set the extruder that should be used to print the selection.
    #
    #   \param extruder_id The ID of the extruder stack to use for the selected objects.
    @pyqtSlot(str)
    def setExtruderForSelection(self, extruder_id: str) -> None:
        operation = GroupedOperation()
        nodes_to_change = []
        for node in Selection.getAllSelectedObjects():
            # Do not change any nodes that already have the right extruder set.
            if node.callDecoration("getActiveExtruder") == extruder_id:
                continue
            # If the node is a group, apply the active extruder to all children of the group.
            if node.callDecoration("isGroup"):
                for grouped_node in BreadthFirstIterator(node):
                    if grouped_node.callDecoration("getActiveExtruder") == extruder_id:
                        continue
                    if grouped_node.callDecoration("isGroup"):
                        continue
                    nodes_to_change.append(grouped_node)
                continue
            nodes_to_change.append(node)
        if not nodes_to_change:
            # If there are no changes to make, we still need to reset the selected extruders.
            # This is a workaround for checked menu items being deselected while still being
            # selected.
            ExtruderManager.getInstance().resetSelectedObjectExtruders()
            return
        for node in nodes_to_change:
            operation.addOperation(SetObjectExtruderOperation(node, extruder_id))
        operation.push()
    def _openUrl(self, url):
        # Runs on the event loop via CallFunctionEvent (see slots above).
        QDesktopServices.openUrl(url)
| agpl-3.0 |
CoDEmanX/ArangoDB | 3rdParty/V8-4.3.61/third_party/python_26/Lib/test/test_inspect.py | 53 | 19756 | import sys
import types
import unittest
import inspect
import datetime
from test.test_support import TESTFN, run_unittest
from test import inspect_fodder as mod
from test import inspect_fodder2 as mod2
# Functions tested in this suite:
# ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode,
# isbuiltin, isroutine, isgenerator, isgeneratorfunction, getmembers,
# getdoc, getfile, getmodule, getsourcefile, getcomments, getsource,
# getclasstree, getargspec, getargvalues, formatargspec, formatargvalues,
# currentframe, stack, trace, isdatadescriptor
# NOTE: There are some additional tests relating to interaction with
# zipimport in the test_zipimport_support test module.
# Point at the fodder module's .py source rather than a compiled .pyc/.pyo,
# since the source-retrieval tests compare against the source text.
modfile = mod.__file__
if modfile.endswith(('c', 'o')):
    modfile = modfile[:-1]

import __builtin__

# Deliberately raise and capture a traceback object for the is*-predicate
# tests below.  sys.exc_traceback is the Python 2-only module-level alias.
try:
    1/0
except:
    tb = sys.exc_traceback

# Shared fixture instance; its methods populate frame/stack/trace data.
git = mod.StupidGit()
class IsTestBase(unittest.TestCase):
    """Shared helper for the inspect.is* predicate tests.

    ``istest`` asserts that an object (given as a source-string expression
    evaluated in this module's namespace) satisfies exactly one of the
    registered predicates.
    """

    # The mutually-exclusive inspect type predicates under test.
    predicates = set([inspect.isbuiltin, inspect.isclass, inspect.iscode,
                      inspect.isframe, inspect.isfunction, inspect.ismethod,
                      inspect.ismodule, inspect.istraceback,
                      inspect.isgenerator, inspect.isgeneratorfunction])

    def istest(self, predicate, exp):
        """Assert that *exp* satisfies *predicate* and no other predicate."""
        obj = eval(exp)  # exp is a trusted test-fixture expression, not user input
        # failUnless/failIf are the Python 2-era aliases of assertTrue/assertFalse.
        self.failUnless(predicate(obj), '%s(%s)' % (predicate.__name__, exp))
        for other in self.predicates - set([predicate]):
            # Generator functions are also plain functions, so that one
            # overlap is explicitly allowed.
            if predicate == inspect.isgeneratorfunction and\
               other == inspect.isfunction:
                continue
            self.failIf(other(obj), 'not %s(%s)' % (other.__name__, exp))
def generator_function_example(self):
    """Module-level generator fixture used by the isgeneratorfunction test.

    Yields 0 and 1, matching the original two-step iteration.
    """
    counter = 0
    while counter < 2:
        yield counter
        counter += 1
class TestPredicates(IsTestBase):
    """Exercises the inspect.is* type predicates (Python 2 semantics)."""

    def test_sixteen(self):
        count = len(filter(lambda x:x.startswith('is'), dir(inspect)))
        # This test exists to remind you to update Doc/library/inspect.rst,
        # which claims there are 16 such functions.
        expected = 16
        err_msg = "There are %d (not %d) is* functions" % (count, expected)
        self.assertEqual(count, expected, err_msg)

    def test_excluding_predicates(self):
        # Each istest() call also asserts that every *other* registered
        # predicate rejects the object (see IsTestBase.istest).
        self.istest(inspect.isbuiltin, 'sys.exit')
        self.istest(inspect.isbuiltin, '[].append')
        self.istest(inspect.isclass, 'mod.StupidGit')
        self.istest(inspect.iscode, 'mod.spam.func_code')  # func_code: Python 2 spelling
        self.istest(inspect.isframe, 'tb.tb_frame')
        self.istest(inspect.isfunction, 'mod.spam')
        self.istest(inspect.ismethod, 'mod.StupidGit.abuse')
        self.istest(inspect.ismethod, 'git.argue')
        self.istest(inspect.ismodule, 'mod')
        self.istest(inspect.istraceback, 'tb')
        self.istest(inspect.isdatadescriptor, '__builtin__.file.closed')
        self.istest(inspect.isdatadescriptor, '__builtin__.file.softspace')
        self.istest(inspect.isgenerator, '(x for x in xrange(2))')
        self.istest(inspect.isgeneratorfunction, 'generator_function_example')
        # The descriptor-type attributes are implementation-specific; only
        # test them when the current interpreter exposes them.
        if hasattr(types, 'GetSetDescriptorType'):
            self.istest(inspect.isgetsetdescriptor,
                        'type(tb.tb_frame).f_locals')
        else:
            self.failIf(inspect.isgetsetdescriptor(type(tb.tb_frame).f_locals))
        if hasattr(types, 'MemberDescriptorType'):
            self.istest(inspect.ismemberdescriptor, 'datetime.timedelta.days')
        else:
            self.failIf(inspect.ismemberdescriptor(datetime.timedelta.days))

    def test_isroutine(self):
        # isroutine accepts both plain functions and built-in methods.
        self.assert_(inspect.isroutine(mod.spam))
        self.assert_(inspect.isroutine([].count))
class TestInterpreterStack(IsTestBase):
    """Tests for getargvalues/formatargvalues/stack/trace.

    The frame/stack/traceback fixtures (mod.fr, mod.st, git.ex, git.tr) are
    populated once by calling git.abuse() in __init__.
    """

    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)
        # Side effect: captures live frame, stack, and trace objects into
        # the fodder module / git instance for the tests below.
        git.abuse(7, 8, 9)

    def test_abuse_done(self):
        self.istest(inspect.istraceback, 'git.ex[2]')
        self.istest(inspect.isframe, 'mod.fr')

    def test_stack(self):
        # The expected line numbers and source snippets below refer to the
        # inspect_fodder module's source text.
        self.assert_(len(mod.st) >= 5)
        self.assertEqual(mod.st[0][1:],
                         (modfile, 16, 'eggs', [' st = inspect.stack()\n'], 0))
        self.assertEqual(mod.st[1][1:],
                         (modfile, 9, 'spam', [' eggs(b + d, c + f)\n'], 0))
        self.assertEqual(mod.st[2][1:],
                         (modfile, 43, 'argue', [' spam(a, b, c)\n'], 0))
        self.assertEqual(mod.st[3][1:],
                         (modfile, 39, 'abuse', [' self.argue(a, b, c)\n'], 0))

    def test_trace(self):
        self.assertEqual(len(git.tr), 3)
        self.assertEqual(git.tr[0][1:], (modfile, 43, 'argue',
                                         [' spam(a, b, c)\n'], 0))
        self.assertEqual(git.tr[1][1:], (modfile, 9, 'spam',
                                         [' eggs(b + d, c + f)\n'], 0))
        self.assertEqual(git.tr[2][1:], (modfile, 18, 'eggs',
                                         [' q = y / 0\n'], 0))

    def test_frame(self):
        args, varargs, varkw, locals = inspect.getargvalues(mod.fr)
        self.assertEqual(args, ['x', 'y'])
        self.assertEqual(varargs, None)
        self.assertEqual(varkw, None)
        self.assertEqual(locals, {'x': 11, 'p': 11, 'y': 14})
        self.assertEqual(inspect.formatargvalues(args, varargs, varkw, locals),
                         '(x=11, y=14)')

    def test_previous_frame(self):
        # mod.fr.f_back is the frame of spam(), whose signature uses
        # Python 2-only nested tuple parameters.
        args, varargs, varkw, locals = inspect.getargvalues(mod.fr.f_back)
        self.assertEqual(args, ['a', 'b', 'c', 'd', ['e', ['f']]])
        self.assertEqual(varargs, 'g')
        self.assertEqual(varkw, 'h')
        self.assertEqual(inspect.formatargvalues(args, varargs, varkw, locals),
                         '(a=7, b=8, c=9, d=3, (e=4, (f=5,)), *g=(), **h={})')
class GetSourceBase(unittest.TestCase):
    """Base class for getsource() tests: loads a fodder module's source once
    and compares inspect.getsource output against line ranges of it."""

    # Subclasses must override.
    fodderFile = None

    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)
        # file() is the Python 2 builtin open; reads the fodder module's
        # source once per test-case instance.
        self.source = file(inspect.getsourcefile(self.fodderFile)).read()

    def sourcerange(self, top, bottom):
        """Return source lines *top* through *bottom* (1-based, inclusive)."""
        lines = self.source.split("\n")
        return "\n".join(lines[top-1:bottom]) + "\n"

    def assertSourceEqual(self, obj, top, bottom):
        """Assert getsource(obj) equals the fodder file's lines top..bottom."""
        self.assertEqual(inspect.getsource(obj),
                         self.sourcerange(top, bottom))
class TestRetrievingSourceCode(GetSourceBase):
    """getmembers/getclasstree/getdoc/getcomments/getmodule/getsource tests
    against the inspect_fodder module."""
    fodderFile = mod

    def test_getclasses(self):
        # getmembers returns (name, object) pairs sorted by name.
        classes = inspect.getmembers(mod, inspect.isclass)
        self.assertEqual(classes,
                         [('FesteringGob', mod.FesteringGob),
                          ('MalodorousPervert', mod.MalodorousPervert),
                          ('ParrotDroppings', mod.ParrotDroppings),
                          ('StupidGit', mod.StupidGit)])
        # With unique=1, getclasstree nests derived classes under their bases.
        tree = inspect.getclasstree([cls[1] for cls in classes], 1)
        self.assertEqual(tree,
                         [(mod.ParrotDroppings, ()),
                          (mod.StupidGit, ()),
                          [(mod.MalodorousPervert, (mod.StupidGit,)),
                           [(mod.FesteringGob, (mod.MalodorousPervert,
                                                mod.ParrotDroppings))
                            ]
                           ]
                          ])

    def test_getfunctions(self):
        functions = inspect.getmembers(mod, inspect.isfunction)
        self.assertEqual(functions, [('eggs', mod.eggs),
                                     ('spam', mod.spam)])

    def test_getdoc(self):
        # getdoc normalises docstring indentation.
        self.assertEqual(inspect.getdoc(mod), 'A module docstring.')
        self.assertEqual(inspect.getdoc(mod.StupidGit),
                         'A longer,\n\nindented\n\ndocstring.')
        self.assertEqual(inspect.getdoc(git.abuse),
                         'Another\n\ndocstring\n\ncontaining\n\ntabs')

    def test_cleandoc(self):
        self.assertEqual(inspect.cleandoc('An\n    indented\n    docstring.'),
                         'An\nindented\ndocstring.')

    def test_getcomments(self):
        # getcomments returns the block of '#' lines preceding an object.
        self.assertEqual(inspect.getcomments(mod), '# line 1\n')
        self.assertEqual(inspect.getcomments(mod.StupidGit), '# line 20\n')

    def test_getmodule(self):
        # Check actual module
        self.assertEqual(inspect.getmodule(mod), mod)
        # Check class (uses __module__ attribute)
        self.assertEqual(inspect.getmodule(mod.StupidGit), mod)
        # Check a method (no __module__ attribute, falls back to filename)
        self.assertEqual(inspect.getmodule(mod.StupidGit.abuse), mod)
        # Do it again (check the caching isn't broken)
        self.assertEqual(inspect.getmodule(mod.StupidGit.abuse), mod)
        # Check a builtin
        self.assertEqual(inspect.getmodule(str), sys.modules["__builtin__"])
        # Check filename override
        self.assertEqual(inspect.getmodule(None, modfile), mod)

    def test_getsource(self):
        # Line ranges refer to the inspect_fodder source file.
        self.assertSourceEqual(git.abuse, 29, 39)
        self.assertSourceEqual(mod.StupidGit, 21, 46)

    def test_getsourcefile(self):
        self.assertEqual(inspect.getsourcefile(mod.spam), modfile)
        self.assertEqual(inspect.getsourcefile(git.abuse), modfile)

    def test_getfile(self):
        self.assertEqual(inspect.getfile(mod.StupidGit), mod.__file__)

    def test_getmodule_recursion(self):
        # A dynamically created module whose "file" is not on disk must not
        # send getsourcefile/getmodule into infinite recursion.
        from types import ModuleType
        name = '__inspect_dummy'
        m = sys.modules[name] = ModuleType(name)
        m.__file__ = "<string>" # hopefully not a real filename...
        m.__loader__ = "dummy"  # pretend the filename is understood by a loader
        exec "def x(): pass" in m.__dict__  # Python 2-only exec-in syntax
        self.assertEqual(inspect.getsourcefile(m.x.func_code), '<string>')
        del sys.modules[name]
        inspect.getmodule(compile('a=10','','single'))
class TestDecorators(GetSourceBase):
    """getsource() behaviour for decorated functions (fodder in mod2)."""
    fodderFile = mod2

    def test_wrapped_decorator(self):
        # Line ranges refer to the inspect_fodder2 source file.
        self.assertSourceEqual(mod2.wrapped, 14, 17)

    def test_replacing_decorator(self):
        self.assertSourceEqual(mod2.gone, 9, 10)
class TestOneliners(GetSourceBase):
    """getsource() behaviour for one-line and oddly-wrapped definitions."""
    fodderFile = mod2

    def test_oneline_lambda(self):
        # Test inspect.getsource with a one-line lambda function.
        self.assertSourceEqual(mod2.oll, 25, 25)

    def test_threeline_lambda(self):
        # Test inspect.getsource with a three-line lambda function,
        # where the second and third lines are _not_ indented.
        self.assertSourceEqual(mod2.tll, 28, 30)

    def test_twoline_indented_lambda(self):
        # Test inspect.getsource with a two-line lambda function,
        # where the second line _is_ indented.
        self.assertSourceEqual(mod2.tlli, 33, 34)

    def test_onelinefunc(self):
        # Test inspect.getsource with a regular one-line function.
        self.assertSourceEqual(mod2.onelinefunc, 37, 37)

    def test_manyargs(self):
        # Test inspect.getsource with a regular function where
        # the arguments are on two lines and _not_ indented and
        # the body on the second line with the last arguments.
        self.assertSourceEqual(mod2.manyargs, 40, 41)

    def test_twolinefunc(self):
        # Test inspect.getsource with a regular function where
        # the body is on two lines, following the argument list and
        # continued on the next line by a \\.
        self.assertSourceEqual(mod2.twolinefunc, 44, 45)

    def test_lambda_in_list(self):
        # Test inspect.getsource with a one-line lambda function
        # defined in a list, indented.
        self.assertSourceEqual(mod2.a[1], 49, 49)

    def test_anonymous(self):
        # Test inspect.getsource with a lambda function defined
        # as argument to another function.
        self.assertSourceEqual(mod2.anonymous, 55, 55)
class TestBuggyCases(GetSourceBase):
    """Regression tests for historically buggy getsource() corner cases."""
    fodderFile = mod2

    def test_with_comment(self):
        self.assertSourceEqual(mod2.with_comment, 58, 59)

    def test_multiline_sig(self):
        self.assertSourceEqual(mod2.multiline_sig[0], 63, 64)

    def test_nested_class(self):
        self.assertSourceEqual(mod2.func69().func71, 71, 72)

    def test_one_liner_followed_by_non_name(self):
        self.assertSourceEqual(mod2.func77, 77, 77)

    def test_one_liner_dedent_non_name(self):
        self.assertSourceEqual(mod2.cls82.func83, 83, 83)

    def test_with_comment_instead_of_docstring(self):
        self.assertSourceEqual(mod2.func88, 88, 90)

    def test_method_in_dynamic_class(self):
        self.assertSourceEqual(mod2.method_in_dynamic_class, 95, 97)
# Helper for testing classify_class_attrs.
def attrs_wo_objs(cls):
    """Return (name, kind, defining_class) triples for *cls*, dropping the
    object field from inspect.classify_class_attrs output."""
    triples = []
    for attribute in inspect.classify_class_attrs(cls):
        triples.append(attribute[:3])
    return triples
class TestClassesAndFunctions(unittest.TestCase):
    """getmro/getargspec/formatargspec and classify_class_attrs tests."""

    def test_classic_mro(self):
        # Test classic-class method resolution order.
        class A: pass
        class B(A): pass
        class C(A): pass
        class D(B, C): pass

        # Classic classes use depth-first, left-to-right resolution.
        expected = (D, B, A, C)
        got = inspect.getmro(D)
        self.assertEqual(expected, got)

    def test_newstyle_mro(self):
        # The same w/ new-class MRO.
        class A(object): pass
        class B(A): pass
        class C(A): pass
        class D(B, C): pass

        # New-style classes use the C3 linearization.
        expected = (D, B, C, A, object)
        got = inspect.getmro(D)
        self.assertEqual(expected, got)

    def assertArgSpecEquals(self, routine, args_e, varargs_e = None,
                            varkw_e = None, defaults_e = None,
                            formatted = None):
        # Helper: compare a routine's argspec components against the
        # expected values and, optionally, its formatted signature string.
        args, varargs, varkw, defaults = inspect.getargspec(routine)
        self.assertEqual(args, args_e)
        self.assertEqual(varargs, varargs_e)
        self.assertEqual(varkw, varkw_e)
        self.assertEqual(defaults, defaults_e)
        if formatted is not None:
            self.assertEqual(inspect.formatargspec(args, varargs, varkw, defaults),
                             formatted)

    def test_getargspec(self):
        self.assertArgSpecEquals(mod.eggs, ['x', 'y'], formatted = '(x, y)')

        # mod.spam uses Python 2-only nested tuple parameters, which
        # getargspec reports as nested lists.
        self.assertArgSpecEquals(mod.spam,
                                 ['a', 'b', 'c', 'd', ['e', ['f']]],
                                 'g', 'h', (3, (4, (5,))),
                                 '(a, b, c, d=3, (e, (f,))=(4, (5,)), *g, **h)')

    def test_getargspec_method(self):
        class A(object):
            def m(self):
                pass
        self.assertArgSpecEquals(A.m, ['self'])

    def test_getargspec_sublistofone(self):
        # Python 2-only: tuple parameter unpacking in the signature.
        def sublistOfOne((foo,)): return 1
        self.assertArgSpecEquals(sublistOfOne, [['foo']])

        # Parenthesised single name is NOT a tuple parameter.
        def fakeSublistOfOne((foo)): return 1
        self.assertArgSpecEquals(fakeSublistOfOne, ['foo'])

    def test_classify_oldstyle(self):
        class A:
            def s(): pass
            s = staticmethod(s)

            def c(cls): pass
            c = classmethod(c)

            def getp(self): pass
            p = property(getp)

            def m(self): pass

            def m1(self): pass

            datablob = '1'

        attrs = attrs_wo_objs(A)
        self.assert_(('s', 'static method', A) in attrs, 'missing static method')
        self.assert_(('c', 'class method', A) in attrs, 'missing class method')
        self.assert_(('p', 'property', A) in attrs, 'missing property')
        self.assert_(('m', 'method', A) in attrs, 'missing plain method')
        self.assert_(('m1', 'method', A) in attrs, 'missing plain method')
        self.assert_(('datablob', 'data', A) in attrs, 'missing data')

        # Overriding m moves its defining class to B; everything else stays A.
        class B(A):
            def m(self): pass

        attrs = attrs_wo_objs(B)
        self.assert_(('s', 'static method', A) in attrs, 'missing static method')
        self.assert_(('c', 'class method', A) in attrs, 'missing class method')
        self.assert_(('p', 'property', A) in attrs, 'missing property')
        self.assert_(('m', 'method', B) in attrs, 'missing plain method')
        self.assert_(('m1', 'method', A) in attrs, 'missing plain method')
        self.assert_(('datablob', 'data', A) in attrs, 'missing data')

        # Shadowing the classmethod c with a plain method changes its kind.
        class C(A):
            def m(self): pass
            def c(self): pass

        attrs = attrs_wo_objs(C)
        self.assert_(('s', 'static method', A) in attrs, 'missing static method')
        self.assert_(('c', 'method', C) in attrs, 'missing plain method')
        self.assert_(('p', 'property', A) in attrs, 'missing property')
        self.assert_(('m', 'method', C) in attrs, 'missing plain method')
        self.assert_(('m1', 'method', A) in attrs, 'missing plain method')
        self.assert_(('datablob', 'data', A) in attrs, 'missing data')

        # Diamond: classic MRO picks B's m and (via B) A's classmethod c.
        class D(B, C):
            def m1(self): pass

        attrs = attrs_wo_objs(D)
        self.assert_(('s', 'static method', A) in attrs, 'missing static method')
        self.assert_(('c', 'class method', A) in attrs, 'missing class method')
        self.assert_(('p', 'property', A) in attrs, 'missing property')
        self.assert_(('m', 'method', B) in attrs, 'missing plain method')
        self.assert_(('m1', 'method', D) in attrs, 'missing plain method')
        self.assert_(('datablob', 'data', A) in attrs, 'missing data')

    # Repeat all that, but w/ new-style classes.
    def test_classify_newstyle(self):
        class A(object):

            def s(): pass
            s = staticmethod(s)

            def c(cls): pass
            c = classmethod(c)

            def getp(self): pass
            p = property(getp)

            def m(self): pass

            def m1(self): pass

            datablob = '1'

        attrs = attrs_wo_objs(A)
        self.assert_(('s', 'static method', A) in attrs, 'missing static method')
        self.assert_(('c', 'class method', A) in attrs, 'missing class method')
        self.assert_(('p', 'property', A) in attrs, 'missing property')
        self.assert_(('m', 'method', A) in attrs, 'missing plain method')
        self.assert_(('m1', 'method', A) in attrs, 'missing plain method')
        self.assert_(('datablob', 'data', A) in attrs, 'missing data')

        class B(A):

            def m(self): pass

        attrs = attrs_wo_objs(B)
        self.assert_(('s', 'static method', A) in attrs, 'missing static method')
        self.assert_(('c', 'class method', A) in attrs, 'missing class method')
        self.assert_(('p', 'property', A) in attrs, 'missing property')
        self.assert_(('m', 'method', B) in attrs, 'missing plain method')
        self.assert_(('m1', 'method', A) in attrs, 'missing plain method')
        self.assert_(('datablob', 'data', A) in attrs, 'missing data')

        class C(A):

            def m(self): pass
            def c(self): pass

        attrs = attrs_wo_objs(C)
        self.assert_(('s', 'static method', A) in attrs, 'missing static method')
        self.assert_(('c', 'method', C) in attrs, 'missing plain method')
        self.assert_(('p', 'property', A) in attrs, 'missing property')
        self.assert_(('m', 'method', C) in attrs, 'missing plain method')
        self.assert_(('m1', 'method', A) in attrs, 'missing plain method')
        self.assert_(('datablob', 'data', A) in attrs, 'missing data')

        class D(B, C):

            def m1(self): pass

        attrs = attrs_wo_objs(D)
        self.assert_(('s', 'static method', A) in attrs, 'missing static method')
        # Unlike the classic-class diamond above, the C3 MRO resolves c to
        # C's plain method rather than A's classmethod.
        self.assert_(('c', 'method', C) in attrs, 'missing plain method')
        self.assert_(('p', 'property', A) in attrs, 'missing property')
        self.assert_(('m', 'method', B) in attrs, 'missing plain method')
        self.assert_(('m1', 'method', D) in attrs, 'missing plain method')
        self.assert_(('datablob', 'data', A) in attrs, 'missing data')
def test_main():
    """Run all test suites in this module via test_support.run_unittest."""
    suites = (TestDecorators, TestRetrievingSourceCode, TestOneliners,
              TestBuggyCases,
              TestInterpreterStack, TestClassesAndFunctions, TestPredicates)
    run_unittest(*suites)

if __name__ == "__main__":
    test_main()
| apache-2.0 |
kenshay/ImageScript | ProgramData/Android/ADB/platform-tools/systrace/catapult/devil/devil/utils/cmd_helper_test.py | 5 | 9343 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for the cmd_helper module."""
import unittest
import subprocess
import time
from devil import devil_env
from devil.utils import cmd_helper
with devil_env.SysPath(devil_env.PYMOCK_PATH):
import mock # pylint: disable=import-error
class CmdHelperSingleQuoteTest(unittest.TestCase):
  """Unit tests for cmd_helper.SingleQuote."""

  def testSingleQuote_basic(self):
    # A plain word needs no quoting.
    self.assertEquals('hello',
                      cmd_helper.SingleQuote('hello'))

  def testSingleQuote_withSpaces(self):
    self.assertEquals("'hello world'",
                      cmd_helper.SingleQuote('hello world'))

  def testSingleQuote_withUnsafeChars(self):
    # An embedded single quote is closed, escaped, and reopened.
    self.assertEquals("""'hello'"'"'; rm -rf /'""",
                      cmd_helper.SingleQuote("hello'; rm -rf /"))

  def testSingleQuote_dontExpand(self):
    # Single quoting must prevent shell variable expansion.
    test_string = 'hello $TEST_VAR'
    cmd = 'TEST_VAR=world; echo %s' % cmd_helper.SingleQuote(test_string)
    self.assertEquals(test_string,
                      cmd_helper.GetCmdOutput(cmd, shell=True).rstrip())
class CmdHelperDoubleQuoteTest(unittest.TestCase):
  """Unit tests for cmd_helper.DoubleQuote."""

  def testDoubleQuote_basic(self):
    # A plain word needs no quoting.
    quoted = cmd_helper.DoubleQuote('hello')
    self.assertEquals('hello', quoted)

  def testDoubleQuote_withSpaces(self):
    quoted = cmd_helper.DoubleQuote('hello world')
    self.assertEquals('"hello world"', quoted)

  def testDoubleQuote_withUnsafeChars(self):
    # Embedded double quotes must be backslash-escaped.
    quoted = cmd_helper.DoubleQuote('hello"; rm -rf /')
    self.assertEquals('''"hello\\"; rm -rf /"''', quoted)

  def testSingleQuote_doExpand(self):
    # Double quoting must still allow shell variable expansion.
    test_string = 'hello $TEST_VAR'
    cmd = 'TEST_VAR=world; echo %s' % cmd_helper.DoubleQuote(test_string)
    output = cmd_helper.GetCmdOutput(cmd, shell=True).rstrip()
    self.assertEquals('hello world', output)
class CmdHelperShinkToSnippetTest(unittest.TestCase):
  """Unit tests for cmd_helper.ShrinkToSnippet.

  NOTE(review): the class name is missing the 'r' in 'Shrink'; renaming it
  is out of scope for a documentation pass.
  """

  def testShrinkToSnippet_noArgs(self):
    # ShrinkToSnippet(cmd_parts, var_name, var_value) rewrites occurrences
    # of var_value inside the command as shell references to "$var_name".
    self.assertEquals('foo',
        cmd_helper.ShrinkToSnippet(['foo'], 'a', 'bar'))
    self.assertEquals("'foo foo'",
        cmd_helper.ShrinkToSnippet(['foo foo'], 'a', 'bar'))
    self.assertEquals('"$a"\' bar\'',
        cmd_helper.ShrinkToSnippet(['foo bar'], 'a', 'foo'))
    self.assertEquals('\'foo \'"$a"',
        cmd_helper.ShrinkToSnippet(['foo bar'], 'a', 'bar'))
    self.assertEquals('foo"$a"',
        cmd_helper.ShrinkToSnippet(['foobar'], 'a', 'bar'))

  def testShrinkToSnippet_singleArg(self):
    self.assertEquals("foo ''",
        cmd_helper.ShrinkToSnippet(['foo', ''], 'a', 'bar'))
    self.assertEquals("foo foo",
        cmd_helper.ShrinkToSnippet(['foo', 'foo'], 'a', 'bar'))
    self.assertEquals('"$a" "$a"',
        cmd_helper.ShrinkToSnippet(['foo', 'foo'], 'a', 'foo'))
    self.assertEquals('foo "$a""$a"',
        cmd_helper.ShrinkToSnippet(['foo', 'barbar'], 'a', 'bar'))
    self.assertEquals('foo "$a"\' \'"$a"',
        cmd_helper.ShrinkToSnippet(['foo', 'bar bar'], 'a', 'bar'))
    self.assertEquals('foo "$a""$a"\' \'',
        cmd_helper.ShrinkToSnippet(['foo', 'barbar '], 'a', 'bar'))
    self.assertEquals('foo \' \'"$a""$a"\' \'',
        cmd_helper.ShrinkToSnippet(['foo', ' barbar '], 'a', 'bar'))
_DEFAULT = 'DEFAULT'
class _ProcessOutputEvent(object):
def __init__(self, select_fds=_DEFAULT, read_contents=None, ts=_DEFAULT):
self.select_fds = select_fds
self.read_contents = read_contents
self.ts = ts
class _MockProcess(object):
  """Context manager that patches fcntl/os.read/select.select/time.time so
  code polling a subprocess sees a scripted sequence of output events."""

  def __init__(self, output_sequence=None, return_value=0):
    # Arbitrary.
    fake_stdout_fileno = 25

    self.mock_proc = mock.MagicMock(spec=subprocess.Popen)
    self.mock_proc.stdout = mock.MagicMock()
    self.mock_proc.stdout.fileno = mock.MagicMock(
        return_value=fake_stdout_fileno)
    self.mock_proc.returncode = None
    self._return_value = return_value

    # This links the behavior of os.read, select.select, time.time, and
    # <process>.poll. The output sequence can be thought of as a list of
    # return values for select.select with corresponding return values for
    # the other calls at any time between that select call and the following
    # one. We iterate through the sequence only on calls to select.select.
    #
    # os.read is a special case, though, where we only return a given chunk
    # of data *once* after a given call to select.

    if not output_sequence:
      output_sequence = []

    # Use a leading element to make the iteration logic work.
    initial_seq_element = _ProcessOutputEvent(
        _DEFAULT, '',
        output_sequence[0].ts if output_sequence else _DEFAULT)
    output_sequence.insert(0, initial_seq_element)

    # Resolve _DEFAULT placeholders: events carrying data are selectable on
    # the fake stdout fd; events without data select nothing.
    for o in output_sequence:
      if o.select_fds == _DEFAULT:
        if o.read_contents is None:
          o.select_fds = []
        else:
          o.select_fds = [fake_stdout_fileno]
      if o.ts == _DEFAULT:
        o.ts = time.time()
    self._output_sequence = output_sequence

    self._output_seq_index = 0
    # Tracks whether each event's read_contents was already handed out once.
    self._read_flags = [False] * len(output_sequence)

    def read_side_effect(*_args, **_kwargs):
      # Hand out each event's data exactly once per select step.
      if self._read_flags[self._output_seq_index]:
        return None
      self._read_flags[self._output_seq_index] = True
      return self._output_sequence[self._output_seq_index].read_contents

    def select_side_effect(*_args, **_kwargs):
      # Advance to the next scripted event on every select() call.
      if self._output_seq_index is None:
        self._output_seq_index = 0
      else:
        self._output_seq_index += 1
      return (self._output_sequence[self._output_seq_index].select_fds,
              None, None)

    def time_side_effect(*_args, **_kwargs):
      return self._output_sequence[self._output_seq_index].ts

    def poll_side_effect(*_args, **_kwargs):
      # Report process exit only once the whole sequence is consumed.
      if self._output_seq_index >= len(self._output_sequence) - 1:
        self.mock_proc.returncode = self._return_value
      return self.mock_proc.returncode

    mock_read = mock.MagicMock(side_effect=read_side_effect)
    mock_select = mock.MagicMock(side_effect=select_side_effect)
    mock_time = mock.MagicMock(side_effect=time_side_effect)
    self.mock_proc.poll = mock.MagicMock(side_effect=poll_side_effect)

    # Set up but *do not start* the mocks.
    self._mocks = [
        mock.patch('fcntl.fcntl'),
        mock.patch('os.read', new=mock_read),
        mock.patch('select.select', new=mock_select),
        mock.patch('time.time', new=mock_time),
    ]

  def __enter__(self):
    for m in self._mocks:
      m.__enter__()
    return self.mock_proc

  def __exit__(self, exc_type, exc_val, exc_tb):
    # Unwind the patches in reverse order of entry.
    for m in reversed(self._mocks):
      m.__exit__(exc_type, exc_val, exc_tb)
class CmdHelperIterCmdOutputLinesTest(unittest.TestCase):
  """Test IterCmdOutputLines with some calls to the unix 'seq' command."""

  # This calls _IterCmdOutputLines rather than IterCmdOutputLines s.t. it
  # can mock the process.
  # pylint: disable=protected-access

  # One select step that yields two complete lines of output.
  _SIMPLE_OUTPUT_SEQUENCE = [
      _ProcessOutputEvent(read_contents='1\n2\n'),
  ]

  def testIterCmdOutputLines_success(self):
    with _MockProcess(
        output_sequence=self._SIMPLE_OUTPUT_SEQUENCE) as mock_proc:
      for num, line in enumerate(
          cmd_helper._IterCmdOutputLines(mock_proc, 'mock_proc'), 1):
        self.assertEquals(num, int(line))

  def testIterCmdOutputLines_exitStatusFail(self):
    with self.assertRaises(subprocess.CalledProcessError):
      with _MockProcess(output_sequence=self._SIMPLE_OUTPUT_SEQUENCE,
                        return_value=1) as mock_proc:
        for num, line in enumerate(
            cmd_helper._IterCmdOutputLines(mock_proc, 'mock_proc'), 1):
          self.assertEquals(num, int(line))
        # after reading all the output we get an exit status of 1

  def testIterCmdOutputLines_exitStatusIgnored(self):
    with _MockProcess(output_sequence=self._SIMPLE_OUTPUT_SEQUENCE,
                      return_value=1) as mock_proc:
      for num, line in enumerate(
          cmd_helper._IterCmdOutputLines(
              mock_proc, 'mock_proc', check_status=False),
          1):
        self.assertEquals(num, int(line))

  def testIterCmdOutputLines_exitStatusSkipped(self):
    with _MockProcess(output_sequence=self._SIMPLE_OUTPUT_SEQUENCE,
                      return_value=1) as mock_proc:
      for num, line in enumerate(
          cmd_helper._IterCmdOutputLines(mock_proc, 'mock_proc'), 1):
        self.assertEquals(num, int(line))
        # no exception will be raised because we don't attempt to read past
        # the end of the output and, thus, the status never gets checked
        if num == 2:
          break

  def testIterCmdOutputLines_delay(self):
    # An iter_timeout shorter than the gap between events should yield a
    # None "still waiting" marker before the delayed output arrives.
    output_sequence = [
        _ProcessOutputEvent(read_contents='1\n2\n', ts=1),
        _ProcessOutputEvent(read_contents=None, ts=2),
        _ProcessOutputEvent(read_contents='Awake', ts=10),
    ]
    with _MockProcess(output_sequence=output_sequence) as mock_proc:
      for num, line in enumerate(
          cmd_helper._IterCmdOutputLines(mock_proc, 'mock_proc',
                                         iter_timeout=5), 1):
        if num <= 2:
          self.assertEquals(num, int(line))
        elif num == 3:
          self.assertEquals(None, line)
        elif num == 4:
          self.assertEquals('Awake', line)
        else:
          self.fail()


if __name__ == '__main__':
  unittest.main()
| gpl-3.0 |
supriyasawant/gstudio | gnowsys-ndf/gnowsys_ndf/ndf/views/forum.py | 2 | 40460 | ''' -- imports from installed packages -- '''
import json
import datetime
''' -- imports from django -- '''
from django.shortcuts import render_to_response, render
from django.template import RequestContext
from django.template import Context
from django.template.defaultfilters import slugify
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.template.loader import get_template
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.contrib.auth.decorators import login_required
''' -- imports from django_mongokit -- '''
''' -- imports from gstudio -- '''
from gnowsys_ndf.ndf.models import GSystemType, GSystem,Node
from gnowsys_ndf.ndf.models import node_collection, triple_collection
from gnowsys_ndf.ndf.views.methods import get_forum_repl_type, forum_notification_status
from gnowsys_ndf.ndf.views.methods import set_all_urls,check_delete,get_execution_time
from gnowsys_ndf.ndf.views.methods import get_group_name_id
from gnowsys_ndf.ndf.views.notify import set_notif_val,get_userobject
from gnowsys_ndf.ndf.templatetags.ndf_tags import get_forum_twists,get_all_replies
from gnowsys_ndf.settings import GAPPS
from gnowsys_ndf.ndf.org2any import org2html
try:
from bson import ObjectId
except ImportError: # old pymongo
from pymongo.objectid import ObjectId
# ##########################################################################
# Module-level lookups resolved once at import time.
# NOTE(review): each node_collection.one() call hits the database when this
# module is first imported; if a lookup returns None (e.g. a freshly
# initialised database), later attribute access such as forum_gst._id will
# raise AttributeError.  Similarly, Site.objects.all()[0] assumes at least
# one django.contrib.sites Site row exists.
forum_gst = node_collection.one({ '_type': 'GSystemType', 'name': u"Forum" })
reply_gst = node_collection.one({ '_type':'GSystemType' , 'name': u'Reply' })
twist_gst = node_collection.one({ '_type':'GSystemType', 'name': u'Twist' })
start_time = node_collection.one({'$and':[{'_type':'AttributeType'},{'name':'start_time'}]})
end_time = node_collection.one({'$and':[{'_type':'AttributeType'},{'name':'end_time'}]})
sitename = Site.objects.all()[0].name.__str__()
app = forum_gst
@get_execution_time
def forum(request, group_id, node_id=None):
    '''
    Method to list all the available forums and to return forum-search-query result.
    '''
    # getting group id and group name
    group_name, group_id = get_group_name_id(group_id)

    # getting Forum GSystem's ObjectId
    # NOTE(review): the incoming node_id argument is unconditionally
    # overwritten here, so any URL-supplied value is ignored.
    node_id = str(forum_gst._id)

    if request.method == "POST":
        # Forum search view: case-insensitive regex match on name or tags,
        # restricted to the current group; HIDDEN forums are excluded.
        title = forum_gst.name
        search_field = request.POST['search_field']

        existing_forums = node_collection.find({'member_of': {'$all': [ObjectId(forum_gst._id)]},
                              '$or': [{'name': {'$regex': search_field, '$options': 'i'}},
                                      {'tags': {'$regex':search_field, '$options': 'i'}}],
                              'group_set': {'$all': [ObjectId(group_id)]},
                              'status':{'$nin':['HIDDEN']}
                          }).sort('last_update', -1)

        return render_to_response("ndf/forum.html",
                                  {'title': title,
                                   'searching': True, 'query': search_field,
                                   'existing_forums': existing_forums, 'groupid':group_id, 'group_id':group_id
                                  },
                                  context_instance=RequestContext(request)
        )

    elif forum_gst._id == ObjectId(node_id):
        # Forum list view.
        # NOTE(review): node_id was just set to forum_gst._id above, so this
        # condition is always true for non-POST requests.
        existing_forums = node_collection.find({
                              'member_of': {'$all': [ObjectId(node_id)]},
                              'group_set': {'$all': [ObjectId(group_id)]},
                              'status':{'$nin':['HIDDEN']}
                          }).sort('last_update', -1)

        forum_detail_list = []
        for each in existing_forums:
            # Flatten each forum document into a plain dict for the template.
            temp_forum = {}
            temp_forum['name'] = each.name
            temp_forum['created_at'] = each.created_at
            temp_forum['tags'] = each.tags
            temp_forum['member_of_names_list'] = each.member_of_names_list
            temp_forum['user_details_dict'] = each.user_details_dict
            temp_forum['html_content'] = each.html_content
            temp_forum['contributors'] = each.contributors
            temp_forum['id'] = each._id
            # Count of visible threads under this forum.
            # NOTE(review): this issues one extra query per forum (N+1);
            # consider an aggregation if the forum list grows large.
            temp_forum['threads'] = node_collection.find({
                                        '$and':[
                                            {'_type': 'GSystem'},
                                            {'prior_node': ObjectId(each._id)}
                                        ],
                                        'status': {'$nin': ['HIDDEN']}
                                    }).count()
            forum_detail_list.append(temp_forum)

        variables = RequestContext(request, {'existing_forums': forum_detail_list,'groupid': group_id, 'group_id': group_id})
        return render_to_response("ndf/forum.html",variables)
@login_required
@get_execution_time
def create_forum(request, group_id):
    '''
    Method to create forum and Retrieve all the forums
    '''
    # getting group id and group name
    group_name, group_id = get_group_name_id(group_id)

    # getting all the values from submitted form
    if request.method == "POST":
        colg = node_collection.one({'_id':ObjectId(group_id)})   # getting group ObjectId
        colf = node_collection.collection.GSystem()              # creating new/empty GSystem object

        name = unicode(request.POST.get('forum_name',"")).strip()  # forum name
        colf.name = name

        content_org = request.POST.get('content_org',"")  # forum content (org-mode markup)
        if content_org:
            colf.content_org = unicode(content_org)
            usrname = request.user.username
            filename = slugify(name) + "-" + usrname + "-"
            # Convert the org-mode markup to HTML for display.
            colf.content = org2html(content_org, file_prefix=filename)

        usrid = int(request.user.id)
        usrname = unicode(request.user.username)
        colf.created_by=usrid
        colf.modified_by = usrid
        if usrid not in colf.contributors:
            colf.contributors.append(usrid)
        colf.group_set.append(colg._id)

        # appending user group's ObjectId in group_set
        user_group_obj = node_collection.one({'$and':[{'_type':u'Group'},{'name':usrname}]})
        if user_group_obj:
            if user_group_obj._id not in colf.group_set:
                colf.group_set.append(user_group_obj._id)

        colf.member_of.append(forum_gst._id)
        ################# ADDED 14th July.Its done!
        colf.access_policy = u"PUBLIC"
        colf.url = set_all_urls(colf.member_of)

        ### currently timed forum feature is not in use ###
        # sdate=request.POST.get('sdate',"")
        # shrs= request.POST.get('shrs',"")
        # smts= request.POST.get('smts',"")
        # edate= request.POST.get('edate',"")
        # ehrs= request.POST.get('ehrs',"")
        # emts=request.POST.get('emts',"")
        # start_dt={}
        # end_dt={}
        # if not shrs:
        #   shrs=0
        # if not smts:
        #   smts=0
        # if sdate:
        #   sdate1=sdate.split("/")
        #   st_date = datetime.datetime(int(sdate1[2]),int(sdate1[0]),int(sdate1[1]),int(shrs),int(smts))
        #   start_dt[start_time.name]=st_date
        # if not ehrs:
        #   ehrs=0
        # if not emts:
        #   emts=0
        # if edate:
        #   edate1=edate.split("/")
        #   en_date= datetime.datetime(int(edate1[2]),int(edate1[0]),int(edate1[1]),int(ehrs),int(emts))
        #   end_dt[end_time.name]=en_date
        # colf.attribute_set.append(start_dt)
        # colf.attribute_set.append(end_dt)

        colf.save()

        '''Code to send notification to all members of the group except those whose notification preference is turned OFF'''
        # NOTE(review): the scheme is hard-coded to http; an https deployment
        # would generate wrong links here.
        link="http://"+sitename+"/"+str(colg._id)+"/forum/"+str(colf._id)
        for each in colg.author_set:
            bx=User.objects.filter(id=each)
            if bx:
                bx=User.objects.get(id=each)
            else:
                continue
            activity="Added forum"
            msg=usrname+" has added a forum in the group -'"+colg.name+"'\n"+"Please visit "+link+" to see the forum."
            # NOTE(review): bx is always truthy here (missing users already
            # hit the `continue` above), so this check is redundant.
            if bx:
                auth = node_collection.one({'_type': 'Author', 'name': unicode(bx.username) })
                # Respect the member's per-group notification preference.
                if colg._id and auth:
                    no_check=forum_notification_status(colg._id,auth._id)
                else:
                    no_check=True
                if no_check:
                    ret = set_notif_val(request,colg._id,msg,activity,bx)

        # returning response to ndf/forumdetails.html
        return HttpResponseRedirect(reverse('show', kwargs={'group_id':group_id,'forum_id': colf._id }))
        # variables=RequestContext(request,{'forum':colf})
        # return render_to_response("ndf/forumdetails.html",variables)

    # getting all the GSystem of forum to provide autocomplete/intellisence of forum names
    available_nodes = node_collection.find({'_type': u'GSystem', 'member_of': ObjectId(forum_gst._id),'group_set': ObjectId(group_id) })
    nodes_list = []
    for each in available_nodes:
        nodes_list.append(str((each.name).strip().lower()))

    return render_to_response("ndf/create_forum.html",{'group_id':group_id,'groupid':group_id, 'nodes_list': nodes_list},RequestContext(request))
@login_required
@get_execution_time
def edit_forum(request, group_id, forum_id):
    '''
    Edit an existing forum node and notify group members.

    On POST: updates the forum's name/content, marks the current user as a
    contributor, saves, sends a notification to every group member whose
    forum-notification preference is on, then redirects to the forum page.
    On GET: renders the edit form with existing forum names for autocomplete.
    '''
    forum = node_collection.one({'_id': ObjectId(forum_id)})
    # Resolve group_id: callers may pass a group name instead of an ObjectId.
    group_name, group_id = get_group_name_id(group_id)
    if request.method == "POST":
        colg = node_collection.one({'_id': ObjectId(group_id)})   # group node
        colf = node_collection.one({'_id': ObjectId(forum_id)})   # forum node being edited
        name = unicode(request.POST.get('forum_name', "")).strip()  # new forum name
        colf.name = name
        content_org = request.POST.get('content_org', "")  # forum description (org-mode markup)
        if content_org:
            colf.content_org = unicode(content_org)
            # Required to link temporary files with the current user who is modifying this document
            usrname = request.user.username
            filename = slugify(name) + "-" + usrname + "-"
            colf.content = org2html(content_org, file_prefix=filename)
        usrid = int(request.user.id)
        usrname = unicode(request.user.username)
        colf.modified_by = usrid
        if usrid not in colf.contributors:
            colf.contributors.append(usrid)
        colf.access_policy = u"PUBLIC"
        colf.url = set_all_urls(colf.member_of)
        colf.save()
        '''Code to send notification to all members of the group except those whose notification preference is turned OFF'''
        link = "http://"+sitename+"/"+str(colg._id)+"/forum/"+str(colf._id)
        for each in colg.author_set:
            # BUG FIX: probe with filter() first (as create_forum does) so a
            # stale user id in author_set no longer raises User.DoesNotExist.
            bx = User.objects.filter(id=each)
            if bx:
                bx = User.objects.get(id=each)
            else:
                continue
            activity = "Edited forum"
            msg = usrname+" has edited forum -" +colf.name+" in the group -'"+colg.name+"'\n"+"Please visit "+link+" to see the forum."
            if bx:
                auth = node_collection.one({'_type': 'Author', 'name': unicode(bx.username)})
                if colg._id and auth:
                    no_check = forum_notification_status(colg._id, auth._id)
                else:
                    no_check = True
                if no_check:
                    ret = set_notif_val(request, colg._id, msg, activity, bx)
        # returning response to ndf/forumdetails.html
        return HttpResponseRedirect(reverse('show', kwargs={'group_id': group_id, 'forum_id': colf._id}))
    # GET: gather existing forum names to provide autocomplete/intellisense.
    available_nodes = node_collection.find({'_type': u'GSystem', 'member_of': ObjectId(forum_gst._id), 'group_set': ObjectId(group_id)})
    nodes_list = []
    for each in available_nodes:
        nodes_list.append(str((each.name).strip().lower()))
    return render_to_response("ndf/edit_forum.html", {'group_id': group_id, 'groupid': group_id, 'nodes_list': nodes_list, 'forum': forum}, RequestContext(request))
@get_execution_time
def display_forum(request, group_id, forum_id):
    '''
    Render the forum-details page for the given forum node.

    If the id actually refers to the forum GSystemType (not an instance),
    delegate to the forum-listing view instead.
    '''
    # BUG FIX: this local was previously named `forum`, shadowing the
    # module-level `forum` view; the delegation call below then invoked the
    # node object and raised TypeError. Renamed to `forum_node`.
    forum_node = node_collection.one({'_id': ObjectId(forum_id)})
    usrname = User.objects.get(id=forum_node.created_by).username
    # Resolve group_id: accept an ObjectId string, a group name, or fall back
    # to the requesting user's Author node.
    ins_objectid = ObjectId()
    if ins_objectid.is_valid(group_id) is False:
        group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
            else:
                pass
    if forum_node._type == "GSystemType":
        # Delegate to the module-level `forum` listing view.
        # NOTE(review): assumes a view named `forum` exists at module scope —
        # confirm against this module's other definitions.
        return forum(request, group_id, forum_id)
    th_all = get_forum_twists(forum_node)
    # Number of twists (threads) under this forum.
    if th_all:
        th_count = len(list(th_all))
    else:
        th_count = 0
    variables = RequestContext(request, {
        'forum': forum_node,
        'groupid': group_id, 'group_id': group_id,
        'forum_created_by': usrname,
        'thread_count': th_count,
    })
    return render_to_response("ndf/forumdetails.html", variables)
@get_execution_time
def display_thread(request, group_id, thread_id, forum_id=None):
    '''
    Method to display thread and it's content

    Renders ndf/thread_details.html. If the thread has a forum among its
    prior nodes, that forum is shown as context; otherwise the thread node
    itself is passed in the 'forum' slot with thread=None.
    '''
    # Resolve group_id: accept an ObjectId string, a group name, or fall back
    # to the requesting user's Author node.
    ins_objectid = ObjectId()
    if ins_objectid.is_valid(group_id) is False:
        group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
        # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
            else:
                pass
    try:
        thread = node_collection.one({'_id': ObjectId(thread_id)})
        # Count all (transitive) replies under this thread.
        rep_lst = get_all_replies(thread)
        lst_rep = list(rep_lst)
        if lst_rep:
            reply_count = len(lst_rep)
        else:
            reply_count = 0
        # Look for a forum node among the thread's prior nodes; render with
        # the first one found.
        forum = ""
        for each in thread.prior_node:
            forum = node_collection.one({'$and': [{'member_of': {'$all': [forum_gst._id]}}, {'_id': ObjectId(each)}]})
            if forum:
                usrname = User.objects.get(id=forum.created_by).username
                variables = RequestContext(request,
                                           {'forum': forum,
                                            'thread': thread,
                                            'groupid': group_id,
                                            'group_id': group_id,
                                            'eachrep': thread,
                                            'user': request.user,
                                            'reply_count': reply_count,
                                            'forum_created_by': usrname
                                            })
                return render_to_response("ndf/thread_details.html", variables)
        # No parent forum found: present the thread node itself as the forum.
        usrname = User.objects.get(id=thread.created_by).username
        variables = RequestContext(request,
                                   {'forum': thread,
                                    'thread': None,
                                    'groupid': group_id,
                                    'group_id': group_id,
                                    'eachrep': thread,
                                    'user': request.user,
                                    'reply_count': reply_count,
                                    'forum_created_by': usrname
                                    })
        return render_to_response("ndf/thread_details.html", variables)
    except Exception as e:
        # NOTE(review): swallowing the exception makes the view return None,
        # which Django reports as a server error anyway — confirm intent.
        print "Exception in thread_details "+str(e)
        pass
@login_required
@get_execution_time
def create_thread(request, group_id, forum_id):
    '''
    Method to create thread

    On POST: creates a new "twist" (thread) GSystem under the forum, notifies
    group members, and renders the thread-details page. On GET: renders the
    create-thread form.
    '''
    forum = node_collection.one({'_id': ObjectId(forum_id)})
    # Names of existing non-hidden threads under this forum, passed to the
    # template (JSON) for autocomplete / duplicate-name hints.
    forum_threads = []
    exstng_reply = node_collection.find({'$and': [{'_type': 'GSystem'}, {'prior_node': ObjectId(forum._id)}], 'status': {'$nin': ['HIDDEN']}})
    exstng_reply.sort('created_at')
    for each in exstng_reply:
        forum_threads.append(each.name)
    if request.method == "POST":
        colg = node_collection.one({'_id': ObjectId(group_id)})  # group node
        name = unicode(request.POST.get('thread_name', ""))
        content_org = request.POST.get('content_org', "")
        # Build the new thread GSystem linked under the forum.
        colrep = node_collection.collection.GSystem()
        colrep.member_of.append(twist_gst._id)
        colrep.access_policy = u"PUBLIC"
        colrep.url = set_all_urls(colrep.member_of)
        colrep.prior_node.append(forum._id)
        colrep.name = name
        if content_org:
            colrep.content_org = unicode(content_org)
            # Required to link temporary files with the current user who is modifying this document
            usrname = request.user.username
            filename = slugify(name) + "-" + usrname + "-"
            colrep.content = org2html(content_org, file_prefix=filename)
            # NOTE(review): debug print left in the production code path.
            print "content=", colrep.content
        usrid = int(request.user.id)
        colrep.created_by = usrid
        colrep.modified_by = usrid
        if usrid not in colrep.contributors:
            colrep.contributors.append(usrid)
        colrep.group_set.append(colg._id)
        colrep.save()
        '''Code to send notification to all members of the group except those whose notification preference is turned OFF'''
        link = "http://"+sitename+"/"+str(colg._id)+"/forum/thread/"+str(colrep._id)
        for each in colg.author_set:
            # Probe with filter() first so stale ids in author_set are skipped.
            bx = User.objects.filter(id=each)
            if bx:
                bx = User.objects.get(id=each)
            else:
                continue
            activity = "Added thread"
            msg = request.user.username+" has added a thread in the forum " + forum.name + " in the group -'" + colg.name+"'\n"+"Please visit "+link+" to see the thread."
            if bx:
                auth = node_collection.one({'_type': 'Author', 'name': unicode(bx.username)})
                if colg._id and auth:
                    no_check = forum_notification_status(colg._id, auth._id)
                else:
                    no_check = True
                if no_check:
                    ret = set_notif_val(request, colg._id, msg, activity, bx)
        variables = RequestContext(request,
                                   {'forum': forum,
                                    'thread': colrep,
                                    'eachrep': colrep,
                                    'groupid': group_id,
                                    'group_id': group_id,
                                    'user': request.user,
                                    'reply_count': 0,
                                    'forum_threads': json.dumps(forum_threads),
                                    'forum_created_by': User.objects.get(id=forum.created_by).username
                                    })
        return render_to_response("ndf/thread_details.html", variables)
    else:
        return render_to_response("ndf/create_thread.html",
                                  {'group_id': group_id,
                                   'groupid': group_id,
                                   'forum': forum,
                                   'forum_threads': json.dumps(forum_threads),
                                   'forum_created_by': User.objects.get(id=forum.created_by).username
                                   },
                                  RequestContext(request))
@login_required
@get_execution_time
def add_node(request, group_id):
    '''
    AJAX endpoint: create a "Twist" (thread) or a "Reply" under a forum.

    POST params: reply (org content), node ("Twist"/"Reply"), thread (thread
    id), forumid, supnode (parent node id), twistname. Returns a rendered
    refresh fragment on success, "failure" when the parent node is missing,
    or the exception text on error (original behavior, preserved).
    '''
    # Resolve group_id: callers may pass a group name instead of an ObjectId.
    group_name, group_id = get_group_name_id(group_id)
    try:
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
        content_org = request.POST.get("reply", "")
        node = request.POST.get("node", "")
        thread = request.POST.get("thread", "")      # thread _id
        forumid = request.POST.get("forumid", "")    # forum _id
        sup_id = request.POST.get("supnode", "")     # _id of the parent node
        tw_name = request.POST.get("twistname", "")
        forumobj = ""
        colg = node_collection.one({'_id': ObjectId(group_id)})
        if forumid:
            forumobj = node_collection.one({"_id": ObjectId(forumid)})
        sup = node_collection.one({"_id": ObjectId(sup_id)})
        if not sup:
            return HttpResponse("failure")
        colrep = node_collection.collection.GSystem()
        if node == "Twist":
            name = tw_name
            colrep.member_of.append(twist_gst._id)
        elif node == "Reply":
            name = unicode("Reply of:"+str(sup._id))
            colrep.member_of.append(reply_gst._id)
        # BUG FIX: this setup block used to appear twice (once inside the
        # "Reply" branch and once here), so replies got sup._id appended to
        # prior_node twice. It now runs exactly once for both node kinds.
        colrep.prior_node.append(sup._id)
        colrep.name = name
        if content_org:
            colrep.content_org = unicode(content_org)
            # Required to link temporary files with the current user who is modifying this document
            usrname = request.user.username
            filename = slugify(name) + "-" + usrname + "-"
            colrep.content = org2html(content_org, file_prefix=filename)
        usrid = int(request.user.id)
        colrep.created_by = usrid
        colrep.modified_by = usrid
        if usrid not in colrep.contributors:
            colrep.contributors.append(usrid)
        colrep.group_set.append(colg._id)
        colrep.save()
        # Build the notification message pieces for the created node.
        groupname = colg.name
        if node == "Twist":
            url = "http://"+sitename+"/"+str(group_id)+"/forum/thread/"+str(colrep._id)
            activity = request.user.username+" -added a thread '"
            prefix = "' on the forum '"+forumobj.name+"'"
            nodename = name
        if node == "Reply":
            threadobj = node_collection.one({"_id": ObjectId(thread)})
            url = "http://"+sitename+"/"+str(group_id)+"/forum/thread/"+str(threadobj._id)
            activity = request.user.username+" -added a reply "
            prefix = " on the thread '"+threadobj.name+"' on the forum '"+forumobj.name+"'"
            nodename = ""
        link = url
        # Notify every member (except the owner) whose preference allows it.
        for each in colg.author_set:
            if each != colg.created_by:
                bx = User.objects.get(id=each)
                msg = activity+"-"+nodename+prefix+" in the group '"+ groupname +"'\n"+"Please visit "+link+" to see the updated page"
                if bx:
                    no_check = forum_notification_status(group_id, auth._id)
                    if no_check:
                        ret = set_notif_val(request, group_id, msg, activity, bx)
        # Separate message for the group owner.
        bx = User.objects.get(id=colg.created_by)
        msg = activity+"-"+nodename+prefix+" in the group '"+groupname+"' created by you"+"\n"+"Please visit "+link+" to see the updated page"
        if bx:
            no_check = forum_notification_status(group_id, auth._id)
            if no_check:
                ret = set_notif_val(request, group_id, msg, activity, bx)
        if node == "Reply":
            threadobj = node_collection.one({"_id": ObjectId(thread)})
            variables = RequestContext(request, {'thread': threadobj, 'user': request.user, 'forum': forumobj, 'groupid': group_id, 'group_id': group_id})
            return render_to_response("ndf/refreshtwist.html", variables)
        else:
            templ = get_template('ndf/refreshthread.html')
            html = templ.render(Context({'forum': forumobj, 'user': request.user, 'groupid': group_id, 'group_id': group_id}))
            return HttpResponse(html)
    except Exception as e:
        # Original behavior preserved: report the exception text to the client.
        return HttpResponse(""+str(e))
    return HttpResponse("success")
@get_execution_time
def get_profile_pic(username):
    """Return the File node holding *username*'s profile picture, or "" if none.

    Looks up the user's Author node, then the newest 'has_profile_pic'
    GRelation triple pointing from it, and resolves the related File node.
    """
    author_node = node_collection.one({'_type': 'Author', 'name': unicode(username)})
    pic_relation_type = node_collection.one({'_type': u'RelationType', 'name': u'has_profile_pic'})
    triples = db[Triple.collection_name]
    relations = triples.Triple.find({
        '_type': 'GRelation',
        'subject': ObjectId(author_node._id),
        'relation_type': pic_relation_type.get_dbref(),
    })
    total = relations.count()
    if not total:
        return ""
    # Use the last relation in cursor order: the most recently set picture.
    latest_file_id = relations[total - 1].right_subject
    return node_collection.one({'_type': 'File', '_id': ObjectId(latest_file_id)})
@login_required
@check_delete
@get_execution_time
def delete_forum(request, group_id, node_id, relns=None):
    """ Changing status of forum to HIDDEN

    Soft-deletes the forum (status -> HIDDEN), notifies all group members
    and the group owner, then redirects to the forum listing.
    """
    # Resolve group_id: accept an ObjectId string, a group name, or fall back
    # to the requesting user's Author node.
    ins_objectid = ObjectId()
    if ins_objectid.is_valid(group_id) is False:
        group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
            else:
                pass
    # Soft delete: the document is kept, only its status flips to HIDDEN.
    op = node_collection.collection.update({'_id': ObjectId(node_id)}, {'$set': {'status': u"HIDDEN"}})
    node = node_collection.one({'_id': ObjectId(node_id)})
    # send notifications to all group members
    colg = node_collection.one({'_id': ObjectId(group_id)})
    for each in colg.author_set:
        if each != colg.created_by:
            bx = get_userobject(each)
            if bx:
                activity = request.user.username+" -deleted forum "
                msg = activity+"-"+node.name+"- in the group '"+ colg.name
                # NOTE(review): the notification-preference check below is
                # commented out, so every member is notified unconditionally.
                # no_check=forum_notification_status(group_id,auth._id)
                # if no_check:
                ret = set_notif_val(request, group_id, msg, activity, bx)
    # Separate message for the group owner.
    activity = request.user.username+" -deleted forum "
    bx = get_userobject(colg.created_by)
    if bx:
        msg = activity+"-"+node.name+"- in the group '"+colg.name+"' created by you"
        # no_check=forum_notification_status(group_id,auth._id)
        # if no_check:
        ret = set_notif_val(request, group_id, msg, activity, bx)
    return HttpResponseRedirect(reverse('forum', kwargs={'group_id': group_id}))
@login_required
@get_execution_time
def delete_thread(request, group_id, forum_id, node_id):
    """ Changing status of thread to HIDDEN

    Soft-deletes the thread, notifies members and the owner, then re-renders
    the parent forum's details page.
    """
    ins_objectid = ObjectId()
    if ins_objectid.is_valid(node_id):
        thread = node_collection.one({'_id': ObjectId(node_id)})
    else:
        # NOTE(review): a bare return here makes the view respond with None
        # (HTTP 500) for an invalid node id — confirm intended.
        return
    forum = node_collection.one({'_id': ObjectId(forum_id)})
    # Resolve group_id: accept an ObjectId string, a group name, or fall back
    # to the requesting user's Author node.
    if ins_objectid.is_valid(group_id) is False:
        group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
            else:
                pass
    # Soft delete: flip the thread's status to HIDDEN.
    op = node_collection.collection.update({'_id': ObjectId(node_id)}, {'$set': {'status': u"HIDDEN"}})
    node = node_collection.one({'_id': ObjectId(node_id)})
    # Remaining visible thread names under the forum.
    # NOTE(review): forum_threads is built but never used in the response.
    forum_threads = []
    exstng_reply = node_collection.find({'$and': [{'_type': 'GSystem'}, {'prior_node': ObjectId(forum._id)}], 'status': {'$nin': ['HIDDEN']}})
    exstng_reply.sort('created_at')
    forum_node = node_collection.one({'_id': ObjectId(forum_id)})
    for each in exstng_reply:
        forum_threads.append(each.name)
    # send notifications to all group members
    colg = node_collection.one({'_id': ObjectId(group_id)})
    for each in colg.author_set:
        if each != colg.created_by:
            bx = get_userobject(each)
            if bx:
                activity = request.user.username+" -deleted thread "
                prefix = " in the forum "+forum_node.name
                link = "http://"+sitename+"/"+str(colg._id)+"/forum/"+str(forum_node._id)
                msg = activity+"-"+node.name+prefix+"- in the group '"+colg.name+"' created by you."+"'\n"+"Please visit "+link+" to see the forum."
                # NOTE(review): preference check is commented out — every
                # member is notified unconditionally.
                # no_check=forum_notification_status(group_id,auth._id)
                # if no_check:
                ret = set_notif_val(request, group_id, msg, activity, bx)
    # Separate message for the group owner.
    activity = request.user.username+" -deleted thread "
    prefix = " in the forum "+forum_node.name
    bx = get_userobject(colg.created_by)
    if bx:
        link = "http://"+sitename+"/"+str(colg._id)+"/forum/"+str(forum_node._id)
        msg = activity+"-"+node.name+prefix+"- in the group '"+colg.name+"' created by you."+"'\n"+"Please visit "+link+" to see the forum."
        # no_check=forum_notification_status(group_id,auth._id)
        # if no_check:
        ret = set_notif_val(request, group_id, msg, activity, bx)
    # send notification code ends here
    variables = RequestContext(request, {
        'forum': forum,
        'groupid': group_id, 'group_id': group_id,
        'forum_created_by': User.objects.get(id=forum.created_by).username
    })
    return render_to_response("ndf/forumdetails.html", variables)
@login_required
@get_execution_time
def edit_thread(request, group_id, forum_id, thread_id):
    """Edit a thread's name/content and notify group members.

    On POST: saves the updated thread and redirects to the thread page.
    On GET: renders the edit form.
    """
    # Resolve group_id: accept an ObjectId string, a group name, or fall back
    # to the requesting user's Author node.
    ins_objectid = ObjectId()
    if ins_objectid.is_valid(group_id) is False:
        group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
            else:
                pass
    forum = node_collection.one({'_id': ObjectId(forum_id)})
    thread = node_collection.one({'_id': ObjectId(thread_id)})
    # Sibling thread names under this forum, stashed in the session (JSON)
    # for template autocomplete.
    exstng_reply = node_collection.find({'$and': [{'_type': 'GSystem'}, {'prior_node': ObjectId(forum._id)}]})
    nodes = []
    exstng_reply.sort('created_at')
    for each in exstng_reply:
        nodes.append(each.name)
    request.session['nodes'] = json.dumps(nodes)
    colg = node_collection.one({'_id': ObjectId(group_id)})
    if request.method == 'POST':
        name = unicode(request.POST.get('thread_name', ""))  # thread name
        thread.name = name
        content_org = request.POST.get('content_org', "")  # thread content
        if content_org:
            thread.content_org = unicode(content_org)
            # Required to link temporary files with the current user.
            usrname = request.user.username
            filename = slugify(name) + "-" + usrname + "-"
            thread.content = org2html(content_org, file_prefix=filename)
        thread.save()
        # Notify group members (except the owner), then the owner separately.
        link = "http://"+sitename+"/"+str(colg._id)+"/forum/thread/"+str(thread._id)
        for each in colg.author_set:
            if each != colg.created_by:
                bx = get_userobject(each)
                if bx:
                    msg = request.user.username+" has edited thread- "+thread.name+"- in the forum " + forum.name + " in the group -'" + colg.name+"'\n"+"Please visit "+link+" to see the thread."
                    activity = "Edited thread"
                    # NOTE(review): the notification-preference check is
                    # commented out here, so members are notified regardless
                    # of their forum setting.
                    ret = set_notif_val(request, colg._id, msg, activity, bx)
        activity = request.user.username+" edited thread -"
        bx = get_userobject(colg.created_by)
        prefix = "-in the forum -"+forum.name
        if bx:
            msg = activity+"-"+thread.name+prefix+" in the group '"+colg.name+"' created by you"+"\n"+"Please visit "+link+" to see the thread"
            ret = set_notif_val(request, group_id, msg, activity, bx)
        # NOTE(review): `variables` below is built but unused (redirect follows).
        variables = RequestContext(request, {'group_id': group_id, 'thread_id': thread._id, 'nodes': json.dumps(nodes)})
        return HttpResponseRedirect(reverse('thread', kwargs={'group_id': group_id, 'thread_id': thread._id}))
    else:
        return render_to_response("ndf/edit_thread.html",
                                  {'group_id': group_id,
                                   'groupid': group_id,
                                   'forum': forum,
                                   'thread': thread,
                                   'forum_created_by': User.objects.get(id=forum.created_by).username
                                   },
                                  RequestContext(request))
@login_required
@get_execution_time
def delete_reply(request, group_id, forum_id, thread_id, node_id):
    """Soft-delete a reply (status -> HIDDEN), notify members, redirect to thread."""
    # Resolve group_id: callers may pass a group name instead of an ObjectId.
    group_name, group_id = get_group_name_id(group_id)
    activity = ""
    # Soft delete: the reply document is kept, only its status flips.
    op = node_collection.collection.update({'_id': ObjectId(node_id)}, {'$set': {'status': u"HIDDEN"}})
    replyobj = node_collection.one({'_id': ObjectId(node_id)})
    forumobj = node_collection.one({"_id": ObjectId(forum_id)})
    threadobj = node_collection.one({"_id": ObjectId(thread_id)})
    # notifications to all group members
    colg = node_collection.one({'_id': ObjectId(group_id)})
    link = "http://"+sitename+"/"+str(colg._id)+"/forum/thread/"+str(threadobj._id)
    for each in colg.author_set:
        if each != colg.created_by:
            bx = get_userobject(each)
            if bx:
                msg = request.user.username+" has deleted reply- "+replyobj.content_org+"- in the thread " + threadobj.name + " in the group -'" + colg.name+"'\n"+"Please visit "+link+" to see the thread."
                activity = "Deleted reply"
                # NOTE(review): the notification-preference check is commented
                # out, so every member is notified unconditionally.
                ret = set_notif_val(request, colg._id, msg, activity, bx)
    # Separate message for the group owner.
    prefix = "-in the forum -"+forumobj.name
    msg = request.user.username+" has deleted reply- "+replyobj.content_org+"- in the thread " + threadobj.name +prefix+ " in the group -'" + colg.name+"' created by you"+"\n Please visit "+link+" to see the thread."
    bx = get_userobject(colg.created_by)
    if bx:
        ret = set_notif_val(request, group_id, msg, activity, bx)
    # NOTE(review): `variables` is built but unused (redirect follows).
    variables = RequestContext(request, {'thread': threadobj, 'user': request.user, 'forum': forumobj, 'groupid': group_id, 'group_id': group_id})
    return HttpResponseRedirect(reverse('thread', kwargs={'group_id': group_id, 'thread_id': threadobj._id}))
    # return render_to_response("ndf/replytwistrep.html",variables)
| agpl-3.0 |
kevin-coder/tensorflow-fork | tensorflow/python/keras/layers/normalization_test.py | 1 | 22900 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for normalization layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util as tf_test_util
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.layers import normalization
from tensorflow.python.keras.layers import normalization_v2
from tensorflow.python.keras.mixed_precision.experimental import policy
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
class BatchNormalizationTest(keras_parameterized.TestCase):
  """Behavioral tests for `keras.layers.BatchNormalization`."""

  @keras_parameterized.run_all_keras_modes
  def test_basic_batchnorm(self):
    """Layer builds and runs under common kwarg combinations."""
    testing_utils.layer_test(
        keras.layers.BatchNormalization,
        kwargs={
            'momentum': 0.9,
            'epsilon': 0.1,
            'gamma_regularizer': keras.regularizers.l2(0.01),
            'beta_regularizer': keras.regularizers.l2(0.01)
        },
        input_shape=(3, 4, 2))
    testing_utils.layer_test(
        keras.layers.BatchNormalization,
        kwargs={
            'gamma_initializer': 'ones',
            'beta_initializer': 'ones',
            'moving_mean_initializer': 'zeros',
            'moving_variance_initializer': 'ones'
        },
        input_shape=(3, 4, 2))
    testing_utils.layer_test(
        keras.layers.BatchNormalization,
        kwargs={'scale': False,
                'center': False},
        input_shape=(3, 3))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_batchnorm_weights(self):
    """Weight counts: scale/center off -> only the 2 moving statistics."""
    layer = keras.layers.BatchNormalization(scale=False, center=False)
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.trainable_weights), 0)
    self.assertEqual(len(layer.weights), 2)

    # Defaults: gamma + beta trainable, plus the 2 moving statistics.
    layer = keras.layers.BatchNormalization()
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.trainable_weights), 2)
    self.assertEqual(len(layer.weights), 4)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_batchnorm_regularization(self):
    """Regularizers create one loss each; constraints attach to gamma/beta."""
    layer = keras.layers.BatchNormalization(
        gamma_regularizer='l1', beta_regularizer='l1')
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.losses), 2)
    max_norm = keras.constraints.max_norm
    layer = keras.layers.BatchNormalization(
        gamma_constraint=max_norm, beta_constraint=max_norm)
    layer.build((None, 3, 4))
    self.assertEqual(layer.gamma.constraint, max_norm)
    self.assertEqual(layer.beta.constraint, max_norm)

  @keras_parameterized.run_all_keras_modes
  def test_batchnorm_convnet(self):
    """Channels-first (axis=1) normalization; fused path needs a GPU."""
    if test.is_gpu_available(cuda_only=True):
      with self.session(use_gpu=True):
        model = keras.models.Sequential()
        norm = keras.layers.BatchNormalization(
            axis=1, input_shape=(3, 4, 4), momentum=0.8)
        model.add(norm)
        model.compile(loss='mse',
                      optimizer=gradient_descent.GradientDescentOptimizer(0.01),
                      run_eagerly=testing_utils.should_run_eagerly())

        # centered on 5.0, variance 10.0
        x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
        model.fit(x, x, epochs=4, verbose=0)
        out = model.predict(x)
        # Undo beta/gamma so the output should be ~N(0, 1) per channel.
        out -= np.reshape(keras.backend.eval(norm.beta), (1, 3, 1, 1))
        out /= np.reshape(keras.backend.eval(norm.gamma), (1, 3, 1, 1))

        np.testing.assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1)
        np.testing.assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1)

  @keras_parameterized.run_all_keras_modes
  def test_batchnorm_convnet_channel_last(self):
    """Channels-last (axis=-1) normalization on CPU."""
    model = keras.models.Sequential()
    norm = keras.layers.BatchNormalization(
        axis=-1, input_shape=(4, 4, 3), momentum=0.8)
    model.add(norm)
    model.compile(loss='mse',
                  optimizer=gradient_descent.GradientDescentOptimizer(0.01),
                  run_eagerly=testing_utils.should_run_eagerly())

    # centered on 5.0, variance 10.0
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3))
    model.fit(x, x, epochs=4, verbose=0)
    out = model.predict(x)
    # Undo beta/gamma so the output should be ~N(0, 1) per channel.
    out -= np.reshape(keras.backend.eval(norm.beta), (1, 1, 1, 3))
    out /= np.reshape(keras.backend.eval(norm.gamma), (1, 1, 1, 3))

    np.testing.assert_allclose(np.mean(out, axis=(0, 1, 2)), 0.0, atol=1e-1)
    np.testing.assert_allclose(np.std(out, axis=(0, 1, 2)), 1.0, atol=1e-1)

  @keras_parameterized.run_all_keras_modes
  def test_batchnorm_correctness(self):
    """Float32 correctness for both the V1 and V2 layer implementations."""
    _run_batchnorm_correctness_test(
        normalization.BatchNormalization, dtype='float32')
    _run_batchnorm_correctness_test(
        normalization_v2.BatchNormalization, dtype='float32')

  @keras_parameterized.run_all_keras_modes
  def test_batchnorm_mixed_precision(self):
    """Float16 inputs still normalize correctly (loss cast handled in helper)."""
    _run_batchnorm_correctness_test(
        normalization.BatchNormalization, dtype='float16')
    _run_batchnorm_correctness_test(
        normalization_v2.BatchNormalization, dtype='float16')

  @tf_test_util.run_in_graph_and_eager_modes
  def test_batchnorm_policy(self):
    """Under 'infer_float32_vars': float16 output, float32 beta/gamma vars."""
    norm = keras.layers.BatchNormalization(
        axis=-1,
        input_shape=(4, 4, 3),
        momentum=0.8,
        dtype=policy.Policy('infer_float32_vars'))
    x = np.random.normal(size=(10, 4, 4, 3)).astype('float16')
    y = norm(x)
    self.assertEqual(y.dtype, 'float16')
    self.assertEqual(norm.beta.dtype.base_dtype, 'float32')
    self.assertEqual(norm.gamma.dtype.base_dtype, 'float32')
class BatchNormalizationV1Test(test.TestCase):
  """Resolution of the `fused` attribute on the V1 BatchNormalization layer."""

  @tf_test_util.run_in_graph_and_eager_modes
  def test_v1_fused_attribute(self):
    # Default: V1 resolves to fused=True after being called on a 4D input.
    norm = normalization.BatchNormalization()
    inp = keras.layers.Input((4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, True)

    # Explicit fused=False is preserved through build/call.
    norm = normalization.BatchNormalization(fused=False)
    self.assertEqual(norm.fused, False)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, False)

    # virtual_batch_size: fused starts True but falls back to False on call.
    norm = normalization.BatchNormalization(virtual_batch_size=2)
    self.assertEqual(norm.fused, True)
    inp = keras.layers.Input(shape=(2, 2, 2))
    norm(inp)
    self.assertEqual(norm.fused, False)
class BatchNormalizationV2Test(keras_parameterized.TestCase):
  """V2-specific behavior: `fused` defaults, fallbacks, and invalid combos."""

  @keras_parameterized.run_all_keras_modes
  def test_basic_batchnorm_v2(self):
    """V2 layer runs with explicit fused=True (4D) and fused=None (3D)."""
    testing_utils.layer_test(
        normalization_v2.BatchNormalization,
        kwargs={'fused': True},
        input_shape=(3, 3, 3, 3))
    testing_utils.layer_test(
        normalization_v2.BatchNormalization,
        kwargs={'fused': None},
        input_shape=(3, 3, 3))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_v2_fused_attribute(self):
    # Default is None until the layer sees its input; 4D input -> fused=True.
    norm = normalization_v2.BatchNormalization()
    self.assertEqual(norm.fused, None)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, True)

    # Non-4D input -> fused resolves to False.
    norm = normalization_v2.BatchNormalization()
    self.assertEqual(norm.fused, None)
    inp = keras.layers.Input(shape=(4, 4))
    norm(inp)
    self.assertEqual(norm.fused, False)

    # virtual_batch_size disables fusion immediately (unlike V1).
    norm = normalization_v2.BatchNormalization(virtual_batch_size=2)
    self.assertEqual(norm.fused, False)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, False)

    # Explicit fused=False is preserved.
    norm = normalization_v2.BatchNormalization(fused=False)
    self.assertEqual(norm.fused, False)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, False)

    # fused=True with a fusable axis spec stays fused.
    norm = normalization_v2.BatchNormalization(fused=True, axis=[3])
    self.assertEqual(norm.fused, True)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, True)

    # Constructor-time rejections: options incompatible with fused=True.
    with self.assertRaisesRegexp(ValueError, 'fused.*renorm'):
      normalization_v2.BatchNormalization(fused=True, renorm=True)

    with self.assertRaisesRegexp(ValueError, 'fused.*when axis is 1 or 3'):
      normalization_v2.BatchNormalization(fused=True, axis=2)

    with self.assertRaisesRegexp(ValueError, 'fused.*when axis is 1 or 3'):
      normalization_v2.BatchNormalization(fused=True, axis=[1, 3])

    with self.assertRaisesRegexp(ValueError, 'fused.*virtual_batch_size'):
      normalization_v2.BatchNormalization(fused=True, virtual_batch_size=2)

    with self.assertRaisesRegexp(ValueError, 'fused.*adjustment'):
      normalization_v2.BatchNormalization(fused=True,
                                          adjustment=lambda _: (1, 0))

    # fused=True called on a non-4D input raises at call time.
    norm = normalization_v2.BatchNormalization(fused=True)
    self.assertEqual(norm.fused, True)
    inp = keras.layers.Input(shape=(4, 4))
    with self.assertRaisesRegexp(ValueError, '4D input tensors'):
      norm(inp)
def _run_batchnorm_correctness_test(layer, dtype='float32', fused=False):
  """Trains a single-BatchNormalization model on N(5, 10) data and checks
  that, after undoing the learned beta/gamma affine part, the output is
  approximately zero-mean with unit standard deviation.
  """
  net = keras.models.Sequential()
  net.add(keras.Input(shape=(2, 2, 2), dtype=dtype))
  bn_layer = layer(momentum=0.8, fused=fused)
  net.add(bn_layer)
  if dtype == 'float16':
    # Keras models require float32 losses.
    net.add(keras.layers.Lambda(lambda x: keras.backend.cast(x, 'float32')))
  net.compile(loss='mse',
              optimizer=gradient_descent.GradientDescentOptimizer(0.01),
              run_eagerly=testing_utils.should_run_eagerly())

  # Training data centered on 5.0 with standard deviation 10.0.
  samples = np.random.normal(
      loc=5.0, scale=10.0, size=(1000, 2, 2, 2)).astype(dtype)
  net.fit(samples, samples, epochs=4, verbose=0)

  predictions = net.predict(samples)
  # Strip the affine transform so only the normalization remains.
  predictions -= keras.backend.eval(bn_layer.beta)
  predictions /= keras.backend.eval(bn_layer.gamma)
  np.testing.assert_allclose(predictions.mean(), 0.0, atol=1e-1)
  np.testing.assert_allclose(predictions.std(), 1.0, atol=1e-1)
@parameterized.parameters(
    [normalization.BatchNormalization, normalization_v2.BatchNormalization])
class NormalizationLayersGraphModeOnlyTest(
    test.TestCase, parameterized.TestCase):
  """Graph-mode tests run against both V1 and V2 BatchNormalization."""

  def test_shared_batchnorm(self, layer):
    """Test that a BN layer can be shared across different data streams."""
    with self.cached_session():
      # Test single layer reuse
      bn = layer()
      x1 = keras.layers.Input(shape=(10,))
      _ = bn(x1)

      x2 = keras.layers.Input(shape=(10,))
      y2 = bn(x2)

      x = np.random.normal(loc=5.0, scale=10.0, size=(2, 10))
      model = keras.models.Model(x2, y2)

      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      model.train_on_batch(x, x)

      # The layer was applied twice, so it owns 4 update ops (2 per call);
      # the model only collects the 2 relevant to its own input x2.
      self.assertEqual(len(bn.updates), 4)
      self.assertEqual(len(model.updates), 2)
      self.assertEqual(len(model.get_updates_for(x2)), 2)

      # Test model-level reuse
      x3 = keras.layers.Input(shape=(10,))
      y3 = model(x3)
      new_model = keras.models.Model(x3, y3, name='new_model')

      self.assertEqual(len(new_model.updates), 2)
      self.assertEqual(len(model.updates), 4)
      self.assertEqual(len(new_model.get_updates_for(x3)), 2)
      new_model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      new_model.train_on_batch(x, x)

  def test_that_trainable_disables_updates(self, layer):
    """Setting trainable=False must freeze both weights and BN statistics."""
    with self.cached_session():
      val_a = np.random.random((10, 4))
      val_out = np.random.random((10, 4))

      a = keras.layers.Input(shape=(4,))
      layer = layer(input_shape=(4,))
      b = layer(a)
      model = keras.models.Model(a, b)

      model.trainable = False
      assert not model.updates

      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      assert not model.updates

      # Frozen model: training must not change predictions.
      x1 = model.predict(val_a)
      model.train_on_batch(val_a, val_out)
      x2 = model.predict(val_a)
      self.assertAllClose(x1, x2, atol=1e-7)

      # Unfrozen model: training must change predictions.
      model.trainable = True
      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      assert model.updates

      model.train_on_batch(val_a, val_out)
      x2 = model.predict(val_a)
      assert np.abs(np.sum(x1 - x2)) > 1e-5

      # Freezing just the layer has the same effect as freezing the model.
      layer.trainable = False
      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      assert not model.updates

      x1 = model.predict(val_a)
      model.train_on_batch(val_a, val_out)
      x2 = model.predict(val_a)
      self.assertAllClose(x1, x2, atol=1e-7)

  @tf_test_util.run_deprecated_v1
  def test_batchnorm_trainable(self, layer):
    """Tests that batchnorm layer is trainable when learning phase is enabled.

    Computes mean and std for current inputs then
    applies batch normalization using them.

    Args:
      layer: Either V1 or V2 of BatchNormalization layer.
    """
    # TODO(fchollet): enable in all execution modes when issue with
    # learning phase setting is resolved.
    with self.cached_session():
      bn_mean = 0.5
      bn_std = 10.
      val_a = np.expand_dims(np.arange(10.), axis=1)

      def get_model(bn_mean, bn_std):
        # Weight order: gamma, beta, moving_mean, moving_variance.
        inp = keras.layers.Input(shape=(1,))
        x = layer()(inp)
        model1 = keras.models.Model(inp, x)
        model1.set_weights([
            np.array([1.]),
            np.array([0.]),
            np.array([bn_mean]),
            np.array([bn_std**2])
        ])
        return model1

      # Simulates training-mode with trainable layer.
      # Should use mini-batch statistics.
      with keras.backend.learning_phase_scope(1):
        model = get_model(bn_mean, bn_std)
        model.compile(loss='mse', optimizer='rmsprop')
        out = model.predict(val_a)
        self.assertAllClose(
            (val_a - np.mean(val_a)) / np.std(val_a), out, atol=1e-3)
def _run_layernorm_correctness_test(layer, dtype='float32'):
  """Fits a single-LayerNormalization model on N(5, 10) data and verifies
  the output is approximately N(0, 1) once beta/gamma are factored out.
  """
  ln_layer = layer(input_shape=(2, 2, 2))
  net = keras.models.Sequential([ln_layer])
  net.compile(loss='mse',
              optimizer=gradient_descent.GradientDescentOptimizer(0.01),
              run_eagerly=testing_utils.should_run_eagerly())

  # Training data centered on 5.0 with standard deviation 10.0.
  samples = np.random.normal(
      loc=5.0, scale=10.0, size=(1000, 2, 2, 2)).astype(dtype)
  net.fit(samples, samples, epochs=4, verbose=0)

  predictions = net.predict(samples)
  # Strip the learned affine transform, leaving only the normalization.
  predictions -= keras.backend.eval(ln_layer.beta)
  predictions /= keras.backend.eval(ln_layer.gamma)
  np.testing.assert_allclose(predictions.mean(), 0.0, atol=1e-1)
  np.testing.assert_allclose(predictions.std(), 1.0, atol=1e-1)
class LayerNormalizationTest(keras_parameterized.TestCase):
  """Behavioral and numerical-correctness tests for LayerNormalization."""

  @keras_parameterized.run_all_keras_modes
  def test_basic_layernorm(self):
    # Layer must build/run with regularizers, custom initializers, and with
    # scale/center disabled.
    testing_utils.layer_test(
        keras.layers.LayerNormalization,
        kwargs={
            'gamma_regularizer': keras.regularizers.l2(0.01),
            'beta_regularizer': keras.regularizers.l2(0.01)
        },
        input_shape=(3, 4, 2))
    testing_utils.layer_test(
        keras.layers.LayerNormalization,
        kwargs={
            'gamma_initializer': 'ones',
            'beta_initializer': 'ones',
        },
        input_shape=(3, 4, 2))
    testing_utils.layer_test(
        keras.layers.LayerNormalization,
        kwargs={'scale': False,
                'center': False},
        input_shape=(3, 3))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_layernorm_weights(self):
    # Without scale/center there are no weights at all; with defaults the
    # layer owns exactly gamma and beta, both trainable.
    layer = keras.layers.LayerNormalization(scale=False, center=False)
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.trainable_weights), 0)
    self.assertEqual(len(layer.weights), 0)

    layer = keras.layers.LayerNormalization()
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.trainable_weights), 2)
    self.assertEqual(len(layer.weights), 2)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_layernorm_regularization(self):
    # One regularization loss per regularized weight (gamma and beta).
    layer = keras.layers.LayerNormalization(
        gamma_regularizer='l1', beta_regularizer='l1')
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.losses), 2)

    # Constraints must be attached to the created variables.
    max_norm = keras.constraints.max_norm
    layer = keras.layers.LayerNormalization(
        gamma_constraint=max_norm, beta_constraint=max_norm)
    layer.build((None, 3, 4))
    self.assertEqual(layer.gamma.constraint, max_norm)
    self.assertEqual(layer.beta.constraint, max_norm)

  @keras_parameterized.run_all_keras_modes
  def test_layernorm_convnet(self):
    # Channels-first convnet layout (params on axis 1); GPU only.
    if test.is_gpu_available(cuda_only=True):
      with self.session(use_gpu=True):
        model = keras.models.Sequential()
        norm = keras.layers.LayerNormalization(
            input_shape=(3, 4, 4), params_axis=1)
        model.add(norm)
        model.compile(loss='mse',
                      optimizer=gradient_descent.GradientDescentOptimizer(0.01),
                      run_eagerly=testing_utils.should_run_eagerly())

        # centered on 5.0, variance 10.0
        x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
        model.fit(x, x, epochs=4, verbose=0)
        out = model.predict(x)
        # Undo the per-channel affine part before checking the statistics.
        out -= np.reshape(keras.backend.eval(norm.beta), (1, 3, 1, 1))
        out /= np.reshape(keras.backend.eval(norm.gamma), (1, 3, 1, 1))

        np.testing.assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1)
        np.testing.assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1)

  @keras_parameterized.run_all_keras_modes
  def test_layernorm_convnet_channel_last(self):
    # Channels-last convnet layout (default params axis, i.e. -1).
    model = keras.models.Sequential()
    norm = keras.layers.LayerNormalization(input_shape=(4, 4, 3))
    model.add(norm)
    model.compile(loss='mse',
                  optimizer=gradient_descent.GradientDescentOptimizer(0.01),
                  run_eagerly=testing_utils.should_run_eagerly())

    # centered on 5.0, variance 10.0
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3))
    model.fit(x, x, epochs=4, verbose=0)
    out = model.predict(x)
    out -= np.reshape(keras.backend.eval(norm.beta), (1, 1, 1, 3))
    out /= np.reshape(keras.backend.eval(norm.gamma), (1, 1, 1, 3))

    np.testing.assert_allclose(np.mean(out, axis=(0, 1, 2)), 0.0, atol=1e-1)
    np.testing.assert_allclose(np.std(out, axis=(0, 1, 2)), 1.0, atol=1e-1)

  @keras_parameterized.run_all_keras_modes
  def test_layernorm_correctness(self):
    _run_layernorm_correctness_test(
        normalization.LayerNormalization, dtype='float32')

  @keras_parameterized.run_all_keras_modes
  def test_layernorm_mixed_precision(self):
    _run_layernorm_correctness_test(
        normalization.LayerNormalization, dtype='float16')

  def doOutputTest(self,
                   input_shape,
                   tol=1e-5,
                   norm_axis=None,
                   params_axis=-1,
                   dtype=None):
    """Compares the layer's output against a NumPy layer-norm reference.

    Args:
      input_shape: shape of the random input tensor.
      tol: absolute/relative tolerance for the mean/variance checks.
      norm_axis: None (all non-batch axes), an int, or a list of ints.
      params_axis: axis (or axes) along which gamma/beta are parameterized.
      dtype: optional dtype forwarded to the layer.
    """
    ndim = len(input_shape)
    # Translate norm_axis into the set of axes moments are taken over,
    # resolving negative indices against ndim.
    if norm_axis is None:
      moments_axis = range(1, ndim)
    elif isinstance(norm_axis, int):
      if norm_axis < 0:
        moments_axis = [norm_axis + ndim]
      else:
        moments_axis = [norm_axis]
    else:
      moments_axis = []
      for dim in norm_axis:
        if dim < 0:
          dim = dim + ndim
        moments_axis.append(dim)
      moments_axis = tuple(moments_axis)

    # The per-sample statistics keep only the non-normalized dimensions.
    expected_shape = []
    for i in range(ndim):
      if i not in moments_axis:
        expected_shape.append(input_shape[i])

    expected_mean = np.zeros(expected_shape)
    expected_var = np.ones(expected_shape)
    # Exercise several input scales/offsets to catch numerical issues.
    for mu in [0.0, 1e2]:
      for sigma in [1.0, 0.1]:
        inputs = np.random.randn(*input_shape) * sigma + mu
        inputs_t = constant_op.constant(inputs, shape=input_shape)
        layer = normalization.LayerNormalization(
            norm_axis=norm_axis, params_axis=params_axis, dtype=dtype)
        outputs = layer(inputs_t)
        beta = layer.beta
        gamma = layer.gamma
        for weight in layer.weights:
          self.evaluate(weight.initializer)
        outputs = self.evaluate(outputs)
        beta = self.evaluate(beta)
        gamma = self.evaluate(gamma)

        # The mean and variance of the output should be close to 0 and 1
        # respectively.

        # Make sure that there are no NaNs
        self.assertFalse(np.isnan(outputs).any())
        mean = np.mean(outputs, axis=moments_axis)
        var = np.var(outputs, axis=moments_axis)
        # Layer-norm implemented in numpy
        eps = 1e-12
        expected_out = (
            (gamma * (inputs - np.mean(
                inputs, axis=moments_axis, keepdims=True)) /
             np.sqrt(eps + np.var(
                 inputs, axis=moments_axis, keepdims=True))) + beta)
        self.assertAllClose(expected_mean, mean, atol=tol, rtol=tol)
        self.assertAllClose(expected_var, var, atol=tol)
        # The full computation gets a bigger tolerance
        self.assertAllClose(expected_out, outputs, atol=5 * tol)

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutput2DInput(self):
    self.doOutputTest((10, 300))
    self.doOutputTest((10, 300), norm_axis=[0])
    self.doOutputTest((10, 300), params_axis=[0, 1])

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutput2DInputDegenerateNormAxis(self):
    # Axis 2 does not exist for a 2D input; the layer must reject it.
    with self.assertRaisesRegexp(ValueError, r'Invalid axis: 2'):
      self.doOutputTest((10, 300), norm_axis=2)

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutput4DInput(self):
    self.doOutputTest((100, 10, 10, 3))

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutput4DInputNormOnInnermostAxis(self):
    # Equivalent tests
    shape = (100, 10, 10, 3)
    self.doOutputTest(
        shape, norm_axis=list(range(3, len(shape))), tol=1e-4, dtype='float64')
    self.doOutputTest(shape, norm_axis=-1, tol=1e-4, dtype='float64')

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutputSmallInput(self):
    self.doOutputTest((10, 10, 10, 30))

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutputSmallInputNormOnInnermostAxis(self):
    self.doOutputTest((10, 10, 10, 30), norm_axis=3)

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutputSmallInputNormOnMixedAxes(self):
    self.doOutputTest((10, 10, 10, 30), norm_axis=[0, 3])
    self.doOutputTest((10, 10, 10, 30), params_axis=[-2, -1])
    self.doOutputTest((10, 10, 10, 30), norm_axis=[0, 3],
                      params_axis=[-3, -2, -1])

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutputBigInput(self):
    self.doOutputTest((1, 100, 100, 1))
    self.doOutputTest((1, 100, 100, 1), norm_axis=[1, 2])
    self.doOutputTest((1, 100, 100, 1), norm_axis=[1, 2],
                      params_axis=[-2, -1])
if __name__ == '__main__':
test.main()
| apache-2.0 |
meisamhe/GPLshared | Programming/MPI — AMath 483 583, Spring 2013 1.0 documentation_files/s2.py | 1 | 1744 | import time
import threading
# @include
class SpellCheckService:
    """Spell-check service with a single-entry result cache.

    Remembers the most recent (word, suggestions) pair so a repeated
    request for the same word skips the slow dictionary lookup.
    """

    # Most recently checked word and its suggestions. Shared across all
    # threads; every access goes through `lock`.
    w_last = closest_to_last_word = None
    lock = threading.Lock()

    @staticmethod
    def service(req, resp):
        w = req.extract_word_to_check_from_request()
        result = None
        # Fast path: under the lock, serve a copy of the cached result for a
        # repeated word. The copy keeps callers from mutating the cache.
        with SpellCheckService.lock:
            if w == SpellCheckService.w_last:
                result = SpellCheckService.closest_to_last_word.copy()
        if result is None:
            # Cache miss: run the expensive lookup OUTSIDE the lock so other
            # requests are not blocked meanwhile. Two threads may therefore
            # compute the same word concurrently; last writer wins below,
            # which is harmless since the results are identical.
            result = closest_in_dictionary(w)
            with SpellCheckService.lock:
                SpellCheckService.w_last = w
                SpellCheckService.closest_to_last_word = result
        resp.encode_into_response(result)
# @exclude
class ServiceRequest:
    """Minimal request wrapper carrying the word to spell-check."""

    def __init__(self, word):
        # Raw payload; read back via extract_word_to_check_from_request().
        self.request = word

    def extract_word_to_check_from_request(self):
        """Return the word embedded in this request."""
        return self.request
class ServiceResponse:
    """Holds the reply produced by the spell-check service."""

    # Class-level default; an instance gets its own `response` attribute the
    # first time encode_into_response() is called.
    response = None

    def encode_into_response(self, payload):
        """Record *payload* as this response's body."""
        self.response = payload
def closest_in_dictionary(w):
    """Stand-in for an expensive dictionary lookup.

    Sleeps 0.2 s to simulate the cost, then returns a one-element list of
    suggestions derived from *w*.
    """
    time.sleep(0.2)  # simulated lookup latency
    return [w + '_result']
class ServiceThread(threading.Thread):
    """Worker thread that issues one spell-check request and prints timing."""

    def __init__(self, data):
        super().__init__()
        # The word to check; used verbatim as the request payload.
        self.data = data

    def run(self):
        start_time = time.time()
        req = ServiceRequest(self.data)
        resp = ServiceResponse()
        SpellCheckService.service(req, resp)
        # The elapsed time shows whether the result came from the cache
        # (near 0 s) or from a fresh dictionary lookup (~0.2 s).
        print(self.data, '->', resp.response, '(%.3f sec)' %
              (time.time() - start_time))
def main():
    """Demo driver: fires spell-check requests forever.

    Each iteration starts a thread for a new word and, from the second
    iteration on, a thread re-requesting the previous word to exercise the
    service's single-entry cache. Runs until interrupted; threads are
    daemonless and never joined (demo code).
    """
    i = 0
    while True:
        ServiceThread('req:%d' % (i + 1)).start()
        if i > 0:
            # while req:i+1 is computed we could return req:i from the cache
            ServiceThread('req:%d' % i).start()
        time.sleep(0.5)
        i += 1
if __name__ == '__main__':
main()
| gpl-3.0 |
2mny/mylar | lib/simplejson/tests/test_fail.py | 136 | 3555 | from unittest import TestCase
import simplejson as json
# Fri Dec 30 18:57:26 2005
JSONDOCS = [
# http://json.org/JSON_checker/test/fail1.json
'"A JSON payload should be an object or array, not a string."',
# http://json.org/JSON_checker/test/fail2.json
'["Unclosed array"',
# http://json.org/JSON_checker/test/fail3.json
'{unquoted_key: "keys must be quoted}',
# http://json.org/JSON_checker/test/fail4.json
'["extra comma",]',
# http://json.org/JSON_checker/test/fail5.json
'["double extra comma",,]',
# http://json.org/JSON_checker/test/fail6.json
'[ , "<-- missing value"]',
# http://json.org/JSON_checker/test/fail7.json
'["Comma after the close"],',
# http://json.org/JSON_checker/test/fail8.json
'["Extra close"]]',
# http://json.org/JSON_checker/test/fail9.json
'{"Extra comma": true,}',
# http://json.org/JSON_checker/test/fail10.json
'{"Extra value after close": true} "misplaced quoted value"',
# http://json.org/JSON_checker/test/fail11.json
'{"Illegal expression": 1 + 2}',
# http://json.org/JSON_checker/test/fail12.json
'{"Illegal invocation": alert()}',
# http://json.org/JSON_checker/test/fail13.json
'{"Numbers cannot have leading zeroes": 013}',
# http://json.org/JSON_checker/test/fail14.json
'{"Numbers cannot be hex": 0x14}',
# http://json.org/JSON_checker/test/fail15.json
'["Illegal backslash escape: \\x15"]',
# http://json.org/JSON_checker/test/fail16.json
'["Illegal backslash escape: \\\'"]',
# http://json.org/JSON_checker/test/fail17.json
'["Illegal backslash escape: \\017"]',
# http://json.org/JSON_checker/test/fail18.json
'[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]',
# http://json.org/JSON_checker/test/fail19.json
'{"Missing colon" null}',
# http://json.org/JSON_checker/test/fail20.json
'{"Double colon":: null}',
# http://json.org/JSON_checker/test/fail21.json
'{"Comma instead of colon", null}',
# http://json.org/JSON_checker/test/fail22.json
'["Colon instead of comma": false]',
# http://json.org/JSON_checker/test/fail23.json
'["Bad value", truth]',
# http://json.org/JSON_checker/test/fail24.json
"['single quote']",
# http://code.google.com/p/simplejson/issues/detail?id=3
u'["A\u001FZ control characters in string"]',
]
SKIPS = {
1: "why not have a string payload?",
18: "spec doesn't specify any nesting limitations",
}
class TestFail(TestCase):
    """Verifies simplejson rejects the malformed documents in JSONDOCS.

    NOTE: this is Python 2 source (`except ExcType, e` syntax); it cannot
    run under Python 3 unmodified.
    """

    def test_failures(self):
        for idx, doc in enumerate(JSONDOCS):
            # JSON_checker test files are numbered from 1.
            idx = idx + 1
            if idx in SKIPS:
                # Documents simplejson deliberately accepts (see SKIPS):
                # just confirm they parse without raising.
                json.loads(doc)
                continue
            try:
                json.loads(doc)
            except json.JSONDecodeError:
                pass
            else:
                #self.fail("Expected failure for fail{0}.json: {1!r}".format(idx, doc))
                self.fail("Expected failure for fail%d.json: %r" % (idx, doc))

    def test_array_decoder_issue46(self):
        # http://code.google.com/p/simplejson/issues/detail?id=46
        # A leading comma in an array must raise JSONDecodeError pointing at
        # line 1, column 1 (character position 1), for both unicode and str.
        for doc in [u'[,]', '[,]']:
            try:
                json.loads(doc)
            except json.JSONDecodeError, e:
                self.assertEquals(e.pos, 1)
                self.assertEquals(e.lineno, 1)
                self.assertEquals(e.colno, 1)
            except Exception, e:
                self.fail("Unexpected exception raised %r %s" % (e, e))
            else:
                self.fail("Unexpected success parsing '[,]'")
Bysmyyr/chromium-crosswalk | tools/telemetry/telemetry/internal/actions/action_runner_unittest.py | 2 | 12090 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry.core import exceptions
from telemetry import decorators
from telemetry.internal.actions import page_action
from telemetry.page import action_runner as action_runner_module
from telemetry.testing import tab_test_case
import mock
from telemetry.timeline import model
from telemetry.timeline import tracing_category_filter
from telemetry.timeline import tracing_options
from telemetry.web_perf import timeline_interaction_record as tir_module
class ActionRunnerInteractionTest(tab_test_case.TabTestCase):
  """Checks that ActionRunner interactions are recorded on the trace."""

  def GetInteractionRecords(self, trace_data):
    # Extract the interaction records emitted by this test's tab from the
    # renderer thread's async trace slices.
    timeline_model = model.TimelineModel(trace_data)
    renderer_thread = timeline_model.GetRendererThreadFromTabId(self._tab.id)
    return [
        tir_module.TimelineInteractionRecord.FromAsyncEvent(e)
        for e in renderer_thread.async_slices
        if tir_module.IsTimelineInteractionRecord(e.name)
    ]

  def VerifyIssuingInteractionRecords(self, **interaction_kwargs):
    """Runs one CreateInteraction under tracing and verifies exactly one
    record named 'InteractionName' appears, with the given flags set."""
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    self.Navigate('interaction_enabled_page.html')
    action_runner.Wait(1)
    options = tracing_options.TracingOptions()
    options.enable_chrome_trace = True
    self._browser.platform.tracing_controller.Start(
        options, tracing_category_filter.CreateNoOverheadFilter())
    with action_runner.CreateInteraction('InteractionName',
                                         **interaction_kwargs):
      pass
    trace_data = self._browser.platform.tracing_controller.Stop()

    records = self.GetInteractionRecords(trace_data)
    self.assertEqual(
        1, len(records),
        'Failed to issue the interaction record on the tracing timeline.'
        ' Trace data:\n%s' % repr(trace_data._raw_data))
    self.assertEqual('InteractionName', records[0].label)
    # Each keyword passed to CreateInteraction must show up as a truthy
    # attribute on the recorded interaction.
    for attribute_name in interaction_kwargs:
      self.assertTrue(getattr(records[0], attribute_name))

  # Test disabled for android: crbug.com/437057
  # Test disabled for linux: crbug.com/513874
  @decorators.Disabled('android', 'chromeos', 'linux')
  def testIssuingMultipleMeasurementInteractionRecords(self):
    self.VerifyIssuingInteractionRecords(repeatable=True)
class ActionRunnerTest(tab_test_case.TabTestCase):
  """End-to-end checks for individual ActionRunner commands on a live tab."""

  def testExecuteJavaScript(self):
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    self.Navigate('blank.html')
    action_runner.ExecuteJavaScript('var testing = 42;')
    self.assertEqual(42, self._tab.EvaluateJavaScript('testing'))

  def testWaitForNavigate(self):
    self.Navigate('page_with_link.html')
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    action_runner.ClickElement('#clickme')
    action_runner.WaitForNavigate()

    # After the wait, the new document must be at least interactive and the
    # link target (blank.html) must have been reached.
    self.assertTrue(self._tab.EvaluateJavaScript(
        'document.readyState == "interactive" || '
        'document.readyState == "complete"'))
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.location.pathname;'),
        '/blank.html')

  def testWait(self):
    action_runner = action_runner_module.ActionRunner(self._tab)
    self.Navigate('blank.html')

    # Each Wait() must outlast the page-side setTimeout so the assertion
    # observes the updated value.
    action_runner.ExecuteJavaScript(
        'window.setTimeout(function() { window.testing = 101; }, 50);')
    action_runner.Wait(0.1)
    self.assertEqual(101, self._tab.EvaluateJavaScript('window.testing'))

    action_runner.ExecuteJavaScript(
        'window.setTimeout(function() { window.testing = 102; }, 100);')
    action_runner.Wait(0.2)
    self.assertEqual(102, self._tab.EvaluateJavaScript('window.testing'))

  def testWaitForJavaScriptCondition(self):
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    self.Navigate('blank.html')

    # Condition already true: must return immediately.
    action_runner.ExecuteJavaScript('window.testing = 219;')
    action_runner.WaitForJavaScriptCondition(
        'window.testing == 219', timeout_in_seconds=0.1)
    # Condition becomes true asynchronously within the timeout.
    action_runner.ExecuteJavaScript(
        'window.setTimeout(function() { window.testing = 220; }, 50);')
    action_runner.WaitForJavaScriptCondition(
        'window.testing == 220', timeout_in_seconds=0.1)
    self.assertEqual(220, self._tab.EvaluateJavaScript('window.testing'))

  def testWaitForElement(self):
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    self.Navigate('blank.html')

    # Element created synchronously: waits by selector, text, and
    # element_function must all succeed.
    action_runner.ExecuteJavaScript(
        '(function() {'
        ' var el = document.createElement("div");'
        ' el.id = "test1";'
        ' el.textContent = "foo";'
        ' document.body.appendChild(el);'
        '})()')
    action_runner.WaitForElement('#test1', timeout_in_seconds=0.1)
    action_runner.WaitForElement(text='foo', timeout_in_seconds=0.1)
    action_runner.WaitForElement(
        element_function='document.getElementById("test1")')

    # Element created asynchronously after 50 ms: selector wait.
    action_runner.ExecuteJavaScript(
        'window.setTimeout(function() {'
        ' var el = document.createElement("div");'
        ' el.id = "test2";'
        ' document.body.appendChild(el);'
        '}, 50)')
    action_runner.WaitForElement('#test2', timeout_in_seconds=0.1)

    # Text content updated asynchronously: text wait.
    action_runner.ExecuteJavaScript(
        'window.setTimeout(function() {'
        ' document.getElementById("test2").textContent = "bar";'
        '}, 50)')
    action_runner.WaitForElement(text='bar', timeout_in_seconds=0.1)

    # Element created asynchronously: element_function wait.
    action_runner.ExecuteJavaScript(
        'window.setTimeout(function() {'
        ' var el = document.createElement("div");'
        ' el.id = "test3";'
        ' document.body.appendChild(el);'
        '}, 50)')
    action_runner.WaitForElement(
        element_function='document.getElementById("test3")')

  def testWaitForElementWithWrongText(self):
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    self.Navigate('blank.html')

    action_runner.ExecuteJavaScript(
        '(function() {'
        ' var el = document.createElement("div");'
        ' el.id = "test1";'
        ' el.textContent = "foo";'
        ' document.body.appendChild(el);'
        '})()')
    action_runner.WaitForElement('#test1', timeout_in_seconds=0.2)

    def WaitForElement():
      action_runner.WaitForElement(text='oo', timeout_in_seconds=0.2)

    # 'oo' does not match the element's text exactly, so the wait times out.
    self.assertRaises(exceptions.TimeoutException, WaitForElement)

  def testClickElement(self):
    self.Navigate('page_with_clickables.html')
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)

    # The page copies valueSettableByTest into valueToTest on each click,
    # so a successful click is observable via valueToTest.
    action_runner.ExecuteJavaScript('valueSettableByTest = 1;')
    action_runner.ClickElement('#test')
    self.assertEqual(1, action_runner.EvaluateJavaScript('valueToTest'))

    action_runner.ExecuteJavaScript('valueSettableByTest = 2;')
    action_runner.ClickElement(text='Click/tap me')
    self.assertEqual(2, action_runner.EvaluateJavaScript('valueToTest'))

    action_runner.ExecuteJavaScript('valueSettableByTest = 3;')
    action_runner.ClickElement(
        element_function='document.body.firstElementChild;')
    self.assertEqual(3, action_runner.EvaluateJavaScript('valueToTest'))

    def WillFail():
      action_runner.ClickElement('#notfound')

    self.assertRaises(exceptions.EvaluateException, WillFail)

  @decorators.Disabled('android', 'debug',  # crbug.com/437068
                       'chromeos')  # crbug.com/483212
  def testTapElement(self):
    # Same page contract as testClickElement, exercised via tap gestures.
    self.Navigate('page_with_clickables.html')
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)

    action_runner.ExecuteJavaScript('valueSettableByTest = 1;')
    action_runner.TapElement('#test')
    self.assertEqual(1, action_runner.EvaluateJavaScript('valueToTest'))

    action_runner.ExecuteJavaScript('valueSettableByTest = 2;')
    action_runner.TapElement(text='Click/tap me')
    self.assertEqual(2, action_runner.EvaluateJavaScript('valueToTest'))

    action_runner.ExecuteJavaScript('valueSettableByTest = 3;')
    action_runner.TapElement(
        element_function='document.body.firstElementChild')
    self.assertEqual(3, action_runner.EvaluateJavaScript('valueToTest'))

    def WillFail():
      action_runner.TapElement('#notfound')

    self.assertRaises(exceptions.EvaluateException, WillFail)

  @decorators.Disabled('android',  # crbug.com/437065.
                       'chromeos')  # crbug.com/483212.
  def testScroll(self):
    # Scroll gestures require touch input support.
    if not page_action.IsGestureSourceTypeSupported(
        self._tab, 'touch'):
      return

    self.Navigate('page_with_swipeables.html')
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)

    action_runner.ScrollElement(
        selector='#left-right', direction='right', left_start_ratio=0.9)
    self.assertTrue(action_runner.EvaluateJavaScript(
        'document.querySelector("#left-right").scrollLeft') > 75)
    action_runner.ScrollElement(
        selector='#top-bottom', direction='down', top_start_ratio=0.9)
    self.assertTrue(action_runner.EvaluateJavaScript(
        'document.querySelector("#top-bottom").scrollTop') > 75)

    action_runner.ScrollPage(direction='right', left_start_ratio=0.9,
                             distance=100)
    self.assertTrue(action_runner.EvaluateJavaScript(
        '(document.scrollingElement || document.body).scrollLeft') > 75)

  @decorators.Disabled('android',  # crbug.com/437065.
                       'chromeos')  # crbug.com/483212.
  def testSwipe(self):
    # Swipe gestures require touch input support.
    if not page_action.IsGestureSourceTypeSupported(
        self._tab, 'touch'):
      return

    self.Navigate('page_with_swipeables.html')
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)

    action_runner.SwipeElement(
        selector='#left-right', direction='left', left_start_ratio=0.9)
    self.assertTrue(action_runner.EvaluateJavaScript(
        'document.querySelector("#left-right").scrollLeft') > 75)
    action_runner.SwipeElement(
        selector='#top-bottom', direction='up', top_start_ratio=0.9)
    self.assertTrue(action_runner.EvaluateJavaScript(
        'document.querySelector("#top-bottom").scrollTop') > 75)

    action_runner.SwipePage(direction='left', left_start_ratio=0.9)
    self.assertTrue(action_runner.EvaluateJavaScript(
        '(document.scrollingElement || document.body).scrollLeft') > 75)
class InteractionTest(unittest.TestCase):
  """Unit tests for the Interaction context manager using a mocked runner."""

  def setUp(self):
    self.mock_action_runner = mock.Mock(action_runner_module.ActionRunner)

  def testIssuingInteractionRecordCommand(self):
    # A clean pass through the with-block must emit both the console.time
    # begin marker and the console.timeEnd end marker, in that order.
    with action_runner_module.Interaction(
        self.mock_action_runner, label='ABC', flags=[]):
      pass
    expected_calls = [
        mock.call.ExecuteJavaScript('console.time("Interaction.ABC");'),
        mock.call.ExecuteJavaScript('console.timeEnd("Interaction.ABC");')]
    self.assertEqual(expected_calls, self.mock_action_runner.mock_calls)

  def testExceptionRaisedInWithInteraction(self):
    class FooException(Exception):
      pass

    # Test that the Foo exception raised in the with block is propagated to the
    # caller.
    with self.assertRaises(FooException):
      with action_runner_module.Interaction(
          self.mock_action_runner, label='ABC', flags=[]):
        raise FooException()

    # Test that the end console.timeEnd(...) isn't called because exception was
    # raised.
    expected_calls = [
        mock.call.ExecuteJavaScript('console.time("Interaction.ABC");')]
    self.assertEqual(expected_calls, self.mock_action_runner.mock_calls)
| bsd-3-clause |
mancoast/CPythonPyc_test | cpython/279_test__osx_support.py | 44 | 11831 | """
Test suite for _osx_support: shared OS X support functions.
"""
import os
import platform
import shutil
import stat
import sys
import unittest
import test.test_support
import _osx_support
@unittest.skipUnless(sys.platform.startswith("darwin"), "requires OS X")
class Test_OSXSupport(unittest.TestCase):
def setUp(self):
    """Prepare a scratch dir for fake tools and a clean compiler env.

    Clears every compiler-related environment variable so that each test
    starts from a known configuration; EnvironmentVarGuard restores the
    environment on cleanup.
    """
    self.maxDiff = None
    self.prog_name = 'bogus_program_xxxx'
    self.temp_path_dir = os.path.abspath(os.getcwd())
    self.env = test.test_support.EnvironmentVarGuard()
    self.addCleanup(self.env.__exit__)
    for cv in ('CFLAGS', 'LDFLAGS', 'CPPFLAGS',
               'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'CC',
               'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
               'PY_CORE_CFLAGS'):
        if cv in self.env:
            self.env.unset(cv)
def add_expected_saved_initial_values(self, config_vars, expected_vars):
    """Augment *expected_vars* with the saved-original entries, in place.

    For every variable whose value differs between *config_vars* and
    *expected_vars*, _osx_support is expected to also preserve the original
    value under a '_OSX_SUPPORT_INITIAL_'-prefixed key; add those expected
    entries here.
    """
    for key in config_vars:
        original = config_vars[key]
        if original != expected_vars[key]:
            expected_vars['_OSX_SUPPORT_INITIAL_' + key] = original
def test__find_executable(self):
    """_find_executable(): None when absent, the name once it is on PATH."""
    # Put the scratch dir on PATH so the fake program can be found.
    if self.env['PATH']:
        self.env['PATH'] = self.env['PATH'] + ':'
    self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir)
    test.test_support.unlink(self.prog_name)
    self.assertIsNone(_osx_support._find_executable(self.prog_name))
    self.addCleanup(test.test_support.unlink, self.prog_name)
    with open(self.prog_name, 'w') as f:
        f.write("#!/bin/sh\n/bin/echo OK\n")
    os.chmod(self.prog_name, stat.S_IRWXU)
    self.assertEqual(self.prog_name,
                     _osx_support._find_executable(self.prog_name))
def test__read_output(self):
    """_read_output() returns the stripped stdout of the given command."""
    # Put the scratch dir on PATH so the fake program can be executed.
    if self.env['PATH']:
        self.env['PATH'] = self.env['PATH'] + ':'
    self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir)
    test.test_support.unlink(self.prog_name)
    self.addCleanup(test.test_support.unlink, self.prog_name)
    with open(self.prog_name, 'w') as f:
        f.write("#!/bin/sh\n/bin/echo ExpectedOutput\n")
    os.chmod(self.prog_name, stat.S_IRWXU)
    self.assertEqual('ExpectedOutput',
                     _osx_support._read_output(self.prog_name))
def test__find_build_tool(self):
    """_find_build_tool('cc') must resolve to an existing file."""
    out = _osx_support._find_build_tool('cc')
    self.assertTrue(os.path.isfile(out),
                    'cc not found - check xcode-select')
def test__get_system_version(self):
    """The detected system version must be a prefix of platform.mac_ver()."""
    self.assertTrue(platform.mac_ver()[0].startswith(
        _osx_support._get_system_version()))
def test__remove_original_values(self):
    """_remove_original_values() drops the saved '_OSX_SUPPORT_INITIAL_*'
    entries, leaving only the modified config values."""
    config_vars = {
        'CC': 'gcc-test -pthreads',
    }
    expected_vars = {
        'CC': 'clang -pthreads',
    }
    cv = 'CC'
    newvalue = 'clang -pthreads'
    _osx_support._save_modified_value(config_vars, cv, newvalue)
    # The saved-original key makes the dicts differ ...
    self.assertNotEqual(expected_vars, config_vars)
    # ... until the original values are removed again.
    _osx_support._remove_original_values(config_vars)
    self.assertEqual(expected_vars, config_vars)
def test__save_modified_value(self):
    """_save_modified_value() stores the new value and keeps the original
    under a '_OSX_SUPPORT_INITIAL_'-prefixed key."""
    config_vars = {
        'CC': 'gcc-test -pthreads',
    }
    expected_vars = {
        'CC': 'clang -pthreads',
    }
    self.add_expected_saved_initial_values(config_vars, expected_vars)

    cv = 'CC'
    newvalue = 'clang -pthreads'
    _osx_support._save_modified_value(config_vars, cv, newvalue)
    self.assertEqual(expected_vars, config_vars)
def test__save_modified_value_unchanged(self):
    """Saving a value identical to the current one must not add a saved
    '_OSX_SUPPORT_INITIAL_*' entry."""
    config_vars = {
        'CC': 'gcc-test -pthreads',
    }
    expected_vars = config_vars.copy()

    cv = 'CC'
    newvalue = 'gcc-test -pthreads'
    _osx_support._save_modified_value(config_vars, cv, newvalue)
    self.assertEqual(expected_vars, config_vars)
def test__supports_universal_builds(self):
    """Universal builds must be reported as supported exactly on OS X >= 10.4.

    Compares _supports_universal_builds() against the (major, minor) tuple
    parsed from platform.mac_ver().
    """
    # `platform` is already imported at module level; the previous redundant
    # function-local `import platform` has been removed.
    mac_ver_tuple = tuple(int(i) for i in
                          platform.mac_ver()[0].split('.')[0:2])
    self.assertEqual(mac_ver_tuple >= (10, 4),
                     _osx_support._supports_universal_builds())
def test__find_appropriate_compiler(self):
    """_find_appropriate_compiler() rewrites a gcc-based configuration to
    clang when the installed 'gcc' is really an llvm-gcc shim.

    Fake 'gcc-test' and 'clang' executables are placed on PATH; their echoed
    version strings drive the compiler-detection logic.
    """
    compilers = (
        ('gcc-test', 'i686-apple-darwin11-llvm-gcc-4.2'),
        ('clang', 'clang version 3.1'),
    )
    config_vars = {
        'CC': 'gcc-test -pthreads',
        'CXX': 'cc++-test',
        'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ',
        'LDFLAGS': '-arch ppc -arch i386 -g',
        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
        'BLDSHARED': 'gcc-test -bundle -arch ppc -arch i386 -g',
        'LDSHARED': 'gcc-test -bundle -arch ppc -arch i386 '
                    '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    }
    # Only the compiler driver names change; flags stay untouched.
    expected_vars = {
        'CC': 'clang -pthreads',
        'CXX': 'clang++',
        'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ',
        'LDFLAGS': '-arch ppc -arch i386 -g',
        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
        'BLDSHARED': 'clang -bundle -arch ppc -arch i386 -g',
        'LDSHARED': 'clang -bundle -arch ppc -arch i386 '
                    '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    }
    self.add_expected_saved_initial_values(config_vars, expected_vars)

    suffix = (':' + self.env['PATH']) if self.env['PATH'] else ''
    self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix
    for c_name, c_output in compilers:
        test.test_support.unlink(c_name)
        self.addCleanup(test.test_support.unlink, c_name)
        with open(c_name, 'w') as f:
            f.write("#!/bin/sh\n/bin/echo " + c_output)
        os.chmod(c_name, stat.S_IRWXU)
    self.assertEqual(expected_vars,
                     _osx_support._find_appropriate_compiler(
                         config_vars))
def test__remove_universal_flags(self):
    """_remove_universal_flags() strips '-arch' and '-isysroot' options from
    every compiler/linker configuration variable."""
    config_vars = {
        'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ',
        'LDFLAGS': '-arch ppc -arch i386 -g',
        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
        'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
        'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
                    '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    }
    expected_vars = {
        'CFLAGS': '-fno-strict-aliasing -g -O3 ',
        'LDFLAGS': ' -g',
        'CPPFLAGS': '-I. ',
        'BLDSHARED': 'gcc-4.0 -bundle -g',
        'LDSHARED': 'gcc-4.0 -bundle -g',
    }
    self.add_expected_saved_initial_values(config_vars, expected_vars)

    self.assertEqual(expected_vars,
                     _osx_support._remove_universal_flags(
                         config_vars))
    def test__remove_unsupported_archs(self):
        """When the compiler rejects -arch ppc (the fake 'clang' below exits
        with status 255), _remove_unsupported_archs() must drop the ppc arch
        flags while keeping i386."""
        config_vars = {
        'CC': 'clang',
        'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ',
        'LDFLAGS': '-arch ppc -arch i386 -g',
        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
        'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
        'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
                       '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
        }
        expected_vars = {
        'CC': 'clang',
        'CFLAGS': '-fno-strict-aliasing -g -O3 -arch i386 ',
        'LDFLAGS': ' -arch i386 -g',
        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
        'BLDSHARED': 'gcc-4.0 -bundle -arch i386 -g',
        'LDSHARED': 'gcc-4.0 -bundle -arch i386 '
                       '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
        }
        self.add_expected_saved_initial_values(config_vars, expected_vars)
        # Put a fake compiler first on PATH so the helper probes it.
        suffix = (':' + self.env['PATH']) if self.env['PATH'] else ''
        self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix
        c_name = 'clang'
        test.test_support.unlink(c_name)
        self.addCleanup(test.test_support.unlink, c_name)
        # exit status 255 means no PPC support in this compiler chain
        with open(c_name, 'w') as f:
            f.write("#!/bin/sh\nexit 255")
        os.chmod(c_name, stat.S_IRWXU)
        self.assertEqual(expected_vars,
                         _osx_support._remove_unsupported_archs(
                                 config_vars))
    def test__override_all_archs(self):
        """ARCHFLAGS in the environment must replace every -arch option in
        the compiler-related configuration variables."""
        self.env['ARCHFLAGS'] = '-arch x86_64'
        config_vars = {
        'CC': 'clang',
        'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ',
        'LDFLAGS': '-arch ppc -arch i386 -g',
        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
        'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
        'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
                       '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
        }
        expected_vars = {
        'CC': 'clang',
        'CFLAGS': '-fno-strict-aliasing -g -O3 -arch x86_64',
        'LDFLAGS': ' -g -arch x86_64',
        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
        'BLDSHARED': 'gcc-4.0 -bundle -g -arch x86_64',
        'LDSHARED': 'gcc-4.0 -bundle -isysroot '
                       '/Developer/SDKs/MacOSX10.4u.sdk -g -arch x86_64',
        }
        self.add_expected_saved_initial_values(config_vars, expected_vars)
        self.assertEqual(expected_vars,
                         _osx_support._override_all_archs(
                                 config_vars))
    def test__check_for_unavailable_sdk(self):
        """If the SDK named by -isysroot does not exist on this machine,
        _check_for_unavailable_sdk() must strip the -isysroot options."""
        config_vars = {
        'CC': 'clang',
        'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 '
                       '-isysroot /Developer/SDKs/MacOSX10.1.sdk',
        'LDFLAGS': '-arch ppc -arch i386 -g',
        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.1.sdk',
        'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
        'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
                       '-isysroot /Developer/SDKs/MacOSX10.1.sdk -g',
        }
        # Stripping only removes the option text, so residual whitespace in
        # the expected values is intentional.
        expected_vars = {
        'CC': 'clang',
        'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 '
                       ' ',
        'LDFLAGS': '-arch ppc -arch i386 -g',
        'CPPFLAGS': '-I. ',
        'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
        'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
                       ' -g',
        }
        self.add_expected_saved_initial_values(config_vars, expected_vars)
        self.assertEqual(expected_vars,
                         _osx_support._check_for_unavailable_sdk(
                                 config_vars))
    def test_get_platform_osx(self):
        """Smoke-test get_platform_osx(): deployment target 10.6 with both
        ppc and i386 arch flags must report a 'fat' macosx platform."""
        # Note, get_platform_osx is currently tested more extensively
        # indirectly by test_sysconfig and test_distutils
        config_vars = {
        'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 '
                       '-isysroot /Developer/SDKs/MacOSX10.1.sdk',
        'MACOSX_DEPLOYMENT_TARGET': '10.6',
        }
        result = _osx_support.get_platform_osx(config_vars, ' ', ' ', ' ')
        self.assertEqual(('macosx', '10.6', 'fat'), result)
def test_main():
    # The helpers under test only make sense for the OS X build chain,
    # so the suite runs exclusively on darwin.
    if sys.platform == 'darwin':
        test.test_support.run_unittest(Test_OSXSupport)
# Allow running this test file directly as a script.
if __name__ == "__main__":
    test_main()
| gpl-3.0 |
foobacca/djangocms-text-ckeditor | djangocms_text_ckeditor/models.py | 2 | 1437 | from django.db import models
from cms.models import CMSPlugin
from django.utils.html import strip_tags
from django.utils.text import truncate_words
from django.utils.translation import ugettext_lazy as _
from djangocms_text_ckeditor.utils import plugin_tags_to_id_list, replace_plugin_tags
from djangocms_text_ckeditor.html import clean_html, extract_images
class Text(CMSPlugin):
    """Abstract text plugin storing an HTML body edited with CKEditor."""
    # Raw HTML body; embedded child plugins are referenced via placeholder
    # tags inside this text.
    body = models.TextField(_("body"))
    search_fields = ('body',)
    def __unicode__(self):
        # Show the first words of the tag-stripped body in admin listings.
        return u"%s" % (truncate_words(strip_tags(self.body), 3)[:30] + "...")
    def save(self, *args, **kwargs):
        """Normalize the body before persisting it.

        Inline images are extracted into their own plugins and the HTML is
        sanitized before the row is written.
        """
        body = self.body
        body = extract_images(body, self)
        body = clean_html(body, full=False)
        self.body = body
        super(Text, self).save(*args, **kwargs)
    def clean_plugins(self):
        """Delete child plugins no longer referenced in the body text."""
        ids = plugin_tags_to_id_list(self.body)
        plugins = CMSPlugin.objects.filter(parent=self)
        for plugin in plugins:
            # Idiomatic membership test (was: "if not plugin.pk in ids").
            if plugin.pk not in ids:
                # The placeholder tag was removed from the text, so the
                # child plugin is orphaned and can be deleted.
                plugin.delete()
    def post_copy(self, old_instance, ziplist):
        """Fix plugin references in the body after this plugin was copied.

        *ziplist* is a sequence of (new_plugin, old_plugin) pairs; every old
        plugin id embedded in the copied text is rewritten to the new id.
        """
        replace_ids = {}
        for new, old in ziplist:
            replace_ids[old.pk] = new.pk
        self.body = replace_plugin_tags(old_instance.get_plugin_instance()[0].body, replace_ids)
        self.save()
| bsd-3-clause |
gypsymauro/gestione-cantiere | build/lib.linux-x86_64-2.7/cantiere/admin.py | 1 | 1533 | from django.contrib import admin
# Register your models here.
from .models import Squadra
from .models import StatoSegnalazione
from .models import Segnalazione
from .models import StatoIntervento
from .models import Intervento
from .models import Risorsa
from .models import InterventoRisorsa
from .models import Costo
from .models import CentroCosto
from .models import Allegato
class InterventoRisorsaInline(admin.TabularInline):
    # Inline editor for the resources assigned to an intervention
    # (through the InterventoRisorsa m2m table).
    model = InterventoRisorsa
    # Hide bookkeeping fields; presumably filled automatically -- confirm.
    exclude = ['created','created_by','modified','modified_by','deleted','note']
class RisorsaAdmin(admin.ModelAdmin):
    # Admin for resources; shows the interventions using each resource inline.
    inlines = (InterventoRisorsaInline,)
    exclude = ['created','created_by','modified','modified_by','deleted']
class InterventoAdmin(admin.ModelAdmin):
    # Admin for interventions: inline resource assignment plus list tools.
    inlines = (InterventoRisorsaInline,)
    list_display = ['oggetto','data_inizio','stato','stampa_intervento']
    # 'stato' is editable directly from the change list.
    list_editable = ['stato']
    ordering = ['created']
    exclude = ['created','created_by','modified','modified_by','deleted']
    list_filter = ('stato','data_inizio','centro_costo','responsabile')
    save_on_top = True
    search_fields = ('oggetto','data_inizio')
# Register the models with the admin site; models with a customized admin
# class are registered together with it.
admin.site.register(Squadra)
admin.site.register(StatoSegnalazione)
admin.site.register(Segnalazione)
admin.site.register(StatoIntervento)
admin.site.register(Intervento,InterventoAdmin)
admin.site.register(Risorsa,RisorsaAdmin)
admin.site.register(Costo)
admin.site.register(CentroCosto)
admin.site.register(Allegato)
# Kept disabled: these through models are presumably edited inline instead
# (see InterventoRisorsaInline) -- confirm before re-enabling.
#admin.site.register(InterventoMezzo)
#admin.site.register(InterventoPersona)
chinmaygarde/mojo | testing/legion/examples/hello_world/controller_test.py | 29 | 2268 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A simple host test module.
This module runs on the host machine and is responsible for creating 2
task machines, waiting for them, and running RPC calls on them.
"""
import argparse
import logging
import os
import sys
import time
# Map the testing directory so we can import legion.legion_test.
TESTING_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'..', '..', '..', '..', 'testing')
sys.path.append(TESTING_DIR)
from legion import legion_test_case
class ExampleTestController(legion_test_case.TestCase):
  """A simple example controller for a test."""
  @classmethod
  def CreateTestTask(cls):
    """Create and start a new task machine (OS taken from the --os flag)."""
    # Task parameters arrive on the command line, forwarded by the runner.
    parser = argparse.ArgumentParser()
    parser.add_argument('--task-hash')
    parser.add_argument('--os', default='Ubuntu-14.04')
    args, _ = parser.parse_known_args()
    task = cls.CreateTask(
        isolated_hash=args.task_hash,
        dimensions={'os': args.os},
        idle_timeout_secs=90,
        connection_timeout_secs=90,
        verbosity=logging.DEBUG)
    task.Create()
    return task
  @classmethod
  def setUpClass(cls):
    """Creates the task machines and waits until they connect."""
    cls.task1 = cls.CreateTestTask()
    cls.task2 = cls.CreateTestTask()
    cls.task1.WaitForConnection()
    cls.task2.WaitForConnection()
  def testCallEcho(self):
    """Tests rpc.Echo on a task."""
    logging.info('Calling Echo on %s', self.task2.name)
    self.assertEqual(self.task2.rpc.Echo('foo'), 'echo foo')
  def testLaunchTaskBinary(self):
    """Call task_test.py 'name' on the tasks."""
    self.VerifyTaskBinaryLaunched(self.task1)
    self.VerifyTaskBinaryLaunched(self.task2)
  def VerifyTaskBinaryLaunched(self, task):
    """Runs ./task_test.py on *task*, checking exit code and output."""
    logging.info(
        'Calling Process to run "./task_test.py %s"', task.name)
    proc = task.Process(['./task_test.py', task.name])
    proc.Wait()
    self.assertEqual(proc.GetReturncode(), 0)
    self.assertIn(task.name, proc.ReadStdout())
    self.assertEquals(proc.ReadStderr(), '')
    proc.Delete()
# Entry point when executed directly by the test runner.
if __name__ == '__main__':
  legion_test_case.main()
| bsd-3-clause |
willthames/ansible | lib/ansible/modules/network/nxos/nxos_vxlan_vtep_vni.py | 22 | 12063 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {
'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community',
}
DOCUMENTATION = '''
---
module: nxos_vxlan_vtep_vni
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Creates a Virtual Network Identifier member (VNI)
description:
- Creates a Virtual Network Identifier member (VNI) for an NVE
overlay interface.
author: Gabriele Gerbino (@GGabriele)
notes:
- default, where supported, restores params default value.
options:
interface:
description:
- Interface name for the VXLAN Network Virtualization Endpoint.
required: true
vni:
description:
- ID of the Virtual Network Identifier.
required: true
assoc_vrf:
description:
- This attribute is used to identify and separate processing VNIs
that are associated with a VRF and used for routing. The VRF
and VNI specified with this command must match the configuration
of the VNI under the VRF.
required: false
choices: ['true','false']
default: null
ingress_replication:
description:
- Specifies mechanism for host reachability advertisement.
required: false
choices: ['bgp','static']
default: null
multicast_group:
description:
- The multicast group (range) of the VNI. Valid values are
string and keyword 'default'.
required: false
default: null
peer_list:
description:
- Set the ingress-replication static peer list. Valid values
are an array, a space-separated string of ip addresses,
or the keyword 'default'.
required: false
default: null
suppress_arp:
description:
- Suppress arp under layer 2 VNI.
required: false
choices: ['true','false']
default: null
state:
description:
- Determines whether the config should be present or not
on the device.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- nxos_vxlan_vtep_vni:
interface: nve1
vni: 6000
ingress_replication: default
'''
RETURN = '''
commands:
description: commands sent to the device
returned: always
type: list
sample: ["interface nve1", "member vni 6000"]
'''
import re
from ansible.module_utils.nxos import get_config, load_config
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netcfg import CustomNetworkConfig
# Parameters whose nxos CLI representation is a bare keyword (presence = True).
# NOTE(review): 'assoc-vrf' never matches the module arg name 'assoc_vrf', so
# that option always takes the string branch in get_value() below -- confirm
# whether the hyphenated spelling is intentional.
BOOL_PARAMS = [
    'assoc-vrf',
    'suppress_arp',
]
# Module argument name -> nxos CLI keyword.
PARAM_TO_COMMAND_KEYMAP = {
    'assoc_vrf': 'associate-vrf',
    'interface': 'interface',
    'vni': 'member vni',
    'ingress_replication': 'ingress-replication protocol',
    'multicast_group': 'mcast-group',
    'peer_list': 'peer-ip',
    'suppress_arp': 'suppress-arp'
}


def get_value(arg, config, module):
    """Extract the configured value of module argument *arg* from *config*.

    Returns True/False for keyword-style (boolean) parameters, a list of
    addresses for 'peer_list', and the text after the CLI keyword (or '')
    for everything else.  *module* is unused but kept for signature
    compatibility with the other helpers.
    """
    command = PARAM_TO_COMMAND_KEYMAP[arg]
    command_val_re = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(command), re.M)

    if arg in BOOL_PARAMS:
        command_re = re.compile(r'\s+{0}\s*$'.format(command), re.M)
        value = False
        if command_re.search(config):
            value = True
    elif arg == 'peer_list':
        # Bug fix: the second positional argument of a compiled pattern's
        # findall()/search() is *pos*, not flags; passing re.M (== 8) used to
        # skip the first 8 characters of the config and miss early matches.
        # The re.M flag is already baked in at compile time above.
        value = command_val_re.findall(config)
    else:
        value = ''
        has_command_val = command_val_re.search(config)
        if has_command_val:
            value = has_command_val.group('value')
    return value
def check_interface(module, netcfg):
    """Return the name of the NVE interface found in the running config.

    An empty string is returned when the config contains no
    'interface nve...' stanza.  *module* is unused and kept for signature
    compatibility with the other helpers.
    """
    match = re.search(r'(?:interface nve)(?P<value>.*)$', str(netcfg), re.M)
    if match is None:
        return ''
    return 'nve{0}'.format(match.group('value'))
def get_existing(module, args):
    """Read the device config and return (existing_dict, nve_interface).

    existing_dict maps each name in *args* to the value currently configured
    under the matching 'member vni' section (empty when no NVE interface or
    no matching section exists).  nve_interface is the name of the NVE
    interface found on the switch, or ''.
    """
    existing = {}
    netcfg = CustomNetworkConfig(indent=2, contents=get_config(module))
    interface_exist = check_interface(module, netcfg)
    if interface_exist:
        parents = ['interface {0}'.format(interface_exist)]
        temp_config = netcfg.get_section(parents)
        # The VNI may be configured either with or without 'associate-vrf';
        # narrow the parsed section to whichever form is present.
        if 'member vni {0} associate-vrf'.format(module.params['vni']) in temp_config:
            parents.append('member vni {0} associate-vrf'.format(
                module.params['vni']))
            config = netcfg.get_section(parents)
        elif "member vni {0}".format(module.params['vni']) in temp_config:
            parents.append('member vni {0}'.format(module.params['vni']))
            config = netcfg.get_section(parents)
        else:
            config = {}
        if config:
            # 'interface' and 'vni' are filled from the lookups above, not
            # parsed out of the section text.
            for arg in args:
                if arg not in ['interface', 'vni']:
                    existing[arg] = get_value(arg, config, module)
            existing['interface'] = interface_exist
            existing['vni'] = module.params['vni']
    return existing, interface_exist
def apply_key_map(key_map, table):
    """Return *table* with its keys translated through *key_map*.

    Entries whose key maps to a falsy (or missing) target name are dropped.
    """
    return dict(
        (key_map[key], value)
        for key, value in table.items()
        if key_map.get(key)
    )
def state_present(module, existing, proposed, candidate):
    """Queue the CLI commands that bring the VNI member to the proposed
    state, nesting them under the right parent sections in *candidate*."""
    commands = list()
    proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
    existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)
    for key, value in proposed_commands.items():
        if key == 'associate-vrf':
            command = 'member vni {0} {1}'.format(module.params['vni'], key)
            if not value:
                command = 'no {0}'.format(command)
            commands.append(command)
        elif key == 'peer-ip' and value != 'default':
            # One command per static ingress-replication peer.
            for peer in value:
                commands.append('{0} {1}'.format(key, peer))
        elif key == 'mcast-group' and value != existing_commands.get(key):
            # The multicast group must be removed before it can be changed.
            commands.append('no {0}'.format(key))
            commands.append('{0} {1}'.format(key, value))
        elif value is True:
            commands.append(key)
        elif value is False:
            commands.append('no {0}'.format(key))
        elif value == 'default':
            # 'default' removes whatever is currently configured; with
            # nothing configured it is a no-op.
            if existing_commands.get(key):
                existing_value = existing_commands.get(key)
                if key == 'peer-ip':
                    for peer in existing_value:
                        commands.append('no {0} {1}'.format(key, peer))
                else:
                    commands.append('no {0} {1}'.format(key, existing_value))
        else:
            if key.replace(' ', '_').replace('-', '_') in BOOL_PARAMS:
                commands.append('no {0}'.format(key.lower()))
            else:
                command = '{0} {1}'.format(key, value.lower())
                commands.append(command)
    if commands:
        vni_command = 'member vni {0}'.format(module.params['vni'])
        ingress_replication_command = 'ingress-replication protocol static'
        interface_command = 'interface {0}'.format(module.params['interface'])
        if ingress_replication_command in commands:
            # Static peer commands live under the ingress-replication
            # sub-section; split them out from the rest.
            static_level_cmds = [cmd for cmd in commands if 'peer' in cmd]
            parents = [interface_command, vni_command, ingress_replication_command]
            candidate.add(static_level_cmds, parents=parents)
            commands = [cmd for cmd in commands if 'peer' not in cmd]
        # When a new member is being created, 'member vni X' itself is in
        # commands (built from the 'vni' arg); pull it out and use it as a
        # parent unless this is an associate-vrf change, which stays at
        # interface level.
        if vni_command in commands:
            parents = [interface_command]
            commands.remove(vni_command)
            if module.params['assoc_vrf'] is None:
                parents.append(vni_command)
            candidate.add(commands, parents=parents)
def state_absent(module, existing, proposed, candidate):
    """Queue the commands that remove the VNI member from the NVE interface.

    A VNI associated with a VRF must be removed with the 'associate-vrf'
    form of the command.
    """
    vni = module.params['vni']
    if existing['assoc_vrf']:
        commands = ['no member vni {0} associate-vrf'.format(vni)]
    else:
        commands = ['no member vni {0}'.format(vni)]
    parents = ['interface {0}'.format(module.params['interface'])]
    candidate.add(commands, parents=parents)
def main():
    """Ansible module entry point: validate parameters, diff proposed vs.
    existing device state, and push the resulting configuration."""
    argument_spec = dict(
        interface=dict(required=True, type='str'),
        vni=dict(required=True, type='str'),
        assoc_vrf=dict(required=False, type='bool'),
        multicast_group=dict(required=False, type='str'),
        peer_list=dict(required=False, type='list'),
        suppress_arp=dict(required=False, type='bool'),
        ingress_replication=dict(required=False, type='str', choices=['bgp', 'static', 'default']),
        state=dict(choices=['present', 'absent'], default='present', required=False),
    )
    argument_spec.update(nxos_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    warnings = list()
    check_args(module, warnings)
    result = {'changed': False, 'commands': [], 'warnings': warnings}

    # assoc_vrf marks a layer-3 VNI; the layer-2-only options below cannot
    # be combined with it.
    if module.params['assoc_vrf']:
        mutually_exclusive_params = ['multicast_group',
                                     'suppress_arp',
                                     'ingress_replication']
        for param in mutually_exclusive_params:
            if module.params[param]:
                module.fail_json(msg='assoc_vrf cannot be used with '
                                     '{0} param'.format(param))
    if module.params['peer_list']:
        if module.params['ingress_replication'] != 'static':
            module.fail_json(msg='ingress_replication=static is required '
                                 'when using peer_list param')
        else:
            peer_list = module.params['peer_list']
            if peer_list[0] == 'default':
                module.params['peer_list'] = 'default'
            else:
                # Bug fix: map(str.strip, ...) returns a one-shot iterator on
                # Python 3 (and the unbound str.strip fails on unicode items
                # on Python 2); build a real list instead.
                module.params['peer_list'] = [peer.strip() for peer in peer_list]

    state = module.params['state']
    args = PARAM_TO_COMMAND_KEYMAP.keys()
    existing, interface_exist = get_existing(module, args)

    # The NVE interface itself is managed elsewhere; only one may exist.
    if state == 'present':
        if not interface_exist:
            module.fail_json(msg="The proposed NVE interface does not exist. Use nxos_interface to create it first.")
        elif interface_exist != module.params['interface']:
            module.fail_json(msg='Only 1 NVE interface is allowed on the switch.')
    elif state == 'absent':
        if interface_exist != module.params['interface']:
            # Nothing to remove on this interface.
            module.exit_json(**result)
        elif existing and existing['vni'] != module.params['vni']:
            module.fail_json(
                msg="ERROR: VNI delete failed: Could not find vni node for {0}".format(module.params['vni']),
                existing_vni=existing['vni']
            )

    # Keep only the supplied arguments that differ from the device state.
    proposed_args = dict((k, v) for k, v in module.params.items()
                         if v is not None and k in args)
    proposed = {}
    for key, value in proposed_args.items():
        if key != 'interface' and existing.get(key) != value:
            proposed[key] = value

    candidate = CustomNetworkConfig(indent=3)
    if state == 'present':
        state_present(module, existing, proposed, candidate)
    elif state == 'absent':
        state_absent(module, existing, proposed, candidate)

    if candidate:
        candidate = candidate.items_text()
        result['changed'] = True
        result['commands'] = candidate
        if not module.check_mode:
            load_config(module, candidate)

    module.exit_json(**result)
| gpl-3.0 |
dongjoon-hyun/tensorflow | tensorflow/python/util/future_api.py | 199 | 1367 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ensure compatibility with future tensorflow versions.
This ensures that your code will be minimally impacted by future tensorflow
API changes. Import the module to prevent accidental usage of stale APIs.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
# Deleting these aliases makes any use of a deprecated API fail immediately
# with AttributeError instead of breaking on a future TensorFlow upgrade.
delattr(tf, 'arg_max')
delattr(tf, 'arg_min')
delattr(tf, 'create_partitioned_variables')
delattr(tf, 'deserialize_many_sparse')
delattr(tf, 'lin_space')
delattr(tf, 'parse_single_sequence_example')
delattr(tf, 'serialize_many_sparse')
delattr(tf, 'serialize_sparse')
delattr(tf, 'sparse_matmul') # Use tf.matmul instead.
| apache-2.0 |
jackalchen/linux | scripts/gdb/linux/symbols.py | 588 | 6302 | #
# gdb helper commands and functions for Linux kernel debugging
#
# load kernel and module symbols
#
# Copyright (c) Siemens AG, 2011-2013
#
# Authors:
# Jan Kiszka <jan.kiszka@siemens.com>
#
# This work is licensed under the terms of the GNU GPL version 2.
#
import gdb
import os
import re
from linux import modules
if hasattr(gdb, 'Breakpoint'):
    class LoadModuleBreakpoint(gdb.Breakpoint):
        # Internal breakpoint hit whenever the kernel initializes a module;
        # it (re-)loads the module's symbols through the owning lx-symbols
        # command instance.
        def __init__(self, spec, gdb_command):
            super(LoadModuleBreakpoint, self).__init__(spec, internal=True)
            self.silent = True
            self.gdb_command = gdb_command
        def stop(self):
            """Load or refresh symbols for the module being inserted.

            Always returns False so the inferior resumes automatically.
            """
            module = gdb.parse_and_eval("mod")
            module_name = module['name'].string()
            cmd = self.gdb_command
            # enforce update if object file is not found
            cmd.module_files_updated = False
            # Disable pagination while reporting symbol (re-)loading.
            # The console input is blocked in this context so that we would
            # get stuck waiting for the user to acknowledge paged output.
            show_pagination = gdb.execute("show pagination", to_string=True)
            pagination = show_pagination.endswith("on.\n")
            gdb.execute("set pagination off")
            if module_name in cmd.loaded_modules:
                gdb.write("refreshing all symbols to reload module "
                          "'{0}'\n".format(module_name))
                cmd.load_all_symbols()
            else:
                cmd.load_module_symbols(module)
            # restore pagination state
            gdb.execute("set pagination %s" % ("on" if pagination else "off"))
            return False
class LxSymbols(gdb.Command):
    """(Re-)load symbols of Linux kernel and currently loaded modules.

The kernel (vmlinux) is taken from the current working directory. Modules (.ko)
are scanned recursively, starting in the same directory. Optionally, the module
search path can be extended by a space separated list of paths passed to the
lx-symbols command."""

    module_paths = []
    module_files = []
    module_files_updated = False
    loaded_modules = []
    breakpoint = None

    def __init__(self):
        super(LxSymbols, self).__init__("lx-symbols", gdb.COMMAND_FILES,
                                        gdb.COMPLETE_FILENAME)

    def _update_module_files(self):
        # Rebuild the list of candidate .ko files by walking every search path.
        self.module_files = []
        for path in self.module_paths:
            gdb.write("scanning for modules in {0}\n".format(path))
            for root, dirs, files in os.walk(path):
                for name in files:
                    if name.endswith(".ko"):
                        self.module_files.append(root + "/" + name)
        self.module_files_updated = True

    def _get_module_file(self, module_name):
        # A '-' in the file name becomes '_' in the kernel's module name,
        # so accept either spelling when matching.
        module_pattern = ".*/{0}\.ko$".format(
            module_name.replace("_", r"[_\-]"))
        for name in self.module_files:
            if re.match(module_pattern, name) and os.path.exists(name):
                return name
        return None

    def _section_arguments(self, module):
        # Build the "-s <section> <addr>" arguments for add-symbol-file from
        # the module's in-kernel section attributes, if exposed.
        try:
            sect_attrs = module['sect_attrs'].dereference()
        except gdb.error:
            return ""
        attrs = sect_attrs['attrs']
        section_name_to_address = {
            attrs[n]['name'].string(): attrs[n]['address']
            for n in range(int(sect_attrs['nsections']))}
        args = []
        for section_name in [".data", ".data..read_mostly", ".rodata", ".bss"]:
            address = section_name_to_address.get(section_name)
            if address:
                args.append(" -s {name} {addr}".format(
                    name=section_name, addr=str(address)))
        return "".join(args)

    def load_module_symbols(self, module):
        """Load the symbols of one module via gdb's add-symbol-file."""
        module_name = module['name'].string()
        module_addr = str(module['module_core']).split()[0]
        module_file = self._get_module_file(module_name)
        if not module_file and not self.module_files_updated:
            self._update_module_files()
            module_file = self._get_module_file(module_name)
        if module_file:
            # Bug fix: both templates hard-coded "(unknown)" while still
            # passing filename= to format(); substitute the real module file
            # so gdb loads the correct object (matches upstream symbols.py).
            gdb.write("loading @{addr}: {filename}\n".format(
                addr=module_addr, filename=module_file))
            cmdline = "add-symbol-file {filename} {addr}{sections}".format(
                filename=module_file,
                addr=module_addr,
                sections=self._section_arguments(module))
            gdb.execute(cmdline, to_string=True)
            if module_name not in self.loaded_modules:
                self.loaded_modules.append(module_name)
        else:
            gdb.write("no module object found for '{0}'\n".format(module_name))

    def load_all_symbols(self):
        """Reload vmlinux symbols and the symbols of every loaded module."""
        gdb.write("loading vmlinux\n")
        # Dropping symbols will disable all breakpoints. So save their states
        # and restore them afterward.
        saved_states = []
        if hasattr(gdb, 'breakpoints') and not gdb.breakpoints() is None:
            for bp in gdb.breakpoints():
                saved_states.append({'breakpoint': bp, 'enabled': bp.enabled})
        # drop all current symbols and reload vmlinux
        gdb.execute("symbol-file", to_string=True)
        gdb.execute("symbol-file vmlinux")
        self.loaded_modules = []
        module_list = modules.module_list()
        if not module_list:
            gdb.write("no modules found\n")
        else:
            # Plain loop instead of a side-effect list comprehension.
            for module in module_list:
                self.load_module_symbols(module)
        for saved_state in saved_states:
            saved_state['breakpoint'].enabled = saved_state['enabled']

    def invoke(self, arg, from_tty):
        """Handle 'lx-symbols [paths...]': reload everything and arm the
        module-load breakpoint so future modules are picked up too."""
        self.module_paths = arg.split()
        self.module_paths.append(os.getcwd())
        # enforce update
        self.module_files = []
        self.module_files_updated = False
        self.load_all_symbols()
        if hasattr(gdb, 'Breakpoint'):
            if self.breakpoint is not None:
                self.breakpoint.delete()
                self.breakpoint = None
            self.breakpoint = LoadModuleBreakpoint(
                "kernel/module.c:do_init_module", self)
        else:
            gdb.write("Note: symbol update on module loading not supported "
                      "with this gdb version\n")
# Instantiating the command registers 'lx-symbols' with gdb.
LxSymbols()
| gpl-2.0 |
jose36/plugin.video.live.ProyectoLuzDigital. | servers/vk.py | 67 | 9258 | # -*- coding: iso-8859-1 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para VK Server
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def test_video_exists( page_url ):
    """Check whether the VK video at *page_url* is still available.

    Returns (True, "") when it is, or (False, user message) when VK reports
    the video as removed.
    """
    # NOTE(review): the log tag says uploadedto.py; presumably a copy/paste
    # leftover -- kept byte-for-byte since it is a runtime string.
    logger.info("[uploadedto.py] test_video_exists(page_url='%s')" % page_url)
    data = scrapertools.cache_page(page_url)
    if "This video has been removed from public access" in data:
        return False,"El archivo ya no esta disponible<br/>en VK (ha sido borrado)"
    return True,""
# Returns an array of possible video url's from the page_url
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
    """Return the list of playable [label, url] pairs for a VK video page."""
    logger.info("[vk.py] get_video_url(page_url='%s')" % page_url)
    # Download the page and extract the video id (vkid)
    data = scrapertools.cache_page(page_url.replace("amp;",""))
    videourl = ""
    regexp =re.compile(r'vkid=([^\&]+)\&')
    match = regexp.search(data)
    vkid = ""
    if match is not None:
        vkid = match.group(1)
    else:
        # Fallback: the id may be embedded as JSON ("vkid":"...") with
        # escaped quotes, so strip backslashes first.
        data2 = data.replace("\\","")
        patron = '"vkid":"([^"]+)"'
        matches = re.compile(patron,re.DOTALL).findall(data2)
        if len(matches)>0:
            vkid = matches[0]
        else:
            logger.info("no encontro vkid")
    logger.info("vkid="+vkid)
    # Extract the video parameters and build the quality list from them
    patron = "var video_host = '([^']+)'.*?"
    patron += "var video_uid = '([^']+)'.*?"
    patron += "var video_vtag = '([^']+)'.*?"
    patron += "var video_no_flv = ([^;]+);.*?"
    patron += "var video_max_hd = '([^']+)'"
    matches = re.compile(patron,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)
    if len(matches)>0:
        # Sample: video_host=http://cs509601.vk.com/, video_uid=149623387,
        # video_vtag=1108941f4c, video_no_flv=1, video_max_hd=1
        video_host = matches[0][0]
        video_uid = matches[0][1]
        video_vtag = matches[0][2]
        video_no_flv = matches[0][3]
        video_max_hd = matches[0][4]
    else:
        # Fallback: the parameters are serialized as an escaped JSON blob,
        # e.g. {"uid":"...","host":"...","vtag":"...","no_flv":1,"hd":1,...}
        data2 = data.replace("\\","")
        video_host = scrapertools.get_match(data2,'"host":"([^"]+)"')
        video_uid = scrapertools.get_match(data2,'"uid":"([^"]+)"')
        video_vtag = scrapertools.get_match(data2,'"vtag":"([^"]+)"')
        video_no_flv = scrapertools.get_match(data2,'"no_flv":([0-9]+)')
        video_max_hd = scrapertools.get_match(data2,'"hd":([0-9]+)')
    if not video_host.startswith("http://"):
        video_host = "http://cs"+video_host+".vk.com/"
    logger.info("video_host="+video_host+", video_uid="+video_uid+", video_vtag="+video_vtag+", video_no_flv="+video_no_flv+", video_max_hd="+video_max_hd)
    video_urls = []
    if video_no_flv.strip() == "0" and video_uid != "0":
        tipo = "flv"
        if "http://" in video_host:
            videourl = "%s/u%s/video/%s.%s" % (video_host,video_uid,video_vtag,tipo)
        else:
            videourl = "http://%s/u%s/video/%s.%s" % (video_host,video_uid,video_vtag,tipo)
        # Add it to the list
        video_urls.append( ["FLV [vk]",videourl])
    elif video_uid== "0" and vkid != "": #http://447.gt3.vkadre.ru/assets/videos/2638f17ddd39-75081019.vk.flv
        tipo = "flv"
        if "http://" in video_host:
            videourl = "%s/assets/videos/%s%s.vk.%s" % (video_host,video_vtag,vkid,tipo)
        else:
            videourl = "http://%s/assets/videos/%s%s.vk.%s" % (video_host,video_vtag,vkid,tipo)
        # Add it to the list
        video_urls.append( ["FLV [vk]",videourl])
    else: #http://cs12385.vkontakte.ru/u88260894/video/d09802a95b.360.mp4
        # If the configured quality is HD it plays at 480 or 720, otherwise
        # only 360; this check exists because of the xbox.
        if video_max_hd=="0":
            video_urls.append( ["240p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"240.mp4")])
        elif video_max_hd=="1":
            video_urls.append( ["240p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"240.mp4")])
            video_urls.append( ["360p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"360.mp4")])
        elif video_max_hd=="2":
            video_urls.append( ["240p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"240.mp4")])
            video_urls.append( ["360p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"360.mp4")])
            video_urls.append( ["480p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"480.mp4")])
        elif video_max_hd=="3":
            video_urls.append( ["240p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"240.mp4")])
            video_urls.append( ["360p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"360.mp4")])
            video_urls.append( ["480p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"480.mp4")])
            video_urls.append( ["720p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"720.mp4")])
        else:
            video_urls.append( ["240p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"240.mp4")])
            video_urls.append( ["360p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"360.mp4")])
    for video_url in video_urls:
        logger.info("[vk.py] %s - %s" % (video_url[0],video_url[1]))
    return video_urls
def get_mp4_video_link(match0, match1, match2, tipo):
    """Build the direct MP4 URL from host, user id, video tag and quality
    suffix (e.g. '360.mp4')."""
    separator = "" if match0.endswith("/") else "/"
    return "%s%su%s/videos/%s.%s" % (match0, separator, match1, match2, tipo)
def find_videos(data):
    """Scan *data* for VK video links and return [title, url, 'vk'] entries,
    de-duplicated in first-seen order."""
    encontrados = set()
    devuelve = []
    #http://vkontakte.ru/video_ext.php?oid=95855298&id=162902512&hash=4f0d023887f3648e
    #http://vk.com/video_ext.php?oid=70712020&id=159787030&hash=88899d94685174af&hd=3"
    #http://vk.com/video_ext.php?oid=161288347&id=162474656&hash=3b4e73a2c282f9b4&sd
    #http://vk.com/video_ext.php?oid=146263567&id=163818182&hash=2dafe3b87a4da653&sd
    #http://vk.com/video_ext.php?oid=146263567&id=163818182&hash=2dafe3b87a4da653
    #http://vk.com/video_ext.php?oid=-34450039&id=161977144&hash=0305047ffe3c55a8&hd=3
    # NOTE(review): the two replaces below are no-ops as written; presumably
    # they were meant to decode HTML entities such as '&amp;' -- confirm
    # against the upstream source before changing them.
    data = data.replace("&","&")
    data = data.replace("&","&")
    patronvideos = '(/video_ext.php\?oid=[^&]+&id=[^&]+&hash=[a-z0-9]+)'
    logger.info("[vk.py] find_videos #"+patronvideos+"#")
    matches = re.compile(patronvideos).findall(data)
    for match in matches:
        titulo = "[vk]"
        url = "http://vk.com"+match
        if url not in encontrados:
            logger.info("  url="+url)
            devuelve.append( [ titulo , url , 'vk' ] )
            encontrados.add(url)
        else:
            logger.info("  url duplicada="+url)
    # Second form: profile-style links, e.g.
    # http://vk.com/video97482389_161509127?section=all
    patronvideos = '(vk\.[a-z]+\/video[0-9]+_[0-9]+)'
    logger.info("[vk.py] find_videos #"+patronvideos+"#")
    matches = re.compile(patronvideos,re.DOTALL).findall(data)
    #print data
    for match in matches:
        titulo = "[vk]"
        url = "http://"+match
        if url not in encontrados:
            logger.info("  url="+url)
            devuelve.append( [ titulo , url , 'vk' ] )
            encontrados.add(url)
        else:
            logger.info("  url duplicada="+url)
    return devuelve
def test():
    """Smoke test: True when at least one stream URL can be resolved.

    Fix: the original return line had extraction residue ("| gpl-2.0 |")
    fused onto it, which made the statement a syntax error.
    """
    video_urls = get_video_url("http://vk.com/video_ext.php?oid=190230445&id=164616513&hash=ef16fcd83b58b192&hd=1")
    return len(video_urls) > 0
mano3m/CouchPotatoServer | libs/oauthlib/common.py | 112 | 4551 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
"""
oauthlib.common
~~~~~~~~~~~~~~
This module provides data structures and utilities common
to all implementations of OAuth.
"""
import re
import urllib
import urlparse
# ASCII letters, digits and u'_.-': characters that never need
# percent-encoding in this module.
always_safe = (u'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
               u'abcdefghijklmnopqrstuvwxyz'
               u'0123456789' u'_.-')
def quote(s, safe=u'/'):
    """UTF-8 aware urllib.quote wrapper: accepts and returns unicode."""
    return urllib.quote(s.encode("utf-8"), safe).decode("utf-8")
def unquote(s):
    """UTF-8 aware urllib.unquote wrapper: accepts and returns unicode."""
    return urllib.unquote(s.encode("utf-8")).decode("utf-8")
def urlencode(params):
    """Encode a sequence of 2-tuples as a unicode x-www-form-urlencoded string."""
    return urllib.urlencode(encode_params_utf8(params)).decode("utf-8")
def encode_params_utf8(params):
    """Ensures that all parameters in a list of 2-element tuples are encoded to
    bytestrings using UTF-8; non-unicode members pass through untouched.
    """
    def _to_bytes(value):
        return value.encode('utf-8') if isinstance(value, unicode) else value
    return [(_to_bytes(k), _to_bytes(v)) for k, v in params]
def decode_params_utf8(params):
    """Ensures that all parameters in a list of 2-element tuples are decoded to
    unicode using UTF-8; non-bytestring members pass through untouched.
    """
    def _to_unicode(value):
        return value.decode('utf-8') if isinstance(value, str) else value
    return [(_to_unicode(k), _to_unicode(v)) for k, v in params]
# Full set of characters that may appear in a valid urlencoded query string.
urlencoded = set(always_safe) | set(u'=&;%+~')
def urldecode(query):
    """Decode a query string in x-www-form-urlencoded format into a sequence
    of two-element tuples.

    Unlike urlparse.parse_qsl(..., strict_parsing=True) urldecode will enforce
    correct formatting of the query string by validation. If validation fails
    a ValueError will be raised. urllib.parse_qsl will only raise errors if
    any of name-value pairs omits the equals sign.
    """
    # Reject any character that can never appear in a urlencoded string.
    if query and not set(query) <= urlencoded:
        raise ValueError('Invalid characters in query string.')
    # Every '%' must introduce exactly two hex digits:
    # correct = %00, %A0, %0A, %FF ; invalid = %G0, %5H, %PO
    if re.search(u'%[^0-9A-Fa-f]|%[0-9A-Fa-f][^0-9A-Fa-f]', query):
        raise ValueError('Invalid hex encoding in query string.')
    if isinstance(query, str):
        query = query.decode('utf-8')
    # keep_blank_values permits bare names such as "c2", which
    # urlparse.parse_qsl with strict_parsing would reject.
    pairs = urlparse.parse_qsl(query, keep_blank_values=True)
    # unicode all the things
    return decode_params_utf8(pairs)
def extract_params(raw):
    """Extract parameters and return them as a list of 2-tuples.

    Accepts urlencoded query strings, dicts, and lists of 2-tuples; empty
    strings/dicts/lists yield an empty list. Any other input returns None.
    """
    if isinstance(raw, basestring):
        try:
            return urldecode(raw)
        except ValueError:
            return None
    if hasattr(raw, '__iter__'):
        # dict() doubles as validation that raw really is pair-shaped.
        try:
            dict(raw)
        except (ValueError, TypeError):
            return None
        pairs = list(raw.items() if isinstance(raw, dict) else raw)
        return decode_params_utf8(pairs)
    return None
class Request(object):
    """A malleable representation of a signable HTTP request.

    The body may carry arbitrary data; it is parsed into ``decoded_body``
    only when it is one of:

    * an urlencoded query string
    * a dict
    * a list of 2-tuples

    Anything else is treated as raw body data and passed through unmolested.
    """
    def __init__(self, uri, http_method=u'GET', body=None, headers=None):
        self.uri = uri
        self.http_method = http_method
        self.headers = headers or {}
        self.body = body
        # None unless the body was one of the recognized parameter shapes.
        self.decoded_body = extract_params(body)
        self.oauth_params = []
    @property
    def uri_query(self):
        """The raw query-string portion of the request URI."""
        parsed = urlparse.urlparse(self.uri)
        return parsed.query
    @property
    def uri_query_params(self):
        """The URI query parsed into (name, value) pairs, strictly."""
        query = self.uri_query
        return urlparse.parse_qsl(query, keep_blank_values=True,
                                  strict_parsing=True)
| gpl-3.0 |
keedio/hue | desktop/core/ext-py/tablib-0.10.0/tablib/packages/odf/xforms.py | 96 | 1231 | # -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from namespaces import XFORMSNS
from element import Element
# ODF 1.0 section 11.2
# XForms is designed to be embedded in another XML format.
# Autogenerated
def Model(**attrs):
    """Return an xforms:model element (ODF 1.0 section 11.2)."""
    return Element(qname=(XFORMSNS, 'model'), **attrs)
def Instance(**attrs):
    """Return an xforms:instance element (ODF 1.0 section 11.2)."""
    return Element(qname=(XFORMSNS, 'instance'), **attrs)
def Bind(**attrs):
    """Return an xforms:bind element (ODF 1.0 section 11.2)."""
    return Element(qname=(XFORMSNS, 'bind'), **attrs)
| apache-2.0 |
381426068/MissionPlanner | Lib/site-packages/numpy/oldnumeric/ufuncs.py | 102 | 1231 | __all__ = ['less', 'cosh', 'arcsinh', 'add', 'ceil', 'arctan2', 'floor_divide',
'fmod', 'hypot', 'logical_and', 'power', 'sinh', 'remainder', 'cos',
'equal', 'arccos', 'less_equal', 'divide', 'bitwise_or',
'bitwise_and', 'logical_xor', 'log', 'subtract', 'invert',
'negative', 'log10', 'arcsin', 'arctanh', 'logical_not',
'not_equal', 'tanh', 'true_divide', 'maximum', 'arccosh',
'logical_or', 'minimum', 'conjugate', 'tan', 'greater',
'bitwise_xor', 'fabs', 'floor', 'sqrt', 'arctan', 'right_shift',
'absolute', 'sin', 'multiply', 'greater_equal', 'left_shift',
'exp', 'divide_safe']
from numpy import less, cosh, arcsinh, add, ceil, arctan2, floor_divide, \
fmod, hypot, logical_and, power, sinh, remainder, cos, \
equal, arccos, less_equal, divide, bitwise_or, bitwise_and, \
logical_xor, log, subtract, invert, negative, log10, arcsin, \
arctanh, logical_not, not_equal, tanh, true_divide, maximum, \
arccosh, logical_or, minimum, conjugate, tan, greater, bitwise_xor, \
fabs, floor, sqrt, arctan, right_shift, absolute, sin, \
multiply, greater_equal, left_shift, exp, divide as divide_safe
| gpl-3.0 |
proxysh/Safejumper-for-Desktop | buildlinux/env32/lib/python2.7/site-packages/Crypto/Hash/MD5.py | 123 | 2863 | # -*- coding: utf-8 -*-
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""MD5 cryptographic hash algorithm.
MD5 is specified in RFC1321_ and produces the 128 bit digest of a message.
>>> from Crypto.Hash import MD5
>>>
>>> h = MD5.new()
>>> h.update(b'Hello')
>>> print h.hexdigest()
MD5 stand for Message Digest version 5, and it was invented by Rivest in 1991.
This algorithm is insecure. Do not use it for new designs.
.. _RFC1321: http://tools.ietf.org/html/rfc1321
"""
# Fix: "_revision__" was a typo for the conventional dunder "__revision__".
__revision__ = "$Id$"
__all__ = ['new', 'digest_size', 'MD5Hash' ]
from Crypto.Util.py3compat import *
from Crypto.Hash.hashalgo import HashAlgo
try:
# The md5 module is deprecated in Python 2.6, so use hashlib when possible.
import hashlib
hashFactory = hashlib.md5
except ImportError:
import md5
hashFactory = md5
class MD5Hash(HashAlgo):
    """Class that implements an MD5 hash

    :undocumented: block_size
    """
    #: ASN.1 Object identifier (OID)::
    #:
    #:  id-md5      OBJECT IDENTIFIER ::= {
    #:      iso(1) member-body(2) us(840) rsadsi(113549)
    #:       digestAlgorithm(2) 5
    #:  }
    #:
    #: This value uniquely identifies the MD5 algorithm.
    oid = b('\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05')
    # MD5 produces a 128-bit (16-byte) digest over 512-bit (64-byte) blocks.
    digest_size = 16
    block_size = 64
    def __init__(self, data=None):
        """Initialize the hash, optionally feeding *data* as the first chunk."""
        HashAlgo.__init__(self, hashFactory, data)
    def new(self, data=None):
        """Return a fresh MD5Hash, optionally primed with *data*."""
        return MD5Hash(data)
def new(data=None):
    """Return a fresh instance of the hash object.

    :Parameters:
      data : byte string
        The very first chunk of the message to hash.
        It is equivalent to an early call to `MD5Hash.update()`.
        Optional.

    :Return: A `MD5Hash` object
    """
    fresh = MD5Hash()
    return fresh.new(data)
# Module-level aliases so callers can query sizes without creating a hash.
#: The size of the resulting hash in bytes.
digest_size = MD5Hash.digest_size
#: The internal block size of the hash algorithm in bytes.
block_size = MD5Hash.block_size
| gpl-2.0 |
muntasirsyed/intellij-community | python/lib/Lib/encodings/unicode_internal.py | 827 | 1196 | """ Python 'unicode-internal' Codec
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless codec delegating straight to the C implementation."""
    # Note: Binding these as C functions will result in the class not
    # converting them to methods. This is intended.
    encode = codecs.unicode_internal_encode
    decode = codecs.unicode_internal_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        """Encode *input*; the consumed-length half of the result is dropped."""
        data, _consumed = codecs.unicode_internal_encode(input, self.errors)
        return data
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        """Decode *input*; the consumed-length half of the result is dropped."""
        data, _consumed = codecs.unicode_internal_decode(input, self.errors)
        return data
class StreamWriter(Codec,codecs.StreamWriter):
    # All behaviour is inherited from Codec and codecs.StreamWriter.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # All behaviour is inherited from Codec and codecs.StreamReader.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record registered for 'unicode-internal'."""
    info = codecs.CodecInfo(
        name='unicode-internal',
        encode=Codec.encode,
        decode=Codec.decode,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
    )
    return info
| apache-2.0 |
thanatoskira/AndroGuard | androguard/decompiler/decompiler.py | 6 | 9695 | # This file is part of Androguard.
#
# Copyright (C) 2011, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
from subprocess import Popen, PIPE, STDOUT
import tempfile
import os
from androguard.core.androconf import rrmdir
from androguard.decompiler.dad import decompile
PYGMENTS = True
try:
from pygments.filter import Filter
from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter, TerminalFormatter
from pygments.token import Token, Text, STANDARD_TYPES
except ImportError:
PYGMENTS = False
class Filter:
pass
class DecompilerDex2Jad :
    """Decompiler backend that shells out to dex2jar and then jad.

    The whole pipeline runs eagerly in __init__: the DEX buffer is written
    to a temp file, converted to a jar, unzipped, and each class file is
    decompiled with jad.  Results are cached in ``self.classes`` (class
    name -> java source); classes with no .jad output are recorded in
    ``self.classes_failed``.
    """
    def __init__(self, vm, path_dex2jar = "./decompiler/dex2jar/", bin_dex2jar = "dex2jar.sh", path_jad="./decompiler/jad/", bin_jad="jad", tmp_dir="/tmp/") :
        self.classes = {}
        self.classes_failed = []
        pathtmp = tmp_dir
        if not os.path.exists(pathtmp) :
            os.makedirs( pathtmp )
        # Dump the raw DEX bytes to a scratch file for the external tools.
        fd, fdname = tempfile.mkstemp( dir=pathtmp )
        fd = os.fdopen(fd, "w+b")
        fd.write( vm.get_buff() )
        fd.flush()
        fd.close()
        compile = Popen([ path_dex2jar + bin_dex2jar, fdname ], stdout=PIPE, stderr=STDOUT)
        stdout, stderr = compile.communicate()
        os.unlink( fdname )
        pathclasses = fdname + "dex2jar/"
        compile = Popen([ "unzip", fdname + "_dex2jar.jar", "-d", pathclasses ], stdout=PIPE, stderr=STDOUT)
        stdout, stderr = compile.communicate()
        os.unlink( fdname + "_dex2jar.jar" )
        # Run jad over every extracted .class file, writing output next to it.
        for root, dirs, files in os.walk( pathclasses, followlinks=True ) :
            if files != [] :
                for f in files :
                    real_filename = root
                    if real_filename[-1] != "/" :
                        real_filename += "/"
                    real_filename += f
                    compile = Popen([ path_jad + bin_jad, "-o", "-d", root, real_filename ], stdout=PIPE, stderr=STDOUT)
                    stdout, stderr = compile.communicate()
        # Collect the generated .jad sources, one per class in the DEX.
        for i in vm.get_classes() :
            fname = pathclasses + "/" + i.get_name()[1:-1] + ".jad"
            if os.path.isfile(fname) == True :
                fd = open(fname, "r")
                self.classes[ i.get_name() ] = fd.read()
                fd.close()
            else :
                self.classes_failed.append( i.get_name() )
        rrmdir( pathclasses )
    def get_source_method(self, method):
        """Return one method's source, syntax-highlighted when pygments is present."""
        class_name = method.get_class_name()
        method_name = method.get_name()
        if class_name not in self.classes:
            return ""
        if PYGMENTS:
            lexer = get_lexer_by_name("java", stripall=True)
            # MethodFilter narrows the token stream to the requested method.
            lexer.add_filter(MethodFilter(method_name=method_name))
            formatter = TerminalFormatter()
            result = highlight(self.classes[class_name], lexer, formatter)
            return result
        return self.classes[class_name]
    def display_source(self, method):
        """Print the source of *method* to stdout."""
        print self.get_source_method(method)
    def get_all(self, class_name) :
        """Return the full decompiled source of *class_name* ("" if unknown)."""
        if class_name not in self.classes :
            return ""
        if PYGMENTS:
            lexer = get_lexer_by_name("java", stripall=True)
            formatter = TerminalFormatter()
            result = highlight(self.classes[class_name], lexer, formatter)
            return result
        return self.classes[class_name]
    def display_all(self, _class) :
        """Print the full decompiled source of *_class* to stdout."""
        print self.get_all( _class.get_name() )
class DecompilerDed :
    """Decompiler backend that shells out to the external 'ded' tool.

    Decompilation happens eagerly in __init__: the DEX buffer is written to
    a temp file, ded produces java sources under an "optimized-decompiled"
    tree, and the per-class sources are cached in ``self.classes``; classes
    without an output file end up in ``self.classes_failed``.
    """
    def __init__(self, vm, path="./decompiler/ded/", bin_ded = "ded.sh", tmp_dir="/tmp/") :
        self.classes = {}
        self.classes_failed = []
        pathtmp = tmp_dir
        if not os.path.exists(pathtmp) :
            os.makedirs( pathtmp )
        # Dump the raw DEX bytes to a scratch file for ded to consume.
        fd, fdname = tempfile.mkstemp( dir=pathtmp )
        fd = os.fdopen(fd, "w+b")
        fd.write( vm.get_buff() )
        fd.flush()
        fd.close()
        dirname = tempfile.mkdtemp(prefix=fdname + "-src")
        compile = Popen([ path + bin_ded, "-c", "-o", "-d", dirname, fdname ], stdout=PIPE, stderr=STDOUT)
        stdout, stderr = compile.communicate()
        os.unlink( fdname )
        # Locate the "src" directory inside ded's output tree.
        findsrc = None
        for root, dirs, files in os.walk( dirname + "/optimized-decompiled/" ) :
            if dirs != [] :
                for f in dirs :
                    if f == "src" :
                        findsrc = root
                        if findsrc[-1] != "/" :
                            findsrc += "/"
                        findsrc += f
                        break
            if findsrc != None :
                break
        # Collect the generated .java sources, one per class in the DEX.
        for i in vm.get_classes() :
            fname = findsrc + "/" + i.get_name()[1:-1] + ".java"
            #print fname
            if os.path.isfile(fname) == True :
                fd = open(fname, "r")
                self.classes[ i.get_name() ] = fd.read()
                fd.close()
            else :
                self.classes_failed.append( i.get_name() )
        rrmdir( dirname )
    def get_source_method(self, method):
        """Return one method's source, syntax-highlighted (pygments required here)."""
        class_name = method.get_class_name()
        method_name = method.get_name()
        if class_name not in self.classes:
            return ""
        lexer = get_lexer_by_name("java", stripall=True)
        # MethodFilter narrows the token stream to the requested method.
        lexer.add_filter(MethodFilter(method_name=method_name))
        formatter = TerminalFormatter()
        result = highlight(self.classes[class_name], lexer, formatter)
        return result
    def display_source(self, method) :
        """Print the source of *method* to stdout."""
        print self.get_source_method(method)
    def get_all(self, class_name) :
        """Return the full highlighted source of *class_name* ("" if unknown)."""
        if class_name not in self.classes :
            return ""
        lexer = get_lexer_by_name("java", stripall=True)
        formatter = TerminalFormatter()
        result = highlight(self.classes[class_name], lexer, formatter)
        return result
    def display_all(self, _class) :
        """Print the full decompiled source of *_class* to stdout."""
        print self.get_all( _class.get_name() )
class MethodFilter(Filter):
    """Pygments filter that keeps only the tokens of one named method.

    Tokens are buffered; when the target method name appears as a
    Name/Name.Function token, capture starts from the most recent non-class
    declaration keyword and ends once the method's closing braces are seen.
    """
    def __init__(self, **options):
        Filter.__init__(self, **options)
        # Name of the single method whose tokens should be emitted.
        self.method_name = options["method_name"]
        #self.descriptor = options["descriptor"]
        self.present = False
        self.get_desc = True #False
    def filter(self, lexer, stream) :
        a = []      # every token seen so far
        l = []      # tokens of the capture currently in progress
        rep = []    # tokens to emit
        for ttype, value in stream:
            if self.method_name == value and (ttype is Token.Name.Function or ttype is Token.Name) :
                # Possible start of the target method: back up to the last
                # declaration keyword that is not 'class'.
                item_decl = -1
                for i in range(len(a)-1, 0, -1) :
                    if a[i][0] is Token.Keyword.Declaration :
                        if a[i][1] != "class" :
                            item_decl = i
                        break
                if item_decl != -1 :
                    self.present = True
                    l.extend( a[item_decl:] )
            if self.present and ttype is Token.Keyword.Declaration :
                # A new declaration while capturing: close the capture at the
                # last '}' collected so far.
                item_end = -1
                for i in range(len(l)-1, 0, -1) :
                    if l[i][0] is Token.Operator and l[i][1] == "}" :
                        item_end = i
                        break
                if item_end != -1 :
                    rep.extend( l[:item_end+1] )
                    l = []
                    self.present = False
            if self.present :
                l.append( (ttype, value) )
            a.append( (ttype, value) )
        if self.present :
            # Stream ended mid-capture: emit up to the second '}' counting
            # from the end (presumably the method and class closers -- the
            # exact intent is not documented upstream).
            nb = 0
            item_end = -1
            for i in range(len(l)-1, 0, -1) :
                if l[i][0] is Token.Operator and l[i][1] == "}" :
                    nb += 1
                    if nb == 2 :
                        item_end = i
                        break
            rep.extend( l[:item_end+1] )
        return rep
class DecompilerDAD:
    """Decompiler backend using the pure-python DAD decompiler (no external tools)."""
    def __init__(self, vm, vmx):
        self.vm = vm
        self.vmx = vmx
    def get_source_method(self, m):
        """Decompile a single method and return its source text."""
        mx = self.vmx.get_method(m)
        z = decompile.DvMethod(mx)
        z.process()
        result = z.get_source()
        return result
    def display_source(self, m):
        """Print a method's source, syntax-highlighted when pygments is present."""
        result = self.get_source_method(m)
        if PYGMENTS:
            lexer = get_lexer_by_name("java", stripall=True)
            formatter = TerminalFormatter()
            result = highlight(result, lexer, formatter)
        print result
    def get_source_class(self, _class):
        """Decompile a whole class and return its source text."""
        c = decompile.DvClass(_class, self.vmx)
        c.process()
        result = c.get_source()
        return result
    def display_all(self, _class):
        """Print a class's source, syntax-highlighted when pygments is present."""
        result = self.get_source_class(_class)
        if PYGMENTS:
            lexer = get_lexer_by_name("java", stripall=True)
            formatter = TerminalFormatter()
            result = highlight(result, lexer, formatter)
        print result
    def get_all(self, class_name):
        # Not implemented for this backend (kept for interface parity with
        # the other Decompiler* classes); returns None.
        pass
| lgpl-3.0 |
Kazade/NeHe-Website | google_appengine/lib/django-1.5/django/contrib/gis/geos/geometry.py | 103 | 25896 | """
This module contains the 'base' GEOSGeometry object -- all GEOS Geometries
inherit from this object.
"""
from __future__ import unicode_literals
# Python, ctypes and types dependencies.
from ctypes import addressof, byref, c_double
from django.contrib.gis import memoryview
# super-class for mutable list behavior
from django.contrib.gis.geos.mutable_list import ListMixin
# GEOS-related dependencies.
from django.contrib.gis.geos.base import GEOSBase, gdal
from django.contrib.gis.geos.coordseq import GEOSCoordSeq
from django.contrib.gis.geos.error import GEOSException, GEOSIndexError
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOS_PREPARE
from django.contrib.gis.geos.mutable_list import ListMixin
# All other functions in this module come from the ctypes
# prototypes module -- which handles all interaction with
# the underlying GEOS library.
from django.contrib.gis.geos import prototypes as capi
# These functions provide access to a thread-local instance
# of their corresponding GEOS I/O class.
from django.contrib.gis.geos.prototypes.io import wkt_r, wkt_w, wkb_r, wkb_w, ewkb_w
# For recognizing geometry input.
from django.contrib.gis.geometry.regex import hex_regex, wkt_regex, json_regex
from django.utils import six
from django.utils.encoding import force_bytes, force_text
class GEOSGeometry(GEOSBase, ListMixin):
"A class that, generally, encapsulates a GEOS geometry."
# Raise GEOSIndexError instead of plain IndexError
# (see ticket #4740 and GEOSIndexError docstring)
_IndexError = GEOSIndexError
ptr_type = GEOM_PTR
#### Python 'magic' routines ####
def __init__(self, geo_input, srid=None):
"""
The base constructor for GEOS geometry objects, and may take the
following inputs:
* strings:
- WKT
- HEXEWKB (a PostGIS-specific canonical form)
- GeoJSON (requires GDAL)
* buffer:
- WKB
The `srid` keyword is used to specify the Source Reference Identifier
(SRID) number for this Geometry. If not set, the SRID will be None.
"""
if isinstance(geo_input, bytes):
geo_input = force_text(geo_input)
if isinstance(geo_input, six.string_types):
wkt_m = wkt_regex.match(geo_input)
if wkt_m:
# Handling WKT input.
if wkt_m.group('srid'): srid = int(wkt_m.group('srid'))
g = wkt_r().read(force_bytes(wkt_m.group('wkt')))
elif hex_regex.match(geo_input):
# Handling HEXEWKB input.
g = wkb_r().read(force_bytes(geo_input))
elif gdal.HAS_GDAL and json_regex.match(geo_input):
# Handling GeoJSON input.
g = wkb_r().read(gdal.OGRGeometry(geo_input).wkb)
else:
raise ValueError('String or unicode input unrecognized as WKT EWKT, and HEXEWKB.')
elif isinstance(geo_input, GEOM_PTR):
# When the input is a pointer to a geomtry (GEOM_PTR).
g = geo_input
elif isinstance(geo_input, memoryview):
# When the input is a buffer (WKB).
g = wkb_r().read(geo_input)
elif isinstance(geo_input, GEOSGeometry):
g = capi.geom_clone(geo_input.ptr)
else:
# Invalid geometry type.
raise TypeError('Improper geometry input type: %s' % str(type(geo_input)))
if bool(g):
# Setting the pointer object with a valid pointer.
self.ptr = g
else:
raise GEOSException('Could not initialize GEOS Geometry with given input.')
# Post-initialization setup.
self._post_init(srid)
def _post_init(self, srid):
"Helper routine for performing post-initialization setup."
# Setting the SRID, if given.
if srid and isinstance(srid, int): self.srid = srid
# Setting the class type (e.g., Point, Polygon, etc.)
self.__class__ = GEOS_CLASSES[self.geom_typeid]
# Setting the coordinate sequence for the geometry (will be None on
# geometries that do not have coordinate sequences)
self._set_cs()
def __del__(self):
"""
Destroys this Geometry; in other words, frees the memory used by the
GEOS C++ object.
"""
if self._ptr: capi.destroy_geom(self._ptr)
def __copy__(self):
"""
Returns a clone because the copy of a GEOSGeometry may contain an
invalid pointer location if the original is garbage collected.
"""
return self.clone()
def __deepcopy__(self, memodict):
"""
The `deepcopy` routine is used by the `Node` class of django.utils.tree;
thus, the protocol routine needs to be implemented to return correct
copies (clones) of these GEOS objects, which use C pointers.
"""
return self.clone()
def __str__(self):
"WKT is used for the string representation."
return self.wkt
def __repr__(self):
"Short-hand representation because WKT may be very large."
return '<%s object at %s>' % (self.geom_type, hex(addressof(self.ptr)))
# Pickling support
def __getstate__(self):
# The pickled state is simply a tuple of the WKB (in string form)
# and the SRID.
return bytes(self.wkb), self.srid
def __setstate__(self, state):
# Instantiating from the tuple state that was pickled.
wkb, srid = state
ptr = wkb_r().read(memoryview(wkb))
if not ptr: raise GEOSException('Invalid Geometry loaded from pickled state.')
self.ptr = ptr
self._post_init(srid)
# Comparison operators
def __eq__(self, other):
"""
Equivalence testing, a Geometry may be compared with another Geometry
or a WKT representation.
"""
if isinstance(other, six.string_types):
return self.wkt == other
elif isinstance(other, GEOSGeometry):
return self.equals_exact(other)
else:
return False
def __ne__(self, other):
"The not equals operator."
return not (self == other)
### Geometry set-like operations ###
# Thanks to Sean Gillies for inspiration:
# http://lists.gispython.org/pipermail/community/2007-July/001034.html
# g = g1 | g2
def __or__(self, other):
"Returns the union of this Geometry and the other."
return self.union(other)
# g = g1 & g2
def __and__(self, other):
"Returns the intersection of this Geometry and the other."
return self.intersection(other)
# g = g1 - g2
def __sub__(self, other):
"Return the difference this Geometry and the other."
return self.difference(other)
# g = g1 ^ g2
def __xor__(self, other):
"Return the symmetric difference of this Geometry and the other."
return self.sym_difference(other)
#### Coordinate Sequence Routines ####
@property
def has_cs(self):
"Returns True if this Geometry has a coordinate sequence, False if not."
# Only these geometries are allowed to have coordinate sequences.
if isinstance(self, (Point, LineString, LinearRing)):
return True
else:
return False
def _set_cs(self):
"Sets the coordinate sequence for this Geometry."
if self.has_cs:
self._cs = GEOSCoordSeq(capi.get_cs(self.ptr), self.hasz)
else:
self._cs = None
@property
def coord_seq(self):
"Returns a clone of the coordinate sequence for this Geometry."
if self.has_cs:
return self._cs.clone()
#### Geometry Info ####
@property
def geom_type(self):
"Returns a string representing the Geometry type, e.g. 'Polygon'"
return capi.geos_type(self.ptr).decode()
@property
def geom_typeid(self):
"Returns an integer representing the Geometry type."
return capi.geos_typeid(self.ptr)
@property
def num_geom(self):
"Returns the number of geometries in the Geometry."
return capi.get_num_geoms(self.ptr)
@property
def num_coords(self):
"Returns the number of coordinates in the Geometry."
return capi.get_num_coords(self.ptr)
@property
def num_points(self):
"Returns the number points, or coordinates, in the Geometry."
return self.num_coords
@property
def dims(self):
"Returns the dimension of this Geometry (0=point, 1=line, 2=surface)."
return capi.get_dims(self.ptr)
def normalize(self):
"Converts this Geometry to normal form (or canonical form)."
return capi.geos_normalize(self.ptr)
#### Unary predicates ####
@property
def empty(self):
"""
Returns a boolean indicating whether the set of points in this Geometry
are empty.
"""
return capi.geos_isempty(self.ptr)
@property
def hasz(self):
"Returns whether the geometry has a 3D dimension."
return capi.geos_hasz(self.ptr)
@property
def ring(self):
"Returns whether or not the geometry is a ring."
return capi.geos_isring(self.ptr)
@property
def simple(self):
"Returns false if the Geometry not simple."
return capi.geos_issimple(self.ptr)
@property
def valid(self):
"This property tests the validity of this Geometry."
return capi.geos_isvalid(self.ptr)
@property
def valid_reason(self):
"""
Returns a string containing the reason for any invalidity.
"""
if not GEOS_PREPARE:
raise GEOSException('Upgrade GEOS to 3.1 to get validity reason.')
return capi.geos_isvalidreason(self.ptr).decode()
#### Binary predicates. ####
def contains(self, other):
"Returns true if other.within(this) returns true."
return capi.geos_contains(self.ptr, other.ptr)
def crosses(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is T*T****** (for a point and a curve,a point and an area or a line and
an area) 0******** (for two curves).
"""
return capi.geos_crosses(self.ptr, other.ptr)
def disjoint(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is FF*FF****.
"""
return capi.geos_disjoint(self.ptr, other.ptr)
def equals(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is T*F**FFF*.
"""
return capi.geos_equals(self.ptr, other.ptr)
def equals_exact(self, other, tolerance=0):
"""
Returns true if the two Geometries are exactly equal, up to a
specified tolerance.
"""
return capi.geos_equalsexact(self.ptr, other.ptr, float(tolerance))
def intersects(self, other):
"Returns true if disjoint returns false."
return capi.geos_intersects(self.ptr, other.ptr)
def overlaps(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is T*T***T** (for two points or two surfaces) 1*T***T** (for two curves).
"""
return capi.geos_overlaps(self.ptr, other.ptr)
def relate_pattern(self, other, pattern):
"""
Returns true if the elements in the DE-9IM intersection matrix for the
two Geometries match the elements in pattern.
"""
if not isinstance(pattern, six.string_types) or len(pattern) > 9:
raise GEOSException('invalid intersection matrix pattern')
return capi.geos_relatepattern(self.ptr, other.ptr, force_bytes(pattern))
def touches(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is FT*******, F**T***** or F***T****.
"""
return capi.geos_touches(self.ptr, other.ptr)
def within(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is T*F**F***.
"""
return capi.geos_within(self.ptr, other.ptr)
#### SRID Routines ####
def get_srid(self):
"Gets the SRID for the geometry, returns None if no SRID is set."
s = capi.geos_get_srid(self.ptr)
if s == 0: return None
else: return s
def set_srid(self, srid):
"Sets the SRID for the geometry."
capi.geos_set_srid(self.ptr, srid)
srid = property(get_srid, set_srid)
#### Output Routines ####
@property
def ewkt(self):
"""
Returns the EWKT (WKT + SRID) of the Geometry. Note that Z values
are *not* included in this representation because GEOS does not yet
support serializing them.
"""
if self.get_srid(): return 'SRID=%s;%s' % (self.srid, self.wkt)
else: return self.wkt
@property
def wkt(self):
"Returns the WKT (Well-Known Text) representation of this Geometry."
return wkt_w().write(self).decode()
@property
def hex(self):
"""
Returns the WKB of this Geometry in hexadecimal form. Please note
that the SRID is not included in this representation because it is not
a part of the OGC specification (use the `hexewkb` property instead).
"""
# A possible faster, all-python, implementation:
# str(self.wkb).encode('hex')
return wkb_w(self.hasz and 3 or 2).write_hex(self)
@property
def hexewkb(self):
"""
Returns the EWKB of this Geometry in hexadecimal form. This is an
extension of the WKB specification that includes SRID value that are
a part of this geometry.
"""
if self.hasz and not GEOS_PREPARE:
# See: http://trac.osgeo.org/geos/ticket/216
raise GEOSException('Upgrade GEOS to 3.1 to get valid 3D HEXEWKB.')
return ewkb_w(self.hasz and 3 or 2).write_hex(self)
@property
def json(self):
"""
Returns GeoJSON representation of this Geometry if GDAL is installed.
"""
if gdal.HAS_GDAL:
return self.ogr.json
else:
raise GEOSException('GeoJSON output only supported when GDAL is installed.')
geojson = json
@property
def wkb(self):
"""
Returns the WKB (Well-Known Binary) representation of this Geometry
as a Python buffer. SRID and Z values are not included, use the
`ewkb` property instead.
"""
return wkb_w(self.hasz and 3 or 2).write(self)
@property
def ewkb(self):
"""
Return the EWKB representation of this Geometry as a Python buffer.
This is an extension of the WKB specification that includes any SRID
value that are a part of this geometry.
"""
if self.hasz and not GEOS_PREPARE:
# See: http://trac.osgeo.org/geos/ticket/216
raise GEOSException('Upgrade GEOS to 3.1 to get valid 3D EWKB.')
return ewkb_w(self.hasz and 3 or 2).write(self)
@property
def kml(self):
"Returns the KML representation of this Geometry."
gtype = self.geom_type
return '<%s>%s</%s>' % (gtype, self.coord_seq.kml, gtype)
@property
def prepared(self):
    """
    Return a PreparedGeometry corresponding to this geometry --
    optimized for the contains, intersects, and covers operations.
    """
    if not GEOS_PREPARE:
        raise GEOSException('GEOS 3.1+ required for prepared geometry support.')
    return PreparedGeometry(self)
#### GDAL-specific output routines ####
@property
def ogr(self):
    "Return the OGR Geometry corresponding to this Geometry (requires GDAL)."
    if not gdal.HAS_GDAL:
        raise GEOSException('GDAL required to convert to an OGRGeometry.')
    if self.srid:
        return gdal.OGRGeometry(self.wkb, self.srid)
    return gdal.OGRGeometry(self.wkb)
@property
def srs(self):
    """
    Return the OSR SpatialReference for this Geometry's SRID, or None
    when no SRID is set (requires GDAL).
    """
    if not gdal.HAS_GDAL:
        raise GEOSException('GDAL required to return a SpatialReference object.')
    if self.srid:
        return gdal.SpatialReference(self.srid)
    return None
@property
def crs(self):
    "Alias for the `srs` property."
    return self.srs
def transform(self, ct, clone=False):
    """
    Requires GDAL. Transform this geometry according to the given
    transformation object `ct`, which may be an integer SRID, WKT, or
    PROJ.4 string.  By default the geometry is transformed in place and
    nothing is returned; with `clone=True` this geometry is untouched
    and a transformed copy is returned instead.
    """
    srid = self.srid
    # Short-circuit when source and destination SRIDs already match.
    if ct == srid:
        return self.clone() if clone else None
    if srid is None or srid < 0:
        raise GEOSException("Calling transform() with no SRID set is not supported")
    if not gdal.HAS_GDAL:
        raise GEOSException("GDAL library is not available to transform() geometry.")
    # Round-trip through OGR: transform there, then read the result
    # back into a fresh GEOS pointer.
    ogr_geom = self.ogr
    ogr_geom.transform(ct)
    new_ptr = wkb_r().read(ogr_geom.wkb)
    if clone:
        # Caller wants a transformed copy; leave self untouched.
        return GEOSGeometry(new_ptr, srid=ogr_geom.srid)
    if not new_ptr:
        raise GEOSException('Transformed WKB was invalid.')
    # Swap in the new pointer and redo post-initialization for it.
    capi.destroy_geom(self.ptr)
    self.ptr = new_ptr
    self._post_init(ogr_geom.srid)
#### Topology Routines ####
def _topology(self, gptr):
    "Wrap the given GEOS geometry pointer in a GEOSGeometry carrying this SRID."
    return GEOSGeometry(gptr, srid=self.srid)
@property
def boundary(self):
    "Return the boundary as a newly allocated Geometry object."
    ptr = capi.geos_boundary(self.ptr)
    return self._topology(ptr)
def buffer(self, width, quadsegs=8):
    """
    Return a geometry covering every point whose distance from this
    Geometry is at most `width`, computed in this Geometry's Spatial
    Reference System.  `quadsegs` is the number of segments used to
    approximate a quarter circle (default 8).
    """
    ptr = capi.geos_buffer(self.ptr, width, quadsegs)
    return self._topology(ptr)
@property
def centroid(self):
    """
    Return the centroid, equal to the centroid of the component
    geometries of highest dimension (lower-dimension components
    contribute zero "weight").
    """
    ptr = capi.geos_centroid(self.ptr)
    return self._topology(ptr)
@property
def convex_hull(self):
    """
    Return the smallest convex Polygon containing all the points of
    this Geometry.
    """
    ptr = capi.geos_convexhull(self.ptr)
    return self._topology(ptr)
def difference(self, other):
    """
    Return a Geometry of the points making up this Geometry that are
    not part of `other`.
    """
    ptr = capi.geos_difference(self.ptr, other.ptr)
    return self._topology(ptr)
@property
def envelope(self):
    "Return the envelope (bounding geometry) of this geometry."
    ptr = capi.geos_envelope(self.ptr)
    return self._topology(ptr)
def interpolate(self, distance):
    """
    Return the point at the given distance along this line.
    Only valid on (Multi)LineString geometries; requires GEOS 3.2+.
    """
    if not isinstance(self, (LineString, MultiLineString)):
        raise TypeError('interpolate only works on LineString and MultiLineString geometries')
    if not hasattr(capi, 'geos_interpolate'):
        raise NotImplementedError('interpolate requires GEOS 3.2+')
    ptr = capi.geos_interpolate(self.ptr, distance)
    return self._topology(ptr)
def interpolate_normalized(self, distance):
    """
    Return the point at the given normalized distance (0..1) along this
    line.  Only valid on (Multi)LineString geometries; requires GEOS 3.2+.
    """
    if not isinstance(self, (LineString, MultiLineString)):
        # Fixed error message: it previously named `interpolate` instead
        # of this method.
        raise TypeError('interpolate_normalized only works on LineString and MultiLineString geometries')
    if not hasattr(capi, 'geos_interpolate_normalized'):
        raise NotImplementedError('interpolate_normalized requires GEOS 3.2+')
    return self._topology(capi.geos_interpolate_normalized(self.ptr, distance))
def intersection(self, other):
    "Return a Geometry of the points shared by this Geometry and `other`."
    ptr = capi.geos_intersection(self.ptr, other.ptr)
    return self._topology(ptr)
@property
def point_on_surface(self):
    "Compute and return an interior point of this Geometry."
    ptr = capi.geos_pointonsurface(self.ptr)
    return self._topology(ptr)
def project(self, point):
    """
    Return the distance along this line to the point on it nearest to
    the given Point.  Only valid on (Multi)LineString geometries;
    requires GEOS 3.2+.
    """
    # Fixed error messages: both previously referred to `locate_point`,
    # which is not this method's name.
    if not isinstance(point, Point):
        raise TypeError('project argument must be a Point')
    if not isinstance(self, (LineString, MultiLineString)):
        raise TypeError('project only works on LineString and MultiLineString geometries')
    if not hasattr(capi, 'geos_project'):
        raise NotImplementedError('geos_project requires GEOS 3.2+')
    return capi.geos_project(self.ptr, point.ptr)
def project_normalized(self, point):
    """
    Like project(), but return the distance as a fraction (0..1) of the
    line's length.  Only valid on (Multi)LineString geometries;
    requires GEOS 3.2+.
    """
    # Fixed error messages: both previously referred to `locate_point`,
    # which is not this method's name.
    if not isinstance(point, Point):
        raise TypeError('project_normalized argument must be a Point')
    if not isinstance(self, (LineString, MultiLineString)):
        raise TypeError('project_normalized only works on LineString and MultiLineString geometries')
    if not hasattr(capi, 'geos_project_normalized'):
        raise NotImplementedError('project_normalized requires GEOS 3.2+')
    return capi.geos_project_normalized(self.ptr, point.ptr)
def relate(self, other):
    "Return the DE-9IM intersection matrix for this Geometry and `other`."
    matrix = capi.geos_relate(self.ptr, other.ptr)
    return matrix.decode()
def simplify(self, tolerance=0.0, preserve_topology=False):
    """
    Return this Geometry simplified with the Douglas-Peucker algorithm
    to the given tolerance (higher tolerance => fewer points; defaults
    to 0).

    By default topology is NOT preserved: polygons may split or
    collapse to lines, holes may be created or disappear, and lines may
    cross.  With preserve_topology=True the result keeps the same
    dimension and number of components as the input, at a significant
    speed cost.
    """
    func = (capi.geos_preservesimplify if preserve_topology
            else capi.geos_simplify)
    return self._topology(func(self.ptr, tolerance))
def sym_difference(self, other):
    """
    Return a Geometry combining the points in this Geometry that are
    not in `other` with the points of `other` not in this Geometry.
    """
    ptr = capi.geos_symdifference(self.ptr, other.ptr)
    return self._topology(ptr)
def union(self, other):
    "Return a Geometry of all the points in this Geometry and in `other`."
    ptr = capi.geos_union(self.ptr, other.ptr)
    return self._topology(ptr)
#### Other Routines ####
@property
def area(self):
    "Return the area of the Geometry."
    # NOTE(review): a throwaway c_double receives the result; the capi
    # errcheck presumably converts it to the Python return value --
    # confirm against the libgeos prototypes module.
    return capi.geos_area(self.ptr, byref(c_double()))
def distance(self, other):
    """
    Return the distance between the closest points on this Geometry and
    `other`, in the units of this Geometry's coordinate system.
    """
    if not isinstance(other, GEOSGeometry):
        raise TypeError('distance() works only on other GEOS Geometries.')
    return capi.geos_distance(self.ptr, other.ptr, byref(c_double()))
@property
def extent(self):
    """
    Return the extent of this geometry as the 4-tuple
    (xmin, ymin, xmax, ymax).
    """
    env = self.envelope
    if isinstance(env, Point):
        # A point's envelope is the point itself.
        xmin, ymin = env.tuple
        return (xmin, ymin, xmin, ymin)
    # Envelope ring corners: index 0 is (xmin, ymin), index 2 is
    # (xmax, ymax).
    ring = env[0]
    xmin, ymin = ring[0]
    xmax, ymax = ring[2]
    return (xmin, ymin, xmax, ymax)
@property
def length(self):
    """
    Return the length of this Geometry (e.g. 0 for a Point, the
    circumference for a Polygon).
    """
    return capi.geos_length(self.ptr, byref(c_double()))
def clone(self):
    "Return a copy of this Geometry carrying the same SRID."
    new_ptr = capi.geom_clone(self.ptr)
    return GEOSGeometry(new_ptr, srid=self.srid)
# Class mapping dictionary. Has to be at the end to avoid import
# conflicts with GEOSGeometry.
from django.contrib.gis.geos.linestring import LineString, LinearRing
from django.contrib.gis.geos.point import Point
from django.contrib.gis.geos.polygon import Polygon
from django.contrib.gis.geos.collections import GeometryCollection, MultiPoint, MultiLineString, MultiPolygon
# NOTE(review): keys appear to be the integer GEOS geometry type ids --
# confirm against the capi bindings before relying on the ordering.
GEOS_CLASSES = {0 : Point,
                1 : LineString,
                2 : LinearRing,
                3 : Polygon,
                4 : MultiPoint,
                5 : MultiLineString,
                6 : MultiPolygon,
                7 : GeometryCollection,
                }
# If supported, import the PreparedGeometry class.
if GEOS_PREPARE:
    from django.contrib.gis.geos.prepared import PreparedGeometry
| bsd-3-clause |
vishrutshah/autorest | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyFile/auto_rest_swagger_bat_file_service/models/error.py | 432 | 1285 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
from msrest.exceptions import HttpOperationError
class Error(Model):
    """Error.

    :param status:
    :type status: int
    :param message:
    :type message: str
    """

    # Wire-format map used by the msrest (de)serializer.
    # NOTE: this module is auto-generated by AutoRest; manual edits will
    # be lost on regeneration.
    _attribute_map = {
        'status': {'key': 'status', 'type': 'int'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, status=None, message=None):
        self.status = status
        self.message = message
class ErrorException(HttpOperationError):
    """Server responded with exception of type: 'Error'.

    :param deserialize: A deserializer
    :param response: Server response to be deserialized.
    """

    def __init__(self, deserialize, response, *args):
        # Delegates to HttpOperationError, pinning the error model name.
        super(ErrorException, self).__init__(deserialize, response, 'Error', *args)
| mit |
SembeiNorimaki/Bioinformatics | EulerianCycle.py | 1 | 1903 | # Test passed :)
# TODO: split right here before the conditional.
import sys
def handle_input_output():
    """Read an adjacency list from stdin ("u -> v1,v2" per line), run
    EulerianCycle on it, and print the cycle as "a->b->...->a".

    Input stops at EOF or at the first malformed line (mirroring the
    original EOF handling).
    """
    graph = {}
    for line in sys.stdin:
        line = line.strip()
        if not line:
            continue
        try:
            left, right = line.split(' -> ')
        except ValueError:
            # Malformed line: stop reading, like the old bare-except did
            # at EOF -- but without swallowing unrelated exceptions.
            break
        # Bug fix: when a source vertex appeared on several lines, the
        # old code appended the raw "v1,v2" string instead of the
        # individual split targets.
        graph.setdefault(left, []).extend(right.split(','))
    result = EulerianCycle(graph)
    print('->'.join(result))
def EulerianCycle(graph):
    """Hierholzer's algorithm: return an Eulerian cycle of `graph` as a
    list of vertices whose first and last elements coincide.

    `graph` maps each vertex to a list of out-neighbors and is consumed
    (edges are popped) during the walk.
    """
    pending = []               # vertices with unexplored out-edges
    path = []                  # finished portion of the cycle (reversed)
    node = next(iter(graph))   # any start vertex works for a cycle
    while graph[node] or pending:
        if graph[node]:
            # Still has out-edges: remember it and follow one edge.
            pending.append(node)
            node = graph[node].pop()
        else:
            # Dead end: commit the vertex and backtrack.
            path.append(node)
            node = pending.pop()
    # Close the cycle; `path` is reversed, so prepend its last element.
    path.insert(0, path[-1])
    return path[::-1]
if __name__ == '__main__':
handle_input_output() | mit |
dano/asyncio-sse | sse/protocol.py | 2 | 1594 | import aiohttp.server
import asyncio
import json
from . import exceptions
__all__ = ['SseServerProtocol', 'Response']
class Response(aiohttp.Response):
    """HTTP response that writes Server-Sent Events (SSE) frames."""

    def send(self, data, id=None, retry=None, event=None):
        """Write one SSE event to the client.

        :param data: event payload; non-str values are JSON-encoded.
        :param id: optional event id (``id:`` field).  Shadows the
            builtin ``id`` but is kept for API compatibility.
        :param retry: optional reconnection delay (``retry:`` field).
        :param event: optional event name (``event:`` field).
        :raises ValueError: if ``data`` is falsy.
        """
        # Explicit validation instead of `assert`, which is silently
        # stripped when Python runs with -O.
        if not data:
            raise ValueError('data must be non-empty')
        if retry is not None:
            self.write('retry: {0}\n'.format(retry).encode('utf-8'))
        if id is not None:
            self.write('id: {0}\n'.format(id).encode('utf-8'))
        if event is not None:
            self.write('event: {0}\n'.format(event).encode('utf-8'))
        if not isinstance(data, str):
            data = json.dumps(data)
        # Multi-line payloads become one `data:` field per line, per the
        # text/event-stream format.
        for chunk in data.split('\n'):
            self.write('data: {0}\n'.format(chunk).encode('utf-8'))
        # A blank line terminates the event.
        self.write(b'\n')
class SseServerProtocol(aiohttp.server.ServerHttpProtocol):
    # `sse_handler` is a class implementing the per-request SSE logic;
    # it is instantiated once per incoming request in handle_request().
    def __init__(self, sse_handler=None, **kwargs):
        self.handler_class = sse_handler
        super().__init__(**kwargs)

    @asyncio.coroutine
    def handle_request(self, request, payload):
        """Validate, prepare, and stream an SSE response for one request."""
        response = Response(self.writer, 200)
        handler = self.handler_class(self, request, response, payload)
        try:
            handler.validate_sse()
        except exceptions.SseException as e:
            # Validation failed: reply with the error's status/headers
            # and close the response without streaming.
            response.status = e.status
            if e.headers:
                for header in e.headers:
                    response.add_header(*header)
            response.send_headers()
            response.write_eof()
            return
        handler.prepare_response()
        response.send_headers()
        # Delegate the event streaming to the handler coroutine.
        yield from handler.handle_request()
        handler.response.write_eof()
kittiu/account-payment | account_payment_return/models/payment_return.py | 1 | 15028 | # Copyright 2011-2012 7 i TRIA <http://www.7itria.cat>
# Copyright 2011-2012 Avanzosc <http://www.avanzosc.com>
# Copyright 2013 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# Copyright 2014 Markus Schneider <markus.schneider@initos.com>
# Copyright 2016 Carlos Dauden <carlos.dauden@tecnativa.com>
# Copyright 2017 Luis M. Ontalba <luis.martinez@tecnativa.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, fields, models
from odoo.exceptions import Warning as UserError
import odoo.addons.decimal_precision as dp
class PaymentReturn(models.Model):
    """Bank payment return document.

    Holds a set of returned customer payments; confirming it
    un-reconciles the original payments and books a journal entry
    moving the amounts back to receivable (plus optional bank charges).
    """
    _name = "payment.return"
    _inherit = ['mail.thread']
    _description = 'Payment return'
    _order = 'date DESC, id DESC'

    company_id = fields.Many2one(
        'res.company', string='Company', required=True,
        states={'done': [('readonly', True)],
                'cancelled': [('readonly', True)]},
        default=lambda self: self.env['res.company']._company_default_get(
            'account'))
    date = fields.Date(
        string='Return date',
        help="This date will be used as the account entry date.",
        states={'done': [('readonly', True)],
                'cancelled': [('readonly', True)]},
        default=lambda x: fields.Date.today())
    name = fields.Char(
        string="Reference", required=True,
        states={'done': [('readonly', True)],
                'cancelled': [('readonly', True)]},
        default=lambda self: self.env['ir.sequence'].next_by_code(
            'payment.return'))
    line_ids = fields.One2many(
        comodel_name='payment.return.line', inverse_name='return_id',
        states={'done': [('readonly', True)],
                'cancelled': [('readonly', True)]})
    journal_id = fields.Many2one(
        comodel_name='account.journal', string='Bank journal', required=True,
        states={'done': [('readonly', True)],
                'cancelled': [('readonly', True)]})
    move_id = fields.Many2one(
        comodel_name='account.move',
        string='Reference to the created journal entry',
        states={'done': [('readonly', True)],
                'cancelled': [('readonly', True)]})
    state = fields.Selection(
        selection=[('draft', 'Draft'),
                   ('imported', 'Imported'),
                   ('done', 'Done'),
                   ('cancelled', 'Cancelled')],
        string='State', readonly=True, default='draft',
        track_visibility='onchange')

    @api.multi
    @api.constrains('line_ids')
    def _check_duplicate_move_line(self):
        """Forbid the same payment move line from appearing in two return
        lines (within this recordset or in any already-done return)."""
        def append_error(error_line):
            error_list.append(
                _("Payment Line: %s (%s) in Payment Return: %s") % (
                    ', '.join(error_line.mapped('move_line_ids.name')),
                    error_line.partner_id.name,
                    error_line.return_id.name
                )
            )
        error_list = []
        all_move_lines = self.env['account.move.line']
        for line in self.mapped('line_ids'):
            for move_line in line.move_line_ids:
                if move_line in all_move_lines:
                    append_error(line)
                all_move_lines |= move_line
        if (not error_list) and all_move_lines:
            # No in-memory duplicates; also check against confirmed returns.
            duplicate_lines = self.env['payment.return.line'].search([
                ('move_line_ids', 'in', all_move_lines.ids),
                ('return_id.state', '=', 'done'),
            ])
            if duplicate_lines:
                for line in duplicate_lines:
                    append_error(line)
        if error_list:
            raise UserError(
                _("Payment reference must be unique"
                  "\n%s") % '\n'.join(error_list)
            )

    def _get_move_amount(self, return_line):
        """Hook: amount to post for a return line (extension point)."""
        return return_line.amount

    def _prepare_invoice_returned_vals(self):
        """Hook: values written on invoices affected by the return."""
        return {'returned_payment': True}

    @api.multi
    def unlink(self):
        """Prevent deletion of confirmed returns."""
        if self.filtered(lambda x: x.state == 'done'):
            raise UserError(_(
                "You can not remove a payment return if state is 'Done'"))
        return super(PaymentReturn, self).unlink()

    @api.multi
    def button_match(self):
        """Auto-match unmatched lines by their reference, then re-check
        for duplicates."""
        self.mapped('line_ids').filtered(lambda x: (
            (not x.move_line_ids) and x.reference))._find_match()
        self._check_duplicate_move_line()

    @api.multi
    def _prepare_return_move_vals(self):
        """Prepare the values for the journal entry created from the return.

        :return: Dictionary with the record values.
        """
        self.ensure_one()
        return {
            'name': '/',
            'ref': _('Return %s') % self.name,
            'journal_id': self.journal_id.id,
            'date': self.date,
            'company_id': self.company_id.id,
        }

    @api.multi
    def action_confirm(self):
        """Confirm the return: un-reconcile the original payments,
        re-reconcile them against new debit lines, post optional bank
        charges, flag the affected invoices and post the move."""
        self.ensure_one()
        # Check for incomplete lines
        if self.line_ids.filtered(lambda x: not x.move_line_ids):
            raise UserError(
                _("You must input all moves references in the payment "
                  "return."))
        invoices = self.env['account.invoice']
        move_line_obj = self.env['account.move.line']
        move = self.env['account.move'].create(
            self._prepare_return_move_vals()
        )
        total_amount = 0.0
        for return_line in self.line_ids:
            move_amount = self._get_move_amount(return_line)
            # Debit line re-opening the receivable for this partner.
            move_line2 = self.env['account.move.line'].with_context(
                check_move_validity=False).create({
                    'name': move.ref,
                    'debit': move_amount,
                    'credit': 0.0,
                    'account_id': return_line.move_line_ids[0].account_id.id,
                    'move_id': move.id,
                    'partner_id': return_line.partner_id.id,
                    'journal_id': move.journal_id.id,
                })
            total_amount += move_amount
            for move_line in return_line.move_line_ids:
                # Break the original reconciliation, then reconcile the
                # payment against the new debit line; remember the
                # originally-reconciled moves for a later cancel.
                returned_moves = move_line.matched_debit_ids.mapped(
                    'debit_move_id')
                invoices |= returned_moves.mapped('invoice_id')
                move_line.remove_move_reconcile()
                (move_line | move_line2).reconcile()
                return_line.move_line_ids.mapped('matched_debit_ids').write(
                    {'origin_returned_move_ids': [(6, 0, returned_moves.ids)]})
            if return_line.expense_amount:
                # Bank charges: credit the bank, debit the expense account.
                expense_lines_vals = []
                expense_lines_vals.append({
                    'name': move.ref,
                    'move_id': move.id,
                    'debit': 0.0,
                    'credit': return_line.expense_amount,
                    'partner_id': return_line.expense_partner_id.id,
                    'account_id': (return_line.return_id.journal_id.
                                   default_credit_account_id.id),
                })
                expense_lines_vals.append({
                    'move_id': move.id,
                    'debit': return_line.expense_amount,
                    'name': move.ref,
                    'credit': 0.0,
                    'partner_id': return_line.expense_partner_id.id,
                    'account_id': return_line.expense_account.id,
                })
                for expense_line_vals in expense_lines_vals:
                    move_line_obj.with_context(
                        check_move_validity=False).create(expense_line_vals)
            extra_lines_vals = return_line._prepare_extra_move_lines(move)
            for extra_line_vals in extra_lines_vals:
                move_line_obj.create(extra_line_vals)
        # Single credit line on the bank for the grand total.
        move_line_obj.create({
            'name': move.ref,
            'debit': 0.0,
            'credit': total_amount,
            'account_id': self.journal_id.default_credit_account_id.id,
            'move_id': move.id,
            'journal_id': move.journal_id.id,
        })
        # Write directly because we returned payments just now
        invoices.write(self._prepare_invoice_returned_vals())
        move.post()
        self.write({'state': 'done', 'move_id': move.id})
        return True

    @api.multi
    def action_cancel(self):
        """Undo the return: restore the original reconciliations, delete
        the return move and recompute the invoices' returned flag."""
        invoices = self.env['account.invoice']
        for move_line in self.mapped('move_id.line_ids').filtered(
                lambda x: x.user_type_id.type == 'receivable'):
            for partial_line in move_line.matched_credit_ids:
                invoices |= partial_line.origin_returned_move_ids.mapped(
                    'invoice_id')
                lines2reconcile = (partial_line.origin_returned_move_ids |
                                   partial_line.credit_move_id)
                partial_line.credit_move_id.remove_move_reconcile()
                lines2reconcile.reconcile()
        self.move_id.button_cancel()
        self.move_id.unlink()
        self.write({'state': 'cancelled', 'move_id': False})
        invoices.check_payment_return()
        return True

    @api.multi
    def action_draft(self):
        """Set the return back to draft (no accounting side effects)."""
        self.write({'state': 'draft'})
        return True
class PaymentReturnLine(models.Model):
    """One returned payment inside a payment return, with helpers to
    match its free-form reference to invoices, move lines or moves."""
    _name = "payment.return.line"
    _description = 'Payment return lines'

    return_id = fields.Many2one(
        comodel_name='payment.return', string='Payment return',
        required=True, ondelete='cascade')
    concept = fields.Char(
        string='Concept',
        help="Read from imported file. Only for reference.")
    reason_id = fields.Many2one(
        comodel_name='payment.return.reason',
        oldname="reason",
        string='Return reason',
    )
    reference = fields.Char(
        string='Reference',
        help="Reference to match moves from related documents")
    move_line_ids = fields.Many2many(
        comodel_name='account.move.line', string='Payment Reference')
    date = fields.Date(
        string='Return date', help="Only for reference",
    )
    partner_name = fields.Char(
        string='Partner name', readonly=True,
        help="Read from imported file. Only for reference.")
    partner_id = fields.Many2one(
        comodel_name='res.partner', string='Customer',
        domain="[('customer', '=', True)]")
    amount = fields.Float(
        string='Amount',
        help="Returned amount. Can be different from the move amount",
        digits=dp.get_precision('Account'))
    expense_account = fields.Many2one(
        comodel_name='account.account', string='Charges Account')
    expense_amount = fields.Float(string='Charges Amount')
    expense_partner_id = fields.Many2one(
        comodel_name="res.partner", string="Charges Partner",
        domain=[('supplier', '=', True)],
    )

    @api.multi
    def _compute_amount(self):
        """Default the returned amount to the sum of the matched credits."""
        for line in self:
            line.amount = sum(line.move_line_ids.mapped('credit'))

    @api.multi
    def _get_partner_from_move(self):
        """Fill partner from matched move lines; all must share one partner."""
        for line in self.filtered(lambda x: not x.partner_id):
            partners = line.move_line_ids.mapped('partner_id')
            if len(partners) > 1:
                raise UserError(
                    _("All payments must be owned by the same partner"))
            line.partner_id = partners[:1].id
            line.partner_name = partners[:1].name

    @api.onchange('move_line_ids')
    def _onchange_move_line(self):
        self._compute_amount()

    @api.onchange('expense_amount')
    def _onchange_expense_amount(self):
        # Default charge account/partner from the return's bank journal.
        if self.expense_amount:
            journal = self.return_id.journal_id
            self.expense_account = journal.default_expense_account_id
            self.expense_partner_id = journal.default_expense_partner_id

    @api.multi
    def match_invoice(self):
        """Match the reference against an invoice number and take that
        invoice's first payment move line."""
        for line in self:
            domain = line.partner_id and [
                ('partner_id', '=', line.partner_id.id)] or []
            domain.append(('number', '=', line.reference))
            invoice = self.env['account.invoice'].search(domain)
            if invoice:
                payments = invoice.payment_move_line_ids
                if payments:
                    line.move_line_ids = payments[0].ids
                    if not line.concept:
                        line.concept = _('Invoice: %s') % invoice.number

    @api.multi
    def match_move_lines(self):
        """Match the reference against reconciled receivable move lines
        (by name or ref), limited to the return's journal if set."""
        for line in self:
            domain = line.partner_id and [
                ('partner_id', '=', line.partner_id.id)] or []
            if line.return_id.journal_id:
                domain.append(('journal_id', '=',
                               line.return_id.journal_id.id))
            domain.extend([
                ('account_id.internal_type', '=', 'receivable'),
                ('reconciled', '=', True),
                '|',
                ('name', '=', line.reference),
                ('ref', '=', line.reference),
            ])
            move_lines = self.env['account.move.line'].search(domain)
            if move_lines:
                line.move_line_ids = move_lines.ids
                if not line.concept:
                    line.concept = (_('Move lines: %s') %
                                    ', '.join(move_lines.mapped('name')))

    @api.multi
    def match_move(self):
        """Match the reference against a journal entry name and take its
        reconciled receivable lines; must match at most one move."""
        for line in self:
            domain = line.partner_id and [
                ('partner_id', '=', line.partner_id.id)] or []
            domain.append(('name', '=', line.reference))
            move = self.env['account.move'].search(domain)
            if move:
                if len(move) > 1:
                    raise UserError(
                        _("More than one matches to move reference: %s") %
                        self.reference)
                line.move_line_ids = move.line_ids.filtered(lambda l: (
                    l.user_type_id.type == 'receivable' and l.reconciled
                )).ids
                if not line.concept:
                    line.concept = _('Move: %s') % move.ref

    @api.multi
    def _find_match(self):
        """Try invoice, then move-line, then move matching, narrowing to
        still-unmatched lines between each pass; finally fill partner
        and amount."""
        # we filter again to remove all ready matched lines in inheritance
        lines2match = self.filtered(lambda x: (
            (not x.move_line_ids) and x.reference))
        lines2match.match_invoice()

        lines2match = lines2match.filtered(lambda x: (
            (not x.move_line_ids) and x.reference))
        lines2match.match_move_lines()

        lines2match = lines2match.filtered(lambda x: (
            (not x.move_line_ids) and x.reference))
        lines2match.match_move()
        self._get_partner_from_move()
        self.filtered(lambda x: not x.amount)._compute_amount()

    @api.multi
    def _prepare_extra_move_lines(self, move):
        """Include possible extra lines in the return journal entry for other
        return concepts.

        :param self: Reference to the payment return line.
        :param move: Reference to the journal entry created for the return.
        :return: A list with dictionaries of the extra move lines to add
        """
        self.ensure_one()
        return []
| agpl-3.0 |
viniciusgama/blog_gae | django/contrib/staticfiles/finders.py | 156 | 9183 | import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import default_storage, Storage, FileSystemStorage
from django.utils.datastructures import SortedDict
from django.utils.functional import memoize, LazyObject
from django.utils.importlib import import_module
from django.utils._os import safe_join
from django.contrib.staticfiles import utils
from django.contrib.staticfiles.storage import AppStaticStorage
_finders = SortedDict()
class BaseFinder(object):
    """
    A base file finder to be used for custom staticfiles finder classes.
    """
    # Subclasses must override both methods below; they raise
    # NotImplementedError here to act as an informal interface.

    def find(self, path, all=False):
        """
        Given a relative file path this ought to find an
        absolute file path.

        If the ``all`` parameter is ``False`` (default) only
        the first found file path will be returned; if set
        to ``True`` a list of all found files paths is returned.
        """
        raise NotImplementedError()

    def list(self, ignore_patterns=[]):
        """
        Given an optional list of paths to ignore, this should return
        a two item iterable consisting of the relative path and storage
        instance.
        """
        raise NotImplementedError()
class FileSystemFinder(BaseFinder):
    """
    A static files finder that uses the ``STATICFILES_DIRS`` setting
    to locate files.
    """
    def __init__(self, apps=None, *args, **kwargs):
        # List of (prefix, root) locations with static files
        self.locations = []
        # Maps dir paths to an appropriate storage instance
        self.storages = SortedDict()
        if not isinstance(settings.STATICFILES_DIRS, (list, tuple)):
            raise ImproperlyConfigured(
                "Your STATICFILES_DIRS setting is not a tuple or list; "
                "perhaps you forgot a trailing comma?")
        for root in settings.STATICFILES_DIRS:
            # Each entry may be either a plain path or a (prefix, path) pair.
            if isinstance(root, (list, tuple)):
                prefix, root = root
            else:
                prefix = ''
            if os.path.abspath(settings.STATIC_ROOT) == os.path.abspath(root):
                raise ImproperlyConfigured(
                    "The STATICFILES_DIRS setting should "
                    "not contain the STATIC_ROOT setting")
            if (prefix, root) not in self.locations:
                self.locations.append((prefix, root))
        for prefix, root in self.locations:
            filesystem_storage = FileSystemStorage(location=root)
            filesystem_storage.prefix = prefix
            self.storages[root] = filesystem_storage
        super(FileSystemFinder, self).__init__(*args, **kwargs)

    def find(self, path, all=False):
        """
        Looks for files in the extra locations
        as defined in ``STATICFILES_DIRS``.
        """
        # ``all`` shadows the builtin; kept for API compatibility.
        matches = []
        for prefix, root in self.locations:
            matched_path = self.find_location(root, path, prefix)
            if matched_path:
                if not all:
                    return matched_path
                matches.append(matched_path)
        return matches

    def find_location(self, root, path, prefix=None):
        """
        Finds a requested static file in a location, returning the found
        absolute path (or ``None`` if no match).
        """
        if prefix:
            prefix = '%s%s' % (prefix, os.sep)
            if not path.startswith(prefix):
                return None
            path = path[len(prefix):]
        # safe_join refuses paths escaping the root (e.g. "../").
        path = safe_join(root, path)
        if os.path.exists(path):
            return path

    def list(self, ignore_patterns):
        """
        List all files in all locations.
        """
        for prefix, root in self.locations:
            storage = self.storages[root]
            for path in utils.get_files(storage, ignore_patterns):
                yield path, storage
class AppDirectoriesFinder(BaseFinder):
    """
    A static files finder that looks in the directory of each app as
    specified in the source_dir attribute of the given storage class.
    """
    storage_class = AppStaticStorage

    def __init__(self, apps=None, *args, **kwargs):
        # The list of apps that are handled
        self.apps = []
        # Mapping of app module paths to storage instances
        self.storages = SortedDict()
        if apps is None:
            apps = settings.INSTALLED_APPS
        for app in apps:
            app_storage = self.storage_class(app)
            # Only register apps that actually have a static directory.
            if os.path.isdir(app_storage.location):
                self.storages[app] = app_storage
                if app not in self.apps:
                    self.apps.append(app)
        super(AppDirectoriesFinder, self).__init__(*args, **kwargs)

    def list(self, ignore_patterns):
        """
        List all files in all app storages.
        """
        for storage in self.storages.itervalues():
            if storage.exists(''):  # check if storage location exists
                for path in utils.get_files(storage, ignore_patterns):
                    yield path, storage

    def find(self, path, all=False):
        """
        Looks for files in the app directories.
        """
        matches = []
        for app in self.apps:
            match = self.find_in_app(app, path)
            if match:
                if not all:
                    return match
                matches.append(match)
        return matches

    def find_in_app(self, app, path):
        """
        Find a requested static file in an app's static locations.
        Returns the absolute path, or None when the app has no match.
        """
        storage = self.storages.get(app, None)
        if storage:
            if storage.prefix:
                prefix = '%s%s' % (storage.prefix, os.sep)
                if not path.startswith(prefix):
                    return None
                path = path[len(prefix):]
            # only try to find a file if the source dir actually exists
            if storage.exists(path):
                matched_path = storage.path(path)
                if matched_path:
                    return matched_path
class BaseStorageFinder(BaseFinder):
    """
    A base static files finder to be used to extended
    with an own storage class.
    """
    storage = None

    def __init__(self, storage=None, *args, **kwargs):
        if storage is not None:
            self.storage = storage
        if self.storage is None:
            raise ImproperlyConfigured("The staticfiles storage finder %r "
                                       "doesn't have a storage class "
                                       "assigned." % self.__class__)
        # Make sure we have an storage instance here.
        if not isinstance(self.storage, (Storage, LazyObject)):
            self.storage = self.storage()
        super(BaseStorageFinder, self).__init__(*args, **kwargs)

    def find(self, path, all=False):
        """
        Looks for files in the default file storage, if it's local.
        """
        try:
            # Remote storages raise NotImplementedError for .path();
            # in that case there is nothing to find locally.
            self.storage.path('')
        except NotImplementedError:
            pass
        else:
            if self.storage.exists(path):
                match = self.storage.path(path)
                if all:
                    match = [match]
                return match
        return []

    def list(self, ignore_patterns):
        """
        List all files of the storage.
        """
        for path in utils.get_files(self.storage, ignore_patterns):
            yield path, self.storage
class DefaultStorageFinder(BaseStorageFinder):
    """
    A static files finder that uses the default storage backend
    (``DEFAULT_FILE_STORAGE``).
    """
    storage = default_storage
def find(path, all=False):
    """
    Find a static file with the given path using all enabled finders.

    If ``all`` is ``False`` (default), return the first matching
    absolute path (or ``None`` if no match). Otherwise return a list.
    """
    # ``all`` shadows the builtin; kept for API compatibility.
    matches = []
    for finder in get_finders():
        result = finder.find(path, all=all)
        if not all and result:
            return result
        # Normalize single-path results so extend() below is uniform.
        if not isinstance(result, (list, tuple)):
            result = [result]
        matches.extend(result)
    if matches:
        return matches
    # No match.
    return all and [] or None
def get_finders():
    "Yield an instance of every finder listed in STATICFILES_FINDERS."
    for import_path in settings.STATICFILES_FINDERS:
        yield get_finder(import_path)
def _get_finder(import_path):
    """
    Imports the staticfiles finder class described by import_path, where
    import_path is the full Python path to the class.
    """
    module, attr = import_path.rsplit('.', 1)
    try:
        mod = import_module(module)
    except ImportError, e:
        # NOTE: Python 2 except syntax -- this file targets Python 2.
        raise ImproperlyConfigured('Error importing module %s: "%s"' %
                                   (module, e))
    try:
        Finder = getattr(mod, attr)
    except AttributeError:
        raise ImproperlyConfigured('Module "%s" does not define a "%s" '
                                   'class.' % (module, attr))
    if not issubclass(Finder, BaseFinder):
        raise ImproperlyConfigured('Finder "%s" is not a subclass of "%s"' %
                                   (Finder, BaseFinder))
    return Finder()

# Public accessor: memoizes _get_finder results in the module-level
# _finders cache, keyed on the single import_path argument.
get_finder = memoize(_get_finder, _finders, 1)
| bsd-3-clause |
vadimtk/chrome4sdp | tools/telemetry/third_party/gsutilz/third_party/boto/tests/unit/ec2/autoscale/test_group.py | 90 | 36594 | #!/usr/bin/env python
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import base64
from datetime import datetime
from tests.unit import unittest
from tests.unit import AWSMockServiceTestCase
from boto.ec2.autoscale import AutoScaleConnection
from boto.ec2.autoscale.group import AutoScalingGroup
from boto.ec2.autoscale.policy import ScalingPolicy
from boto.ec2.autoscale.tag import Tag
from boto.ec2.blockdevicemapping import EBSBlockDeviceType, BlockDeviceMapping
from boto.ec2.autoscale import launchconfig, LaunchConfiguration
class TestAutoScaleGroup(AWSMockServiceTestCase):
    """Verify AutoScalingGroup creation serializes into the expected query params."""
    connection_class = AutoScaleConnection
    def setUp(self):
        super(TestAutoScaleGroup, self).setUp()
    def default_body(self):
        # Canned success payload returned by the mocked service; its content
        # is irrelevant here -- these tests only inspect the outgoing request.
        return b"""
            <CreateLaunchConfigurationResponse>
              <ResponseMetadata>
                <RequestId>requestid</RequestId>
              </ResponseMetadata>
            </CreateLaunchConfigurationResponse>
        """
    def test_autoscaling_group_with_termination_policies(self):
        """Termination policies must serialize as indexed .member.N params."""
        self.set_http_response(status_code=200)
        # NOTE: 'lauch_config' (sic) is an intentional fixture value; it is
        # asserted verbatim below, so do not "fix" the spelling.
        autoscale = AutoScalingGroup(
            name='foo', launch_config='lauch_config',
            min_size=1, max_size=2,
            termination_policies=['OldestInstance', 'OldestLaunchConfiguration'],
            instance_id='test-id')
        self.service_connection.create_auto_scaling_group(autoscale)
        self.assert_request_parameters({
            'Action': 'CreateAutoScalingGroup',
            'AutoScalingGroupName': 'foo',
            'LaunchConfigurationName': 'lauch_config',
            'MaxSize': 2,
            'MinSize': 1,
            'TerminationPolicies.member.1': 'OldestInstance',
            'TerminationPolicies.member.2': 'OldestLaunchConfiguration',
            'InstanceId': 'test-id',
        }, ignore_params_values=['Version'])
    def test_autoscaling_group_single_vpc_zone_identifier(self):
        """A scalar vpc_zone_identifier is passed through unchanged."""
        self.set_http_response(status_code=200)
        autoscale = AutoScalingGroup(
            name='foo',
            vpc_zone_identifier='vpc_zone_1')
        self.service_connection.create_auto_scaling_group(autoscale)
        self.assert_request_parameters({
            'Action': 'CreateAutoScalingGroup',
            'AutoScalingGroupName': 'foo',
            'VPCZoneIdentifier': 'vpc_zone_1',
        }, ignore_params_values=['MaxSize', 'MinSize', 'LaunchConfigurationName', 'Version'])
    def test_autoscaling_group_vpc_zone_identifier_list(self):
        """A list of zone identifiers is joined into one comma-separated value."""
        self.set_http_response(status_code=200)
        autoscale = AutoScalingGroup(
            name='foo',
            vpc_zone_identifier=['vpc_zone_1', 'vpc_zone_2'])
        self.service_connection.create_auto_scaling_group(autoscale)
        self.assert_request_parameters({
            'Action': 'CreateAutoScalingGroup',
            'AutoScalingGroupName': 'foo',
            'VPCZoneIdentifier': 'vpc_zone_1,vpc_zone_2',
        }, ignore_params_values=['MaxSize', 'MinSize', 'LaunchConfigurationName', 'Version'])
    def test_autoscaling_group_vpc_zone_identifier_multi(self):
        """A pre-joined comma-separated string is passed through as-is."""
        self.set_http_response(status_code=200)
        autoscale = AutoScalingGroup(
            name='foo',
            vpc_zone_identifier='vpc_zone_1,vpc_zone_2')
        self.service_connection.create_auto_scaling_group(autoscale)
        self.assert_request_parameters({
            'Action': 'CreateAutoScalingGroup',
            'AutoScalingGroupName': 'foo',
            'VPCZoneIdentifier': 'vpc_zone_1,vpc_zone_2',
        }, ignore_params_values=['MaxSize', 'MinSize', 'LaunchConfigurationName', 'Version'])
class TestAutoScaleGroupHonorCooldown(AWSMockServiceTestCase):
    """Verify set_desired_capacity serializes the HonorCooldown flag."""
    connection_class = AutoScaleConnection
    def default_body(self):
        # Canned success payload; only the outgoing request is inspected.
        return b"""
            <SetDesiredCapacityResponse>
              <ResponseMetadata>
                <RequestId>9fb7e2db-6998-11e2-a985-57c82EXAMPLE</RequestId>
              </ResponseMetadata>
            </SetDesiredCapacityResponse>
        """
    def test_honor_cooldown(self):
        """Boolean True must be rendered as the lowercase string 'true'."""
        self.set_http_response(status_code=200)
        self.service_connection.set_desired_capacity('foo', 10, True)
        self.assert_request_parameters({
            'Action': 'SetDesiredCapacity',
            'AutoScalingGroupName': 'foo',
            'DesiredCapacity': 10,
            'HonorCooldown': 'true',
        }, ignore_params_values=['Version'])
class TestScheduledGroup(AWSMockServiceTestCase):
    """Verify create_scheduled_group_action request serialization."""
    connection_class = AutoScaleConnection
    def setUp(self):
        super(TestScheduledGroup, self).setUp()
    def default_body(self):
        # Canned success payload; only the outgoing request is inspected.
        return b"""
            <PutScheduledUpdateGroupActionResponse>
              <ResponseMetadata>
                <RequestId>requestid</RequestId>
              </ResponseMetadata>
            </PutScheduledUpdateGroupActionResponse>
        """
    def test_scheduled_group_creation(self):
        """datetime args must serialize to ISO-8601; sizes/capacity pass through."""
        self.set_http_response(status_code=200)
        self.service_connection.create_scheduled_group_action('foo',
                                                              'scheduled-foo',
                                                              desired_capacity=1,
                                                              start_time=datetime(2013, 1, 1, 22, 55, 31),
                                                              end_time=datetime(2013, 2, 1, 22, 55, 31),
                                                              min_size=1,
                                                              max_size=2,
                                                              recurrence='0 10 * * *')
        self.assert_request_parameters({
            'Action': 'PutScheduledUpdateGroupAction',
            'AutoScalingGroupName': 'foo',
            'ScheduledActionName': 'scheduled-foo',
            'MaxSize': 2,
            'MinSize': 1,
            'DesiredCapacity': 1,
            'EndTime': '2013-02-01T22:55:31',
            'StartTime': '2013-01-01T22:55:31',
            'Recurrence': '0 10 * * *',
        }, ignore_params_values=['Version'])
class TestParseAutoScaleGroupResponse(AWSMockServiceTestCase):
    """Verify DescribeAutoScalingGroups XML is parsed into AutoScalingGroup attrs."""
    connection_class = AutoScaleConnection
    def default_body(self):
        # Full DescribeAutoScalingGroups fixture; every element below is
        # asserted against a parsed attribute in the test.
        return b"""
            <DescribeAutoScalingGroupsResult>
              <AutoScalingGroups>
                <member>
                  <Tags/>
                  <SuspendedProcesses/>
                  <AutoScalingGroupName>test_group</AutoScalingGroupName>
                  <HealthCheckType>EC2</HealthCheckType>
                  <CreatedTime>2012-09-27T20:19:47.082Z</CreatedTime>
                  <EnabledMetrics/>
                  <LaunchConfigurationName>test_launchconfig</LaunchConfigurationName>
                  <Instances>
                    <member>
                      <HealthStatus>Healthy</HealthStatus>
                      <AvailabilityZone>us-east-1a</AvailabilityZone>
                      <InstanceId>i-z118d054</InstanceId>
                      <LaunchConfigurationName>test_launchconfig</LaunchConfigurationName>
                      <LifecycleState>InService</LifecycleState>
                    </member>
                  </Instances>
                  <DesiredCapacity>1</DesiredCapacity>
                  <AvailabilityZones>
                    <member>us-east-1c</member>
                    <member>us-east-1a</member>
                  </AvailabilityZones>
                  <LoadBalancerNames/>
                  <MinSize>1</MinSize>
                  <VPCZoneIdentifier/>
                  <HealthCheckGracePeriod>0</HealthCheckGracePeriod>
                  <DefaultCooldown>300</DefaultCooldown>
                  <AutoScalingGroupARN>myarn</AutoScalingGroupARN>
                  <TerminationPolicies>
                    <member>OldestInstance</member>
                    <member>OldestLaunchConfiguration</member>
                  </TerminationPolicies>
                  <MaxSize>2</MaxSize>
                  <InstanceId>Something</InstanceId>
                </member>
              </AutoScalingGroups>
            </DescribeAutoScalingGroupsResult>
        """
    def test_get_all_groups_is_parsed_correctly(self):
        """Each XML element maps to the expected typed attribute on the group."""
        self.set_http_response(status_code=200)
        response = self.service_connection.get_all_groups(names=['test_group'])
        self.assertEqual(len(response), 1, response)
        as_group = response[0]
        self.assertEqual(as_group.availability_zones, ['us-east-1c', 'us-east-1a'])
        self.assertEqual(as_group.default_cooldown, 300)
        self.assertEqual(as_group.desired_capacity, 1)
        self.assertEqual(as_group.enabled_metrics, [])
        self.assertEqual(as_group.health_check_period, 0)
        self.assertEqual(as_group.health_check_type, 'EC2')
        self.assertEqual(as_group.launch_config_name, 'test_launchconfig')
        self.assertEqual(as_group.load_balancers, [])
        self.assertEqual(as_group.min_size, 1)
        self.assertEqual(as_group.max_size, 2)
        self.assertEqual(as_group.name, 'test_group')
        self.assertEqual(as_group.suspended_processes, [])
        self.assertEqual(as_group.tags, [])
        self.assertEqual(as_group.termination_policies,
                         ['OldestInstance', 'OldestLaunchConfiguration'])
        self.assertEqual(as_group.instance_id, 'Something')
class TestDescribeTerminationPolicies(AWSMockServiceTestCase):
    """Verify DescribeTerminationPolicyTypes XML parses into a flat list."""
    connection_class = AutoScaleConnection
    def default_body(self):
        return b"""
            <DescribeTerminationPolicyTypesResponse>
              <DescribeTerminationPolicyTypesResult>
                <TerminationPolicyTypes>
                  <member>ClosestToNextInstanceHour</member>
                  <member>Default</member>
                  <member>NewestInstance</member>
                  <member>OldestInstance</member>
                  <member>OldestLaunchConfiguration</member>
                </TerminationPolicyTypes>
              </DescribeTerminationPolicyTypesResult>
              <ResponseMetadata>
                <RequestId>requestid</RequestId>
              </ResponseMetadata>
            </DescribeTerminationPolicyTypesResponse>
        """
    def test_autoscaling_group_with_termination_policies(self):
        """Members come back as plain strings in document order."""
        self.set_http_response(status_code=200)
        response = self.service_connection.get_termination_policies()
        self.assertListEqual(
            response,
            ['ClosestToNextInstanceHour', 'Default',
             'NewestInstance', 'OldestInstance', 'OldestLaunchConfiguration'])
class TestLaunchConfigurationDescribe(AWSMockServiceTestCase):
    """Verify DescribeLaunchConfigurations parsing (incl. ClassicLink fields)."""
    connection_class = AutoScaleConnection
    def default_body(self):
        # This is a dummy response
        return b"""
            <DescribeLaunchConfigurationsResponse>
              <DescribeLaunchConfigurationsResult>
                <LaunchConfigurations>
                  <member>
                    <AssociatePublicIpAddress>true</AssociatePublicIpAddress>
                    <SecurityGroups/>
                    <CreatedTime>2013-01-21T23:04:42.200Z</CreatedTime>
                    <KernelId/>
                    <LaunchConfigurationName>my-test-lc</LaunchConfigurationName>
                    <UserData/>
                    <InstanceType>m1.small</InstanceType>
                    <LaunchConfigurationARN>arn:aws:autoscaling:us-east-1:803981987763:launchConfiguration:9dbbbf87-6141-428a-a409-0752edbe6cad:launchConfigurationName/my-test-lc</LaunchConfigurationARN>
                    <BlockDeviceMappings/>
                    <ImageId>ami-514ac838</ImageId>
                    <KeyName/>
                    <RamdiskId/>
                    <InstanceMonitoring>
                      <Enabled>true</Enabled>
                    </InstanceMonitoring>
                    <EbsOptimized>false</EbsOptimized>
                    <ClassicLinkVPCId>vpc-12345</ClassicLinkVPCId>
                    <ClassicLinkVPCSecurityGroups>
                      <member>sg-1234</member>
                    </ClassicLinkVPCSecurityGroups>
                  </member>
                </LaunchConfigurations>
              </DescribeLaunchConfigurationsResult>
              <ResponseMetadata>
                <RequestId>d05a22f8-b690-11e2-bf8e-2113fEXAMPLE</RequestId>
              </ResponseMetadata>
            </DescribeLaunchConfigurationsResponse>
        """
    def test_get_all_launch_configurations(self):
        """Every fixture element maps to the expected LaunchConfiguration attr."""
        self.set_http_response(status_code=200)
        response = self.service_connection.get_all_launch_configurations()
        self.assertTrue(isinstance(response, list))
        self.assertEqual(len(response), 1)
        self.assertTrue(isinstance(response[0], LaunchConfiguration))
        self.assertEqual(response[0].associate_public_ip_address, True)
        self.assertEqual(response[0].name, "my-test-lc")
        self.assertEqual(response[0].instance_type, "m1.small")
        self.assertEqual(response[0].launch_configuration_arn, "arn:aws:autoscaling:us-east-1:803981987763:launchConfiguration:9dbbbf87-6141-428a-a409-0752edbe6cad:launchConfigurationName/my-test-lc")
        self.assertEqual(response[0].image_id, "ami-514ac838")
        self.assertTrue(isinstance(response[0].instance_monitoring, launchconfig.InstanceMonitoring))
        # NOTE(review): monitoring 'enabled' is asserted as the *string* 'true',
        # not bool True -- the parser appears to keep the raw text here.
        self.assertEqual(response[0].instance_monitoring.enabled, 'true')
        self.assertEqual(response[0].ebs_optimized, False)
        self.assertEqual(response[0].block_device_mappings, [])
        self.assertEqual(response[0].classic_link_vpc_id, 'vpc-12345')
        self.assertEqual(response[0].classic_link_vpc_security_groups,
                         ['sg-1234'])
        self.assert_request_parameters({
            'Action': 'DescribeLaunchConfigurations',
        }, ignore_params_values=['Version'])
    def test_get_all_configuration_limited(self):
        """max_records and names must serialize to MaxRecords + .member.N params."""
        self.set_http_response(status_code=200)
        response = self.service_connection.get_all_launch_configurations(max_records=10, names=["my-test1", "my-test2"])
        self.assert_request_parameters({
            'Action': 'DescribeLaunchConfigurations',
            'MaxRecords': 10,
            'LaunchConfigurationNames.member.1': 'my-test1',
            'LaunchConfigurationNames.member.2': 'my-test2'
        }, ignore_params_values=['Version'])
class TestLaunchConfiguration(AWSMockServiceTestCase):
    """Verify CreateLaunchConfiguration request serialization (issues #753, #1343)."""
    connection_class = AutoScaleConnection
    def default_body(self):
        # This is a dummy response
        return b"""
            <DescribeLaunchConfigurationsResponse>
            </DescribeLaunchConfigurationsResponse>
        """
    def test_launch_config(self):
        # This unit test is based on #753 and #1343
        self.set_http_response(status_code=200)
        dev_sdf = EBSBlockDeviceType(snapshot_id='snap-12345')
        bdm = BlockDeviceMapping()
        bdm['/dev/sdf'] = dev_sdf
        lc = launchconfig.LaunchConfiguration(
            connection=self.service_connection,
            name='launch_config',
            image_id='123456',
            instance_type='m1.large',
            user_data='#!/bin/bash',
            security_groups=['group1'],
            spot_price='price',
            block_device_mappings=[bdm],
            associate_public_ip_address=True,
            volume_type='atype',
            delete_on_termination=False,
            iops=3000,
            classic_link_vpc_id='vpc-1234',
            classic_link_vpc_security_groups=['classic_link_group']
        )
        response = self.service_connection.create_launch_configuration(lc)
        self.assert_request_parameters({
            'Action': 'CreateLaunchConfiguration',
            'BlockDeviceMappings.member.1.DeviceName': '/dev/sdf',
            'BlockDeviceMappings.member.1.Ebs.DeleteOnTermination': 'false',
            'BlockDeviceMappings.member.1.Ebs.SnapshotId': 'snap-12345',
            'EbsOptimized': 'false',
            'LaunchConfigurationName': 'launch_config',
            'ImageId': '123456',
            # UserData is base64-encoded on the wire.
            'UserData': base64.b64encode(b'#!/bin/bash').decode('utf-8'),
            'InstanceMonitoring.Enabled': 'false',
            'InstanceType': 'm1.large',
            'SecurityGroups.member.1': 'group1',
            'SpotPrice': 'price',
            'AssociatePublicIpAddress': 'true',
            'VolumeType': 'atype',
            'DeleteOnTermination': 'false',
            'Iops': 3000,
            'ClassicLinkVPCId': 'vpc-1234',
            'ClassicLinkVPCSecurityGroups.member.1': 'classic_link_group'
        }, ignore_params_values=['Version'])
class TestCreateAutoScalePolicy(AWSMockServiceTestCase):
    """Verify PutScalingPolicy serialization, incl. MinAdjustmentStep rules."""
    connection_class = AutoScaleConnection
    def setUp(self):
        super(TestCreateAutoScalePolicy, self).setUp()
    def default_body(self):
        # NOTE: the trailing backslashes inside this triple-quoted bytes
        # literal escape the newlines -- the payload is intentionally built
        # from continued source lines; do not reflow.
        return b"""
            <PutScalingPolicyResponse xmlns="http://autoscaling.amazonaws.com\
            /doc/2011-01-01/">
              <PutScalingPolicyResult>
                <PolicyARN>arn:aws:autoscaling:us-east-1:803981987763:scaling\
            Policy:b0dcf5e8
            -02e6-4e31-9719-0675d0dc31ae:autoScalingGroupName/my-test-asg:\
            policyName/my-scal
            eout-policy</PolicyARN>
              </PutScalingPolicyResult>
              <ResponseMetadata>
                <RequestId>3cfc6fef-c08b-11e2-a697-2922EXAMPLE</RequestId>
              </ResponseMetadata>
            </PutScalingPolicyResponse>
        """
    def test_scaling_policy_with_min_adjustment_step(self):
        """MinAdjustmentStep is sent when type is PercentChangeInCapacity."""
        self.set_http_response(status_code=200)
        policy = ScalingPolicy(
            name='foo', as_name='bar',
            adjustment_type='PercentChangeInCapacity', scaling_adjustment=50,
            min_adjustment_step=30)
        self.service_connection.create_scaling_policy(policy)
        self.assert_request_parameters({
            'Action': 'PutScalingPolicy',
            'PolicyName': 'foo',
            'AutoScalingGroupName': 'bar',
            'AdjustmentType': 'PercentChangeInCapacity',
            'ScalingAdjustment': 50,
            'MinAdjustmentStep': 30
        }, ignore_params_values=['Version'])
    def test_scaling_policy_with_wrong_adjustment_type(self):
        """MinAdjustmentStep is dropped for non-percentage adjustment types."""
        self.set_http_response(status_code=200)
        policy = ScalingPolicy(
            name='foo', as_name='bar',
            adjustment_type='ChangeInCapacity', scaling_adjustment=50,
            min_adjustment_step=30)
        self.service_connection.create_scaling_policy(policy)
        self.assert_request_parameters({
            'Action': 'PutScalingPolicy',
            'PolicyName': 'foo',
            'AutoScalingGroupName': 'bar',
            'AdjustmentType': 'ChangeInCapacity',
            'ScalingAdjustment': 50
        }, ignore_params_values=['Version'])
    def test_scaling_policy_without_min_adjustment_step(self):
        """Omitting min_adjustment_step omits the parameter entirely."""
        self.set_http_response(status_code=200)
        policy = ScalingPolicy(
            name='foo', as_name='bar',
            adjustment_type='PercentChangeInCapacity', scaling_adjustment=50)
        self.service_connection.create_scaling_policy(policy)
        self.assert_request_parameters({
            'Action': 'PutScalingPolicy',
            'PolicyName': 'foo',
            'AutoScalingGroupName': 'bar',
            'AdjustmentType': 'PercentChangeInCapacity',
            'ScalingAdjustment': 50
        }, ignore_params_values=['Version'])
class TestPutNotificationConfiguration(AWSMockServiceTestCase):
    """Verify put_notification_configuration request serialization."""
    connection_class = AutoScaleConnection
    def setUp(self):
        super(TestPutNotificationConfiguration, self).setUp()
    def default_body(self):
        return b"""
            <PutNotificationConfigurationResponse>
              <ResponseMetadata>
                <RequestId>requestid</RequestId>
              </ResponseMetadata>
            </PutNotificationConfigurationResponse>
        """
    def test_autoscaling_group_put_notification_configuration(self):
        """Group name, topic ARN, and notification types all reach the wire."""
        self.set_http_response(status_code=200)
        autoscale = AutoScalingGroup(
            name='ana', launch_config='lauch_config',
            min_size=1, max_size=2,
            termination_policies=['OldestInstance', 'OldestLaunchConfiguration'])
        self.service_connection.put_notification_configuration(autoscale, 'arn:aws:sns:us-east-1:19890506:AutoScaling-Up', ['autoscaling:EC2_INSTANCE_LAUNCH'])
        self.assert_request_parameters({
            'Action': 'PutNotificationConfiguration',
            'AutoScalingGroupName': 'ana',
            'NotificationTypes.member.1': 'autoscaling:EC2_INSTANCE_LAUNCH',
            'TopicARN': 'arn:aws:sns:us-east-1:19890506:AutoScaling-Up',
        }, ignore_params_values=['Version'])
class TestDeleteNotificationConfiguration(AWSMockServiceTestCase):
    """Verify delete_notification_configuration request serialization."""
    connection_class = AutoScaleConnection
    def setUp(self):
        super(TestDeleteNotificationConfiguration, self).setUp()
    def default_body(self):
        return b"""
            <DeleteNotificationConfigurationResponse>
              <ResponseMetadata>
                <RequestId>requestid</RequestId>
              </ResponseMetadata>
            </DeleteNotificationConfigurationResponse>
        """
    def test_autoscaling_group_delete_notification_configuration(self):
        """Group name and topic ARN must serialize for the Delete action.

        Renamed from test_autoscaling_group_put_notification_configuration:
        the old name was a copy-paste from the Put test class even though
        this test exercises the delete path. unittest discovery still picks
        it up via the test_ prefix.
        """
        self.set_http_response(status_code=200)
        autoscale = AutoScalingGroup(
            name='ana', launch_config='lauch_config',
            min_size=1, max_size=2,
            termination_policies=['OldestInstance', 'OldestLaunchConfiguration'])
        self.service_connection.delete_notification_configuration(autoscale, 'arn:aws:sns:us-east-1:19890506:AutoScaling-Up')
        self.assert_request_parameters({
            'Action': 'DeleteNotificationConfiguration',
            'AutoScalingGroupName': 'ana',
            'TopicARN': 'arn:aws:sns:us-east-1:19890506:AutoScaling-Up',
        }, ignore_params_values=['Version'])
class TestAutoScalingTag(AWSMockServiceTestCase):
    """Request serialization and XML parsing for auto-scaling Tag objects."""
    connection_class = AutoScaleConnection

    def default_body(self):
        # Minimal success payload for the mocked CreateOrUpdateTags call.
        return b"""
            <CreateOrUpdateTagsResponse>
              <ResponseMetadata>
                <RequestId>requestId</RequestId>
              </ResponseMetadata>
            </CreateOrUpdateTagsResponse>
        """

    def test_create_or_update_tags(self):
        """Two tags must serialize into indexed Tags.member.N.* parameters."""
        self.set_http_response(status_code=200)
        first = Tag(
            connection=self.service_connection,
            key='alpha',
            value='tango',
            resource_id='sg-00000000',
            resource_type='auto-scaling-group',
            propagate_at_launch=True,
        )
        second = Tag(
            connection=self.service_connection,
            key='bravo',
            value='sierra',
            resource_id='sg-00000000',
            resource_type='auto-scaling-group',
            propagate_at_launch=False,
        )
        response = self.service_connection.create_or_update_tags([first, second])
        self.assert_request_parameters({
            'Action': 'CreateOrUpdateTags',
            'Tags.member.1.ResourceType': 'auto-scaling-group',
            'Tags.member.1.ResourceId': 'sg-00000000',
            'Tags.member.1.Key': 'alpha',
            'Tags.member.1.Value': 'tango',
            'Tags.member.1.PropagateAtLaunch': 'true',
            'Tags.member.2.ResourceType': 'auto-scaling-group',
            'Tags.member.2.ResourceId': 'sg-00000000',
            'Tags.member.2.Key': 'bravo',
            'Tags.member.2.Value': 'sierra',
            'Tags.member.2.PropagateAtLaunch': 'false'
        }, ignore_params_values=['Version'])

    def test_endElement(self):
        """endElement must coerce 'true' to True and keep other values verbatim."""
        cases = [
            ('Key', 'mykey', 'key'),
            ('Value', 'myvalue', 'value'),
            ('ResourceType', 'auto-scaling-group', 'resource_type'),
            ('ResourceId', 'sg-01234567', 'resource_id'),
            ('PropagateAtLaunch', 'true', 'propagate_at_launch'),
        ]
        for element, raw_value, attr_name in cases:
            self.check_tag_attributes_set(element, raw_value, attr_name)

    def check_tag_attributes_set(self, name, value, attr):
        """Feed one SAX end-element event into a fresh Tag and check the attribute."""
        tag = Tag()
        tag.endElement(name, value, None)
        expected = True if value == 'true' else value
        self.assertEqual(getattr(tag, attr), expected)
class TestAttachInstances(AWSMockServiceTestCase):
    """Verify attach_instances request serialization."""
    connection_class = AutoScaleConnection
    def setUp(self):
        super(TestAttachInstances, self).setUp()
    def default_body(self):
        return b"""
            <AttachInstancesResponse>
              <ResponseMetadata>
                <RequestId>requestid</RequestId>
              </ResponseMetadata>
            </AttachInstancesResponse>
        """
    def test_attach_instances(self):
        """Instance ids must serialize as .member.N params in call order."""
        self.set_http_response(status_code=200)
        self.service_connection.attach_instances(
            'autoscale',
            ['inst2', 'inst1', 'inst4']
        )
        self.assert_request_parameters({
            'Action': 'AttachInstances',
            'AutoScalingGroupName': 'autoscale',
            'InstanceIds.member.1': 'inst2',
            'InstanceIds.member.2': 'inst1',
            'InstanceIds.member.3': 'inst4',
        }, ignore_params_values=['Version'])
class TestDetachInstances(AWSMockServiceTestCase):
    """Verify detach_instances serialization, incl. the decrement-capacity flag."""
    connection_class = AutoScaleConnection
    def setUp(self):
        super(TestDetachInstances, self).setUp()
    def default_body(self):
        return b"""
            <DetachInstancesResponse>
              <ResponseMetadata>
                <RequestId>requestid</RequestId>
              </ResponseMetadata>
            </DetachInstancesResponse>
        """
    def test_detach_instances(self):
        """With no flag argument, ShouldDecrementDesiredCapacity defaults to 'true'."""
        self.set_http_response(status_code=200)
        self.service_connection.detach_instances(
            'autoscale',
            ['inst2', 'inst1', 'inst4']
        )
        self.assert_request_parameters({
            'Action': 'DetachInstances',
            'AutoScalingGroupName': 'autoscale',
            'InstanceIds.member.1': 'inst2',
            'InstanceIds.member.2': 'inst1',
            'InstanceIds.member.3': 'inst4',
            'ShouldDecrementDesiredCapacity': 'true',
        }, ignore_params_values=['Version'])
    def test_detach_instances_with_decrement_desired_capacity(self):
        """Explicit True serializes to 'true'."""
        self.set_http_response(status_code=200)
        self.service_connection.detach_instances(
            'autoscale',
            ['inst2', 'inst1', 'inst4'],
            True
        )
        self.assert_request_parameters({
            'Action': 'DetachInstances',
            'AutoScalingGroupName': 'autoscale',
            'InstanceIds.member.1': 'inst2',
            'InstanceIds.member.2': 'inst1',
            'InstanceIds.member.3': 'inst4',
            'ShouldDecrementDesiredCapacity': 'true',
        }, ignore_params_values=['Version'])
    def test_detach_instances_without_decrement_desired_capacity(self):
        """Explicit False serializes to 'false'."""
        self.set_http_response(status_code=200)
        self.service_connection.detach_instances(
            'autoscale',
            ['inst2', 'inst1', 'inst4'],
            False
        )
        self.assert_request_parameters({
            'Action': 'DetachInstances',
            'AutoScalingGroupName': 'autoscale',
            'InstanceIds.member.1': 'inst2',
            'InstanceIds.member.2': 'inst1',
            'InstanceIds.member.3': 'inst4',
            'ShouldDecrementDesiredCapacity': 'false',
        }, ignore_params_values=['Version'])
class TestGetAccountLimits(AWSMockServiceTestCase):
    """Verify get_account_limits request serialization and response parsing."""
    connection_class = AutoScaleConnection
    def setUp(self):
        super(TestGetAccountLimits, self).setUp()
    def default_body(self):
        return b"""
            <DescribeAccountLimitsAnswer>
              <MaxNumberOfAutoScalingGroups>6</MaxNumberOfAutoScalingGroups>
              <MaxNumberOfLaunchConfigurations>3</MaxNumberOfLaunchConfigurations>
              <ResponseMetadata>
                <RequestId>requestid</RequestId>
              </ResponseMetadata>
            </DescribeAccountLimitsAnswer>
        """
    def test_get_account_limits(self):
        """DescribeAccountLimits is issued and both limits are parsed as ints.

        Renamed from test_autoscaling_group_put_notification_configuration,
        a copy-paste leftover from the notification test classes that had
        nothing to do with account limits. Discovery still works via the
        test_ prefix.
        """
        self.set_http_response(status_code=200)
        limits = self.service_connection.get_account_limits()
        self.assert_request_parameters({
            'Action': 'DescribeAccountLimits',
        }, ignore_params_values=['Version'])
        self.assertEqual(limits.max_autoscaling_groups, 6)
        self.assertEqual(limits.max_launch_configurations, 3)
class TestGetAdjustmentTypes(AWSMockServiceTestCase):
    """Verify DescribeAdjustmentTypes parsing into AdjustmentType objects."""
    connection_class = AutoScaleConnection
    def setUp(self):
        super(TestGetAdjustmentTypes, self).setUp()
    def default_body(self):
        # NOTE: the xmlns year '201-01-01' (sic) is part of the original
        # fixture; the parser does not validate the namespace.
        return b"""
            <DescribeAdjustmentTypesResponse xmlns="http://autoscaling.amazonaws.com/doc/201-01-01/">
              <DescribeAdjustmentTypesResult>
                <AdjustmentTypes>
                  <member>
                    <AdjustmentType>ChangeInCapacity</AdjustmentType>
                  </member>
                  <member>
                    <AdjustmentType>ExactCapacity</AdjustmentType>
                  </member>
                  <member>
                    <AdjustmentType>PercentChangeInCapacity</AdjustmentType>
                  </member>
                </AdjustmentTypes>
              </DescribeAdjustmentTypesResult>
              <ResponseMetadata>
                <RequestId>requestId</RequestId>
              </ResponseMetadata>
            </DescribeAdjustmentTypesResponse>
        """
    def test_autoscaling_adjustment_types(self):
        """Members parse into objects exposing .adjustment_type, in order."""
        self.set_http_response(status_code=200)
        response = self.service_connection.get_all_adjustment_types()
        self.assert_request_parameters({
            'Action': 'DescribeAdjustmentTypes'
        }, ignore_params_values=['Version'])
        self.assertTrue(isinstance(response, list))
        self.assertEqual(response[0].adjustment_type, "ChangeInCapacity")
        self.assertEqual(response[1].adjustment_type, "ExactCapacity")
        self.assertEqual(response[2].adjustment_type, "PercentChangeInCapacity")
class TestLaunchConfigurationDescribeWithBlockDeviceTypes(AWSMockServiceTestCase):
    """Verify launch-config parsing when use_block_device_types is enabled.

    With the flag on, block_device_mappings parses into a dict keyed by
    device name (EBS and ephemeral entries) instead of a flat list.
    """
    connection_class = AutoScaleConnection
    def default_body(self):
        # This is a dummy response
        return b"""
            <DescribeLaunchConfigurationsResponse>
              <DescribeLaunchConfigurationsResult>
                <LaunchConfigurations>
                  <member>
                    <AssociatePublicIpAddress>true</AssociatePublicIpAddress>
                    <SecurityGroups/>
                    <CreatedTime>2013-01-21T23:04:42.200Z</CreatedTime>
                    <KernelId/>
                    <LaunchConfigurationName>my-test-lc</LaunchConfigurationName>
                    <UserData/>
                    <InstanceType>m1.small</InstanceType>
                    <LaunchConfigurationARN>arn:aws:autoscaling:us-east-1:803981987763:launchConfiguration:9dbbbf87-6141-428a-a409-0752edbe6cad:launchConfigurationName/my-test-lc</LaunchConfigurationARN>
                    <BlockDeviceMappings>
                        <member>
                            <DeviceName>/dev/xvdp</DeviceName>
                            <Ebs>
                                <SnapshotId>snap-1234abcd</SnapshotId>
                                <Iops>1000</Iops>
                                <DeleteOnTermination>true</DeleteOnTermination>
                                <VolumeType>io1</VolumeType>
                                <VolumeSize>100</VolumeSize>
                            </Ebs>
                        </member>
                        <member>
                            <VirtualName>ephemeral1</VirtualName>
                            <DeviceName>/dev/xvdc</DeviceName>
                        </member>
                        <member>
                            <VirtualName>ephemeral0</VirtualName>
                            <DeviceName>/dev/xvdb</DeviceName>
                        </member>
                        <member>
                            <DeviceName>/dev/xvdh</DeviceName>
                            <Ebs>
                                <Iops>2000</Iops>
                                <DeleteOnTermination>false</DeleteOnTermination>
                                <VolumeType>io1</VolumeType>
                                <VolumeSize>200</VolumeSize>
                            </Ebs>
                        </member>
                    </BlockDeviceMappings>
                    <ImageId>ami-514ac838</ImageId>
                    <KeyName/>
                    <RamdiskId/>
                    <InstanceMonitoring>
                      <Enabled>true</Enabled>
                    </InstanceMonitoring>
                    <EbsOptimized>false</EbsOptimized>
                  </member>
                </LaunchConfigurations>
              </DescribeLaunchConfigurationsResult>
              <ResponseMetadata>
                <RequestId>d05a22f8-b690-11e2-bf8e-2113fEXAMPLE</RequestId>
              </ResponseMetadata>
            </DescribeLaunchConfigurationsResponse>
        """
    def test_get_all_launch_configurations_with_block_device_types(self):
        """Mappings parse to a dict of device name -> typed block device."""
        self.set_http_response(status_code=200)
        # Opt in to the dict-based block-device parsing before the call.
        self.service_connection.use_block_device_types = True
        response = self.service_connection.get_all_launch_configurations()
        self.assertTrue(isinstance(response, list))
        self.assertEqual(len(response), 1)
        self.assertTrue(isinstance(response[0], LaunchConfiguration))
        self.assertEqual(response[0].associate_public_ip_address, True)
        self.assertEqual(response[0].name, "my-test-lc")
        self.assertEqual(response[0].instance_type, "m1.small")
        self.assertEqual(response[0].launch_configuration_arn, "arn:aws:autoscaling:us-east-1:803981987763:launchConfiguration:9dbbbf87-6141-428a-a409-0752edbe6cad:launchConfigurationName/my-test-lc")
        self.assertEqual(response[0].image_id, "ami-514ac838")
        self.assertTrue(isinstance(response[0].instance_monitoring, launchconfig.InstanceMonitoring))
        self.assertEqual(response[0].instance_monitoring.enabled, 'true')
        self.assertEqual(response[0].ebs_optimized, False)
        self.assertEqual(response[0].block_device_mappings['/dev/xvdb'].ephemeral_name, 'ephemeral0')
        self.assertEqual(response[0].block_device_mappings['/dev/xvdc'].ephemeral_name, 'ephemeral1')
        self.assertEqual(response[0].block_device_mappings['/dev/xvdp'].snapshot_id, 'snap-1234abcd')
        self.assertEqual(response[0].block_device_mappings['/dev/xvdp'].delete_on_termination, True)
        self.assertEqual(response[0].block_device_mappings['/dev/xvdp'].iops, 1000)
        self.assertEqual(response[0].block_device_mappings['/dev/xvdp'].size, 100)
        self.assertEqual(response[0].block_device_mappings['/dev/xvdp'].volume_type, 'io1')
        self.assertEqual(response[0].block_device_mappings['/dev/xvdh'].delete_on_termination, False)
        self.assertEqual(response[0].block_device_mappings['/dev/xvdh'].iops, 2000)
        self.assertEqual(response[0].block_device_mappings['/dev/xvdh'].size, 200)
        self.assertEqual(response[0].block_device_mappings['/dev/xvdh'].volume_type, 'io1')
        self.assert_request_parameters({
            'Action': 'DescribeLaunchConfigurations',
        }, ignore_params_values=['Version'])
    def test_get_all_configuration_limited(self):
        """max_records and names must serialize to MaxRecords + .member.N params."""
        self.set_http_response(status_code=200)
        response = self.service_connection.get_all_launch_configurations(max_records=10, names=["my-test1", "my-test2"])
        self.assert_request_parameters({
            'Action': 'DescribeLaunchConfigurations',
            'MaxRecords': 10,
            'LaunchConfigurationNames.member.1': 'my-test1',
            'LaunchConfigurationNames.member.2': 'my-test2'
        }, ignore_params_values=['Version'])
# Allow running this test module directly (python test_group.py).
if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
sodexis/odoo | addons/account_payment/wizard/account_payment_order.py | 338 | 5906 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from lxml import etree
from openerp.osv import fields, osv
from openerp.tools.translate import _
class payment_order_create(osv.osv_memory):
    """
    Create a payment object with lines corresponding to the account move line
    to pay according to the date and the mode provided by the user.
    Hypothesis:
    - Small number of non-reconciled move line, payment mode and bank account type,
    - Big number of partner and bank account.
    If a type is given, unsuitable account Entry lines are ignored.
    """
    _name = 'payment.order.create'
    _description = 'payment.order.create'
    # Wizard fields: a due-date cutoff and the move lines selected for payment.
    _columns = {
        'duedate': fields.date('Due Date', required=True),
        'entries': fields.many2many('account.move.line', 'line_pay_rel', 'pay_id', 'line_id', 'Entries')
    }
    _defaults = {
        'duedate': lambda *a: time.strftime('%Y-%m-%d'),
    }
    def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """Restrict the 'entries' widget to the move-line ids pre-selected by
        search_entries (passed through context['line_ids']) by injecting a
        domain attribute into the view's XML arch."""
        if not context: context = {}
        res = super(payment_order_create, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=False)
        if context and 'line_ids' in context:
            doc = etree.XML(res['arch'])
            nodes = doc.xpath("//field[@name='entries']")
            for node in nodes:
                node.set('domain', '[("id", "in", '+ str(context['line_ids'])+')]')
            res['arch'] = etree.tostring(doc)
        return res
    def create_payment(self, cr, uid, ids, context=None):
        """Create one payment.line per selected move line on the payment.order
        identified by context['active_id'], then close the wizard."""
        order_obj = self.pool.get('payment.order')
        line_obj = self.pool.get('account.move.line')
        payment_obj = self.pool.get('payment.line')
        if context is None:
            context = {}
        data = self.browse(cr, uid, ids, context=context)[0]
        line_ids = [entry.id for entry in data.entries]
        if not line_ids:
            # Nothing selected: close the wizard without creating anything.
            return {'type': 'ir.actions.act_window_close'}
        payment = order_obj.browse(cr, uid, context['active_id'], context=context)
        # No payment-mode type filter is applied when resolving bank accounts.
        t = None
        line2bank = line_obj.line2bank(cr, uid, line_ids, t, context)
        ## Finally populate the current payment with new lines:
        for line in line_obj.browse(cr, uid, line_ids, context=context):
            # date_prefered is a selection field; assumed restricted to
            # 'now'/'due'/'fixed', otherwise date_to_pay would be unbound.
            if payment.date_prefered == "now":
                #no payment date => immediate payment
                date_to_pay = False
            elif payment.date_prefered == 'due':
                date_to_pay = line.date_maturity
            elif payment.date_prefered == 'fixed':
                date_to_pay = payment.date_scheduled
            payment_obj.create(cr, uid,{
                'move_line_id': line.id,
                'amount_currency': line.amount_residual_currency,
                'bank_id': line2bank.get(line.id),
                'order_id': payment.id,
                'partner_id': line.partner_id and line.partner_id.id or False,
                'communication': line.ref or '/',
                'state': line.invoice and line.invoice.reference_type != 'none' and 'structured' or 'normal',
                'date': date_to_pay,
                # Currency preference: invoice currency, then journal currency,
                # then the journal company's currency.
                'currency': (line.invoice and line.invoice.currency_id.id) or line.journal_id.currency.id or line.journal_id.company_id.currency_id.id,
            }, context=context)
        return {'type': 'ir.actions.act_window_close'}
    def search_entries(self, cr, uid, ids, context=None):
        """Find payable, unreconciled move lines due on or before the wizard's
        due date and reopen this wizard with those ids in the context (consumed
        by fields_view_get above)."""
        line_obj = self.pool.get('account.move.line')
        mod_obj = self.pool.get('ir.model.data')
        if context is None:
            context = {}
        data = self.browse(cr, uid, ids, context=context)[0]
        search_due_date = data.duedate
        #        payment = self.pool.get('payment.order').browse(cr, uid, context['active_id'], context=context)
        # Search for move line to pay:
        domain = [('reconcile_id', '=', False), ('account_id.type', '=', 'payable'), ('credit', '>', 0), ('account_id.reconcile', '=', True)]
        # Include lines with no maturity date as well as those due by the cutoff.
        domain = domain + ['|', ('date_maturity', '<=', search_due_date), ('date_maturity', '=', False)]
        line_ids = line_obj.search(cr, uid, domain, context=context)
        context = dict(context, line_ids=line_ids)
        model_data_ids = mod_obj.search(cr, uid,[('model', '=', 'ir.ui.view'), ('name', '=', 'view_create_payment_order_lines')], context=context)
        resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
        return {'name': _('Entry Lines'),
                'context': context,
                'view_type': 'form',
                'view_mode': 'form',
                'res_model': 'payment.order.create',
                'views': [(resource_id,'form')],
                'type': 'ir.actions.act_window',
                'target': 'new',
        }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
dpshelio/sunpy | examples/units_and_coordinates/planet_locations.py | 1 | 1252 | """
===================================
Getting the location of the planets
===================================
How to get the position of planetary bodies in the solar system using
`astropy's solar system ephemeris <http://docs.astropy.org/en/stable/coordinates/solarsystem.html#solar-system-ephemerides>`__ information and SunPy.
"""
import matplotlib.pyplot as plt
from astropy.time import Time
from sunpy.coordinates import get_body_heliographic_stonyhurst
##############################################################################
# Lets grab the positions of each of the planets in Heliographic Stonyhurst
# coordinates.
obstime = Time('2014-05-15T07:54:00.005')
planet_list = ['earth', 'venus', 'mars', 'mercury', 'jupiter', 'neptune', 'uranus', 'sun']
planet_coord = [get_body_heliographic_stonyhurst(this_planet, time=obstime) for this_planet in planet_list]
##############################################################################
# Let's plot the results. Remember the Sun is at the center of this coordinate
# system.
ax = plt.subplot(projection='polar')
for this_planet, this_coord in zip(planet_list, planet_coord):
plt.polar(this_coord.lon.to('rad'), this_coord.radius, 'o', label=this_planet)
plt.legend()
plt.show()
| bsd-2-clause |
kohout/djangocms-getaweb-topstory | djangocms_topstory/south_migrations/0008_auto__chg_field_topstoryitem_object_id.py | 1 | 5765 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: relax ``TopStoryItem.object_id`` so the
    generic-FK target may be left empty (nullable) instead of defaulting
    to an empty string."""

    def forwards(self, orm):
        # Changing field 'TopStoryItem.object_id'
        db.alter_column(u'djangocms_topstory_topstoryitem', 'object_id', self.gf('gfklookupwidget.fields.GfkLookupField')(null=True))

    def backwards(self, orm):
        # Changing field 'TopStoryItem.object_id'
        db.alter_column(u'djangocms_topstory_topstoryitem', 'object_id', self.gf('gfklookupwidget.fields.GfkLookupField')(default=''))

    # Frozen ORM state auto-generated by South at the time this migration
    # was created. Do not edit by hand; it only describes the schema as it
    # existed then, not the live models.
    models = {
        'cms.cmsplugin': {
            'Meta': {'object_name': 'CMSPlugin'},
            'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
            'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
            'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.placeholder': {
            'Meta': {'object_name': 'Placeholder'},
            'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'djangocms_topstory.topstory': {
            'Meta': {'object_name': 'TopStory', '_ormbases': ['cms.CMSPlugin']},
            u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'height': ('django.db.models.fields.CharField', [], {'default': "'434px'", 'max_length': '10'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
            'width': ('django.db.models.fields.CharField', [], {'default': "'100%'", 'max_length': '10'})
        },
        u'djangocms_topstory.topstoryitem': {
            'Meta': {'ordering': "['ordering']", 'object_name': 'TopStoryItem'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
            'external_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'focal_point_x': ('django.db.models.fields.PositiveIntegerField', [], {'default': '50'}),
            'focal_point_y': ('django.db.models.fields.PositiveIntegerField', [], {'default': '50'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'image_height': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True'}),
            'image_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True'}),
            'object_id': ('gfklookupwidget.fields.GfkLookupField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
            'ordering': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'size': ('django.db.models.fields.CharField', [], {'default': "'fullscreen'", 'max_length': '50'}),
            'teaser_layout': ('django.db.models.fields.CharField', [], {'default': "'white'", 'max_length': '100'}),
            'teaser_position': ('django.db.models.fields.CharField', [], {'default': "'left'", 'max_length': '50'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
            'topstory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'topstory_items'", 'to': u"orm['djangocms_topstory.TopStory']"})
        }
    }

    complete_apps = ['djangocms_topstory']
nhicher/ansible | lib/ansible/modules/cloud/vmware/vmware_guest_snapshot_facts.py | 39 | 4970 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_guest_snapshot_facts
short_description: Gather facts about virtual machine's snapshots in vCenter
description:
- This module can be used to gather facts about virtual machine's snapshots.
version_added: 2.6
author:
- Abhijeet Kasurde (@Akasurde) <akasurde@redhat.com>
notes:
- Tested on vSphere 6.0 and 6.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
name:
description:
- Name of the VM to work with.
- This is required if C(uuid) is not supplied.
uuid:
description:
- UUID of the instance to manage if known, this value is VMware's unique identifier.
- This is required if C(name) is not supplied.
- The C(folder) is ignored, if C(uuid) is provided.
folder:
description:
- Destination folder, absolute or relative path to find an existing guest.
- This is required only, if multiple virtual machines with same name are found on given vCenter.
- The folder should include the datacenter. ESX's datacenter is ha-datacenter
- 'Examples:'
- ' folder: /ha-datacenter/vm'
- ' folder: ha-datacenter/vm'
- ' folder: /datacenter1/vm'
- ' folder: datacenter1/vm'
- ' folder: /datacenter1/vm/folder1'
- ' folder: datacenter1/vm/folder1'
- ' folder: /folder1/datacenter1/vm'
- ' folder: folder1/datacenter1/vm'
- ' folder: /folder1/datacenter1/vm/folder2'
datacenter:
description:
- Name of the datacenter.
required: True
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Gather snapshot facts about the virtual machine in the given vCenter
vmware_guest_snapshot_facts:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
datacenter: "{{ datacenter_name }}"
name: "{{ guest_name }}"
delegate_to: localhost
register: snapshot_facts
'''
RETURN = """
guest_snapshots:
description: metadata about the snapshot facts
returned: always
type: dict
sample: {
"current_snapshot": {
"creation_time": "2018-02-10T14:48:31.999459+00:00",
"description": "",
"id": 28,
"name": "snap_0003",
"state": "poweredOff"
},
"snapshots": [
{
"creation_time": "2018-02-10T14:48:31.999459+00:00",
"description": "",
"id": 28,
"name": "snap_0003",
"state": "poweredOff"
}
]
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, list_snapshots, vmware_argument_spec
class PyVmomiHelper(PyVmomi):
    """Thin wrapper around the shared PyVmomi helper that adds
    snapshot fact gathering for a single virtual machine."""

    def __init__(self, module):
        super(PyVmomiHelper, self).__init__(module)

    @staticmethod
    def gather_guest_snapshot_facts(vm_obj=None):
        """
        Function to return snapshot related facts about given virtual machine
        Args:
            vm_obj: Virtual Machine Managed object

        Returns: Dictionary containing snapshot facts (empty dict when
            no VM object is supplied)
        """
        if vm_obj is None:
            return {}
        return list_snapshots(vm=vm_obj)
def main():
    """Module entry point: locate the requested VM and exit with its
    snapshot facts, or fail if the VM cannot be found."""
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        name=dict(type='str'),
        uuid=dict(type='str'),
        folder=dict(type='str'),
        datacenter=dict(required=True, type='str'),
    )
    module = AnsibleModule(argument_spec=argument_spec,
                           required_together=[['name', 'folder']],
                           required_one_of=[['name', 'uuid']],
                           )
    if module.params['folder']:
        # FindByInventoryPath() does not require an absolute path
        # so we should leave the input folder path unmodified
        module.params['folder'] = module.params['folder'].rstrip('/')

    pyv = PyVmomiHelper(module)
    # Check if the VM exists before continuing
    vm = pyv.get_vm()

    if not vm:
        # If UUID was supplied, the lookup was by UUID; report whichever
        # identifier the user gave in the error message.
        module.fail_json(msg="Unable to gather facts about snapshots for"
                             " non-existing VM ['%s']" % (module.params.get('uuid') or
                                                          module.params.get('name')))

    results = dict(changed=False, guest_snapshots=pyv.gather_guest_snapshot_facts(vm_obj=vm))
    module.exit_json(**results)


if __name__ == '__main__':
    main()
| gpl-3.0 |
mattrobenolt/pip | tests/lib/__init__.py | 7 | 17037 | from __future__ import absolute_import
import os
import sys
import re
import textwrap
import site
import scripttest
import virtualenv
from tests.lib.path import Path, curdir, u
DATA_DIR = Path(__file__).folder.folder.join("data").abspath
SRC_DIR = Path(__file__).abspath.folder.folder.folder
pyversion = sys.version[:3]
def path_to_url(path):
    """
    Convert a path to URI. The path will be made absolute and
    will not have quoted path parts.
    (adapted from pip.util)
    """
    absolute = os.path.normpath(os.path.abspath(path))
    drive, tail = os.path.splitdrive(absolute)
    url_path = '/'.join(tail.split(os.path.sep))
    if drive:
        return 'file:///' + drive + url_path
    return 'file://' + url_path
class TestData(object):
    """
    Represents a bundle of pre-created test data.

    This copies a pristine set of test data into a root location that is
    designed to be test specific. The reason for this is when running the tests
    concurrently errors can be generated because the related tooling uses
    the directory as a work space. This leads to two concurrent processes
    trampling over each other. This class gets around that by copying all
    data into a directory and operating on the copied data.
    """

    def __init__(self, root, source=None):
        # `source` is the pristine checked-in data directory; `root` is the
        # per-test working copy.
        self.source = source or DATA_DIR
        self.root = Path(root).abspath

    @classmethod
    def copy(cls, root):
        """Create a TestData rooted at *root*, populated from the source tree."""
        obj = cls(root)
        obj.reset()
        return obj

    def reset(self):
        """Discard any modifications by re-copying the pristine source tree."""
        self.root.rmtree()
        self.source.copytree(self.root)

    # Convenience accessors for well-known locations inside the copied tree.

    @property
    def packages(self):
        return self.root.join("packages")

    @property
    def packages2(self):
        return self.root.join("packages2")

    @property
    def packages3(self):
        return self.root.join("packages3")

    @property
    def src(self):
        return self.root.join("src")

    @property
    def indexes(self):
        return self.root.join("indexes")

    @property
    def reqfiles(self):
        return self.root.join("reqfiles")

    # file:// URLs suitable for pip's --find-links / --index-url options.

    @property
    def find_links(self):
        return path_to_url(self.packages)

    @property
    def find_links2(self):
        return path_to_url(self.packages2)

    @property
    def find_links3(self):
        return path_to_url(self.packages3)

    def index_url(self, index="simple"):
        """Return a file:// URL for the named package index under indexes/."""
        return path_to_url(self.root.join("indexes", index))
class TestFailure(AssertionError):
    """An "assertion" failed during testing."""
class TestPipResult(object):
    """Wrap a scripttest ProcResult with pip-specific assertion helpers.

    Attribute access is forwarded to the wrapped result, so ``stdout``,
    ``stderr``, ``files_created`` etc. remain available.  On Windows the
    output properties normalize CRLF line endings to LF.
    """

    def __init__(self, impl, verbose=False):
        """Wrap *impl*; when *verbose*, echo its stdout/stderr immediately."""
        self._impl = impl

        if verbose:
            print(self.stdout)
            if self.stderr:
                print('======= stderr ========')
                print(self.stderr)
                print('=======================')

    def __getattr__(self, attr):
        return getattr(self._impl, attr)

    if sys.platform == 'win32':
        @property
        def stdout(self):
            return self._impl.stdout.replace('\r\n', '\n')

        @property
        def stderr(self):
            return self._impl.stderr.replace('\r\n', '\n')

        def __str__(self):
            return str(self._impl).replace('\r\n', '\n')
    else:
        # Python doesn't automatically forward __str__ through __getattr__
        def __str__(self):
            return str(self._impl)

    def assert_installed(self, pkg_name, editable=True, with_files=(),
                         without_files=(), without_egg_link=False,
                         use_user_site=False, sub_dir=False):
        """Assert that *pkg_name* was installed as expected by this pip run.

        ``with_files`` lists paths (relative to the package directory) that
        must have been created; ``without_files`` lists paths that must not
        exist.  Both previously used mutable list defaults — a classic
        Python pitfall — and now default to empty tuples, which is
        backward compatible (callers may still pass lists).
        Raises TestFailure on any mismatch.
        """
        e = self.test_env

        if editable:
            pkg_dir = e.venv / 'src' / pkg_name.lower()
            # If package was installed in a sub directory
            if sub_dir:
                pkg_dir = pkg_dir / sub_dir
        else:
            without_egg_link = True
            pkg_dir = e.site_packages / pkg_name

        if use_user_site:
            egg_link_path = e.user_site / pkg_name + '.egg-link'
        else:
            egg_link_path = e.site_packages / pkg_name + '.egg-link'

        if without_egg_link:
            if egg_link_path in self.files_created:
                raise TestFailure(
                    'unexpected egg link file created: %r\n%s' %
                    (egg_link_path, self)
                )
        else:
            if egg_link_path not in self.files_created:
                raise TestFailure(
                    'expected egg link file missing: %r\n%s' %
                    (egg_link_path, self)
                )

            egg_link_file = self.files_created[egg_link_path]

            # FIXME: I don't understand why there's a trailing . here
            if not (egg_link_file.bytes.endswith('\n.')
                    and egg_link_file.bytes[:-2].endswith(pkg_dir)):
                raise TestFailure(textwrap.dedent(u('''\
                Incorrect egg_link file %r
                Expected ending: %r
                ------- Actual contents -------
                %s
                -------------------------------''' % (
                    egg_link_file,
                    pkg_dir + '\n.',
                    repr(egg_link_file.bytes))
                )))

        if use_user_site:
            pth_file = e.user_site / 'easy-install.pth'
        else:
            pth_file = e.site_packages / 'easy-install.pth'

        if (pth_file in self.files_updated) == without_egg_link:
            raise TestFailure('%r unexpectedly %supdated by install' % (
                pth_file, (not without_egg_link and 'not ' or '')))

        if (pkg_dir in self.files_created) == (curdir in without_files):
            raise TestFailure(textwrap.dedent('''\
            expected package directory %r %sto be created
            actually created:
            %s
            ''') % (
                pkg_dir,
                (curdir in without_files and 'not ' or ''),
                sorted(self.files_created.keys())))

        for f in with_files:
            if not (pkg_dir / f).normpath in self.files_created:
                raise TestFailure(
                    'Package directory %r missing expected content %r' %
                    (pkg_dir, f)
                )

        for f in without_files:
            if (pkg_dir / f).normpath in self.files_created:
                # Was '%f' (a float conversion, which raises TypeError on a
                # string path); '%r' is what was intended.
                raise TestFailure(
                    'Package directory %r has unexpected content %r' %
                    (pkg_dir, f)
                )
class PipTestEnvironment(scripttest.TestFileEnvironment):
    """
    A specialized TestFileEnvironment for testing pip
    """

    #
    # Attribute naming convention
    # ---------------------------
    #
    # Instances of this class have many attributes representing paths
    # in the filesystem. To keep things straight, absolute paths have
    # a name of the form xxxx_path and relative paths have a name that
    # does not end in '_path'.

    # exe is the platform executable suffix ('' on POSIX, '.exe' on Windows).
    exe = sys.platform == 'win32' and '.exe' or ''
    verbose = False

    def __init__(self, base_path, *args, **kwargs):
        """Set up a scratch dir, a virtualenv layout and a controlled
        environment under *base_path*.  Requires a ``virtualenv=`` kwarg
        pointing at the virtualenv to use."""
        # Make our base_path a test.lib.path.Path object
        base_path = Path(base_path)

        # Store paths related to the virtual environment
        _virtualenv = kwargs.pop("virtualenv")
        venv, lib, include, bin = virtualenv.path_locations(_virtualenv)
        # workaround for https://github.com/pypa/virtualenv/issues/306
        if hasattr(sys, "pypy_version_info"):
            lib = os.path.join(venv, 'lib-python', pyversion)
        self.venv_path = venv
        self.lib_path = lib
        self.include_path = include
        self.bin_path = bin

        if hasattr(sys, "pypy_version_info"):
            self.site_packages_path = self.venv_path.join("site-packages")
        else:
            self.site_packages_path = self.lib_path.join("site-packages")

        self.user_base_path = self.venv_path.join("user")
        self.user_bin_path = self.user_base_path.join(
            self.bin_path - self.venv_path
        )
        self.user_site_path = self.venv_path.join(
            "user",
            site.USER_SITE[len(site.USER_BASE) + 1:],
        )

        # Create a Directory to use as a scratch pad
        self.scratch_path = base_path.join("scratch").mkdir()

        # Set our default working directory
        kwargs.setdefault("cwd", self.scratch_path)

        # Setup our environment
        environ = kwargs.get("environ")
        if environ is None:
            environ = os.environ.copy()

        environ["PATH"] = Path.pathsep.join(
            [self.bin_path] + [environ.get("PATH", [])],
        )
        environ["PYTHONUSERBASE"] = self.user_base_path
        # Writing bytecode can mess up updated file detection
        environ["PYTHONDONTWRITEBYTECODE"] = "1"
        kwargs["environ"] = environ

        # Call the TestFileEnvironment __init__
        super(PipTestEnvironment, self).__init__(base_path, *args, **kwargs)

        # Expand our absolute path directories into relative
        for name in ["base", "venv", "lib", "include", "bin", "site_packages",
                     "user_base", "user_site", "user_bin", "scratch"]:
            real_name = "%s_path" % name
            setattr(self, name, getattr(self, real_name) - self.base_path)

        # Make sure temp_path is a Path object
        self.temp_path = Path(self.temp_path)
        # Ensure the tmp dir exists, things break horribly if it doesn't
        self.temp_path.mkdir()

        # create easy-install.pth in user_site, so we always have it updated
        # instead of created
        self.user_site_path.makedirs()
        self.user_site_path.join("easy-install.pth").touch()

    def _ignore_file(self, fn):
        # Bytecode artifacts churn between runs; exclude them from the
        # before/after filesystem diff.
        if fn.endswith('__pycache__') or fn.endswith(".pyc"):
            result = True
        else:
            result = super(PipTestEnvironment, self)._ignore_file(fn)
        return result

    def run(self, *args, **kw):
        """Run a command in the environment, returning a TestPipResult."""
        if self.verbose:
            print('>> running %s %s' % (args, kw))
        cwd = kw.pop('cwd', None)
        run_from = kw.pop('run_from', None)
        assert not cwd or not run_from, "Don't use run_from; it's going away"
        cwd = cwd or run_from or self.cwd
        return TestPipResult(
            super(PipTestEnvironment, self).run(cwd=cwd, *args, **kw),
            verbose=self.verbose,
        )

    def pip(self, *args, **kwargs):
        """Run the in-venv ``pip`` executable with the given arguments."""
        return self.run("pip", *args, **kwargs)

    def pip_install_local(self, *args, **kwargs):
        """Run ``pip install`` restricted to the local test packages dir."""
        return self.pip(
            "install", "--no-index",
            "--find-links", path_to_url(os.path.join(DATA_DIR, "packages")),
            *args, **kwargs
        )
# FIXME ScriptTest does something similar, but only within a single
# ProcResult; this generalizes it so states can be compared across
# multiple commands. Maybe should be rolled into ScriptTest?
def diff_states(start, end, ignore=None):
    """
    Differences two "filesystem states" as represented by dictionaries
    of FoundFile and FoundDir objects.

    Returns a dictionary with following keys:
    ``deleted``
        Dictionary of files/directories found only in the start state.
    ``created``
        Dictionary of files/directories found only in the end state.
    ``updated``
        Dictionary of files whose size has changed (FIXME not entirely
        reliable, but comparing contents is not possible because
        FoundFile.bytes is lazy, and comparing mtime doesn't help if
        we want to know if a file has been returned to its earlier
        state).

    Ignores mtime and other file attributes; only presence/absence and
    size are considered.
    """
    ignore = ignore or []

    def prefix_match(path, prefix):
        # A path matches when it equals the prefix or lives below it.
        if path == prefix:
            return True
        prefix = prefix.rstrip(os.path.sep) + os.path.sep
        return path.startswith(prefix)

    start_keys = {k for k in start
                  if not any(prefix_match(k, i) for i in ignore)}
    end_keys = {k for k in end
                if not any(prefix_match(k, i) for i in ignore)}

    deleted = {k: start[k] for k in start_keys - end_keys}
    created = {k: end[k] for k in end_keys - start_keys}
    updated = {k: end[k] for k in start_keys & end_keys
               if start[k].size != end[k].size}

    return dict(deleted=deleted, created=created, updated=updated)
def assert_all_changes(start_state, end_state, expected_changes):
    """
    Fails if anything changed that isn't listed in the
    expected_changes.

    start_state is either a dict mapping paths to
    scripttest.[FoundFile|FoundDir] objects or a TestPipResult whose
    files_before we'll test. end_state is either a similar dict or a
    TestPipResult whose files_after we'll test.

    Note: listing a directory means anything below
    that directory can be expected to have changed.
    """
    __tracebackhide__ = True

    if isinstance(start_state, TestPipResult):
        start_files = start_state.files_before
    else:
        start_files = start_state
    if isinstance(end_state, TestPipResult):
        end_files = end_state.files_after
    else:
        end_files = end_state

    diff = diff_states(start_files, end_files, ignore=expected_changes)
    if any(diff.values()):
        details = '\n'.join(
            '%s: %s' % (kind, ', '.join(entries.keys()))
            for kind, entries in diff.items()
        )
        raise TestFailure('Unexpected changes:\n' + details)

    # Don't throw away this potentially useful information
    return diff
def _create_test_package_with_subdirectory(script, subdirectory):
    """Create a git-committed 'version_pkg' project in the scratch dir that
    also contains a nested 'version_subpkg' project under *subdirectory*.
    Returns the path of the outer project."""
    script.scratch_path.join("version_pkg").mkdir()
    version_pkg_path = script.scratch_path / 'version_pkg'
    version_pkg_path.join("version_pkg.py").write(textwrap.dedent("""
        def main():
            print('0.1')
        """))
    version_pkg_path.join("setup.py").write(
        textwrap.dedent("""
            from setuptools import setup, find_packages
            setup(name='version_pkg',
                  version='0.1',
                  packages=find_packages(),
                  py_modules=['version_pkg'],
                  entry_points=dict(console_scripts=['version_pkg=version_pkg:main']))
        """))

    subdirectory_path = version_pkg_path.join(subdirectory)
    subdirectory_path.mkdir()
    subdirectory_path.join('version_subpkg.py').write(textwrap.dedent("""
        def main():
            print('0.1')
        """))

    subdirectory_path.join('setup.py').write(
        textwrap.dedent("""
            from setuptools import setup, find_packages
            setup(name='version_subpkg',
                  version='0.1',
                  packages=find_packages(),
                  py_modules=['version_subpkg'],
                  entry_points=dict(console_scripts=['version_pkg=version_subpkg:main']))
        """))

    # Commit everything so the project can be installed from a VCS URL.
    script.run('git', 'init', cwd=version_pkg_path)
    script.run('git', 'add', '.', cwd=version_pkg_path)
    script.run(
        'git', 'commit', '-q',
        '--author', 'pip <pypa-dev@googlegroups.com>',
        '-am', 'initial version', cwd=version_pkg_path
    )

    return version_pkg_path
def _create_test_package(script):
    """Create a minimal git-committed 'version_pkg' project in the scratch
    dir and return its path."""
    script.scratch_path.join("version_pkg").mkdir()
    version_pkg_path = script.scratch_path / 'version_pkg'
    version_pkg_path.join("version_pkg.py").write(textwrap.dedent("""
        def main():
            print('0.1')
        """))
    version_pkg_path.join("setup.py").write(textwrap.dedent("""
        from setuptools import setup, find_packages
        setup(
            name='version_pkg',
            version='0.1',
            packages=find_packages(),
            py_modules=['version_pkg'],
            entry_points=dict(console_scripts=['version_pkg=version_pkg:main'])
        )
        """))
    # Commit everything so the project can be installed from a VCS URL.
    script.run('git', 'init', cwd=version_pkg_path)
    script.run('git', 'add', '.', cwd=version_pkg_path)
    script.run(
        'git', 'commit', '-q',
        '--author', 'pip <pypa-dev@googlegroups.com>',
        '-am', 'initial version', cwd=version_pkg_path,
    )
    return version_pkg_path
def _change_test_package_version(script, version_pkg_path):
    """Overwrite version_pkg.py in the given project and commit the change,
    so the repo has a second revision with different output."""
    version_pkg_path.join("version_pkg.py").write(textwrap.dedent('''\
        def main():
            print("some different version")'''))
    # Drop any untracked leftovers before committing the new version.
    script.run(
        'git', 'clean', '-qfdx',
        cwd=version_pkg_path,
        expect_stderr=True,
    )
    script.run(
        'git', 'commit', '-q',
        '--author', 'pip <pypa-dev@googlegroups.com>',
        '-am', 'messed version',
        cwd=version_pkg_path,
        expect_stderr=True,
    )
def assert_raises_regexp(exception, reg, run, *args, **kwargs):
    """Like assertRaisesRegexp in unittest.

    Calls ``run(*args, **kwargs)`` and asserts that it raises and that the
    raised exception's string form matches the regexp *reg*.  Note that, as
    before, *exception* is only used in the failure message — the type of
    the raised exception is not enforced.
    """
    __tracebackhide__ = True
    try:
        run(*args, **kwargs)
    except Exception:
        e = sys.exc_info()[1]
        p = re.compile(reg)
        assert p.search(str(e)), str(e)
    else:
        # Previously this assert sat inside the try block, so the
        # AssertionError it raised was swallowed by the except clause and
        # re-checked against the regexp — a non-raising callable could pass
        # silently whenever the regexp happened to match the failure
        # message.  Raising from an else clause reports it directly.
        assert False, "%s should have been thrown" % exception
| mit |
BeATz-UnKNoWN/python-for-android | python-build/python-libs/gdata/src/atom/http.py | 136 | 10927 | #!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""HttpClients in this module use httplib to make HTTP requests.
This module make HTTP requests based on httplib, but there are environments
in which an httplib based approach will not work (if running in Google App
Engine for example). In those cases, higher level classes (like AtomService
and GDataService) can swap out the HttpClient to transparently use a
different mechanism for making HTTP requests.
HttpClient: Contains a request method which performs an HTTP call to the
server.
ProxiedHttpClient: Contains a request method which connects to a proxy using
settings stored in operating system environment variables then
performs an HTTP call to the endpoint server.
"""
__author__ = 'api.jscudder (Jeff Scudder)'
import types
import os
import httplib
import atom.url
import atom.http_interface
import socket
import base64
import atom.http_core
class ProxyError(atom.http_interface.Error):
  """Raised when the CONNECT handshake with an HTTPS proxy fails."""
  pass


class TestConfigurationError(Exception):
  """Raised when test configuration is missing or inconsistent."""
  pass


# Content-Type used when a request supplies a body but no explicit type.
DEFAULT_CONTENT_TYPE = 'application/atom+xml'
class HttpClient(atom.http_interface.GenericHttpClient):
  # Added to allow old v1 HttpClient objects to use the new
  # http_code.HttpClient. Used in unit tests to inject a mock client.
  v2_http_client = None

  def __init__(self, headers=None):
    """Create a client with optional default headers sent on every request."""
    self.debug = False
    self.headers = headers or {}

  def request(self, operation, url, data=None, headers=None):
    """Performs an HTTP call to the server, supports GET, POST, PUT, and
    DELETE.

    Usage example, perform and HTTP GET on http://www.google.com/:
      import atom.http
      client = atom.http.HttpClient()
      http_response = client.request('GET', 'http://www.google.com/')

    Args:
      operation: str The HTTP operation to be performed. This is usually one
          of 'GET', 'POST', 'PUT', or 'DELETE'
      data: filestream, list of parts, or other object which can be converted
          to a string. Should be set to None when performing a GET or DELETE.
          If data is a file-like object which can be read, this method will
          read a chunk of 100K bytes at a time and send them.
          If the data is a list of parts to be sent, each part will be
          evaluated and sent.
      url: The full URL to which the request should be sent. Can be a string
          or atom.url.Url.
      headers: dict of strings. HTTP headers which should be sent
          in the request.
    """
    all_headers = self.headers.copy()
    if headers:
      all_headers.update(headers)

    # If the list of headers does not include a Content-Length, attempt to
    # calculate it based on the data object.
    if data and 'Content-Length' not in all_headers:
      if isinstance(data, types.StringTypes):
        all_headers['Content-Length'] = str(len(data))
      else:
        raise atom.http_interface.ContentLengthRequired('Unable to calculate '
            'the length of the data parameter. Specify a value for '
            'Content-Length')

    # Set the content type to the default value if none was set.
    if 'Content-Type' not in all_headers:
      all_headers['Content-Type'] = DEFAULT_CONTENT_TYPE

    # If a v2 client was injected, delegate the whole request to it.
    if self.v2_http_client is not None:
      http_request = atom.http_core.HttpRequest(method=operation)
      atom.http_core.Uri.parse_uri(str(url)).modify_request(http_request)
      http_request.headers = all_headers
      if data:
        http_request._body_parts.append(data)
      return self.v2_http_client.request(http_request=http_request)

    if not isinstance(url, atom.url.Url):
      if isinstance(url, types.StringTypes):
        url = atom.url.parse_url(url)
      else:
        raise atom.http_interface.UnparsableUrlObject('Unable to parse url '
            'parameter because it was not a string or atom.url.Url')

    connection = self._prepare_connection(url, all_headers)

    if self.debug:
      connection.debuglevel = 1

    connection.putrequest(operation, self._get_access_url(url),
        skip_host=True)
    if url.port is not None:
      connection.putheader('Host', '%s:%s' % (url.host, url.port))
    else:
      connection.putheader('Host', url.host)

    # Overcome a bug in Python 2.4 and 2.5
    # httplib.HTTPConnection.putrequest adding
    # HTTP request header 'Host: www.google.com:443' instead of
    # 'Host: www.google.com', and thus resulting the error message
    # 'Token invalid - AuthSub token has wrong scope' in the HTTP response.
    if (url.protocol == 'https' and int(url.port or 443) == 443 and
        hasattr(connection, '_buffer') and
        isinstance(connection._buffer, list)):
      header_line = 'Host: %s:443' % url.host
      replacement_header_line = 'Host: %s' % url.host
      try:
        connection._buffer[connection._buffer.index(header_line)] = (
            replacement_header_line)
      except ValueError:  # header_line missing from connection._buffer
        pass

    # Send the HTTP headers.
    for header_name in all_headers:
      connection.putheader(header_name, all_headers[header_name])
    connection.endheaders()

    # If there is data, send it in the request.
    if data:
      if isinstance(data, list):
        for data_part in data:
          _send_data_part(data_part, connection)
      else:
        _send_data_part(data, connection)

    # Return the HTTP Response from the server.
    return connection.getresponse()

  def _prepare_connection(self, url, headers):
    """Return an httplib HTTP(S)Connection for *url* (headers unused here)."""
    if not isinstance(url, atom.url.Url):
      if isinstance(url, types.StringTypes):
        url = atom.url.parse_url(url)
      else:
        raise atom.http_interface.UnparsableUrlObject('Unable to parse url '
            'parameter because it was not a string or atom.url.Url')
    if url.protocol == 'https':
      if not url.port:
        return httplib.HTTPSConnection(url.host)
      return httplib.HTTPSConnection(url.host, int(url.port))
    else:
      if not url.port:
        return httplib.HTTPConnection(url.host)
      return httplib.HTTPConnection(url.host, int(url.port))

  def _get_access_url(self, url):
    """Return the request target as a string (full URL form)."""
    return url.to_string()
class ProxiedHttpClient(HttpClient):
"""Performs an HTTP request through a proxy.
The proxy settings are obtained from enviroment variables. The URL of the
proxy server is assumed to be stored in the environment variables
'https_proxy' and 'http_proxy' respectively. If the proxy server requires
a Basic Auth authorization header, the username and password are expected to
be in the 'proxy-username' or 'proxy_username' variable and the
'proxy-password' or 'proxy_password' variable.
After connecting to the proxy server, the request is completed as in
HttpClient.request.
"""
def _prepare_connection(self, url, headers):
proxy_auth = _get_proxy_auth()
if url.protocol == 'https':
# destination is https
proxy = os.environ.get('https_proxy')
if proxy:
# Set any proxy auth headers
if proxy_auth:
proxy_auth = 'Proxy-authorization: %s' % proxy_auth
# Construct the proxy connect command.
port = url.port
if not port:
port = '443'
proxy_connect = 'CONNECT %s:%s HTTP/1.0\r\n' % (url.host, port)
# Set the user agent to send to the proxy
if headers and 'User-Agent' in headers:
user_agent = 'User-Agent: %s\r\n' % (headers['User-Agent'])
else:
user_agent = ''
proxy_pieces = '%s%s%s\r\n' % (proxy_connect, proxy_auth, user_agent)
# Find the proxy host and port.
proxy_url = atom.url.parse_url(proxy)
if not proxy_url.port:
proxy_url.port = '80'
# Connect to the proxy server, very simple recv and error checking
p_sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
p_sock.connect((proxy_url.host, int(proxy_url.port)))
p_sock.sendall(proxy_pieces)
response = ''
# Wait for the full response.
while response.find("\r\n\r\n") == -1:
response += p_sock.recv(8192)
p_status = response.split()[1]
if p_status != str(200):
raise ProxyError('Error status=%s' % str(p_status))
# Trivial setup for ssl socket.
ssl = socket.ssl(p_sock, None, None)
fake_sock = httplib.FakeSocket(p_sock, ssl)
# Initalize httplib and replace with the proxy socket.
connection = httplib.HTTPConnection(proxy_url.host)
connection.sock=fake_sock
return connection
else:
# The request was HTTPS, but there was no https_proxy set.
return HttpClient._prepare_connection(self, url, headers)
else:
proxy = os.environ.get('http_proxy')
if proxy:
# Find the proxy host and port.
proxy_url = atom.url.parse_url(proxy)
if not proxy_url.port:
proxy_url.port = '80'
if proxy_auth:
headers['Proxy-Authorization'] = proxy_auth.strip()
return httplib.HTTPConnection(proxy_url.host, int(proxy_url.port))
else:
# The request was HTTP, but there was no http_proxy set.
return HttpClient._prepare_connection(self, url, headers)
  def _get_access_url(self, url):
    """Return the URL string to put on the request line.

    Returns the full absolute URL -- presumably because a proxied request
    must carry the absolute form rather than just the path (TODO confirm
    against the base HttpClient implementation).
    """
    return url.to_string()
def _get_proxy_auth():
  """Build a Proxy-Authorization header value from the environment.

  Credentials are read from ``proxy-username`` (falling back to
  ``proxy_username``) and ``proxy-password`` (falling back to
  ``proxy_password``).

  Returns:
    ``'Basic <base64(user:pass)>'`` followed by CRLF when a username is
    configured, otherwise the empty string.
  """
  proxy_username = os.environ.get('proxy-username')
  if not proxy_username:
    proxy_username = os.environ.get('proxy_username')
  proxy_password = os.environ.get('proxy-password')
  if not proxy_password:
    proxy_password = os.environ.get('proxy_password')
  if proxy_username:
    # base64.b64encode -- unlike the deprecated base64.encodestring, which
    # was removed in Python 3.9 -- never inserts line breaks, so long
    # credentials can no longer corrupt the header with embedded newlines.
    credentials = '%s:%s' % (proxy_username, proxy_password)
    user_auth = base64.b64encode(credentials.encode('utf-8')).decode('ascii')
    return 'Basic %s\r\n' % user_auth
  else:
    return ''
def _send_data_part(data, connection):
  """Write one piece of request data to connection.

  Strings are sent verbatim, file-like objects (anything exposing a read()
  method) are streamed out in chunks, and any other object is sent as
  str(data).
  """
  if isinstance(data, types.StringTypes):
    connection.send(data)
  elif hasattr(data, 'read'):
    # File-like: stream in fixed-size chunks to bound memory use.
    chunk = data.read(100000)
    while chunk != '':
      connection.send(chunk)
      chunk = data.read(100000)
  else:
    # Not a string and not a file; fall back to its string representation.
    connection.send(str(data))
| apache-2.0 |
sinuos/shadowsocks | tests/test.py | 1016 | 5029 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import sys
import os
import signal
import select
import time
import argparse
from subprocess import Popen, PIPE
python = ['python']
default_url = 'http://localhost/'
# CLI options: config files and extra arguments for the client/server
# daemons, plus switches controlling how success is judged.
parser = argparse.ArgumentParser(description='test Shadowsocks')
parser.add_argument('-c', '--client-conf', type=str, default=None)
parser.add_argument('-s', '--server-conf', type=str, default=None)
parser.add_argument('-a', '--client-args', type=str, default=None)
parser.add_argument('-b', '--server-args', type=str, default=None)
parser.add_argument('--with-coverage', action='store_true', default=None)
parser.add_argument('--should-fail', action='store_true', default=None)
parser.add_argument('--tcp-only', action='store_true', default=None)
parser.add_argument('--url', type=str, default=default_url)
parser.add_argument('--dns', type=str, default='8.8.8.8')
config = parser.parse_args()
if config.with_coverage:
    # Run both daemons under coverage so their execution is recorded.
    python = ['coverage', 'run', '-p', '-a']
client_args = python + ['shadowsocks/local.py', '-v']
server_args = python + ['shadowsocks/server.py', '-v']
if config.client_conf:
    client_args.extend(['-c', config.client_conf])
    if config.server_conf:
        server_args.extend(['-c', config.server_conf])
    else:
        # No dedicated server config given: reuse the client's.
        server_args.extend(['-c', config.client_conf])
if config.client_args:
    client_args.extend(config.client_args.split())
    if config.server_args:
        server_args.extend(config.server_args.split())
    else:
        # Likewise, fall back to the client's extra arguments.
        server_args.extend(config.client_args.split())
if config.url == default_url:
    # Testing against localhost: clear the forbidden-ip filter so the
    # server will accept connections to the local machine.
    server_args.extend(['--forbidden-ip', ''])
# Launch server (p1) and client (p2); curl (p3) and dig (p4) come later.
p1 = Popen(server_args, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
p2 = Popen(client_args, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
p3 = None
p4 = None
p3_fin = False
p4_fin = False
# State machine for the test driver loop below:
# 1 shadowsocks started
# 2 curl started
# 3 curl finished
# 4 dig started
# 5 dig finished
stage = 1
try:
    # Ready flags are recorded from the daemons' log lines but are not
    # otherwise consulted in this version of the driver.
    local_ready = False
    server_ready = False
    # Multiplex over the children's stdout/stderr pipes; curl/dig pipes
    # join this set once those helpers are spawned.
    fdset = [p1.stdout, p2.stdout, p1.stderr, p2.stderr]
    while True:
        r, w, e = select.select(fdset, [], fdset)
        if e:
            break
        for fd in r:
            line = fd.readline()
            if not line:
                # EOF on a helper's stdout: advance past that helper.
                if stage == 2 and fd == p3.stdout:
                    stage = 3
                if stage == 4 and fd == p4.stdout:
                    stage = 5
            if bytes != str:
                # Python 3: pipe data arrives as bytes; decode for matching.
                line = str(line, 'utf8')
            sys.stderr.write(line)
            if line.find('starting local') >= 0:
                local_ready = True
            if line.find('starting server') >= 0:
                server_ready = True
        if stage == 1:
            # Give both daemons a moment to come up, then fetch the URL
            # through the client's SOCKS5 port to exercise the TCP path.
            time.sleep(2)
            p3 = Popen(['curl', config.url, '-v', '-L',
                        '--socks5-hostname', '127.0.0.1:1081',
                        '-m', '15', '--connect-timeout', '10'],
                       stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
            if p3 is not None:
                fdset.append(p3.stdout)
                fdset.append(p3.stderr)
                stage = 2
            else:
                sys.exit(1)
        if stage == 3 and p3 is not None:
            fdset.remove(p3.stdout)
            fdset.remove(p3.stderr)
            r = p3.wait()
            # curl's exit code must match expectations (--should-fail inverts).
            if config.should_fail:
                if r == 0:
                    sys.exit(1)
            else:
                if r != 0:
                    sys.exit(1)
            if config.tcp_only:
                break
            # Now exercise the UDP path with a DNS query through socksify.
            p4 = Popen(['socksify', 'dig', '@%s' % config.dns,
                        'www.google.com'],
                       stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
            if p4 is not None:
                fdset.append(p4.stdout)
                fdset.append(p4.stderr)
                stage = 4
            else:
                sys.exit(1)
        if stage == 5:
            r = p4.wait()
            if config.should_fail:
                if r == 0:
                    sys.exit(1)
                print('test passed (expecting failure)')
            else:
                if r != 0:
                    sys.exit(1)
                print('test passed')
            break
finally:
    # Always shut the daemons down, even if the test aborted early.
    for p in [p1, p2]:
        try:
            os.kill(p.pid, signal.SIGINT)
            os.waitpid(p.pid, 0)
        except OSError:
            pass
| apache-2.0 |
wtgme/labeldoc2vec | gensim/test/test_logentropy_model.py | 18 | 2917 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <radimrehurek@seznam.cz>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Automated tests for checking transformation algorithms (the models package).
"""
import logging
import unittest
import os
import os.path
import tempfile
import six
import numpy
import scipy.linalg
from gensim.corpora import mmcorpus, Dictionary
from gensim.models import logentropy_model
from gensim import matutils
module_path = os.path.dirname(__file__)  # needed because sample data files are located in the same folder
datapath = lambda fname: os.path.join(module_path, 'test_data', fname)
# set up vars used in testing ("Deerwester" from the web tutorial)
texts = [['human', 'interface', 'computer'],
         ['survey', 'user', 'computer', 'system', 'response', 'time'],
         ['eps', 'user', 'interface', 'system'],
         ['system', 'human', 'system', 'eps'],
         ['user', 'response', 'time'],
         ['trees'],
         ['graph', 'trees'],
         ['graph', 'minors', 'trees'],
         ['graph', 'minors', 'survey']]
# Map each token to an integer id, then convert the documents to
# bag-of-words vectors (lists of (token_id, count) pairs).
dictionary = Dictionary(texts)
corpus = [dictionary.doc2bow(text) for text in texts]
def testfile():
    """Return the scratch path used for temporary model files in these tests."""
    # temporary data will be stored to this file
    tmp_dir = tempfile.gettempdir()
    return os.path.join(tmp_dir, 'gensim_models.tst')
class TestLogEntropyModel(unittest.TestCase):
    """Tests for the log-entropy transformation model."""

    def setUp(self):
        # Two small matrix-market corpora shipped alongside the tests.
        self.corpus_small = mmcorpus.MmCorpus(datapath('test_corpus_small.mm'))
        self.corpus_ok = mmcorpus.MmCorpus(datapath('test_corpus_ok.mm'))

    def testTransform(self):
        """Transforming a document must reproduce the known weights."""
        model = logentropy_model.LogEntropyModel(self.corpus_ok, normalize=False)
        first_doc = list(self.corpus_ok)[0]
        got = model[first_doc]
        want = [(0, 0.3748900964125389),
                (1, 0.30730215324230725),
                (3, 1.20941755462856)]
        self.assertTrue(numpy.allclose(got, want))

    def testPersistence(self):
        """A model saved to disk must load back with identical state."""
        path = testfile()
        model = logentropy_model.LogEntropyModel(self.corpus_ok, normalize=True)
        model.save(path)
        reloaded = logentropy_model.LogEntropyModel.load(path)
        self.assertTrue(model.entr == reloaded.entr)
        empty_vec = []
        self.assertTrue(numpy.allclose(model[empty_vec], reloaded[empty_vec]))

    def testPersistenceCompressed(self):
        """Saving with a .gz suffix must round-trip through compression."""
        path = testfile() + '.gz'
        model = logentropy_model.LogEntropyModel(self.corpus_ok, normalize=True)
        model.save(path)
        reloaded = logentropy_model.LogEntropyModel.load(path, mmap=None)
        self.assertTrue(model.entr == reloaded.entr)
        empty_vec = []
        self.assertTrue(numpy.allclose(model[empty_vec], reloaded[empty_vec]))
#endclass TestLogEntropyModel
if __name__ == '__main__':
    # Verbose logging helps diagnose failures when the suite is run directly.
    logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
    unittest.main()
| lgpl-2.1 |
kornicameister/ansible-modules-extras | cloud/openstack/os_keystone_service.py | 45 | 6224 | #!/usr/bin/python
# Copyright 2016 Sam Yaple
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
from distutils.version import StrictVersion
DOCUMENTATION = '''
---
module: os_keystone_service
short_description: Manage OpenStack Identity services
extends_documentation_fragment: openstack
author: "Sam Yaple (@SamYaple)"
version_added: "2.2"
description:
- Create, update, or delete OpenStack Identity service. If a service
with the supplied name already exists, it will be updated with the
new description and enabled attributes.
options:
name:
description:
- Name of the service
required: true
description:
description:
- Description of the service
required: false
default: None
enabled:
description:
- Is the service enabled
required: false
default: True
service_type:
description:
- The type of service
required: true
state:
description:
- Should the resource be present or absent.
choices: [present, absent]
default: present
requirements:
- "python >= 2.6"
- "shade"
'''
EXAMPLES = '''
# Create a service for glance
- os_keystone_service:
cloud: mycloud
state: present
name: glance
service_type: image
description: OpenStack Image Service
# Delete a service
- os_keystone_service:
cloud: mycloud
state: absent
name: glance
service_type: image
'''
RETURN = '''
service:
description: Dictionary describing the service.
returned: On success when I(state) is 'present'
type: dictionary
contains:
id:
description: Service ID.
type: string
sample: "3292f020780b4d5baf27ff7e1d224c44"
name:
description: Service name.
type: string
sample: "glance"
service_type:
description: Service type.
type: string
sample: "image"
description:
description: Service description.
type: string
sample: "OpenStack Image Service"
enabled:
description: Service status.
type: boolean
sample: True
id:
description: The service ID.
returned: On success when I(state) is 'present'
type: string
sample: "3292f020780b4d5baf27ff7e1d224c44"
'''
def _needs_update(module, service):
    """Return True when the existing service differs from the requested state.

    Compares the enabled flag and the description.  NOTE: a service whose
    current description is None is never reported as needing a description
    update (matches the original comparison).
    """
    desired = module.params
    if service.enabled != desired['enabled']:
        return True
    has_description = service.description is not None
    return has_description and service.description != desired['description']
def _system_state_change(module, service):
    """Predict whether applying the module would change anything (check mode)."""
    desired_state = module.params['state']
    if desired_state == 'absent':
        # Deleting changes something only if the service currently exists.
        return bool(service)
    if desired_state == 'present':
        # Either create a missing service or update a drifted one.
        return service is None or _needs_update(module, service)
    return False
def main():
    """Entry point: reconcile the Keystone service with the requested state."""
    argument_spec = openstack_full_argument_spec(
        description=dict(default=None),
        enabled=dict(default=True, type='bool'),
        name=dict(required=True),
        service_type=dict(required=True),
        state=dict(default='present', choices=['absent', 'present']),
    )
    module_kwargs = openstack_module_kwargs()
    module = AnsibleModule(argument_spec,
                           supports_check_mode=True,
                           **module_kwargs)
    if not HAS_SHADE:
        module.fail_json(msg='shade is required for this module')
    # search_services with a filters argument needs shade >= 1.6.0.
    # NOTE(review): the concatenated message below lacks a space between
    # "of" and "the".
    if StrictVersion(shade.__version__) < StrictVersion('1.6.0'):
        module.fail_json(msg="To utilize this module, the installed version of"
                             "the shade library MUST be >=1.6.0")
    description = module.params['description']
    enabled = module.params['enabled']
    name = module.params['name']
    state = module.params['state']
    service_type = module.params['service_type']
    try:
        cloud = shade.operator_cloud(**module.params)
        # (name, type) must identify at most one existing service.
        services = cloud.search_services(name_or_id=name,
                                         filters=dict(type=service_type))
        if len(services) > 1:
            module.fail_json(msg='Service name %s and type %s are not unique' %
                             (name, service_type))
        elif len(services) == 1:
            service = services[0]
        else:
            service = None
        if module.check_mode:
            # Report what would change without touching the cloud.
            module.exit_json(changed=_system_state_change(module, service))
        if state == 'present':
            if service is None:
                # No such service yet: create it.
                service = cloud.create_service(name=name,
                    description=description, type=service_type, enabled=True)
                changed = True
            else:
                # Service exists: update it only if something drifted.
                if _needs_update(module, service):
                    service = cloud.update_service(
                        service.id, name=name, type=service_type, enabled=enabled,
                        description=description)
                    changed = True
                else:
                    changed = False
            module.exit_json(changed=changed, service=service, id=service.id)
        elif state == 'absent':
            if service is None:
                changed=False
            else:
                cloud.delete_service(service.id)
                changed=True
            module.exit_json(changed=changed)
    except shade.OpenStackCloudException as e:
        module.fail_json(msg=str(e))
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
| gpl-3.0 |
manqala/erpnext | erpnext/stock/report/bom_search/bom_search.py | 86 | 1083 | # Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
def execute(filters=None):
	"""Return (columns, data) for the BOM Search query report.

	Finds parent documents (BOMs or Product Bundles) that contain every
	item code given in *filters*; any filter key other than
	``search_sub_assemblies`` is treated as a required item.
	NOTE(review): *filters* appears to be a frappe._dict (attribute access
	plus ``iteritems``); this code is Python 2 only -- confirm before
	porting.
	"""
	data = []
	# child doctype -> doctype of the parent document it belongs to
	parents = {
		"Product Bundle Item": "Product Bundle",
		"BOM Explosion Item": "BOM",
		"BOM Item": "BOM"
	}
	# With search_sub_assemblies set, use the exploded item list so that
	# items nested in sub-assemblies also match.
	for doctype in ("Product Bundle Item",
		"BOM Explosion Item" if filters.search_sub_assemblies else "BOM Item"):
		# parent document name -> list of item codes it contains
		all_boms = {}
		for d in frappe.get_all(doctype, fields=["parent", "item_code"]):
			all_boms.setdefault(d.parent, []).append(d.item_code)
		# Keep only parents that contain every requested item.
		for parent, items in all_boms.iteritems():
			valid = True
			for key, item in filters.iteritems():
				if key != "search_sub_assemblies":
					if item and item not in items:
						valid = False
			if valid:
				data.append((parent, parents[doctype]))
	return [{
		"fieldname": "parent",
		"label": "BOM",
		"width": 200,
		"fieldtype": "Dynamic Link",
		"options": "doctype"
	},
	{
		"fieldname": "doctype",
		"label": "Type",
		"width": 200,
		"fieldtype": "Data"
	}], data
| gpl-3.0 |
abircse06/youtube-dl | youtube_dl/extractor/jukebox.py | 140 | 2153 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
RegexNotFoundError,
unescapeHTML,
)
class JukeboxIE(InfoExtractor):
    """Extractor for jukebox.* video-clip pages.

    Downloads the page, follows the embedded player iframe, and returns
    either a direct flv URL or delegates to the Youtube extractor when the
    player embeds a Youtube video instead.
    """
    _VALID_URL = r'^http://www\.jukebox?\..+?\/.+[,](?P<id>[a-z0-9\-]+)\.html'
    _TEST = {
        'url': 'http://www.jukebox.es/kosheen/videoclip,pride,r303r.html',
        'info_dict': {
            'id': 'r303r',
            'ext': 'flv',
            'title': 'Kosheen-En Vivo Pride',
            'uploader': 'Kosheen',
        },
    }
    def _real_extract(self, url):
        video_id = self._match_id(url)
        html = self._download_webpage(url, video_id)
        # The actual player lives in an iframe; fetch its document too.
        iframe_url = unescapeHTML(self._search_regex(r'<iframe .*src="([^"]*)"', html, 'iframe url'))
        iframe_html = self._download_webpage(iframe_url, video_id, 'Downloading iframe')
        # The "waiting" CSS class marks geo-blocked/unavailable clips.
        if re.search(r'class="jkb_waiting"', iframe_html) is not None:
            raise ExtractorError('Video is not available(in your country?)!')
        self.report_extraction(video_id)
        try:
            # Direct stream URL from the player's JSON config.
            video_url = self._search_regex(r'"config":{"file":"(?P<video_url>http:[^"]+\?mdtk=[0-9]+)"',
                iframe_html, 'video url')
            video_url = unescapeHTML(video_url).replace('\/', '/')
        except RegexNotFoundError:
            # No direct stream: the clip is hosted on Youtube instead.
            youtube_url = self._search_regex(
                r'config":{"file":"(http:\\/\\/www\.youtube\.com\\/watch\?v=[^"]+)"',
                iframe_html, 'youtube url')
            youtube_url = unescapeHTML(youtube_url).replace('\/', '/')
            self.to_screen('Youtube video detected')
            return self.url_result(youtube_url, ie='Youtube')
        title = self._html_search_regex(r'<h1 class="inline">([^<]+)</h1>',
            html, 'title')
        artist = self._html_search_regex(r'<span id="infos_article_artist">([^<]+)</span>',
            html, 'artist')
        return {
            'id': video_id,
            'url': video_url,
            'title': artist + '-' + title,
            'uploader': artist,
        }
| unlicense |
GeraldLoeffler/nupic | tests/swarming/nupic/swarming/experiments/dummyV2/description.py | 32 | 15324 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'/Users/ronmarianetti/nupic/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer,
DeferredDictLookup)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment
#
#
# NOTE: Use of DEFERRED VALUE-GETTERs: dictionary fields and list elements
# within the config dictionary may be assigned futures derived from the
# ValueGetterBase class, such as DeferredDictLookup.
# This facility is particularly handy for enabling substitution of values in
# the config dictionary from other values in the config dictionary, which is
# needed by permutation.py-based experiments. These values will be resolved
# during the call to applyValueGettersToContainer(),
# which we call after the base experiment's config dictionary is updated from
# the sub-experiment. See ValueGetterBase and
# DeferredDictLookup for more details about value-getters.
#
# For each custom encoder parameter to be exposed to the sub-experiment/
# permutation overrides, define a variable in this section, using key names
# beginning with a single underscore character to avoid collisions with
# pre-defined keys (e.g., _dsEncoderFieldName2_N).
#
# Example:
# config = dict(
# _dsEncoderFieldName2_N = 70,
# _dsEncoderFieldName2_W = 5,
# dsEncoderSchema = [
# base=dict(
# fieldname='Name2', type='ScalarEncoder',
# name='Name2', minval=0, maxval=270, clipInput=True,
# n=DeferredDictLookup('_dsEncoderFieldName2_N'),
# w=DeferredDictLookup('_dsEncoderFieldName2_W')),
# ],
# )
# updateConfigFromSubConfig(config)
# applyValueGettersToContainer(config)
config = {
# Type of model that the rest of these parameters apply to.
'model': "CLA",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': { 'days': 0,
'fields': [ (u'timestamp', 'first'),
(u'gym', 'first'),
(u'consumption', 'mean'),
(u'address', 'first')],
'hours': 0,
'microseconds': 0,
'milliseconds': 0,
'minutes': 0,
'months': 0,
'seconds': 0,
'weeks': 0,
'years': 0},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'TemporalNextStep',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
# at each step 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 0,
# Example:
# dsEncoderSchema = [
# DeferredDictLookup('__field_name_encoder'),
# ],
#
# (value generated from DS_ENCODER_SCHEMA)
'encoders': {
'address': { 'fieldname': u'address',
'n': 300,
'name': u'address',
'type': 'SDRCategoryEncoder',
'w': 21},
'consumption': { 'clipInput': True,
'fieldname': u'consumption',
'maxval': 200,
'minval': 0,
'n': 1500,
'name': u'consumption',
'type': 'ScalarEncoder',
'w': 21},
'gym': { 'fieldname': u'gym',
'n': 600,
'name': u'gym',
'type': 'SDRCategoryEncoder',
'w': 21},
'timestamp_dayOfWeek': { 'dayOfWeek': (7, 3),
'fieldname': u'timestamp',
'name': u'timestamp_dayOfWeek',
'type': 'DateEncoder'},
'timestamp_timeOfDay': { 'fieldname': u'timestamp',
'name': u'timestamp_timeOfDay',
'timeOfDay': (7, 8),
'type': 'DateEncoder'}},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
# Valid keys is the desired combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : None,
},
'spEnable': True,
'spParams': {
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActiveColumnsPerInhArea': 40,
'seed': 1956,
# potentialPct
# What percent of the columns's receptive field is available
# for potential synapses. At initialization time, we will
# choose potentialPct * (2*potentialRadius+1)^2
'potentialPct': 0.5,
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10. Cells whose activity
# level before inhibition falls below minDutyCycleBeforeInh
# will have their own internal synPermConnectedCell
# threshold set below this default value.
# (This concept applies to both SP and TP and so 'cells'
# is correct here as opposed to 'columns')
'synPermConnected': 0.1,
'synPermActiveInc': 0.1,
'synPermInactiveDec': 0.01,
},
# Controls whether TP is enabled or disabled;
# TP is necessary for making temporal predictions, such as predicting
# the next inputs. Without TP, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tpEnable' : True,
'tpParams': {
# TP diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
# (see verbosity in nupic/trunk/py/nupic/research/TP.py and TP10X*.py)
'verbosity': 0,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
# The number of cells (i.e., states), allocated per column.
'cellsPerColumn': 32,
'inputWidth': 2048,
'seed': 1960,
# Temporal Pooler implementation selector (see _getTPClass in
# CLARegion.py).
'temporalImp': 'cpp',
# New Synapse formation count
# NOTE: If None, use spNumActivePerInhArea
#
# TODO: need better explanation
'newSynapseCount': 15,
# Maximum number of synapses per segment
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSynapsesPerSegment': 32,
# Maximum number of segments per cell
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSegmentsPerCell': 128,
# Initial Permanence
# TODO: need better explanation
'initialPerm': 0.21,
# Permanence Increment
'permanenceInc': 0.1,
# Permanence Decrement
# If set to None, will automatically default to tpPermanenceInc
# value.
'permanenceDec' : 0.1,
'globalDecay': 0.0,
'maxAge': 0,
# Minimum number of active synapses for a segment to be considered
# during search for the best-matching segments.
# None=use default
# Replaces: tpMinThreshold
'minThreshold': 12,
# Segment activation threshold.
# A segment is active if it has >= tpSegmentActivationThreshold
# connected synapses that are active due to infActiveState
# None=use default
# Replaces: tpActivationThreshold
'activationThreshold': 16,
'outputType': 'normal',
# "Pay Attention Mode" length. This tells the TP how many new
# elements to append to the end of a learned sequence at a time.
# Smaller values are better for datasets with short sequences,
# higher values are better for datasets with long sequences.
'pamLength': 1,
},
'clParams': {
'regionName' : 'CLAClassifierRegion',
# Classifier diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
'clVerbosity' : 0,
# This controls how fast the classifier learns/forgets. Higher values
# make it adapt faster and forget older patterns faster.
'alpha': 0.001,
# This is set after the call to updateConfigFromSubConfig and is
# computed from the aggregationInfo and predictAheadTime.
'steps': '1',
},
'trainSPNetOnlyIfRequested': False,
},
}
# end of config dictionary
# Adjust base config dictionary for any modifications if imported from a
# sub-experiment
updateConfigFromSubConfig(config)
# Compute predictionSteps based on the predictAheadTime and the aggregation
# period, which may be permuted over.
if config['predictAheadTime'] is not None:
  predictionSteps = int(round(aggregationDivide(
    config['predictAheadTime'], config['aggregationInfo'])))
  assert (predictionSteps >= 1)
  # The classifier expects its step count as a string.
  config['modelParams']['clParams']['steps'] = str(predictionSteps)
# Adjust config by applying ValueGetterBase-derived
# futures. NOTE: this MUST be called after updateConfigFromSubConfig() in order
# to support value-getter-based substitutions from the sub-experiment (if any)
applyValueGettersToContainer(config)
control = {
# The environment that the current model is being run in
"environment": 'nupic',
# Input stream specification per py/nupicengine/cluster/database/StreamDef.json.
#
'dataset' : {u'info': u'test_NoProviders',
u'streams': [ { u'columns': [u'*'],
u'info': "test data",
u'source': "file://swarming/test_data.csv"}],
u'version': 1},
# Iteration count: maximum number of iterations. Each iteration corresponds
# to one record from the (possibly aggregated) dataset. The task is
# terminated when either number of iterations reaches iterationCount or
# all records in the (possibly aggregated) database have been processed,
# whichever occurs first.
#
# iterationCount of -1 = iterate over the entire dataset
#'iterationCount' : ITERATION_COUNT,
# Metrics: A list of MetricSpecs that instantiate the metrics that are
# computed for this experiment
'metrics':[
MetricSpec(field=u'consumption',inferenceElement=InferenceElement.prediction,
metric='rmse'),
],
# Logged Metrics: A sequence of regular expressions that specify which of
# the metrics from the Inference Specifications section MUST be logged for
# every prediction. The regex's correspond to the automatically generated
# metric labels. This is similar to the way the optimization metric is
# specified in permutations.py.
}
descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
control=control)
| agpl-3.0 |
espressomd/espresso | testsuite/python/galilei.py | 2 | 3432 | #
# Copyright (C) 2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import unittest as ut
import numpy as np
import espressomd
from espressomd.galilei import GalileiTransform
BOX_L = np.array([10, 20, 30])
class Galilei(ut.TestCase):
    """Tests for espressomd.galilei.GalileiTransform operations."""
    system = espressomd.System(box_l=BOX_L)
    def setUp(self):
        # Fill the box with particles carrying random positions, velocities
        # and forces (plus masses/rotation state where compiled in).
        N_PART = 500
        self.system.part.add(pos=BOX_L * np.random.random((N_PART, 3)),
                             v=-5. + 10. * np.random.random((N_PART, 3)),
                             f=np.random.random((N_PART, 3)))
        if espressomd.has_features("MASS"):
            self.system.part[:].mass = 42. * np.random.random((N_PART,))
        if espressomd.has_features("ROTATION"):
            # Strictly negative values so the "unchanged" checks below can
            # assert array_less(..., 0).
            self.system.part[:].omega_lab = -2. - np.random.random((N_PART, 3))
            self.system.part[:].torque_lab = - \
                2. - np.random.random((N_PART, 3))
    def tearDown(self):
        self.system.part.clear()
    def test_kill_particle_motion(self):
        """Linear velocities are zeroed; angular only with rotation=True."""
        g = GalileiTransform()
        g.kill_particle_motion()
        np.testing.assert_array_equal(np.copy(self.system.part[:].v), 0)
        if espressomd.has_features("ROTATION"):
            np.testing.assert_array_less(
                np.copy(self.system.part[:].omega_lab), 0)
            g.kill_particle_motion(rotation=True)
            np.testing.assert_array_equal(
                np.copy(self.system.part[:].omega_lab), 0)
    def test_kill_particle_forces(self):
        """Forces are zeroed; torques only with torque=True."""
        g = GalileiTransform()
        g.kill_particle_forces()
        np.testing.assert_array_equal(np.copy(self.system.part[:].f), 0)
        if espressomd.has_features("ROTATION"):
            np.testing.assert_array_less(
                np.copy(self.system.part[:].torque_lab), 0)
            g.kill_particle_forces(torque=True)
            np.testing.assert_array_equal(
                np.copy(self.system.part[:].torque_lab), 0)
    def test_cms(self):
        """system_CMS() equals the mass-weighted mean position."""
        parts = self.system.part[:]
        g = GalileiTransform()
        total_mass = np.sum(parts.mass)
        com = np.sum(
            np.multiply(parts.mass.reshape((-1, 1)), parts.pos), axis=0) / total_mass
        np.testing.assert_allclose(np.copy(g.system_CMS()), com)
    def test_cms_velocity(self):
        """system_CMS_velocity() equals the mass-weighted mean velocity."""
        parts = self.system.part[:]
        g = GalileiTransform()
        total_mass = np.sum(parts.mass)
        com_v = np.sum(
            np.multiply(parts.mass.reshape((-1, 1)), parts.v), axis=0) / total_mass
        np.testing.assert_allclose(np.copy(g.system_CMS_velocity()), com_v)
    def test_galilei_transform(self):
        """After the transform the centre-of-mass velocity vanishes."""
        g = GalileiTransform()
        g.galilei_transform()
        np.testing.assert_allclose(
            np.copy(g.system_CMS_velocity()), np.zeros((3,)), atol=1e-15)
if __name__ == "__main__":
ut.main()
| gpl-3.0 |
drawks/ansible | lib/ansible/modules/storage/netapp/na_ontap_nvme_namespace.py | 48 | 6502 | #!/usr/bin/python
# (c) 2019, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
- Create/Delete NVME namespace
extends_documentation_fragment:
- netapp.na_ontap
module: na_ontap_nvme_namespace
options:
state:
choices: ['present', 'absent']
description:
- Whether the specified namespace should exist or not.
default: present
vserver:
description:
- Name of the vserver to use.
required: true
ostype:
description:
- Specifies the ostype for initiators
choices: ['windows', 'linux', 'vmware', 'xen', 'hyper_v']
size:
description:
- Size in bytes.
Range is [0..2^63-1].
type: int
path:
description:
- Namespace path.
type: str
short_description: "NetApp ONTAP Manage NVME Namespace"
version_added: "2.8"
'''
EXAMPLES = """
- name: Create NVME Namespace
na_ontap_nvme_namespace:
state: present
ostype: linux
path: /vol/ansible/test
size: 20
vserver: "{{ vserver }}"
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
- name: Create NVME Namespace (Idempotency)
na_ontap_nvme_namespace:
state: present
ostype: linux
path: /vol/ansible/test
size: 20
vserver: "{{ vserver }}"
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
from ansible.module_utils.netapp_module import NetAppModule
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppONTAPNVMENamespace(object):
    """
    Create or delete an ONTAP NVMe namespace through ZAPI calls.

    All requests are tunneled through the target vserver; the desired
    action (create/delete/none) is derived from the module parameters by
    the NetAppModule helper.
    """
    def __init__(self):
        # Start from the shared ONTAP connection argument spec and add the
        # namespace-specific options.
        self.argument_spec = netapp_utils.na_ontap_host_argument_spec()
        self.argument_spec.update(dict(
            state=dict(required=False, type='str', choices=['present', 'absent'], default='present'),
            vserver=dict(required=True, type='str'),
            ostype=dict(required=False, type='str', choices=['windows', 'linux', 'vmware', 'xen', 'hyper_v']),
            path=dict(required=True, type='str'),
            size=dict(required=False, type='int')
        ))
        # 'ostype' and 'size' are only required when creating (state=present).
        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            required_if=[('state', 'present', ['ostype', 'size'])],
            supports_check_mode=True
        )
        self.na_helper = NetAppModule()
        self.parameters = self.na_helper.set_parameters(self.module.params)
        if HAS_NETAPP_LIB is False:
            self.module.fail_json(msg="the python NetApp-Lib module is required")
        else:
            # Scope the ZAPI connection to the requested vserver.
            self.server = netapp_utils.setup_na_ontap_zapi(module=self.module, vserver=self.parameters['vserver'])
    def get_namespace(self):
        """
        Query the current namespace by path and vserver.

        :return: the ZAPI result element if at least one namespace matches,
                 None otherwise
        """
        namespace_get = netapp_utils.zapi.NaElement('nvme-namespace-get-iter')
        query = {
            'query': {
                'nvme-namespace-info': {
                    'path': self.parameters['path'],
                    'vserver': self.parameters['vserver']
                }
            }
        }
        namespace_get.translate_struct(query)
        try:
            # enable_tunneling=False: the get-iter runs at cluster scope.
            result = self.server.invoke_successfully(namespace_get, enable_tunneling=False)
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg='Error fetching namespace info: %s' % to_native(error),
                                  exception=traceback.format_exc())
        # num-records >= 1 means the namespace already exists.
        if result.get_child_by_name('num-records') and int(result.get_child_content('num-records')) >= 1:
            return result
        return None
    def create_namespace(self):
        """
        Create an NVMe namespace at the configured path with the given
        ostype and size.
        """
        options = {'path': self.parameters['path'],
                   'ostype': self.parameters['ostype'],
                   'size': self.parameters['size']
                   }
        namespace_create = netapp_utils.zapi.NaElement('nvme-namespace-create')
        namespace_create.translate_struct(options)
        try:
            self.server.invoke_successfully(namespace_create, enable_tunneling=True)
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg='Error creating namespace for path %s: %s'
                                      % (self.parameters.get('path'), to_native(error)),
                                  exception=traceback.format_exc())
    def delete_namespace(self):
        """
        Delete the NVMe namespace at the configured path.
        """
        options = {'path': self.parameters['path']
                   }
        namespace_delete = netapp_utils.zapi.NaElement.create_node_with_children('nvme-namespace-delete', **options)
        try:
            self.server.invoke_successfully(namespace_delete, enable_tunneling=True)
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg='Error deleting namespace for path %s: %s'
                                      % (self.parameters.get('path'), to_native(error)),
                                  exception=traceback.format_exc())
    def apply(self):
        """
        Compare current vs. desired state and create/delete as needed.
        Honors check mode (reports changed without touching the system).
        """
        netapp_utils.ems_log_event("na_ontap_nvme_namespace", self.server)
        current = self.get_namespace()
        cd_action = self.na_helper.get_cd_action(current, self.parameters)
        if self.na_helper.changed:
            if self.module.check_mode:
                # Check mode: report the pending change but do nothing.
                pass
            else:
                if cd_action == 'create':
                    self.create_namespace()
                elif cd_action == 'delete':
                    self.delete_namespace()
        self.module.exit_json(changed=self.na_helper.changed)
def main():
    """Module entry point: build the namespace handler and run it."""
    namespace_module = NetAppONTAPNVMENamespace()
    namespace_module.apply()


if __name__ == '__main__':
    main()
| gpl-3.0 |
armgilles/open-moulinette | caf/scripts/PajeCom.py | 2 | 2407 | # -*- coding: utf-8 -*-
"""
Created on Fri Oct 2 13:47:48 2015
@author: GILLES Armand
"""
import pandas as pd
import glob
# Aggregate the yearly PajeCom CSV extracts into one wide table,
# one row per commune, merged on the INSEE code.
df = pd.read_csv('source/PajeCom2009.csv', sep=";")
df.columns = ['Communes', 'Codes_Insee', 'NB_Allocataires_2009',
              'ALL_PAJE_2009', 'ALL_PRIM_2009', 'ALL_BASEP_2009',
              'ALL_ASMA_2009', 'ALL_Clca_Colca_2009']

for path_file in glob.glob('source/PajeCom*'):
    year = str(path_file[-8:-4])
    if year == '2009':
        continue  # 2009 is the base frame, loaded above
    df_temp = pd.read_csv(path_file, sep=';')
    # Suffix every data column with its year so names stay unique after merge.
    features_col = [col + "_" + year for col in df_temp.columns[2:]]
    df_temp.columns = ['Communes', 'Codes_Insee'] + features_col
    # Keep the INSEE code in the selection: it is the merge key.
    df = pd.merge(df, df_temp[features_col + ['Codes_Insee']],
                  how='inner', on='Codes_Insee')

# Disambiguate the allocataires counts from other CAF files
# (NB_Allocataires (2009) != NB_allocataires (2010)); PC = PageCom.
df.columns = [col + "_PC" if "nb_allocataires" in col.lower() else col
              for col in df.columns]

df.to_csv('data/full_PageCom.csv', encoding='utf-8', index=False)
## Features
#u'NB_Allocataires_2009_PC',
# u'ALL_PAJE_2009', u'ALL_PRIM_2009', u'ALL_BASEP_2009', u'ALL_ASMA_2009',
# u'ALL_Clca_Colca_2009', u'NB_Allocataires_2010_PC', u'ALL_PAJE_2010',
# u'ALL_PRIM_2010', u'ALL_BASEP_2010', u'ALL_ASMA_2010',
# u'ALL_Clca_Colca_2010', u'NB_Allocataires_2011_PC', u'ALL_PAJE_2011',
# u'ALL_PRIM_2011', u'ALL_BASEP_2011', u'ALL_ASMA_2011',
# u'ALL_Clca_Colca_2011', u'NB_Allocataires_2012_PC', u'ALL_PAJE_2012',
# u'ALL_PRIM_2012', u'ALL_BASEP_2012', u'ALL_ASMA_2012',
# u'ALL_Clca_Colca_2012', u'NB_Allocataires_2013_PC', u'ALL_PAJE_2013',
# u'ALL_PRIM_2013', u'ALL_BASEP_2013', u'ALL_ASMA_2013',
# u'ALL_Clca_Colca_2013', u'NB_Allocataires_2014_PC', u'ALL_PAJE_2014',
# u'ALL_PRIM_2014', u'ALL_BASEP_2014', u'ALL_CMG_2014',
# u'ALL_CMG_ASMA_2014', u'ALL_CMG_DOM_2014', u'ALL_CMG_A_2014',
# u'ALL_Clca_Colca_2014', u'NB_Allocataires_2015_PC', u'ALL_PAJE_2015',
# u'ALL_PRIM_2015', u'ALL_BASEP_2015', u'ALL_ASMA_2015',
# u'ALL_Clca_Colca_2015' | mit |
wangjun/pyload | module/plugins/accounts/QuickshareCz.py | 3 | 1904 | # -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: zoidberg
"""
import re
from module.plugins.Account import Account
from module.utils import parseFileSize
class QuickshareCz(Account):
    __name__ = "QuickshareCz"
    __version__ = "0.01"
    __type__ = "account"
    __description__ = """quickshare.cz account plugin"""
    __author_name__ = ("zoidberg")
    __author_mail__ = ("zoidberg@mujmail.cz")

    def loadAccountInfo(self, user, req):
        """Fetch the premium page and parse the remaining traffic credit."""
        html = req.load("http://www.quickshare.cz/premium", decode=True)
        credit = re.search(r'Stav kreditu: <strong>(.+?)</strong>', html)
        if not credit:
            # No credit line rendered: treat the account as free.
            return {"validuntil": -1, "trafficleft": None, "premium": False}
        remaining = parseFileSize(credit.group(1)) / 1024
        return {"validuntil": -1,
                "trafficleft": remaining,
                "premium": True if remaining else False}

    def login(self, user, data, req):
        """Submit the login form; flag a wrong user/password combination."""
        html = req.load('http://www.quickshare.cz/html/prihlaseni_process.php', post={
            "akce": u'Přihlásit',
            "heslo": data['password'],
            "jmeno": user
        }, decode=True)
        login_failed = (u'>Takový uživatel neexistuje.<' in html
                        or u'>Špatné heslo.<' in html)
        if login_failed:
            self.wrongPassword()
| gpl-3.0 |
CodingCat/gpudb | gpudb.py | 9 | 6224 | #! /usr/bin/python
import sys
import os
import shutil
import pickle
import readline
import re
sys.path.insert(0, "XML2CODE")
import ystree
COMMANDS = ["help","create", "delete", "list", "restore", "load", "translate", "execute", "exit"]
RE_SPACE = re.compile('.*\s+$', re.M)
class cmdCompleter(object):
    """Readline completer: completes command names first, then paths."""

    def _listdir(self, root):
        """List entries of *root*, appending a separator to directories."""
        entries = []
        for entry in os.listdir(root):
            if os.path.isdir(os.path.join(root, entry)):
                entry += os.sep
            entries.append(entry)
        return entries

    def _complete_path(self, path=None):
        """Return completion candidates for a (possibly partial) path."""
        if not path:
            return self._listdir('.')
        dirname, rest = os.path.split(path)
        search_root = dirname if dirname else '.'
        candidates = [os.path.join(dirname, entry)
                      for entry in self._listdir(search_root)
                      if entry.startswith(rest)]
        if len(candidates) > 1 or not os.path.exists(path):
            return candidates
        if os.path.isdir(path):
            # Unique match that is a directory: descend into it.
            return [os.path.join(path, entry) for entry in self._listdir(path)]
        # Unique match that is a file: close the token with a space.
        return [path + ' ']

    def complete_all(self, args):
        if not args:
            return self._complete_path('.')
        return self._complete_path(args[-1])

    def complete(self, text, state):
        """readline entry point; returns the *state*-th candidate or None."""
        buffer = readline.get_line_buffer()
        line = buffer.split()
        if not line:
            return [c + ' ' for c in COMMANDS][state]
        # A trailing space means the command word is finished.
        if RE_SPACE.match(buffer):
            line.append('')
        cmd = line[0].strip()
        if cmd in COMMANDS:
            args = line[1:]
            if args:
                return (self.complete_all(args) + [None])[state]
            return [cmd + ' '][state]
        return ([c + ' ' for c in COMMANDS if c.startswith(cmd)] + [None])[state]
def dbHelp():
print "Command:"
print "\tcreate DBName: create the database"
print "\tdelete DBName: delete the database"
print "\tlist DBName: list the table infomation in the database"
print "\trestore DBName: restore the metadata for a created Database"
print "\tload TableName data: load data into the given table"
print "\ttranslate SQL: translate SQL into CUDA file"
print "\texecute SQL: translate and execute given SQL on GPU"
print "\texit"
def dbCreate(dbName, schemaFile):
    """Create a new database directory and build its loader/metadata.

    Translates the schema, builds the loader, then moves the generated
    artifacts into database/<dbName>.

    Returns 0 on success, -1 if the database already exists or any
    build step fails.
    """
    dbTop = "database"
    if not os.path.exists(dbTop):
        os.makedirs(dbTop)
    dbPath = dbTop + "/" + dbName
    if os.path.exists(dbPath):
        # Refuse to clobber an existing database.
        return -1
    os.makedirs(dbPath)
    # Each step must succeed before the next one runs (was four copies of
    # the same cmd/ret/check boilerplate).
    # NOTE(review): dbName/schemaFile are interpolated into shell commands
    # unquoted; avoid names containing shell metacharacters.
    steps = (
        'python XML2CODE/main.py ' + schemaFile + ' &> /dev/null',
        'make -C src/utility/ loader &> /dev/null',
        'mv src/utility/gpuDBLoader ' + dbPath,
        'mv src/utility/.metadata ' + dbPath,
    )
    for cmd in steps:
        if os.system(cmd) != 0:
            return -1
    return 0
def dbDelete(dbName):
    """Remove database/<dbName> and everything under it, if present."""
    target = "database" + "/" + dbName
    if os.path.exists(target):
        shutil.rmtree(target)
def dbList(dbName):
dbTop = "database"
dbPath = dbTop + "/" + dbName
if not os.path.exists(dbPath):
return -1
metaPath = dbPath + "/.metadata"
if not os.path.exists(metaPath):
return -2
metaFile = open(metaPath, 'rb')
tableDict = pickle.load(metaFile)
metaFile.close()
for tn in tableDict.keys():
print tn
return 0
def dbRestore(dbName, schemaFile):
    """Rebuild the loader and metadata for an existing database.

    Same build pipeline as dbCreate, but the database directory must
    already exist. Returns 0 on success, -1 on any failure.
    """
    dbPath = "database" + "/" + dbName
    if not os.path.exists(dbPath):
        return -1
    # Collapsed four copies of the cmd/ret/check boilerplate into one loop;
    # each step must succeed before the next one runs.
    steps = (
        'python XML2CODE/main.py ' + schemaFile + ' &> /dev/null',
        'make -C src/utility/ loader &> /dev/null',
        'mv src/utility/gpuDBLoader ' + dbPath,
        'mv src/utility/.metadata ' + dbPath,
    )
    for cmd in steps:
        if os.system(cmd) != 0:
            return -1
    return 0
if len(sys.argv) != 2:
print "./gpudb.py schemaFile"
exit(-1)
schemaFile = sys.argv[1]
while 1:
ret = 0
dbCreated = 0
dbName = ""
comp = cmdCompleter()
readline.set_completer_delims(' \t\n;')
readline.parse_and_bind("tab: complete")
readline.set_completer(comp.complete)
cmd = raw_input(">")
cmdA = cmd.lstrip().rstrip().split()
if len(cmdA) == 0:
continue
if cmdA[0].upper() == "HELP":
dbHelp()
elif cmdA[0].upper() == "?":
dbHelp()
elif cmdA[0].upper() == "EXIT":
break
elif cmdA[0].upper() == "CREATE":
if dbCreated !=0:
print "Already created database. Delete first."
continue
if len(cmdA) !=2:
print "usage: create DBName"
else:
ret = dbCreate(cmdA[1].upper(), schemaFile)
if ret == -1:
print cmdA[1] + " already exists"
else:
dbCreated = 1
dbName = cmdA[1].upper()
print cmdA[1] + " has been successfully created."
elif cmdA[0].upper() == "DELETE":
if len(cmdA) != 2:
print "usage: delete DBName"
dbCreated = 0
dbDelete(cmdA[1].upper())
print cmdA[1] + " has been successfully deleted."
elif cmdA[0].upper() == "LIST":
if len(cmdA) != 2:
print "usage: list DBName"
continue
ret = dbList(cmdA[1].upper())
if ret == -1:
print cmdA[1] + " doesn't exist"
elif ret == -2:
print cmdA[1] + " metaData doesn't exist"
elif cmdA[0].upper() == "RESTORE":
if len(cmdA) != 2:
print "usage: restore DBName"
continue
ret = dbRestore(cmdA[1].upper(),schemaFile)
if ret == -1:
print "Failed to restore the metadata for " + cmdA[1]
continue
elif cmdA[0].upper() == "LOAD":
pass
else:
print "Unknown command"
os.system("clear")
| apache-2.0 |
Khan/flask-wtf | tests/__init__.py | 1 | 9854 | from __future__ import with_statement
import re
from flask import Flask, Response, render_template, jsonify
from flaskext.uploads import UploadSet, IMAGES, TEXT, configure_uploads
from flaskext.testing import TestCase as _TestCase
from flaskext.wtf import Form, TextField, FileField, HiddenField, \
SubmitField, Required, FieldList, file_required, file_allowed, html5
class DummyField(object):
    """Minimal stand-in for a WTForms field: every accessor yields .data."""

    def __init__(self, data, name='f', label='', id='', type='TextField'):
        self.data = data
        self.name = name
        self.label = label
        self.id = id
        self.type = type

    def _value(self):
        return self.data

    def __unicode__(self):
        return self.data

    def __call__(self, **kwargs):
        return self.data

    def __iter__(self):
        return iter(self.data)

    def iter_choices(self):
        return iter(self.data)
class TestCase(_TestCase):
    """Base Flask-Testing case: builds a small app exercising Flask-WTF forms."""
    def create_app(self):
        # Form with a required field plus submit; used by most routes below.
        class MyForm(Form):
            name = TextField("Name", validators=[Required()])
            submit = SubmitField("Submit")
        class HiddenFieldsForm(Form):
            name = HiddenField()
            url = HiddenField()
            method = HiddenField()
            secret = HiddenField()
            submit = SubmitField("Submit")
            def __init__(self, *args, **kwargs):
                super(HiddenFieldsForm, self).__init__(*args, **kwargs)
                # Rename so the field posts as the '_method' override.
                self.method.name = '_method'
        # Form with no fields of its own; still carries the CSRF machinery.
        class SimpleForm(Form):
            pass
        app = Flask(__name__)
        app.secret_key = "secret"
        @app.route("/", methods=("GET", "POST"))
        def index():
            # Renders the form; echoes the name upper-cased on valid submit.
            form = MyForm()
            if form.validate_on_submit():
                name = form.name.data.upper()
            else:
                name = ''
            return render_template("index.html",
                                   form=form,
                                   name=name)
        @app.route("/simple/", methods=("POST",))
        def simple():
            form = SimpleForm()
            form.validate()
            assert form.csrf_enabled
            # NOTE(review): repeated validate() is asserted to keep failing —
            # presumably CSRF-related; confirm against the Form implementation.
            assert not form.validate()
            assert not form.validate()
            return "OK"
        @app.route("/hidden/")
        def hidden():
            form = HiddenFieldsForm()
            return render_template("hidden.html", form=form)
        @app.route("/ajax/", methods=("POST",))
        def ajax_submit():
            # JSON variant of index(): reports validity and errors as JSON.
            form = MyForm()
            if form.validate_on_submit():
                return jsonify(name=form.name.data,
                               success=True,
                               errors=None)
            return jsonify(name=None,
                           errors=form.errors,
                           success=False)
        return app
class HTML5Tests(TestCase):
    """Render each HTML5 widget against a fixed field and check the markup."""

    field = DummyField("name", id="name", name="name")

    def test_url_input(self):
        rendered = html5.URLInput()(self.field)
        assert rendered == '<input id="name" name="name" type="url" value="name" />'

    def test_search_input(self):
        rendered = html5.SearchInput()(self.field)
        assert rendered == '<input id="name" name="name" type="search" value="name" />'

    def test_date_input(self):
        rendered = html5.DateInput()(self.field)
        assert rendered == '<input id="name" name="name" type="date" value="name" />'

    def test_email_input(self):
        rendered = html5.EmailInput()(self.field)
        assert rendered == '<input id="name" name="name" type="email" value="name" />'

    def test_number_input(self):
        rendered = html5.NumberInput()(self.field, min=0, max=10)
        assert rendered == '<input id="name" max="10" min="0" name="name" type="number" value="name" />'

    def test_range_input(self):
        rendered = html5.RangeInput()(self.field, min=0, max=10)
        assert rendered == '<input id="name" max="10" min="0" name="name" type="range" value="name" />'
# FILE UPLOAD TESTS #
# Two upload sets with disjoint extension policies, backing the
# file_allowed() validators below.
images = UploadSet("images", IMAGES)
text = UploadSet("text", TEXT)
class FileUploadForm(Form):
    # No validators: any (or no) file passes.
    upload = FileField("Upload file")
class MultipleFileUploadForm(Form):
    # The /upload-multiple/ route asserts exactly three entries.
    uploads = FieldList(FileField("upload"), min_entries=3)
class ImageUploadForm(Form):
    # A file is mandatory and must match the image extension set.
    upload = FileField("Upload file",
                       validators=[file_required(),
                                   file_allowed(images)])
class TextUploadForm(Form):
    # A file is mandatory and must match the text extension set.
    upload = FileField("Upload file",
                       validators=[file_required(),
                                   file_allowed(text)])
class TestFileUpload(TestCase):
    """End-to-end upload validation through real Flask request handling."""

    def create_app(self):
        app = super(TestFileUpload, self).create_app()
        # CSRF is exercised elsewhere; disable it so uploads post cleanly.
        app.config['CSRF_ENABLED'] = False
        app.config['UPLOADED_FILES_DEST'] = 'uploads'
        app.config['UPLOADS_DEFAULT_DEST'] = 'uploads'
        configure_uploads(app, [images, text])
        @app.route("/upload-image/", methods=("POST",))
        def upload_image():
            form = ImageUploadForm()
            if form.validate_on_submit():
                return "OK"
            return "invalid"
        @app.route("/upload-text/", methods=("POST",))
        def upload_text():
            form = TextUploadForm()
            if form.validate_on_submit():
                return "OK"
            return "invalid"
        @app.route("/upload-multiple/", methods=("POST",))
        def upload_multiple():
            form = MultipleFileUploadForm()
            if form.validate_on_submit():
                assert len(form.uploads.entries) == 3
                for upload in form.uploads.entries:
                    assert upload.file is not None
            return "OK"
        @app.route("/upload-multiple-field/", methods=("POST",))
        def upload_multiple_field():
            # NOTE(review): MultipleFileFieldUploadForm is not defined anywhere
            # in this module; this route raises NameError if ever requested.
            form = MultipleFileFieldUploadForm()
            if form.validate_on_submit():
                assert len(form.uploads.files) == 3
                for upload in form.uploads.files:
                    assert "flask.png" in upload.filename
            return "OK"
        @app.route("/upload/", methods=("POST",))
        def upload():
            form = FileUploadForm()
            if form.validate_on_submit():
                filedata = form.upload.file
            else:
                filedata = None
            return render_template("upload.html",
                                   filedata=filedata,
                                   form=form)
        return app

    def test_multiple_files(self):
        fps = [self.app.open_resource("flask.png") for i in xrange(3)]
        data = [("uploads-%d" % i, fp) for i, fp in enumerate(fps)]
        response = self.client.post("/upload-multiple/", data=dict(data))
        assert response.status_code == 200

    def test_valid_file(self):
        with self.app.open_resource("flask.png") as fp:
            response = self.client.post("/upload-image/",
                                        data={'upload' : fp})
        assert "OK" in response.data

    def test_missing_file(self):
        # A plain string instead of a file must fail file_required().
        response = self.client.post("/upload-image/",
                                    data={'upload' : "test"})
        assert "invalid" in response.data

    def test_invalid_file(self):
        # A PNG posted to the text-only upload set must be rejected.
        with self.app.open_resource("flask.png") as fp:
            response = self.client.post("/upload-text/",
                                        data={'upload' : fp})
        assert "invalid" in response.data

    def test_invalid_filedata(self):
        # Was also named test_invalid_file, silently shadowing the method
        # above so it never ran; renamed so both tests execute.
        response = self.client.post("/upload/",
                                    data={'upload' : 'flask.png'})
        assert "flask.png</h3>" not in response.data
class TestValidateOnSubmit(TestCase):
    """validate_on_submit() must be False on GET and on invalid POSTs."""

    def test_not_submitted(self):
        response = self.client.get("/")
        assert 'DANNY' not in response.data

    def test_submitted_not_valid(self):
        # No form data at all: validation fails, the name stays empty.
        self.app.config['CSRF_ENABLED'] = False
        response = self.client.post("/", data={})
        assert 'DANNY' not in response.data

    def test_submitted_and_valid(self):
        # Removed a leftover debug `print response.data` statement.
        self.app.config['CSRF_ENABLED'] = False
        response = self.client.post("/", data={"name" : "danny"})
        assert 'DANNY' in response.data
class TestHiddenTag(TestCase):
    """hidden_tag() should render every hidden field, CSRF token included."""
    def test_hidden_tag(self):
        response = self.client.get("/")
        response = self.client.get("/hidden/")
        # 4 HiddenFields + the CSRF token = 5 hidden inputs.
        assert response.data.count('type="hidden"') == 5
        # The 'method' field was renamed to '_method' in the form's __init__.
        assert 'name="_method"' in response.data
class TestCSRF(TestCase):
    """CSRF token rendering, enforcement, disabling, and round-tripping."""
    def test_csrf_token(self):
        response = self.client.get("/")
        # The token is rendered inside a hidden div by the form template.
        assert '<div style="display:none;"><input id="csrf" name="csrf" type="hidden" value' in response.data
    def test_invalid_csrf(self):
        # POST without a token: form invalid, error message rendered.
        response = self.client.post("/", data={"name" : "danny"})
        assert 'DANNY' not in response.data
        assert "Missing or invalid CSRF token" in response.data
    def test_csrf_disabled(self):
        # With CSRF disabled, the same token-less POST validates.
        self.app.config['CSRF_ENABLED'] = False
        response = self.client.post("/", data={"name" : "danny"})
        assert 'DANNY' in response.data
    def test_validate_twice(self):
        # /simple/ validates the same form repeatedly; must still return 200.
        response = self.client.post("/simple/", data={})
        self.assert_200(response)
    def test_ajax(self):
        # NOTE(review): /ajax/ returns 200 whether or not the form validates;
        # presumably this checks XHR posts do not error — confirm intent.
        response = self.client.post("/ajax/",
                                    data={"name" : "danny"},
                                    headers={'X-Requested-With' : 'XMLHttpRequest'})
        assert response.status_code == 200
    def test_valid_csrf(self):
        # Scrape the token from the rendered form, then submit it back.
        response = self.client.get("/")
        pattern = re.compile(r'name="csrf" type="hidden" value="([0-9a-zA-Z-]*)"')
        match = pattern.search(response.data)
        assert match
        csrf_token = match.groups()[0]
        response = self.client.post("/", data={"name" : "danny",
                                               "csrf" : csrf_token})
        assert "DANNY" in response.data
| bsd-3-clause |
nazavode/automaton | setup.py | 2 | 1811 | # -*- coding: utf-8 -*-
#
# Copyright 2015 Federico Ficarelli
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
# Resolve paths relative to this file so builds work from any CWD.
here = path.abspath(path.dirname(__file__))

# The long description is the README, verbatim.
with open(path.join(here, 'README.rst'), encoding='utf-8') as readme:
    long_description = readme.read()

_setup_args = dict(
    name="python-automaton",
    version='1.3.1',
    description="Minimal finite-state machines",
    long_description=long_description,
    author="Federico Ficarelli",
    author_email="federico.ficarelli@gmail.com",
    url="https://github.com/nazavode/automaton",
    package_dir={"": "src"},
    py_modules=['automaton'],
    install_requires=['networkx', 'tabulate'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Operating System :: OS Independent',
    ],
    keywords='automata automaton statemachine',
)

setup(**_setup_args)
| apache-2.0 |
RichardWithnell/net-next-sim | tools/perf/scripts/python/syscall-counts.py | 1996 | 1700 | # system call counts
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts.py [comm]\n";
# Optional single argument restricts accounting to one process name.
for_comm = None
if len(sys.argv) > 2:
    sys.exit(usage)
if len(sys.argv) > 1:
    for_comm = sys.argv[1]
# syscall id -> hit count; autodict lets us increment without presetting keys.
syscalls = autodict()
def trace_begin():
    # Called once by perf before any events are processed.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called once by perf after the last event; emit the summary table.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, id, args):
	# Skip events from other processes when a [comm] filter was given.
	if for_comm is not None:
		if common_comm != for_comm:
			return
	try:
		syscalls[id] += 1
	except TypeError:
		# First hit for this id: autodict's placeholder is not a number,
		# so += raises and we seed the count instead.
		syscalls[id] = 1
def syscalls__sys_enter(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	id, args):
	# Fallback handler for the per-syscall tracepoints; delegates to the
	# raw handler. That handler also requires a common_callchain argument
	# this tracepoint does not provide, so forwarding **locals() alone
	# raised TypeError — pass it explicitly.
	raw_syscalls__sys_enter(common_callchain=None, **locals())
def print_syscall_totals():
    # Header differs depending on whether a comm filter was active.
    if for_comm is not None:
	    print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
	    print "\nsyscall events:\n\n",
    print "%-40s  %10s\n" % ("event", "count"),
    print "%-40s  %10s\n" % ("----------------------------------------", \
                                 "-----------"),
    # Sort by count descending (key swaps (id, count) -> (count, id)).
    for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
				  reverse = True):
	    print "%-40s  %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
vlinhd11/vlinhd11-android-scripting | python/src/Lib/json/__init__.py | 57 | 12286 | r"""A simple, fast, extensible JSON encoder and decoder
JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
json exposes an API familiar to uses of the standard library
marshal and pickle modules.
Encoding basic Python object hierarchies::
>>> import json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing (using repr() because of extraneous whitespace in the output)::
>>> import json
>>> print repr(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4))
'{\n "4": 5, \n "6": 7\n}'
Decoding JSON::
>>> import json
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]')
[u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('"\\"foo\\bar"')
u'"foo\x08ar'
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)
[u'streaming API']
Specializing JSON object decoding::
>>> import json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> import decimal
>>> json.loads('1.1', parse_float=decimal.Decimal)
Decimal('1.1')
Extending JSONEncoder::
>>> import json
>>> class ComplexEncoder(json.JSONEncoder):
... def default(self, obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
... return json.JSONEncoder.default(self, obj)
...
>>> dumps(2 + 1j, cls=ComplexEncoder)
'[2.0, 1.0]'
>>> ComplexEncoder().encode(2 + 1j)
'[2.0, 1.0]'
>>> list(ComplexEncoder().iterencode(2 + 1j))
['[', '2.0', ', ', '1.0', ']']
Using json.tool from the shell to validate and
pretty-print::
$ echo '{"json":"obj"}' | python -mjson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -mjson.tool
Expecting property name: line 1 column 2 (char 2)
Note that the JSON produced by this module's default settings
is a subset of YAML, so it may be used as a serializer for that as well.
"""
__version__ = '1.9'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONEncoder',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from .decoder import JSONDecoder
from .encoder import JSONEncoder
# Shared encoder used whenever the caller does not customize any option;
# avoids rebuilding a JSONEncoder on every dump()/dumps() call.
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).
    If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    will be skipped instead of raising a ``TypeError``.
    If ``ensure_ascii`` is ``False``, then some chunks written to ``fp``
    may be ``unicode`` instances, subject to normal Python ``str`` to
    ``unicode`` coercion rules. Unless ``fp.write()`` explicitly
    understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
    to cause an error.
    If ``check_circular`` is ``False``, then the circular reference check
    for container types will be skipped and a circular reference will
    result in an ``OverflowError`` (or worse).
    If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
    in strict compliance of the JSON specification, instead of using the
    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
    If ``indent`` is a non-negative integer, then JSON array elements and object
    members will be pretty-printed with that indent level. An indent level
    of 0 will only insert newlines. ``None`` is the most compact representation.
    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
    then it will be used instead of the default ``(', ', ': ')`` separators.
    ``(',', ':')`` is the most compact JSON representation.
    ``encoding`` is the character encoding for str instances, default is UTF-8.
    ``default(obj)`` is a function that should return a serializable version
    of obj or raise TypeError. The default simply raises TypeError.
    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), specify it with
    the ``cls`` kwarg.
    """
    # cached encoder: every option at its default -> reuse the shared
    # module-level instance instead of constructing a new encoder.
    if (skipkeys is False and ensure_ascii is True and
        check_circular is True and allow_nan is True and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and not kw):
        iterable = _default_encoder.iterencode(obj)
    else:
        if cls is None:
            cls = JSONEncoder
        iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan, indent=indent,
            separators=separators, encoding=encoding,
            default=default, **kw).iterencode(obj)
    # could accelerate with writelines in some versions of Python, at
    # a debuggability cost
    for chunk in iterable:
        fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.
    If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    will be skipped instead of raising a ``TypeError``.
    If ``ensure_ascii`` is ``False``, then the return value will be a
    ``unicode`` instance subject to normal Python ``str`` to ``unicode``
    coercion rules instead of being escaped to an ASCII ``str``.
    If ``check_circular`` is ``False``, then the circular reference check
    for container types will be skipped and a circular reference will
    result in an ``OverflowError`` (or worse).
    If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
    strict compliance of the JSON specification, instead of using the
    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
    If ``indent`` is a non-negative integer, then JSON array elements and
    object members will be pretty-printed with that indent level. An indent
    level of 0 will only insert newlines. ``None`` is the most compact
    representation.
    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
    then it will be used instead of the default ``(', ', ': ')`` separators.
    ``(',', ':')`` is the most compact JSON representation.
    ``encoding`` is the character encoding for str instances, default is UTF-8.
    ``default(obj)`` is a function that should return a serializable version
    of obj or raise TypeError. The default simply raises TypeError.
    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), specify it with
    the ``cls`` kwarg.
    """
    # cached encoder: every option at its default -> reuse the shared
    # module-level instance instead of constructing a new encoder.
    if (skipkeys is False and ensure_ascii is True and
        check_circular is True and allow_nan is True and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and not kw):
        return _default_encoder.encode(obj)
    if cls is None:
        cls = JSONEncoder
    return cls(
        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, encoding=encoding, default=default,
        **kw).encode(obj)
# Shared decoder reused by loads() whenever no customization is requested.
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object
    containing a JSON document) to a Python object.
    If the contents of ``fp`` is encoded with an ASCII based encoding other
    than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
    be specified. Encodings that are not ASCII based (such as UCS-2) are
    not allowed, and should be wrapped with
    ``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode``
    object and passed to ``loads()``
    ``object_hook`` is an optional function that will be called with the
    result of any object literal decode (a ``dict``). The return value of
    ``object_hook`` will be used instead of the ``dict``. This feature
    can be used to implement custom decoders (e.g. JSON-RPC class hinting).
    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg.
    """
    # Read the whole document and delegate every option to loads().
    return loads(fp.read(),
        encoding=encoding, cls=cls, object_hook=object_hook,
        parse_float=parse_float, parse_int=parse_int,
        parse_constant=parse_constant, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
          parse_int=None, parse_constant=None, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` JSON document) into a
    Python object.

    ``encoding`` names the character set of a ``str`` input when it is an
    ASCII-based encoding other than UTF-8; non-ASCII-based encodings must
    be decoded to ``unicode`` first.

    ``object_hook`` is called with every decoded JSON object (``dict``)
    and its return value replaces the ``dict``.

    ``parse_float``, ``parse_int`` and ``parse_constant`` override the
    default handling of JSON floats, ints and the constants
    -Infinity/Infinity/NaN/null/true/false respectively.

    A custom ``JSONDecoder`` subclass may be supplied via the ``cls`` kwarg.
    """
    # Fast path: nothing customised at all -> reuse the shared decoder.
    no_options = (cls is None and encoding is None and object_hook is None and
                  parse_int is None and parse_float is None and
                  parse_constant is None and not kw)
    if no_options:
        return _default_decoder.decode(s)
    decoder_cls = JSONDecoder if cls is None else cls
    # Fold the explicitly-passed hooks into the decoder keyword arguments.
    for hook_name, hook in (('object_hook', object_hook),
                            ('parse_float', parse_float),
                            ('parse_int', parse_int),
                            ('parse_constant', parse_constant)):
        if hook is not None:
            kw[hook_name] = hook
    return decoder_cls(encoding=encoding, **kw).decode(s)
| apache-2.0 |
Djimmer/obts | Fuzzer/function_scanner.py | 1 | 6412 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import socket
import time
import binascii
import os
import sys
from libmich.formats import *
import gsm_um
import smarter_fuzzer_function_def as fuzzer
import itertools
from random import randint
from math import factorial
import logging
from pythonjsonlogger import jsonlogger
# Fill in current mobile device from the command line: argv[1] is the
# device name, argv[2] its IMSI.
if len(sys.argv) > 2:
    device = sys.argv[1]
    imsi = sys.argv[2]
else:
    print("ERROR: Device name not found.")
    print("Call the script with: ./smarter_fuzzer #DEVICE #IMSI")
    print("Where #DEVICE is the name and #IMSI is the IMSI of the mobile device.")
    # BUGFIX: exit with a non-zero status so shells and callers can detect
    # the usage error (the original exited with 0, signalling success).
    sys.exit(1)
############################################### SETTINGS #############################################
# Default OpenBTS port
TESTCALL_PORT = 28670;
# Log file location: one JSON log per run, stamped with the start time.
date = str(time.strftime("%Y%m%d-%H%M%S"));
log_all_functions_JSON = "logs/functions/" + device + "_log_" + date + ".json";
# Create the UDP socket used to exchange testcall packets with OpenBTS.
tcsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
tcsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
tcsock.settimeout(2)
# Second UDP socket on which OpenBTS confirms channel (re)establishment;
# its longer timeout covers the slow channel-restart handshake.
ocsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ocsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
HOST = 'localhost' # Symbolic name meaning all available interfaces
PORT = 21337 # Arbitrary non-privileged port
ocsock.bind((HOST, PORT))
ocsock.settimeout(20)
# Initialize JSON logger
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# create a file handler
handler = logging.FileHandler(log_all_functions_JSON)
handler.setLevel(logging.INFO)
# create a logging format
formatter = jsonlogger.JsonFormatter()
handler.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(handler)
# First log entry records which device/SIM this run targeted.
logger.info({
    "message": "Function Scanner; Device and SIM information",
    "device": device,
    "imsi" : imsi});
################################################# LOG ################################################
def log_packets(run, maxRun, packet, parsed_packet, reply, parsed_reply):
    """Record one sent packet and its reply as a JSON log entry."""
    # Collapse libmich parse failures into a fixed marker string.
    if "ERROR" in parsed_reply:
        parsed_reply = "libmich ERROR";
    entry = {
        "message": run,
        "maxRun" : maxRun,
        "packet": str(packet).encode("hex"),
        "parsed_packet": parsed_packet,
        "reply": str(reply).encode("hex"),
        "parsed_reply": parsed_reply
    };
    logger.info(entry);
############################################## CHANNEL ###############################################
# Send a restart to OpenBTS to establish a new channel
def establishNewChannel():
    """Ask OpenBTS to tear down and re-establish the testcall channel.

    Sends the literal "RESTART" command over the testcall UDP socket and
    blocks (up to the 20 s ocsock timeout) until OpenBTS confirms the new
    channel on the notification socket.  Returns True on confirmation,
    False when no confirmation arrived.
    """
    restart = "RESTART";
    print("Channel restart: Establishing a new channel, this may take a second.");
    tcsock.sendto(restart, ('127.0.0.1', TESTCALL_PORT));
    # Wait for OpenBTS to confirm new channel.
    try:
        reply = ocsock.recv(20000)
    # NOTE(review): bare except also swallows KeyboardInterrupt; a
    # socket.timeout/socket.error handler would be more precise.
    except:
        print "Could not establish a new channel.";
        return False;
    print "New channel established, fuzzing will continue.";
    # Give the stack a moment to settle before the next packet is sent.
    time.sleep(1);
    return True;
def send(tcsock, packet):
    """Send one raw L3 packet to the phone through OpenBTS.

    Returns False when the device did not answer within the 2 s socket
    timeout, otherwise the classification of the reply as computed by
    packetImplemented() (raw bytes, "Restart" or "Skip").
    """
    try:
        tcsock.sendto(packet, ('127.0.0.1', TESTCALL_PORT))
        reply = tcsock.recv(1024)
    except socket.timeout:
        print "socket.timeout: Mobile device is not responding";
        return False
    return packetImplemented(reply)
def packetImplemented(reply):
    """Classify the phone's reply.

    Returns "Restart" when the channel has to be re-established,
    "Skip" when the message type is not implemented on the device,
    and otherwise the raw reply bytes for logging.
    """
    parsed_reply = repr(L3Mobile.parse_L3(reply));
    print "Received packet: ", str(reply).encode("hex") + "\n";
    print "GSM_UM interpetation: " + '\n' + parsed_reply + "\n\n";
    if "RELEASE_COMPLETE" in parsed_reply:
        return "Restart";
    elif((str(reply).encode("hex") == "786e430200")): #MDL_ERROR_INDICATION
        return "Restart";
    elif((str(reply).encode("hex") == "789ea400")): #MDL_ERROR_INDICATION
        return "Restart";
    # NOTE(review): meaning of the 06126100 reply is not documented here;
    # it is treated like a "not implemented" answer — confirm.
    elif((str(reply).encode("hex") == "06126100")):
        return "Skip";
    elif "Message type non-existent or not implemented" in parsed_reply:
        return "Skip";
    else:
        return reply;
############################################### UTILS ################################################
def printPacket(packet, currentRun, total_runs):
    """Pretty-print the outgoing packet, its libmich parse and the run
    counter to stdout."""
    print('------------------------------- INPUT -------------------------------' + '\n');
    print('Run ' + str(currentRun) + "/" + str(total_runs) + '\n');
    # Make the packet readable (hex dump only for even-length payloads).
    if(len(packet) % 2 == 0):
        printable = str(packet).encode("hex");
        print "Current complete packet: " + printable + '\n';
    # Decode printable hex to make it usable for L3Mobile.
    # Adding the \x for the bytes.
    l3msg_input = repr(L3Mobile.parse_L3(str(packet)));
    print "GSM_UM interpetation: \n " + l3msg_input + '\n\n';
    print "------------------------------- OUTPUT -------------------------------" + '\n';
print "------------------------------- OUTPUT -------------------------------" + '\n';
############################################ SMART FUZZER ############################################
# This fuzzer targets fields with variable length
# Tries all different bytes for length byte
# Tries random bytes for a range of lengths
######################################################################################################
# Fuzzer specific settings
maxPacketAttempt = 5;       # retries per packet before giving up on it
currentPacketAttempt = 1;
protocols = [3];            # L3 protocol discriminators to scan (first byte)
currentRun = 1;
total_runs = len(protocols) * 256;
print "Total amount of runs: " + str(total_runs);
time.sleep(1);
# Scan every two-byte packet <protocol><message-type>; message type starts
# at 1, so type 0x00 is never sent.
for i in protocols:
    firstByte = "{0:0{1}x}".format(i,2);
    n = 1;
    while n < 256:
        secondByte = "{0:0{1}x}".format(n,2);
        if(i == 5 and n == 17):
            # Skip because the packet 0511 is a Authentication Reject
            # and disconnects the mobile device
            # NOTE(review): this substitutes 0x12 for 0x11 but does not
            # advance n, so packet 0512 is sent twice — confirm intended.
            secondByte = "{0:0{1}x}".format(n+1,2);
        packet = "\\x" + str(firstByte) + "\\x" + str(secondByte);
        packet = packet.replace('\\x', '').decode('hex');
        print "Packet: " + str(packet).encode("hex");
        printPacket(packet, currentRun, total_runs);
        # Send packet to the mobile device.
        result = send(tcsock, packet);
        if(result == "Restart" or result == False):
            # No usable answer: re-establish the channel and retry the same
            # packet until maxPacketAttempt is reached, then log and move on.
            currentPacketAttempt = currentPacketAttempt + 1;
            establishNewChannel();
            if(currentPacketAttempt >= maxPacketAttempt):
                # NOTE(review): currentPacketAttempt is not reset to 0 here,
                # so the next packet starts at the exhausted count — verify.
                parsed_packet = repr(L3Mobile.parse_L3(packet));
                log_packets(currentRun, total_runs, packet, parsed_packet, "None", "None");
                currentRun = currentRun + 1;
                n = n + 1;
        elif(result =="Skip"):
            # Message type not implemented on the device: nothing to log.
            currentRun = currentRun + 1;
            currentPacketAttempt = 0;
            n = n + 1;
        else:
            # Meaningful reply: log both directions fully parsed.
            parsed_result = repr(L3Mobile.parse_L3(result));
            parsed_packet = repr(L3Mobile.parse_L3(packet));
            log_packets(currentRun, total_runs, packet, parsed_packet, result, parsed_result);
            currentRun = currentRun + 1;
            currentPacketAttempt = 0;
            n = n + 1;
| agpl-3.0 |
jordiclariana/ansible | lib/ansible/modules/cloud/vmware/vmware_guest.py | 1 | 70396 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This module is also sponsored by E.T.A.I. (www.etai.fr)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: vmware_guest
short_description: Manages virtualmachines in vcenter
description:
- Uses pyvmomi to ...
- copy a template to a new virtualmachine
- poweron/poweroff/restart a virtualmachine
- remove a virtualmachine
version_added: 2.2
author:
- James Tanner (@jctanner) <tanner.jc@gmail.com>
- Loic Blot (@nerzhul) <loic.blot@unix-experience.fr>
notes:
- Tested on vSphere 6.0
requirements:
- "python >= 2.6"
- PyVmomi
options:
state:
description:
- What state should the virtualmachine be in?
- if state is set to present and VM exists, ensure the VM configuration if conform to task arguments
required: True
choices: ['present', 'absent', 'poweredon', 'poweredoff', 'restarted', 'suspended']
name:
description:
- Name of the newly deployed guest
required: True
name_match:
description:
- If multiple vms matching the name, use the first or last found
required: False
default: 'first'
choices: ['first', 'last']
uuid:
description:
- UUID of the instance to manage if known, this is vmware's unique identifier.
- This is required if name is not supplied.
required: False
template:
description:
- Template used to create guest.
- If this value is not set, VM is created without using a template.
- If the guest exists already this setting will be ignored.
required: False
is_template:
description:
- Flag the instance as a template
required: False
default: False
version_added: "2.3"
folder:
description:
- Destination folder path for the new guest
required: False
hardware:
description:
- "Manage some VM hardware attributes."
- "Valid attributes are: memory_mb, num_cpus and scsi"
- "scsi: Valid values are buslogic, lsilogic, lsilogicsas and paravirtual (default)"
required: False
guest_id:
description:
- "Set the guest ID (Debian, RHEL, Windows...)"
- "This field is required when creating a VM"
- "Valid values are referenced here: https://www.vmware.com/support/developer/converter-sdk/conv55_apireference/vim.vm.GuestOsDescriptor.GuestOsIdentifier.html"
required: False
version_added: "2.3"
disk:
description:
- "A list of disks to add"
- "Valid attributes are: size_[tb,gb,mb,kb], type, datastore and autoselect_datastore"
- "type: Valid value is thin (default: None)"
- "datastore: Datastore to use for the disk. If autoselect_datastore is True, filter datastore selection."
- "autoselect_datastore (bool): select the less used datastore."
required: False
resource_pool:
description:
- Affect machine to the given resource pool
- Resource pool should be child of the selected host parent
required: False
default: None
version_added: "2.3"
wait_for_ip_address:
description:
- Wait until vcenter detects an IP address for the guest
required: False
force:
description:
- Ignore warnings and complete the actions
required: False
datacenter:
description:
- Destination datacenter for the deploy operation
required: True
cluster:
description:
- The cluster name where the VM will run.
required: False
version_added: "2.3"
esxi_hostname:
description:
- The esxi hostname where the VM will run.
required: False
annotation:
description:
- A note or annotation to include in the VM
required: False
version_added: "2.3"
customize:
description:
- Should customization spec be applied. This is only used when deploying a template.
required: False
version_added: "2.3"
networks:
description:
- Network to use should include VM network name or VLAN, ip and gateway
- "You can add 'mac' optional field to customize mac address"
required: False
version_added: "2.3"
dns_servers:
description:
- DNS servers to use
required: False
version_added: "2.3"
domain:
description:
- Domain to use while customizing
required: False
version_added: "2.3"
snapshot_op:
description:
- A key, value pair of snapshot operation types and their additional required parameters.
required: False
version_added: "2.3"
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
Example from Ansible playbook
#
# Create a VM from a template
#
- name: create the VM
vmware_guest:
validate_certs: False
hostname: 192.0.2.44
username: administrator@vsphere.local
password: vmware
name: testvm_2
state: poweredon
folder: testvms
guest_id: centos64guest
disk:
- size_gb: 10
type: thin
datastore: g73_datastore
hardware:
memory_mb: 512
num_cpus: 1
scsi: paravirtual
datacenter: datacenter1
esxi_hostname: 192.0.2.117
template: template_el7
wait_for_ip_address: yes
register: deploy
#
# Create a VM and flag it as a template
#
- name: create VM template
vmware_guest:
validate_certs: False
hostname: 192.0.2.88
username: administrator@vsphere.local
password: vmware
name: testvm_6
folder: testvms
is_template: yes
guest_id: debian6_64Guest
resource_pool: highperformance_pool
disk:
- size_gb: 10
type: thin
datastore: g73_datastore
hardware:
memory_mb: 512
num_cpus: 1
scsi: lsilogic
datacenter: datacenter1
cluster: vmware_cluster_esx
wait_for_ip_address: yes
register: deploy
#
# Clone Template and customize
#
- name: Clone template and customize
vmware_guest:
hostname: "192.168.1.209"
username: "administrator@vsphere.local"
password: "vmware"
validate_certs: False
name: testvm-2
datacenter: datacenter1
cluster: cluster
validate_certs: False
template: template_el7
customize: True
domain: "example.com"
dns_servers: ['192.168.1.1','192.168.1.2']
networks:
'192.168.1.0/24':
network: 'VM Network'
gateway: '192.168.1.1'
ip: "192.168.1.100"
mac: "aa:bb:dd:aa:00:14"
#
# Gather facts only
#
- name: gather the VM facts
vmware_guest:
validate_certs: False
hostname: 192.168.1.209
username: administrator@vsphere.local
password: vmware
name: testvm_2
esxi_hostname: 192.168.1.117
state: gatherfacts
register: facts
### Snapshot Operations
# Create snapshot
- vmware_guest:
hostname: 192.168.1.209
username: administrator@vsphere.local
password: vmware
validate_certs: False
name: dummy_vm
snapshot_op:
op_type: create
name: snap1
description: snap1_description
# Remove a snapshot
- vmware_guest:
hostname: 192.168.1.209
username: administrator@vsphere.local
password: vmware
validate_certs: False
name: dummy_vm
snapshot_op:
op_type: remove
name: snap1
# Revert to a snapshot
- vmware_guest:
hostname: 192.168.1.209
username: administrator@vsphere.local
password: vmware
validate_certs: False
name: dummy_vm
snapshot_op:
op_type: revert
name: snap1
# List all snapshots of a VM
- vmware_guest:
hostname: 192.168.1.209
username: administrator@vsphere.local
password: vmware
validate_certs: False
name: dummy_vm
snapshot_op:
op_type: list_all
# List current snapshot of a VM
- vmware_guest:
hostname: 192.168.1.209
username: administrator@vsphere.local
password: vmware
validate_certs: False
name: dummy_vm
snapshot_op:
op_type: list_current
# Remove all snapshots of a VM
- vmware_guest:
hostname: 192.168.1.209
username: administrator@vsphere.local
password: vmware
validate_certs: False
name: dummy_vm
snapshot_op:
op_type: remove_all
'''
# BUGFIX: the YAML key was misspelled "descripton"; ansible-doc expects
# "description" under each return value.
RETURN = """
instance:
    description: metadata about the new virtualmachine
    returned: always
    type: dict
    sample: None
"""
import os
import time
from netaddr import IPNetwork, IPAddress
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.urls import fetch_url
from ansible.module_utils.vmware import get_all_objs, connect_to_api
try:
import json
except ImportError:
import simplejson as json
HAS_PYVMOMI = False
try:
import pyVmomi
from pyVmomi import vim
HAS_PYVMOMI = True
except ImportError:
pass
class PyVmomiDeviceHelper(object):
    """ This class is a helper to create easily VMWare Objects for PyVmomiHelper """

    def __init__(self, module):
        self.module = module
        # Next free unit number on the SCSI bus; unit 7 is reserved for the
        # controller itself and is skipped below.
        self.next_disk_unit_number = 0

    @staticmethod
    def create_scsi_controller(scsi_type):
        """Return a VirtualDeviceSpec that adds a SCSI controller of the
        given type ('lsilogic', 'paravirtual', 'buslogic' or 'lsilogicsas')."""
        scsi_ctl = vim.vm.device.VirtualDeviceSpec()
        scsi_ctl.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
        if scsi_type == 'lsilogic':
            scsi_ctl.device = vim.vm.device.VirtualLsiLogicController()
        elif scsi_type == 'paravirtual':
            scsi_ctl.device = vim.vm.device.ParaVirtualSCSIController()
        elif scsi_type == 'buslogic':
            scsi_ctl.device = vim.vm.device.VirtualBusLogicController()
        elif scsi_type == 'lsilogicsas':
            scsi_ctl.device = vim.vm.device.VirtualLsiLogicSASController()
        # NOTE(review): an unrecognised scsi_type leaves scsi_ctl.device
        # unset and fails on the next line; callers validate the value first.
        scsi_ctl.device.deviceInfo = vim.Description()
        scsi_ctl.device.slotInfo = vim.vm.device.VirtualDevice.PciBusSlotInfo()
        scsi_ctl.device.slotInfo.pciSlotNumber = 16
        scsi_ctl.device.controllerKey = 100
        scsi_ctl.device.unitNumber = 3
        scsi_ctl.device.busNumber = 0
        scsi_ctl.device.hotAddRemove = True
        scsi_ctl.device.sharedBus = 'noSharing'
        # SCSI unit 7 is reserved for the controller.
        scsi_ctl.device.scsiCtlrUnitNumber = 7
        return scsi_ctl

    @staticmethod
    def is_scsi_controller(device):
        """Return True when *device* is any supported SCSI controller type."""
        return isinstance(device, vim.vm.device.VirtualLsiLogicController) or \
            isinstance(device, vim.vm.device.ParaVirtualSCSIController) or \
            isinstance(device, vim.vm.device.VirtualBusLogicController) or \
            isinstance(device, vim.vm.device.VirtualLsiLogicSASController)

    def create_scsi_disk(self, scsi_ctl, disk_index=None):
        """Return a VirtualDeviceSpec creating a flat disk attached to
        *scsi_ctl*; *disk_index* optionally forces the SCSI unit number."""
        diskspec = vim.vm.device.VirtualDeviceSpec()
        diskspec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
        diskspec.fileOperation = vim.vm.device.VirtualDeviceSpec.FileOperation.create
        diskspec.device = vim.vm.device.VirtualDisk()
        diskspec.device.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
        diskspec.device.backing.diskMode = 'persistent'
        diskspec.device.controllerKey = scsi_ctl.device.key
        # BUGFIX: unit 7 is reserved for the SCSI controller.  The original
        # guarded this with bare asserts, which are stripped when Python runs
        # with -O; fail explicitly instead.
        if self.next_disk_unit_number == 7 or disk_index == 7:
            self.module.fail_json(msg="SCSI disk unit number 7 is reserved for the controller")
        """
        Configure disk unit number.
        """
        if disk_index is not None:
            diskspec.device.unitNumber = disk_index
            self.next_disk_unit_number = disk_index + 1
        else:
            diskspec.device.unitNumber = self.next_disk_unit_number
            self.next_disk_unit_number += 1
        # unit number 7 is reserved to SCSI controller, increase next index
        if self.next_disk_unit_number == 7:
            self.next_disk_unit_number += 1
        return diskspec

    def create_nic(self, device_type, device_label, device_infos):
        """Return a VirtualDeviceSpec for a NIC of *device_type* attached to
        the network described by *device_infos* (keys: network, optional mac)."""
        nic = vim.vm.device.VirtualDeviceSpec()
        # BUGFIX: 'pcnet32' and 'vmxnet2' were two independent `if` statements
        # in the original; folded into one consistent elif chain.
        if device_type == 'pcnet32':
            nic.device = vim.vm.device.VirtualPCNet32()
        elif device_type == 'vmxnet2':
            nic.device = vim.vm.device.VirtualVmxnet2()
        elif device_type == 'vmxnet3':
            nic.device = vim.vm.device.VirtualVmxnet3()
        elif device_type == 'e1000':
            nic.device = vim.vm.device.VirtualE1000()
        elif device_type == 'e1000e':
            nic.device = vim.vm.device.VirtualE1000e()
        elif device_type == 'sriov':
            nic.device = vim.vm.device.VirtualSriovEthernetCard()
        else:
            self.module.fail_json(msg="Invalid device_type '%s' for network %s" %
                                      (device_type, device_infos['network']))
        nic.device.wakeOnLanEnabled = True
        nic.device.addressType = 'assigned'
        nic.device.deviceInfo = vim.Description()
        nic.device.deviceInfo.label = device_label
        nic.device.deviceInfo.summary = device_infos['network']
        nic.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
        nic.device.connectable.startConnected = True
        nic.device.connectable.allowGuestControl = True
        nic.device.connectable.connected = True
        # Honour an explicitly requested MAC address.
        if 'mac' in device_infos:
            nic.device.macAddress = device_infos['mac']
        return nic
class PyVmomiCache(object):
    """ This class caches references to objects which are requested multiples times but not modified """

    def __init__(self, content):
        self.content = content
        self.networks = {}
        self.clusters = {}
        self.esx_hosts = {}

    def get_network(self, network):
        """Return the vim.Network named *network*, resolving it only once."""
        try:
            return self.networks[network]
        except KeyError:
            self.networks[network] = get_obj(self.content, [vim.Network], network)
            return self.networks[network]

    def get_cluster(self, cluster):
        """Return the vim.ClusterComputeResource named *cluster*, cached."""
        try:
            return self.clusters[cluster]
        except KeyError:
            self.clusters[cluster] = get_obj(self.content, [vim.ClusterComputeResource], cluster)
            return self.clusters[cluster]

    def get_esx_host(self, host):
        """Return the vim.HostSystem named *host*, cached."""
        try:
            return self.esx_hosts[host]
        except KeyError:
            self.esx_hosts[host] = get_obj(self.content, [vim.HostSystem], host)
            return self.esx_hosts[host]
class PyVmomiHelper(object):
def __init__(self, module):
    """Connect to the vSphere API and initialise per-run state."""
    if not HAS_PYVMOMI:
        module.fail_json(msg='pyvmomi module required')
    self.module = module
    self.device_helper = PyVmomiDeviceHelper(self.module)
    self.params = module.params
    self.si = None
    # Service content of the live vCenter/ESXi connection.
    self.content = connect_to_api(self.module)
    self.datacenter = None
    # Folder tree and its indexes, built lazily by getfolders().
    self.folders = None
    self.foldermap = {'fvim_by_path': {}, 'path_by_fvim': {}, 'path_by_vvim': {}, 'paths': {},
                      'uuids': {}}
    self.configspec = None
    # Set to True whenever a configure_* step finds something to change.
    self.change_detected = False
    self.customspec = None
    self.current_vm_obj = None
    self.cache = PyVmomiCache(self.content)
def should_deploy_from_template(self):
return self.params.get('template') is not None
def _build_folder_tree(self, folder):
    """Recursively convert a vim.Folder into a nested dict with keys
    'name', 'vimobj', 'subfolders' (child-folder dicts keyed by vim
    object) and 'virtualmachines' (list of vim.VirtualMachine)."""
    tree = {'virtualmachines': [],
            'subfolders': {},
            'vimobj': folder,
            'name': folder.name}
    children = None
    if hasattr(folder, 'childEntity'):
        children = folder.childEntity
    if children:
        for child in children:
            # Guard against self-references in the inventory.
            if child == folder or child in tree:
                continue
            if isinstance(child, vim.Folder):
                ctree = self._build_folder_tree(child)
                tree['subfolders'][child] = dict.copy(ctree)
            elif isinstance(child, vim.VirtualMachine):
                tree['virtualmachines'].append(child)
    else:
        # NOTE(review): reached when there are no children; returning the
        # bare object only makes sense when a VM was passed in directly —
        # callers always pass a folder, so this looks like dead code.
        if isinstance(folder, vim.VirtualMachine):
            return folder
    return tree
def _build_folder_map(self, folder, inpath='/'):
    """ Build a searchable index for vms+uuids+folders """
    # Recursion over subfolders passes (vimobj, tree-dict) tuples.
    if isinstance(folder, tuple):
        folder = folder[1]
    thispath = os.path.join(inpath, folder['name'])
    if thispath not in self.foldermap['paths']:
        self.foldermap['paths'][thispath] = []
    # store object by path and store path by object
    self.foldermap['fvim_by_path'][thispath] = folder['vimobj']
    self.foldermap['path_by_fvim'][folder['vimobj']] = thispath
    for item in folder.items():
        k = item[0]
        v = item[1]
        if k == 'name':
            pass
        elif k == 'subfolders':
            # Recurse into each child folder with the extended path.
            for x in v.items():
                self._build_folder_map(x, inpath=thispath)
        elif k == 'virtualmachines':
            for x in v:
                # Apparently x.config can be None on corrupted VMs
                if x.config is None: continue
                self.foldermap['uuids'][x.config.uuid] = x.config.name
                self.foldermap['paths'][thispath].append(x.config.uuid)
                if x not in self.foldermap['path_by_vvim']:
                    self.foldermap['path_by_vvim'][x] = thispath
def getfolders(self):
    """Build the datacenter folder tree and its lookup indexes."""
    if not self.datacenter:
        self.get_datacenter()
    root_folder = self.datacenter.vmFolder
    self.folders = self._build_folder_tree(root_folder)
    self._build_folder_map(self.folders)
@staticmethod
def compile_folder_path_for_object(vobj):
    """ make a /vm/foo/bar/baz like folder path for an object """
    paths = []
    if isinstance(vobj, vim.Folder):
        paths.append(vobj.name)
    thisobj = vobj
    # Walk up the parent chain collecting every enclosing folder name.
    while hasattr(thisobj, 'parent'):
        thisobj = thisobj.parent
        if isinstance(thisobj, vim.Folder):
            paths.append(thisobj.name)
    paths.reverse()
    # Drop the implicit 'Datacenters' root.  BUGFIX: guard against an empty
    # list — the original raised IndexError when no folder ancestor existed.
    if paths and paths[0] == 'Datacenters':
        paths.remove('Datacenters')
    return '/' + '/'.join(paths)
def get_datacenter(self):
    """Resolve the datacenter named in the task params and cache it."""
    dc_name = self.params['datacenter']
    self.datacenter = get_obj(self.content, [vim.Datacenter], dc_name)
def getvm(self, name=None, uuid=None, folder=None, name_match=None, cache=False):
    """Locate a VM by uuid, by folder+name, or by name alone.

    Search order: uuid lookup, then inventory-path search inside *folder*,
    then a folder-path comparison over all VMs, then a plain name search
    (disambiguated by *name_match* 'first'/'last', or failing on
    duplicates).  Returns the vim.VirtualMachine or None; when *cache* is
    True the hit is also stored in self.current_vm_obj.
    """
    # https://www.vmware.com/support/developer/vc-sdk/visdk2xpubs/ReferenceGuide/vim.SearchIndex.html
    # self.si.content.searchIndex.FindByInventoryPath('DC1/vm/test_folder')
    vm = None
    searchpath = None
    if uuid:
        vm = self.content.searchIndex.FindByUuid(uuid=uuid, vmSearch=True)
    elif folder:
        # Build the absolute folder path to pass into the search method
        if self.params['folder'].startswith('/'):
            searchpath = '%(datacenter)s%(folder)s' % self.params
        else:
            # need to look for matching absolute path
            if not self.folders:
                self.getfolders()
            paths = self.foldermap['paths'].keys()
            paths = [x for x in paths if x.endswith(self.params['folder'])]
            if len(paths) > 1:
                self.module.fail_json(
                    msg='%(folder)s matches more than one folder. Please use the absolute path starting with /vm/' % self.params)
            elif paths:
                searchpath = paths[0]
        if searchpath:
            # get all objects for this path ...
            fObj = self.content.searchIndex.FindByInventoryPath(searchpath)
            if fObj:
                if isinstance(fObj, vim.Datacenter):
                    fObj = fObj.vmFolder
                for cObj in fObj.childEntity:
                    if not isinstance(cObj, vim.VirtualMachine):
                        continue
                    if cObj.name == name:
                        vm = cObj
                        break
    if not vm:
        # FIXME - this is unused if folder has a default value
        # narrow down by folder
        if folder:
            if not self.folders:
                self.getfolders()
            # compare the folder path of each VM against the search path
            vmList = get_all_objs(self.content, [vim.VirtualMachine])
            for item in vmList.items():
                vobj = item[0]
                if not isinstance(vobj.parent, vim.Folder):
                    continue
                if self.compile_folder_path_for_object(vobj) == searchpath:
                    # Match by name
                    if vobj.config.name == name:
                        self.current_vm_obj = vobj
                        return vobj
        if name_match:
            if name_match == 'first':
                vm = get_obj(self.content, [vim.VirtualMachine], name)
            elif name_match == 'last':
                matches = []
                for thisvm in get_all_objs(self.content, [vim.VirtualMachine]):
                    if thisvm.config.name == name:
                        matches.append(thisvm)
                if matches:
                    vm = matches[-1]
        else:
            # No disambiguation requested: a duplicate name is fatal.
            matches = []
            for thisvm in get_all_objs(self.content, [vim.VirtualMachine]):
                if thisvm.config.name == name:
                    matches.append(thisvm)
            if len(matches) > 1:
                self.module.fail_json(
                    msg='more than 1 vm exists by the name %s. Please specify a uuid, or a folder, '
                        'or a datacenter or name_match' % name)
            if matches:
                vm = matches[0]
    if cache and vm:
        self.current_vm_obj = vm
    return vm
def set_powerstate(self, vm, state, force):
    """
    Set the power status for a VM determined by the current and
    requested states. force is forceful
    """
    facts = self.gather_facts(vm)
    expected_state = state.replace('_', '').lower()
    current_state = facts['hw_power_status'].lower()
    result = {}
    # Need Force
    if not force and current_state not in ['poweredon', 'poweredoff']:
        return "VM is in %s power state. Force is required!" % current_state
    # State is already true
    if current_state == expected_state:
        result['changed'] = False
        result['failed'] = False
    else:
        task = None
        try:
            if expected_state == 'poweredoff':
                task = vm.PowerOff()
            elif expected_state == 'poweredon':
                task = vm.PowerOn()
            elif expected_state == 'restarted':
                if current_state in ('poweredon', 'poweringon', 'resetting', 'poweredoff'):
                    task = vm.Reset()
                else:
                    result = {'changed': False, 'failed': True,
                              'msg': "Cannot restart VM in the current state %s" % current_state}
            elif expected_state == 'suspended':
                if current_state in ('poweredon', 'poweringon'):
                    task = vm.Suspend()
                else:
                    result = {'changed': False, 'failed': True,
                              'msg': 'Cannot suspend VM in the current state %s' % current_state}
        except Exception as e:
            # BUGFIX: the original called get_exception(), which is never
            # imported in this module and therefore raised NameError instead
            # of reporting the real vSphere failure.
            result = {'changed': False, 'failed': True,
                      'msg': str(e)}
        if task:
            self.wait_for_task(task)
            if task.info.state == 'error':
                result = {'changed': False, 'failed': True, 'msg': task.info.error.msg}
            else:
                result = {'changed': True, 'failed': False}
    # need to get new metadata if changed (use .get: result may lack the
    # key when no branch above populated it)
    if result.get('changed'):
        newvm = self.getvm(uuid=vm.config.uuid)
        facts = self.gather_facts(newvm)
        result['instance'] = facts
    return result
@staticmethod
def gather_facts(vm):
    """ Gather facts from vim.VirtualMachine object. """
    facts = {
        'module_hw': True,
        'hw_name': vm.config.name,
        'hw_power_status': vm.summary.runtime.powerState,
        'hw_guest_full_name': vm.summary.guest.guestFullName,
        'hw_guest_id': vm.summary.guest.guestId,
        'hw_product_uuid': vm.config.uuid,
        'hw_processor_count': vm.config.hardware.numCPU,
        'hw_memtotal_mb': vm.config.hardware.memoryMB,
        'hw_interfaces': [],
        'ipv4': None,
        'ipv6': None,
    }
    # Map each guest MAC address to its list of reported IP addresses.
    netDict = {}
    for guest_nic in vm.guest.net:
        netDict[guest_nic.macAddress] = list(guest_nic.ipAddress)
    # Record the last seen IPv4 and IPv6 address across all NICs.
    for k, v in iteritems(netDict):
        for ipaddress in v:
            if not ipaddress:
                continue
            if '::' in ipaddress:
                facts['ipv6'] = ipaddress
            else:
                facts['ipv4'] = ipaddress
    # Emit one hw_ethN entry per NIC, numbered in device order.
    ethernet_idx = 0
    for entry in vm.config.hardware.device:
        if not hasattr(entry, 'macAddress'):
            continue
        factname = 'hw_eth' + str(ethernet_idx)
        facts[factname] = {
            'addresstype': entry.addressType,
            'label': entry.deviceInfo.label,
            'macaddress': entry.macAddress,
            'ipaddresses': netDict.get(entry.macAddress, None),
            'macaddress_dash': entry.macAddress.replace(':', '-'),
            'summary': entry.deviceInfo.summary,
        }
        facts['hw_interfaces'].append('eth' + str(ethernet_idx))
        ethernet_idx += 1
    return facts
def remove_vm(self, vm):
    """Destroy *vm* and wait for the destroy task to complete."""
    # https://www.vmware.com/support/developer/converter-sdk/conv60_apireference/vim.ManagedEntity.html#destroy
    task = vm.Destroy()
    self.wait_for_task(task)
    if task.info.state == 'error':
        return {'changed': False, 'failed': True, 'msg': task.info.error.msg}
    return {'changed': True, 'failed': False}
def configure_guestid(self, vm_obj, vm_creation=False):
# guest_id is not required when using templates
if self.should_deploy_from_template() and self.params.get('guest_id') is None:
return
# guest_id is only mandatory on VM creation
if vm_creation and self.params['guest_id'] is None:
self.module.fail_json(msg="guest_id attribute is mandatory for VM creation")
if vm_obj is None or self.configspec.guestId != vm_obj.summary.guest.guestId:
self.change_detected = True
self.configspec.guestId = self.params['guest_id']
def configure_cpu_and_memory(self, vm_obj, vm_creation=False):
    """Apply hardware.num_cpus / hardware.memory_mb from the task params
    to the config spec, flagging change_detected when they differ from
    the existing VM; both are mandatory for from-scratch creation."""
    # set cpu/memory/etc
    if 'hardware' in self.params:
        if 'num_cpus' in self.params['hardware']:
            self.configspec.numCPUs = int(self.params['hardware']['num_cpus'])
            if vm_obj is None or self.configspec.numCPUs != vm_obj.config.hardware.numCPU:
                self.change_detected = True
        # num_cpu is mandatory for VM creation
        elif vm_creation and not self.should_deploy_from_template():
            self.module.fail_json(msg="hardware.num_cpus attribute is mandatory for VM creation")
        if 'memory_mb' in self.params['hardware']:
            self.configspec.memoryMB = int(self.params['hardware']['memory_mb'])
            if vm_obj is None or self.configspec.memoryMB != vm_obj.config.hardware.memoryMB:
                self.change_detected = True
        # memory_mb is mandatory for VM creation
        elif vm_creation and not self.should_deploy_from_template():
            self.module.fail_json(msg="hardware.memory_mb attribute is mandatory for VM creation")
def get_vm_network_interfaces(self, vm=None):
if vm is None:
return []
device_list = []
for device in vm.config.hardware.device:
if isinstance(device, vim.vm.device.VirtualPCNet32) or \
isinstance(device, vim.vm.device.VirtualVmxnet2) or \
isinstance(device, vim.vm.device.VirtualVmxnet3) or \
isinstance(device, vim.vm.device.VirtualE1000) or \
isinstance(device, vim.vm.device.VirtualE1000e) or \
isinstance(device, vim.vm.device.VirtualSriovEthernetCard):
device_list.append(device)
return device_list
def configure_network(self, vm_obj):
    """Validate the 'networks' parameter and build NIC deviceChange entries.

    Also prepares self.customspec (guest customization: fixed IPs, DNS,
    hostname) when creating a VM or deploying from a template.
    NOTE(review): this method mutates self.params['networks'] in place
    (adds 'subnet_mask', resolves 'vlan' to 'network').
    """
    # Ignore empty networks, this permits to keep networks when deploying a template/cloning a VM
    if len(self.params['networks']) == 0:
        return
    network_devices = list()
    for network in self.params['networks']:
        if network:
            if 'ip' in self.params['networks'][network]:
                ip = self.params['networks'][network]['ip']
                # The dict key is the CIDR; the configured ip must fall inside it.
                if ip not in IPNetwork(network):
                    self.module.fail_json(msg="ip '%s' not in network %s" % (ip, network))
                ipnet = IPNetwork(network)
                self.params['networks'][network]['subnet_mask'] = str(ipnet.netmask)
            if 'network' in self.params['networks'][network]:
                if get_obj(self.content, [vim.Network], self.params['networks'][network]['network']) is None:
                    self.module.fail_json(msg="Network %s doesn't exists" % network)
            elif 'vlan' in self.params['networks'][network]:
                # Resolve a VLAN id to the distributed portgroup carrying it.
                network_name = None
                dvps = get_all_objs(self.content, [vim.dvs.DistributedVirtualPortgroup])
                for dvp in dvps:
                    if dvp.config.defaultPortConfig.vlan.vlanId == self.params['networks'][network]['vlan']:
                        network_name = dvp.config.name
                        break
                if network_name:
                    self.params['networks'][network]['network'] = network_name
                else:
                    self.module.fail_json(msg="VLAN %(vlan)s doesn't exists" % self.params['networks'][network])
            else:
                self.module.fail_json(msg="You need to define a network or a vlan")
            network_devices.append(self.params['networks'][network])
    adaptermaps = []
    # List current device for Clone or Idempotency
    current_net_devices = self.get_vm_network_interfaces(vm=vm_obj)
    if len(network_devices) < len(current_net_devices):
        self.module.fail_json(msg="given network device list is lesser than current VM device list (%d < %d). "
                                  "Removing interfaces is not allowed"
                              % (len(network_devices), len(current_net_devices)))
    for key in range(0, len(network_devices)):
        # Default device type is vmxnet3, VMWare best practice
        device_type = network_devices[key].get('device_type', 'vmxnet3')
        nic = self.device_helper.create_nic(device_type,
                                            'Network Adapter %s' % (key + 1),
                                            network_devices[key])
        nic_change_detected = False
        if key < len(current_net_devices) and (vm_obj or self.should_deploy_from_template()):
            # Re-use (edit) the existing interface rather than adding a new one.
            nic.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
            # Changing mac address has no effect when editing interface
            if 'mac' in network_devices[key] and nic.device.macAddress != current_net_devices[key].macAddress:
                self.module.fail_json(msg="Changing MAC address has not effect when interface is already present. "
                                          "The failing new MAC address is %s" % nic.device.macAddress)
            nic.device = current_net_devices[key]
            nic.device.deviceInfo = vim.Description()
        else:
            nic.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
            nic_change_detected = True
        if hasattr(self.cache.get_network(network_devices[key]['network']), 'portKeys'):
            # VDS switch
            pg_obj = get_obj(self.content, [vim.dvs.DistributedVirtualPortgroup], network_devices[key]['network'])
            dvs_port_connection = vim.dvs.PortConnection()
            dvs_port_connection.portgroupKey = pg_obj.key
            dvs_port_connection.switchUuid = pg_obj.config.distributedVirtualSwitch.uuid
            nic.device.backing = vim.vm.device.VirtualEthernetCard.DistributedVirtualPortBackingInfo()
            nic.device.backing.port = dvs_port_connection
        else:
            # vSwitch
            if not isinstance(nic.device.backing, vim.vm.device.VirtualEthernetCard.NetworkBackingInfo):
                nic.device.backing = vim.vm.device.VirtualEthernetCard.NetworkBackingInfo()
                nic_change_detected = True
            net_obj = self.cache.get_network(network_devices[key]['network'])
            if nic.device.backing.network != net_obj:
                nic.device.backing.network = net_obj
                nic_change_detected = True
            if nic.device.backing.deviceName != network_devices[key]['network']:
                nic.device.backing.deviceName = network_devices[key]['network']
                nic_change_detected = True
        if nic_change_detected:
            self.configspec.deviceChange.append(nic)
            self.change_detected = True
        # Static-IP customization mapping, only for creation / template deploys.
        if vm_obj is None or self.should_deploy_from_template():
            if 'ip' in network_devices[key]:
                guest_map = vim.vm.customization.AdapterMapping()
                guest_map.adapter = vim.vm.customization.IPSettings()
                guest_map.adapter.ip = vim.vm.customization.FixedIp()
                guest_map.adapter.ip.ipAddress = str(network_devices[key]['ip'])
                guest_map.adapter.subnetMask = str(network_devices[key]['subnet_mask'])
                if 'gateway' in network_devices[key]:
                    guest_map.adapter.gateway = network_devices[key]['gateway']
                if self.params.get('domain'):
                    guest_map.adapter.dnsDomain = self.params['domain']
                adaptermaps.append(guest_map)
    if vm_obj is None or self.should_deploy_from_template():
        # DNS settings
        globalip = vim.vm.customization.GlobalIPSettings()
        globalip.dnsServerList = self.params['dns_servers']
        globalip.dnsSuffixList = str(self.params['domain'])
        # Hostname settings
        # NOTE(review): LinuxPrep is always used — Windows guests presumably
        # need SysprepText/Sysprep instead; confirm against callers.
        ident = vim.vm.customization.LinuxPrep()
        ident.domain = str(self.params['domain'])
        ident.hostName = vim.vm.customization.FixedName()
        ident.hostName.name = self.params['name']
        self.customspec = vim.vm.customization.Specification()
        self.customspec.nicSettingMap = adaptermaps
        self.customspec.globalIPSettings = globalip
        self.customspec.identity = ident
def get_vm_scsi_controller(self, vm_obj):
    """Return a VirtualDeviceSpec wrapping the VM's first SCSI controller, or None."""
    # No VM object yet means there is no controller to reuse.
    if vm_obj is None:
        return None
    for candidate in vm_obj.config.hardware.device:
        if not self.device_helper.is_scsi_controller(candidate):
            continue
        spec = vim.vm.device.VirtualDeviceSpec()
        spec.device = candidate
        return spec
    return None
def get_configured_disk_size(self, expected_disk_spec):
    """Return the configured disk size in kilobytes.

    Accepts either a combined 'size' entry (e.g. "10gb") or one of the
    size_tb/size_gb/size_mb/size_kb keys; fails the module when the unit
    is unknown or when no size information is present at all.
    """
    if [x for x in expected_disk_spec.keys() if x.startswith('size_') or x == 'size']:
        # size_tb, size_gb, size_mb, size_kb, size_b ...?
        if 'size' in expected_disk_spec:
            # Split "10gb" into the numeric part and the trailing unit.
            expected = ''.join(c for c in expected_disk_spec['size'] if c.isdigit())
            unit = expected_disk_spec['size'].replace(expected, '').lower()
            expected = int(expected)
        else:
            param = [x for x in expected_disk_spec.keys() if x.startswith('size_')][0]
            unit = param.split('_')[-1].lower()
            expected = [x[1] for x in expected_disk_spec.items() if x[0].startswith('size_')][0]
            expected = int(expected)
        # Convert to KB, the unit vSphere uses for capacityInKB.
        if unit == 'tb':
            return expected * 1024 * 1024 * 1024
        elif unit == 'gb':
            return expected * 1024 * 1024
        elif unit == 'mb':  # BUGFIX: was "' mb'" (leading space) — megabyte sizes always hit fail_json
            return expected * 1024
        elif unit == 'kb':
            return expected
        self.module.fail_json(
            msg='%s is not a supported unit for disk size. Supported units are kb, mb, gb or tb' % unit)
    # No size found but disk, fail
    self.module.fail_json(
        msg="No size, size_kb, size_mb, size_gb or size_tb attribute found into disk configuration")
def configure_disks(self, vm_obj):
    """Build deviceChange entries for the SCSI controller and virtual disks.

    An empty 'disk' parameter leaves existing disks untouched (template /
    clone deploys keep theirs).  Shrinking a disk is refused; fewer disk
    specs than the live VM has disks is also refused.
    """
    # Ignore empty disk list, this permits to keep disks when deploying a template/cloning a VM
    if len(self.params['disk']) == 0:
        return
    scsi_ctl = self.get_vm_scsi_controller(vm_obj)
    # Create scsi controller only if we are deploying a new VM, not a template or reconfiguring
    if vm_obj is None or scsi_ctl is None:
        scsi_ctl = self.device_helper.create_scsi_controller(self.get_scsi_type())
        self.change_detected = True
        self.configspec.deviceChange.append(scsi_ctl)
    # Existing virtual disks of a live VM (None when creating from scratch).
    disks = [x for x in vm_obj.config.hardware.device if isinstance(x, vim.vm.device.VirtualDisk)] \
        if vm_obj is not None else None
    if disks is not None and self.params.get('disk') and len(self.params.get('disk')) < len(disks):
        self.module.fail_json(msg="Provided disks configuration has less disks than "
                                  "the target object (%d vs %d)" % (len(self.params.get('disk')), len(disks)))
    disk_index = 0
    for expected_disk_spec in self.params.get('disk'):
        disk_modified = False
        # If we are manipulating and existing objects which has disks and disk_index is in disks
        if vm_obj is not None and disks is not None and disk_index < len(disks):
            diskspec = vim.vm.device.VirtualDeviceSpec()
            # set the operation to edit so that it knows to keep other settings
            diskspec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
            diskspec.device = disks[disk_index]
        else:
            diskspec = self.device_helper.create_scsi_disk(scsi_ctl, disk_index)
            disk_modified = True
        # is it thin?
        if 'type' in expected_disk_spec:
            if expected_disk_spec.get('type', '').lower() == 'thin':
                diskspec.device.backing.thinProvisioned = True
        # which datastore?
        if expected_disk_spec.get('datastore'):
            # TODO: This is already handled by the relocation spec,
            # but it needs to eventually be handled for all the
            # other disks defined
            pass
        # increment index for next disk search
        disk_index += 1
        # index 7 is reserved to SCSI controller
        if disk_index == 7:
            disk_index += 1
        kb = self.get_configured_disk_size(expected_disk_spec)
        # VMWare doesn't allow to reduce disk sizes
        if kb < diskspec.device.capacityInKB:
            self.module.fail_json(
                msg="given disk size is lesser than found (%d < %d). Reducing disks is not allowed." %
                    (kb, diskspec.device.capacityInKB))
        # Only emit a device change when the size differs or the disk is new.
        if kb != diskspec.device.capacityInKB or disk_modified:
            diskspec.device.capacityInKB = kb
            self.configspec.deviceChange.append(diskspec)
            self.change_detected = True
def select_host(self):
    """Pick the deployment host: first member of the given cluster, or the named ESXi host."""
    if self.params['cluster']:
        cluster = self.cache.get_cluster(self.params['cluster'])
        if not cluster:
            self.module.fail_json(msg="Failed to find a cluster named %(cluster)s" % self.params)
        # TODO: add a policy to select host
        return [member for member in cluster.host][0]
    host = self.cache.get_esx_host(self.params['esxi_hostname'])
    if not host:
        self.module.fail_json(msg="Failed to find a host named %(esxi_hostname)s" % self.params)
    return host
def select_datastore(self, vm_obj=None):
    """Choose the destination datastore for the VM; return (datastore, name).

    Honours disk[0].autoselect_datastore (most free space wins, optionally
    filtered by a datastore-name substring) or an explicit disk[0].datastore,
    and falls back to the template's own datastore when cloning.
    """
    datastore = None
    datastore_name = None
    if len(self.params['disk']) != 0:
        # TODO: really use the datastore for newly created disks
        if 'autoselect_datastore' in self.params['disk'][0] and self.params['disk'][0]['autoselect_datastore']:
            datastores = get_all_objs(self.content, [vim.Datastore])
            if datastores is None or len(datastores) == 0:
                self.module.fail_json(msg="Unable to find a datastore list when autoselecting")
            datastore_freespace = 0
            for ds in datastores:
                if ds.summary.freeSpace > datastore_freespace:
                    # If datastore field is provided, filter destination datastores
                    if 'datastore' in self.params['disk'][0] and \
                            isinstance(self.params['disk'][0]['datastore'], str) and \
                            ds.name.find(self.params['disk'][0]['datastore']) < 0:
                        continue
                    datastore = ds
                    datastore_name = datastore.name
                    datastore_freespace = ds.summary.freeSpace
        elif 'datastore' in self.params['disk'][0]:
            datastore_name = self.params['disk'][0]['datastore']
            datastore = get_obj(self.content, [vim.Datastore], datastore_name)
        else:
            self.module.fail_json(msg="Either datastore or autoselect_datastore "
                                      "should be provided to select datastore")
    if not datastore and self.should_deploy_from_template():
        # use the template's existing DS
        disks = [x for x in vm_obj.config.hardware.device if isinstance(x, vim.vm.device.VirtualDisk)]
        datastore = disks[0].backing.datastore
        datastore_name = datastore.name
    if not datastore:
        self.module.fail_json(msg="Failed to find a matching datastore")
    return datastore, datastore_name
def obj_has_parent(self, obj, parent):
    """Walk obj's parent chain; True when *parent* (matched by name) is obj or an ancestor."""
    assert obj is not None and parent is not None
    node = obj
    # Climb until the names match or the chain runs out.
    while node is not None:
        if node.name == parent.name:
            return True
        node = node.parent
    return False
def select_resource_pool(self, host):
    """Return the resource pool to use on *host*, honouring the optional 'resource_pool' parameter."""
    wanted = self.module.params['resource_pool']
    for pool in get_all_objs(self.content, [vim.ResourcePool]).items():
        candidate = pool[0]
        if not candidate:
            continue
        if not hasattr(candidate, 'parent'):
            continue
        # Only consider pools living under the selected host's parent.
        if self.obj_has_parent(candidate.parent, host.parent):
            # Either no specific pool was requested, or this is the requested one.
            if wanted is None or candidate.name == wanted:
                return candidate
    if wanted is not None:
        self.module.fail_json(msg="Could not find resource_pool %s for selected host %s"
                                  % (wanted, host.name))
    else:
        self.module.fail_json(msg="Failed to find a resource group for %s" % host.name)
def get_scsi_type(self):
    """Return the SCSI controller type from hardware.scsi, defaulting to 'paravirtual'."""
    disk_controller_type = "paravirtual"
    if 'hardware' in self.params:
        if 'scsi' in self.params['hardware']:
            if self.params['hardware']['scsi'] in ['buslogic', 'paravirtual', 'lsilogic', 'lsilogicsas']:
                disk_controller_type = self.params['hardware']['scsi']
            else:
                # BUGFIX: the message previously listed only two of the four accepted values.
                self.module.fail_json(msg="hardware.scsi attribute should be 'buslogic', 'paravirtual', "
                                          "'lsilogic' or 'lsilogicsas'")
    return disk_controller_type
def deploy_vm(self):
    """Create a new VM, either from scratch or by cloning a template.

    Builds the config/relocate/clone specs from the module parameters,
    runs the vSphere task, then applies annotation, power state and
    (optionally) waits for an IP address.  Returns an Ansible result dict.
    """
    # https://github.com/vmware/pyvmomi-community-samples/blob/master/samples/clone_vm.py
    # https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.vm.CloneSpec.html
    # https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.vm.ConfigSpec.html
    # https://www.vmware.com/support/developer/vc-sdk/visdk41pubs/ApiReference/vim.vm.RelocateSpec.html
    # FIXME:
    #   - multiple datacenters
    #   - multiple templates by the same name
    #   - static IPs
    # BUGFIX: removed an unused 'datacenters = get_all_objs(...)' lookup that
    # scanned the whole inventory and was never read.
    datacenter = get_obj(self.content, [vim.Datacenter], self.params['datacenter'])
    if not datacenter:
        self.module.fail_json(msg='No datacenter named %(datacenter)s was found' % self.params)
    # find matching folders
    if self.params['folder'].startswith('/'):
        folders = [x for x in self.foldermap['fvim_by_path'].items() if x[0] == self.params['folder']]
    else:
        folders = [x for x in self.foldermap['fvim_by_path'].items() if x[0].endswith(self.params['folder'])]
    # throw error if more than one match or no matches
    if len(folders) == 0:
        self.module.fail_json(msg='No folder matched the path: %(folder)s' % self.params)
    elif len(folders) > 1:
        self.module.fail_json(
            msg='Too many folders matched "%s", please give the full path starting with /vm/' % self.params[
                'folder'])
    # grab the folder vim object
    destfolder = folders[0][1]
    hostsystem = self.select_host()
    if self.should_deploy_from_template():
        # FIXME: need to search for this in the same way as guests to ensure accuracy
        vm_obj = get_obj(self.content, [vim.VirtualMachine], self.params['template'])
        if not vm_obj:
            self.module.fail_json(msg="Could not find a template named %(template)s" % self.params)
    else:
        vm_obj = None
    # set the destination datastore for VM & disks
    (datastore, datastore_name) = self.select_datastore(vm_obj)
    resource_pool = self.select_resource_pool(hostsystem)
    self.configspec = vim.vm.ConfigSpec(cpuHotAddEnabled=True, memoryHotAddEnabled=True)
    self.configspec.deviceChange = []
    self.configure_guestid(vm_obj=vm_obj, vm_creation=True)
    self.configure_cpu_and_memory(vm_obj=vm_obj, vm_creation=True)
    self.configure_disks(vm_obj=vm_obj)
    self.configure_network(vm_obj=vm_obj)
    try:
        if self.should_deploy_from_template():
            # create the relocation spec
            relospec = vim.vm.RelocateSpec()
            relospec.host = hostsystem
            relospec.datastore = datastore
            relospec.pool = resource_pool
            clonespec = vim.vm.CloneSpec(template=self.params['is_template'],
                                         location=relospec)
            if self.params['customize'] is True:
                clonespec.customization = self.customspec
            clonespec.config = self.configspec
            task = vm_obj.Clone(folder=destfolder, name=self.params['name'], spec=clonespec)
        else:
            # ConfigSpec require name for VM creation
            self.configspec.name = self.params['name']
            self.configspec.files = vim.vm.FileInfo(logDirectory=None,
                                                    snapshotDirectory=None,
                                                    suspendDirectory=None,
                                                    vmPathName="[" + datastore_name + "] " + self.params["name"])
            task = destfolder.CreateVM_Task(config=self.configspec, pool=resource_pool)
        self.wait_for_task(task)
    except TypeError:
        self.module.fail_json(msg="TypeError was returned, please ensure to give correct inputs.")
    if task.info.state == 'error':
        # https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2021361
        # https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2173
        return {'changed': False, 'failed': True, 'msg': task.info.error.msg}
    else:
        # set annotation
        vm = task.info.result
        if self.params['annotation']:
            annotation_spec = vim.vm.ConfigSpec()
            annotation_spec.annotation = str(self.params['annotation'])
            task = vm.ReconfigVM_Task(annotation_spec)
            self.wait_for_task(task)
        if self.params['wait_for_ip_address'] or self.params['state'] in ['poweredon', 'restarted']:
            self.set_powerstate(vm, 'poweredon', force=False)
            if self.params['wait_for_ip_address']:
                self.wait_for_vm_ip(vm)
        vm_facts = self.gather_facts(vm)
        return {'changed': self.change_detected, 'failed': False, 'instance': vm_facts}
def reconfigure_vm(self):
    """Apply configuration changes to the already-existing VM.

    Rebuilds the config spec from the module parameters, relocates the VM
    if the selected resource pool differs, and only sends a ReconfigVM
    task when a difference was detected.  Returns an Ansible result dict.
    """
    self.configspec = vim.vm.ConfigSpec()
    self.configspec.deviceChange = []
    self.configure_guestid(vm_obj=self.current_vm_obj)
    self.configure_cpu_and_memory(vm_obj=self.current_vm_obj)
    self.configure_disks(vm_obj=self.current_vm_obj)
    self.configure_network(vm_obj=self.current_vm_obj)
    relospec = vim.vm.RelocateSpec()
    hostsystem = self.select_host()
    relospec.pool = self.select_resource_pool(hostsystem)
    change_applied = False
    # Move the VM when the target resource pool differs from the current one.
    if relospec.pool != self.current_vm_obj.resourcePool:
        task = self.current_vm_obj.RelocateVM_Task(spec=relospec)
        self.wait_for_task(task)
        change_applied = True
    # Only send VMWare task if we see a modification
    if self.change_detected:
        task = self.current_vm_obj.ReconfigVM_Task(spec=self.configspec)
        self.wait_for_task(task)
        if task.info.state == 'error':
            # https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2021361
            # https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2173
            return {'changed': False, 'failed': True, 'msg': task.info.error.msg}
        change_applied = True
    vm_facts = self.gather_facts(self.current_vm_obj)
    return {'changed': change_applied, 'failed': False, 'instance': vm_facts}
@staticmethod
def wait_for_task(task):
    """Block until the vSphere task reaches a terminal state ('success' or 'error').

    # https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.Task.html
    # https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.TaskInfo.html
    # https://github.com/virtdevninja/pyvmomi-community-samples/blob/master/samples/tools/tasks.py
    """
    # Poll once per second; the task object refreshes its own info property.
    while True:
        if task.info.state in ['success', 'error']:
            return
        time.sleep(1)
def wait_for_vm_ip(self, vm, poll=100, sleep=5):
    """Poll the guest until it reports an IPv4/IPv6 address; return the last facts gathered.

    Makes at most poll+1 attempts, sleeping *sleep* seconds between
    unsuccessful attempts.
    """
    facts = {}
    for _attempt in range(poll + 1):
        # Re-fetch the VM so the guest info is fresh.
        refreshed = self.getvm(uuid=vm.config.uuid)
        facts = self.gather_facts(refreshed)
        if facts['ipv4'] or facts['ipv6']:
            break
        time.sleep(sleep)
    return facts
def fetch_file_from_guest(self, vm, username, password, src, dest):
    """Use VMWare's filemanager api to fetch a file over http.

    Authenticates inside the guest with username/password, asks the guest
    tools for a transfer URL for *src*, downloads it and writes it to the
    local path *dest*.  Returns a result dict with transfer metadata and
    a 'failed' flag.
    """
    result = {'failed': False}
    tools_status = vm.guest.toolsStatus
    # Guest file operations require a running VMware Tools service.
    if tools_status == 'toolsNotInstalled' or tools_status == 'toolsNotRunning':
        result['failed'] = True
        result['msg'] = "VMwareTools is not installed or is not running in the guest"
        return result
    # https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/NamePasswordAuthentication.rst
    creds = vim.vm.guest.NamePasswordAuthentication(
        username=username, password=password
    )
    # https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/FileManager/FileTransferInformation.rst
    fti = self.content.guestOperationsManager.fileManager. \
        InitiateFileTransferFromGuest(vm, creds, src)
    result['size'] = fti.size
    result['url'] = fti.url
    # Use module_utils to fetch the remote url returned from the api
    rsp, info = fetch_url(self.module, fti.url, use_proxy=False,
                          force=True, last_mod_time=None,
                          timeout=10, headers=None)
    # save all of the transfer data
    for k, v in iteritems(info):
        result[k] = v
    # exit early if xfer failed
    if info['status'] != 200:
        result['failed'] = True
        return result
    # attempt to read the content and write it
    try:
        with open(dest, 'wb') as f:
            f.write(rsp.read())
    except Exception as e:
        result['failed'] = True
        result['msg'] = str(e)
    return result
def push_file_to_guest(self, vm, username, password, src, dest, overwrite=True):
    """Use VMWare's filemanager api to push a local file into the guest over http.

    (The original docstring said "fetch"; this method uploads.)  Reads the
    local file *src*, asks the guest tools for an upload URL for *dest*
    and PUTs the data there.  Returns a result dict with transfer metadata
    and a 'failed' flag.
    """
    result = {'failed': False}
    tools_status = vm.guest.toolsStatus
    # Guest file operations require a running VMware Tools service.
    if tools_status == 'toolsNotInstalled' or tools_status == 'toolsNotRunning':
        result['failed'] = True
        result['msg'] = "VMwareTools is not installed or is not running in the guest"
        return result
    # https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/NamePasswordAuthentication.rst
    creds = vim.vm.guest.NamePasswordAuthentication(
        username=username, password=password
    )
    # the api requires a filesize in bytes
    fdata = None
    try:
        # filesize = os.path.getsize(src)
        filesize = os.stat(src).st_size
        with open(src, 'rb') as f:
            fdata = f.read()
        result['local_filesize'] = filesize
    except Exception as e:
        result['failed'] = True
        result['msg'] = "Unable to read src file: %s" % str(e)
        return result
    # https://www.vmware.com/support/developer/converter-sdk/conv60_apireference/vim.vm.guest.FileManager.html#initiateFileTransferToGuest
    file_attribute = vim.vm.guest.FileManager.FileAttributes()
    url = self.content.guestOperationsManager.fileManager. \
        InitiateFileTransferToGuest(vm, creds, dest, file_attribute,
                                    filesize, overwrite)
    # PUT the filedata to the url ...
    rsp, info = fetch_url(self.module, url, method="put", data=fdata,
                          use_proxy=False, force=True, last_mod_time=None,
                          timeout=10, headers=None)
    result['msg'] = str(rsp.read())
    # save all of the transfer data
    for k, v in iteritems(info):
        result[k] = v
    return result
def run_command_in_guest(self, vm, username, password, program_path, program_args, program_cwd, program_env):
    """Start a program inside the guest via VMware Tools and wait for it to exit.

    Returns a result dict with pid, owner, start/end times and exit code;
    'failed' is set when the program exits non-zero or the call raises.
    NOTE(review): program_env is accepted but never passed to the API —
    confirm whether environment support was intended.
    """
    result = {'failed': False}
    tools_status = vm.guest.toolsStatus
    # Guest process operations require a running VMware Tools service.
    if (tools_status == 'toolsNotInstalled' or
            tools_status == 'toolsNotRunning'):
        result['failed'] = True
        result['msg'] = "VMwareTools is not installed or is not running in the guest"
        return result
    # https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/NamePasswordAuthentication.rst
    creds = vim.vm.guest.NamePasswordAuthentication(
        username=username, password=password
    )
    try:
        # https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/ProcessManager.rst
        pm = self.content.guestOperationsManager.processManager
        # https://www.vmware.com/support/developer/converter-sdk/conv51_apireference/vim.vm.guest.ProcessManager.ProgramSpec.html
        ps = vim.vm.guest.ProcessManager.ProgramSpec(
            # programPath=program,
            # arguments=args
            programPath=program_path,
            arguments=program_args,
            workingDirectory=program_cwd,
        )
        res = pm.StartProgramInGuest(vm, creds, ps)
        result['pid'] = res
        pdata = pm.ListProcessesInGuest(vm, creds, [res])
        # wait for pid to finish
        while not pdata[0].endTime:
            time.sleep(1)
            pdata = pm.ListProcessesInGuest(vm, creds, [res])
        result['owner'] = pdata[0].owner
        result['startTime'] = pdata[0].startTime.isoformat()
        result['endTime'] = pdata[0].endTime.isoformat()
        result['exitCode'] = pdata[0].exitCode
        if result['exitCode'] != 0:
            result['failed'] = True
            result['msg'] = "program exited non-zero"
        else:
            result['msg'] = "program completed successfully"
    except Exception as e:
        result['msg'] = str(e)
        result['failed'] = True
    return result
def list_snapshots_recursively(self, snapshots):
    """Flatten a snapshot tree into human-readable description strings, depth-first."""
    collected = []
    for snap in snapshots:
        collected.append('Id: %s; Name: %s; Description: %s; CreateTime: %s; State: %s'
                         % (snap.id, snap.name, snap.description, snap.createTime, snap.state))
        # Descend into this snapshot's children before the next sibling.
        collected.extend(self.list_snapshots_recursively(snap.childSnapshotList))
    return collected
def get_snapshots_by_name_recursively(self, snapshots, snapname):
    """Collect snapshot-tree entries named *snapname*.

    NOTE: children of a matching snapshot are not searched (original
    behaviour preserved).
    """
    matches = []
    for snap in snapshots:
        if snap.name == snapname:
            matches.append(snap)
        else:
            matches.extend(self.get_snapshots_by_name_recursively(snap.childSnapshotList, snapname))
    return matches
def get_current_snap_obj(self, snapshots, snapob):
    """Find snapshot-tree entries whose snapshot reference equals *snapob* (searches whole tree)."""
    found = []
    for snap in snapshots:
        if snap.snapshot == snapob:
            found.append(snap)
        # Unlike the by-name lookup, children are always searched.
        found += self.get_current_snap_obj(snap.childSnapshotList, snapob)
    return found
def snapshot_vm(self, vm, guest, snapshot_op):
    """ To perform snapshot operations create/remove/revert/list_all/list_current/remove_all

    Dispatches on snapshot_op['op_type'].  Mutating operations start a
    vSphere task and wait for it; listing operations fill the result dict
    directly.  Uses module.exit_json/fail_json for early termination.
    """
    snapshot_op_name = None
    try:
        snapshot_op_name = snapshot_op['op_type']
    except KeyError:
        self.module.fail_json(msg="Specify op_type - create/remove/revert/list_all/list_current/remove_all")
    task = None
    result = {}
    if snapshot_op_name not in ['create', 'remove', 'revert', 'list_all', 'list_current', 'remove_all']:
        self.module.fail_json(msg="Specify op_type - create/remove/revert/list_all/list_current/remove_all")
    # Every operation except 'create' needs at least one existing snapshot.
    if snapshot_op_name != 'create' and vm.snapshot is None:
        self.module.exit_json(msg="VM - %s doesn't have any snapshots" % guest)
    if snapshot_op_name == 'create':
        try:
            snapname = snapshot_op['name']
        except KeyError:
            self.module.fail_json(msg="specify name & description(optional) to create a snapshot")
        if 'description' in snapshot_op:
            snapdesc = snapshot_op['description']
        else:
            snapdesc = ''
        # Memory dump and quiescing are not exposed as module options.
        dumpMemory = False
        quiesce = False
        task = vm.CreateSnapshot(snapname, snapdesc, dumpMemory, quiesce)
    elif snapshot_op_name in ['remove', 'revert']:
        try:
            snapname = snapshot_op['name']
        except KeyError:
            self.module.fail_json(msg="specify snapshot name")
        snap_obj = self.get_snapshots_by_name_recursively(vm.snapshot.rootSnapshotList, snapname)
        # if len(snap_obj) is 0; then no snapshots with specified name
        if len(snap_obj) == 1:
            snap_obj = snap_obj[0].snapshot
            if snapshot_op_name == 'remove':
                task = snap_obj.RemoveSnapshot_Task(True)
            else:
                task = snap_obj.RevertToSnapshot_Task()
        else:
            self.module.exit_json(
                msg="Couldn't find any snapshots with specified name: %s on VM: %s" % (snapname, guest))
    elif snapshot_op_name == 'list_all':
        snapshot_data = self.list_snapshots_recursively(vm.snapshot.rootSnapshotList)
        result['snapshot_data'] = snapshot_data
    elif snapshot_op_name == 'list_current':
        current_snapref = vm.snapshot.currentSnapshot
        current_snap_obj = self.get_current_snap_obj(vm.snapshot.rootSnapshotList, current_snapref)
        result['current_snapshot'] = 'Id: %s; Name: %s; Description: %s; CreateTime: %s; State: %s' % (
            current_snap_obj[0].id,
            current_snap_obj[0].name, current_snap_obj[0].description, current_snap_obj[0].createTime,
            current_snap_obj[0].state)
    elif snapshot_op_name == 'remove_all':
        task = vm.RemoveAllSnapshots()
    # Wait for any task started above and translate its outcome.
    if task:
        self.wait_for_task(task)
        if task.info.state == 'error':
            result = {'changed': False, 'failed': True, 'msg': task.info.error.msg}
        else:
            result = {'changed': True, 'failed': False}
    return result
def get_obj(content, vimtype, name):
    """
    Return an object by name, if name is None the
    first found object is returned (None when nothing matches).
    """
    view = content.viewManager.CreateContainerView(
        content.rootFolder, vimtype, True)
    found = None
    for candidate in view.view:
        # A falsy name means "take the first object seen".
        if not name or candidate.name == name:
            found = candidate
            break
    view.Destroy()
    return found
def main():
    """Ansible module entry point: parse parameters, then dispatch on VM existence and state."""
    module = AnsibleModule(
        argument_spec=dict(
            hostname=dict(
                type='str',
                default=os.environ.get('VMWARE_HOST')
            ),
            username=dict(
                type='str',
                default=os.environ.get('VMWARE_USER')
            ),
            password=dict(
                type='str', no_log=True,
                default=os.environ.get('VMWARE_PASSWORD')
            ),
            state=dict(
                required=False,
                choices=[
                    'poweredon',
                    'poweredoff',
                    'present',
                    'absent',
                    'restarted',
                    'suspended',
                    'gatherfacts',
                ],
                default='present'),
            validate_certs=dict(required=False, type='bool', default=True),
            template_src=dict(required=False, type='str', aliases=['template'], default=None),
            is_template=dict(required=False, type='bool', default=False),
            annotation=dict(required=False, type='str', aliases=['notes']),
            name=dict(required=True, type='str'),
            name_match=dict(required=False, type='str', default='first'),
            snapshot_op=dict(required=False, type='dict', default={}),
            uuid=dict(required=False, type='str'),
            folder=dict(required=False, type='str', default='/vm'),
            guest_id=dict(required=False, type='str', default=None),
            disk=dict(required=False, type='list', default=[]),
            hardware=dict(required=False, type='dict', default={}),
            force=dict(required=False, type='bool', default=False),
            datacenter=dict(required=False, type='str', default=None),
            esxi_hostname=dict(required=False, type='str', default=None),
            cluster=dict(required=False, type='str', default=None),
            wait_for_ip_address=dict(required=False, type='bool', default=True),
            customize=dict(required=False, type='bool', default=False),
            dns_servers=dict(required=False, type='list', default=None),
            domain=dict(required=False, type='str', default=None),
            networks=dict(required=False, type='dict', default={}),
            resource_pool=dict(required=False, type='str', default=None)
        ),
        supports_check_mode=True,
        mutually_exclusive=[
            ['esxi_hostname', 'cluster'],
        ],
        # NOTE(review): a single-element required_together entry and tying
        # 'state' (which has a default) to 'force' look ineffective — confirm
        # the intended constraints.
        required_together=[
            ['state', 'force'],
            ['template'],
        ],
    )
    result = {'failed': False, 'changed': False}
    # Prepend /vm if it was missing from the folder path, also strip trailing slashes
    if not module.params['folder'].startswith('/vm') and module.params['folder'].startswith('/'):
        module.params['folder'] = '/vm%(folder)s' % module.params
    module.params['folder'] = module.params['folder'].rstrip('/')
    # Fail check, customize require template to be defined
    # NOTE(review): 'template' is an alias of template_src; this relies on
    # AnsibleModule exposing alias keys in params.
    if module.params["customize"] and not module.params['template']:
        module.fail_json(msg="customize option is only valid when template option is defined")
    pyv = PyVmomiHelper(module)
    # Check if the VM exists before continuing
    vm = pyv.getvm(name=module.params['name'],
                   folder=module.params['folder'],
                   uuid=module.params['uuid'],
                   name_match=module.params['name_match'],
                   cache=True)
    # VM already exists
    if vm:
        if module.params['state'] == 'absent':
            # destroy it
            if module.params['force']:
                # has to be poweredoff first
                pyv.set_powerstate(vm, 'poweredoff', module.params['force'])
            result = pyv.remove_vm(vm)
        elif module.params['state'] == 'present':
            result = pyv.reconfigure_vm()
        elif module.params['state'] in ['poweredon', 'poweredoff', 'restarted', 'suspended']:
            # set powerstate
            tmp_result = pyv.set_powerstate(vm, module.params['state'], module.params['force'])
            if tmp_result['changed']:
                result["changed"] = True
            if not tmp_result["failed"]:
                result["failed"] = False
        elif module.params['state'] == 'gatherfacts':
            # Run for facts only
            try:
                module.exit_json(instance=pyv.gather_facts(vm))
            except Exception:
                e = get_exception()
                module.fail_json(msg="Fact gather failed with exception %s" % e)
        elif module.params['snapshot_op']:
            result = pyv.snapshot_vm(vm, module.params['name'], module.params['snapshot_op'])
        else:
            # This should not happen
            assert False
    # VM doesn't exist
    else:
        if module.params['state'] in ['poweredon', 'poweredoff', 'present', 'restarted', 'suspended']:
            # Create it ...
            result = pyv.deploy_vm()
        elif module.params['state'] == 'gatherfacts':
            module.fail_json(msg="Unable to gather facts for inexistant VM %s" % module.params['name'])
    # Normalise the result dict before reporting back to Ansible.
    if 'failed' not in result:
        result['failed'] = False
    if result['failed']:
        module.fail_json(**result)
    else:
        module.exit_json(**result)
# Standard boilerplate: run the module only when executed directly.
if __name__ == '__main__':
    main()
| gpl-3.0 |
PLyczkowski/Sticky-Keymap | 2.74/scripts/addons_contrib/space_view3d_cursor_control/history.py | 3 | 9299 | # -*- coding: utf-8 -*-
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
"""
TODO:
IDEAS:
LATER:
ISSUES:
Bugs:
Seg-faults when unregistering addon...
Mites:
* History back button does not light up on first cursor move.
It does light up on the second move, or when the mouse enters the tool area.
* Switching between local and global view triggers a new cursor position in the history trace.
* Each consecutive click on the linex operator triggers a new cursor position in the history trace.
(2011-01-16) Was not able to fix this because of some strange script behaviour
while trying to clear linexChoice from addHistoryLocation
QUESTIONS:
"""
import bpy
import bgl
import math
from mathutils import Vector, Matrix
from mathutils import geometry
from .misc_utils import *
from .constants_utils import *
from .cursor_utils import *
from .ui_utils import *
class CursorHistoryData(bpy.types.PropertyGroup):
    """Scene-attached store that tracks the 3D-cursor location history.

    NOTE(review): historyDepth/historyWindow/historyPosition/historyLocation
    are plain *class* attributes, so every scene shares one history -- this
    looks intentional, since Blender ID properties cannot hold arbitrary
    Python lists (hence the "must be in a list" comments below).
    """
    # History tracker
    historyDraw = bpy.props.BoolProperty(description="Draw history trace in 3D view",default=1)
    # Maximum number of cursor locations retained.
    historyDepth = 144
    # Number of entries drawn around the current position by the trace.
    historyWindow = 12
    historyPosition = [-1] # Integer must be in a list or else it can not be written to
    historyLocation = []
    #historySuppression = [False] # Boolean must be in a list or else it can not be written to
    def addHistoryLocation(self, l):
        """Record cursor location *l* (mathutils.Vector) in the history.

        Consecutive duplicates are ignored; any "redo" tail beyond the
        current position is dropped and the list is capped at historyDepth.
        """
        if(self.historyPosition[0]==-1):
            self.historyLocation.append(l.copy())
            self.historyPosition[0]=0
            return
        if(l==self.historyLocation[self.historyPosition[0]]):
            return
        #if self.historySuppression[0]:
            #self.historyPosition[0] = self.historyPosition[0] - 1
        #else:
            #self.hideLinexChoice()
        # Drop every entry past the current position (redo tail).
        while(len(self.historyLocation)>self.historyPosition[0]+1):
            self.historyLocation.pop(self.historyPosition[0]+1)
        #self.historySuppression[0] = False
        self.historyLocation.append(l.copy())
        # Enforce the depth cap by discarding the oldest entry.
        if(len(self.historyLocation)>self.historyDepth):
            self.historyLocation.pop(0)
        self.historyPosition[0] = len(self.historyLocation)-1
        #print (self.historyLocation)
    #def enableHistorySuppression(self):
        #self.historySuppression[0] = True
    def previousLocation(self):
        """Step one entry back in the history and move the 3D cursor there."""
        if(self.historyPosition[0]<=0):
            return
        self.historyPosition[0] = self.historyPosition[0] - 1
        CursorAccess.setCursor(self.historyLocation[self.historyPosition[0]].copy())
    def nextLocation(self):
        """Step one entry forward in the history and move the 3D cursor there."""
        if(self.historyPosition[0]<0):
            return
        if(self.historyPosition[0]+1==len(self.historyLocation)):
            return
        self.historyPosition[0] = self.historyPosition[0] + 1
        CursorAccess.setCursor(self.historyLocation[self.historyPosition[0]].copy())
class VIEW3D_OT_cursor_previous(bpy.types.Operator):
    """Previous cursor location"""
    bl_idname = "view3d.cursor_previous"
    bl_label = "Previous cursor location"
    bl_options = {'REGISTER'}

    def modal(self, context, event):
        # No interactive state to manage; finish immediately.
        return {'FINISHED'}

    def execute(self, context):
        # Delegate to the scene-level history tracker.
        context.scene.cursor_history.previousLocation()
        return {'FINISHED'}
class VIEW3D_OT_cursor_next(bpy.types.Operator):
    """Next cursor location"""
    bl_idname = "view3d.cursor_next"
    bl_label = "Next cursor location"
    bl_options = {'REGISTER'}

    def modal(self, context, event):
        # No interactive state to manage; finish immediately.
        return {'FINISHED'}

    def execute(self, context):
        # Delegate to the scene-level history tracker.
        context.scene.cursor_history.nextLocation()
        return {'FINISHED'}
class VIEW3D_OT_cursor_history_show(bpy.types.Operator):
    """Show cursor trace"""
    bl_idname = "view3d.cursor_history_show"
    bl_label = "Show cursor trace"
    bl_options = {'REGISTER'}

    def modal(self, context, event):
        return {'FINISHED'}

    def execute(self, context):
        history = context.scene.cursor_history
        history.historyDraw = True
        # Repaint so the trace appears without waiting for user input.
        BlenderFake.forceRedraw()
        return {'FINISHED'}
class VIEW3D_OT_cursor_history_hide(bpy.types.Operator):
    """Hide cursor trace"""
    bl_idname = "view3d.cursor_history_hide"
    bl_label = "Hide cursor trace"
    bl_options = {'REGISTER'}

    def modal(self, context, event):
        return {'FINISHED'}

    def execute(self, context):
        history = context.scene.cursor_history
        history.historyDraw = False
        # Repaint so the trace disappears without waiting for user input.
        BlenderFake.forceRedraw()
        return {'FINISHED'}
class VIEW3D_PT_cursor_history(bpy.types.Panel):
    """Tool-shelf panel with history navigation and the cursor location."""
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_label = "Cursor History"
    bl_options = {'DEFAULT_CLOSED'}
    @classmethod
    def poll(self, context):
        # Display in object or edit mode.
        # NOTE(review): poll() also *records* the current cursor location as
        # a side effect -- this is how new history entries are captured,
        # since poll runs on every redraw.
        cc = context.scene.cursor_history
        cc.addHistoryLocation(CursorAccess.getCursor())
        if (context.area.type == 'VIEW_3D' and
            (context.mode == 'EDIT_MESH'
            or context.mode == 'OBJECT')):
            return 1
        return 0
    def draw_header(self, context):
        # Eye icon in the header toggles drawing of the viewport trace.
        layout = self.layout
        cc = context.scene.cursor_history
        if cc.historyDraw:
            GUI.drawIconButton(True, layout, 'RESTRICT_VIEW_OFF', "view3d.cursor_history_hide", False)
        else:
            GUI.drawIconButton(True, layout, 'RESTRICT_VIEW_ON' , "view3d.cursor_history_show", False)
    def draw(self, context):
        """Draw back/forward buttons and the editable cursor location."""
        layout = self.layout
        sce = context.scene
        cc = context.scene.cursor_history
        row = layout.row()
        row.label("Navigation: ")
        # Back button is enabled only when there is an earlier entry.
        GUI.drawIconButton(cc.historyPosition[0]>0, row, 'PLAY_REVERSE', "view3d.cursor_previous")
        #if(cc.historyPosition[0]<0):
            #row.label(" -- ")
        #else:
            #row.label(" "+str(cc.historyPosition[0])+" ")
        # Forward button is enabled only when a redo tail exists.
        GUI.drawIconButton(cc.historyPosition[0]<len(cc.historyLocation)-1, row, 'PLAY', "view3d.cursor_next")
        row = layout.row()
        col = row.column()
        col.prop(CursorAccess.findSpace(), "cursor_location")
class VIEW3D_PT_cursor_history_init(bpy.types.Panel):
    """Invisible helper panel whose poll() installs the draw callback once.

    NOTE(review): registering a draw handler from poll() is a workaround --
    panels are polled on every redraw, so this fires as soon as a 3D view
    exists; poll() always returns False so the panel never shows.
    """
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_label = "Register callback"
    bl_options = {'DEFAULT_CLOSED'}
    # Set True once the draw callback has been installed.
    initDone = False
    # Handle returned by draw_handler_add, needed for later removal.
    _handle = None
    @staticmethod
    def handle_add(self, context):
        # Install cursor_history_draw as a 2D (POST_PIXEL) viewport callback.
        VIEW3D_PT_cursor_history_init._handle = bpy.types.SpaceView3D.draw_handler_add(
            cursor_history_draw, (self, context), 'WINDOW', 'POST_PIXEL')
    @staticmethod
    def handle_remove():
        # Safe to call repeatedly; only removes a live handle.
        if VIEW3D_PT_cursor_history_init._handle is not None:
            bpy.types.SpaceView3D.draw_handler_remove(VIEW3D_PT_cursor_history_init._handle, 'WINDOW')
            VIEW3D_PT_cursor_history_init._handle = None
    @classmethod
    def poll(cls, context):
        # Run only until the callback is registered.
        if VIEW3D_PT_cursor_history_init.initDone:
            return False
        print ("Cursor History draw-callback registration...")
        sce = context.scene
        if context.area.type == 'VIEW_3D':
            VIEW3D_PT_cursor_history_init.handle_add(cls, context)
            VIEW3D_PT_cursor_history_init.initDone = True
            print ("Cursor History draw-callback registered")
            # Unregister to prevent double registration...
            # Started to fail after v2.57
            # bpy.types.unregister(VIEW3D_PT_cursor_history_init)
        else:
            print("View3D not found, cannot run operator")
        return False
    def draw_header(self, context):
        pass
    def draw(self, context):
        pass
def cursor_history_draw(cls, context):
    """Draw the recent cursor trace as a 2D line strip (POST_PIXEL callback).

    Entries before the current position are black, redo entries are red.
    """
    history = context.scene.cursor_history
    if not getattr(history, "historyDraw", 0):
        return
    bgl.glEnable(bgl.GL_BLEND)
    bgl.glShadeModel(bgl.GL_FLAT)
    alpha = 1 - PHI_INV
    # History Trace
    if history.historyPosition[0] < 0:
        return
    half_window = int(history.historyWindow / 2)
    bgl.glBegin(bgl.GL_LINE_STRIP)
    drawn = 0
    for step in range(history.historyWindow + 1):
        offset = step - half_window
        index = history.historyPosition[0] + offset
        if index < 0 or index >= len(history.historyLocation):
            continue
        point = region3d_get_2d_coordinates(context, history.historyLocation[index])
        if offset <= 0:
            bgl.glColor4f(0, 0, 0, alpha)  # current and earlier: black
        else:
            bgl.glColor4f(1, 0, 0, alpha)  # redo tail: red
        bgl.glVertex2f(point[0], point[1])
        drawn = drawn + 1
    bgl.glEnd()
| gpl-2.0 |
txemi/ansible | lib/ansible/modules/network/avi/avi_applicationpersistenceprofile.py | 28 | 5571 | #!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
# Avi Version: 16.3.8
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_applicationpersistenceprofile
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of ApplicationPersistenceProfile Avi RESTful Object
description:
- This module is used to configure ApplicationPersistenceProfile object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.3"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
app_cookie_persistence_profile:
description:
- Specifies the application cookie persistence profile parameters.
description:
description:
- User defined description for the object.
hdr_persistence_profile:
description:
- Specifies the custom http header persistence profile parameters.
http_cookie_persistence_profile:
description:
- Specifies the http cookie persistence profile parameters.
ip_persistence_profile:
description:
- Specifies the client ip persistence profile parameters.
name:
description:
- A user-friendly name for the persistence profile.
required: true
persistence_type:
description:
- Method used to persist clients to the same server for a duration of time or a session.
- Default value when not specified in API or module is interpreted by Avi Controller as PERSISTENCE_TYPE_CLIENT_IP_ADDRESS.
required: true
server_hm_down_recovery:
description:
- Specifies behavior when a persistent server has been marked down by a health monitor.
- Default value when not specified in API or module is interpreted by Avi Controller as HM_DOWN_PICK_NEW_SERVER.
tenant_ref:
description:
- It is a reference to an object of type tenant.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Uuid of the persistence profile.
extends_documentation_fragment:
- avi
'''
EXAMPLES = '''
- name: Create an Application Persistence setting using http cookie.
avi_applicationpersistenceprofile:
controller: ''
username: ''
password: ''
http_cookie_persistence_profile:
always_send_cookie: false
cookie_name: My-HTTP
key:
- aes_key: ShYGZdMks8j6Bpvm2sCvaXWzvXms2Z9ob+TTjRy46lQ=
name: c1276819-550c-4adf-912d-59efa5fd7269
- aes_key: OGsyVk84VCtyMENFOW0rMnRXVnNrb0RzdG5mT29oamJRb0dlbHZVSjR1az0=
name: a080de57-77c3-4580-a3ea-e7a6493c14fd
- aes_key: UVN0cU9HWmFUM2xOUzBVcmVXaHFXbnBLVUUxMU1VSktSVU5HWjJOWmVFMTBUMUV4UmxsNk4xQmFZejA9
name: 60478846-33c6-484d-868d-bbc324fce4a5
timeout: 15
name: My-HTTP-Cookie
persistence_type: PERSISTENCE_TYPE_HTTP_COOKIE
server_hm_down_recovery: HM_DOWN_PICK_NEW_SERVER
tenant_ref: Demo
'''
RETURN = '''
obj:
description: ApplicationPersistenceProfile (api/applicationpersistenceprofile) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
    """Ansible entry point: manage an Avi ApplicationPersistenceProfile."""
    # Module-specific options; common Avi connection options are merged in.
    argument_specs = {
        'state': {'default': 'present', 'choices': ['absent', 'present']},
        'app_cookie_persistence_profile': {'type': 'dict'},
        'description': {'type': 'str'},
        'hdr_persistence_profile': {'type': 'dict'},
        'http_cookie_persistence_profile': {'type': 'dict'},
        'ip_persistence_profile': {'type': 'dict'},
        'name': {'type': 'str', 'required': True},
        'persistence_type': {'type': 'str', 'required': True},
        'server_hm_down_recovery': {'type': 'str'},
        'tenant_ref': {'type': 'str'},
        'url': {'type': 'str'},
        'uuid': {'type': 'str'},
    }
    argument_specs.update(avi_common_argument_spec())
    module = AnsibleModule(
        argument_spec=argument_specs, supports_check_mode=True)
    # Bail out early when the Avi SDK import at module load time failed.
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=16.3.5.post1) is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    return avi_ansible_api(module, 'applicationpersistenceprofile',
                           set([]))
if __name__ == '__main__':
main()
| gpl-3.0 |
gyurisc/stackjobs | clean_data.py | 1 | 1758 | # Ad-hoc fixing of mongo database
# One-shot maintenance script (Python 2): backfills missing 'date' and
# 'weeknum' fields on the 'jobs' collection and strips stray whitespace
# from string fields.  Connects to a local MongoDB.
from datetime import datetime
import pymongo
client = pymongo.MongoClient('localhost', 27017)
db = client['stackoverflow']
jobs = db['jobs']
# total jobs
total_jobs = jobs.count()
print "Total jobs: %s" % total_jobs
print "=== Fixing Date Stamp ==="
# Sentinel date written into documents that have no 'date' at all.
date_stamp = datetime(2016, 6, 1, 7, 01, 01)
jobs.update_many({ "date" : { "$exists" : False}}, {"$set" : {"date" : date_stamp}})
# Sanity check: after the update_many above this should count zero.
count = 0
for job in jobs.find( { "date" : { "$exists" : False}}):
    count = count + 1
    # print(job)
print "=== Fixing Date Stamp ==="
print "Number of jobs with no date is %s." % count
# Count how many documents ended up with the sentinel date.
count = 0
for job in jobs.find( { "date" : date_stamp}):
    count = count + 1
    # print(job)
print "Number of jobs with default date is %s." % count
# Week number
print "=== Fixing Week Number ==="
wkcount = jobs.find( {"weeknum" : {"$exists" : True}}).count()
print "Week number exists with %s and missing for %s jobs." % (wkcount, total_jobs - wkcount)
# NOTE(review): this loop assumes job["date"] is a 'YYYY-MM-DD' string,
# while the sentinel stored above is a datetime -- strptime would raise on
# such documents; verify the collection's actual field types.
for job in jobs.find({"weeknum" : {"$exists": False}}):
    d = datetime.strptime(job["date"], '%Y-%m-%d')
    wk = d.isocalendar()[1]
    jobs.update({"_id" : job["_id"]}, {"$set" : {"weeknum" : wk}})
# Employee and Location Whitespace
print "=== Fixing Employee & Location ==="
print "Striping strings from white space in employer and location strings"
for job in jobs.find():
    _emp = job["employer"].strip()
    _loc = job["location"].strip()
    jobs.update({"_id" : job["_id"]}, {"$set" : {"employer" : _emp, "location" : _loc}})
print "Stripping strings from whitespace where salary exists"
for job in jobs.find({ "salary" : { "$exists" : True }}):
    _salary = job["salary"].strip()
    jobs.update({"_id" : job["_id"]}, {"$set" : {"salary" : _salary}})
| mit |
darren-wang/gl | glance/api/v1/controller.py | 8 | 3895 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glance_store as store
from oslo_log import log as logging
import webob.exc
from glance.common import exception
from glance import i18n
import glance.registry.client.v1.api as registry
LOG = logging.getLogger(__name__)
_ = i18n._
class BaseController(object):
    """Shared helpers for Images API v1 controllers."""

    def get_image_meta_or_404(self, request, image_id):
        """
        Grabs the image metadata for an image with a supplied
        identifier or raises an HTTPNotFound (404) response

        :param request: The WSGI/Webob Request object
        :param image_id: The opaque image identifier

        :raises HTTPNotFound if image does not exist
        :raises HTTPForbidden if the context may not access the image
        """
        context = request.context
        try:
            return registry.get_image_metadata(context, image_id)
        except exception.NotFound:
            msg = "Image with identifier %s not found" % image_id
            LOG.debug(msg)
            raise webob.exc.HTTPNotFound(
                msg, request=request, content_type='text/plain')
        except exception.Forbidden:
            msg = "Forbidden image access"
            LOG.debug(msg)
            raise webob.exc.HTTPForbidden(msg,
                                          request=request,
                                          content_type='text/plain')

    def get_active_image_meta_or_error(self, request, image_id):
        """
        Same as get_image_meta_or_404 except that it will raise a 403 if the
        image is deactivated or 404 if the image is otherwise not 'active'.
        """
        image = self.get_image_meta_or_404(request, image_id)
        if image['status'] == 'deactivated':
            msg = "Image %s is deactivated" % image_id
            LOG.debug(msg)
            msg = _("Image %s is deactivated") % image_id
            # Bug fix: content_type was previously the invalid 'type/plain'.
            raise webob.exc.HTTPForbidden(
                msg, request=request, content_type='text/plain')
        if image['status'] != 'active':
            msg = "Image %s is not active" % image_id
            LOG.debug(msg)
            msg = _("Image %s is not active") % image_id
            raise webob.exc.HTTPNotFound(
                msg, request=request, content_type='text/plain')
        return image

    def update_store_acls(self, req, image_id, location_uri, public=False):
        """Propagate image membership to the backend store's ACLs.

        Members with can_share get write access; other members get read
        access.  A missing store scheme results in a 400 response.
        """
        if location_uri:
            try:
                read_tenants = []
                write_tenants = []
                members = registry.get_image_members(req.context, image_id)
                if members:
                    for member in members:
                        if member['can_share']:
                            write_tenants.append(member['member_id'])
                        else:
                            read_tenants.append(member['member_id'])
                store.set_acls(location_uri, public=public,
                               read_tenants=read_tenants,
                               write_tenants=write_tenants,
                               context=req.context)
            except store.UnknownScheme:
                msg = _("Store for image_id not found: %s") % image_id
                raise webob.exc.HTTPBadRequest(explanation=msg,
                                               request=req,
                                               content_type='text/plain')
| apache-2.0 |
hhbyyh/spark | python/pyspark/shell.py | 37 | 2333 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
An interactive shell.
This file is designed to be launched as a PYTHONSTARTUP script.
"""
import atexit
import os
import platform
import warnings
import py4j
from pyspark import SparkConf
from pyspark.context import SparkContext
from pyspark.sql import SparkSession, SQLContext
# Honor an executor URI handed down by the launcher, if any.
if os.environ.get("SPARK_EXECUTOR_URI"):
    SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])
SparkContext._ensure_initialized()
try:
    spark = SparkSession._create_shell_session()
except Exception:
    import sys
    import traceback
    warnings.warn("Failed to initialize Spark session.")
    traceback.print_exc(file=sys.stderr)
    sys.exit(1)
# Convenience bindings exposed to the interactive shell user.
sc = spark.sparkContext
sql = spark.sql
# Shut the context down cleanly when the interpreter exits.
atexit.register(lambda: sc.stop())
# for compatibility
sqlContext = spark._wrapped
sqlCtx = sqlContext
print(r"""Welcome to
____ __
/ __/__ ___ _____/ /__
_\ \/ _ \/ _ `/ __/ '_/
/__ / .__/\_,_/_/ /_/\_\ version %s
/_/
""" % sc.version)
print("Using Python version %s (%s, %s)" % (
platform.python_version(),
platform.python_build()[0],
platform.python_build()[1]))
print("SparkSession available as 'spark'.")
# The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
# which allows us to execute the user's PYTHONSTARTUP file:
_pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')
if _pythonstartup and os.path.isfile(_pythonstartup):
    with open(_pythonstartup) as f:
        # Compile with the real filename so tracebacks point at the user's file.
        code = compile(f.read(), _pythonstartup, 'exec')
        exec(code)
| apache-2.0 |
seem-sky/kbengine | kbe/src/lib/python/Lib/ctypes/test/test_funcptr.py | 92 | 3911 | import os, unittest
from ctypes import *
try:
WINFUNCTYPE
except NameError:
# fake to enable this test on Linux
WINFUNCTYPE = CFUNCTYPE
import _ctypes_test
lib = CDLL(_ctypes_test.__file__)
class CFuncPtrTestCase(unittest.TestCase):
    """Tests for ctypes function-pointer prototypes and callbacks."""
    def test_basic(self):
        # A prototype records restype/argtypes and instances are pointer-sized.
        X = WINFUNCTYPE(c_int, c_int, c_int)
        def func(*args):
            return len(args)
        x = X(func)
        self.assertEqual(x.restype, c_int)
        self.assertEqual(x.argtypes, (c_int, c_int))
        self.assertEqual(sizeof(x), sizeof(c_voidp))
        self.assertEqual(sizeof(X), sizeof(c_voidp))
    def test_first(self):
        # stdcall vs cdecl calling conventions for Python callbacks.
        StdCallback = WINFUNCTYPE(c_int, c_int, c_int)
        CdeclCallback = CFUNCTYPE(c_int, c_int, c_int)
        def func(a, b):
            return a + b
        s = StdCallback(func)
        c = CdeclCallback(func)
        self.assertEqual(s(1, 2), 3)
        self.assertEqual(c(1, 2), 3)
        # The following no longer raises a TypeError - it is now
        # possible, as in C, to call cdecl functions with more parameters.
        #self.assertRaises(TypeError, c, 1, 2, 3)
        self.assertEqual(c(1, 2, 3, 4, 5, 6), 3)
        # stdcall remains strict about the argument count (not on Windows CE,
        # and not when WINFUNCTYPE is just an alias for CFUNCTYPE).
        if not WINFUNCTYPE is CFUNCTYPE and os.name != "ce":
            self.assertRaises(TypeError, s, 1, 2, 3)
    def test_structures(self):
        # Function pointers as structure fields, WNDCLASS-style.
        WNDPROC = WINFUNCTYPE(c_long, c_int, c_int, c_int, c_int)
        def wndproc(hwnd, msg, wParam, lParam):
            return hwnd + msg + wParam + lParam
        HINSTANCE = c_int
        HICON = c_int
        HCURSOR = c_int
        LPCTSTR = c_char_p
        class WNDCLASS(Structure):
            _fields_ = [("style", c_uint),
                        ("lpfnWndProc", WNDPROC),
                        ("cbClsExtra", c_int),
                        ("cbWndExtra", c_int),
                        ("hInstance", HINSTANCE),
                        ("hIcon", HICON),
                        ("hCursor", HCURSOR),
                        ("lpszMenuName", LPCTSTR),
                        ("lpszClassName", LPCTSTR)]
        wndclass = WNDCLASS()
        wndclass.lpfnWndProc = WNDPROC(wndproc)
        WNDPROC_2 = WINFUNCTYPE(c_long, c_int, c_int, c_int, c_int)
        # This is no longer true, now that WINFUNCTYPE caches created types internally.
        ## # CFuncPtr subclasses are compared by identity, so this raises a TypeError:
        ## self.assertRaises(TypeError, setattr, wndclass,
        ## "lpfnWndProc", WNDPROC_2(wndproc))
        # instead:
        self.assertIs(WNDPROC, WNDPROC_2)
        # 'wndclass.lpfnWndProc' leaks 94 references. Why?
        self.assertEqual(wndclass.lpfnWndProc(1, 2, 3, 4), 10)
        f = wndclass.lpfnWndProc
        del wndclass
        del wndproc
        # The structure kept the callback alive: the saved pointer still works.
        self.assertEqual(f(10, 11, 12, 13), 46)
    def test_dllfunctions(self):
        def NoNullHandle(value):
            if not value:
                raise WinError()
            return value
        strchr = lib.my_strchr
        strchr.restype = c_char_p
        strchr.argtypes = (c_char_p, c_char)
        self.assertEqual(strchr(b"abcdefghi", b"b"), b"bcdefghi")
        self.assertEqual(strchr(b"abcdefghi", b"x"), None)
        strtok = lib.my_strtok
        strtok.restype = c_char_p
        # Neither of this does work: strtok changes the buffer it is passed
        ## strtok.argtypes = (c_char_p, c_char_p)
        ## strtok.argtypes = (c_string, c_char_p)
        def c_string(init):
            # Mutable, NUL-terminated char buffer (strtok writes into it).
            size = len(init) + 1
            return (c_char*size)(*init)
        s = b"a\nb\nc"
        b = c_string(s)
        ## b = (c_char * (len(s)+1))()
        ## b.value = s
        ## b = c_string(s)
        self.assertEqual(strtok(b, b"\n"), b"a")
        self.assertEqual(strtok(None, b"\n"), b"b")
        self.assertEqual(strtok(None, b"\n"), b"c")
        self.assertEqual(strtok(None, b"\n"), None)
if __name__ == '__main__':
unittest.main()
| lgpl-3.0 |
nginx/unit | test/test_respawn.py | 1 | 3110 | import re
import subprocess
import time
from unit.applications.lang.python import TestApplicationPython
from unit.option import option
class TestRespawn(TestApplicationPython):
    """Verify Unit respawns router/controller/application processes after a kill."""
    prerequisites = {'modules': {'python': 'any'}}
    # Command-line patterns used to locate Unit's processes in ps output.
    PATTERN_ROUTER = 'unit: router'
    PATTERN_CONTROLLER = 'unit: controller'
    def setup_method(self):
        # Unique app name per temp dir so ps output can be matched reliably.
        self.app_name = "app-" + option.temp_dir.split('/')[-1]
        self.load('empty', self.app_name)
        assert 'success' in self.conf(
            '1', 'applications/' + self.app_name + '/processes'
        )
    def pid_by_name(self, name, ppid):
        """Return the pid of the child of *ppid* whose command matches *name*."""
        output = subprocess.check_output(['ps', 'ax', '-O', 'ppid']).decode()
        m = re.search(r'\s*(\d+)\s*' + str(ppid) + r'.*' + name, output)
        return None if m is None else m.group(1)
    def kill_pids(self, *pids):
        # SIGKILL so the daemon cannot intercept the shutdown.
        subprocess.call(['kill', '-9'] + list(pids))
    def wait_for_process(self, process, unit_pid):
        """Poll up to ~5s for *process* to reappear under *unit_pid*."""
        for i in range(50):
            found = self.pid_by_name(process, unit_pid)
            if found is not None:
                break
            time.sleep(0.1)
        return found
    def find_proc(self, name, ppid, ps_output):
        """Return all ps lines for children of *ppid* matching *name*."""
        return re.findall(str(ppid) + r'.*' + name, ps_output)
    def smoke_test(self, unit_pid):
        """Assert the daemon is functional again after a respawn."""
        # Reconfiguration may fail briefly while the controller restarts.
        for _ in range(10):
            r = self.conf('1', 'applications/' + self.app_name + '/processes')
            if 'success' in r:
                break
            time.sleep(0.1)
        assert 'success' in r
        assert self.get()['status'] == 200
        # Check if the only one router, controller,
        # and application processes running.
        out = subprocess.check_output(['ps', 'ax', '-O', 'ppid']).decode()
        assert len(self.find_proc(self.PATTERN_ROUTER, unit_pid, out)) == 1
        assert len(self.find_proc(self.PATTERN_CONTROLLER, unit_pid, out)) == 1
        assert len(self.find_proc(self.app_name, unit_pid, out)) == 1
    def test_respawn_router(self, skip_alert, unit_pid, skip_fds_check):
        skip_fds_check(router=True)
        pid = self.pid_by_name(self.PATTERN_ROUTER, unit_pid)
        self.kill_pids(pid)
        skip_alert(r'process %s exited on signal 9' % pid)
        assert self.wait_for_process(self.PATTERN_ROUTER, unit_pid) is not None
        self.smoke_test(unit_pid)
    def test_respawn_controller(self, skip_alert, unit_pid, skip_fds_check):
        skip_fds_check(controller=True)
        pid = self.pid_by_name(self.PATTERN_CONTROLLER, unit_pid)
        self.kill_pids(pid)
        skip_alert(r'process %s exited on signal 9' % pid)
        assert (
            self.wait_for_process(self.PATTERN_CONTROLLER, unit_pid)
            is not None
        )
        assert self.get()['status'] == 200
        self.smoke_test(unit_pid)
    def test_respawn_application(self, skip_alert, unit_pid):
        pid = self.pid_by_name(self.app_name, unit_pid)
        self.kill_pids(pid)
        skip_alert(r'process %s exited on signal 9' % pid)
        assert self.wait_for_process(self.app_name, unit_pid) is not None
        self.smoke_test(unit_pid)
| apache-2.0 |
cynapse/cynin | src/ubify.coretypes/ubify/coretypes/content/newsitem.py | 5 | 2433 | ###############################################################################
#cyn.in is an open source Collaborative Knowledge Management Appliance that
#enables teams to seamlessly work together on files, documents and content in
#a secure central environment.
#
#cyn.in v2 an open source appliance is distributed under the GPL v3 license
#along with commercial support options.
#
#cyn.in is a Cynapse Invention.
#
#Copyright (C) 2008 Cynapse India Pvt. Ltd.
#
#This program is free software: you can redistribute it and/or modify it under
#the terms of the GNU General Public License as published by the Free Software
#Foundation, either version 3 of the License, or any later version and observe
#the Additional Terms applicable to this program and must display appropriate
#legal notices. In accordance with Section 7(b) of the GNU General Public
#License version 3, these Appropriate Legal Notices must retain the display of
#the "Powered by cyn.in" AND "A Cynapse Invention" logos. You should have
#received a copy of the detailed Additional Terms License with this program.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
#Public License for more details.
#
#You should have received a copy of the GNU General Public License along with
#this program. If not, see <http://www.gnu.org/licenses/>.
#
#You can contact Cynapse at support@cynapse.com with any problems with cyn.in.
#For any queries regarding the licensing, please send your mails to
# legal@cynapse.com
#
#You can also contact Cynapse at:
#802, Building No. 1,
#Dheeraj Sagar, Malad(W)
#Mumbai-400064, India
###############################################################################
from Products.Archetypes.atapi import *
from Products.ATContentTypes.content.newsitem \
import ATNewsItem as BaseClass
from Products.ATContentTypes.content.newsitem \
import ATNewsItemSchema as DefaultSchema
from Products.ATContentTypes.content.base import registerATCT
from ubify.coretypes.config import PROJECTNAME
# Start from a copy of the stock ATNewsItem schema so local customizations
# never mutate the original Products.ATContentTypes schema object.
schema = DefaultSchema.copy()
class ATNewsItem(BaseClass):
    # Same portal type and name as the stock class; only the schema copy
    # differs, making the type customizable by this product.
    __doc__ = BaseClass.__doc__ + "(customizable version)"
    portal_type = BaseClass.portal_type
    archetype_name = BaseClass.archetype_name
    schema = schema
registerATCT(ATNewsItem, PROJECTNAME)
| gpl-3.0 |
rodo/ansible-tsung | ec2tool.py | 1 | 5117 | #!/usr/bin/env python
import boto.ec2
import jinja2
import sys
import json
import yaml
class Tsing(boto.ec2.instance.Instance):
    """EC2 Instance subclass exposing the short (first-label) private hostname."""

    def shortname(self):
        """Return the first label of the private DNS name."""
        label, _, _ = self.private_dns_name.partition('.')
        return label

    @property
    def private_short_name(self):
        """First label of the private DNS name, as a read-only property."""
        label, _, _ = self.private_dns_name.partition('.')
        return label
def get_specs(instance, region, data):
    """Return the size specification of *instance* in *region*.

    instance (string) : instance type name, e.g. "m1.small"
    region (string) : the region name
    data (dict) : parsed AWS pricing document
    """
    region_data = get_data_region(region, data)
    return get_instance(instance, region_data)
def get_instance(instance, data):
    """Look up the spec dict for the given instance size name.

    instance (string) : size name, e.g. "m1.small"
    data (dict) : a pricing region entry holding 'instanceTypes'
    Returns the matching size dict, or None when not found.
    """
    found = None
    for instance_type in data['instanceTypes']:
        # First match within each type; a later type's match wins overall,
        # mirroring the original scan order.
        matches = [s for s in instance_type['sizes'] if s['size'] == instance]
        if matches:
            found = matches[0]
    return found
def get_data_region(region, data):
    """Return the pricing entry matching the given EC2 region name.

    region (string) : boto-style region name, e.g. "eu-west-1"
    data (dict) : parsed AWS pricing document
    Returns the matching region dict, or None when absent.
    """
    # boto region names -> names used inside the pricing document
    pricing_names = {"us-east-1": "us-east",
                     "us-west-1": "us-west",
                     "us-west-2": "us-west-2",
                     "eu-west-1": "eu-ireland",
                     "ap-southeast-1": "apac-sin",
                     "ap-southeast-2": "apac-syd",
                     "ap-northeast-1": "apac-tokyo",
                     "sa-east-1": "sa-east-1"}
    for entry in data['config']['regions']:
        if entry['region'] == pricing_names[region]:
            return entry
    return None
def write_nodes(controller, injectors, data):
    """
    Write playbooks/roles/tsung/vars/nodes.yml describing the cluster.

    controller : controller instance (Tsing)
    injectors : list of injector instances (Tsing)
    data (dict) : parsed AWS pricing document, used for per-type CPU counts

    NOTE(review): 'region' below is read from module scope (set in the
    __main__ block), not passed as a parameter.
    """
    hosts = open("playbooks/roles/tsung/vars/nodes.yml", 'w')
    hosts.write("---\n")
    contr_str = "controller: { private_dns_name: '%s', private_ip_address: '%s', private_short_name: '%s' }\n\n"
    hosts.write(contr_str % (controller.private_dns_name,
                             controller.private_ip_address,
                             controller.private_short_name))
    hosts.write("injectors:\n")
    for injec in injectors:
        # Debug dump of the raw instance attributes.
        print injec.__dict__
        specs = get_specs(injec.instance_type, region, data)
        injector = {"private_dns_name": str(injec.private_dns_name),
                    "private_ip_address": str(injec.private_ip_address),
                    "private_short_name": str(injec.private_short_name),
                    "instance_type": str(injec.instance_type),
                    "cpu": int(specs['vCPU'])}
        hosts.write("  - {}".format(yaml.dump(injector, encoding='utf-8')))
    hosts.close()
def instance_weights(injectors, region, data):
    """Compute an integer load weight per injector, proportional to its RAM.

    The smallest instance gets weight 1; larger ones get the rounded ratio
    of their memory to the smallest.  Returns {injector id: weight}.
    """
    memories = [float(get_specs(injector['instance_type'], region, data)['memoryGiB'])
                for injector in injectors]
    smallest = min(memories)
    assw = {}
    for injector, memory in zip(injectors, memories):
        assw[injector['id']] = int(round(memory / smallest))
    return assw
def parse_instances(instances):
    """Split reservations into (controller, injectors).

    Only running instances are considered; the one tagged
    tsung_role=controller becomes the controller (last one wins), every
    other running instance is an injector.  Only the first instance of
    each reservation is examined.
    """
    controller = None
    injectors = []
    for reservation in instances:
        inst = reservation.instances[0]
        # Graft the helper accessors onto the boto instance object.
        inst.__class__ = Tsing
        if inst.state != 'running':
            continue
        if inst.tags.get('tsung_role') == 'controller':
            controller = inst
        else:
            injectors.append(inst)
    return controller, injectors
def cloud_connect(region):
    """
    Connect on cloud

    region (string) : EC2 region name, e.g. "eu-west-1"
    Returns a boto EC2 connection; credentials come from the boto
    configuration/environment.
    """
    print "connect on {}...".format(region)
    conn = boto.ec2.connect_to_region(region)
    return conn
def write_ini(injectors, controller):
    """Render cluster.j2 into the ansible inventory file cluster.ini.

    injectors (list) : injector instances exposed to the template
    controller : controller instance exposed to the template
    """
    templateLoader = jinja2.FileSystemLoader(searchpath=".")
    templateEnv = jinja2.Environment(loader=templateLoader)
    templateVars = {"injectors": injectors,
                    "controller": controller}
    #
    # Configure the cluster
    #
    template = templateEnv.get_template("cluster.j2")
    # 'with' guarantees the file is closed even if rendering raises
    # (the original leaked the handle on a template error).
    with open("cluster.ini", 'w') as clients:
        clients.write(template.render(templateVars))
if __name__ == "__main__":
    # Usage: ec2tool.py REGION
    try:
        region = sys.argv[1]
    except:
        print "usage : ec2tool.py REGI0N"
        sys.exit(1)
    conn = cloud_connect(region)
    print "connected"
    instances = conn.get_all_instances()
    controller, injectors = parse_instances(instances)
    print "found\n {} injectors".format(len(injectors))
    # A controller is mandatory: without one the cluster cannot be driven.
    if controller is None:
        print "ERROR didn't found any controller"
        sys.exit(1)
    else:
        print " controller : tsung@{} ".format(controller.ip_address)
    #
    #
    # AWS on-demand pricing document, used to size injector weights/CPUs.
    with open("linux-od.json") as data_file:
        data = json.load(data_file)
    #
    #
    write_nodes(controller, injectors, data)
    write_ini(injectors, controller)
    #
    print 'ansible-playbook -i cluster.ini -u ubuntu playbooks/tsung.yml'
| gpl-3.0 |
marcuschia/ShaniXBMCWork | plugin.video.ZemTV-shani/pyaes.py | 33 | 17162 | """Simple AES cipher implementation in pure Python following PEP-272 API
Homepage: https://bitbucket.org/intgr/pyaes/
The goal of this module is to be as fast as reasonable in Python while still
being Pythonic and readable/understandable. It is licensed under the permissive
MIT license.
Hopefully the code is readable and commented enough that it can serve as an
introduction to the AES cipher for Python coders. In fact, it should go along
well with the Stick Figure Guide to AES:
http://www.moserware.com/2009/09/stick-figure-guide-to-advanced.html
Contrary to intuition, this implementation numbers the 4x4 matrices from top to
bottom for efficiency reasons::
0 4 8 12
1 5 9 13
2 6 10 14
3 7 11 15
Effectively it's the transposition of what you'd expect. This actually makes
the code simpler -- except the ShiftRows step, but hopefully the explanation
there clears it up.
"""
####
# Copyright (c) 2010 Marti Raudsepp <marti@juffo.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
####
from array import array
# Globals mandated by PEP 272:
# http://www.python.org/dev/peps/pep-0272/
MODE_ECB = 1
MODE_CBC = 2
#MODE_CTR = 6
block_size = 16
key_size = None
def new(key, mode, IV=None):
    """Create an AES cipher wrapped in the requested block mode (PEP-272 API).

    key  -- 16, 24 or 32 byte key string (AES-128/192/256)
    mode -- MODE_ECB or MODE_CBC (module-level constants)
    IV   -- 16-byte initialization vector, required for CBC mode

    Raises ValueError for CBC without an IV, NotImplementedError for
    unsupported modes.
    """
    if mode == MODE_ECB:
        return ECBMode(AES(key))
    elif mode == MODE_CBC:
        # CBC cannot start chaining without an initialization vector.
        # Parenthesized raise form is equivalent on Python 2 and also
        # valid on Python 3, unlike the old ``raise X, "msg"`` statement.
        if IV is None:
            raise ValueError("CBC mode needs an IV value!")
        return CBCMode(AES(key), IV)
    else:
        raise NotImplementedError
#### AES cipher implementation
class AES(object):
    """Pure-Python AES block cipher (Rijndael with a 128-bit block).

    Blocks are 16-byte mutable array('B') objects; round steps mutate the
    state in place instead of returning new values, for speed.
    """
    block_size = 16
    def __init__(self, key):
        """key must be a 16, 24 or 32 byte string (AES-128/192/256)."""
        self.setkey(key)
    def setkey(self, key):
        """Sets the key and performs key expansion."""
        self.key = key
        self.key_size = len(key)
        # the number of rounds is fixed by the key size (FIPS-197)
        if self.key_size == 16:
            self.rounds = 10
        elif self.key_size == 24:
            self.rounds = 12
        elif self.key_size == 32:
            self.rounds = 14
        else:
            raise ValueError, "Key length must be 16, 24 or 32 bytes"
        self.expand_key()
    def expand_key(self):
        """Performs AES key expansion on self.key and stores in self.exkey"""
        # The key schedule specifies how parts of the key are fed into the
        # cipher's round functions. "Key expansion" means performing this
        # schedule in advance. Almost all implementations do this.
        #
        # Here's a description of AES key schedule:
        # http://en.wikipedia.org/wiki/Rijndael_key_schedule
        # The expanded key starts with the actual key itself
        exkey = array('B', self.key)
        # extra key expansion steps per cycle, depending on key size
        if self.key_size == 16:
            extra_cnt = 0
        elif self.key_size == 24:
            extra_cnt = 2
        else:
            extra_cnt = 3
        # 4-byte temporary variable for key expansion
        word = exkey[-4:]
        # Each expansion cycle uses 'i' once for Rcon table lookup
        for i in xrange(1, 11):
            #### key schedule core:
            # left-rotate by 1 byte
            word = word[1:4] + word[0:1]
            # apply S-box to all bytes
            for j in xrange(4):
                word[j] = aes_sbox[word[j]]
            # apply the Rcon table to the leftmost byte
            word[0] = word[0] ^ aes_Rcon[i]
            #### end key schedule core
            for z in xrange(4):
                for j in xrange(4):
                    # mix in bytes from the last subkey
                    word[j] ^= exkey[-self.key_size + j]
                exkey.extend(word)
            # Last key expansion cycle always finishes here
            if len(exkey) >= (self.rounds+1) * self.block_size:
                break
            # Special substitution step for 256-bit key
            if self.key_size == 32:
                for j in xrange(4):
                    # mix in bytes from the last subkey XORed with S-box of
                    # current word bytes
                    word[j] = aes_sbox[word[j]] ^ exkey[-self.key_size + j]
                exkey.extend(word)
            # Twice for 192-bit key, thrice for 256-bit key
            for z in xrange(extra_cnt):
                for j in xrange(4):
                    # mix in bytes from the last subkey
                    word[j] ^= exkey[-self.key_size + j]
                exkey.extend(word)
        self.exkey = exkey
    def add_round_key(self, block, round):
        """AddRoundKey step in AES. This is where the key is mixed into plaintext"""
        # each round consumes 16 consecutive bytes of the expanded key
        offset = round * 16
        exkey = self.exkey
        for i in xrange(16):
            block[i] ^= exkey[offset + i]
        #print 'AddRoundKey:', block
    def sub_bytes(self, block, sbox):
        """SubBytes step, apply S-box to all bytes
        Depending on whether encrypting or decrypting, a different sbox array
        is passed in.
        """
        for i in xrange(16):
            block[i] = sbox[block[i]]
        #print 'SubBytes   :', block
    def shift_rows(self, b):
        """ShiftRows step. Shifts 2nd row to left by 1, 3rd row by 2, 4th row by 3
        Since we're performing this on a transposed matrix, cells are numbered
        from top to bottom::
          0  4  8 12   ->    0  4  8 12    -- 1st row doesn't change
          1  5  9 13   ->    5  9 13  1    -- row shifted to left by 1 (wraps around)
          2  6 10 14   ->   10 14  2  6    -- shifted by 2
          3  7 11 15   ->   15  3  7 11    -- shifted by 3
        """
        b[1], b[5], b[ 9], b[13] = b[ 5], b[ 9], b[13], b[ 1]
        b[2], b[6], b[10], b[14] = b[10], b[14], b[ 2], b[ 6]
        b[3], b[7], b[11], b[15] = b[15], b[ 3], b[ 7], b[11]
        #print 'ShiftRows  :', b
    def shift_rows_inv(self, b):
        """Similar to shift_rows above, but performed in inverse for decryption."""
        b[ 5], b[ 9], b[13], b[ 1] = b[1], b[5], b[ 9], b[13]
        b[10], b[14], b[ 2], b[ 6] = b[2], b[6], b[10], b[14]
        b[15], b[ 3], b[ 7], b[11] = b[3], b[7], b[11], b[15]
        #print 'ShiftRows  :', b
    def mix_columns(self, block):
        """MixColumns step. Mixes the values in each column"""
        # Cache global multiplication tables (see below)
        mul_by_2 = gf_mul_by_2
        mul_by_3 = gf_mul_by_3
        # Since we're dealing with a transposed matrix, columns are already
        # sequential
        for i in xrange(4):
            col = i * 4
            #v0, v1, v2, v3 = block[col : col+4]
            v0, v1, v2, v3 = (block[col], block[col + 1], block[col + 2],
                              block[col + 3])
            block[col  ] = mul_by_2[v0] ^ v3 ^ v2 ^ mul_by_3[v1]
            block[col+1] = mul_by_2[v1] ^ v0 ^ v3 ^ mul_by_3[v2]
            block[col+2] = mul_by_2[v2] ^ v1 ^ v0 ^ mul_by_3[v3]
            block[col+3] = mul_by_2[v3] ^ v2 ^ v1 ^ mul_by_3[v0]
        #print 'MixColumns :', block
    def mix_columns_inv(self, block):
        """Similar to mix_columns above, but performed in inverse for decryption."""
        # Cache global multiplication tables (see below)
        mul_9  = gf_mul_by_9
        mul_11 = gf_mul_by_11
        mul_13 = gf_mul_by_13
        mul_14 = gf_mul_by_14
        # Since we're dealing with a transposed matrix, columns are already
        # sequential
        for i in xrange(4):
            col = i * 4
            v0, v1, v2, v3 = (block[col], block[col + 1], block[col + 2],
                              block[col + 3])
            #v0, v1, v2, v3 = block[col:col+4]
            block[col  ] = mul_14[v0] ^ mul_9[v3] ^ mul_13[v2] ^ mul_11[v1]
            block[col+1] = mul_14[v1] ^ mul_9[v0] ^ mul_13[v3] ^ mul_11[v2]
            block[col+2] = mul_14[v2] ^ mul_9[v1] ^ mul_13[v0] ^ mul_11[v3]
            block[col+3] = mul_14[v3] ^ mul_9[v2] ^ mul_13[v1] ^ mul_11[v0]
        #print 'MixColumns :', block
    def encrypt_block(self, block):
        """Encrypts a single block. This is the main AES function"""
        # For efficiency reasons, the state between steps is transmitted via a
        # mutable array, not returned.
        self.add_round_key(block, 0)
        for round in xrange(1, self.rounds):
            self.sub_bytes(block, aes_sbox)
            self.shift_rows(block)
            self.mix_columns(block)
            self.add_round_key(block, round)
        self.sub_bytes(block, aes_sbox)
        self.shift_rows(block)
        # no mix_columns step in the last round
        self.add_round_key(block, self.rounds)
    def decrypt_block(self, block):
        """Decrypts a single block. This is the main AES decryption function"""
        # For efficiency reasons, the state between steps is transmitted via a
        # mutable array, not returned.
        self.add_round_key(block, self.rounds)
        # count rounds down from rounds-1 ... 1, applying the inverse steps
        # in reverse order relative to encrypt_block
        for round in xrange(self.rounds-1, 0, -1):
            self.shift_rows_inv(block)
            self.sub_bytes(block, aes_inv_sbox)
            self.add_round_key(block, round)
            self.mix_columns_inv(block)
        self.shift_rows_inv(block)
        self.sub_bytes(block, aes_inv_sbox)
        self.add_round_key(block, 0)
        # no mix_columns step in the last round
#### ECB mode implementation
class ECBMode(object):
    """Electronic CodeBook (ECB) mode encryption.
    Basically this mode applies the cipher function to each block individually;
    no feedback is done. NB! This is insecure for almost all purposes
    """
    def __init__(self, cipher):
        # cipher must expose block_size, encrypt_block() and decrypt_block()
        self.cipher = cipher
        self.block_size = cipher.block_size
    def ecb(self, data, block_func):
        """Perform ECB mode with the given function"""
        if len(data) % self.block_size != 0:
            raise ValueError, "Plaintext length must be multiple of 16"
        block_size = self.block_size
        # work on a mutable byte array; block_func mutates each block in place
        data = array('B', data)
        for offset in xrange(0, len(data), block_size):
            block = data[offset : offset+block_size]
            block_func(block)
            data[offset : offset+block_size] = block
        return data.tostring()
    def encrypt(self, data):
        """Encrypt data in ECB mode"""
        return self.ecb(data, self.cipher.encrypt_block)
    def decrypt(self, data):
        """Decrypt data in ECB mode"""
        return self.ecb(data, self.cipher.decrypt_block)
#### CBC mode
class CBCMode(object):
    """Cipher Block Chaining (CBC) mode encryption. This mode avoids content leaks.
    In CBC encryption, each plaintext block is XORed with the ciphertext block
    preceding it; decryption is simply the inverse.
    """
    # A better explanation of CBC can be found here:
    # http://en.wikipedia.org/wiki/Block_cipher_modes_of_operation#Cipher-block_chaining_.28CBC.29
    def __init__(self, cipher, IV):
        # cipher must expose block_size, encrypt_block() and decrypt_block();
        # IV must be one block (16 bytes) of initial chaining value
        self.cipher = cipher
        self.block_size = cipher.block_size
        self.IV = array('B', IV)
    def encrypt(self, data):
        """Encrypt data in CBC mode"""
        block_size = self.block_size
        if len(data) % block_size != 0:
            raise ValueError, "Plaintext length must be multiple of 16"
        data = array('B', data)
        IV = self.IV
        for offset in xrange(0, len(data), block_size):
            block = data[offset : offset+block_size]
            # Perform CBC chaining: XOR plaintext with previous ciphertext
            for i in xrange(block_size):
                block[i] ^= IV[i]
            self.cipher.encrypt_block(block)
            data[offset : offset+block_size] = block
            IV = block
        # the last ciphertext block is kept as the IV for the next call,
        # so consecutive calls encrypt one continuous stream
        self.IV = IV
        return data.tostring()
    def decrypt(self, data):
        """Decrypt data in CBC mode"""
        block_size = self.block_size
        if len(data) % block_size != 0:
            raise ValueError, "Ciphertext length must be multiple of 16"
        data = array('B', data)
        IV = self.IV
        for offset in xrange(0, len(data), block_size):
            ctext = data[offset : offset+block_size]
            # decrypt a copy so the original ciphertext survives as the
            # chaining value for the next block
            block = ctext[:]
            self.cipher.decrypt_block(block)
            # Perform CBC chaining: XOR with the previous ciphertext block
            for i in xrange(block_size):
                block[i] ^= IV[i]
            data[offset : offset+block_size] = block
            IV = ctext
        self.IV = IV
        return data.tostring()
####
def galois_multiply(a, b):
    """Multiply two elements of the AES Galois field GF(2^8).

    Uses shift-and-add ("Russian peasant") multiplication, reducing by
    the low byte of the AES polynomial (0x1b) whenever the running
    multiplicand overflows eight bits.  Only the low byte of the product
    is returned.
    """
    product = 0
    while b:
        if b & 1:
            product ^= a
        b >>= 1
        # double a in GF(2^8): shift left, then fold in 0x1b when the
        # former high bit (0x80) was set
        a = (a << 1) ^ (0x1b if a & 0x80 else 0)
    return product & 0xff
# Precompute the multiplication tables for encryption
gf_mul_by_2 = array('B', [galois_multiply(x, 2) for x in range(256)])
gf_mul_by_3 = array('B', [galois_multiply(x, 3) for x in range(256)])
# ... for decryption
gf_mul_by_9 = array('B', [galois_multiply(x, 9) for x in range(256)])
gf_mul_by_11 = array('B', [galois_multiply(x, 11) for x in range(256)])
gf_mul_by_13 = array('B', [galois_multiply(x, 13) for x in range(256)])
gf_mul_by_14 = array('B', [galois_multiply(x, 14) for x in range(256)])
####
# The S-box is a 256-element array, that maps a single byte value to another
# byte value. Since it's designed to be reversible, each value occurs only once
# in the S-box
#
# More information: http://en.wikipedia.org/wiki/Rijndael_S-box
aes_sbox = array('B',
'637c777bf26b6fc53001672bfed7ab76'
'ca82c97dfa5947f0add4a2af9ca472c0'
'b7fd9326363ff7cc34a5e5f171d83115'
'04c723c31896059a071280e2eb27b275'
'09832c1a1b6e5aa0523bd6b329e32f84'
'53d100ed20fcb15b6acbbe394a4c58cf'
'd0efaafb434d338545f9027f503c9fa8'
'51a3408f929d38f5bcb6da2110fff3d2'
'cd0c13ec5f974417c4a77e3d645d1973'
'60814fdc222a908846eeb814de5e0bdb'
'e0323a0a4906245cc2d3ac629195e479'
'e7c8376d8dd54ea96c56f4ea657aae08'
'ba78252e1ca6b4c6e8dd741f4bbd8b8a'
'703eb5664803f60e613557b986c11d9e'
'e1f8981169d98e949b1e87e9ce5528df'
'8ca1890dbfe6426841992d0fb054bb16'.decode('hex')
)
# This is the inverse of the above. In other words:
# aes_inv_sbox[aes_sbox[val]] == val
aes_inv_sbox = array('B',
'52096ad53036a538bf40a39e81f3d7fb'
'7ce339829b2fff87348e4344c4dee9cb'
'547b9432a6c2233dee4c950b42fac34e'
'082ea16628d924b2765ba2496d8bd125'
'72f8f66486689816d4a45ccc5d65b692'
'6c704850fdedb9da5e154657a78d9d84'
'90d8ab008cbcd30af7e45805b8b34506'
'd02c1e8fca3f0f02c1afbd0301138a6b'
'3a9111414f67dcea97f2cfcef0b4e673'
'96ac7422e7ad3585e2f937e81c75df6e'
'47f11a711d29c5896fb7620eaa18be1b'
'fc563e4bc6d279209adbc0fe78cd5af4'
'1fdda8338807c731b11210592780ec5f'
'60517fa919b54a0d2de57a9f93c99cef'
'a0e03b4dae2af5b0c8ebbb3c83539961'
'172b047eba77d626e169146355210c7d'.decode('hex')
)
# The Rcon table is used in AES's key schedule (key expansion)
# It's a pre-computed table of exponentation of 2 in AES's finite field
#
# More information: http://en.wikipedia.org/wiki/Rijndael_key_schedule
aes_Rcon = array('B',
'8d01020408102040801b366cd8ab4d9a'
'2f5ebc63c697356ad4b37dfaefc59139'
'72e4d3bd61c29f254a943366cc831d3a'
'74e8cb8d01020408102040801b366cd8'
'ab4d9a2f5ebc63c697356ad4b37dfaef'
'c5913972e4d3bd61c29f254a943366cc'
'831d3a74e8cb8d01020408102040801b'
'366cd8ab4d9a2f5ebc63c697356ad4b3'
'7dfaefc5913972e4d3bd61c29f254a94'
'3366cc831d3a74e8cb8d010204081020'
'40801b366cd8ab4d9a2f5ebc63c69735'
'6ad4b37dfaefc5913972e4d3bd61c29f'
'254a943366cc831d3a74e8cb8d010204'
'08102040801b366cd8ab4d9a2f5ebc63'
'c697356ad4b37dfaefc5913972e4d3bd'
'61c29f254a943366cc831d3a74e8cb'.decode('hex')
) | gpl-2.0 |
bbci/pyff | src/Feedbacks/HexoSpeller/Utils.py | 3 | 3180 | # Utils.py -
# Copyright (C) 2009-2010 Sven Daehne
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Utility methods for the classes involved in the Hexospeller feedback.
"""
from math import sin, cos, pi
def rotate_phi_degrees_clockwise(phi, x_y):
    """Rotate the point ``x_y`` clockwise by ``phi`` degrees.

    Returns the rotated coordinates as an (x, y) tuple.
    """
    return rotate_phi_radians_clockwise(degrees_to_radians(phi), x_y)
def rotate_phi_radians_clockwise(phi, x_y):
    """Rotate the point ``x_y`` clockwise by ``phi`` radians.

    Returns the rotated coordinates as an (x, y) tuple.
    """
    x, y = x_y
    cos_phi = cos(phi)
    sin_phi = sin(phi)
    # standard 2D rotation matrix, clockwise convention
    return (cos_phi * x + sin_phi * y,
            -sin_phi * x + cos_phi * y)
def rotate_phi_degrees_counter_clockwise(phi, x_y):
    """Rotate the point ``x_y`` counter-clockwise by ``phi`` degrees and
    return the rotated coordinates as an (x, y) tuple."""
    # a counter-clockwise turn is a clockwise turn by the negated angle
    clockwise_angle = -phi
    return rotate_phi_degrees_clockwise(clockwise_angle, x_y)
def rotate_phi_radians_counter_clockwise(phi, x_y):
    """Rotate the point ``x_y`` counter-clockwise by ``phi`` radians and
    return the rotated coordinates as an (x, y) tuple."""
    # a counter-clockwise turn is a clockwise turn by the negated angle
    clockwise_angle = -phi
    return rotate_phi_radians_clockwise(clockwise_angle, x_y)
def degrees_to_radians(phi_degrees):
    """Convert the angle ``phi_degrees`` from degrees to radians."""
    fraction_of_turn = phi_degrees / 360.0
    return fraction_of_turn * 2.0 * pi
def radians_to_degrees(phi_radians):
    """Convert the angle ``phi_radians`` from radians to degrees."""
    fraction_of_turn = phi_radians / (2.0 * pi)
    return fraction_of_turn * 360.0
def copy_list(orig_list):
    """Return a copy of *orig_list* with all nested lists copied recursively.

    Non-list elements are shared (shallow) between the original and the
    copy, exactly as before.
    """
    # isinstance() is the idiomatic type check (and also accepts list
    # subclasses, unlike the old ``type(elem) == type([])`` comparison);
    # the comprehension replaces the manual append loop.
    return [copy_list(elem) if isinstance(elem, list) else elem
            for elem in orig_list]
def array_to_list(array):
    """Return the elements of *array* (any iterable) as a plain list."""
    # list() performs exactly the manual append loop, at C speed
    return list(array)
def max_with_idx(iter):
    """Return a tuple (max value, index of its first occurrence).

    Raises ValueError for an empty input, like the built-in max().
    """
    # Materialize once: the old implementation iterated the input twice
    # (max() then a scan), which returned (value, None) for one-shot
    # iterators/generators because the second pass saw nothing.
    items = list(iter)
    max_value = max(items)
    return max_value, items.index(max_value)
def sort_list_according_to_values(list, values):
    """Return the elements of *list* ordered by descending *values*.

    NOTE(review): destructive -- both *list* and *values* are emptied by
    the repeated pop() calls, so callers that need the inputs afterwards
    must pass copies.  Runs in O(n^2) due to the repeated max scan.
    """
    sorted_list = []
    while len(list) > 0:
        # find the position of the max value in values
        idx = max_with_idx(values)[1]
        sorted_list.append(list.pop(idx))
        values.pop(idx)
    return sorted_list
| gpl-2.0 |
ktbyers/netmiko | netmiko/cisco/cisco_viptela.py | 1 | 3093 | """Subclass specific to Cisco Viptela."""
from typing import Union, Sequence, TextIO, Any
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class CiscoViptelaSSH(CiscoSSHConnection):
    """Subclass specific to Cisco Viptela."""
    def session_preparation(self) -> None:
        """Prepare the session after the connection has been established."""
        self._test_channel_read(pattern=r"[>#]")
        self.set_base_prompt()
        # Viptela uses 'paginate false' instead of 'terminal length 0'
        self.disable_paging(command="paginate false")
    def check_config_mode(self, check_string: str = ")#", pattern: str = "#") -> bool:
        """Checks if the device is in configuration mode or not."""
        return super().check_config_mode(check_string=check_string, pattern=pattern)
    def commit(self, confirm: bool = False, confirm_response: str = "") -> str:
        """Commit the candidate configuration via the 'commit' command."""
        cmd = "commit"
        return super().save_config(
            cmd=cmd, confirm=confirm, confirm_response=confirm_response
        )
    def config_mode(
        self,
        config_command: str = "conf terminal",
        pattern: str = "",
        re_flags: int = 0,
    ) -> str:
        """Enter configuration mode ('conf terminal' on Viptela)."""
        return super().config_mode(
            config_command=config_command, pattern=pattern, re_flags=re_flags
        )
    def send_config_set(  # type: ignore
        self,
        config_commands: Union[str, Sequence[str], TextIO, None] = None,
        exit_config_mode: bool = False,
        **kwargs: Any,
    ) -> str:
        """Send configuration commands; stays in config mode by default so
        the caller can subsequently commit()."""
        return super().send_config_set(
            config_commands=config_commands, exit_config_mode=exit_config_mode, **kwargs
        )
    def exit_config_mode(self, exit_config: str = "end", pattern: str = r"#") -> str:
        """
        Exit from configuration mode.
        Viptela might have the following in the output (if no 'commit()' occurred.
        Uncommitted changes found, commit them? [yes/no/CANCEL]
        """
        output = ""
        if self.check_config_mode():
            self.write_channel(self.normalize_cmd(exit_config))
            # Make sure you read until you detect the command echo (avoid getting out of sync)
            if self.global_cmd_verify is not False:
                output += self.read_until_pattern(
                    pattern=re.escape(exit_config.strip())
                )
            if not re.search(pattern, output, flags=re.M):
                uncommit_pattern = r"Uncommitted changes found"
                new_pattern = f"({pattern}|{uncommit_pattern})"
                output += self.read_until_pattern(pattern=new_pattern)
                # Do not save 'uncommited changes'
                if uncommit_pattern in output:
                    self.write_channel(self.normalize_cmd("no"))
                    output += self.read_until_pattern(pattern=pattern)
            if self.check_config_mode():
                raise ValueError("Failed to exit configuration mode")
        return output
    def save_config(
        self, cmd: str = "commit", confirm: bool = False, confirm_response: str = ""
    ) -> str:
        """Saves Config"""
        # intentionally unsupported on this platform; use commit() instead
        raise NotImplementedError
| mit |
nemesisdesign/openwisp2 | openwisp_controller/config/controller/views.py | 1 | 14788 | import json
from ipaddress import ip_address
from django.core.exceptions import FieldDoesNotExist, ValidationError
from django.db import transaction
from django.db.models import Q
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.views.generic.base import View
from django.views.generic.detail import SingleObjectMixin
from swapper import load_model
from .. import settings as app_settings
from ..signals import checksum_requested, config_download_requested, device_registered
from ..utils import (
ControllerResponse,
forbid_unallowed,
get_object_or_404,
invalid_response,
send_device_config,
send_vpn_config,
update_last_ip,
)
Device = load_model('config', 'Device')
OrganizationConfigSettings = load_model('config', 'OrganizationConfigSettings')
Vpn = load_model('config', 'Vpn')
class BaseConfigView(SingleObjectMixin, View):
    """Common base for views that operate on an existing object.

    Provides a ``get_object`` that raises 404 unless the object exists
    and has a related config.
    """
    def get_object(self, *args, **kwargs):
        # restrict the lookup to objects that actually have a config
        kwargs.update(config__isnull=False)
        return get_object_or_404(self.model, *args, **kwargs)
class CsrfExtemptMixin(object):
    """
    Mixin that makes the view exempt from CSRF protection
    """
    # NOTE(review): the class name contains typos ("Extempt"/CSFR) but is
    # public API, so it is documented here rather than renamed.
    @method_decorator(csrf_exempt)
    def dispatch(self, request, *args, **kwargs):
        # device agents POST without a CSRF token; authorization is done
        # by the key/secret checks in the concrete views instead
        return super().dispatch(request, *args, **kwargs)
class UpdateLastIpMixin(object):
    """Mixin that records the addresses a device contacted us from."""
    def update_last_ip(self, device, request):
        """Delegate to utils.update_last_ip and, when something changed,
        clear duplicate addresses held by other devices of the same org.

        Returns the (truthy/falsy) result of utils.update_last_ip.
        """
        result = update_last_ip(device, request)
        if result:
            # avoid that any other device in the
            # same org stays with the same management_ip
            # This can happen when management interfaces are using DHCP
            # and they get a new address which was previously used by another
            # device that may now be offline, without this fix, we will end up
            # with two devices having the same management_ip, which will
            # cause OpenWISP to be confused
            self.model.objects.filter(
                organization=device.organization, management_ip=device.management_ip
            ).exclude(pk=device.pk).update(management_ip='')
            # in the case of last_ip, we take a different approach,
            # because it may be a public IP. If it's a public IP we will
            # allow it to be duplicated
            if ip_address(device.last_ip).is_private:
                Device.objects.filter(
                    organization=device.organization, last_ip=device.last_ip
                ).exclude(pk=device.pk).update(last_ip='')
        return result
class ActiveOrgMixin(object):
    """Limit ``get_object`` lookups to objects of active organizations."""
    def get_object(self, *args, **kwargs):
        kwargs.update(organization__is_active=True)
        return super().get_object(*args, **kwargs)
class DeviceChecksumView(ActiveOrgMixin, UpdateLastIpMixin, BaseConfigView):
    """
    returns device's configuration checksum
    """
    model = Device
    def get(self, request, *args, **kwargs):
        """Validate the device key, then serve the checksum as plain text."""
        device = self.get_object(*args, **kwargs)
        bad_request = forbid_unallowed(request, 'GET', 'key', device.key)
        if bad_request:
            return bad_request
        # record the address the device contacted us from
        self.update_last_ip(device, request)
        # let other apps react to the checksum request
        checksum_requested.send(
            sender=device.__class__, instance=device, request=request
        )
        return ControllerResponse(device.config.checksum, content_type='text/plain')
class DeviceDownloadConfigView(ActiveOrgMixin, BaseConfigView):
    """Serve a device's configuration archive as an attachment."""
    model = Device

    def get(self, request, *args, **kwargs):
        device = self.get_object(*args, **kwargs)
        # reject the request unless the per-device key matches
        denied = forbid_unallowed(request, 'GET', 'key', device.key)
        if denied:
            return denied
        # let other apps react to the download before streaming it
        config_download_requested.send(
            sender=device.__class__, instance=device, request=request
        )
        return send_device_config(device.config, request)
class DeviceUpdateInfoView(ActiveOrgMixin, CsrfExtemptMixin, BaseConfigView):
    """
    updates general information about the device
    """
    model = Device
    # whitelist of fields a device agent may overwrite via POST
    UPDATABLE_FIELDS = ['os', 'model', 'system']
    def post(self, request, *args, **kwargs):
        """Update the whitelisted fields and return a plain-text result."""
        device = self.get_object(*args, **kwargs)
        bad_request = forbid_unallowed(request, 'POST', 'key', device.key)
        if bad_request:
            return bad_request
        # update device information
        for attr in self.UPDATABLE_FIELDS:
            if attr in request.POST:
                setattr(device, attr, request.POST.get(attr))
        # validate and save everything or fail otherwise
        try:
            with transaction.atomic():
                device.full_clean()
                device.save()
        except ValidationError as e:
            # dump message_dict as JSON,
            # this should make it easy to debug
            return ControllerResponse(
                json.dumps(e.message_dict, indent=4, sort_keys=True),
                content_type='text/plain',
                status=400,
            )
        return ControllerResponse('update-info: success', content_type='text/plain')
class DeviceReportStatusView(ActiveOrgMixin, CsrfExtemptMixin, BaseConfigView):
    """
    updates status of config objects
    """
    model = Device
    def post(self, request, *args, **kwargs):
        """Apply the status reported by the device agent to its config."""
        device = self.get_object(*args, **kwargs)
        config = device.config
        # ensure request is well formed and authorized
        allowed_status = [choices[0] for choices in config.STATUS]
        allowed_status.append('running')  # backward compatibility
        required_params = [('key', device.key), ('status', allowed_status)]
        for key, value in required_params:
            bad_response = forbid_unallowed(request, 'POST', key, value)
            if bad_response:
                return bad_response
        status = request.POST.get('status')
        # maintain backward compatibility with old agents
        # ("running" was changed to "applied")
        status = status if status != 'running' else 'applied'
        # call set_status_{status} method on Config model
        method_name = f'set_status_{status}'
        getattr(config, method_name)()
        return ControllerResponse(
            f'report-result: success\ncurrent-status: {config.status}\n',
            content_type='text/plain',
        )
class DeviceRegisterView(UpdateLastIpMixin, CsrfExtemptMixin, View):
    """
    registers new Config objects
    """
    model = Device
    org_config_settings_model = OrganizationConfigSettings
    # device-reported hardware fields that may be refreshed on re-registration
    UPDATABLE_FIELDS = ['os', 'model', 'system']
    def init_object(self, **kwargs):
        """
        initializes Config object with incoming POST data
        """
        device_model = self.model
        config_model = device_model.get_config_model()
        options = {}
        for attr in kwargs.keys():
            # skip attributes that are not model fields
            try:
                device_model._meta.get_field(attr)
            except FieldDoesNotExist:
                continue
            options[attr] = kwargs.get(attr)
        # do not specify key if:
        # app_settings.CONSISTENT_REGISTRATION is False
        # if key is ``None`` (it would cause exception)
        if 'key' in options and (
            app_settings.CONSISTENT_REGISTRATION is False or options['key'] is None
        ):
            del options['key']
        # normalize an empty hardware_id string to NULL
        if 'hardware_id' in options and options['hardware_id'] == "":
            options['hardware_id'] = None
        config = config_model(device=device_model(**options), backend=kwargs['backend'])
        # self.organization is set as a side effect of forbidden()
        config.organization = self.organization
        config.device.organization = self.organization
        return config
    def get_template_queryset(self, config):
        """
        returns Template model queryset
        """
        queryset = config.get_template_model().objects.all()
        # filter templates of the same organization or shared templates
        return queryset.filter(Q(organization=self.organization) | Q(organization=None))
    def add_tagged_templates(self, config, request):
        """
        adds templates specified in incoming POST tag setting
        """
        tags = request.POST.get('tags')
        if not tags:
            return
        # retrieve tags and add them to current config
        tags = tags.split()
        queryset = self.get_template_queryset(config)
        templates = queryset.filter(tags__name__in=tags).only('id').distinct()
        for template in templates:
            config.templates.add(template)
    def invalid(self, request):
        """
        ensures request is well formed
        """
        allowed_backends = [path for path, name in app_settings.BACKENDS]
        required_params = [
            ('secret', None),
            ('name', None),
            ('mac_address', None),
            ('backend', allowed_backends),
        ]
        # valid required params or forbid
        for key, value in required_params:
            invalid_response = forbid_unallowed(request, 'POST', key, value)
            if invalid_response:
                return invalid_response
    def forbidden(self, request):
        """
        ensures request is authorized:
        - secret matches an organization's shared_secret
        - the organization has registration_enabled set to True
        """
        try:
            secret = request.POST.get('secret')
            org_settings = self.org_config_settings_model.objects.select_related(
                'organization'
            ).get(shared_secret=secret, organization__is_active=True)
        except self.org_config_settings_model.DoesNotExist:
            return invalid_response(request, 'error: unrecognized secret', status=403)
        if not org_settings.registration_enabled:
            return invalid_response(request, 'error: registration disabled', status=403)
        # set an organization attribute as a side effect
        # this attribute will be used in ``init_object``
        self.organization = org_settings.organization
    def post(self, request, *args, **kwargs):
        """
        POST logic
        """
        if not app_settings.REGISTRATION_ENABLED:
            return ControllerResponse('error: registration disabled', status=403)
        # ensure request is valid
        bad_response = self.invalid(request)
        if bad_response:
            return bad_response
        # ensure request is allowed
        forbidden = self.forbidden(request)
        if forbidden:
            return forbidden
        # prepare model attributes
        key = None
        if app_settings.CONSISTENT_REGISTRATION:
            key = request.POST.get('key')
        # try retrieving existing Device first
        # (key is not None only if CONSISTENT_REGISTRATION is enabled)
        new = False
        try:
            device = self.model.objects.get(key=key)
            # update hw info
            for attr in self.UPDATABLE_FIELDS:
                if attr in request.POST:
                    setattr(device, attr, request.POST.get(attr))
            config = device.config
        # if get queryset fails, instantiate a new Device and Config
        except self.model.DoesNotExist:
            if not app_settings.REGISTRATION_SELF_CREATION:
                return ControllerResponse(
                    'Device not found in the system, please create it first.',
                    status=404,
                )
            new = True
            config = self.init_object(**request.POST.dict())
            device = config.device
        # if get queryset succedes but device has no related config
        # instantiate new Config but reuse existing device
        except self.model.config.RelatedObjectDoesNotExist:
            config = self.init_object(**request.POST.dict())
            config.device = device
        # update last_ip field of device
        device.last_ip = request.META.get('REMOTE_ADDR')
        # validate and save everything or fail otherwise
        try:
            with transaction.atomic():
                device.full_clean()
                device.save()
                config.full_clean()
                config.save()
        except ValidationError as e:
            # dump message_dict as JSON,
            # this should make it easy to debug
            return ControllerResponse(
                json.dumps(e.message_dict, indent=4, sort_keys=True),
                content_type='text/plain',
                status=400,
            )
        # add templates specified in tags
        self.add_tagged_templates(config, request)
        # emit device registered signal
        device_registered.send(sender=device.__class__, instance=device, is_new=new)
        # prepare response
        s = (
            'registration-result: success\n'
            'uuid: {id}\n'
            'key: {key}\n'
            'hostname: {name}\n'
            'is-new: {is_new}\n'
        )
        attributes = device.__dict__.copy()
        attributes.update({'id': device.pk.hex, 'key': device.key, 'is_new': int(new)})
        return ControllerResponse(
            s.format(**attributes), content_type='text/plain', status=201
        )
class VpnChecksumView(BaseConfigView):
    """Serve a VPN server's configuration checksum as plain text."""
    model = Vpn

    def get(self, request, *args, **kwargs):
        vpn = self.get_object(*args, **kwargs)
        # reject the request unless the VPN key matches
        denied = forbid_unallowed(request, 'GET', 'key', vpn.key)
        if denied:
            return denied
        checksum_requested.send(sender=vpn.__class__, instance=vpn, request=request)
        return ControllerResponse(vpn.checksum, content_type='text/plain')
class VpnDownloadConfigView(BaseConfigView):
    """
    returns configuration archive as attachment
    """
    model = Vpn
    def get(self, request, *args, **kwargs):
        """Validate the VPN key, then stream the configuration archive."""
        vpn = self.get_object(*args, **kwargs)
        bad_request = forbid_unallowed(request, 'GET', 'key', vpn.key)
        if bad_request:
            return bad_request
        # let other apps react to the download before streaming it
        config_download_requested.send(
            sender=vpn.__class__, instance=vpn, request=request
        )
        return send_vpn_config(vpn, request)
# ready-to-use view callables for URLconf registration
device_checksum = DeviceChecksumView.as_view()
device_download_config = DeviceDownloadConfigView.as_view()
device_update_info = DeviceUpdateInfoView.as_view()
device_report_status = DeviceReportStatusView.as_view()
device_register = DeviceRegisterView.as_view()
vpn_checksum = VpnChecksumView.as_view()
vpn_download_config = VpnDownloadConfigView.as_view()
| gpl-3.0 |
pinax/pinax-blog | pinax/blog/admin.py | 1 | 3056 | from functools import partial as curry
from django.contrib import admin
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from pinax.images.admin import ImageInline
from pinax.images.models import ImageSet
from .conf import settings
from .forms import AdminPostForm
from .models import Blog, Post, ReviewComment, Section
class PostImageSet(ImageSet):
    # proxy model so ImageSet can be registered with a blog-specific admin
    # without creating a new database table
    class Meta:
        proxy = True
class ReviewInline(admin.TabularInline):
    """Inline editor for review comments on the post change page."""
    model = ReviewComment
def make_published(modeladmin, request, queryset):
    """Admin action: move the selected posts to the final (published) state.

    Posts already in the final state with a publish timestamp are skipped;
    posts lacking a timestamp get the current time.
    """
    # drop posts that are already published and timestamped
    queryset = queryset.exclude(state=Post.STATE_CHOICES[-1][0], published__isnull=False)
    queryset.update(state=Post.STATE_CHOICES[-1][0])
    # stamp only the posts that never had a publish time
    queryset.filter(published__isnull=True).update(published=timezone.now())
make_published.short_description = _("Publish selected posts")
class PostAdmin(admin.ModelAdmin):
    """Admin configuration for blog posts."""
    list_display = ["title", "state", "section", "published", "show_secret_share_url"]
    list_filter = ["section", "state"]
    form = AdminPostForm
    actions = [make_published]
    fields = [
        "section",
        "title",
        "slug",
        "author",
        "markup",
        "teaser",
        "content",
        "description",
        "sharable_url",
        "state",
        "published",
        "image_set" # maybe this https://github.com/anziem/django_reverse_admin
    ]
    readonly_fields = ["sharable_url"]
    prepopulated_fields = {"slug": ("title",)}
    inlines = [
        ReviewInline,
    ]
    def show_secret_share_url(self, obj):
        """Render the secret share URL as a clickable link in the list view."""
        return '<a href="{}">{}</a>'.format(obj.sharable_url, obj.sharable_url)
    show_secret_share_url.short_description = _("Share this url")
    # NOTE(review): allow_tags was removed in Django 2.0; on newer Django
    # this needs format_html()/mark_safe -- verify the targeted version.
    show_secret_share_url.allow_tags = True
    def formfield_for_dbfield(self, db_field, **kwargs):
        """Default the author field to the currently logged-in user."""
        request = kwargs.get("request")
        if db_field.name == "author":
            ff = super().formfield_for_dbfield(db_field, **kwargs)
            ff.initial = request.user.id
            return ff
        return super().formfield_for_dbfield(db_field, **kwargs)
    def get_form(self, request, obj=None, **kwargs):
        """Bind the request into formfield_for_dbfield (``curry`` is
        functools.partial, see imports)."""
        kwargs.update({
            "formfield_callback": curry(self.formfield_for_dbfield, request=request),
        })
        return super().get_form(request, obj, **kwargs)
    def save_form(self, request, form, change):
        # this is done for explicitness that we want form.save to commit
        # form.save doesn't take a commit kwarg for this reason
        return form.save(Blog.objects.first() if not settings.PINAX_BLOG_SCOPING_MODEL else None)
# when scoping is enabled, expose the blog FK in the form and allow
# filtering the change list by the scoping object
if settings.PINAX_BLOG_SCOPING_MODEL:
    PostAdmin.fields.insert(0, "blog")
    PostAdmin.list_filter.append("blog__scoper")
class SectionAdmin(admin.ModelAdmin):
    """Admin configuration for blog sections."""
    # auto-fill the slug from the section name while typing
    prepopulated_fields = {"slug": ("name",)}
# register the admin classes; PostImageSet is configured ad hoc via
# register() keyword options and gets an inline editor for its images
admin.site.register(Post, PostAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(
    PostImageSet,
    list_display=["blog_post", "primary_image", "created_by", "created_at"],
    raw_id_fields=["created_by"],
    inlines=[ImageInline],
)
| mit |
ejegg/citydash | server/shared/request.py | 3 | 2187 | from django.http import JsonResponse, HttpResponse
from django.shortcuts import render_to_response
from django.template.loader import render_to_string
import logging
import json
import re
logger = logging.getLogger("logger")
class ErrorResponse(Exception):
    """Exception carrying a JSON-serializable payload and an HTTP status.

    Raised inside views wrapped by ``make_response`` to abort with an
    error body (``data``) and status code (``status``).
    """
    def __init__(self, message, data=None, status=401):
        # Bug fix: the old call ``super(Exception, self).__init__(self, message)``
        # skipped ``Exception`` in the MRO and stuffed the exception instance
        # itself into ``args``. Initialize the base class normally instead.
        super(ErrorResponse, self).__init__(message)
        self.data = data or {"error": message}
        self.status = status
def make_response(template=None, error_template="error.html"):
    """
    View decorator

    Tailor the response to the requested data type, as specified
    in the Accept header. Expects the wrapped view to return a
    dict. If the request wants JSON, renders the dict as JSON data.
    JSONP (a ``callback`` query parameter) and HTML template rendering
    are also supported; ErrorResponse exceptions become error pages or
    JSON error bodies with the carried status code.
    """
    def constructor_fn(view):
        def wrapped_view(req, *args, **kwargs):
            use_template = template
            status = 200
            try:
                data = view(req, *args, **kwargs)
            except ErrorResponse as err:
                # Render the error template (or JSON) with the payload and
                # status carried by the exception.
                data = err.data
                use_template = error_template
                status = err.status

            # JSONP takes priority over content negotiation.
            jsonp_callback = req.GET.get("callback")
            if jsonp_callback:
                body = "{callback}({json})".format(callback=jsonp_callback,
                                                   json=json.dumps(data))
                response = HttpResponse(body, status=status)
                response["Content-Type"] = "application/javascript"
                return response

            # Bug fix: ``typestring, _ = accepts.split(";", 1)`` raised
            # ValueError for Accept headers without parameters (e.g. a plain
            # "application/json"), and a missing header raised KeyError.
            # Default to "*/*" and keep only the media-type portion.
            accepts = req.META.get("HTTP_ACCEPT", "*/*")
            typestring = accepts.split(";", 1)[0]

            if not use_template \
               or re.search(r"application/json", typestring):
                response = JsonResponse(data, status=status)
                # TODO: We may (or may not!) want to be more restrictive
                # in the future:
                response["Access-Control-Allow-Origin"] = "*"
                return response

            return render_to_response(use_template, data, status=status)
        return wrapped_view
    return constructor_fn
| mit |
sander76/home-assistant | homeassistant/components/ping/device_tracker.py | 2 | 4963 | """Tracks devices by sending a ICMP echo request (ping)."""
import asyncio
from datetime import timedelta
from functools import partial
import logging
import subprocess
import sys
from icmplib import multiping
import voluptuous as vol
from homeassistant import const, util
from homeassistant.components.device_tracker import PLATFORM_SCHEMA
from homeassistant.components.device_tracker.const import (
CONF_SCAN_INTERVAL,
SCAN_INTERVAL,
SOURCE_TYPE_ROUTER,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.async_ import gather_with_concurrency
from homeassistant.util.process import kill_subprocess
from . import async_get_next_ping_id
from .const import DOMAIN, ICMP_TIMEOUT, PING_ATTEMPTS_COUNT, PING_PRIVS, PING_TIMEOUT
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
CONF_PING_COUNT = "count"
CONCURRENT_PING_LIMIT = 6
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(const.CONF_HOSTS): {cv.slug: cv.string},
vol.Optional(CONF_PING_COUNT, default=1): cv.positive_int,
}
)
class HostSubProcess:
    """Track a host by spawning the system ``ping`` binary."""

    def __init__(self, ip_address, dev_id, hass, config, privileged):
        """Initialize the Host pinger.

        ``privileged`` is accepted for signature parity with the icmplib
        path but is not used by the subprocess implementation.
        """
        self.hass = hass
        self.ip_address = ip_address
        self.dev_id = dev_id
        self._count = config[CONF_PING_COUNT]
        # Windows and POSIX ping take different flags for "one echo,
        # one-second timeout".
        if sys.platform == "win32":
            flags = ["-n", "1", "-w", "1000"]
        else:
            flags = ["-n", "-q", "-c1", "-W1"]
        self._ping_cmd = ["ping", *flags, ip_address]

    def ping(self):
        """Send an ICMP echo request and return True if success."""
        proc = subprocess.Popen(
            self._ping_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
        )
        try:
            proc.communicate(timeout=1 + PING_TIMEOUT)
        except subprocess.TimeoutExpired:
            # The binary hung past our grace period; reap it and report down.
            kill_subprocess(proc)
            return False
        except subprocess.CalledProcessError:
            return False
        return proc.returncode == 0

    def update(self) -> bool:
        """Update device state by sending one or more ping messages."""
        # Retry up to ``count`` times; an unreachable host may answer late.
        for attempt in range(self._count):
            if self.ping():
                return True
            _LOGGER.debug(
                "No response from %s failed=%d", self.ip_address, attempt + 1
            )
        return False
async def async_setup_scanner(hass, config, async_see, discovery_info=None):
    """Set up the Host objects and return the update function."""
    # ``privileged`` reflects whether icmplib can open raw sockets; None
    # means we must shell out to the system ping binary instead.
    privileged = hass.data[DOMAIN][PING_PRIVS]
    # Invert the dev_id -> ip mapping so multiping results (keyed by IP
    # order) can be matched back to device ids.
    ip_to_dev_id = {ip: dev_id for (dev_id, ip) in config[const.CONF_HOSTS].items()}
    # Default the poll interval to scale with host count and ping attempts.
    interval = config.get(
        CONF_SCAN_INTERVAL,
        timedelta(seconds=len(ip_to_dev_id) * config[CONF_PING_COUNT]) + SCAN_INTERVAL,
    )
    _LOGGER.debug(
        "Started ping tracker with interval=%s on hosts: %s",
        interval,
        ",".join(ip_to_dev_id.keys()),
    )

    if privileged is None:
        # Subprocess fallback: one HostSubProcess per host, pings run in the
        # executor, throttled to CONCURRENT_PING_LIMIT at a time.
        hosts = [
            HostSubProcess(ip, dev_id, hass, config, privileged)
            for (dev_id, ip) in config[const.CONF_HOSTS].items()
        ]

        async def async_update(now):
            """Update all the hosts on every interval time."""
            results = await gather_with_concurrency(
                CONCURRENT_PING_LIMIT,
                *[hass.async_add_executor_job(host.update) for host in hosts],
            )
            # Only report hosts that answered; results are index-aligned.
            await asyncio.gather(
                *[
                    async_see(dev_id=host.dev_id, source_type=SOURCE_TYPE_ROUTER)
                    for idx, host in enumerate(hosts)
                    if results[idx]
                ]
            )

    else:
        # icmplib path: one multiping call covers every host.
        async def async_update(now):
            """Update all the hosts on every interval time."""
            responses = await hass.async_add_executor_job(
                partial(
                    multiping,
                    ip_to_dev_id.keys(),
                    count=PING_ATTEMPTS_COUNT,
                    timeout=ICMP_TIMEOUT,
                    privileged=privileged,
                    id=async_get_next_ping_id(hass),
                )
            )
            _LOGGER.debug("Multiping responses: %s", responses)
            # Responses come back in the same order as the submitted IPs.
            await asyncio.gather(
                *[
                    async_see(dev_id=dev_id, source_type=SOURCE_TYPE_ROUTER)
                    for idx, dev_id in enumerate(ip_to_dev_id.values())
                    if responses[idx].is_alive
                ]
            )

    async def _async_update_interval(now):
        # Reschedule in ``finally`` so a failed update never stops polling.
        try:
            await async_update(now)
        finally:
            async_track_point_in_utc_time(
                hass, _async_update_interval, util.dt.utcnow() + interval
            )

    # Run the first poll immediately; it schedules all subsequent ones.
    await _async_update_interval(None)
    return True
| apache-2.0 |
mxjl620/scikit-learn | sklearn/decomposition/tests/test_sparse_pca.py | 160 | 6028 | # Author: Vlad Niculae
# License: BSD 3 clause
import sys
import numpy as np
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import SkipTest
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import if_safe_multiprocessing_with_blas
from sklearn.decomposition import SparsePCA, MiniBatchSparsePCA
from sklearn.utils import check_random_state
def generate_toy_data(n_components, n_samples, image_size, random_state=None):
    """Create a noisy low-rank dataset Y = U @ V + noise whose dictionary
    atoms V[k] are small square patches on an ``image_size`` grid."""
    n_features = image_size[0] * image_size[1]

    rng = check_random_state(random_state)
    U = rng.randn(n_samples, n_components)
    V = rng.randn(n_components, n_features)

    centers = [(3, 3), (6, 7), (8, 1)]
    sz = [1, 2, 1]
    for k in range(n_components):
        cx, cy = centers[k]
        half = sz[k]
        img = np.zeros(image_size)
        img[cx - half:cx + half, cy - half:cy + half] = 1.0
        V[k, :] = img.ravel()

    # Y is defined by: Y = UV + noise
    Y = np.dot(U, V)
    Y += 0.1 * rng.randn(Y.shape[0], Y.shape[1])  # Add noise
    return Y, U, V
# SparsePCA can be a bit slow. To avoid having test times go up, we
# test different aspects of the code in the same test
def test_correct_shapes():
    """Components must be (n_components, n_features) and the transform
    (n_samples, n_components), including the overcomplete case."""
    rng = np.random.RandomState(0)
    X = rng.randn(12, 10)
    # Second case is an overcomplete decomposition (components > features).
    for n_components in (8, 13):
        spca = SparsePCA(n_components=n_components, random_state=rng)
        U = spca.fit_transform(X)
        assert_equal(spca.components_.shape, (n_components, 10))
        assert_equal(U.shape, (12, n_components))
def test_fit_transform():
    """The LARS and coordinate-descent solvers should find the same components."""
    alpha = 1
    rng = np.random.RandomState(0)
    Y, _, _ = generate_toy_data(3, 10, (8, 8), random_state=rng)  # wide array
    spca_lars = SparsePCA(n_components=3, method='lars', alpha=alpha,
                          random_state=0)
    spca_lars.fit(Y)

    # Test that CD gives similar results
    spca_lasso = SparsePCA(n_components=3, method='cd', random_state=0,
                           alpha=alpha)
    spca_lasso.fit(Y)
    assert_array_almost_equal(spca_lasso.components_, spca_lars.components_)
@if_safe_multiprocessing_with_blas
def test_fit_transform_parallel():
    """Fitting with n_jobs=2 must reproduce the single-job result exactly."""
    alpha = 1
    rng = np.random.RandomState(0)
    Y, _, _ = generate_toy_data(3, 10, (8, 8), random_state=rng)  # wide array
    spca_lars = SparsePCA(n_components=3, method='lars', alpha=alpha,
                          random_state=0)
    spca_lars.fit(Y)
    U1 = spca_lars.transform(Y)
    # Test multiple CPUs
    spca = SparsePCA(n_components=3, n_jobs=2, method='lars', alpha=alpha,
                     random_state=0).fit(Y)
    U2 = spca.transform(Y)
    # Guard against the degenerate all-zero solution before comparing.
    assert_true(not np.all(spca_lars.components_ == 0))
    assert_array_almost_equal(U1, U2)
def test_transform_nan():
    """A feature that is zero in every sample must not yield NaNs."""
    # Test that SparsePCA won't return NaN when there is 0 feature in all
    # samples.
    rng = np.random.RandomState(0)
    Y, _, _ = generate_toy_data(3, 10, (8, 8), random_state=rng)  # wide array
    Y[:, 0] = 0
    estimator = SparsePCA(n_components=8)
    assert_false(np.any(np.isnan(estimator.fit_transform(Y))))
def test_fit_transform_tall():
    """Solvers must also agree on a tall array (n_samples > n_features)."""
    rng = np.random.RandomState(0)
    Y, _, _ = generate_toy_data(3, 65, (8, 8), random_state=rng)  # tall array
    spca_lars = SparsePCA(n_components=3, method='lars',
                          random_state=rng)
    U1 = spca_lars.fit_transform(Y)
    spca_lasso = SparsePCA(n_components=3, method='cd', random_state=rng)
    U2 = spca_lasso.fit(Y).transform(Y)
    assert_array_almost_equal(U1, U2)
def test_initialization():
    """With max_iter=0, the fitted components must equal the provided V_init."""
    rng = np.random.RandomState(0)
    U_init = rng.randn(5, 3)
    V_init = rng.randn(3, 4)
    # max_iter=0 means no optimization steps run, so the init passes through.
    model = SparsePCA(n_components=3, U_init=U_init, V_init=V_init, max_iter=0,
                      random_state=rng)
    model.fit(rng.randn(5, 4))
    assert_array_equal(model.components_, V_init)
def test_mini_batch_correct_shapes():
    """MiniBatchSparsePCA must expose the same shapes as SparsePCA."""
    rng = np.random.RandomState(0)
    X = rng.randn(12, 10)
    pca = MiniBatchSparsePCA(n_components=8, random_state=rng)
    U = pca.fit_transform(X)
    assert_equal(pca.components_.shape, (8, 10))
    assert_equal(U.shape, (12, 8))
    # test overcomplete decomposition
    pca = MiniBatchSparsePCA(n_components=13, random_state=rng)
    U = pca.fit_transform(X)
    assert_equal(pca.components_.shape, (13, 10))
    assert_equal(U.shape, (12, 13))
def test_mini_batch_fit_transform():
    """Cross-check MiniBatchSparsePCA parallelism and solvers.

    Currently disabled via SkipTest; everything after the raise is
    unreachable until the skip is removed.
    """
    raise SkipTest("skipping mini_batch_fit_transform.")
    alpha = 1
    rng = np.random.RandomState(0)
    Y, _, _ = generate_toy_data(3, 10, (8, 8), random_state=rng)  # wide array
    spca_lars = MiniBatchSparsePCA(n_components=3, random_state=0,
                                   alpha=alpha).fit(Y)
    U1 = spca_lars.transform(Y)
    # Test multiple CPUs
    if sys.platform == 'win32':  # fake parallelism for win32
        # Temporarily disable multiprocessing inside joblib, restoring it
        # even if the fit fails.
        import sklearn.externals.joblib.parallel as joblib_par
        _mp = joblib_par.multiprocessing
        joblib_par.multiprocessing = None
        try:
            U2 = MiniBatchSparsePCA(n_components=3, n_jobs=2, alpha=alpha,
                                    random_state=0).fit(Y).transform(Y)
        finally:
            joblib_par.multiprocessing = _mp
    else:  # we can efficiently use parallelism
        U2 = MiniBatchSparsePCA(n_components=3, n_jobs=2, alpha=alpha,
                                random_state=0).fit(Y).transform(Y)
    assert_true(not np.all(spca_lars.components_ == 0))
    assert_array_almost_equal(U1, U2)
    # Test that CD gives similar results
    spca_lasso = MiniBatchSparsePCA(n_components=3, method='cd', alpha=alpha,
                                    random_state=0).fit(Y)
    assert_array_almost_equal(spca_lasso.components_, spca_lars.components_)
| bsd-3-clause |
roberthodgen/thought-jot | src/utilities.py | 1 | 2732 | """
The MIT License (MIT)
Copyright (c) 2015 Robert Hodgen
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from ndb_users import users
import string
import re
import google.net.proto.ProtocolBuffer
from google.appengine.ext import ndb
from google.appengine.api import mail
def permalinkify(string):
    """Return a clean, URL-friendly slug built from `string`."""
    # NOTE: the parameter name shadows the stdlib ``string`` module imported
    # at the top of the file; kept for backward compatibility with callers.
    slug = string.strip().lower()
    slug = re.sub(r'\s+', '-', slug)        # collapse whitespace runs to dashes
    return re.sub(r'[^a-z0-9-]', '', slug)  # drop anything not URL-safe
def key_for_urlsafe_id(key_id):
    """ Try returning an NDB Key for `key_id`. None otherwise. """
    # Bug fix: the previous version had ``return key`` inside a ``finally``
    # block, which silently swallowed *every* exception and made the
    # ``except`` clause dead code. The documented contract ("None
    # otherwise") is preserved, but failures are now handled explicitly.
    try:
        return ndb.Key(urlsafe=key_id)
    except google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError:
        # Malformed urlsafe string.
        return None
    except Exception:
        # The old finally-return also masked other errors (e.g. a non-string
        # key_id); keep that lenient behavior for existing callers.
        return None
def send_project_contributor_email(email_address, user, project):
    """ Send `email` an email notifying them they've been added as a contributor
    on `project`. """
    # Sender address comes from the ndb_users helper so all app mail is
    # consistent.
    sender_email_address = users._email_sender()
    subject = ''.join([project.name, ' invite'])
    # The body template uses str.format placeholders: login, from_email,
    # to_email, project_name.
    with open('resource/email/project_contributor.txt', 'r') as f:
        body_text = f.read()
    body_text = body_text.format(login='http://thought-jot.appspot.com/login',
        from_email=user.email, to_email=email_address,
        project_name=project.name)
    mail.send_mail(sender_email_address, email_address, subject, body_text)
def str_to_bool(string, allow_none=False):
    """Return the Boolean value encoded by `string`.

    Only the exact literals 'True' and 'true' are truthy; everything else
    (including 'TRUE', '1', or None) maps to False. When `allow_none` is
    set, a None input is passed through unchanged.
    """
    if allow_none and string is None:
        return None
    return string in ('True', 'true')
| mit |
gregswift/ansible | lib/ansible/plugins/callback/syslog_json.py | 54 | 2725 | # Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import logging
import logging.handlers
import socket
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
    """
    logs ansible-playbook and ansible runs to a syslog server in json format
    make sure you have in ansible.cfg:
        callback_plugins   = <path_to_callback_plugins_folder>
    and put the plugin in <path_to_callback_plugins_folder>

    This plugin makes use of the following environment variables:
        SYSLOG_SERVER   (optional): defaults to localhost
        SYSLOG_PORT     (optional): defaults to 514
    """
    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'aggregate'
    CALLBACK_NAME = 'syslog_json'
    CALLBACK_NEEDS_WHITELIST = True

    def __init__(self):
        """Attach a syslog handler configured from the environment."""
        super(CallbackModule, self).__init__()

        self.logger = logging.getLogger('ansible logger')
        self.logger.setLevel(logging.DEBUG)

        self.handler = logging.handlers.SysLogHandler(
            # Bug fix: environment variables are always strings, but
            # SysLogHandler requires an integer port, so an explicitly set
            # SYSLOG_PORT used to break the handler. Coerce it to int.
            address=(os.getenv('SYSLOG_SERVER', 'localhost'),
                     int(os.getenv('SYSLOG_PORT', 514))),
            facility=logging.handlers.SysLogHandler.LOG_USER
        )
        self.logger.addHandler(self.handler)
        self.hostname = socket.gethostname()

    def runner_on_failed(self, host, res, ignore_errors=False):
        """Log a failed task with its full result payload."""
        self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))

    def runner_on_ok(self, host, res):
        """Log a successful task with its full result payload."""
        self.logger.info('%s ansible-command: task execution OK; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))

    def runner_on_skipped(self, host, item=None):
        """Log a skipped task."""
        self.logger.info('%s ansible-command: task execution SKIPPED; host: %s; message: %s' % (self.hostname,host, 'skipped'))

    def runner_on_unreachable(self, host, res):
        """Log an unreachable host as an error."""
        self.logger.error('%s ansible-command: task execution UNREACHABLE; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))

    def runner_on_async_failed(self, host, res):
        """Log a failed async task."""
        self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))

    def playbook_on_import_for_host(self, host, imported_file):
        """Log a playbook file import for a host."""
        self.logger.info('%s ansible-command: playbook IMPORTED; host: %s; message: imported file %s' % (self.hostname,host,imported_file))

    def playbook_on_not_import_for_host(self, host, missing_file):
        """Log a playbook file that could not be imported for a host."""
        self.logger.info('%s ansible-command: playbook NOT IMPORTED; host: %s; message: missing file %s' % (self.hostname,host,missing_file))
| gpl-3.0 |
epuzanov/ZenPacks.community.CIMMon | ZenPacks/community/CIMMon/modeler/plugins/community/cim/SNIANetworkPortMap.py | 1 | 3274 | ################################################################################
#
# This program is part of the CIMMon Zenpack for Zenoss.
# Copyright (C) 2012 Egor Puzanov.
#
# This program can be used under the GNU General Public License version 2
# You can find full information here: http://www.zenoss.com/oss
#
################################################################################
__doc__="""SNIANetworkPortMap
SNIANetworkPortMap maps SNIA_NetworkPort class to CIM_NetworkPort class.
$Id: SNIANetworkPortMap.py,v 1.0 2012/01/23 23:50:55 egor Exp $"""
__version__ = '$Revision: 1.0 $'[11:-2]
from ZenPacks.community.CIMMon.modeler.plugins.community.cim.CIMNetworkPortMap \
import CIMNetworkPortMap
class SNIANetworkPortMap(CIMNetworkPortMap):
    """Map SNIA_NetworkPort CIM class to CIM_NetworkPort class"""

    def queries(self, device):
        """Return the CIM query map used to model this device's ports.

        Returns an empty dict (disabling modeling) when the device has no
        zCIMConnectionString configured. Each entry maps a CIM class name to
        (query, namespace, connection string, property->key mapping).
        """
        connectionString = getattr(device, 'zCIMConnectionString', '')
        if not connectionString:
            return {}
        cs = self.prepareCS(device, connectionString)
        return {
            # Physical/logical network ports and their attributes.
            "CIM_NetworkPort":
                (
                    "SELECT * FROM CIM_NetworkPort",
                    None,
                    cs,
                    {
                        "setPath":"__PATH",
                        "description":"Description",
                        "mtu":"ActiveMaximumTransmissionUnit",
                        "interfaceName":"ElementName",
                        "adminStatus":"EnabledDefault",
                        "operStatus":"EnabledState",
                        "type":"LinkTechnology",
                        "macaddress":"PermanentAddress",
                        "speed":"Speed",
                        "_sysname":"SystemName",
                    }
                ),
            # IP endpoints, joined to ports via CIM_PortImplementsEndpoint.
            "CIM_IPProtocolEndpoint":
                (
                    "SELECT * FROM CIM_IPProtocolEndpoint",
                    None,
                    cs,
                    {
                        "_path":"__PATH",
                        "_ipAddress":"Address",
                        "_ipSubnet":"SubnetMask",
                    }
                ),
            "CIM_PortImplementsEndpoint":
                (
                    "SELECT Antecedent,Dependent FROM CIM_PortImplementsEndpoint",
                    None,
                    cs,
                    {
                        "ant":"Antecedent", # LogicalPort
                        "dep":"Dependent", # ProtocolEndpoint
                    }
                ),
            # Links ports to their containing system.
            "CIM_SystemComponent":
                (
                    "SELECT GroupComponent,PartComponent FROM CIM_SystemComponent",
                    None,
                    cs,
                    {
                        "gc":"GroupComponent", # System
                        "pc":"PartComponent", # SystemComponent
                    },
                ),
            # Links ports to their statistics instances for monitoring.
            "CIM_ElementStatisticalData":
                (
                    "SELECT ManagedElement,Stats FROM CIM_ElementStatisticalData",
                    None,
                    cs,
                    {
                        "me":"ManagedElement",
                        "stats":"Stats",
                    },
                ),
            }
| gpl-2.0 |
vicky2135/lucious | oscar/lib/python2.7/site-packages/bs4/diagnose.py | 63 | 6747 | """Diagnostic functions, mainly for use when doing tech support."""
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
__license__ = "MIT"
import cProfile
from StringIO import StringIO
from HTMLParser import HTMLParser
import bs4
from bs4 import BeautifulSoup, __version__
from bs4.builder import builder_registry
import os
import pstats
import random
import tempfile
import time
import traceback
import sys
import cProfile
def diagnose(data):
"""Diagnostic suite for isolating common problems."""
print "Diagnostic running on Beautiful Soup %s" % __version__
print "Python version %s" % sys.version
basic_parsers = ["html.parser", "html5lib", "lxml"]
for name in basic_parsers:
for builder in builder_registry.builders:
if name in builder.features:
break
else:
basic_parsers.remove(name)
print (
"I noticed that %s is not installed. Installing it may help." %
name)
if 'lxml' in basic_parsers:
basic_parsers.append(["lxml", "xml"])
try:
from lxml import etree
print "Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))
except ImportError, e:
print (
"lxml is not installed or couldn't be imported.")
if 'html5lib' in basic_parsers:
try:
import html5lib
print "Found html5lib version %s" % html5lib.__version__
except ImportError, e:
print (
"html5lib is not installed or couldn't be imported.")
if hasattr(data, 'read'):
data = data.read()
elif os.path.exists(data):
print '"%s" looks like a filename. Reading data from the file.' % data
with open(data) as fp:
data = fp.read()
elif data.startswith("http:") or data.startswith("https:"):
print '"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data
print "You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup."
return
print
for parser in basic_parsers:
print "Trying to parse your markup with %s" % parser
success = False
try:
soup = BeautifulSoup(data, parser)
success = True
except Exception, e:
print "%s could not parse the markup." % parser
traceback.print_exc()
if success:
print "Here's what %s did with the markup:" % parser
print soup.prettify()
print "-" * 80
def lxml_trace(data, html=True, **kwargs):
    """Print out the lxml events that occur during parsing.

    This lets you see how lxml parses a document when no Beautiful
    Soup code is running. Extra `kwargs` are forwarded to
    `etree.iterparse` (e.g. events=...).
    """
    from lxml import etree
    for event, element in etree.iterparse(StringIO(data), html=html, **kwargs):
        print("%s, %4s, %s" % (event, element.tag, element.text))
class AnnouncingParser(HTMLParser):
    """Announces HTMLParser parse events, without doing anything else."""

    def _p(self, s):
        # Single output hook so the destination can be changed in one place.
        print(s)

    def handle_starttag(self, name, attrs):
        self._p("%s START" % name)

    def handle_endtag(self, name):
        self._p("%s END" % name)

    def handle_data(self, data):
        self._p("%s DATA" % data)

    def handle_charref(self, name):
        self._p("%s CHARREF" % name)

    def handle_entityref(self, name):
        self._p("%s ENTITYREF" % name)

    def handle_comment(self, data):
        self._p("%s COMMENT" % data)

    def handle_decl(self, data):
        self._p("%s DECL" % data)

    def unknown_decl(self, data):
        self._p("%s UNKNOWN-DECL" % data)

    def handle_pi(self, data):
        self._p("%s PI" % data)
def htmlparser_trace(data):
"""Print out the HTMLParser events that occur during parsing.
This lets you see how HTMLParser parses a document when no
Beautiful Soup code is running.
"""
parser = AnnouncingParser()
parser.feed(data)
_vowels = "aeiou"
_consonants = "bcdfghjklmnpqrstvwxyz"

def rword(length=5):
    "Generate a random word-like string."
    # Alternate consonant/vowel starting with a consonant, which keeps the
    # result roughly pronounceable.
    letters = []
    for position in range(length):
        pool = _consonants if position % 2 == 0 else _vowels
        letters.append(random.choice(pool))
    return ''.join(letters)
def rsentence(length=4):
    "Generate a random sentence-like string."
    # Each word is 4-9 letters long; words are space-separated.
    words = [rword(random.randint(4, 9)) for _ in range(length)]
    return " ".join(words)
def rdoc(num_elements=1000):
    """Randomly generate an invalid HTML document."""
    tag_names = ['p', 'div', 'span', 'i', 'b', 'script', 'table']
    elements = []
    for i in range(num_elements):
        # choice is drawn from 0..3 but only 0-2 are handled; a draw of 3
        # emits nothing. Presumably intentional (sparser output) — confirm
        # before "fixing".
        choice = random.randint(0,3)
        if choice == 0:
            # New tag.
            tag_name = random.choice(tag_names)
            elements.append("<%s>" % tag_name)
        elif choice == 1:
            elements.append(rsentence(random.randint(1,4)))
        elif choice == 2:
            # Close a tag.
            tag_name = random.choice(tag_names)
            elements.append("</%s>" % tag_name)
    # Tags are opened/closed at random, so the result is deliberately
    # malformed — that's the point for parser stress-testing.
    return "<html>" + "\n".join(elements) + "</html>"
def benchmark_parsers(num_elements=100000):
    """Very basic head-to-head performance benchmark."""
    print "Comparative parser benchmark on Beautiful Soup %s" % __version__
    data = rdoc(num_elements)
    print "Generated a large invalid HTML document (%d bytes)." % len(data)

    # Time each tree builder through the Beautiful Soup front end.
    for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]:
        success = False
        try:
            a = time.time()
            soup = BeautifulSoup(data, parser)
            b = time.time()
            success = True
        except Exception, e:
            print "%s could not parse the markup." % parser
            traceback.print_exc()
        if success:
            print "BS4+%s parsed the markup in %.2fs." % (parser, b-a)

    # Baseline: raw lxml without Beautiful Soup's tree-building overhead.
    from lxml import etree
    a = time.time()
    etree.HTML(data)
    b = time.time()
    print "Raw lxml parsed the markup in %.2fs." % (b-a)

    # Baseline: raw html5lib.
    import html5lib
    parser = html5lib.HTMLParser()
    a = time.time()
    parser.parse(data)
    b = time.time()
    print "Raw html5lib parsed the markup in %.2fs." % (b-a)
def profile(num_elements=100000, parser="lxml"):
    """cProfile a parse of a large random document and print the hot spots."""
    # Profile data is written to a temp file, then loaded with pstats.
    filehandle = tempfile.NamedTemporaryFile()
    filename = filehandle.name

    data = rdoc(num_elements)
    vars = dict(bs4=bs4, data=data, parser=parser)
    cProfile.runctx('bs4.BeautifulSoup(data, parser)' , vars, vars, filename)

    stats = pstats.Stats(filename)
    # stats.strip_dirs()
    stats.sort_stats("cumulative")
    # Restrict the report to bs4/html5lib frames, top 50 entries.
    stats.print_stats('_html5lib|bs4', 50)
# Script entry point: read markup from stdin and run the diagnostic suite.
if __name__ == '__main__':
    diagnose(sys.stdin.read())
| bsd-3-clause |
mixxorz/wagtail | wagtail/core/migrations/0026_group_collection_permission.py | 24 | 1240 | # -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the GroupCollectionPermission through-table linking a group,
    a collection and a permission, unique per (group, collection,
    permission) triple."""

    dependencies = [
        ('auth', '0001_initial'),
        ('wagtailcore', '0025_collection_initial_data'),
    ]

    operations = [
        migrations.CreateModel(
            name='GroupCollectionPermission',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('collection', models.ForeignKey(related_name='group_permissions', verbose_name='collection', to='wagtailcore.Collection', on_delete=models.CASCADE)),
                ('group', models.ForeignKey(related_name='collection_permissions', verbose_name='group', to='auth.Group', on_delete=models.CASCADE)),
                ('permission', models.ForeignKey(to='auth.Permission', verbose_name='permission', on_delete=models.CASCADE)),
            ],
            options={
                'verbose_name': 'group collection permission',
            },
            bases=(models.Model,),
        ),
        # One row per distinct (group, collection, permission) combination.
        migrations.AlterUniqueTogether(
            name='groupcollectionpermission',
            unique_together=set([('group', 'collection', 'permission')]),
        ),
    ]
| bsd-3-clause |
manjunaths/tensorflow | tensorflow/python/util/keyword_args.py | 190 | 1657 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keyword args functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from tensorflow.python.util import decorator_utils
def keyword_args_only(func):
  """Decorate `func` so it can only be called with keyword arguments.

  Calling the returned wrapper with any positional argument raises a
  `ValueError`, forcing callers to name every argument and preventing
  argument-order mistakes.

  Args:
    func: The function or method needed to be decorated.

  Returns:
    Decorated function or method.

  Raises:
    ValueError: If `func` is not callable.
  """
  decorator_utils.validate_callable(func, "keyword_args_only")

  @functools.wraps(func)
  def new_func(*args, **kwargs):
    """Keyword args only wrapper."""
    if not args:
      return func(**kwargs)
    raise ValueError(
        "Must use keyword args to call {}.".format(func.__name__))

  return new_func
| apache-2.0 |
Belgabor/django | django/db/models/manager.py | 306 | 7872 | from django.utils import copycompat as copy
from django.conf import settings
from django.db import router
from django.db.models.query import QuerySet, EmptyQuerySet, insert_query, RawQuerySet
from django.db.models import signals
from django.db.models.fields import FieldDoesNotExist
def ensure_default_manager(sender, **kwargs):
    """
    Ensures that a Model subclass contains a default manager and sets the
    _default_manager attribute on the class. Also sets up the _base_manager
    points to a plain Manager instance (which could be the same as
    _default_manager if it's not a subclass of Manager).
    """
    cls = sender
    # Abstract models never get managers of their own.
    if cls._meta.abstract:
        return
    if not getattr(cls, '_default_manager', None):
        # Create the default manager, if needed.
        try:
            cls._meta.get_field('objects')
            raise ValueError("Model %s must specify a custom Manager, because it has a field named 'objects'" % cls.__name__)
        except FieldDoesNotExist:
            pass
        cls.add_to_class('objects', Manager())
        cls._base_manager = cls.objects
    elif not getattr(cls, '_base_manager', None):
        default_mgr = cls._default_manager.__class__
        if (default_mgr is Manager or
                getattr(default_mgr, "use_for_related_fields", False)):
            cls._base_manager = cls._default_manager
        else:
            # Default manager isn't a plain Manager class, or a suitable
            # replacement, so we walk up the base class hierarchy until we hit
            # something appropriate.
            for base_class in default_mgr.mro()[1:]:
                if (base_class is Manager or
                        getattr(base_class, "use_for_related_fields", False)):
                    cls.add_to_class('_base_manager', base_class())
                    return
            raise AssertionError("Should never get here. Please report a bug, including your model and model manager setup.")
# Attach a default manager to every model class as soon as it is prepared.
signals.class_prepared.connect(ensure_default_manager)
class Manager(object):
    """Base manager: the table-level entry point to a model's queries.

    Most public methods are thin proxies that delegate to a fresh QuerySet,
    so subclasses can customize behavior by overriding get_query_set().
    """
    # Tracks each time a Manager instance is created. Used to retain order.
    creation_counter = 0

    def __init__(self):
        super(Manager, self).__init__()
        self._set_creation_counter()
        self.model = None
        self._inherited = False
        self._db = None

    def contribute_to_class(self, model, name):
        """Hook called by ModelBase: install this manager on `model` as `name`."""
        # TODO: Use weakref because of possible memory leak / circular reference.
        self.model = model
        # ManagerDescriptor blocks access from model *instances*.
        setattr(model, name, ManagerDescriptor(self))
        # The first-created manager wins the _default_manager slot.
        if not getattr(model, '_default_manager', None) or self.creation_counter < model._default_manager.creation_counter:
            model._default_manager = self
        if model._meta.abstract or (self._inherited and not self.model._meta.proxy):
            model._meta.abstract_managers.append((self.creation_counter, name,
                    self))
        else:
            model._meta.concrete_managers.append((self.creation_counter, name,
                    self))

    def _set_creation_counter(self):
        """
        Sets the creation counter value for this instance and increments the
        class-level copy.
        """
        self.creation_counter = Manager.creation_counter
        Manager.creation_counter += 1

    def _copy_to_model(self, model):
        """
        Makes a copy of the manager and assigns it to 'model', which should be
        a child of the existing model (used when inheriting a manager from an
        abstract base class).
        """
        assert issubclass(model, self.model)
        mgr = copy.copy(self)
        mgr._set_creation_counter()
        mgr.model = model
        mgr._inherited = True
        return mgr

    def db_manager(self, using):
        """Return a copy of this manager pinned to the database alias `using`."""
        obj = copy.copy(self)
        obj._db = using
        return obj

    @property
    def db(self):
        # Explicit pin takes precedence; otherwise ask the router.
        return self._db or router.db_for_read(self.model)

    #######################
    # PROXIES TO QUERYSET #
    #######################

    def get_empty_query_set(self):
        return EmptyQuerySet(self.model, using=self._db)

    def get_query_set(self):
        """Returns a new QuerySet object.  Subclasses can override this method
        to easily customize the behavior of the Manager.
        """
        return QuerySet(self.model, using=self._db)

    def none(self):
        return self.get_empty_query_set()

    def all(self):
        return self.get_query_set()

    def count(self):
        return self.get_query_set().count()

    def dates(self, *args, **kwargs):
        return self.get_query_set().dates(*args, **kwargs)

    def distinct(self, *args, **kwargs):
        return self.get_query_set().distinct(*args, **kwargs)

    def extra(self, *args, **kwargs):
        return self.get_query_set().extra(*args, **kwargs)

    def get(self, *args, **kwargs):
        return self.get_query_set().get(*args, **kwargs)

    def get_or_create(self, **kwargs):
        return self.get_query_set().get_or_create(**kwargs)

    def create(self, **kwargs):
        return self.get_query_set().create(**kwargs)

    def filter(self, *args, **kwargs):
        return self.get_query_set().filter(*args, **kwargs)

    def aggregate(self, *args, **kwargs):
        return self.get_query_set().aggregate(*args, **kwargs)

    def annotate(self, *args, **kwargs):
        return self.get_query_set().annotate(*args, **kwargs)

    def complex_filter(self, *args, **kwargs):
        return self.get_query_set().complex_filter(*args, **kwargs)

    def exclude(self, *args, **kwargs):
        return self.get_query_set().exclude(*args, **kwargs)

    def in_bulk(self, *args, **kwargs):
        return self.get_query_set().in_bulk(*args, **kwargs)

    def iterator(self, *args, **kwargs):
        return self.get_query_set().iterator(*args, **kwargs)

    def latest(self, *args, **kwargs):
        return self.get_query_set().latest(*args, **kwargs)

    def order_by(self, *args, **kwargs):
        return self.get_query_set().order_by(*args, **kwargs)

    def select_related(self, *args, **kwargs):
        return self.get_query_set().select_related(*args, **kwargs)

    def values(self, *args, **kwargs):
        return self.get_query_set().values(*args, **kwargs)

    def values_list(self, *args, **kwargs):
        return self.get_query_set().values_list(*args, **kwargs)

    def update(self, *args, **kwargs):
        return self.get_query_set().update(*args, **kwargs)

    def reverse(self, *args, **kwargs):
        return self.get_query_set().reverse(*args, **kwargs)

    def defer(self, *args, **kwargs):
        return self.get_query_set().defer(*args, **kwargs)

    def only(self, *args, **kwargs):
        return self.get_query_set().only(*args, **kwargs)

    def using(self, *args, **kwargs):
        return self.get_query_set().using(*args, **kwargs)

    def exists(self, *args, **kwargs):
        return self.get_query_set().exists(*args, **kwargs)

    def _insert(self, values, **kwargs):
        # Low-level insert used by Model.save(); bypasses the QuerySet.
        return insert_query(self.model, values, **kwargs)

    def _update(self, values, **kwargs):
        return self.get_query_set()._update(values, **kwargs)

    def raw(self, raw_query, params=None, *args, **kwargs):
        return RawQuerySet(raw_query=raw_query, model=self.model, params=params, using=self._db, *args, **kwargs)
class ManagerDescriptor(object):
    """Descriptor that exposes a Manager on the model class only.

    For example, ``Poll.objects`` works, but ``poll_obj.objects`` raises
    AttributeError, because managers are class-level objects.
    """
    def __init__(self, manager):
        self.manager = manager
    def __get__(self, instance, type=None):
        # Identity test against None (PEP 8): "!= None" would invoke any
        # custom __ne__ on the instance and is subtly wrong.
        if instance is not None:
            raise AttributeError("Manager isn't accessible via %s instances" % type.__name__)
        return self.manager
class EmptyManager(Manager):
    # A manager whose querysets never return rows: every queryset it hands
    # out is the empty queryset.
    def get_query_set(self):
        return self.get_empty_query_set()
| bsd-3-clause |
PaulKinlan/cli-caniuse | site/app/scripts/bower_components/jsrepl-build/extern/python/reloop-closured/lib/python2.7/genericpath.py | 246 | 3015 | """
Path operations common to more than one OS
Do not use directly. The OS specific modules import the appropriate
functions from this module themselves.
"""
import os
import stat
__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
'getsize', 'isdir', 'isfile']
# Does a path exist?
# This is false for dangling symbolic links on systems that support them.
def exists(path):
    """Test whether a path exists.  Returns False for broken symbolic links"""
    try:
        os.stat(path)
        return True
    except os.error:
        # stat failed: missing file, dangling symlink, unreadable path
        # component, etc.
        return False
# This follows symbolic links, so both islink() and isdir() can be true
# for the same path ono systems that support symlinks
def isfile(path):
    """Test whether a path is a regular file"""
    try:
        mode = os.stat(path).st_mode
    except os.error:
        return False
    # Directories, sockets, FIFOs etc. all stat fine but are not regular.
    return stat.S_ISREG(mode)
# Is a path a directory?
# This follows symbolic links, so both islink() and isdir()
# can be true for the same path on systems that support symlinks
def isdir(s):
    """Return true if the pathname refers to an existing directory."""
    try:
        mode = os.stat(s).st_mode
    except os.error:
        return False
    return stat.S_ISDIR(mode)
def getsize(filename):
    """Return the size of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_size
# Each of these simply surfaces one os.stat() field; they raise os.error
# if the file does not exist or is inaccessible, mirroring os.stat().
def getmtime(filename):
    """Return the last modification time of a file, reported by os.stat()."""
    return os.stat(filename).st_mtime
def getatime(filename):
    """Return the last access time of a file, reported by os.stat()."""
    return os.stat(filename).st_atime
def getctime(filename):
    """Return the metadata change time of a file, reported by os.stat()."""
    # NOTE(review): st_ctime semantics are platform dependent (metadata
    # change time on Unix, creation time on Windows) — see os.stat docs.
    return os.stat(filename).st_ctime
# Return the longest prefix of all list elements.
def commonprefix(m):
    "Given a list of pathnames, returns the longest common leading component"
    if not m:
        return ''
    # The lexicographic min and max differ from each other at least as
    # early as any other pair, so comparing just these two suffices.
    lo = min(m)
    hi = max(m)
    for i, (a, b) in enumerate(zip(lo, hi)):
        if a != b:
            return lo[:i]
    return lo
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
# Generic implementation of splitext, to be parametrized with
# the separators
def _splitext(p, sep, altsep, extsep):
    """Split the extension from a pathname.

    Extension is everything from the last dot to the end, ignoring
    leading dots.  Returns "(root, ext)"; ext may be empty."""
    # Locate the last path separator (considering both separator styles).
    last_sep = p.rfind(sep)
    if altsep:
        last_sep = max(last_sep, p.rfind(altsep))
    last_dot = p.rfind(extsep)
    if last_dot > last_sep:
        # Split only if the final component has a non-dot character before
        # the extension dot (so ".cshrc" has no extension).
        for pos in range(last_sep + 1, last_dot):
            if p[pos] != extsep:
                return p[:last_dot], p[last_dot:]
    return p, ''
| apache-2.0 |
sysalexis/kbengine | kbe/src/lib/python/Lib/xml/etree/ElementPath.py | 87 | 9791 | #
# ElementTree
# $Id: ElementPath.py 3375 2008-02-13 08:05:08Z fredrik $
#
# limited xpath support for element trees
#
# history:
# 2003-05-23 fl created
# 2003-05-28 fl added support for // etc
# 2003-08-27 fl fixed parsing of periods in element names
# 2007-09-10 fl new selection engine
# 2007-09-12 fl fixed parent selector
# 2007-09-13 fl added iterfind; changed findall to return a list
# 2007-11-30 fl added namespaces support
# 2009-10-30 fl added child element value filter
#
# Copyright (c) 2003-2009 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2009 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/psf/license for licensing details.
##
# Implementation module for XPath support. There's usually no reason
# to import this module directly; the <b>ElementTree</b> does this for
# you, if needed.
##
import re
# Tokenizer for the supported XPath subset.  Group 1 captures operators
# (quoted strings, "::", "/", "//", "..", "()", and single punctuation);
# group 2 captures (possibly {ns}-qualified) names.  Raw strings are used
# so that "\." etc. are genuine regex escapes rather than invalid Python
# string escape sequences (a DeprecationWarning on Python 3).
xpath_tokenizer_re = re.compile(
    r"("
    r"'[^']*'|\"[^\"]*\"|"
    r"::|"
    r"//?|"
    r"\.\.|"
    r"\(\)|"
    r"[/.*:\[\]\(\)@=])|"
    r"((?:\{[^}]+\})?[^/\[\]\(\)@=\s]+)|"
    r"\s+"
    )
def xpath_tokenizer(pattern, namespaces=None):
    """Yield (operator, tag) token pairs for *pattern*, expanding any
    "prefix:tag" name into "{uri}tag" form via the *namespaces* map."""
    for op, tag in xpath_tokenizer_re.findall(pattern):
        if tag and not tag.startswith("{") and ":" in tag:
            prefix, local = tag.split(":", 1)
            try:
                if not namespaces:
                    raise KeyError
                yield op, "{%s}%s" % (namespaces[prefix], local)
            except KeyError:
                raise SyntaxError("prefix %r not found in prefix map" % prefix)
        else:
            yield op, tag
def get_parent_map(context):
    """Return a child -> parent mapping for context.root, building and
    caching it on the context the first time it is requested."""
    pmap = context.parent_map
    if pmap is None:
        pmap = {}
        for parent in context.root.iter():
            for child in parent:
                pmap[child] = parent
        context.parent_map = pmap
    return pmap
def prepare_child(next, token):
    """Build a selector for direct children with the given tag."""
    tag = token[1]
    def select(context, result):
        for node in result:
            for child in node:
                if child.tag == tag:
                    yield child
    return select
def prepare_star(next, token):
    """Build a selector yielding every direct child element ("*")."""
    def select(context, result):
        for node in result:
            for child in node:
                yield child
    return select
def prepare_self(next, token):
    """Build a selector that passes the current node set through ("." step)."""
    def select(context, result):
        for node in result:
            yield node
    return select
def prepare_descendant(next, token):
    """Build a "//" selector; consumes the following tag token from the
    stream and yields all matching descendants (excluding the node itself)."""
    token = next()
    if token[0] == "*":
        tag = "*"
    elif not token[0]:
        tag = token[1]
    else:
        raise SyntaxError("invalid descendant")
    def select(context, result):
        for node in result:
            for sub in node.iter(tag):
                # iter() includes the node itself; a descendant must differ.
                if sub is not node:
                    yield sub
    return select
def prepare_parent(next, token):
    """Build a ".." selector yielding each distinct parent of the node set."""
    def select(context, result):
        # FIXME: raise error if .. is applied at toplevel?
        parents = get_parent_map(context)
        seen = {}
        for node in result:
            try:
                parent = parents[node]
            except KeyError:
                # The root has no parent; skip it.
                continue
            if parent not in seen:
                seen[parent] = None
                yield parent
    return select
def prepare_predicate(next, token):
    """Build a "[...]" predicate selector.

    Consumes tokens up to the closing "]" and builds a compact signature
    string (one character per token kind) that is matched against the
    handful of supported predicate forms below.
    """
    # FIXME: replace with real parser!!! refs:
    # http://effbot.org/zone/simple-iterator-parser.htm
    # http://javascript.crockford.com/tdop/tdop.html
    signature = []
    predicate = []
    while 1:
        token = next()
        if token[0] == "]":
            break
        if token[0] and token[0][:1] in "'\"":
            # Quoted string literal: strip the quotes, normalize to "'".
            token = "'", token[0][1:-1]
        signature.append(token[0] or "-")
        predicate.append(token[1])
    signature = "".join(signature)
    # use signature to determine predicate type
    if signature == "@-":
        # [@attribute] predicate
        key = predicate[1]
        def select(context, result):
            for elem in result:
                if elem.get(key) is not None:
                    yield elem
        return select
    if signature == "@-='":
        # [@attribute='value']
        key = predicate[1]
        value = predicate[-1]
        def select(context, result):
            for elem in result:
                if elem.get(key) == value:
                    yield elem
        return select
    if signature == "-" and not re.match("\-?\d+$", predicate[0]):
        # [tag]
        tag = predicate[0]
        def select(context, result):
            for elem in result:
                if elem.find(tag) is not None:
                    yield elem
        return select
    if signature == "-='" and not re.match("\-?\d+$", predicate[0]):
        # [tag='value']
        tag = predicate[0]
        value = predicate[-1]
        def select(context, result):
            for elem in result:
                for e in elem.findall(tag):
                    # Match on the concatenated text content of the child.
                    if "".join(e.itertext()) == value:
                        yield elem
                        break
        return select
    if signature == "-" or signature == "-()" or signature == "-()-":
        # [index] or [last()] or [last()-index]
        if signature == "-":
            # [index]
            index = int(predicate[0]) - 1
            if index < 0:
                raise SyntaxError("XPath position >= 1 expected")
        else:
            if predicate[0] != "last":
                raise SyntaxError("unsupported function")
            if signature == "-()-":
                try:
                    index = int(predicate[2]) - 1
                except ValueError:
                    raise SyntaxError("unsupported expression")
                if index > -2:
                    raise SyntaxError("XPath offset from last() must be negative")
            else:
                index = -1
        def select(context, result):
            # Positional selection needs the parent to count siblings.
            parent_map = get_parent_map(context)
            for elem in result:
                try:
                    parent = parent_map[elem]
                    # FIXME: what if the selector is "*" ?
                    elems = list(parent.findall(elem.tag))
                    if elems[index] is elem:
                        yield elem
                except (IndexError, KeyError):
                    pass
        return select
    raise SyntaxError("invalid predicate")
# Dispatch table: maps the operator token that starts a path step to the
# prepare_* factory that compiles that step into a select() callable.
ops = {
    "": prepare_child,
    "*": prepare_star,
    ".": prepare_self,
    "..": prepare_parent,
    "//": prepare_descendant,
    "[": prepare_predicate,
    }
# Cache of compiled selectors, keyed by (path, namespaces); see iterfind().
_cache = {}
class _SelectorContext:
    # Per-evaluation state shared by the select() callables: the root
    # element plus a lazily built child->parent map (see get_parent_map).
    parent_map = None
    def __init__(self, root):
        self.root = root
# --------------------------------------------------------------------
##
# Generate all matching objects.
def iterfind(elem, path, namespaces=None):
    """Generate all elements matching the XPath expression *path*,
    evaluated relative to *elem*.  *namespaces* is an optional
    prefix -> URI map used to expand prefixed names."""
    # compile selector pattern
    cache_key = (path, None if namespaces is None
                       else tuple(sorted(namespaces.items())))
    if path[-1:] == "/":
        path = path + "*"  # implicit all (FIXME: keep this?)
    try:
        selector = _cache[cache_key]
    except KeyError:
        # Keep the selector cache bounded: wipe it once it grows too big.
        if len(_cache) > 100:
            _cache.clear()
        if path[:1] == "/":
            raise SyntaxError("cannot use absolute path on element")
        # Named next_token rather than "next" to avoid shadowing the
        # builtin; the callable is handed to the prepare_* factories so
        # they can consume extra tokens (e.g. the tag after "//").
        next_token = iter(xpath_tokenizer(path, namespaces)).__next__
        token = next_token()
        selector = []
        while 1:
            try:
                selector.append(ops[token[0]](next_token, token))
            except StopIteration:
                raise SyntaxError("invalid path")
            try:
                token = next_token()
                if token[0] == "/":
                    token = next_token()
            except StopIteration:
                break
        _cache[cache_key] = selector
    # execute selector pattern
    result = [elem]
    context = _SelectorContext(elem)
    for select in selector:
        result = select(context, result)
    return result
##
# Find first matching object.
def find(elem, path, namespaces=None):
    """Return the first element matching *path*, or None if no match."""
    for match in iterfind(elem, path, namespaces):
        return match
    return None
##
# Find all matching objects.
def findall(elem, path, namespaces=None):
    """Return a list of all elements matching *path*."""
    return [match for match in iterfind(elem, path, namespaces)]
##
# Find text for first matching object.
def findtext(elem, path, default=None, namespaces=None):
    """Return the text of the first match ("" when it has no text), or
    *default* when nothing matches."""
    for match in iterfind(elem, path, namespaces):
        return match.text or ""
    return default
| lgpl-3.0 |
j-carl/ansible | lib/ansible/plugins/inventory/auto.py | 53 | 2374 | # Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
name: auto
plugin_type: inventory
author:
- Matt Davis <@nitzmahone>
short_description: Loads and executes an inventory plugin specified in a YAML config
description:
- By whitelisting C(auto) inventory plugin, any YAML inventory config file with a
C(plugin) key at its root will automatically cause the named plugin to be loaded and executed with that
config. This effectively provides automatic whitelisting of all installed/accessible inventory plugins.
- To disable this behavior, remove C(auto) from the C(INVENTORY_ENABLED) config element.
'''
EXAMPLES = '''
# This plugin is not intended for direct use; it is a fallback mechanism for automatic whitelisting of
# all installed inventory plugins.
'''
from ansible.errors import AnsibleParserError
from ansible.plugins.inventory import BaseInventoryPlugin
from ansible.plugins.loader import inventory_loader
class InventoryModule(BaseInventoryPlugin):
    """Fallback inventory plugin that loads the plugin named by the root
    'plugin' key of a YAML inventory config and delegates parsing to it."""

    NAME = 'auto'

    def verify_file(self, path):
        # Only YAML files can carry the 'plugin' key this plugin looks for;
        # endswith accepts a tuple of suffixes, replacing the two chained
        # checks of the original.
        if not path.endswith(('.yml', '.yaml')):
            return False
        return super(InventoryModule, self).verify_file(path)

    def parse(self, inventory, loader, path, cache=True):
        config_data = loader.load_from_file(path, cache=False)
        try:
            plugin_name = config_data.get('plugin', None)
        except AttributeError:
            # config_data was not a mapping (e.g. a list or scalar).
            plugin_name = None
        if not plugin_name:
            raise AnsibleParserError("no root 'plugin' key found, '{0}' is not a valid YAML inventory plugin config file".format(path))
        plugin = inventory_loader.get(plugin_name)
        if not plugin:
            raise AnsibleParserError("inventory config '{0}' specifies unknown plugin '{1}'".format(path, plugin_name))
        if not plugin.verify_file(path):
            raise AnsibleParserError("inventory config '{0}' could not be verified by plugin '{1}'".format(path, plugin_name))
        plugin.parse(inventory, loader, path, cache=cache)
        try:
            plugin.update_cache_if_changed()
        except AttributeError:
            # Not every inventory plugin implements cache updating.
            pass
| gpl-3.0 |
quokkaproject/quokka-classes | pipelines.py | 1 | 2318 | # coding: utf-8
from flask import request
from quokka.modules.cart.pipelines.base import CartPipeline
from quokka.utils import get_current_user
from .models import CourseSubscription, Subscriber
class SetSubscriber(CartPipeline):
    """Cart pipeline step that collects subscriber contact data from the
    request form and attaches a Subscriber to every CourseSubscription
    in the current cart."""

    def process(self):
        """Render the form until confirmed, then update subscriptions and
        the cart's sender data and advance the pipeline."""
        name = request.form.get("name")
        email = request.form.get("email")
        area_code = request.form.get("area_code")
        phone = request.form.get("phone")
        document = request.form.get("document")
        address = request.form.get("address")
        confirm = request.form.get("classes_setsubscriber_confirm")
        if not confirm:
            # First pass: show the form until the user confirms it.
            return self.render('classes/setsubscriber.html', cart=self.cart)
        formdata = dict(name=name, email=email, area_code=area_code,
                        phone=phone, document=document, address=address)
        subscriptions = CourseSubscription.objects.filter(
            cart=self.cart
        )
        user = get_current_user()
        # Resolve the subscriber once instead of re-querying/re-saving it
        # for every subscription in the cart (same result, fewer DB hits).
        subscriber = self.get_subscriber(user, **formdata)
        for subscription in subscriptions:
            subscription.subscriber = subscriber
            subscription.save()
        self.cart.sender_data = {
            "name": name or user.name,
            "email": email or user.email,
            "area_code": area_code,
            "phone": phone.replace('-', '').replace('(', '').replace(')', ''),
        }
        self.cart.addlog("SetSubscriber Pipeline: defined sender data")
        return self.go()

    def get_subscriber(self, user, **kwargs):
        """Return an updated (or newly created) Subscriber for *user*;
        None when there is no authenticated user."""
        if not user:
            return None
        try:
            sub = Subscriber.objects.get(user=user)
            sub.name = kwargs.get('name')
            sub.email = kwargs.get('email')
            sub.document = kwargs.get('document')
            sub.address = kwargs.get('address')
            sub.phone = u"%(area_code)s%(phone)s" % kwargs
            sub.save()
            return sub
        except Exception:
            # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
            # are no longer swallowed; any lookup/save failure falls back to
            # creating a fresh subscriber record.
            self.cart.addlog("Creating a new subscriber", save=False)
            return Subscriber.objects.create(
                name=kwargs.get('name'),
                email=kwargs.get('email'),
                user=user,
                document=kwargs.get('document'),
                address=kwargs.get('address'),
                phone=u"%(area_code)s%(phone)s" % kwargs
            )
| mit |
jshiv/turntable | test/lib/python2.7/site-packages/numpy/distutils/fcompiler/absoft.py | 229 | 5612 |
# http://www.absoft.com/literature/osxuserguide.pdf
# http://www.absoft.com/documentation.html
# Notes:
# - when using -g77 then use -DUNDERSCORE_G77 to compile f2py
# generated extension modules (works for f2py v2.45.241_1936 and up)
from __future__ import division, absolute_import, print_function
import os
from numpy.distutils.cpuinfo import cpu
from numpy.distutils.fcompiler import FCompiler, dummy_fortran_file
from numpy.distutils.misc_util import cyg2win32
compilers = ['AbsoftFCompiler']
class AbsoftFCompiler(FCompiler):
    # Wrapper for the commercial Absoft Fortran compilers (f77/f90) on
    # Linux, Mac OS X and Windows.
    compiler_type = 'absoft'
    description = 'Absoft Corp Fortran Compiler'
    #version_pattern = r'FORTRAN 77 Compiler (?P<version>[^\s*,]*).*?Absoft Corp'
    version_pattern = r'(f90:.*?(Absoft Pro FORTRAN Version|FORTRAN 77 Compiler|Absoft Fortran Compiler Version|Copyright Absoft Corporation.*?Version))'+\
                      r' (?P<version>[^\s*,]*)(.*?Absoft Corp|)'
    # on windows: f90 -V -c dummy.f
    # f90: Copyright Absoft Corporation 1994-1998 mV2; Cray Research, Inc. 1994-1996 CF90 (2.x.x.x f36t87) Version 2.3 Wed Apr 19, 2006 13:05:16
    # samt5735(8)$ f90 -V -c dummy.f
    # f90: Copyright Absoft Corporation 1994-2002; Absoft Pro FORTRAN Version 8.0
    # Note that fink installs g77 as f77, so need to use f90 for detection.
    # NOTE(review): all version checks below (e.g. get_version() >= '9.0')
    # are plain *string* comparisons; lexicographically '10.0' < '9.0', so
    # double-digit major versions may take the wrong branch — verify what
    # type get_version() actually returns before relying on these.
    executables = {
        'version_cmd'  : None,          # set by update_executables
        'compiler_f77' : ["f77"],
        'compiler_fix' : ["f90"],
        'compiler_f90' : ["f90"],
        'linker_so'    : ["<F90>"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
    if os.name=='nt':
        library_switch = '/out:'      #No space after /out:!

    module_dir_switch = None
    module_include_switch = '-p'
    def update_executables(self):
        # Build the version-probe command line around a throwaway Fortran
        # source file (cygwin paths converted for the Windows toolchain).
        f = cyg2win32(dummy_fortran_file())
        self.executables['version_cmd'] = ['<F90>', '-V', '-c',
                                           f+'.f', '-o', f+'.o']
    def get_flags_linker_so(self):
        # Linker flags needed to produce a shared library / DLL.
        if os.name=='nt':
            opt = ['/dll']
        # The "-K shared" switches are being left in for pre-9.0 versions
        # of Absoft though I don't think versions earlier than 9 can
        # actually be used to build shared libraries. In fact, version
        # 8 of Absoft doesn't recognize "-K shared" and will fail.
        elif self.get_version() >= '9.0':
            opt = ['-shared']
        else:
            opt = ["-K", "shared"]
        return opt
    def library_dir_option(self, dir):
        # Compiler flag(s) adding a library search directory.
        if os.name=='nt':
            return ['-link', '/PATH:"%s"' % (dir)]
        return "-L" + dir
    def library_option(self, lib):
        # Compiler flag naming a library to link against.
        if os.name=='nt':
            return '%s.lib' % (lib)
        return "-l" + lib
    def get_library_dirs(self):
        # Extend the default search path with the Absoft installation's
        # lib directory (taken from the ABSOFT environment variable).
        opt = FCompiler.get_library_dirs(self)
        d = os.environ.get('ABSOFT')
        if d:
            if self.get_version() >= '10.0':
                # use shared libraries, the static libraries were not compiled -fPIC
                prefix = 'sh'
            else:
                prefix = ''
            if cpu.is_64bit():
                suffix = '64'
            else:
                suffix = ''
            opt.append(os.path.join(d, '%slib%s' % (prefix, suffix)))
        return opt
    def get_libraries(self):
        # Absoft runtime support libraries; the set changed across releases.
        opt = FCompiler.get_libraries(self)
        if self.get_version() >= '11.0':
            opt.extend(['af90math', 'afio', 'af77math', 'amisc'])
        elif self.get_version() >= '10.0':
            opt.extend(['af90math', 'afio', 'af77math', 'U77'])
        elif self.get_version() >= '8.0':
            opt.extend(['f90math', 'fio', 'f77math', 'U77'])
        else:
            opt.extend(['fio', 'f90math', 'fmath', 'U77'])
        if os.name =='nt':
            opt.append('COMDLG32')
        return opt
    def get_flags(self):
        # Flags common to all compilations.
        opt = FCompiler.get_flags(self)
        if os.name != 'nt':
            opt.extend(['-s'])
            if self.get_version():
                if self.get_version()>='8.2':
                    opt.append('-fpic')
        return opt
    def get_flags_f77(self):
        # Flags for Fortran 77 sources.
        opt = FCompiler.get_flags_f77(self)
        opt.extend(['-N22', '-N90', '-N110'])
        v = self.get_version()
        if os.name == 'nt':
            if v and v>='8.0':
                opt.extend(['-f', '-N15'])
        else:
            opt.append('-f')
            if v:
                if v<='4.6':
                    opt.append('-B108')
                else:
                    # Though -N15 is undocumented, it works with
                    # Absoft 8.0 on Linux
                    opt.append('-N15')
        return opt
    def get_flags_f90(self):
        # Flags for free-form Fortran 90 sources (symbol naming conventions).
        opt = FCompiler.get_flags_f90(self)
        opt.extend(["-YCFRL=1", "-YCOM_NAMES=LCS", "-YCOM_PFX", "-YEXT_PFX",
                    "-YCOM_SFX=_", "-YEXT_SFX=_", "-YEXT_NAMES=LCS"])
        if self.get_version():
            if self.get_version()>'4.6':
                opt.extend(["-YDEALLOC=ALL"])
        return opt
    def get_flags_fix(self):
        # Flags for fixed-form Fortran 90 sources.
        opt = FCompiler.get_flags_fix(self)
        opt.extend(["-YCFRL=1", "-YCOM_NAMES=LCS", "-YCOM_PFX", "-YEXT_PFX",
                    "-YCOM_SFX=_", "-YEXT_SFX=_", "-YEXT_NAMES=LCS"])
        opt.extend(["-f", "fixed"])
        return opt
    def get_flags_opt(self):
        # Optimization flags.
        opt = ['-O']
        return opt
if __name__ == '__main__':
    # Smoke test: detect and print the installed Absoft compiler version.
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils.fcompiler import new_fcompiler
    compiler = new_fcompiler(compiler='absoft')
    compiler.customize()
    print(compiler.get_version())
| mit |
scality/manila | manila/tests/api/v1/test_security_service.py | 3 | 16794 | # Copyright 2012 NetApp
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from six.moves.urllib import parse
import webob
from manila.api.v1 import security_service
from manila.common import constants
from manila import db
from manila import exception
from manila import test
from manila.tests.api import fakes
class ShareApiTest(test.TestCase):
"""Share Api Test."""
    def setUp(self):
        """Build the controller under test plus two security-service
        fixtures (active directory and LDAP) and start a policy-check
        patch that individual tests may stop."""
        super(ShareApiTest, self).setUp()
        self.controller = security_service.SecurityServiceController()
        self.maxDiff = None
        self.ss_active_directory = {
            "created_at": "fake-time",
            "updated_at": "fake-time-2",
            "id": 1,
            "name": "fake-name",
            "description": "Fake Security Service Desc",
            "type": constants.SECURITY_SERVICES_ALLOWED_TYPES[0],
            "dns_ip": "1.1.1.1",
            "server": "fake-server",
            "domain": "fake-domain",
            "user": "fake-user",
            "password": "fake-password",
            "status": constants.STATUS_NEW,
            "project_id": "fake",
        }
        self.ss_ldap = {
            "created_at": "fake-time",
            "updated_at": "fake-time-2",
            "id": 2,
            "name": "ss-ldap",
            "description": "Fake Security Service Desc",
            "type": constants.SECURITY_SERVICES_ALLOWED_TYPES[1],
            "dns_ip": "2.2.2.2",
            "server": "test-server",
            "domain": "test-domain",
            "user": "test-user",
            "password": "test-password",
            "status": "active",
            "project_id": "fake",
        }
        self.valid_search_opts = {
            'user': 'fake-user',
            'server': 'fake-server',
            'dns_ip': '1.1.1.1',
            'domain': 'fake-domain',
            'type': constants.SECURITY_SERVICES_ALLOWED_TYPES[0],
        }
        self.check_policy_patcher = mock.patch(
            'manila.api.v1.security_service.policy.check_policy')
        self.check_policy_patcher.start()
        self.addCleanup(self._stop_started_patcher, self.check_policy_patcher)
        self.security_service_list_expected_resp = {
            'security_services': [{
                'id': self.ss_active_directory['id'],
                'name': self.ss_active_directory['name'],
                'type': self.ss_active_directory['type'],
                'status': self.ss_active_directory['status']
            }, ]
        }
    def _stop_started_patcher(self, patcher):
        # Cleanup helper: only stop the patcher if it is still active
        # (a test may already have stopped it itself).
        if hasattr(patcher, 'is_local'):
            patcher.stop()
    # show(): one test for the happy path, one for a missing resource.
    def test_security_service_show(self):
        db.security_service_get = mock.Mock(
            return_value=self.ss_active_directory)
        req = fakes.HTTPRequest.blank('/security-services/1')
        res_dict = self.controller.show(req, '1')
        expected = self.ss_active_directory.copy()
        expected.update()
        self.assertEqual({'security_service': self.ss_active_directory},
                         res_dict)
    def test_security_service_show_not_found(self):
        # A NotFound from the db layer must surface as HTTP 404.
        db.security_service_get = mock.Mock(side_effect=exception.NotFound)
        req = fakes.HTTPRequest.blank('/shares/1')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.show,
                          req, '1')
    # create(): happy path, invalid service type, and missing body.
    def test_security_service_create(self):
        sec_service = self.ss_active_directory.copy()
        create_stub = mock.Mock(
            return_value=sec_service)
        self.mock_object(db, 'security_service_create', create_stub)
        req = fakes.HTTPRequest.blank('/security-services')
        res_dict = self.controller.create(
            req, {"security_service": sec_service})
        expected = self.ss_active_directory.copy()
        self.assertEqual({'security_service': expected}, res_dict)
    def test_security_service_create_invalid_types(self):
        # A type outside SECURITY_SERVICES_ALLOWED_TYPES must be rejected.
        sec_service = self.ss_active_directory.copy()
        sec_service['type'] = 'invalid'
        req = fakes.HTTPRequest.blank('/security-services')
        self.assertRaises(exception.InvalidInput, self.controller.create, req,
                          {"security_service": sec_service})
    def test_create_security_service_no_body(self):
        # An empty request body is unprocessable.
        body = {}
        req = fakes.HTTPRequest.blank('/security-services')
        self.assertRaises(webob.exc.HTTPUnprocessableEntity,
                          self.controller.create,
                          req,
                          body)
    # delete(): happy path, missing resource, and in-use protection.
    def test_security_service_delete(self):
        db.security_service_delete = mock.Mock()
        db.security_service_get = mock.Mock()
        db.share_network_get_all_by_security_service = mock.Mock(
            return_value=[])
        req = fakes.HTTPRequest.blank('/security_services/1')
        resp = self.controller.delete(req, 1)
        db.security_service_delete.assert_called_once_with(
            req.environ['manila.context'], 1)
        self.assertEqual(202, resp.status_int)
    def test_security_service_delete_not_found(self):
        db.security_service_get = mock.Mock(side_effect=exception.NotFound)
        req = fakes.HTTPRequest.blank('/security_services/1')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.delete,
                          req,
                          1)
    def test_security_service_delete_has_share_networks(self):
        # Deleting a service still referenced by a share network is forbidden.
        db.security_service_get = mock.Mock()
        db.share_network_get_all_by_security_service = mock.Mock(
            return_value=[{'share_network': 'fake_share_network'}])
        req = fakes.HTTPRequest.blank('/security_services/1')
        self.assertRaises(webob.exc.HTTPForbidden, self.controller.delete,
                          req, 1)
    # update(): name/description are always editable; other keys are only
    # allowed while no share server exists for the related networks.
    def test_security_service_update_name(self):
        new = self.ss_active_directory.copy()
        updated = self.ss_active_directory.copy()
        updated['name'] = 'new'
        self.mock_object(security_service.policy, 'check_policy')
        db.security_service_get = mock.Mock(return_value=new)
        db.security_service_update = mock.Mock(return_value=updated)
        db.share_network_get_all_by_security_service = mock.Mock(
            return_value=[{
                'id': 'fake_id',
                'share_servers': 'fake_share_server'
            }])
        body = {"security_service": {"name": "new"}}
        req = fakes.HTTPRequest.blank('/security_service/1')
        res_dict = self.controller.update(req, 1, body)['security_service']
        self.assertEqual(updated['name'], res_dict['name'])
        db.share_network_get_all_by_security_service.assert_called_once_with(
            req.environ['manila.context'], 1)
        self.assertEqual(2, security_service.policy.check_policy.call_count)
        security_service.policy.check_policy.assert_has_calls([
            mock.call(req.environ['manila.context'],
                      security_service.RESOURCE_NAME, 'update', new)
        ])
    def test_security_service_update_description(self):
        new = self.ss_active_directory.copy()
        updated = self.ss_active_directory.copy()
        updated['description'] = 'new'
        self.mock_object(security_service.policy, 'check_policy')
        db.security_service_get = mock.Mock(return_value=new)
        db.security_service_update = mock.Mock(return_value=updated)
        db.share_network_get_all_by_security_service = mock.Mock(
            return_value=[{
                'id': 'fake_id',
                'share_servers': 'fake_share_server'
            }])
        body = {"security_service": {"description": "new"}}
        req = fakes.HTTPRequest.blank('/security_service/1')
        res_dict = self.controller.update(req, 1, body)['security_service']
        self.assertEqual(updated['description'], res_dict['description'])
        db.share_network_get_all_by_security_service.assert_called_once_with(
            req.environ['manila.context'], 1)
        self.assertEqual(2, security_service.policy.check_policy.call_count)
        security_service.policy.check_policy.assert_has_calls([
            mock.call(req.environ['manila.context'],
                      security_service.RESOURCE_NAME, 'update', new)
        ])
    @mock.patch.object(db, 'security_service_get', mock.Mock())
    @mock.patch.object(db, 'share_network_get_all_by_security_service',
                       mock.Mock())
    def test_security_service_update_invalid_keys_sh_server_exists(self):
        # Changing restricted keys while a share server exists -> HTTP 403.
        self.mock_object(security_service.policy, 'check_policy')
        db.share_network_get_all_by_security_service.return_value = [
            {'id': 'fake_id', 'share_servers': 'fake_share_servers'},
        ]
        db.security_service_get.return_value = self.ss_active_directory.copy()
        body = {'security_service': {'user_id': 'new_user'}}
        req = fakes.HTTPRequest.blank('/security_services/1')
        self.assertRaises(webob.exc.HTTPForbidden, self.controller.update,
                          req, 1, body)
        db.security_service_get.assert_called_once_with(
            req.environ['manila.context'], 1)
        db.share_network_get_all_by_security_service.assert_called_once_with(
            req.environ['manila.context'], 1)
        self.assertEqual(1, security_service.policy.check_policy.call_count)
        security_service.policy.check_policy.assert_has_calls([
            mock.call(req.environ['manila.context'],
                      security_service.RESOURCE_NAME, 'update',
                      db.security_service_get.return_value)
        ])
    @mock.patch.object(db, 'security_service_get', mock.Mock())
    @mock.patch.object(db, 'security_service_update', mock.Mock())
    @mock.patch.object(db, 'share_network_get_all_by_security_service',
                       mock.Mock())
    def test_security_service_update_valid_keys_sh_server_exists(self):
        # Name/description edits are fine even with a share server present.
        self.mock_object(security_service.policy, 'check_policy')
        db.share_network_get_all_by_security_service.return_value = [
            {'id': 'fake_id', 'share_servers': 'fake_share_servers'},
        ]
        old = self.ss_active_directory.copy()
        updated = self.ss_active_directory.copy()
        updated['name'] = 'new name'
        updated['description'] = 'new description'
        db.security_service_get.return_value = old
        db.security_service_update.return_value = updated
        body = {
            'security_service': {
                'description': 'new description',
                'name': 'new name',
            },
        }
        req = fakes.HTTPRequest.blank('/security_services/1')
        res_dict = self.controller.update(req, 1, body)['security_service']
        self.assertEqual(updated['description'], res_dict['description'])
        self.assertEqual(updated['name'], res_dict['name'])
        db.security_service_get.assert_called_once_with(
            req.environ['manila.context'], 1)
        db.share_network_get_all_by_security_service.assert_called_once_with(
            req.environ['manila.context'], 1)
        db.security_service_update.assert_called_once_with(
            req.environ['manila.context'], 1, body['security_service'])
        self.assertEqual(2, security_service.policy.check_policy.call_count)
        security_service.policy.check_policy.assert_has_calls([
            mock.call(req.environ['manila.context'],
                      security_service.RESOURCE_NAME, 'update', old)
        ])
    # index(): listing, filtering by share network, all_tenants handling
    # (admin vs non-admin), and search-option filtering.
    def test_security_service_list(self):
        db.security_service_get_all_by_project = mock.Mock(
            return_value=[self.ss_active_directory.copy()])
        req = fakes.HTTPRequest.blank('/security_services')
        res_dict = self.controller.index(req)
        self.assertEqual(self.security_service_list_expected_resp, res_dict)
    @mock.patch.object(db, 'share_network_get', mock.Mock())
    def test_security_service_list_filter_by_sn(self):
        sn = {
            'id': 'fake_sn_id',
            'security_services': [self.ss_active_directory, ],
        }
        db.share_network_get.return_value = sn
        req = fakes.HTTPRequest.blank(
            '/security-services?share_network_id=fake_sn_id')
        res_dict = self.controller.index(req)
        self.assertEqual(self.security_service_list_expected_resp, res_dict)
        db.share_network_get.assert_called_once_with(
            req.environ['manila.context'],
            sn['id'])
    @mock.patch.object(db, 'security_service_get_all', mock.Mock())
    def test_security_services_list_all_tenants_admin_context(self):
        # Admins may list across tenants; real policy checks are re-enabled.
        self.check_policy_patcher.stop()
        db.security_service_get_all.return_value = [
            self.ss_active_directory,
            self.ss_ldap,
        ]
        req = fakes.HTTPRequest.blank(
            '/security-services?all_tenants=1&name=fake-name',
            use_admin_context=True)
        res_dict = self.controller.index(req)
        self.assertEqual(self.security_service_list_expected_resp, res_dict)
        db.security_service_get_all.assert_called_once_with(
            req.environ['manila.context'])
    @mock.patch.object(db, 'security_service_get_all', mock.Mock())
    def test_security_services_list_all_tenants_non_admin_context(self):
        # Non-admins asking for all_tenants must be rejected by policy.
        self.check_policy_patcher.stop()
        db.security_service_get_all.return_value = [
            self.ss_active_directory,
            self.ss_ldap,
        ]
        req = fakes.HTTPRequest.blank(
            '/security-services?all_tenants=1')
        self.assertRaises(exception.PolicyNotAuthorized, self.controller.index,
                          req)
        self.assertFalse(db.security_service_get_all.called)
    @mock.patch.object(db, 'security_service_get_all_by_project', mock.Mock())
    def test_security_services_list_admin_context_invalid_opts(self):
        # Unknown filter options match nothing.
        db.security_service_get_all_by_project.return_value = [
            self.ss_active_directory,
            self.ss_ldap,
        ]
        req = fakes.HTTPRequest.blank(
            '/security-services?fake_opt=fake_value',
            use_admin_context=True)
        res_dict = self.controller.index(req)
        self.assertEqual({'security_services': []}, res_dict)
        db.security_service_get_all_by_project.assert_called_once_with(
            req.environ['manila.context'],
            req.environ['manila.context'].project_id)
    @mock.patch.object(db, 'security_service_get_all_by_project', mock.Mock())
    def test_security_service_list_all_filter_opts_separately(self):
        # Each valid search option on its own selects the matching service.
        db.security_service_get_all_by_project.return_value = [
            self.ss_active_directory,
            self.ss_ldap,
        ]
        for opt, val in self.valid_search_opts.items():
            for use_admin_context in [True, False]:
                req = fakes.HTTPRequest.blank(
                    '/security-services?' + opt + '=' + val,
                    use_admin_context=use_admin_context)
                res_dict = self.controller.index(req)
                self.assertEqual(self.security_service_list_expected_resp,
                                 res_dict)
                db.security_service_get_all_by_project.assert_called_with(
                    req.environ['manila.context'],
                    req.environ['manila.context'].project_id)
@mock.patch.object(db, 'security_service_get_all_by_project', mock.Mock())
def test_security_service_list_all_filter_opts(self):
db.security_service_get_all_by_project.return_value = [
self.ss_active_directory,
self.ss_ldap,
]
query_string = '/security-services?' + parse.urlencode(sorted(
[(k, v) for (k, v) in list(self.valid_search_opts.items())]))
for use_admin_context in [True, False]:
req = fakes.HTTPRequest.blank(query_string,
use_admin_context=use_admin_context)
res_dict = self.controller.index(req)
self.assertEqual(self.security_service_list_expected_resp,
res_dict)
db.security_service_get_all_by_project.assert_called_with(
req.environ['manila.context'],
req.environ['manila.context'].project_id)
| apache-2.0 |
helldorado/ansible | lib/ansible/modules/remote_management/ucs/ucs_uuid_pool.py | 64 | 7272 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: ucs_uuid_pool
short_description: Configures server UUID pools on Cisco UCS Manager
description:
- Configures server UUID pools and UUID blocks on Cisco UCS Manager.
- Examples can be used with the L(UCS Platform Emulator,https://communities.cisco.com/ucspe).
extends_documentation_fragment: ucs
options:
state:
description:
- If C(present), will verify UUID pool is present and will create if needed.
- If C(absent), will verify UUID pool is absent and will delete if needed.
choices: [present, absent]
default: present
name:
description:
- The name of the UUID pool.
- This name can be between 1 and 32 alphanumeric characters.
- "You cannot use spaces or any special characters other than - (hyphen), \"_\" (underscore), : (colon), and . (period)."
- You cannot change this name after the UUID pool is created.
required: yes
description:
description:
- "The user-defined description of the UUID pool."
- Enter up to 256 characters.
- "You can use any characters or spaces except the following:"
- "` (accent mark), \ (backslash), ^ (carat), \" (double quote), = (equal sign), > (greater than), < (less than), or ' (single quote)."
aliases: [ descr ]
prefix:
description:
- UUID prefix used for the range of server UUIDs.
- "If no value is provided, the system derived prefix will be used (equivalent to selecting 'derived' option in UI)."
- "If the user provides a value, the user provided prefix will be used (equivalent to selecting 'other' option in UI)."
- A user provided value should be in the format XXXXXXXX-XXXX-XXXX.
order:
description:
- The Assignment Order field.
- "This can be one of the following:"
- "default - Cisco UCS Manager selects a random identity from the pool."
- "sequential - Cisco UCS Manager selects the lowest available identity from the pool."
choices: [default, sequential]
default: default
first_uuid:
description:
- The first UUID in the block of UUIDs.
- This is the From field in the UCS Manager UUID Blocks menu.
last_uuid:
description:
- The last UUID in the block of UUIDs.
- This is the To field in the UCS Manager Add UUID Blocks menu.
org_dn:
description:
- The distinguished name (dn) of the organization where the resource is assigned.
default: org-root
requirements:
- ucsmsdk
author:
- David Soper (@dsoper2)
- CiscoUcs (@CiscoUcs)
version_added: '2.7'
'''
EXAMPLES = r'''
- name: Configure UUID address pool
ucs_uuid_pool:
hostname: 172.16.143.150
username: admin
password: password
name: UUID-Pool
order: sequential
first_uuid: 0000-000000000001
last_uuid: 0000-000000000078
- name: Remove UUID address pool
ucs_uuid_pool:
hostname: 172.16.143.150
username: admin
password: password
name: UUID-Pool
state: absent
'''
RETURN = r'''
#
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.remote_management.ucs import UCSModule, ucs_argument_spec
def main():
    """Ansible entry point.

    Ensures the UUID pool (and, optionally, one block of UUIDs inside it)
    described by the module parameters is present on or absent from UCS
    Manager, reporting ``changed`` accordingly and honoring check mode.
    """
    argument_spec = ucs_argument_spec
    argument_spec.update(
        org_dn=dict(type='str', default='org-root'),
        name=dict(type='str', required=True),
        description=dict(type='str', aliases=['descr'], default=''),
        order=dict(type='str', default='default', choices=['default', 'sequential']),
        prefix=dict(type='str', default=''),
        first_uuid=dict(type='str'),
        last_uuid=dict(type='str'),
        state=dict(default='present', choices=['present', 'absent'], type='str'),
    )
    module = AnsibleModule(
        argument_spec,
        supports_check_mode=True,
    )
    # UCSModule verifies ucsmsdk is present and exits on failure. Imports are below ucs object creation.
    ucs = UCSModule(module)
    err = False
    from ucsmsdk.mometa.uuidpool.UuidpoolPool import UuidpoolPool
    from ucsmsdk.mometa.uuidpool.UuidpoolBlock import UuidpoolBlock
    ucs.result['changed'] = False
    try:
        mo_exists = False
        props_match = False
        # dn is <org_dn>/uuid-pool-<name>
        dn = module.params['org_dn'] + '/uuid-pool-' + module.params['name']
        mo = ucs.login_handle.query_dn(dn)
        if mo:
            mo_exists = True
        if module.params['state'] == 'absent':
            # Removal is idempotent: only delete (and report changed) when
            # the pool actually exists.
            if mo_exists:
                if not module.check_mode:
                    ucs.login_handle.remove_mo(mo)
                    ucs.login_handle.commit()
                ucs.result['changed'] = True
        else:
            if mo_exists:
                # check top-level mo props
                kwargs = dict(assignment_order=module.params['order'])
                kwargs['descr'] = module.params['description']
                if module.params['prefix']:
                    kwargs['prefix'] = module.params['prefix']
                if mo.check_prop_match(**kwargs):
                    # top-level props match, check next level mo/props
                    if module.params['last_uuid'] and module.params['first_uuid']:
                        # uuid address block specified, check properties
                        block_dn = dn + '/block-from-' + module.params['first_uuid'].upper() + '-to-' + module.params['last_uuid'].upper()
                        mo_1 = ucs.login_handle.query_dn(block_dn)
                        if mo_1:
                            props_match = True
                    else:
                        # no UUID address block specified, but top-level props matched
                        props_match = True
            if not props_match:
                if not module.check_mode:
                    # create if mo does not already exist
                    if not module.params['prefix']:
                        # Empty prefix means "derived" in the UCSM UI.
                        module.params['prefix'] = 'derived'
                    mo = UuidpoolPool(
                        parent_mo_or_dn=module.params['org_dn'],
                        name=module.params['name'],
                        descr=module.params['description'],
                        assignment_order=module.params['order'],
                        prefix=module.params['prefix']
                    )
                    if module.params['last_uuid'] and module.params['first_uuid']:
                        mo_1 = UuidpoolBlock(
                            parent_mo_or_dn=mo,
                            to=module.params['last_uuid'],
                            r_from=module.params['first_uuid'],
                        )
                    # add_mo with modify_present=True updates an existing mo
                    # in place instead of failing.
                    ucs.login_handle.add_mo(mo, True)
                    ucs.login_handle.commit()
                ucs.result['changed'] = True
    except Exception as e:
        # Surface any SDK/connection error through the module result.
        err = True
        ucs.result['msg'] = "setup error: %s " % str(e)
    if err:
        module.fail_json(**ucs.result)
    module.exit_json(**ucs.result)
# Allow direct execution in addition to invocation by Ansible.
if __name__ == '__main__':
    main()
| gpl-3.0 |
ta2-1/pootle | tests/commands/find_duplicate_emails.py | 8 | 1602 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import pytest
from django.core.management import call_command
@pytest.mark.cmd
@pytest.mark.django_db
def test_find_duplicate_emails_nodups(capfd, no_extra_users):
    """With only the standard users present, no duplicates are reported."""
    call_command('find_duplicate_emails')
    captured_out, _ = capfd.readouterr()
    assert "There are no accounts with duplicate emails" in captured_out
@pytest.mark.cmd
@pytest.mark.django_db
def test_find_duplicate_emails_noemails(capfd, member, member2):
    """Users without an email address are listed as having none set."""
    call_command('find_duplicate_emails')
    captured_out, _ = capfd.readouterr()
    for expected in ("The following users have no email set",
                     "member ", "member2"):
        assert expected in captured_out
@pytest.mark.cmd
@pytest.mark.django_db
def test_find_duplicate_emails_withdups(capfd, member_with_email,
                                        member2_with_email):
    """Two accounts sharing one address are both reported as duplicates."""
    member2_with_email.email = member_with_email.email
    member2_with_email.save()
    call_command('find_duplicate_emails')
    captured_out, _ = capfd.readouterr()
    assert ("The following users have the email: member_with_email@this.test"
            in captured_out)
    assert "member_with_email" in captured_out
    assert "member2_with_email" in captured_out
| gpl-3.0 |
florianuhlemann/p2pool_cypherfunk | p2pool/util/forest.py | 281 | 13557 | '''
forest data structure
'''
import itertools
from p2pool.util import skiplist, variable
class TrackerSkipList(skiplist.SkipList):
    """Skip list whose links follow a Tracker's parent (tail) pointers.

    Entries are forgotten automatically when the tracked item is removed;
    watch_weakref avoids the tracker keeping this skip list alive.
    """
    def __init__(self, tracker):
        skiplist.SkipList.__init__(self)
        self.tracker = tracker
        self.tracker.removed.watch_weakref(self, lambda self, item: self.forget_item(item.hash))
    def previous(self, element):
        # Hash of the item's parent, as defined by the tracker's delta type.
        return self.tracker._delta_type.from_element(self.tracker.items[element]).tail
class DistanceSkipList(TrackerSkipList):
    """Answers "hash of the n-th ancestor of X" in O(log n).

    A delta is the tuple (from_hash, distance, to_hash); composing adjacent
    deltas sums the distances along the parent chain. (Python 2 only:
    methods use tuple-parameter unpacking.)
    """
    def get_delta(self, element):
        return element, 1, self.previous(element)
    def combine_deltas(self, (from_hash1, dist1, to_hash1), (from_hash2, dist2, to_hash2)):
        if to_hash1 != from_hash2:
            raise AssertionError()
        return from_hash1, dist1 + dist2, to_hash2
    def initial_solution(self, start, (n,)):
        return 0, start
    def apply_delta(self, (dist1, to_hash1), (from_hash2, dist2, to_hash2), (n,)):
        if to_hash1 != from_hash2:
            raise AssertionError()
        return dist1 + dist2, to_hash2
    def judge(self, (dist, hash), (n,)):
        # Ternary verdict: 1 = walked too far, 0 = exactly n ancestors back,
        # -1 = keep walking.
        if dist > n:
            return 1
        elif dist == n:
            return 0
        else:
            return -1
    def finalize(self, (dist, hash), (n,)):
        assert dist == n
        return hash
def get_attributedelta_type(attrs): # attrs: {name: func}
    """Build a delta class accumulating the given per-item attributes.

    *attrs* maps an attribute name to a function extracting that value
    from a tracked item; deltas over chains of items add element-wise.
    """
    class ProtoAttributeDelta(object):
        __slots__ = ['head', 'tail'] + attrs.keys()
        @classmethod
        def get_none(cls, element_id):
            # Identity delta: head == tail and every attribute is zero.
            return cls(element_id, element_id, **dict((k, 0) for k in attrs))
        @classmethod
        def from_element(cls, item):
            # Delta covering exactly one item (its hash down to its parent).
            return cls(item.hash, item.previous_hash, **dict((k, v(item)) for k, v in attrs.iteritems()))
        @staticmethod
        def get_head(item):
            return item.hash
        @staticmethod
        def get_tail(item):
            return item.previous_hash
        def __init__(self, head, tail, **kwargs):
            self.head, self.tail = head, tail
            for k, v in kwargs.iteritems():
                setattr(self, k, v)
        def __add__(self, other):
            # Concatenate two adjacent deltas; self's tail must meet
            # other's head.
            assert self.tail == other.head
            return self.__class__(self.head, other.tail, **dict((k, getattr(self, k) + getattr(other, k)) for k in attrs))
        def __sub__(self, other):
            # Remove a shared prefix (same head) or suffix (same tail).
            if self.head == other.head:
                return self.__class__(other.tail, self.tail, **dict((k, getattr(self, k) - getattr(other, k)) for k in attrs))
            elif self.tail == other.tail:
                return self.__class__(self.head, other.head, **dict((k, getattr(self, k) - getattr(other, k)) for k in attrs))
            else:
                raise AssertionError()
        def __repr__(self):
            return '%s(%r, %r%s)' % (self.__class__, self.head, self.tail, ''.join(', %s=%r' % (k, getattr(self, k)) for k in attrs))
    ProtoAttributeDelta.attrs = attrs
    return ProtoAttributeDelta
# Default delta type: tracks only chain height (each item contributes 1).
AttributeDelta = get_attributedelta_type(dict(
    height=lambda item: 1,
))
class TrackerView(object):
    """Memoized delta queries (height, work, last ancestor) over a Tracker.

    Caches path-compressed deltas from items down to the end of their
    chains. A "ref" groups all cached deltas sharing one common tail, so a
    whole group can be rebased in O(1) when the tracker changes shape.
    Cache entries are invalidated incrementally via the tracker's
    remove_special/remove_special2/removed events.
    """
    def __init__(self, tracker, delta_type):
        self._tracker = tracker
        self._delta_type = delta_type
        self._deltas = {} # item_hash -> delta, ref
        self._reverse_deltas = {} # ref -> set of item_hashes
        self._ref_generator = itertools.count()
        self._delta_refs = {} # ref -> delta
        self._reverse_delta_refs = {} # delta.tail -> ref
        self._tracker.remove_special.watch_weakref(self, lambda self, item: self._handle_remove_special(item))
        self._tracker.remove_special2.watch_weakref(self, lambda self, item: self._handle_remove_special2(item))
        self._tracker.removed.watch_weakref(self, lambda self, item: self._handle_removed(item))
    def _handle_remove_special(self, item):
        # A chain tail was removed: rebase the ref that pointed at it so it
        # points at the removed item's parent instead.
        delta = self._delta_type.from_element(item)
        if delta.tail not in self._reverse_delta_refs:
            return
        # move delta refs referencing children down to this, so they can be moved up in one step
        for x in list(self._reverse_deltas.get(self._reverse_delta_refs.get(delta.head, object()), set())):
            self.get_last(x)
        assert delta.head not in self._reverse_delta_refs, list(self._reverse_deltas.get(self._reverse_delta_refs.get(delta.head, object()), set()))
        if delta.tail not in self._reverse_delta_refs:
            return
        # move ref pointing to this up
        ref = self._reverse_delta_refs[delta.tail]
        cur_delta = self._delta_refs[ref]
        assert cur_delta.tail == delta.tail
        self._delta_refs[ref] = cur_delta - delta
        assert self._delta_refs[ref].tail == delta.head
        del self._reverse_delta_refs[delta.tail]
        self._reverse_delta_refs[delta.head] = ref
    def _handle_remove_special2(self, item):
        # A tail with multiple children was removed: the ref anchored at it
        # can no longer be rebased in one step, so drop the whole group.
        delta = self._delta_type.from_element(item)
        if delta.tail not in self._reverse_delta_refs:
            return
        ref = self._reverse_delta_refs.pop(delta.tail)
        del self._delta_refs[ref]
        for x in self._reverse_deltas.pop(ref):
            del self._deltas[x]
    def _handle_removed(self, item):
        delta = self._delta_type.from_element(item)
        # delete delta entry and ref if it is empty
        if delta.head in self._deltas:
            delta1, ref = self._deltas.pop(delta.head)
            self._reverse_deltas[ref].remove(delta.head)
            if not self._reverse_deltas[ref]:
                del self._reverse_deltas[ref]
                delta2 = self._delta_refs.pop(ref)
                del self._reverse_delta_refs[delta2.tail]
    def get_height(self, item_hash):
        # Number of items between item_hash and the end of its chain.
        return self.get_delta_to_last(item_hash).height
    def get_work(self, item_hash):
        # Requires a delta type that defines a 'work' attribute.
        return self.get_delta_to_last(item_hash).work
    def get_last(self, item_hash):
        # Hash just past the earliest known ancestor of item_hash.
        return self.get_delta_to_last(item_hash).tail
    def get_height_and_last(self, item_hash):
        delta = self.get_delta_to_last(item_hash)
        return delta.height, delta.tail
    def _get_delta(self, item_hash):
        # Cached delta (single-item delta when nothing is memoized yet).
        if item_hash in self._deltas:
            delta1, ref = self._deltas[item_hash]
            delta2 = self._delta_refs[ref]
            res = delta1 + delta2
        else:
            res = self._delta_type.from_element(self._tracker.items[item_hash])
        assert res.head == item_hash
        return res
    def _set_delta(self, item_hash, delta):
        # Memoize item_hash's delta relative to the ref anchored at
        # delta.tail, creating that ref if needed and garbage-collecting
        # the previous ref if this was its last member.
        other_item_hash = delta.tail
        if other_item_hash not in self._reverse_delta_refs:
            ref = self._ref_generator.next()
            assert ref not in self._delta_refs
            self._delta_refs[ref] = self._delta_type.get_none(other_item_hash)
            self._reverse_delta_refs[other_item_hash] = ref
            del ref
        ref = self._reverse_delta_refs[other_item_hash]
        ref_delta = self._delta_refs[ref]
        assert ref_delta.tail == other_item_hash
        if item_hash in self._deltas:
            prev_ref = self._deltas[item_hash][1]
            self._reverse_deltas[prev_ref].remove(item_hash)
            if not self._reverse_deltas[prev_ref] and prev_ref != ref:
                self._reverse_deltas.pop(prev_ref)
                x = self._delta_refs.pop(prev_ref)
                self._reverse_delta_refs.pop(x.tail)
        self._deltas[item_hash] = delta - ref_delta, ref
        self._reverse_deltas.setdefault(ref, set()).add(item_hash)
    def get_delta_to_last(self, item_hash):
        # Walk to the end of the chain, then memoize the delta for every
        # item visited (path compression).
        assert isinstance(item_hash, (int, long, type(None)))
        delta = self._delta_type.get_none(item_hash)
        updates = []
        while delta.tail in self._tracker.items:
            updates.append((delta.tail, delta))
            this_delta = self._get_delta(delta.tail)
            delta += this_delta
        for update_hash, delta_then in updates:
            self._set_delta(update_hash, delta - delta_then)
        return delta
    def get_delta(self, item, ancestor):
        # Delta from item down to (but not including) ancestor.
        assert self._tracker.is_child_of(ancestor, item)
        return self.get_delta_to_last(item) - self.get_delta_to_last(ancestor)
class Tracker(object):
    """Forest of items linked by previous-hash (parent) pointers.

    Maintains the chain heads and tails and emits events
    (added/removed/remove_special/remove_special2) so views and skip lists
    can keep their caches consistent. Delta-based queries such as
    get_height/get_last are delegated to a default TrackerView.
    """
    def __init__(self, items=[], delta_type=AttributeDelta):
        self.items = {} # hash -> item
        self.reverse = {} # delta.tail -> set of item_hashes
        self.heads = {} # head hash -> tail_hash
        self.tails = {} # tail hash -> set of head hashes
        self.added = variable.Event()
        self.remove_special = variable.Event()
        self.remove_special2 = variable.Event()
        self.removed = variable.Event()
        self.get_nth_parent_hash = DistanceSkipList(self)
        self._delta_type = delta_type
        self._default_view = TrackerView(self, delta_type)
        for item in items:
            self.add(item)
    def __getattr__(self, name):
        # Delegate unknown attributes (get_height, get_last, ...) to the
        # default view, caching the bound method on self for next time.
        attr = getattr(self._default_view, name)
        setattr(self, name, attr)
        return attr
    def add(self, item):
        """Insert an item, merging/extending head and tail chains."""
        assert not isinstance(item, (int, long, type(None)))
        delta = self._delta_type.from_element(item)
        if delta.head in self.items:
            raise ValueError('item already present')
        if delta.head in self.tails:
            heads = self.tails.pop(delta.head)
        else:
            heads = set([delta.head])
        if delta.tail in self.heads:
            tail = self.heads.pop(delta.tail)
        else:
            tail = self.get_last(delta.tail)
        self.items[delta.head] = item
        self.reverse.setdefault(delta.tail, set()).add(delta.head)
        self.tails.setdefault(tail, set()).update(heads)
        if delta.tail in self.tails[tail]:
            self.tails[tail].remove(delta.tail)
        for head in heads:
            self.heads[head] = tail
        self.added.happened(item)
    def remove(self, item_hash):
        """Remove an item, splitting or shrinking chains as needed."""
        assert isinstance(item_hash, (int, long, type(None)))
        if item_hash not in self.items:
            raise KeyError()
        item = self.items[item_hash]
        del item_hash
        delta = self._delta_type.from_element(item)
        children = self.reverse.get(delta.head, set())
        if delta.head in self.heads and delta.tail in self.tails:
            # Removing a single-item chain.
            tail = self.heads.pop(delta.head)
            self.tails[tail].remove(delta.head)
            if not self.tails[delta.tail]:
                self.tails.pop(delta.tail)
        elif delta.head in self.heads:
            # Removing the head of a longer chain.
            tail = self.heads.pop(delta.head)
            self.tails[tail].remove(delta.head)
            if self.reverse[delta.tail] != set([delta.head]):
                pass # has sibling
            else:
                self.tails[tail].add(delta.tail)
                self.heads[delta.tail] = tail
        elif delta.tail in self.tails and len(self.reverse[delta.tail]) <= 1:
            # Removing the tail of a chain with a single child; caches can
            # be rebased in one step (remove_special).
            heads = self.tails.pop(delta.tail)
            for head in heads:
                self.heads[head] = delta.head
            self.tails[delta.head] = set(heads)
            self.remove_special.happened(item)
        elif delta.tail in self.tails and len(self.reverse[delta.tail]) > 1:
            # Removing a tail with multiple children; caches anchored here
            # must be dropped (remove_special2).
            heads = [x for x in self.tails[delta.tail] if self.is_child_of(delta.head, x)]
            self.tails[delta.tail] -= set(heads)
            if not self.tails[delta.tail]:
                self.tails.pop(delta.tail)
            for head in heads:
                self.heads[head] = delta.head
            assert delta.head not in self.tails
            self.tails[delta.head] = set(heads)
            self.remove_special2.happened(item)
        else:
            # Removing from the middle of a chain is not supported.
            raise NotImplementedError()
        self.items.pop(delta.head)
        self.reverse[delta.tail].remove(delta.head)
        if not self.reverse[delta.tail]:
            self.reverse.pop(delta.tail)
        self.removed.happened(item)
    def get_chain(self, start_hash, length):
        # Yield `length` items walking from start_hash toward the tail.
        assert length <= self.get_height(start_hash)
        for i in xrange(length):
            item = self.items[start_hash]
            yield item
            start_hash = self._delta_type.get_tail(item)
    def is_child_of(self, item_hash, possible_child_hash):
        # True/False when both share a chain; None when unrelated.
        height, last = self.get_height_and_last(item_hash)
        child_height, child_last = self.get_height_and_last(possible_child_hash)
        if child_last != last:
            return None # not connected, so can't be determined
        height_up = child_height - height
        return height_up >= 0 and self.get_nth_parent_hash(possible_child_hash, height_up) == item_hash
class SubsetTracker(Tracker):
    """Tracker whose items must all be present in another ("superset")
    tracker; the superset's distance skip list is reused for ancestor
    lookups so the two trackers stay consistent.
    """
    def __init__(self, subset_of, **kwargs):
        Tracker.__init__(self, **kwargs)
        self.get_nth_parent_hash = subset_of.get_nth_parent_hash # overwrites Tracker.__init__'s
        self._subset_of = subset_of
    def add(self, item):
        if self._subset_of is not None:
            assert self._delta_type.get_head(item) in self._subset_of.items
        Tracker.add(self, item)
    def remove(self, item_hash):
        if self._subset_of is not None:
            assert item_hash in self._subset_of.items
        Tracker.remove(self, item_hash)
| gpl-3.0 |
insiderr/insiderr-app | ios-patches/basemodules/twisted/internet/utils.py | 31 | 8217 | # -*- test-case-name: twisted.test.test_iutils -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Utility methods.
"""
from __future__ import division, absolute_import
import sys, warnings
from functools import wraps
from twisted.internet import protocol, defer
from twisted.python import failure
from twisted.python.compat import _PY3, reraise
if not _PY3:
try:
import cStringIO as StringIO
except ImportError:
import StringIO
def _callProtocolWithDeferred(protocol, executable, args, env, path, reactor=None):
    """
    Spawn C{executable} under an instance of C{protocol} and return the
    L{Deferred} handed to that protocol instance, which the protocol fires
    when it has a result.
    """
    if reactor is None:
        from twisted.internet import reactor
    result = defer.Deferred()
    processProtocol = protocol(result)
    argv = (executable,) + tuple(args)
    reactor.spawnProcess(processProtocol, executable, argv, env, path)
    return result
class _UnexpectedErrorOutput(IOError):
"""
Standard error data was received where it was not expected. This is a
subclass of L{IOError} to preserve backward compatibility with the previous
error behavior of L{getProcessOutput}.
@ivar processEnded: A L{Deferred} which will fire when the process which
produced the data on stderr has ended (exited and all file descriptors
closed).
"""
def __init__(self, text, processEnded):
IOError.__init__(self, "got stderr: %r" % (text,))
self.processEnded = processEnded
class _BackRelay(protocol.ProcessProtocol):
    """
    Trivial protocol for communicating with a process and turning its output
    into the result of a L{Deferred}.

    @ivar deferred: A L{Deferred} which will be called back with all of stdout
        and, if C{errortoo} is true, all of stderr as well (mixed together in
        one string). If C{errortoo} is false and any bytes are received over
        stderr, this will fire with an L{_UnexpectedErrorOutput} instance and
        the attribute will be set to C{None}.

    @ivar onProcessEnded: If C{errortoo} is false and bytes are received over
        stderr, this attribute will refer to a L{Deferred} which will be called
        back when the process ends.  This C{Deferred} is also associated with
        the L{_UnexpectedErrorOutput} which C{deferred} fires with earlier in
        this case so that users can determine when the process has actually
        ended, in addition to knowing when bytes have been received via stderr.
    """
    def __init__(self, deferred, errortoo=0):
        self.deferred = deferred
        self.s = StringIO.StringIO()
        # Choose the stderr strategy once up front instead of branching on
        # every chunk of received data.
        if errortoo:
            self.errReceived = self.errReceivedIsGood
        else:
            self.errReceived = self.errReceivedIsBad
    def errReceivedIsBad(self, text):
        # First stderr chunk errbacks the main deferred; later chunks and
        # process exit are reported through onProcessEnded instead.
        if self.deferred is not None:
            self.onProcessEnded = defer.Deferred()
            err = _UnexpectedErrorOutput(text, self.onProcessEnded)
            self.deferred.errback(failure.Failure(err))
            # Null out the deferred so processEnded knows the error has
            # already been delivered.
            self.deferred = None
            self.transport.loseConnection()
    def errReceivedIsGood(self, text):
        # errortoo mode: stderr is mixed into the same buffer as stdout.
        self.s.write(text)
    def outReceived(self, text):
        self.s.write(text)
    def processEnded(self, reason):
        if self.deferred is not None:
            self.deferred.callback(self.s.getvalue())
        elif self.onProcessEnded is not None:
            self.onProcessEnded.errback(reason)
def getProcessOutput(executable, args=(), env={}, path=None, reactor=None,
                     errortoo=0):
    """
    Spawn a process and return a L{Deferred} firing with its output.

    @param executable: full path of the program to run.

    @param args: command line arguments for the process, B{not} including
        the program name itself; a sequence of strings.

    @param env: environment variables for the process; a dictionary of
        strings.

    @param path: working directory for the process; defaults to the
        current directory.

    @param reactor: the reactor used to spawn the process; defaults to the
        global reactor.

    @param errortoo: If true, mix stderr into the result.  If false, any
        stderr output errbacks the returned L{Deferred} with an L{IOError}
        whose C{processEnded} attribute is a L{Deferred} which fires when
        the executed process ends.
    """
    def makeProtocol(d):
        return _BackRelay(d, errortoo=errortoo)
    return _callProtocolWithDeferred(makeProtocol, executable, args, env,
                                     path, reactor)
class _ValueGetter(protocol.ProcessProtocol):
    """Process protocol which fires its Deferred with the exit code only,
    ignoring all output."""
    def __init__(self, deferred):
        self.deferred = deferred
    def processEnded(self, reason):
        # reason.value carries the process termination info, including
        # exitCode.
        self.deferred.callback(reason.value.exitCode)
def getProcessValue(executable, args=(), env={}, path=None, reactor=None):
    """
    Spawn a process and return a L{Deferred} firing with its exit code.
    """
    return _callProtocolWithDeferred(
        _ValueGetter, executable, args, env, path, reactor)
class _EverythingGetter(protocol.ProcessProtocol):
    """
    Collect stdout and stderr separately; on exit fire the Deferred with
    C{(out, err, exitCode)}, or errback with C{(out, err, signal)} when the
    process was killed by a signal.
    """
    def __init__(self, deferred):
        self.deferred = deferred
        self.outBuf = StringIO.StringIO()
        self.errBuf = StringIO.StringIO()
        # Bind the buffer write methods directly as the data callbacks.
        self.outReceived = self.outBuf.write
        self.errReceived = self.errBuf.write
    def processEnded(self, reason):
        out = self.outBuf.getvalue()
        err = self.errBuf.getvalue()
        e = reason.value
        code = e.exitCode
        if e.signal:
            self.deferred.errback((out, err, e.signal))
        else:
            self.deferred.callback((out, err, code))
def getProcessOutputAndValue(executable, args=(), env={}, path=None,
                             reactor=None):
    """
    Spawn a process and return a L{Deferred} firing with
    C{(out, err, code)}: the collected stdout, stderr, and exit code.

    If the process dies from a signal, the Deferred instead errbacks with
    the output collected so far and the signal number, as
    C{(out, err, signalNum)}.
    """
    return _callProtocolWithDeferred(
        _EverythingGetter, executable, args, env, path, reactor)
def _resetWarningFilters(passthrough, addedFilters):
    """
    Remove each entry of C{addedFilters} from the global warning filter
    list, ignoring entries already gone, then return C{passthrough}
    unchanged (so this can be chained as a Deferred callback).
    """
    for addedFilter in addedFilters:
        try:
            warnings.filters.remove(addedFilter)
        except ValueError:
            pass
    return passthrough
def runWithWarningsSuppressed(suppressedWarnings, f, *a, **kw):
    """Run the function C{f}, but with some warnings suppressed.

    @param suppressedWarnings: A list of arguments to pass to filterwarnings.
        Must be a sequence of 2-tuples (args, kwargs).

    @param f: A callable, followed by its arguments and keyword arguments

    @return: whatever C{f} returns; if that is a L{Deferred}, the added
        warning filters remain in place until it fires.
    """
    for args, kwargs in suppressedWarnings:
        warnings.filterwarnings(*args, **kwargs)
    # filterwarnings() inserts at the front of the filter list, so the
    # entries just added are the first len(suppressedWarnings) ones.
    addedFilters = warnings.filters[:len(suppressedWarnings)]
    try:
        result = f(*a, **kw)
    except:
        # Remove the filters before re-raising with the original traceback.
        exc_info = sys.exc_info()
        _resetWarningFilters(None, addedFilters)
        reraise(exc_info[1], exc_info[2])
    else:
        if isinstance(result, defer.Deferred):
            # Defer the cleanup until the Deferred fires (success or not).
            result.addBoth(_resetWarningFilters, addedFilters)
        else:
            _resetWarningFilters(None, addedFilters)
        return result
def suppressWarnings(f, *suppressedWarnings):
    """
    Decorate C{f} so the given warnings are suppressed for the duration of
    each call and, if C{f} returns a Deferred, until that Deferred fires.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        return runWithWarningsSuppressed(suppressedWarnings, f, *args, **kwargs)
    return wrapper
__all__ = [
    "runWithWarningsSuppressed", "suppressWarnings",
    "getProcessOutput", "getProcessValue", "getProcessOutputAndValue",
    ]
if _PY3:
    # The process-spawning helpers are not available on Python 3 here;
    # trim them from the public API and from the module namespace.
    __all3__ = ["runWithWarningsSuppressed", "suppressWarnings"]
    for name in __all__[:]:
        if name not in __all3__:
            __all__.remove(name)
            del globals()[name]
    del name, __all3__
| gpl-3.0 |
walshjon/openmc | openmc/region.py | 1 | 18303 | from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from collections.abc import Iterable, MutableSequence
from copy import deepcopy
import numpy as np
from openmc.checkvalue import check_type
class Region(metaclass=ABCMeta):
"""Region of space that can be assigned to a cell.
Region is an abstract base class that is inherited by
:class:`openmc.Halfspace`, :class:`openmc.Intersection`,
:class:`openmc.Union`, and :class:`openmc.Complement`. Each of those
respective classes are typically not instantiated directly but rather are
created through operators of the Surface and Region classes.
"""
    def __and__(self, other):
        # A & B: region of points in both A and B.
        return Intersection((self, other))
    def __or__(self, other):
        # A | B: region of points in either A or B.
        return Union((self, other))
    def __invert__(self):
        # ~A: region of points not in A.
        return Complement(self)
    @abstractmethod
    def __contains__(self, point):
        # Subclasses decide whether a Cartesian point lies in the region.
        pass
    @abstractmethod
    def __str__(self):
        # Subclasses render the region as an infix Boolean expression of
        # surface half-spaces (the form parsed by from_expression).
        pass
def __eq__(self, other):
if not isinstance(other, type(self)):
return False
else:
return str(self) == str(other)
    def __ne__(self, other):
        # Explicit inverse of __eq__ (on Python 3 this would also be
        # derived automatically from __eq__).
        return not self == other
def get_surfaces(self, surfaces=None):
"""
Recursively find all the surfaces referenced by a region and return them
Parameters
----------
surfaces: collections.OrderedDict, optional
Dictionary mapping surface IDs to :class:`openmc.Surface` instances
Returns
-------
surfaces: collections.OrderedDict
Dictionary mapping surface IDs to :class:`openmc.Surface` instances
"""
if surfaces is None:
surfaces = OrderedDict()
for region in self:
surfaces = region.get_surfaces(surfaces)
return surfaces
    @staticmethod
    def from_expression(expression, surfaces):
        """Generate a region given an infix expression.

        Parameters
        ----------
        expression : str
            Boolean expression relating surface half-spaces. The possible
            operators are union '|', intersection ' ', and complement '~'. For
            example, '(1 -2) | 3 ~(4 -5)'.
        surfaces : dict
            Dictionary whose keys are surface IDs that appear in the Boolean
            expression and whose values are Surface objects.

        """
        # Strip leading and trailing whitespace
        expression = expression.strip()

        # Convert the string expression into a list of tokens, i.e., operators
        # and surface half-spaces, representing the expression in infix
        # notation.  A signed integer token becomes a half-space via the
        # unary +/- operators on the corresponding Surface object.
        i = 0
        i_start = -1
        tokens = []
        while i < len(expression):
            if expression[i] in '()|~ ':
                # If special character appears immediately after a non-operator,
                # create a token with the appropriate half-space
                if i_start >= 0:
                    j = int(expression[i_start:i])
                    if j < 0:
                        tokens.append(-surfaces[abs(j)])
                    else:
                        tokens.append(+surfaces[abs(j)])

                if expression[i] in '()|~':
                    # For everything other than intersection, add the operator
                    # to the list of tokens
                    tokens.append(expression[i])
                else:
                    # Find next non-space character
                    while expression[i+1] == ' ':
                        i += 1

                    # If previous token is a halfspace or right parenthesis and next token
                    # is not a left parenthesis or union operator, that implies that the
                    # whitespace is to be interpreted as an intersection operator
                    if (i_start >= 0 or tokens[-1] == ')') and \
                       expression[i+1] not in ')|':
                        tokens.append(' ')

                i_start = -1
            else:
                # Check for invalid characters
                if expression[i] not in '-+0123456789':
                    raise SyntaxError("Invalid character '{}' in expression"
                                      .format(expression[i]))

                # If we haven't yet reached the start of a word, start one
                if i_start < 0:
                    i_start = i
            i += 1

        # If we've reached the end and we're still in a word, create a
        # half-space token and add it to the list
        if i_start >= 0:
            j = int(expression[i_start:])
            if j < 0:
                tokens.append(-surfaces[abs(j)])
            else:
                tokens.append(+surfaces[abs(j)])

        # The functions below are used to apply an operator to operands on the
        # output queue during the shunting yard algorithm.
        def can_be_combined(region):
            # Only complements and plain half-spaces may be folded into an
            # existing Intersection/Union node.
            return isinstance(region, Complement) or hasattr(region, 'surface')

        def apply_operator(output, operator):
            r2 = output.pop()
            if operator == ' ':
                r1 = output.pop()
                if isinstance(r1, Intersection):
                    r1 &= r2
                    output.append(r1)
                elif isinstance(r2, Intersection) and can_be_combined(r1):
                    r2.insert(0, r1)
                    output.append(r2)
                else:
                    output.append(r1 & r2)
            elif operator == '|':
                r1 = output.pop()
                if isinstance(r1, Union):
                    r1 |= r2
                    output.append(r1)
                elif isinstance(r2, Union) and can_be_combined(r1):
                    r2.insert(0, r1)
                    output.append(r2)
                else:
                    output.append(r1 | r2)
            elif operator == '~':
                output.append(~r2)

        # The following is an implementation of the shunting yard algorithm to
        # generate an abstract syntax tree for the region expression.
        output = []
        stack = []
        precedence = {'|': 1, ' ': 2, '~': 3}
        associativity = {'|': 'left', ' ': 'left', '~': 'right'}
        for token in tokens:
            if token in (' ', '|', '~'):
                # Normal operators
                while stack:
                    op = stack[-1]
                    if (op not in ('(', ')') and
                        ((associativity[token] == 'right' and
                          precedence[token] < precedence[op]) or
                         (associativity[token] == 'left' and
                          precedence[token] <= precedence[op]))):
                        apply_operator(output, stack.pop())
                    else:
                        break
                stack.append(token)
            elif token == '(':
                # Left parentheses
                stack.append(token)
            elif token == ')':
                # Right parentheses
                while stack[-1] != '(':
                    apply_operator(output, stack.pop())
                    if len(stack) == 0:
                        raise SyntaxError('Mismatched parentheses in '
                                          'region specification.')
                stack.pop()
            else:
                # Surface halfspaces
                output.append(token)
        while stack:
            if stack[-1] in '()':
                raise SyntaxError('Mismatched parentheses in region '
                                  'specification.')
            apply_operator(output, stack.pop())

        # Since we are generating an abstract syntax tree rather than a reverse
        # Polish notation expression, the output queue should have a single item
        # at the end
        return output[0]
    @abstractmethod
    def clone(self, memo=None):
        """Create a copy of this region - each of the surfaces in the
        region's nodes will be cloned and will have new unique IDs.

        Parameters
        ----------
        memo : dict or None
            A nested dictionary of previously cloned objects. This parameter
            is used internally and should not be specified by the user.

        Returns
        -------
        clone : openmc.Region
            The clone of this region

        Raises
        ------
        NotImplementedError
            This method is not implemented for the abstract region class.
        """
        # Concrete subclasses (Intersection, Union, Complement, half-spaces)
        # must override this; the abstract base always raises.
        raise NotImplementedError('The clone method is not implemented for '
                                  'the abstract region class.')
class Intersection(Region, MutableSequence):
    r"""Intersection of two or more regions.

    An Intersection is normally built with the & operator applied to two
    :class:`openmc.Region` instances, e.g.:

    >>> equator = openmc.ZPlane(z0=0.0)
    >>> earth = openmc.Sphere(R=637.1e6)
    >>> northern_hemisphere = -earth & +equator
    >>> type(northern_hemisphere)
    <class 'openmc.region.Intersection'>

    The object behaves like a mutable sequence of its child regions: it can
    be indexed, iterated, and extended via append()/insert().

    Parameters
    ----------
    nodes : iterable of openmc.Region
        Regions to take the intersection of

    Attributes
    ----------
    bounding_box : tuple of numpy.array
        Lower-left and upper-right coordinates of an axis-aligned bounding box

    """

    def __init__(self, nodes):
        self._nodes = list(nodes)

    def __and__(self, other):
        result = Intersection(self)
        result &= other
        return result

    def __iand__(self, other):
        # Flatten nested intersections rather than nesting them.
        if isinstance(other, Intersection):
            self.extend(other)
        else:
            self.append(other)
        return self

    # Mutable sequence protocol, delegated to the underlying list of nodes.
    def __getitem__(self, key):
        return self._nodes[key]

    def __setitem__(self, key, value):
        self._nodes[key] = value

    def __delitem__(self, key):
        del self._nodes[key]

    def __len__(self):
        return len(self._nodes)

    def insert(self, index, value):
        self._nodes.insert(index, value)

    def __contains__(self, point):
        """Check whether a point is contained in the region.

        Parameters
        ----------
        point : 3-tuple of float
            Cartesian coordinates, :math:`(x',y',z')`, of the point

        Returns
        -------
        bool
            Whether the point is in the region

        """
        # A point is inside the intersection only if every node contains it.
        return all(point in node for node in self._nodes)

    def __str__(self):
        return '(' + ' '.join(map(str, self)) + ')'

    @property
    def bounding_box(self):
        # Start from an infinite box and shrink it by each node's box.
        lower_left = np.array([-np.inf, -np.inf, -np.inf])
        upper_right = np.array([np.inf, np.inf, np.inf])
        for node in self:
            node_lower_left, node_upper_right = node.bounding_box
            lower_left[:] = np.maximum(lower_left, node_lower_left)
            upper_right[:] = np.minimum(upper_right, node_upper_right)
        return lower_left, upper_right

    def clone(self, memo=None):
        """Create a copy of this region - each of the surfaces in the
        intersection's nodes will be cloned and will have new unique IDs.

        Parameters
        ----------
        memo : dict or None
            A nested dictionary of previously cloned objects. This parameter
            is used internally and should not be specified by the user.

        Returns
        -------
        clone : openmc.Intersection
            The clone of this intersection

        """
        if memo is None:
            memo = {}
        copied = deepcopy(self)
        copied[:] = [node.clone(memo) for node in self]
        return copied
class Union(Region, MutableSequence):
    r"""Union of two or more regions.

    A Union is normally built with the | operator applied to two
    :class:`openmc.Region` instances, e.g.:

    >>> s1 = openmc.ZPlane(z0=0.0)
    >>> s2 = openmc.Sphere(R=637.1e6)
    >>> type(-s2 | +s1)
    <class 'openmc.region.Union'>

    The object behaves like a mutable sequence of its child regions: it can
    be indexed, iterated, and extended via append()/insert().

    Parameters
    ----------
    nodes : iterable of openmc.Region
        Regions to take the union of

    Attributes
    ----------
    bounding_box : 2-tuple of numpy.array
        Lower-left and upper-right coordinates of an axis-aligned bounding box

    """

    def __init__(self, nodes):
        self._nodes = list(nodes)

    def __or__(self, other):
        result = Union(self)
        result |= other
        return result

    def __ior__(self, other):
        # Flatten nested unions rather than nesting them.
        if isinstance(other, Union):
            self.extend(other)
        else:
            self.append(other)
        return self

    # Mutable sequence protocol, delegated to the underlying list of nodes.
    def __getitem__(self, key):
        return self._nodes[key]

    def __setitem__(self, key, value):
        self._nodes[key] = value

    def __delitem__(self, key):
        del self._nodes[key]

    def __len__(self):
        return len(self._nodes)

    def insert(self, index, value):
        self._nodes.insert(index, value)

    def __contains__(self, point):
        """Check whether a point is contained in the region.

        Parameters
        ----------
        point : 3-tuple of float
            Cartesian coordinates, :math:`(x',y',z')`, of the point

        Returns
        -------
        bool
            Whether the point is in the region

        """
        # A point is inside the union if any node contains it.
        return any(point in node for node in self._nodes)

    def __str__(self):
        return '(' + ' | '.join(map(str, self)) + ')'

    @property
    def bounding_box(self):
        # Start from an empty (inverted) box and grow it by each node's box.
        lower_left = np.array([np.inf, np.inf, np.inf])
        upper_right = np.array([-np.inf, -np.inf, -np.inf])
        for node in self:
            node_lower_left, node_upper_right = node.bounding_box
            lower_left[:] = np.minimum(lower_left, node_lower_left)
            upper_right[:] = np.maximum(upper_right, node_upper_right)
        return lower_left, upper_right

    def clone(self, memo=None):
        """Create a copy of this region - each of the surfaces in the
        union's nodes will be cloned and will have new unique IDs.

        Parameters
        ----------
        memo : dict or None
            A nested dictionary of previously cloned objects. This parameter
            is used internally and should not be specified by the user.

        Returns
        -------
        clone : openmc.Union
            The clone of this union

        """
        if memo is None:
            memo = {}
        copied = deepcopy(self)
        copied[:] = [node.clone(memo) for node in self]
        return copied
class Complement(Region):
    """Complement of a region.

    The Complement of an existing :class:`openmc.Region` can be created by
    using the ~ operator as the following example demonstrates:

    >>> xl = openmc.XPlane(x0=-10.0)
    >>> xr = openmc.XPlane(x0=10.0)
    >>> yl = openmc.YPlane(y0=-10.0)
    >>> yr = openmc.YPlane(y0=10.0)
    >>> inside_box = +xl & -xr & +yl & -yr
    >>> outside_box = ~inside_box
    >>> type(outside_box)
    <class 'openmc.region.Complement'>

    Parameters
    ----------
    node : openmc.Region
        Region to take the complement of

    Attributes
    ----------
    node : openmc.Region
        Region to take the complement of
    bounding_box : tuple of numpy.array
        Lower-left and upper-right coordinates of an axis-aligned bounding box

    """

    def __init__(self, node):
        self.node = node

    def __contains__(self, point):
        """Check whether a point is contained in the region.

        Parameters
        ----------
        point : 3-tuple of float
            Cartesian coordinates, :math:`(x',y',z')`, of the point

        Returns
        -------
        bool
            Whether the point is in the region

        """
        return point not in self.node

    def __str__(self):
        return '~' + str(self.node)

    @property
    def node(self):
        return self._node

    @node.setter
    def node(self, node):
        check_type('node', node, Region)
        self._node = node

    @property
    def bounding_box(self):
        # Use De Morgan's laws to distribute the complement operator so that it
        # only applies to surface half-spaces, thus allowing us to calculate the
        # bounding box in the usual recursive manner.
        if isinstance(self.node, Union):
            temp_region = Intersection(~n for n in self.node)
        elif isinstance(self.node, Intersection):
            temp_region = Union(~n for n in self.node)
        elif isinstance(self.node, Complement):
            # Double complement cancels out.
            temp_region = self.node.node
        else:
            temp_region = ~self.node
        return temp_region.bounding_box

    def get_surfaces(self, surfaces=None):
        """
        Recursively find and return all the surfaces referenced by the node

        Parameters
        ----------
        surfaces: collections.OrderedDict, optional
            Dictionary mapping surface IDs to :class:`openmc.Surface` instances

        Returns
        -------
        surfaces: collections.OrderedDict
            Dictionary mapping surface IDs to :class:`openmc.Surface` instances

        """
        if surfaces is None:
            surfaces = OrderedDict()
        # Recurse into the node directly rather than iterating over it: the
        # node may be a single half-space or another Complement, neither of
        # which is iterable, so `for region in self.node` would raise
        # TypeError for those cases.
        return self.node.get_surfaces(surfaces)

    def clone(self, memo=None):
        """Create a copy of this region - each of the surfaces in the
        complement's node will be cloned and will have new unique IDs.

        Parameters
        ----------
        memo : dict or None
            A nested dictionary of previously cloned objects. This parameter
            is used internally and should not be specified by the user.

        Returns
        -------
        clone : openmc.Complement
            The clone of this complement

        """
        if memo is None:
            memo = {}
        clone = deepcopy(self)
        clone.node = self.node.clone(memo)
        return clone
| mit |
squilter/ardupilot | Tools/autotest/arduplane.py | 1 | 85180 | #!/usr/bin/env python
# Fly ArduPlane in SITL
from __future__ import print_function
import math
import os
import time
from pymavlink import quaternion
from pymavlink import mavutil
from common import AutoTest
from common import AutoTestTimeoutException
from common import NotAchievedException
from common import PreconditionFailedException
import operator
# get location of scripts
testdir = os.path.dirname(os.path.realpath(__file__))

# Default SITL spawn point (lat, lon, alt-AMSL, heading) used by the tests.
SITL_START_LOCATION = mavutil.location(-35.362938, 149.165085, 585, 354)
WIND = "0,180,0.2"  # speed,direction,variance
class AutoTestPlane(AutoTest):
    @staticmethod
    def get_not_armable_mode_list():
        # Plane has no modes in which arming is unconditionally refused.
        return []
    @staticmethod
    def get_not_disarmed_settable_modes_list():
        # Modes that cannot be entered while disarmed.
        return ["FOLLOW"]
    @staticmethod
    def get_no_position_not_settable_modes_list():
        # No modes are blocked purely by lack of a position estimate.
        return []
    @staticmethod
    def get_position_armable_modes_list():
        # Modes that require a position estimate before arming is allowed.
        return ["GUIDED", "AUTO"]
    @staticmethod
    def get_normal_armable_modes_list():
        # Modes armable without any position estimate.
        return ["MANUAL", "STABILIZE", "ACRO"]
    def log_name(self):
        """Return the vehicle name used in log file naming."""
        return "ArduPlane"
    def test_filepath(self):
        """Return the absolute path of this test script."""
        return os.path.realpath(__file__)
    def sitl_start_location(self):
        """Return the location at which SITL is spawned."""
        return SITL_START_LOCATION
    def defaults_filepath(self):
        """Return the parameter-defaults file passed to SITL at startup."""
        return os.path.join(testdir, 'default_params/plane-jsbsim.parm')
    def set_current_test_name(self, name):
        """Record the per-test artifact directory for test *name*."""
        self.current_test_name_directory = "ArduPlane_Tests/" + name + "/"
    def default_frame(self):
        """Return the default SITL frame type for plane tests."""
        return "plane-elevrev"
    def apply_defaultfile_parameters(self):
        """Intentionally a no-op for plane."""
        # plane passes in a defaults_filepath in place of applying
        # parameters afterwards.
        pass
    def is_plane(self):
        """Identify this autotest suite as a plane suite."""
        return True
    def get_stick_arming_channel(self):
        """Return the RC channel used for rudder/stick arming (yaw mapping)."""
        return int(self.get_parameter("RCMAP_YAW"))
    def get_disarm_delay(self):
        """Return the configured post-landing auto-disarm delay (seconds)."""
        return int(self.get_parameter("LAND_DISARMDELAY"))
    def set_autodisarm_delay(self, delay):
        """Set the post-landing auto-disarm delay (seconds)."""
        self.set_parameter("LAND_DISARMDELAY", delay)
    def takeoff(self, alt=150, alt_max=None, relative=True):
        """Takeoff to altitude.

        alt is the target altitude in metres (relative to home by default);
        alt_max bounds the altitude wait and defaults to alt + 30.  Flies a
        hand-scripted ground roll in FBWA using raw RC overrides.
        """
        if alt_max is None:
            alt_max = alt + 30
        self.change_mode("FBWA")
        self.wait_ready_to_arm()
        self.arm_vehicle()
        # some rudder to counteract the prop torque
        self.set_rc(4, 1700)
        # some up elevator to keep the tail down
        self.set_rc(2, 1200)
        # get it moving a bit first
        self.set_rc(3, 1300)
        self.wait_groundspeed(6, 100)
        # a bit faster again, straighten rudder
        self.set_rc(3, 1600)
        self.set_rc(4, 1500)
        self.wait_groundspeed(12, 100)
        # hit the gas harder now, and give it some more elevator
        self.set_rc(2, 1100)
        self.set_rc(3, 2000)
        # gain a bit of altitude
        self.wait_altitude(alt, alt_max, timeout=30, relative=relative)
        # level off
        self.set_rc(2, 1500)
        self.progress("TAKEOFF COMPLETE")
    def fly_left_circuit(self):
        """Fly a left circuit, 200m on a side."""
        self.mavproxy.send('switch 4\n')  # switch position 4 selects FBWA
        self.wait_mode('FBWA')
        self.set_rc(3, 2000)
        self.wait_level_flight()

        self.progress("Flying left circuit")
        # do 4 turns
        for i in range(0, 4):
            # hard left
            self.progress("Starting turn %u" % i)
            self.set_rc(1, 1000)
            self.wait_heading(270 - (90*i), accuracy=10)
            self.set_rc(1, 1500)
            self.progress("Starting leg %u" % i)
            self.wait_distance(100, accuracy=20)
        self.progress("Circuit complete")
    def fly_RTL(self):
        """Fly to home."""
        self.progress("Flying home in RTL")
        self.mavproxy.send('switch 2\n')  # switch position 2 selects RTL
        self.wait_mode('RTL')
        # NOTE(review): self.homeloc is presumably recorded earlier in the
        # test run (not visible in this chunk) - verify before reuse.
        self.wait_location(self.homeloc,
                           accuracy=120,
                           target_altitude=self.homeloc.alt+100,
                           height_accuracy=20,
                           timeout=180)
        self.progress("RTL Complete")
    def fly_LOITER(self, num_circles=4):
        """Loiter where we are.

        Counts circles by waiting for headings 0 and 180 in turn, then
        checks altitude was held to within 20m.
        """
        self.progress("Testing LOITER for %u turns" % num_circles)
        self.mavproxy.send('loiter\n')
        self.wait_mode('LOITER')

        m = self.mav.recv_match(type='VFR_HUD', blocking=True)
        initial_alt = m.alt
        self.progress("Initial altitude %u\n" % initial_alt)

        while num_circles > 0:
            # passing heading 0 then 180 constitutes one full circle
            self.wait_heading(0, accuracy=10, timeout=60)
            self.wait_heading(180, accuracy=10, timeout=60)
            num_circles -= 1
            self.progress("Loiter %u circles left" % num_circles)

        m = self.mav.recv_match(type='VFR_HUD', blocking=True)
        final_alt = m.alt
        self.progress("Final altitude %u initial %u\n" %
                      (final_alt, initial_alt))

        self.mavproxy.send('mode FBWA\n')
        self.wait_mode('FBWA')

        if abs(final_alt - initial_alt) > 20:
            raise NotAchievedException("Failed to maintain altitude")

        self.progress("Completed Loiter OK")
    def fly_CIRCLE(self, num_circles=1):
        """Circle where we are.

        Same circle-counting and altitude-hold check as fly_LOITER, but in
        CIRCLE mode.
        """
        self.progress("Testing CIRCLE for %u turns" % num_circles)
        self.mavproxy.send('mode CIRCLE\n')
        self.wait_mode('CIRCLE')

        m = self.mav.recv_match(type='VFR_HUD', blocking=True)
        initial_alt = m.alt
        self.progress("Initial altitude %u\n" % initial_alt)

        while num_circles > 0:
            # passing heading 0 then 180 constitutes one full circle
            self.wait_heading(0, accuracy=10, timeout=60)
            self.wait_heading(180, accuracy=10, timeout=60)
            num_circles -= 1
            self.progress("CIRCLE %u circles left" % num_circles)

        m = self.mav.recv_match(type='VFR_HUD', blocking=True)
        final_alt = m.alt
        self.progress("Final altitude %u initial %u\n" %
                      (final_alt, initial_alt))

        self.mavproxy.send('mode FBWA\n')
        self.wait_mode('FBWA')

        if abs(final_alt - initial_alt) > 20:
            raise NotAchievedException("Failed to maintain altitude")

        self.progress("Completed CIRCLE OK")
    def wait_level_flight(self, accuracy=5, timeout=30):
        """Wait for level flight.

        Centres roll/pitch/rudder sticks, then polls ATTITUDE until both
        roll and pitch are within *accuracy* degrees of zero; raises
        NotAchievedException after *timeout* seconds.
        """
        tstart = self.get_sim_time()
        self.progress("Waiting for level flight")
        self.set_rc(1, 1500)
        self.set_rc(2, 1500)
        self.set_rc(4, 1500)
        while self.get_sim_time_cached() < tstart + timeout:
            m = self.mav.recv_match(type='ATTITUDE', blocking=True)
            roll = math.degrees(m.roll)
            pitch = math.degrees(m.pitch)
            self.progress("Roll=%.1f Pitch=%.1f" % (roll, pitch))
            if math.fabs(roll) <= accuracy and math.fabs(pitch) <= accuracy:
                self.progress("Attained level flight")
                return
        raise NotAchievedException("Failed to attain level flight")
    def change_altitude(self, altitude, accuracy=30):
        """Get to a given altitude.

        Uses full up/down elevator in FBWA depending on the sign of the
        current altitude error, then waits for level flight.
        """
        self.mavproxy.send('mode FBWA\n')
        self.wait_mode('FBWA')
        alt_error = self.mav.messages['VFR_HUD'].alt - altitude
        if alt_error > 0:
            self.set_rc(2, 2000)  # push nose down - we are too high
        else:
            self.set_rc(2, 1000)  # pull nose up - we are too low
        self.wait_altitude(altitude-accuracy/2, altitude+accuracy/2)
        self.set_rc(2, 1500)
        self.progress("Reached target altitude at %u" %
                      self.mav.messages['VFR_HUD'].alt)
        return self.wait_level_flight()
    def axial_left_roll(self, count=1):
        """Fly a left axial roll.

        Climbs 300m above home first, then performs *count* full rolls
        in MANUAL mode before returning to FBWA.
        """
        # full throttle!
        self.set_rc(3, 2000)
        self.change_altitude(self.homeloc.alt+300)

        # fly the roll in manual
        self.mavproxy.send('switch 6\n')  # switch position 6 selects MANUAL
        self.wait_mode('MANUAL')

        while count > 0:
            self.progress("Starting roll")
            self.set_rc(1, 1000)
            try:
                self.wait_roll(-150, accuracy=90)
                self.wait_roll(150, accuracy=90)
                self.wait_roll(0, accuracy=90)
            except Exception as e:
                # always re-centre aileron before propagating the failure
                self.set_rc(1, 1500)
                raise e
            count -= 1

        # back to FBWA
        self.set_rc(1, 1500)
        self.mavproxy.send('switch 4\n')
        self.wait_mode('FBWA')
        self.set_rc(3, 1700)
        return self.wait_level_flight()
    def inside_loop(self, count=1):
        """Fly a inside loop.

        Climbs 300m above home first, then performs *count* loops in
        MANUAL mode before returning to FBWA.
        """
        # full throttle!
        self.set_rc(3, 2000)
        self.change_altitude(self.homeloc.alt+300)
        # fly the loop in manual
        self.mavproxy.send('switch 6\n')  # switch position 6 selects MANUAL
        self.wait_mode('MANUAL')

        while count > 0:
            self.progress("Starting loop")
            self.set_rc(2, 1000)
            self.wait_pitch(-60, accuracy=20)
            self.wait_pitch(0, accuracy=20)
            count -= 1

        # back to FBWA
        self.set_rc(2, 1500)
        self.mavproxy.send('switch 4\n')
        self.wait_mode('FBWA')
        self.set_rc(3, 1700)
        return self.wait_level_flight()
    def set_attitude_target(self, tolerance=10):
        """Test setting of attitude target in guided mode.

        Runs a small state machine: roll to 60 degrees via
        SET_ATTITUDE_TARGET, stabilize, hold for a while, roll back to
        level, then return to FBWA.  *tolerance* is in degrees (and is
        also reused as the hold duration in seconds - see state_hold).
        """
        self.change_mode("GUIDED")
        # self.set_parameter("STALL_PREVENTION", 0)

        state_roll_over = "roll-over"
        state_stabilize_roll = "stabilize-roll"
        state_hold = "hold"
        state_roll_back = "roll-back"
        state_done = "done"

        tstart = self.get_sim_time()

        try:
            state = state_roll_over
            while state != state_done:

                m = self.mav.recv_match(type='ATTITUDE',
                                        blocking=True,
                                        timeout=0.1)
                now = self.get_sim_time_cached()
                if now - tstart > 20:
                    raise AutoTestTimeoutException("Manuevers not completed")
                if m is None:
                    continue

                r = math.degrees(m.roll)
                if state == state_roll_over:
                    target_roll_degrees = 60
                    if abs(r - target_roll_degrees) < tolerance:
                        state = state_stabilize_roll
                        stabilize_start = now
                elif state == state_stabilize_roll:
                    # just give it a little time to sort it self out
                    if now - stabilize_start > 2:
                        state = state_hold
                        hold_start = now
                elif state == state_hold:
                    target_roll_degrees = 60
                    # NOTE(review): tolerance doubles as the hold duration
                    # (seconds) here - presumably intentional; confirm.
                    if now - hold_start > tolerance:
                        state = state_roll_back
                    if abs(r - target_roll_degrees) > tolerance:
                        raise NotAchievedException("Failed to hold attitude")
                elif state == state_roll_back:
                    target_roll_degrees = 0
                    if abs(r - target_roll_degrees) < tolerance:
                        state = state_done
                else:
                    raise ValueError("Unknown state %s" % str(state))

                m_nav = self.mav.messages['NAV_CONTROLLER_OUTPUT']
                self.progress("%s Roll: %f desired=%f set=%f" %
                              (state, r, m_nav.nav_roll, target_roll_degrees))

                time_boot_millis = 0  # FIXME
                target_system = 1  # FIXME
                target_component = 1  # FIXME
                type_mask = 0b10000001 ^ 0xFF  # FIXME
                # attitude in radians:
                q = quaternion.Quaternion([math.radians(target_roll_degrees),
                                           0,
                                           0])
                roll_rate_radians = 0.5
                pitch_rate_radians = 0
                yaw_rate_radians = 0
                thrust = 1.0
                self.mav.mav.set_attitude_target_send(time_boot_millis,
                                                      target_system,
                                                      target_component,
                                                      type_mask,
                                                      q,
                                                      roll_rate_radians,
                                                      pitch_rate_radians,
                                                      yaw_rate_radians,
                                                      thrust)
        except Exception as e:
            # on any failure, restore FBWA and cruise throttle before raising
            self.mavproxy.send('mode FBWA\n')
            self.wait_mode('FBWA')
            self.set_rc(3, 1700)
            raise e

        # back to FBWA
        self.mavproxy.send('mode FBWA\n')
        self.wait_mode('FBWA')
        self.set_rc(3, 1700)
        self.wait_level_flight()
    def test_stabilize(self, count=1):
        """Fly stabilize mode.

        Performs *count* full rolls in STABILIZE and verifies the vehicle
        self-levels when the stick is released.
        """
        # full throttle!
        self.set_rc(3, 2000)
        self.set_rc(2, 1300)
        self.change_altitude(self.homeloc.alt+300)
        self.set_rc(2, 1500)

        self.mavproxy.send("mode STABILIZE\n")
        self.wait_mode('STABILIZE')

        while count > 0:
            self.progress("Starting roll")
            self.set_rc(1, 2000)
            self.wait_roll(-150, accuracy=90)
            self.wait_roll(150, accuracy=90)
            self.wait_roll(0, accuracy=90)
            count -= 1

        self.set_rc(1, 1500)
        self.wait_roll(0, accuracy=5)

        # back to FBWA
        self.mavproxy.send('mode FBWA\n')
        self.wait_mode('FBWA')
        self.set_rc(3, 1700)
        return self.wait_level_flight()
    def test_acro(self, count=1):
        """Fly ACRO mode.

        Performs *count* rolls, then two loops, in ACRO mode, returning
        to FBWA between and after the manoeuvres.
        """
        # full throttle!
        self.set_rc(3, 2000)
        self.set_rc(2, 1300)
        self.change_altitude(self.homeloc.alt+300)
        self.set_rc(2, 1500)

        self.mavproxy.send("mode ACRO\n")
        self.wait_mode('ACRO')

        while count > 0:
            self.progress("Starting roll")
            self.set_rc(1, 1000)
            self.wait_roll(-150, accuracy=90)
            self.wait_roll(150, accuracy=90)
            self.wait_roll(0, accuracy=90)
            count -= 1
        self.set_rc(1, 1500)

        # back to FBWA
        self.mavproxy.send('mode FBWA\n')
        self.wait_mode('FBWA')

        self.wait_level_flight()

        self.mavproxy.send("mode ACRO\n")
        self.wait_mode('ACRO')

        count = 2
        while count > 0:
            self.progress("Starting loop")
            self.set_rc(2, 1000)
            self.wait_pitch(-60, accuracy=20)
            self.wait_pitch(0, accuracy=20)
            count -= 1

        self.set_rc(2, 1500)

        # back to FBWA
        self.mavproxy.send('mode FBWA\n')
        self.wait_mode('FBWA')
        self.set_rc(3, 1700)
        return self.wait_level_flight()
    def test_FBWB(self, mode='FBWB'):
        """Fly FBWB or CRUISE mode.

        Locks in an altitude, flies an aileron circuit and a rudder
        circuit, then verifies altitude was held to within 20m.
        """
        self.mavproxy.send("mode %s\n" % mode)
        self.wait_mode(mode)
        self.set_rc(3, 1700)
        self.set_rc(2, 1500)

        # lock in the altitude by asking for an altitude change then releasing
        self.set_rc(2, 1000)
        self.wait_distance(50, accuracy=20)
        self.set_rc(2, 1500)
        self.wait_distance(50, accuracy=20)

        m = self.mav.recv_match(type='VFR_HUD', blocking=True)
        initial_alt = m.alt
        self.progress("Initial altitude %u\n" % initial_alt)

        self.progress("Flying right circuit")
        # do 4 turns
        for i in range(0, 4):
            # hard left
            self.progress("Starting turn %u" % i)
            self.set_rc(1, 1800)
            try:
                self.wait_heading(0 + (90*i), accuracy=20, timeout=60)
            except Exception as e:
                # re-centre aileron before propagating the failure
                self.set_rc(1, 1500)
                raise e
            self.set_rc(1, 1500)
            self.progress("Starting leg %u" % i)
            self.wait_distance(100, accuracy=20)
        self.progress("Circuit complete")

        self.progress("Flying rudder left circuit")
        # do 4 turns
        for i in range(0, 4):
            # hard left
            self.progress("Starting turn %u" % i)
            self.set_rc(4, 1900)
            try:
                self.wait_heading(360 - (90*i), accuracy=20, timeout=60)
            except Exception as e:
                # re-centre rudder before propagating the failure
                self.set_rc(4, 1500)
                raise e
            self.set_rc(4, 1500)
            self.progress("Starting leg %u" % i)
            self.wait_distance(100, accuracy=20)
        self.progress("Circuit complete")

        m = self.mav.recv_match(type='VFR_HUD', blocking=True)
        final_alt = m.alt
        self.progress("Final altitude %u initial %u\n" %
                      (final_alt, initial_alt))

        # back to FBWA
        self.mavproxy.send('mode FBWA\n')
        self.wait_mode('FBWA')

        if abs(final_alt - initial_alt) > 20:
            raise NotAchievedException("Failed to maintain altitude")

        return self.wait_level_flight()
    def fly_mission(self, filename, mission_timeout=60.0):
        """Fly a mission from a file.

        Loads *filename*, switches to AUTO, waits for waypoint progress and
        an auto-disarm at mission end.
        """
        self.progress("Flying mission %s" % filename)
        self.load_mission(filename)
        self.mavproxy.send('switch 1\n')  # auto mode
        self.wait_mode('AUTO')
        self.wait_waypoint(1, 7, max_dist=60)
        self.wait_groundspeed(0, 0.5, timeout=mission_timeout)
        self.mavproxy.expect("Auto disarmed")
        self.progress("Mission OK")
    def fly_do_reposition(self):
        """Test MAV_CMD_DO_REPOSITION in GUIDED mode.

        Takes off, repositions 500m NE at 100m relative altitude, then
        flies home, lands and disarms.
        """
        self.progress("Takeoff")
        self.takeoff(alt=50)
        self.set_rc(3, 1500)
        self.progress("Entering guided and flying somewhere constant")
        self.change_mode("GUIDED")
        loc = self.mav.location()
        self.location_offset_ne(loc, 500, 500)

        new_alt = 100
        self.run_cmd_int(
            mavutil.mavlink.MAV_CMD_DO_REPOSITION,
            0,
            0,
            0,
            0,
            int(loc.lat*1e7),
            int(loc.lng*1e7),
            new_alt,  # alt
            frame=mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT,
        )
        self.wait_altitude(new_alt-10, new_alt, timeout=30, relative=True)

        self.fly_home_land_and_disarm()
    def fly_deepstall(self):
        """Run the deepstall landing subtests (absolute variant disabled)."""
        # self.fly_deepstall_absolute()
        self.fly_deepstall_relative()
def fly_deepstall_absolute(self):
self.start_subtest("DeepStall Relative Absolute")
self.set_parameter("LAND_TYPE", 1)
deepstall_elevator_pwm = 1661
self.set_parameter("LAND_DS_ELEV_PWM", deepstall_elevator_pwm)
self.load_mission("plane-deepstall-mission.txt")
self.change_mode("AUTO")
self.wait_ready_to_arm()
self.arm_vehicle()
self.progress("Waiting for deepstall messages")
self.wait_text("Deepstall: Entry: ", timeout=240)
# assume elevator is on channel 2:
self.wait_servo_channel_value(2, deepstall_elevator_pwm)
self.disarm_wait(timeout=120)
self.progress("Flying home")
self.takeoff(10)
self.set_parameter("LAND_TYPE", 0)
self.fly_home_land_and_disarm()
    def fly_deepstall_relative(self):
        """Test a deepstall landing using a relative-altitude mission."""
        self.start_subtest("DeepStall Relative")
        self.set_parameter("LAND_TYPE", 1)
        deepstall_elevator_pwm = 1661
        self.set_parameter("LAND_DS_ELEV_PWM", deepstall_elevator_pwm)
        self.load_mission("plane-deepstall-relative-mission.txt")
        self.change_mode("AUTO")
        self.wait_ready_to_arm()
        self.arm_vehicle()
        self.progress("Waiting for deepstall messages")

        self.wait_text("Deepstall: Entry: ", timeout=240)

        # assume elevator is on channel 2:
        self.wait_servo_channel_value(2, deepstall_elevator_pwm)

        self.disarm_wait(timeout=120)

        self.progress("Flying home")
        self.takeoff(100)
        self.set_parameter("LAND_TYPE", 0)
        self.fly_home_land_and_disarm(timeout=240)
def fly_do_change_speed(self):
# the following lines ensure we revert these parameter values
# - DO_CHANGE_AIRSPEED is a permanent vehicle change!
self.set_parameter("TRIM_ARSPD_CM", self.get_parameter("TRIM_ARSPD_CM"))
self.set_parameter("MIN_GNDSPD_CM", self.get_parameter("MIN_GNDSPD_CM"))
self.progress("Takeoff")
self.takeoff(alt=100)
self.set_rc(3, 1500)
# ensure we know what the airspeed is:
self.progress("Entering guided and flying somewhere constant")
self.change_mode("GUIDED")
self.run_cmd_int(
mavutil.mavlink.MAV_CMD_DO_REPOSITION,
0,
0,
0,
0,
12345, # lat*1e7
12345, # lon*1e7
100 # alt
)
self.delay_sim_time(10)
self.progress("Ensuring initial speed is known and relatively constant")
initial_speed = 21.5;
timeout = 10
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > timeout:
break
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
self.progress("GroundSpeed: %f want=%f" %
(m.groundspeed, initial_speed))
if abs(initial_speed - m.groundspeed) > 1:
raise NotAchievedException("Initial speed not as expected (want=%f got=%f" % (initial_speed, m.groundspeed))
self.progress("Setting groundspeed")
new_target_groundspeed = initial_speed + 5
self.run_cmd(
mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED,
1, # groundspeed
new_target_groundspeed,
-1, # throttle / no change
0, # absolute values
0,
0,
0)
self.wait_groundspeed(new_target_groundspeed-0.5, new_target_groundspeed+0.5, timeout=40)
self.progress("Adding some wind, ensuring groundspeed holds")
self.set_parameter("SIM_WIND_SPD", 5)
self.delay_sim_time(5)
self.wait_groundspeed(new_target_groundspeed-0.5, new_target_groundspeed+0.5, timeout=40)
self.set_parameter("SIM_WIND_SPD", 0)
self.progress("Setting airspeed")
new_target_airspeed = initial_speed + 5
self.run_cmd(
mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED,
0, # airspeed
new_target_airspeed,
-1, # throttle / no change
0, # absolute values
0,
0,
0)
self.wait_groundspeed(new_target_airspeed-0.5, new_target_airspeed+0.5)
self.progress("Adding some wind, hoping groundspeed increases/decreases")
self.set_parameter("SIM_WIND_SPD", 5)
self.set_parameter("SIM_WIND_DIR", 270)
self.delay_sim_time(5)
timeout = 10
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > timeout:
raise NotAchievedException("Did not achieve groundspeed delta")
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
delta = abs(m.airspeed - m.groundspeed)
want_delta = 4
self.progress("groundspeed and airspeed should be different (have=%f want=%f)" % (delta, want_delta))
if delta > want_delta:
break
self.fly_home_land_and_disarm()
    def fly_home_land_and_disarm(self, timeout=120):
        """Fly the flaps.txt mission from waypoint 7 to land and disarm."""
        filename = "flaps.txt"
        self.progress("Using %s to fly home" % filename)
        # num_wp retained for the commented-out wait_waypoint call below
        num_wp = self.load_mission(filename)
        self.change_mode("AUTO")
        self.mavproxy.send('wp set 7\n')
        self.drain_mav()
        # TODO: reflect on file to find this magic waypoint number?
        # self.wait_waypoint(7, num_wp-1, timeout=500) # we tend to miss the final waypoint by a fair bit, and this is probably too noisy anyway?
        self.wait_disarmed(timeout=timeout)
    def fly_flaps(self):
        """Test flaps functionality.

        Configures a flaps channel and servo, checks deploy/undeploy and
        slew timing on the ground, then flies the flaps mission and checks
        the flaps retract at the end.  Parameter changes are reverted via
        context_push/context_pop.
        """
        filename = "flaps.txt"
        self.context_push()
        ex = None
        try:
            flaps_ch = 5
            servo_ch = 5
            self.set_parameter("SERVO%u_FUNCTION" % servo_ch, 3)  # flapsauto
            self.set_parameter("RC%u_OPTION" % flaps_ch, 208)  # Flaps RCx_OPTION
            self.set_parameter("LAND_FLAP_PERCNT", 50)
            self.set_parameter("LOG_DISARMED", 1)
            flaps_ch_min = 1000
            flaps_ch_trim = 1500
            flaps_ch_max = 2000
            self.set_parameter("RC%u_MIN" % flaps_ch, flaps_ch_min)
            self.set_parameter("RC%u_MAX" % flaps_ch, flaps_ch_max)
            self.set_parameter("RC%u_TRIM" % flaps_ch, flaps_ch_trim)

            servo_ch_min = 1200
            servo_ch_trim = 1300
            servo_ch_max = 1800
            self.set_parameter("SERVO%u_MIN" % servo_ch, servo_ch_min)
            self.set_parameter("SERVO%u_MAX" % servo_ch, servo_ch_max)
            self.set_parameter("SERVO%u_TRIM" % servo_ch, servo_ch_trim)

            self.progress("check flaps are not deployed")
            self.set_rc(flaps_ch, flaps_ch_min)
            self.wait_servo_channel_value(servo_ch, servo_ch_min)
            self.progress("deploy the flaps")
            self.set_rc(flaps_ch, flaps_ch_max)
            tstart = self.get_sim_time()
            self.wait_servo_channel_value(servo_ch, servo_ch_max)
            tstop = self.get_sim_time_cached()
            delta_time = tstop - tstart
            # flaps should slew at a limited rate, not snap to position
            delta_time_min = 0.5
            delta_time_max = 1.5
            if delta_time < delta_time_min or delta_time > delta_time_max:
                raise NotAchievedException((
                    "Flaps Slew not working (%f seconds)" % (delta_time,)))
            self.progress("undeploy flaps")
            self.set_rc(flaps_ch, flaps_ch_min)
            self.wait_servo_channel_value(servo_ch, servo_ch_min)

            self.progress("Flying mission %s" % filename)
            self.load_mission(filename)
            self.mavproxy.send('wp set 1\n')
            self.mavproxy.send('switch 1\n')  # auto mode
            self.wait_mode('AUTO')
            self.wait_ready_to_arm()
            self.arm_vehicle()
            last_mission_current_msg = 0
            last_seq = None
            while self.armed():
                m = self.mav.recv_match(type='MISSION_CURRENT', blocking=True)
                time_delta = (self.get_sim_time_cached() -
                              last_mission_current_msg)
                # throttle progress output to once a second or on seq change
                if (time_delta > 1 or m.seq != last_seq):
                    dist = None
                    x = self.mav.messages.get("NAV_CONTROLLER_OUTPUT", None)
                    if x is not None:
                        dist = x.wp_dist
                    self.progress("MISSION_CURRENT.seq=%u (dist=%s)" %
                                  (m.seq, str(dist)))
                    last_mission_current_msg = self.get_sim_time_cached()
                    last_seq = m.seq
            # flaps should undeploy at the end
            self.wait_servo_channel_value(servo_ch, servo_ch_min, timeout=30)

            # do a short flight in FBWA, watching for flaps
            # self.mavproxy.send('switch 4\n')
            # self.wait_mode('FBWA')
            # self.delay_sim_time(10)
            # self.mavproxy.send('switch 6\n')
            # self.wait_mode('MANUAL')
            # self.delay_sim_time(10)

            self.progress("Flaps OK")
        except Exception as e:
            ex = e
        self.context_pop()
        if ex:
            if self.armed():
                self.disarm_vehicle()
            raise ex
def test_rc_relay(self):
'''test toggling channel 12 toggles relay'''
self.set_parameter("RC12_OPTION", 28) # Relay On/Off
self.set_rc(12, 1000)
self.reboot_sitl() # needed for RC12_OPTION to take effect
off = self.get_parameter("SIM_PIN_MASK")
if off:
raise PreconditionFailedException("SIM_MASK_PIN off")
# allow time for the RC library to register initial value:
self.delay_sim_time(1)
self.set_rc(12, 2000)
self.wait_heartbeat()
self.wait_heartbeat()
on = self.get_parameter("SIM_PIN_MASK")
if not on:
raise NotAchievedException("SIM_PIN_MASK doesn't reflect ON")
self.set_rc(12, 1000)
self.wait_heartbeat()
self.wait_heartbeat()
off = self.get_parameter("SIM_PIN_MASK")
if off:
raise NotAchievedException("SIM_PIN_MASK doesn't reflect OFF")
    def test_rc_option_camera_trigger(self):
        '''test toggling channel 12 takes picture'''
        self.set_parameter("RC12_OPTION", 9)  # CameraTrigger
        self.reboot_sitl()  # needed for RC12_OPTION to take effect

        x = self.mav.messages.get("CAMERA_FEEDBACK", None)
        if x is not None:
            raise PreconditionFailedException("Receiving CAMERA_FEEDBACK?!")
        self.set_rc(12, 2000)
        tstart = self.get_sim_time()
        # poll for up to 10 sim-seconds for the camera feedback message
        while self.get_sim_time_cached() - tstart < 10:
            x = self.mav.messages.get("CAMERA_FEEDBACK", None)
            if x is not None:
                break
            self.wait_heartbeat()
        self.set_rc(12, 1000)
        if x is None:
            raise NotAchievedException("No CAMERA_FEEDBACK message received")
def test_throttle_failsafe(self):
    '''Exercise throttle failsafe: throttle-below-THR_FS_VALUE and
    no-pulses RC failure must both take the vehicle through CIRCLE
    (short failsafe) into RTL (long failsafe), and SYS_STATUS must
    report RC receiver present/enabled/health appropriately
    throughout.  Finally checks long failsafe still triggers with the
    short failsafe action disabled.'''
    self.change_mode('MANUAL')
    m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
    receiver_bit = mavutil.mavlink.MAV_SYS_STATUS_SENSOR_RC_RECEIVER
    self.progress("Testing receiver enabled")
    if (not (m.onboard_control_sensors_enabled & receiver_bit)):
        raise PreconditionFailedException()
    self.progress("Testing receiver present")
    if (not (m.onboard_control_sensors_present & receiver_bit)):
        raise PreconditionFailedException()
    self.progress("Testing receiver health")
    if (not (m.onboard_control_sensors_health & receiver_bit)):
        raise PreconditionFailedException()
    self.progress("Ensure we know original throttle value")
    self.wait_rc_channel_value(3, 1000)
    # throttle reading below THR_FS_VALUE triggers the failsafe:
    self.set_parameter("THR_FS_VALUE", 960)
    self.progress("Failing receiver (throttle-to-950)")
    # collect HEARTBEATs so we can later prove we passed through CIRCLE:
    self.context_collect("HEARTBEAT")
    self.set_parameter("SIM_RC_FAIL", 2) # throttle-to-950
    self.wait_mode('RTL') # long failsafe
    if (not self.get_mode_from_mode_mapping("CIRCLE") in [x.custom_mode for x in self.context_stop_collecting("HEARTBEAT")]):
        raise NotAchievedException("Did not go via circle mode")
    self.progress("Ensure we've had our throttle squashed to 950")
    self.wait_rc_channel_value(3, 950)
    self.drain_mav_unparsed()
    m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
    print("%s" % str(m))
    self.progress("Testing receiver enabled")
    if (not (m.onboard_control_sensors_enabled & receiver_bit)):
        raise NotAchievedException("Receiver not enabled")
    self.progress("Testing receiver present")
    if (not (m.onboard_control_sensors_present & receiver_bit)):
        raise NotAchievedException("Receiver not present")
    # skip this until RC is fixed
    # self.progress("Testing receiver health")
    # if (m.onboard_control_sensors_health & receiver_bit):
    #     raise NotAchievedException("Sensor healthy when it shouldn't be")
    # restore RC and confirm receiver reported healthy again:
    self.set_parameter("SIM_RC_FAIL", 0)
    self.drain_mav_unparsed()
    # have to allow time for RC to be fetched from SITL
    self.delay_sim_time(0.5)
    m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
    self.progress("Testing receiver enabled")
    if (not (m.onboard_control_sensors_enabled & receiver_bit)):
        raise NotAchievedException("Receiver not enabled")
    self.progress("Testing receiver present")
    if (not (m.onboard_control_sensors_present & receiver_bit)):
        raise NotAchievedException("Receiver not present")
    self.progress("Testing receiver health")
    if (not (m.onboard_control_sensors_health & receiver_bit)):
        raise NotAchievedException("Receiver not healthy2")
    self.change_mode('MANUAL')
    # second scenario: RC signal lost entirely (no pulses):
    self.progress("Failing receiver (no-pulses)")
    self.context_collect("HEARTBEAT")
    self.set_parameter("SIM_RC_FAIL", 1) # no-pulses
    self.wait_mode('RTL') # long failsafe
    if (not self.get_mode_from_mode_mapping("CIRCLE") in [x.custom_mode for x in self.context_stop_collecting("HEARTBEAT")]):
        raise NotAchievedException("Did not go via circle mode")
    self.drain_mav_unparsed()
    m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
    print("%s" % str(m))
    self.progress("Testing receiver enabled")
    if (not (m.onboard_control_sensors_enabled & receiver_bit)):
        raise NotAchievedException("Receiver not enabled")
    self.progress("Testing receiver present")
    if (not (m.onboard_control_sensors_present & receiver_bit)):
        raise NotAchievedException("Receiver not present")
    self.progress("Testing receiver health")
    # with no pulses at all the receiver must report UNhealthy:
    if (m.onboard_control_sensors_health & receiver_bit):
        raise NotAchievedException("Sensor healthy when it shouldn't be")
    self.progress("Making RC work again")
    self.set_parameter("SIM_RC_FAIL", 0)
    # have to allow time for RC to be fetched from SITL
    self.progress("Giving receiver time to recover")
    self.delay_sim_time(0.5)
    self.drain_mav_unparsed()
    m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
    self.progress("Testing receiver enabled")
    if (not (m.onboard_control_sensors_enabled & receiver_bit)):
        raise NotAchievedException("Receiver not enabled")
    self.progress("Testing receiver present")
    if (not (m.onboard_control_sensors_present & receiver_bit)):
        raise NotAchievedException("Receiver not present")
    self.progress("Testing receiver health")
    if (not (m.onboard_control_sensors_health & receiver_bit)):
        raise NotAchievedException("Receiver not healthy")
    self.change_mode('MANUAL')
    self.progress("Ensure long failsafe can trigger when short failsafe disabled")
    self.context_push()
    self.context_collect("STATUSTEXT")
    ex = None
    try:
        self.set_parameter("FS_SHORT_ACTN", 3) # 3 means disabled
        self.set_parameter("SIM_RC_FAIL", 1)
        self.wait_statustext("Long event on", check_context=True)
        self.wait_mode("RTL")
        # self.context_clear_collection("STATUSTEXT")
        self.set_parameter("SIM_RC_FAIL", 0)
        # NOTE(review): wait_text here vs wait_statustext above --
        # presumably an alias; confirm they behave identically.
        self.wait_text("Long event off", check_context=True)
        self.change_mode("MANUAL")
        self.progress("Trying again with THR_FS_VALUE")
        self.set_parameter("THR_FS_VALUE", 960)
        self.set_parameter("SIM_RC_FAIL", 2)
        self.wait_statustext("Long event on", check_context=True)
        self.wait_mode("RTL")
    except Exception as e:
        self.progress("Exception caught:")
        self.progress(self.get_exception_stacktrace(e))
        ex = e
    # always restore parameter context, then re-raise any failure:
    self.context_pop()
    if ex is not None:
        raise ex
def test_throttle_failsafe_fence(self):
    '''Check the geofence remains enabled (per SYS_STATUS) after an RC
    throttle failsafe has fired.'''
    fence_bit = mavutil.mavlink.MAV_SYS_STATUS_GEOFENCE
    self.progress("Checking fence is not present before being configured")
    m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
    print("%s" % str(m))
    if (m.onboard_control_sensors_enabled & fence_bit):
        raise NotAchievedException("Fence enabled before being configured")
    self.change_mode('MANUAL')
    self.wait_ready_to_arm()
    # configure and enable the fence via RC channel 7:
    self.load_fence("CMAC-fence.txt")
    self.set_parameter("FENCE_CHANNEL", 7)
    self.set_parameter("FENCE_ACTION", 4)
    self.set_rc(3, 1000)
    self.set_rc(7, 2000)
    self.progress("Checking fence is initially OK")
    m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
    print("%s" % str(m))
    if (not (m.onboard_control_sensors_enabled & fence_bit)):
        raise NotAchievedException("Fence not initially enabled")
    self.set_parameter("THR_FS_VALUE", 960)
    self.progress("Failing receiver (throttle-to-950)")
    self.set_parameter("SIM_RC_FAIL", 2) # throttle-to-950
    self.wait_mode("CIRCLE")
    self.delay_sim_time(1) # give
    self.drain_mav_unparsed()
    self.progress("Checking fence is OK after receiver failure (bind-values)")
    # NOTE(review): redundant rebind -- fence_bit already holds this value
    fence_bit = mavutil.mavlink.MAV_SYS_STATUS_GEOFENCE
    m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
    print("%s" % str(m))
    if (not (m.onboard_control_sensors_enabled & fence_bit)):
        raise NotAchievedException("Fence not enabled after RC fail")
def test_gripper_mission(self):
    '''Fly a mission containing gripper grab/release items and confirm
    both events (and the final auto-disarm) are reported.'''
    self.context_push()
    ex = None
    try:
        self.load_mission("plane-gripper-mission.txt")
        self.mavproxy.send("wp set 1\n")
        self.change_mode('AUTO')
        self.wait_ready_to_arm()
        self.arm_vehicle()
        # expect gripper events in mission order, then auto-disarm:
        self.mavproxy.expect("Gripper Grabbed")
        self.mavproxy.expect("Gripper Released")
        self.mavproxy.expect("Auto disarmed")
    except Exception as e:
        self.progress("Exception caught:")
        self.progress(self.get_exception_stacktrace(e))
        ex = e
    # always restore parameter context, then re-raise any failure:
    self.context_pop()
    if ex is not None:
        raise ex
def assert_fence_sys_status(self, present, enabled, health):
    '''Assert that the geofence bit in SYS_STATUS matches the expected
    present/enabled/health booleans; raises NotAchievedException on
    mismatch or if no SYS_STATUS arrives.'''
    self.delay_sim_time(1)
    self.drain_mav_unparsed()
    status = self.mav.recv_match(type='SYS_STATUS', blocking=True, timeout=1)
    if status is None:
        raise NotAchievedException("Did not receive SYS_STATUS")
    fence_bit = mavutil.mavlink.MAV_SYS_STATUS_GEOFENCE
    expectations = (
        ("present", present, status.onboard_control_sensors_present),
        ("enabled", enabled, status.onboard_control_sensors_enabled),
        ("health", health, status.onboard_control_sensors_health),
    )
    for name, want, bitfield in expectations:
        got = (bitfield & fence_bit) != 0
        if want != got:
            raise NotAchievedException(
                "fence status incorrect; %s want=%u got=%u" % (name, want, got))
def do_fence_en_or_dis_able(self, value, want_result=mavutil.mavlink.MAV_RESULT_ACCEPTED):
    '''Send MAV_CMD_DO_FENCE_ENABLE with param1=1 to enable (truthy
    value) or 0 to disable, expecting the given command result.'''
    enable_param = 1 if value else 0
    self.run_cmd(mavutil.mavlink.MAV_CMD_DO_FENCE_ENABLE,
                 enable_param,  # param1: 1=enable, 0=disable
                 0,  # param2 (unused)
                 0,  # param3 (unused)
                 0,  # param4 (unused)
                 0,  # param5 (unused)
                 0,  # param6 (unused)
                 0,  # param7 (unused)
                 want_result=want_result)
def do_fence_enable(self, want_result=mavutil.mavlink.MAV_RESULT_ACCEPTED):
    '''Enable the geofence via MAV_CMD_DO_FENCE_ENABLE.'''
    self.do_fence_en_or_dis_able(True, want_result)
def do_fence_disable(self, want_result=mavutil.mavlink.MAV_RESULT_ACCEPTED):
    '''Disable the geofence via MAV_CMD_DO_FENCE_ENABLE.'''
    self.do_fence_en_or_dis_able(False, want_result)
def wait_circling_point_with_radius(self, loc, want_radius, epsilon=5.0, min_circle_time=5, timeout=120):
    '''Wait until the vehicle is circling *loc* at approximately
    *want_radius* metres (within *epsilon*), sustained for at least
    *min_circle_time* seconds and at least ~180 degrees of heading
    change.  Raises AutoTestTimeoutException after *timeout*.'''
    # heading recorded when we first came on-radius (None = off-radius):
    on_radius_start_heading = None
    # exponentially-smoothed radius estimate, for progress output only:
    average_radius = 0.0
    circle_time_start = 0
    done_time = False
    done_angle = False
    tstart = self.get_sim_time()
    while True:
        if self.get_sim_time() - tstart > timeout:
            raise AutoTestTimeoutException("Did not get onto circle")
        here = self.mav.location()
        got_radius = self.get_distance(loc, here)
        average_radius = 0.95*average_radius + 0.05*got_radius
        on_radius = abs(got_radius - want_radius) < epsilon
        m = self.mav.recv_match(type='VFR_HUD', blocking=True)
        heading = m.heading
        on_string = "off"
        got_angle = ""
        if on_radius_start_heading is not None:
            got_angle = "%0.2f" % abs(on_radius_start_heading - heading) # FIXME
            on_string = "on"
        want_angle = 180 # we don't actually get this (angle-subtraction issue). But we get enough...
        self.progress("wait-circling: got-r=%0.2f want-r=%f avg-r=%f %s want-a=%0.1f got-a=%s" %
                      (got_radius, want_radius, average_radius, on_string, want_angle, got_angle))
        if on_radius:
            if on_radius_start_heading is None:
                # just came on-radius: start the timers
                on_radius_start_heading = heading
                average_radius = got_radius
                circle_time_start = self.get_sim_time()
                continue
            if abs(on_radius_start_heading - heading) > want_angle: # FIXME
                done_angle = True
            if self.get_sim_time() - circle_time_start > min_circle_time:
                done_time = True
            if done_time and done_angle:
                return
            continue
        # fell off the radius: reset and start again
        if on_radius_start_heading is not None:
            average_radius = 0.0
        on_radius_start_heading = None
        circle_time_start = 0
def test_fence_static(self):
    '''Exercise fence configuration on the ground: SYS_STATUS geofence
    bits as the fence is loaded/enabled/disabled/cleared, FENCE_STATUS
    streaming, and the fence-clear-while-enabled behaviour.'''
    ex = None
    try:
        self.progress("Checking for bizarre healthy-when-not-present-or-enabled")
        self.assert_fence_sys_status(False, False, True)
        self.load_fence("CMAC-fence.txt")
        # loading points alone must not start FENCE_STATUS streaming:
        m = self.mav.recv_match(type='FENCE_STATUS', blocking=True, timeout=2)
        if m is not None:
            raise NotAchievedException("Got FENCE_STATUS unexpectedly");
        self.drain_mav_unparsed()
        self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_NONE) # report only
        self.assert_fence_sys_status(False, False, True)
        self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_RTL) # with a real action the fence becomes "present"
        self.assert_fence_sys_status(True, False, True)
        self.mavproxy.send('fence enable\n')
        self.mavproxy.expect("fence enabled")
        self.assert_fence_sys_status(True, True, True)
        # enabled fence must stream FENCE_STATUS, with no breach on the ground:
        m = self.mav.recv_match(type='FENCE_STATUS', blocking=True, timeout=2)
        if m is None:
            raise NotAchievedException("Did not get FENCE_STATUS");
        if m.breach_status:
            raise NotAchievedException("Breached fence unexpectedly (%u)" %
                                       (m.breach_status))
        self.mavproxy.send('fence disable\n')
        self.mavproxy.expect("fence disabled")
        self.assert_fence_sys_status(True, False, True)
        self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_NONE)
        self.assert_fence_sys_status(False, False, True)
        self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_RTL)
        self.assert_fence_sys_status(True, False, True)
        self.mavproxy.send("fence clear\n")
        self.mavproxy.expect("fence removed")
        if self.get_parameter("FENCE_TOTAL") != 0:
            raise NotAchievedException("Expected zero points remaining")
        self.assert_fence_sys_status(False, False, True)
        self.progress("Trying to enable fence with no points")
        self.do_fence_enable(want_result=mavutil.mavlink.MAV_RESULT_FAILED)
        # test a rather unfortunate behaviour:
        self.progress("Killing a live fence with fence-clear")
        self.load_fence("CMAC-fence.txt")
        self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_RTL)
        self.do_fence_enable()
        self.assert_fence_sys_status(True, True, True)
        self.mavproxy.send("fence clear\n")
        self.mavproxy.expect("fence removed")
        if self.get_parameter("FENCE_TOTAL") != 0:
            raise NotAchievedException("Expected zero points remaining")
        self.assert_fence_sys_status(False, False, True)
    except Exception as e:
        self.progress("Exception caught:")
        self.progress(self.get_exception_stacktrace(e))
        ex = e
    # best-effort cleanup before re-raising any failure:
    self.mavproxy.send('fence clear\n')
    if ex is not None:
        raise ex
def test_fence_breach_circle_at(self, loc, disable_on_breach=False):
    '''Take off with the CMAC fence enabled and FENCE_ACTION_RTL, wait
    for a boundary breach, then check the vehicle ends up circling
    *loc* at roughly RTL_RADIUS.  If *disable_on_breach* the fence is
    disabled once breached (needed when *loc* is inside the fence).'''
    ex = None
    try:
        self.load_fence("CMAC-fence.txt")
        want_radius = 100
        # when ArduPlane is fixed, remove this fudge factor
        REALLY_BAD_FUDGE_FACTOR = 1.16
        expected_radius = REALLY_BAD_FUDGE_FACTOR * want_radius
        self.set_parameter("RTL_RADIUS", want_radius)
        self.set_parameter("NAVL1_LIM_BANK", 60)
        self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_RTL)
        self.do_fence_enable()
        self.assert_fence_sys_status(True, True, True)
        self.takeoff(alt=45, alt_max=300)
        tstart = self.get_sim_time()
        while True:
            if self.get_sim_time() - tstart > 30:
                raise NotAchievedException("Did not breach fence")
            m = self.mav.recv_match(type='FENCE_STATUS', blocking=True, timeout=2)
            if m is None:
                raise NotAchievedException("Did not get FENCE_STATUS");
            if m.breach_status == 0:
                continue
            # we've breached; check our state;
            if m.breach_type != mavutil.mavlink.FENCE_BREACH_BOUNDARY:
                raise NotAchievedException("Unexpected breach type %u" %
                                           (m.breach_type,))
            if m.breach_count == 0:
                raise NotAchievedException("Unexpected breach count %u" %
                                           (m.breach_count,))
            # fence should now report unhealthy (breached):
            self.assert_fence_sys_status(True, True, False)
            break
        if disable_on_breach:
            self.do_fence_disable()
        self.wait_circling_point_with_radius(loc, expected_radius)
        self.disarm_vehicle(force=True)
        self.reboot_sitl()
    except Exception as e:
        self.progress("Exception caught:")
        self.progress(self.get_exception_stacktrace(e))
        ex = e
    # best-effort cleanup before re-raising any failure:
    self.mavproxy.send('fence clear\n')
    if ex is not None:
        raise ex
def test_fence_rtl(self):
    '''Check FENCE_ACTION_RTL behaviour with no rally point defined:
    the vehicle should circle home after breaching.'''
    self.progress("Testing FENCE_ACTION_RTL no rally point")
    # The fence must be disabled once breached: otherwise the
    # loiter-at-home would itself constitute a further breach.
    home = self.home_position_as_mav_location()
    self.test_fence_breach_circle_at(home, disable_on_breach=True)
def test_fence_rtl_rally(self):
    '''Check FENCE_ACTION_RTL with a rally point defined: after a
    breach the vehicle should circle the rally point, not home.'''
    ex = None
    target_system = 1
    target_component = 1
    try:
        self.progress("Testing FENCE_ACTION_RTL with rally point")
        self.wait_ready_to_arm()
        # place a single rally point 50m N / 50m W of home:
        loc = self.home_position_as_mav_location()
        self.location_offset_ne(loc, 50, -50)
        self.set_parameter("RALLY_TOTAL", 1)
        self.mav.mav.rally_point_send(target_system,
                                      target_component,
                                      0, # sequence number
                                      1, # total count
                                      int(loc.lat * 1e7),
                                      int(loc.lng * 1e7),
                                      15,
                                      0, # "break" alt?!
                                      0, # "land dir"
                                      0) # flags
        self.delay_sim_time(1)
        self.mavproxy.send("rally list\n")
        self.test_fence_breach_circle_at(loc)
    except Exception as e:
        self.progress("Exception caught:")
        self.progress(self.get_exception_stacktrace(e))
        ex = e
    # best-effort cleanup before re-raising any failure:
    self.mavproxy.send('rally clear\n')
    if ex is not None:
        raise ex
def test_parachute(self):
    '''Fly a mission containing a parachute-release item and confirm
    the simulated chute fires ("BANG" from SITL).'''
    self.set_rc(9, 1000)
    self.set_parameter("CHUTE_ENABLED", 1)
    self.set_parameter("CHUTE_TYPE", 10) # servo-released chute
    self.set_parameter("SERVO9_FUNCTION", 27) # parachute release
    self.set_parameter("SIM_PARA_ENABLE", 1)
    self.set_parameter("SIM_PARA_PIN", 9)
    self.load_mission("plane-parachute-mission.txt")
    self.mavproxy.send("wp set 1\n")
    self.change_mode('AUTO')
    self.wait_ready_to_arm()
    self.arm_vehicle()
    # SITL prints BANG when the parachute pin fires:
    self.mavproxy.expect("BANG")
    self.disarm_vehicle(force=True)
    self.reboot_sitl()
def test_parachute_sinkrate(self):
    '''Check the parachute auto-releases when the critical sink rate
    (CHUTE_CRT_SINK) is exceeded in a dive.'''
    self.set_rc(9, 1000)
    self.set_parameter("CHUTE_ENABLED", 1)
    self.set_parameter("CHUTE_TYPE", 10) # servo-released chute
    self.set_parameter("SERVO9_FUNCTION", 27) # parachute release
    self.set_parameter("SIM_PARA_ENABLE", 1)
    self.set_parameter("SIM_PARA_PIN", 9)
    # release automatically above 9m/s sink:
    self.set_parameter("CHUTE_CRT_SINK", 9)
    self.progress("Takeoff")
    self.takeoff(alt=300)
    self.progress("Diving")
    self.set_rc(2, 2000) # full pitch-down
    # SITL prints BANG when the parachute pin fires:
    self.mavproxy.expect("BANG")
    self.disarm_vehicle(force=True)
    self.reboot_sitl()
def run_subtest(self, desc, func):
    '''Announce *desc* as a subtest, then run *func* (a no-argument
    callable).'''
    self.start_subtest(desc)
    func()
def test_main_flight(self):
    '''Run the main flight sequence: takeoff then a series of
    manoeuvre/mode subtests, ending with a mission flight.'''
    self.change_mode('MANUAL')
    self.progress("Asserting we don't support transfer of fence via mission item protocol")
    self.assert_no_capability(mavutil.mavlink.MAV_PROTOCOL_CAPABILITY_MISSION_FENCE)
    # grab home position:
    self.mav.recv_match(type='HOME_POSITION', blocking=True)
    self.homeloc = self.mav.location()
    self.run_subtest("Takeoff", self.takeoff)
    self.run_subtest("Set Attitude Target", self.set_attitude_target)
    self.run_subtest("Fly left circuit", self.fly_left_circuit)
    self.run_subtest("Left roll", lambda: self.axial_left_roll(1))
    self.run_subtest("Inside loop", self.inside_loop)
    self.run_subtest("Stablize test", self.test_stabilize)
    self.run_subtest("ACRO test", self.test_acro)
    self.run_subtest("FBWB test", self.test_FBWB)
    self.run_subtest("CRUISE test", lambda: self.test_FBWB(mode='CRUISE'))
    self.run_subtest("RTL test", self.fly_RTL)
    self.run_subtest("LOITER test", self.fly_LOITER)
    self.run_subtest("CIRCLE test", self.fly_CIRCLE)
    self.run_subtest("Mission test",
                     lambda: self.fly_mission("ap1.txt"))
def airspeed_autocal(self):
    '''Check AIRSPEED_AUTOCAL telemetry: it must not be emitted while
    on the ground, and must be emitted once flying with
    ARSPD_AUTOCAL enabled.'''
    self.progress("Ensure no AIRSPEED_AUTOCAL on ground")
    self.set_parameter("ARSPD_AUTOCAL", 1)
    m = self.mav.recv_match(type='AIRSPEED_AUTOCAL',
                            blocking=True,
                            timeout=5)
    if m is not None:
        raise NotAchievedException("Got autocal on ground")
    mission_filepath = "flaps.txt"
    num_wp = self.load_mission(mission_filepath)
    self.wait_ready_to_arm()
    self.arm_vehicle()
    self.change_mode("AUTO")
    self.progress("Ensure AIRSPEED_AUTOCAL in air")
    m = self.mav.recv_match(type='AIRSPEED_AUTOCAL',
                            blocking=True,
                            timeout=5)
    # fix: the received message was previously never checked, so the
    # in-air assertion was a no-op:
    if m is None:
        raise NotAchievedException("Did not get AIRSPEED_AUTOCAL in air")
    self.wait_waypoint(7, num_wp-1, timeout=500)
    self.wait_disarmed(timeout=120)
def deadreckoning_main(self, disable_airspeed_sensor=False):
    '''Fly away from home, disable GPS, and ensure the dead-reckoned
    position estimate (GLOBAL_POSITION_INT) stays within 200m of the
    simulator truth (SIMSTATE), optionally with the airspeed sensor
    also disabled.'''
    # state shared with the message hook below:
    self.gpi = None            # latest GLOBAL_POSITION_INT
    self.simstate = None       # latest SIMSTATE (simulator truth)
    self.last_print = 0        # wall-clock throttle for progress output
    self.max_divergence = 0    # worst divergence seen, for reporting
    def validate_global_position_int_against_simstate(mav, m):
        # hook: compare estimate against truth on every relevant message
        if m.get_type() == 'GLOBAL_POSITION_INT':
            self.gpi = m
        elif m.get_type() == 'SIMSTATE':
            self.simstate = m
        if self.gpi is None:
            return
        if self.simstate is None:
            return
        divergence = self.get_distance_int(self.gpi, self.simstate)
        max_allowed_divergence = 200
        if time.time() - self.last_print > 1:
            self.progress("position-estimate-divergence=%fm" % (divergence,))
            self.last_print = time.time()
        if divergence > max_allowed_divergence:
            raise NotAchievedException("global-position-int diverged from simstate by >%fm" % (max_allowed_divergence,))
        if divergence > self.max_divergence:
            self.max_divergence = divergence
    self.install_message_hook(validate_global_position_int_against_simstate)
    try:
        # wind is from the West:
        self.set_parameter("SIM_WIND_DIR", 270)
        # light winds:
        self.set_parameter("SIM_WIND_SPD", 10)
        if disable_airspeed_sensor:
            self.set_parameter("ARSPD_USE", 0)
        self.takeoff(50)
        loc = self.mav.location()
        loc.lat = -35.35690712
        loc.lng = 149.17083386
        # reposition away from home in GUIDED:
        self.run_cmd_int(
            mavutil.mavlink.MAV_CMD_DO_REPOSITION,
            0,
            mavutil.mavlink.MAV_DO_REPOSITION_FLAGS_CHANGE_MODE,
            0,
            0,
            int(loc.lat*1e7),
            int(loc.lng*1e7),
            100,    # alt
            frame=mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT,
        )
        self.wait_location(loc, accuracy=100)
        self.progress("Stewing")
        self.delay_sim_time(20)
        # now fly on dead-reckoning alone:
        self.set_parameter("SIM_GPS_DISABLE", 1)
        self.progress("Roasting")
        self.delay_sim_time(20)
        self.change_mode("RTL")
        self.wait_distance_to_home(100, 200, timeout=200)
        self.set_parameter("SIM_GPS_DISABLE", 0)
        self.delay_sim_time(10)
        self.set_rc(3, 1000)
        self.fly_home_land_and_disarm()
        self.progress("max-divergence: %fm" % (self.max_divergence,))
    finally:
        self.remove_message_hook(validate_global_position_int_against_simstate)
def deadreckoning(self):
    '''Run the dead-reckoning test twice: with and without the
    airspeed sensor.'''
    for without_airspeed in (False, True):
        self.deadreckoning_main(disable_airspeed_sensor=without_airspeed)
def sample_enable_parameter(self):
    '''Parameter used by generic enable-parameter tests for plane.'''
    parameter_name = "Q_ENABLE"
    return parameter_name
def test_rangefinder(self):
    '''Check RANGEFINDER telemetry: absent by default, and once an
    analog rangefinder is configured it reports height-above-ground
    agreeing with GLOBAL_POSITION_INT, with RFND dataflash logging.'''
    ex = None
    self.context_push()
    self.progress("Making sure we don't ordinarily get RANGEFINDER")
    m = None
    try:
        m = self.mav.recv_match(type='RANGEFINDER',
                                blocking=True,
                                timeout=5)
    except Exception as e:
        self.progress("Caught exception: %s" %
                      self.get_exception_stacktrace(e))
    if m is not None:
        raise NotAchievedException("Received unexpected RANGEFINDER msg")
    try:
        self.set_analog_rangefinder_parameters()
        self.reboot_sitl()
        '''ensure rangefinder gives height-above-ground'''
        self.load_mission("plane-gripper-mission.txt") # borrow this
        self.mavproxy.send("wp set 1\n")
        self.change_mode('AUTO')
        self.wait_ready_to_arm()
        self.arm_vehicle()
        self.wait_waypoint(5, 5, max_dist=100)
        rf = self.mav.recv_match(type="RANGEFINDER", timeout=1, blocking=True)
        if rf is None:
            raise NotAchievedException("Did not receive rangefinder message")
        gpi = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True, timeout=1)
        if gpi is None:
            raise NotAchievedException("Did not receive GLOBAL_POSITION_INT message")
        # rangefinder distance (m) must agree with relative_alt (mm) within 3m:
        if abs(rf.distance - gpi.relative_alt/1000.0) > 3:
            raise NotAchievedException("rangefinder alt (%s) disagrees with global-position-int.relative_alt (%s)" % (rf.distance, gpi.relative_alt/1000.0))
        self.mavproxy.expect("Auto disarmed")
        self.progress("Ensure RFND messages in log")
        if not self.current_onboard_log_contains_message("RFND"):
            raise NotAchievedException("No RFND messages in log")
    except Exception as e:
        self.progress("Exception caught:")
        self.progress(self.get_exception_stacktrace(e))
        ex = e
    # restore parameters and reboot before re-raising any failure:
    self.context_pop()
    self.reboot_sitl()
    if ex is not None:
        raise ex
def rc_defaults(self):
    '''RC input defaults for plane tests: throttle (ch3) low, ch8
    high; everything else inherited from the base class.'''
    defaults = super(AutoTestPlane, self).rc_defaults()
    defaults.update({3: 1000, 8: 1800})
    return defaults
def default_mode(self):
    '''Flight mode the vehicle is expected to start tests in.'''
    mode = "MANUAL"
    return mode
def test_pid_tuning(self):
    '''Run the shared PID-tuning test; must be in FBWA first since
    PIDs are not updated in MANUAL.'''
    self.change_mode("FBWA") # we don't update PIDs in MANUAL
    super(AutoTestPlane, self).test_pid_tuning()
def test_setting_modes_via_auxswitches(self):
    '''Check RCx_OPTION mode switches (RTL on ch9, GUIDED on ch10)
    override the flight-mode switch, and that releasing them
    re-polls the mode switch.'''
    self.set_parameter("FLTMODE5", 1)
    self.mavproxy.send('switch 1\n') # random mode
    self.wait_heartbeat()
    self.change_mode('MANUAL')
    self.mavproxy.send('switch 5\n') # acro mode
    self.wait_mode("CIRCLE")
    self.set_rc(9, 1000)
    self.set_rc(10, 1000)
    self.set_parameter("RC9_OPTION", 4) # RTL
    self.set_parameter("RC10_OPTION", 55) # guided
    self.set_rc(9, 1900)
    self.wait_mode("RTL")
    # ch10 high overrides ch9:
    self.set_rc(10, 1900)
    self.wait_mode("GUIDED")
    self.progress("resetting both switches - should go back to CIRCLE")
    self.set_rc(9, 1000)
    self.set_rc(10, 1000)
    self.wait_mode("CIRCLE")
    self.set_rc(9, 1900)
    self.wait_mode("RTL")
    self.set_rc(10, 1900)
    self.wait_mode("GUIDED")
    self.progress("Resetting switch should repoll mode switch")
    self.set_rc(10, 1000) # this re-polls the mode switch
    self.wait_mode("CIRCLE")
    self.set_rc(9, 1000)
def wait_for_collision_threat_to_clear(self):
    '''wait to get a "clear" collision message", then slurp remaining
    messages'''
    # returns once no COLLISION message has arrived for 5 sim-seconds:
    last_collision = self.get_sim_time()
    while True:
        now = self.get_sim_time()
        if now - last_collision > 5:
            return
        self.progress("Waiting for collision message")
        m = self.mav.recv_match(type='COLLISION', blocking=True, timeout=1)
        self.progress("Got (%s)" % str(m))
        if m is None:
            continue
        # still receiving collision messages; restart the quiet timer
        last_collision = now
def test_adsb_send_threatening_adsb_message(self, here):
    '''Inject a simulated ADSB vehicle 10m directly above *here*,
    which constitutes an immediate collision threat.'''
    self.progress("Sending ABSD_VEHICLE message")
    lat_1e7 = int(here.lat * 1e7)
    lng_1e7 = int(here.lng * 1e7)
    alt_mm = int(here.alt*1000 + 10000)  # 10m up
    self.mav.mav.adsb_vehicle_send(
        37,  # ICAO address
        lat_1e7,
        lng_1e7,
        mavutil.mavlink.ADSB_ALTITUDE_TYPE_PRESSURE_QNH,
        alt_mm,
        0,  # heading in cdeg
        0,  # horizontal velocity cm/s
        0,  # vertical velocity cm/s
        "bob".encode("ascii"),  # callsign
        mavutil.mavlink.ADSB_EMITTER_TYPE_LIGHT,
        1,  # time since last communication
        65535,  # flags
        17  # squawk
    )
def test_adsb(self):
    '''Check ADSB avoidance: a threatening injected vehicle triggers a
    COLLISION message and an RTL avoidance action; a far-away vehicle
    does not; and the RC12 avoid-adsb switch disables avoidance.'''
    self.context_push()
    ex = None
    try:
        # message ADSB_VEHICLE 37 -353632614 1491652305 0 584070 0 0 0 "bob" 3 1 255 17
        self.set_parameter("RC12_OPTION", 38) # avoid-adsb
        self.set_rc(12, 2000)
        self.set_parameter("ADSB_ENABLE", 1)
        self.set_parameter("AVD_ENABLE", 1)
        self.set_parameter("AVD_F_ACTION", mavutil.mavlink.MAV_COLLISION_ACTION_RTL)
        self.reboot_sitl()
        self.wait_ready_to_arm()
        here = self.mav.location()
        self.change_mode("FBWA")
        self.delay_sim_time(2) # TODO: work out why this is required...
        self.test_adsb_send_threatening_adsb_message(here)
        self.progress("Waiting for collision message")
        m = self.mav.recv_match(type='COLLISION', blocking=True, timeout=4)
        if m is None:
            raise NotAchievedException("Did not get collision message")
        if m.threat_level != 2:
            raise NotAchievedException("Expected some threat at least")
        if m.action != mavutil.mavlink.MAV_COLLISION_ACTION_RTL:
            raise NotAchievedException("Incorrect action; want=%u got=%u" %
                                       (mavutil.mavlink.MAV_COLLISION_ACTION_RTL, m.action))
        self.wait_mode("RTL")
        self.progress("Sending far-away ABSD_VEHICLE message")
        self.mav.mav.adsb_vehicle_send(37, # ICAO address
                                       # fix: was int(here.lat+1 * 1e7), which is
                                       # here.lat + 1e7, not one degree north:
                                       int((here.lat + 1) * 1e7),
                                       int(here.lng * 1e7),
                                       mavutil.mavlink.ADSB_ALTITUDE_TYPE_PRESSURE_QNH,
                                       int(here.alt*1000 + 10000), # 10m up
                                       0, # heading in cdeg
                                       0, # horizontal velocity cm/s
                                       0, # vertical velocity cm/s
                                       "bob".encode("ascii"), # callsign
                                       mavutil.mavlink.ADSB_EMITTER_TYPE_LIGHT,
                                       1, # time since last communication
                                       65535, # flags
                                       17 # squawk
        )
        self.wait_for_collision_threat_to_clear()
        self.change_mode("FBWA")
        self.progress("Disabling ADSB-avoidance with RC channel")
        self.set_rc(12, 1000)
        self.delay_sim_time(1) # let the switch get polled
        self.test_adsb_send_threatening_adsb_message(here)
        m = self.mav.recv_match(type='COLLISION', blocking=True, timeout=4)
        print("Got (%s)" % str(m))
        if m is not None:
            raise NotAchievedException("Got collision message when I shouldn't have")
    except Exception as e:
        # consistency with sibling tests: log the stacktrace before re-raising
        self.progress("Exception caught:")
        self.progress(self.get_exception_stacktrace(e))
        ex = e
    self.context_pop()
    self.reboot_sitl()
    if ex is not None:
        raise ex
def fly_do_guided_request(self, target_system=1, target_component=1):
    '''Check MISSION_ITEM_INT guided-mode requests (current=2): they
    must be rejected outside GUIDED and accepted/flown inside it.'''
    self.progress("Takeoff")
    self.takeoff(alt=50)
    self.set_rc(3, 1500)
    self.start_subtest("Ensure command bounced outside guided mode")
    desired_relative_alt = 33
    loc = self.mav.location()
    self.location_offset_ne(loc, 300, 300)
    loc.alt += desired_relative_alt
    self.mav.mav.mission_item_int_send(
        target_system,
        target_component,
        0, # seq
        mavutil.mavlink.MAV_FRAME_GLOBAL,
        mavutil.mavlink.MAV_CMD_NAV_WAYPOINT,
        2, # current - guided-mode request
        0, # autocontinue
        0, # p1
        0, # p2
        0, # p3
        0, # p4
        int(loc.lat *1e7), # latitude
        int(loc.lng *1e7), # longitude
        loc.alt, # altitude
        mavutil.mavlink.MAV_MISSION_TYPE_MISSION)
    m = self.mav.recv_match(type='MISSION_ACK', blocking=True, timeout=5)
    if m is None:
        raise NotAchievedException("Did not get MISSION_ACK")
    # not in GUIDED, so the request must be refused:
    if m.type != mavutil.mavlink.MAV_MISSION_ERROR:
        raise NotAchievedException("Did not get appropriate error")
    self.start_subtest("Enter guided and flying somewhere constant")
    self.change_mode("GUIDED")
    self.mav.mav.mission_item_int_send(
        target_system,
        target_component,
        0, # seq
        mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT,
        mavutil.mavlink.MAV_CMD_NAV_WAYPOINT,
        2, # current - guided-mode request
        0, # autocontinue
        0, # p1
        0, # p2
        0, # p3
        0, # p4
        int(loc.lat *1e7), # latitude
        int(loc.lng *1e7), # longitude
        desired_relative_alt, # altitude
        mavutil.mavlink.MAV_MISSION_TYPE_MISSION)
    m = self.mav.recv_match(type='MISSION_ACK', blocking=True, timeout=5)
    if m is None:
        raise NotAchievedException("Did not get MISSION_ACK")
    if m.type != mavutil.mavlink.MAV_MISSION_ACCEPTED:
        raise NotAchievedException("Did not get accepted response")
    self.wait_location(loc, accuracy=100) # based on loiter radius
    # allow the vehicle to settle, then verify it holds the requested altitude:
    self.delay_sim_time(20)
    self.wait_altitude(altitude_min=desired_relative_alt-3,
                       altitude_max=desired_relative_alt+3,
                       relative=True)
    self.fly_home_land_and_disarm()
def LOITER(self):
    '''Check LOITER altitude-hold with stick mixing: pitching down in
    LOITER should cause a throttle increase (energy added to hold the
    target altitude), and centring the stick should return the
    aircraft to the loiter altitude.'''
    self.takeoff(alt=200)
    self.set_rc(3, 1500)
    self.change_mode("LOITER")
    self.progress("Doing a bit of loitering to start with")
    tstart = self.get_sim_time()
    while True:
        now = self.get_sim_time_cached()
        if now - tstart > 60:
            break
        m = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=5)
        if m is None:
            raise NotAchievedException("Did not get VFR_HUD")
        new_throttle = m.throttle
        alt = m.alt
        m = self.mav.recv_match(type='ATTITUDE', blocking=True, timeout=5)
        if m is None:
            raise NotAchievedException("Did not get ATTITUDE")
        pitch = math.degrees(m.pitch)
        self.progress("Pitch:%f throttle:%u alt:%f" % (pitch, new_throttle, alt))
    # record baseline throttle/altitude before applying stick input:
    m = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=5)
    if m is None:
        raise NotAchievedException("Did not get VFR_HUD")
    initial_throttle = m.throttle
    initial_alt = m.alt
    self.progress("Initial throttle: %u" % initial_throttle)
    # pitch down, ensure throttle decreases:
    rc2_max = self.get_parameter("RC2_MAX")
    self.set_rc(2, rc2_max)
    tstart = self.get_sim_time()
    while True:
        now = self.get_sim_time_cached()
        '''stick-mixing is pushing the aircraft down. It doesn't want to go
        down (the target loiter altitude hasn't changed), so it
        tries to add energy by increasing the throttle.
        '''
        if now - tstart > 60:
            raise NotAchievedException("Did not see increase in throttle")
        m = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=5)
        if m is None:
            raise NotAchievedException("Did not get VFR_HUD")
        new_throttle = m.throttle
        alt = m.alt
        m = self.mav.recv_match(type='ATTITUDE', blocking=True, timeout=5)
        if m is None:
            raise NotAchievedException("Did not get ATTITUDE")
        pitch = math.degrees(m.pitch)
        self.progress("Pitch:%f throttle:%u alt:%f" % (pitch, new_throttle, alt))
        if new_throttle - initial_throttle > 20:
            self.progress("Throttle delta achieved")
            break
    self.progress("Centering elevator and ensuring we get back to loiter altitude")
    self.set_rc(2, 1500)
    self.wait_altitude(initial_alt-1, initial_alt+1)
    self.fly_home_land_and_disarm()
def CPUFailsafe(self):
    '''In lockup Plane should copy RC inputs to RC outputs'''
    # delegate to the shared plane CPU-failsafe implementation:
    self.plane_CPUFailsafe()
def test_large_missions(self):
self.load_mission("Kingaroy-vlarge.txt")
self.load_mission("Kingaroy-vlarge2.txt")
def fly_soaring(self):
    '''Exercise the soaring controller: thermal detection and climb,
    glide to SOAR_ALT_MIN, throttle recovery to SOAR_ALT_CUTOFF, the
    SOAR_ENABLE parameter, and the 3-position soaring RC switch.'''
    model="plane-soaring"
    self.customise_SITL_commandline([],
                                    model=model,
                                    defaults_filepath=self.model_defaults_filepath("ArduPlane",model),
                                    wipe=True)
    self.load_mission('CMAC-soar.txt')
    self.mavproxy.send("wp set 1\n")
    self.change_mode('AUTO')
    self.wait_ready_to_arm()
    self.arm_vehicle()
    # Find which RC channel has the soaring-enable option (88) and
    # switch it high to enable thermalling:
    rc_chan = 0
    for i in range(8):
        rcx_option = self.get_parameter('RC{0}_OPTION'.format(i+1))
        if rcx_option==88:
            rc_chan = i+1;
            break
    if rc_chan==0:
        raise NotAchievedException("Did not find soaring enable channel option.")
    self.send_set_rc(rc_chan, 1900)
    # Use trim airspeed.
    self.send_set_rc(3, 1500)
    # Wait to detect thermal
    self.progress("Waiting for thermal")
    self.wait_mode('THERMAL',timeout=600)
    # Wait to climb to SOAR_ALT_MAX
    self.progress("Waiting for climb to max altitude")
    alt_max = self.get_parameter('SOAR_ALT_MAX')
    self.wait_altitude(alt_max-10, alt_max, timeout=600, relative=True)
    # Wait for AUTO
    self.progress("Waiting for AUTO mode")
    self.wait_mode('AUTO')
    # Disable thermals
    self.set_parameter("SIM_THML_SCENARI", 0)
    # Wait to descend to SOAR_ALT_MIN
    self.progress("Waiting for glide to min altitude")
    alt_min = self.get_parameter('SOAR_ALT_MIN')
    self.wait_altitude(alt_min-10, alt_min, timeout=600, relative=True)
    self.progress("Waiting for throttle up")
    self.wait_servo_channel_value(3, 1200, timeout=2, comparator=operator.gt)
    self.progress("Waiting for climb to cutoff altitude")
    alt_ctf = self.get_parameter('SOAR_ALT_CUTOFF')
    self.wait_altitude(alt_ctf-10, alt_ctf, timeout=600, relative=True)
    # Allow time to suppress throttle and start descent.
    self.delay_sim_time(20)
    # Now set FBWB mode
    self.change_mode('FBWB')
    self.delay_sim_time(5)
    # Now disable soaring (should hold altitude)
    self.set_parameter("SOAR_ENABLE", 0)
    self.delay_sim_time(10)
    # And re-enable. This should force throttle-down
    self.set_parameter("SOAR_ENABLE", 1)
    self.delay_sim_time(10)
    # Now wait for descent and check throttle up
    self.wait_altitude(alt_min-10, alt_min, timeout=600, relative=True)
    self.progress("Waiting for climb")
    self.wait_altitude(alt_ctf-10, alt_ctf, timeout=600, relative=True)
    # Back to auto
    self.change_mode('AUTO')
    # Re-enable thermals
    self.set_parameter("SIM_THML_SCENARI", 1)
    # Disable soaring using RC channel.
    self.send_set_rc(rc_chan, 1100)
    # Wait to get back to waypoint before thermal.
    self.progress("Waiting to get back to position")
    self.wait_current_waypoint(3,timeout=1200)
    # Enable soaring with mode changes suppressed (switch mid-position)
    self.send_set_rc(rc_chan, 1500)
    # Make sure this causes throttle down.
    self.wait_servo_channel_value(3, 1200, timeout=2, comparator=operator.lt)
    self.progress("Waiting for next WP with no thermalling")
    self.wait_waypoint(4,4,timeout=1200,max_dist=120)
    # Disarm
    self.disarm_vehicle()
    self.progress("Mission OK")
def fly_terrain_mission(self):
    '''Fly the terrain-following mission from a freshly-wiped SITL.'''
    self.customise_SITL_commandline([], wipe=True)
    self.mavproxy.send("wp set 1\n")
    self.wait_ready_to_arm()
    self.arm_vehicle()
    self.fly_mission("ap-terrain.txt", mission_timeout=600)
def ekf_lane_switch(self):
self.context_push()
ex = None
# new lane swtich available only with EK3
self.set_parameter("EK3_ENABLE", 1)
self.set_parameter("EK2_ENABLE", 0)
self.set_parameter("AHRS_EKF_TYPE", 3)
self.set_parameter("EK3_AFFINITY", 15) # enable affinity for all sensors
self.set_parameter("EK3_IMU_MASK", 3) # use only 2 IMUs
self.set_parameter("GPS_TYPE2", 1)
self.set_parameter("SIM_GPS2_DISABLE", 0)
self.set_parameter("SIM_BARO2_DISABL", 0)
self.set_parameter("SIM_BARO_COUNT", 2)
self.set_parameter("ARSPD2_TYPE", 2)
self.set_parameter("ARSPD2_USE", 1)
self.set_parameter("ARSPD2_PIN", 2)
# some parameters need reboot to take effect
self.reboot_sitl()
self.lane_switches = []
# add an EKF lane switch hook
def statustext_hook(mav, message):
if message.get_type() != 'STATUSTEXT':
return
# example msg: EKF3 lane switch 1
if not message.text.startswith("EKF3 lane switch "):
return
newlane = int(message.text[-1])
self.lane_switches.append(newlane)
self.install_message_hook(statustext_hook)
# get flying
self.takeoff(alt=50)
self.change_mode('CIRCLE')
try:
#####################################################################################################################################################
self.progress("Checking EKF3 Lane Switching trigger from all sensors")
#####################################################################################################################################################
self.start_subtest("ACCELEROMETER: Change z-axis offset")
# create an accelerometer error by changing the Z-axis offset
self.context_collect("STATUSTEXT")
old_parameter = self.get_parameter("INS_ACCOFFS_Z")
self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=self.set_parameter("INS_ACCOFFS_Z", old_parameter + 5), check_context=True)
if self.lane_switches != [1]:
raise NotAchievedException("Expected lane switch 1, got %s" % str(self.lane_switches[-1]))
# Cleanup
self.set_parameter("INS_ACCOFFS_Z", old_parameter)
self.context_clear_collection("STATUSTEXT")
self.wait_heading(0, accuracy=10, timeout=60)
self.wait_heading(180, accuracy=10, timeout=60)
#####################################################################################################################################################
self.start_subtest("BAROMETER: Freeze to last measured value")
self.context_collect("STATUSTEXT")
# create a barometer error by inhibiting any pressure change while changing altitude
old_parameter = self.get_parameter("SIM_BARO2_FREEZE")
self.set_parameter("SIM_BARO2_FREEZE", 1)
self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=lambda: self.set_rc(2, 2000), check_context=True)
if self.lane_switches != [1, 0]:
raise NotAchievedException("Expected lane switch 0, got %s" % str(self.lane_switches[-1]))
# Cleanup
self.set_rc(2, 1500)
self.set_parameter("SIM_BARO2_FREEZE", old_parameter)
self.context_clear_collection("STATUSTEXT")
self.wait_heading(0, accuracy=10, timeout=60)
self.wait_heading(180, accuracy=10, timeout=60)
#####################################################################################################################################################
self.start_subtest("GPS: Apply GPS Velocity Error in NED")
self.context_push()
self.context_collect("STATUSTEXT")
# create a GPS velocity error by adding a random 2m/s noise on each axis
def sim_gps_verr():
self.set_parameter("SIM_GPS_VERR_X", self.get_parameter("SIM_GPS_VERR_X") + 2)
self.set_parameter("SIM_GPS_VERR_Y", self.get_parameter("SIM_GPS_VERR_Y") + 2)
self.set_parameter("SIM_GPS_VERR_Z", self.get_parameter("SIM_GPS_VERR_Z") + 2)
self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=sim_gps_verr(), check_context=True)
if self.lane_switches != [1, 0, 1]:
raise NotAchievedException("Expected lane switch 1, got %s" % str(self.lane_switches[-1]))
# Cleanup
self.context_pop()
self.context_clear_collection("STATUSTEXT")
self.wait_heading(0, accuracy=10, timeout=60)
self.wait_heading(180, accuracy=10, timeout=60)
#####################################################################################################################################################
self.start_subtest("MAGNETOMETER: Change X-Axis Offset")
self.context_collect("STATUSTEXT")
# create a magnetometer error by changing the X-axis offset
old_parameter = self.get_parameter("SIM_MAG2_OFS_X")
self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=self.set_parameter("SIM_MAG2_OFS_X", old_parameter + 150), check_context=True)
if self.lane_switches != [1, 0, 1, 0]:
raise NotAchievedException("Expected lane switch 0, got %s" % str(self.lane_switches[-1]))
# Cleanup
self.set_parameter("SIM_MAG2_OFS_X", old_parameter)
self.context_clear_collection("STATUSTEXT")
self.wait_heading(0, accuracy=10, timeout=60)
self.wait_heading(180, accuracy=10, timeout=60)
#####################################################################################################################################################
self.start_subtest("AIRSPEED: Fail to constant value")
self.context_push()
self.context_collect("STATUSTEXT")
# create an airspeed sensor error by freezing to the current airspeed then changing the groundspeed
old_parameter = self.get_parameter("SIM_ARSPD_FAIL")
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
self.set_parameter("SIM_ARSPD_FAIL", m.airspeed)
def change_speed():
self.change_mode("GUIDED")
self.run_cmd_int(
mavutil.mavlink.MAV_CMD_DO_REPOSITION,
0,
0,
0,
0,
12345, # lat*1e7
12345, # lon*1e7
50 # alt
)
self.delay_sim_time(5)
new_target_groundspeed = m.groundspeed + 5
self.run_cmd(
mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED,
1, # groundspeed
new_target_groundspeed,
-1, # throttle / no change
0, # absolute values
0,
0,
0
)
self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=change_speed(), check_context=True)
if self.lane_switches != [1, 0, 1, 0, 1]:
raise NotAchievedException("Expected lane switch 1, got %s" % str(self.lane_switches[-1]))
# Cleanup
self.change_mode('CIRCLE')
self.context_pop()
self.context_clear_collection("STATUSTEXT")
self.wait_heading(0, accuracy=10, timeout=60)
self.wait_heading(180, accuracy=10, timeout=60)
#####################################################################################################################################################
self.progress("GYROSCOPE: Change Y-Axis Offset")
self.context_collect("STATUSTEXT")
# create a gyroscope error by changing the Y-axis offset
old_parameter = self.get_parameter("INS_GYR2OFFS_Y")
self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=self.set_parameter("INS_GYR2OFFS_Y", old_parameter + 1), check_context=True)
if self.lane_switches != [1, 0, 1, 0, 1, 0]:
raise NotAchievedException("Expected lane switch 0, got %s" % str(self.lane_switches[-1]))
# Cleanup
self.set_parameter("INS_GYR2OFFS_Y", old_parameter)
self.context_clear_collection("STATUSTEXT")
#####################################################################################################################################################
self.disarm_vehicle()
except Exception as e:
self.progress("Caught exception: %s" % self.get_exception_stacktrace(e))
ex = e
self.remove_message_hook(statustext_hook)
self.context_pop()
if ex is not None:
raise ex
    def tests(self):
        '''Return the list of (name, description, function) test tuples,
        extending the tests inherited from the base autotest class.'''
        ret = super(AutoTestPlane, self).tests()
        ret.extend([
            ("AuxModeSwitch",
             "Set modes via auxswitches",
             self.test_setting_modes_via_auxswitches),

            ("TestRCCamera",
             "Test RC Option - Camera Trigger",
             self.test_rc_option_camera_trigger),

            ("TestRCRelay", "Test Relay RC Channel Option", self.test_rc_relay),

            ("ThrottleFailsafe",
             "Fly throttle failsafe",
             self.test_throttle_failsafe),

            ("ThrottleFailsafeFence",
             "Fly fence survives throttle failsafe",
             self.test_throttle_failsafe_fence),

            ("TestFlaps", "Flaps", self.fly_flaps),

            ("DO_CHANGE_SPEED", "Test mavlink DO_CHANGE_SPEED command", self.fly_do_change_speed),

            ("DO_REPOSITION",
             "Test mavlink DO_REPOSITION command",
             self.fly_do_reposition),

            ("GuidedRequest",
             "Test handling of MISSION_ITEM in guided mode",
             self.fly_do_guided_request),

            ("MainFlight",
             "Lots of things in one flight",
             self.test_main_flight),

            ("TestGripperMission",
             "Test Gripper mission items",
             self.test_gripper_mission),

            ("Parachute", "Test Parachute", self.test_parachute),

            ("ParachuteSinkRate", "Test Parachute (SinkRate triggering)", self.test_parachute_sinkrate),

            ("AIRSPEED_AUTOCAL", "Test AIRSPEED_AUTOCAL", self.airspeed_autocal),

            ("RangeFinder",
             "Test RangeFinder Basic Functionality",
             self.test_rangefinder),

            ("FenceStatic",
             "Test Basic Fence Functionality",
             self.test_fence_static),

            ("FenceRTL",
             "Test Fence RTL",
             self.test_fence_rtl),

            ("FenceRTLRally",
             "Test Fence RTL Rally",
             self.test_fence_rtl_rally),

            ("ADSB",
             "Test ADSB",
             self.test_adsb),

            ("Button",
             "Test Buttons",
             self.test_button),

            ("FRSkySPort",
             "Test FrSky SPort mode",
             self.test_frsky_sport),

            ("FRSkyPassThrough",
             "Test FrSky PassThrough serial output",
             self.test_frsky_passthrough),

            ("FRSkyD",
             "Test FrSkyD serial output",
             self.test_frsky_d),

            ("LTM",
             "Test LTM serial output",
             self.test_ltm),

            ("AdvancedFailsafe",
             "Test Advanced Failsafe",
             self.test_advanced_failsafe),

            ("LOITER",
             "Test Loiter mode",
             self.LOITER),

            ("DeepStall",
             "Test DeepStall Landing",
             self.fly_deepstall),

            ("LargeMissions",
             "Test Manipulation of Large missions",
             self.test_large_missions),

            ("Soaring",
             "Test Soaring feature",
             self.fly_soaring),

            ("Terrain",
             "Test terrain following in mission",
             self.fly_terrain_mission),

            ("Deadreckoning",
             "Test deadreckoning support",
             self.deadreckoning),

            ("EKFlaneswitch",
             "Test EKF3 Affinity and Lane Switching",
             self.ekf_lane_switch),

            # keep LogUpload last: it uploads whatever logs the run produced
            ("LogUpload",
             "Log upload",
             self.log_upload),
        ])
        return ret
    def disabled_tests(self):
        """Return a map of test name -> reason why that test is disabled."""
        return {
            "Button": "See https://github.com/ArduPilot/ardupilot/issues/15259",
        }
| gpl-3.0 |
JelleZijlstra/cython | Cython/Compiler/Errors.py | 17 | 7277 | #
# Errors
#
from __future__ import absolute_import
try:
from __builtin__ import basestring as any_string_type
except ImportError:
any_string_type = (bytes, str)
import sys
from ..Utils import open_new_file
from . import DebugFlags
from . import Options
class PyrexError(Exception):
    """Base class for all Cython/Pyrex compilation errors."""
    pass

class PyrexWarning(Exception):
    """Base class for all Cython/Pyrex compilation warnings."""
    pass
def context(position):
    """Return a source-context excerpt for an error *position*.

    *position* is a (source_descriptor, line, column) triple.  The result
    shows up to the six lines ending at the offending line, with a caret
    under the offending column, framed by dashed rules.
    """
    source = position[0]
    assert not (isinstance(source, any_string_type)), (
        "Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source)
    try:
        F = source.get_lines()
    except UnicodeDecodeError:
        # file has an encoding problem
        s = u"[unprintable code]\n"
    else:
        # slice is [line-6, line): the error line plus up to 5 lines before it
        s = u''.join(F[max(0, position[1]-6):position[1]])
        s = u'...\n%s%s^\n' % (s, u' '*(position[2]-1))
    s = u'%s\n%s%s\n' % (u'-'*60, s, u'-'*60)
    return s
def format_position(position):
    """Render *position* as ``description:line:col: `` (empty for no position)."""
    if not position:
        return u''
    source_desc, line, col = position[0], position[1], position[2]
    return u"%s:%d:%d: " % (source_desc.get_error_description(), line, col)
def format_error(message, position):
    """Prefix *message* with position info and a source-context excerpt.

    Returns *message* unchanged when *position* is falsy.
    """
    if position:
        pos_str = format_position(position)
        cont = context(position)
        message = u'\nError compiling Cython file:\n%s\n%s%s' % (cont, pos_str, message or u'')
    return message
class CompileError(PyrexError):
    """Error in the code being compiled, carrying its source position and
    both the raw and the position-formatted message text."""
    def __init__(self, position = None, message = u""):
        self.position = position
        self.message_only = message
        self.formatted_message = format_error(message, position)
        # set True once written out, to avoid dual reporting (see report_error)
        self.reported = False
        # Deprecated and withdrawn in 2.6:
        # self.message = message
        Exception.__init__(self, self.formatted_message)
        # Python Exception subclass pickling is broken,
        # see http://bugs.python.org/issue1692335
        self.args = (position, message)
    def __str__(self):
        return self.formatted_message
class CompileWarning(PyrexWarning):
    """Warning tied to a source position in the code being compiled."""
    def __init__(self, position = None, message = ""):
        self.position = position
        # Deprecated and withdrawn in 2.6:
        # self.message = message
        Exception.__init__(self, format_position(position) + message)
class InternalError(Exception):
    """Compiler-internal failure; indicates a bug in the compiler itself."""
    # If this is ever raised, there is a bug in the compiler.
    def __init__(self, message):
        self.message_only = message
        Exception.__init__(self, u"Internal compiler error: %s"
            % message)
class AbortError(Exception):
    """Raised to stop the compilation immediately (e.g. fast_fail)."""
    # Throw this to stop the compilation immediately.
    def __init__(self, message):
        self.message_only = message
        Exception.__init__(self, u"Abort error: %s" % message)
class CompilerCrash(CompileError):
    """CompileError wrapping an unexpected exception from a transform,
    including the causing exception and (optionally) its traceback."""
    # raised when an unexpected exception occurs in a transform
    def __init__(self, pos, context, message, cause, stacktrace=None):
        if message:
            message = u'\n' + message
        else:
            message = u'\n'
        # keep the bare message before prefixing context/traceback details
        self.message_only = message
        if context:
            message = u"Compiler crash in %s%s" % (context, message)
        if stacktrace:
            import traceback
            message += (
                u'\n\nCompiler crash traceback from this point on:\n' +
                u''.join(traceback.format_tb(stacktrace)))
        if cause:
            if not stacktrace:
                message += u'\n'
            message += u'%s: %s' % (cause.__class__.__name__, cause)
        CompileError.__init__(self, pos, message)
        # Python Exception subclass pickling is broken,
        # see http://bugs.python.org/issue1692335
        self.args = (pos, context, message, cause, stacktrace)
class NoElementTreeInstalledException(PyrexError):
    """Raised when the user enabled options.gdb_debug but no ElementTree
    implementation was found.
    """
# Module-level listing state shared by the reporting functions below.
listing_file = None  # file errors are written to, or None when no listing is open
num_errors = 0       # count of errors reported since open_listing_file()
echo_file = None     # secondary stream (usually sys.stderr) errors are echoed to
def open_listing_file(path, echo_to_stderr = 1):
    """Begin a new error listing and reset the error counter.

    If *path* is None no listing file is opened.  Errors are additionally
    echoed to stderr unless *echo_to_stderr* is false.
    """
    # Begin a new error listing. If path is None, no file
    # is opened, the error counter is just reset.
    global listing_file, num_errors, echo_file
    if path is not None:
        listing_file = open_new_file(path)
    else:
        listing_file = None
    if echo_to_stderr:
        echo_file = sys.stderr
    else:
        echo_file = None
    num_errors = 0
def close_listing_file():
    """Close the current listing file, if one is open."""
    global listing_file
    if listing_file:
        listing_file.close()
        listing_file = None
def report_error(err):
    """Record *err*: buffer it while errors are held, otherwise emit it.

    While hold_errors() is active the error is appended to the innermost
    held list; otherwise it is written to the listing/echo files and the
    global error count is incremented.  Raises AbortError when
    Options.fast_fail is set.
    """
    if error_stack:
        error_stack[-1].append(err)
    else:
        global num_errors
        # See Main.py for why dual reporting occurs. Quick fix for now.
        if err.reported: return
        err.reported = True
        try: line = u"%s\n" % err
        except UnicodeEncodeError:
            # Python <= 2.5 does this for non-ASCII Unicode exceptions
            line = format_error(getattr(err, 'message_only', "[unprintable exception message]"),
                                getattr(err, 'position', None)) + u'\n'
        if listing_file:
            try: listing_file.write(line)
            except UnicodeEncodeError:
                listing_file.write(line.encode('ASCII', 'replace'))
        if echo_file:
            try: echo_file.write(line)
            except UnicodeEncodeError:
                echo_file.write(line.encode('ASCII', 'replace'))
        num_errors += 1
        if Options.fast_fail:
            raise AbortError("fatal errors")
def error(position, message):
    """Create, report and return a CompileError at *position*.

    Raises InternalError when *position* is None, since every user-facing
    error must carry a source position.
    """
    #print("Errors.error:", repr(position), repr(message)) ###
    if position is None:
        raise InternalError(message)
    err = CompileError(position, message)
    if DebugFlags.debug_exception_on_error: raise Exception(err) # debug
    report_error(err)
    return err
LEVEL = 1 # warn about all errors level 1 or higher
def message(position, message, level=1):
    """Emit an informational note; suppressed when *level* < LEVEL.

    NOTE: the *message* parameter intentionally shadows this function's name.
    """
    if level < LEVEL:
        return
    warn = CompileWarning(position, message)
    line = "note: %s\n" % warn
    if listing_file:
        listing_file.write(line)
    if echo_file:
        echo_file.write(line)
    return warn
def warning(position, message, level=0):
    """Emit a compile warning; escalates to an error when
    Options.warning_errors is set and a position is available."""
    if level < LEVEL:
        return
    if Options.warning_errors and position:
        return error(position, message)
    warn = CompileWarning(position, message)
    line = "warning: %s\n" % warn
    if listing_file:
        listing_file.write(line)
    if echo_file:
        echo_file.write(line)
    return warn
_warn_once_seen = {}
def warn_once(position, message, level=0):
    """Emit a warning only the first time *message* is seen.

    Returns the CompileWarning that was issued, or None when the warning
    is below the reporting level or has already been emitted.
    """
    if level < LEVEL:
        return
    if message in _warn_once_seen:
        return
    warn_obj = CompileWarning(position, message)
    text = "warning: %s\n" % warn_obj
    for stream in (listing_file, echo_file):
        if stream:
            stream.write(text)
    _warn_once_seen[message] = True
    return warn_obj
# These functions can be used to momentarily suppress errors.
error_stack = []
def hold_errors():
    """Start buffering errors instead of reporting them immediately."""
    error_stack.append([])
def release_errors(ignore=False):
    """Stop buffering and report the held errors, unless *ignore* is true."""
    # NOTE: local name shadows the held_errors() function below.
    held_errors = error_stack.pop()
    if not ignore:
        for err in held_errors:
            report_error(err)
def held_errors():
    """Return the errors collected since the most recent hold_errors()."""
    return error_stack[-1]
# this module needs a redesign to support parallel cythonisation, but
# for now, the following works at least in sequential compiler runs
def reset():
    """Clear warn-once bookkeeping and any held errors between compiler runs."""
    _warn_once_seen.clear()
    del error_stack[:]
| apache-2.0 |
akinaru/ffmpeg-image-sequencer | ffmpeg-appender-test.py | 1 | 3157 | #!/usr/bin/python
#####################################################################################
#####################################################################################
#
# title : ffmpeg-appender-test.py
# authors : Bertrand Martel
# copyrights : Copyright (c) 2015 Bertrand Martel
# license : The MIT License (MIT)
# date : 16/08/2015
# description : create video if not exist and append a series of image to this video taken from WEB
# usage : python ffmpeg-appender-test.py
#
#####################################################################################
#####################################################################################
import sys, getopt, os, subprocess
def _remove_quietly(path):
    """Delete *path*, ignoring the error when it does not exist."""
    try:
        os.remove(path)
    except OSError:
        pass

def main(argv):
    """Build/extend video_space.avi by appending web images as 1s frames.

    For each URL: download it with wget, encode it as a single-frame
    1080p AVI with ffmpeg, then concatenate it onto the output video
    (or rename it into place for the first frame).  Temporary files are
    removed as we go.
    """
    output_file_name = "video_space"
    temporary_file_name = "temp_space"
    temporary_file_name_video = "temp_video"

    # Source images appended to the video, in order.
    picture_array = [ "https://upload.wikimedia.org/wikipedia/commons/4/4e/Anttlers101.jpg", \
        "https://upload.wikimedia.org/wikipedia/commons/3/3b/NASA-SpiralGalaxyM101-20140505.jpg", \
        "https://upload.wikimedia.org/wikipedia/commons/b/b0/Supernova_in_M101_2011-08-25.jpg", \
        "http://1.1.1.5/bmi/images.nationalgeographic.com/wpf/media-live/photos/000/061/cache/earth-full-view_6125_990x742.jpg" ]

    this_dir = os.path.dirname(os.path.abspath(__file__))
    os.chdir(this_dir)

    output_file_path = ''.join([this_dir , "/",output_file_name,".avi"])
    temporary_file_path_avi = ''.join([this_dir,"/",temporary_file_name,".avi"])
    temporary_file_name_jpg = ''.join([this_dir,"/",temporary_file_name,".jpg"])
    temporary_file_name_video = ''.join([this_dir,"/",temporary_file_name_video,".avi"])

    # Start from a clean slate: drop any leftovers from a previous run.
    for leftover in (output_file_path, temporary_file_path_avi,
                     temporary_file_name_jpg, temporary_file_name_video):
        _remove_quietly(leftover)

    for picture in picture_array:
        # Fetch the image, then encode it as a single-frame 1080p AVI.
        subprocess.call(["wget", picture, "-O", temporary_file_name_jpg])
        # NOTE: shell=True is acceptable here only because the command strings
        # are built from constants; never build them from untrusted input.
        subprocess.call(["ffmpeg -nostdin -v verbose -f image2 -pattern_type sequence -start_number 0 -r 1 -i " + temporary_file_name_jpg + " -s 1920x1080 " + temporary_file_path_avi],shell=True)
        _remove_quietly(temporary_file_name_jpg)

        if os.path.exists(output_file_path):
            # concat this video and former video
            subprocess.call(['cd ' + this_dir + ' | ffmpeg -nostdin -v verbose -i "concat:' + output_file_name + '.avi|' + temporary_file_name + '.avi" -c copy ' + temporary_file_name_video],shell=True)
            _remove_quietly(temporary_file_path_avi)
            _remove_quietly(output_file_path)
            os.rename(temporary_file_name_video, output_file_path)
        else:
            # First frame: the single-frame AVI becomes the output video.
            os.rename(temporary_file_path_avi, output_file_path)
if __name__ == "__main__":
main(sys.argv[1:])
__author__ = "Bertrand Martel"
__copyright__ = "Copyright 2015, Bertrand Martel"
__credits__ = ["Bertrand Martel"]
__license__ = "MIT"
__version__ = "1.0.0"
__maintainer__ = "Bertrand Martel"
__email__ = "bmartel.fr@gmail.com"
__status__ = "POC" | mit |
csangani/ReproducingSprout | extract_traces.py | 1 | 1317 | ## Create a network trace from the saturator output
import glob
import os
import sys
INPUT_PATH = 'raw_traces'
OUTPUT_PATH = 'cleaned_traces'
def extract_trace(filePath, targetFilePath):
    """Convert a saturator receive log into a millisecond trace file.

    Each input line looks like ``recv_time=<nanoseconds>,``.  The first
    timestamp is taken as time zero and every line is rewritten as the
    integer number of milliseconds since that base.
    """
    with open(filePath) as f:
        with open(targetFilePath, 'w+') as wf:
            firstLine = True
            for line in f:
                # int() replaces the Python-2-only long(); Python ints have
                # arbitrary precision, so large timestamps still fit.
                value = int(line.lstrip('recv_time=').rstrip(',\n'))
                if firstLine:
                    base = value
                    firstLine = False
                # Floor division keeps the original Python 2 integer
                # semantics ("/" would produce a float under Python 3).
                value = (value - base) // 1000000
                wf.write('%s\n' % value)
if __name__ == '__main__':
    # Optional CLI arguments: [source_dir] [destination_dir]; defaults to
    # raw_traces -> cleaned_traces.
    if len(sys.argv) >= 2:
        source = sys.argv[1]
    else:
        source = INPUT_PATH
    if len(sys.argv) >= 3:
        destination = sys.argv[2]
    else:
        destination = OUTPUT_PATH
    if not os.path.exists(destination):
        os.makedirs(destination)
    # Mirror the per-network directory layout of the source tree, then
    # convert each .rx capture into a .pps trace.
    networks = glob.glob('%s/*' % source)
    for network in networks:
        if not os.path.exists(network.replace(source, destination)):
            os.makedirs(network.replace(source, destination))
        files = glob.glob('%s/*.rx' % network)
        for file in files:
            extract_trace(file, file.replace(source, destination).replace('.rx', '.pps'))
| mit |
sjlehtin/django | tests/queries/test_q.py | 25 | 3262 | from django.db.models import F, Q
from django.test import SimpleTestCase
class QTests(SimpleTestCase):
    """Tests for Q object combination (&, |), deconstruction and
    round-trip reconstruction via Q.deconstruct()."""
    def test_combine_and_empty(self):
        q = Q(x=1)
        self.assertEqual(q & Q(), q)
        self.assertEqual(Q() & q, q)
    def test_combine_and_both_empty(self):
        self.assertEqual(Q() & Q(), Q())
    def test_combine_or_empty(self):
        q = Q(x=1)
        self.assertEqual(q | Q(), q)
        self.assertEqual(Q() | q, q)
    def test_combine_or_both_empty(self):
        self.assertEqual(Q() | Q(), Q())
    def test_deconstruct(self):
        # A single-child Q deconstructs into kwargs, not args.
        q = Q(price__gt=F('discounted_price'))
        path, args, kwargs = q.deconstruct()
        self.assertEqual(path, 'django.db.models.query_utils.Q')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {'price__gt': F('discounted_price')})
    def test_deconstruct_negated(self):
        q = ~Q(price__gt=F('discounted_price'))
        path, args, kwargs = q.deconstruct()
        self.assertEqual(path, 'django.db.models.query_utils.Q')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {
            'price__gt': F('discounted_price'),
            '_negated': True,
        })
    def test_deconstruct_or(self):
        # Multi-child Qs deconstruct into args plus a _connector kwarg.
        q1 = Q(price__gt=F('discounted_price'))
        q2 = Q(price=F('discounted_price'))
        q = q1 | q2
        path, args, kwargs = q.deconstruct()
        self.assertEqual(path, 'django.db.models.query_utils.Q')
        self.assertEqual(args, (
            ('price__gt', F('discounted_price')),
            ('price', F('discounted_price')),
        ))
        self.assertEqual(kwargs, {'_connector': 'OR'})
    def test_deconstruct_and(self):
        q1 = Q(price__gt=F('discounted_price'))
        q2 = Q(price=F('discounted_price'))
        q = q1 & q2
        path, args, kwargs = q.deconstruct()
        self.assertEqual(path, 'django.db.models.query_utils.Q')
        self.assertEqual(args, (
            ('price__gt', F('discounted_price')),
            ('price', F('discounted_price')),
        ))
        self.assertEqual(kwargs, {'_connector': 'AND'})
    def test_deconstruct_nested(self):
        q = Q(Q(price__gt=F('discounted_price')))
        path, args, kwargs = q.deconstruct()
        self.assertEqual(path, 'django.db.models.query_utils.Q')
        self.assertEqual(args, (Q(price__gt=F('discounted_price')),))
        self.assertEqual(kwargs, {'_connector': 'AND'})
    def test_reconstruct(self):
        # Q(*args, **kwargs) from deconstruct() must reproduce the original.
        q = Q(price__gt=F('discounted_price'))
        path, args, kwargs = q.deconstruct()
        self.assertEqual(Q(*args, **kwargs), q)
    def test_reconstruct_negated(self):
        q = ~Q(price__gt=F('discounted_price'))
        path, args, kwargs = q.deconstruct()
        self.assertEqual(Q(*args, **kwargs), q)
    def test_reconstruct_or(self):
        q1 = Q(price__gt=F('discounted_price'))
        q2 = Q(price=F('discounted_price'))
        q = q1 | q2
        path, args, kwargs = q.deconstruct()
        self.assertEqual(Q(*args, **kwargs), q)
    def test_reconstruct_and(self):
        q1 = Q(price__gt=F('discounted_price'))
        q2 = Q(price=F('discounted_price'))
        q = q1 & q2
        path, args, kwargs = q.deconstruct()
        self.assertEqual(Q(*args, **kwargs), q)
| bsd-3-clause |
meshulam/sly | deps/shapely/geos.py | 1 | 25191 | """
Proxies for the libgeos_c shared lib, GEOS-specific exceptions, and utilities
"""
import os
import re
import sys
import atexit
import logging
import threading
from ctypes import CDLL, cdll, pointer, c_void_p, c_size_t, c_char_p, string_at
from ctypes.util import find_library
from . import ftools
from .ctypes_declarations import prototype, EXCEPTION_HANDLER_FUNCTYPE
# Add message handler to this module's logger
LOG = logging.getLogger(__name__)
if 'all' in sys.warnoptions:
    # show GEOS messages in console with: python -W all
    logging.basicConfig()
else:
    # no handler messages shown
    class NullHandler(logging.Handler):
        """Logging handler that silently discards every record."""
        def emit(self, record):
            pass
    LOG.addHandler(NullHandler())
# Find and load the GEOS and C libraries
# If this ever gets any longer, we'll break it into separate modules
def load_dll(libname, fallbacks=None):
    """Load a shared library, trying *libname* first, then *fallbacks*.

    Returns the loaded CDLL; raises OSError when neither the located
    library nor any fallback name can be loaded.
    """
    located = find_library(libname)
    candidates = [] if located is None else [located]
    candidates.extend(fallbacks or [])
    for candidate in candidates:
        try:
            return CDLL(candidate)
        except OSError:
            # move on to the next candidate name
            continue
    # No shared library was loaded. Raise OSError.
    raise OSError(
        "Could not find library %s or load any of its variants %s" % (
            libname, fallbacks or []))
if sys.platform.startswith('linux'):
_lgeos = load_dll('geos_c', fallbacks=['libgeos_c.so.1', 'libgeos_c.so'])
free = load_dll('c').free
free.argtypes = [c_void_p]
free.restype = None
elif sys.platform == 'darwin':
if hasattr(sys, 'frozen'):
# .app file from py2app
alt_paths = [os.path.join(os.environ['RESOURCEPATH'],
'..', 'Frameworks', 'libgeos_c.dylib')]
else:
alt_paths = [
# The Framework build from Kyng Chaos:
"/Library/Frameworks/GEOS.framework/Versions/Current/GEOS",
# macports
'/opt/local/lib/libgeos_c.dylib',
]
_lgeos = load_dll('geos_c', fallbacks=alt_paths)
free = load_dll('c').free
free.argtypes = [c_void_p]
free.restype = None
elif sys.platform == 'win32':
try:
egg_dlls = os.path.abspath(os.path.join(os.path.dirname(__file__),
"DLLs"))
wininst_dlls = os.path.abspath(os.__file__ + "../../../DLLs")
original_path = os.environ['PATH']
os.environ['PATH'] = "%s;%s;%s" % \
(egg_dlls, wininst_dlls, original_path)
_lgeos = CDLL("geos.dll")
except (ImportError, WindowsError, OSError):
raise
def free(m):
try:
cdll.msvcrt.free(m)
except WindowsError:
# XXX: See http://trac.gispython.org/projects/PCL/ticket/149
pass
elif sys.platform == 'sunos5':
_lgeos = load_dll('geos_c', fallbacks=['libgeos_c.so.1', 'libgeos_c.so'])
free = CDLL('libc.so.1').free
free.argtypes = [c_void_p]
free.restype = None
else: # other *nix systems
_lgeos = load_dll('geos_c', fallbacks=['libgeos_c.so.1', 'libgeos_c.so'])
free = load_dll('c', fallbacks=['libc.so.6']).free
free.argtypes = [c_void_p]
free.restype = None
def _geos_version():
    """Query libgeos_c for its version string and parse it.

    Returns (raw_version_string, geos_version_tuple, capi_version_tuple)
    where the tuples are (major, minor, patch) ints.
    """
    # extern const char GEOS_DLL *GEOSversion();
    GEOSversion = _lgeos.GEOSversion
    GEOSversion.restype = c_char_p
    GEOSversion.argtypes = []
    #define GEOS_CAPI_VERSION "@VERSION@-CAPI-@CAPI_VERSION@"
    geos_version_string = GEOSversion()
    if sys.version_info[0] >= 3:
        geos_version_string = geos_version_string.decode('ascii')
    # expect exactly two dotted triples: the GEOS version and the C-API version
    res = re.findall(r'(\d+)\.(\d+)\.(\d+)', geos_version_string)
    assert len(res) == 2, res
    geos_version = tuple(int(x) for x in res[0])
    capi_version = tuple(int(x) for x in res[1])
    return geos_version_string, geos_version, capi_version
geos_version_string, geos_version, geos_capi_version = _geos_version()
# If we have the new interface, then record a baseline so that we know what
# additional functions are declared in ctypes_declarations.
if geos_version >= (3, 1, 0):
start_set = set(_lgeos.__dict__)
# Apply prototypes for the libgeos_c functions
prototype(_lgeos, geos_version)
# If we have the new interface, automatically detect all function
# declarations, and declare their re-entrant counterpart.
if geos_version >= (3, 1, 0):
end_set = set(_lgeos.__dict__)
new_func_names = end_set - start_set
for func_name in new_func_names:
new_func_name = "%s_r" % func_name
if hasattr(_lgeos, new_func_name):
new_func = getattr(_lgeos, new_func_name)
old_func = getattr(_lgeos, func_name)
new_func.restype = old_func.restype
if old_func.argtypes is None:
# Handle functions that didn't take an argument before,
# finishGEOS.
new_func.argtypes = [c_void_p]
else:
new_func.argtypes = [c_void_p] + old_func.argtypes
if old_func.errcheck is not None:
new_func.errcheck = old_func.errcheck
# Handle special case.
_lgeos.initGEOS_r.restype = c_void_p
_lgeos.initGEOS_r.argtypes = \
[EXCEPTION_HANDLER_FUNCTYPE, EXCEPTION_HANDLER_FUNCTYPE]
_lgeos.finishGEOS_r.argtypes = [c_void_p]
# Exceptions
class ReadingError(Exception):
    """Raised when WKT/WKB input cannot be parsed into a geometry."""
    pass

class DimensionError(Exception):
    """Raised for coordinate-dimension mismatches."""
    pass

class TopologicalError(Exception):
    """Raised when a GEOS topology operation fails."""
    pass

class PredicateError(Exception):
    """Raised when a GEOS binary predicate returns an error code."""
    pass
def error_handler(fmt, *args):
    """GEOS error callback: decode the C strings and log at ERROR level."""
    if sys.version_info[0] >= 3:
        fmt = fmt.decode('ascii')
        args = [arg.decode('ascii') for arg in args]
    LOG.error(fmt, *args)
def notice_handler(fmt, args):
    """GEOS notice callback: decode the C strings and log at WARNING level."""
    if sys.version_info[0] >= 3:
        fmt = fmt.decode('ascii')
        args = args.decode('ascii')
    LOG.warning(fmt, args)
error_h = EXCEPTION_HANDLER_FUNCTYPE(error_handler)
notice_h = EXCEPTION_HANDLER_FUNCTYPE(notice_handler)
class WKTReader(object):
    """Thin wrapper around GEOSWKTReader: parses WKT text into geometries."""

    _lgeos = None   # libgeos_c handle
    _reader = None  # GEOSWKTReader* owned by this instance

    def __init__(self, lgeos):
        """Create WKT Reader"""
        self._lgeos = lgeos
        self._reader = self._lgeos.GEOSWKTReader_create()

    def __del__(self):
        """Destroy WKT Reader"""
        if self._lgeos is not None:
            self._lgeos.GEOSWKTReader_destroy(self._reader)
            self._reader = None
            self._lgeos = None

    def read(self, text):
        """Returns geometry from WKT"""
        if sys.version_info[0] >= 3:
            text = text.encode('ascii')
        geom = self._lgeos.GEOSWKTReader_read(self._reader, c_char_p(text))
        if not geom:
            raise ReadingError("Could not create geometry because of errors "
                               "while reading input.")
        # avoid circular import dependency
        from shapely.geometry.base import geom_factory
        return geom_factory(geom)
class WKTWriter(object):
    """Thin wrapper around GEOSWKTWriter: serializes geometries to WKT.

    The tunable output properties (trim, rounding_precision,
    output_dimension, old_3d) are only available with GEOS >= 3.3.0.
    """

    _lgeos = None   # libgeos_c handle
    _writer = None  # GEOSWKTWriter* owned by this instance

    # Establish default output settings
    defaults = {}

    if geos_version >= (3, 3, 0):

        defaults['trim'] = True
        defaults['output_dimension'] = 3

        # GEOS' defaults for methods without "get"
        _trim = False
        _rounding_precision = -1
        _old_3d = False

        @property
        def trim(self):
            """Trimming of unnecessary decimals (default: True)"""
            return getattr(self, '_trim')

        @trim.setter
        def trim(self, value):
            self._trim = bool(value)
            self._lgeos.GEOSWKTWriter_setTrim(self._writer, self._trim)

        @property
        def rounding_precision(self):
            """Rounding precision when writing the WKT.
            A precision of -1 (default) disables it."""
            return getattr(self, '_rounding_precision')

        @rounding_precision.setter
        def rounding_precision(self, value):
            self._rounding_precision = int(value)
            self._lgeos.GEOSWKTWriter_setRoundingPrecision(
                self._writer, self._rounding_precision)

        @property
        def output_dimension(self):
            """Output dimension, either 2 or 3 (default)"""
            return self._lgeos.GEOSWKTWriter_getOutputDimension(
                self._writer)

        @output_dimension.setter
        def output_dimension(self, value):
            self._lgeos.GEOSWKTWriter_setOutputDimension(
                self._writer, int(value))

        @property
        def old_3d(self):
            """Show older style for 3D WKT, without 'Z' (default: False)"""
            return getattr(self, '_old_3d')

        @old_3d.setter
        def old_3d(self, value):
            self._old_3d = bool(value)
            self._lgeos.GEOSWKTWriter_setOld3D(self._writer, self._old_3d)

    def __init__(self, lgeos, **settings):
        """Create WKT Writer

        Note: writer defaults are set differently for GEOS 3.3.0 and up.
        For example, with 'POINT Z (1 2 3)':

            newer: POINT Z (1 2 3)
            older: POINT (1.0000000000000000 2.0000000000000000)

        The older formatting can be achieved for GEOS 3.3.0 and up by setting
        the properties:

            trim = False
            output_dimension = 2
        """
        self._lgeos = lgeos
        self._writer = self._lgeos.GEOSWKTWriter_create()

        applied_settings = self.defaults.copy()
        applied_settings.update(settings)
        for name in applied_settings:
            setattr(self, name, applied_settings[name])

    def __setattr__(self, name, value):
        """Limit setting attributes"""
        # Only pre-declared attributes may be assigned, so typos in
        # settings raise rather than being silently ignored.
        if hasattr(self, name):
            object.__setattr__(self, name, value)
        else:
            raise AttributeError('%r object has no attribute %r' %
                                 (self.__class__.__name__, name))

    def __del__(self):
        """Destroy WKT Writer"""
        if self._lgeos is not None:
            self._lgeos.GEOSWKTWriter_destroy(self._writer)
            self._writer = None
            self._lgeos = None

    def write(self, geom):
        """Returns WKT string for geometry"""
        if geom is None or geom._geom is None:
            raise ValueError("Null geometry supports no operations")
        result = self._lgeos.GEOSWKTWriter_write(self._writer, geom._geom)
        text = string_at(result)
        # the returned buffer is owned by GEOS and must be freed by it
        lgeos.GEOSFree(result)
        if sys.version_info[0] >= 3:
            return text.decode('ascii')
        else:
            return text
class WKBReader(object):
    """Thin wrapper around GEOSWKBReader: parses WKB bytes or hex strings."""

    _lgeos = None   # libgeos_c handle
    _reader = None  # GEOSWKBReader* owned by this instance

    def __init__(self, lgeos):
        """Create WKB Reader"""
        self._lgeos = lgeos
        self._reader = self._lgeos.GEOSWKBReader_create()

    def __del__(self):
        """Destroy WKB Reader"""
        if self._lgeos is not None:
            self._lgeos.GEOSWKBReader_destroy(self._reader)
            self._reader = None
            self._lgeos = None

    def read(self, data):
        """Returns geometry from WKB"""
        geom = self._lgeos.GEOSWKBReader_read(
            self._reader, c_char_p(data), c_size_t(len(data)))
        if not geom:
            raise ReadingError("Could not create geometry because of errors "
                               "while reading input.")
        # avoid circular import dependency
        from shapely import geometry
        return geometry.base.geom_factory(geom)

    def read_hex(self, data):
        """Returns geometry from WKB hex"""
        if sys.version_info[0] >= 3:
            data = data.encode('ascii')
        geom = self._lgeos.GEOSWKBReader_readHEX(
            self._reader, c_char_p(data), c_size_t(len(data)))
        if not geom:
            raise ReadingError("Could not create geometry because of errors "
                               "while reading input.")
        # avoid circular import dependency
        from shapely import geometry
        return geometry.base.geom_factory(geom)
class WKBWriter(object):
_lgeos = None
_writer = None
# EndianType enum in ByteOrderValues.h
_ENDIAN_BIG = 0
_ENDIAN_LITTLE = 1
# Establish default output setting
defaults = {'output_dimension': 3}
@property
def output_dimension(self):
"""Output dimension, either 2 or 3 (default)"""
return self._lgeos.GEOSWKBWriter_getOutputDimension(self._writer)
@output_dimension.setter
def output_dimension(self, value):
self._lgeos.GEOSWKBWriter_setOutputDimension(
self._writer, int(value))
@property
def big_endian(self):
"""Byte order is big endian, True (default) or False"""
return (self._lgeos.GEOSWKBWriter_getByteOrder(self._writer) ==
self._ENDIAN_BIG)
@big_endian.setter
def big_endian(self, value):
self._lgeos.GEOSWKBWriter_setByteOrder(
self._writer, self._ENDIAN_BIG if value else self._ENDIAN_LITTLE)
@property
def include_srid(self):
"""Include SRID, True or False (default)"""
return bool(self._lgeos.GEOSWKBWriter_getIncludeSRID(self._writer))
@include_srid.setter
def include_srid(self, value):
self._lgeos.GEOSWKBWriter_setIncludeSRID(self._writer, bool(value))
def __init__(self, lgeos, **settings):
"""Create WKB Writer"""
self._lgeos = lgeos
self._writer = self._lgeos.GEOSWKBWriter_create()
applied_settings = self.defaults.copy()
applied_settings.update(settings)
for name in applied_settings:
setattr(self, name, applied_settings[name])
def __setattr__(self, name, value):
"""Limit setting attributes"""
if hasattr(self, name):
object.__setattr__(self, name, value)
else:
raise AttributeError('%r object has no attribute %r' %
(self.__class__.__name__, name))
def __del__(self):
"""Destroy WKB Writer"""
if self._lgeos is not None:
self._lgeos.GEOSWKBWriter_destroy(self._writer)
self._writer = None
self._lgeos = None
def write(self, geom):
"""Returns WKB byte string for geometry"""
if geom is None or geom._geom is None:
raise ValueError("Null geometry supports no operations")
size = c_size_t()
result = self._lgeos.GEOSWKBWriter_write(
self._writer, geom._geom, pointer(size))
data = string_at(result, size.value)
lgeos.GEOSFree(result)
return data
def write_hex(self, geom):
"""Returns WKB hex string for geometry"""
if geom is None or geom._geom is None:
raise ValueError("Null geometry supports no operations")
size = c_size_t()
result = self._lgeos.GEOSWKBWriter_writeHEX(
self._writer, geom._geom, pointer(size))
data = string_at(result, size.value)
lgeos.GEOSFree(result)
if sys.version_info[0] >= 3:
return data.decode('ascii')
else:
return data
# Errcheck functions for ctypes
def errcheck_wkb(result, func, argtuple):
    '''Returns bytes from a C pointer'''
    if not result:
        # NULL pointer: the GEOS call failed; propagate as None.
        return None
    # The last argument is the out-parameter holding the buffer length.
    size = argtuple[-1].contents
    data = string_at(result, size.value)[:]
    lgeos.GEOSFree(result)
    return data
def errcheck_just_free(result, func, argtuple):
    '''Returns string from a C pointer'''
    text = string_at(result)
    lgeos.GEOSFree(result)
    # GEOS emits ASCII; decode to str on Python 3, keep bytes on Python 2.
    return text.decode('ascii') if sys.version_info[0] >= 3 else text
def errcheck_predicate(result, func, argtuple):
    '''Result is 2 on exception, 1 on True, 0 on False'''
    if result != 2:
        return result
    raise PredicateError("Failed to evaluate %s" % repr(func))
class LGEOSBase(threading.local):
    """Proxy for GEOS C API

    This is a base class. Do not instantiate.
    """

    # Shared map of generic method names -> GEOS entry points; populated by
    # the version-specific subclasses.
    methods = {}

    def __init__(self, dll):
        self._lgeos = dll
        self.geos_handle = None

    def __del__(self):
        """Cleanup GEOS related processes"""
        if self._lgeos is None:
            return
        self._lgeos.finishGEOS()
        self._lgeos = None
        self.geos_handle = None
class LGEOS300(LGEOSBase):
    """Proxy for GEOS 3.0.0-CAPI-1.4.1
    """
    geos_version = (3, 0, 0)
    geos_capi_version = (1, 4, 0)
    def __init__(self, dll):
        super(LGEOS300, self).__init__(dll)
        # GEOS 3.0 only exposes the non-reentrant API.
        self.geos_handle = self._lgeos.initGEOS(notice_h, error_h)
        # Mirror every symbol already declared on the ctypes DLL wrapper
        # onto this proxy object.
        keys = list(self._lgeos.__dict__.keys())
        for key in keys:
            setattr(self, key, getattr(self._lgeos, key))
        # GEOS 3.0 has no GEOSFree; fall back to the C runtime's free().
        self.GEOSFree = self._lgeos.free
        # Deprecated
        self.GEOSGeomToWKB_buf.errcheck = errcheck_wkb
        self.GEOSGeomToWKT.errcheck = errcheck_just_free
        self.GEOSRelate.errcheck = errcheck_just_free
        # Predicates return 2 on error; errcheck_predicate raises
        # PredicateError in that case.
        for pred in (
                self.GEOSDisjoint,
                self.GEOSTouches,
                self.GEOSIntersects,
                self.GEOSCrosses,
                self.GEOSWithin,
                self.GEOSContains,
                self.GEOSOverlaps,
                self.GEOSEquals,
                self.GEOSEqualsExact,
                self.GEOSisEmpty,
                self.GEOSisValid,
                self.GEOSisSimple,
                self.GEOSisRing,
                self.GEOSHasZ):
            pred.errcheck = errcheck_predicate
        # Map the generic names used elsewhere in shapely onto the entry
        # points available in this GEOS version.
        self.methods['area'] = self.GEOSArea
        self.methods['boundary'] = self.GEOSBoundary
        self.methods['buffer'] = self.GEOSBuffer
        self.methods['centroid'] = self.GEOSGetCentroid
        self.methods['representative_point'] = self.GEOSPointOnSurface
        self.methods['convex_hull'] = self.GEOSConvexHull
        self.methods['distance'] = self.GEOSDistance
        self.methods['envelope'] = self.GEOSEnvelope
        self.methods['length'] = self.GEOSLength
        self.methods['has_z'] = self.GEOSHasZ
        self.methods['is_empty'] = self.GEOSisEmpty
        self.methods['is_ring'] = self.GEOSisRing
        self.methods['is_simple'] = self.GEOSisSimple
        self.methods['is_valid'] = self.GEOSisValid
        self.methods['disjoint'] = self.GEOSDisjoint
        self.methods['touches'] = self.GEOSTouches
        self.methods['intersects'] = self.GEOSIntersects
        self.methods['crosses'] = self.GEOSCrosses
        self.methods['within'] = self.GEOSWithin
        self.methods['contains'] = self.GEOSContains
        self.methods['overlaps'] = self.GEOSOverlaps
        self.methods['equals'] = self.GEOSEquals
        self.methods['equals_exact'] = self.GEOSEqualsExact
        self.methods['relate'] = self.GEOSRelate
        self.methods['difference'] = self.GEOSDifference
        self.methods['symmetric_difference'] = self.GEOSSymDifference
        self.methods['union'] = self.GEOSUnion
        self.methods['intersection'] = self.GEOSIntersection
        self.methods['simplify'] = self.GEOSSimplify
        self.methods['topology_preserve_simplify'] = \
            self.GEOSTopologyPreserveSimplify
class LGEOS310(LGEOSBase):
    """Proxy for GEOS 3.1.0-CAPI-1.5.0
    """
    geos_version = (3, 1, 0)
    geos_capi_version = (1, 5, 0)
    def __init__(self, dll):
        super(LGEOS310, self).__init__(dll)
        # GEOS 3.1 introduces the reentrant ("_r") API with an explicit
        # handle; use it so each thread-local proxy is safe.
        self.geos_handle = self._lgeos.initGEOS_r(notice_h, error_h)
        keys = list(self._lgeos.__dict__.keys())
        # For each plain symbol, prefer the reentrant variant with the
        # handle pre-bound via functools.partial; otherwise copy as-is.
        for key in [x for x in keys if not x.endswith('_r')]:
            if key + '_r' in keys:
                reentr_func = getattr(self._lgeos, key + '_r')
                attr = ftools.partial(reentr_func, self.geos_handle)
                attr.__name__ = reentr_func.__name__
                setattr(self, key, attr)
            else:
                setattr(self, key, getattr(self._lgeos, key))
        if not hasattr(self, 'GEOSFree'):
            # GEOS < 3.1.1
            self.GEOSFree = self._lgeos.free
        # Deprecated
        # Note: the wrapped attributes are functools.partial objects, so
        # errcheck must be set on the underlying ctypes function (.func).
        self.GEOSGeomToWKB_buf.func.errcheck = errcheck_wkb
        self.GEOSGeomToWKT.func.errcheck = errcheck_just_free
        self.GEOSRelate.func.errcheck = errcheck_just_free
        # Predicates return 2 on error; errcheck_predicate raises
        # PredicateError in that case.
        for pred in (
                self.GEOSDisjoint,
                self.GEOSTouches,
                self.GEOSIntersects,
                self.GEOSCrosses,
                self.GEOSWithin,
                self.GEOSContains,
                self.GEOSOverlaps,
                self.GEOSEquals,
                self.GEOSEqualsExact,
                self.GEOSisEmpty,
                self.GEOSisValid,
                self.GEOSisSimple,
                self.GEOSisRing,
                self.GEOSHasZ):
            pred.func.errcheck = errcheck_predicate
        self.GEOSisValidReason.func.errcheck = errcheck_just_free
        # Map the generic names used elsewhere in shapely onto the entry
        # points available in this GEOS version.
        self.methods['area'] = self.GEOSArea
        self.methods['boundary'] = self.GEOSBoundary
        self.methods['buffer'] = self.GEOSBuffer
        self.methods['centroid'] = self.GEOSGetCentroid
        self.methods['representative_point'] = self.GEOSPointOnSurface
        self.methods['convex_hull'] = self.GEOSConvexHull
        self.methods['distance'] = self.GEOSDistance
        self.methods['envelope'] = self.GEOSEnvelope
        self.methods['length'] = self.GEOSLength
        self.methods['has_z'] = self.GEOSHasZ
        self.methods['is_empty'] = self.GEOSisEmpty
        self.methods['is_ring'] = self.GEOSisRing
        self.methods['is_simple'] = self.GEOSisSimple
        self.methods['is_valid'] = self.GEOSisValid
        self.methods['disjoint'] = self.GEOSDisjoint
        self.methods['touches'] = self.GEOSTouches
        self.methods['intersects'] = self.GEOSIntersects
        self.methods['crosses'] = self.GEOSCrosses
        self.methods['within'] = self.GEOSWithin
        self.methods['contains'] = self.GEOSContains
        self.methods['overlaps'] = self.GEOSOverlaps
        self.methods['equals'] = self.GEOSEquals
        self.methods['equals_exact'] = self.GEOSEqualsExact
        self.methods['relate'] = self.GEOSRelate
        self.methods['difference'] = self.GEOSDifference
        self.methods['symmetric_difference'] = self.GEOSSymDifference
        self.methods['union'] = self.GEOSUnion
        self.methods['intersection'] = self.GEOSIntersection
        # Prepared-geometry predicates are new in GEOS 3.1.
        self.methods['prepared_intersects'] = self.GEOSPreparedIntersects
        self.methods['prepared_contains'] = self.GEOSPreparedContains
        self.methods['prepared_contains_properly'] = \
            self.GEOSPreparedContainsProperly
        self.methods['prepared_covers'] = self.GEOSPreparedCovers
        self.methods['simplify'] = self.GEOSSimplify
        self.methods['topology_preserve_simplify'] = \
            self.GEOSTopologyPreserveSimplify
        self.methods['cascaded_union'] = self.GEOSUnionCascaded
class LGEOS311(LGEOS310):
    """Proxy for GEOS 3.1.1-CAPI-1.6.0
    """
    geos_version = (3, 1, 1)
    geos_capi_version = (1, 6, 0)
    def __init__(self, dll):
        # 3.1.1 differs from 3.1.0 only in shipping GEOSFree, which the
        # hasattr() fallback in LGEOS310.__init__ already handles, so no
        # extra wiring is needed here.
        super(LGEOS311, self).__init__(dll)
class LGEOS320(LGEOS311):
    """Proxy for GEOS 3.2.0-CAPI-1.6.0
    """

    geos_version = (3, 2, 0)
    geos_capi_version = (1, 6, 0)

    def __init__(self, dll):
        super(LGEOS320, self).__init__(dll)
        # GEOS 3.2 adds single-sided buffers, styled buffers, and the
        # linear-referencing (project/interpolate) operations.
        self.methods.update({
            'parallel_offset': self.GEOSSingleSidedBuffer,
            'project': self.GEOSProject,
            'project_normalized': self.GEOSProjectNormalized,
            'interpolate': self.GEOSInterpolate,
            'interpolate_normalized': self.GEOSInterpolateNormalized,
            'buffer_with_style': self.GEOSBufferWithStyle,
        })
class LGEOS330(LGEOS320):
    """Proxy for GEOS 3.3.0-CAPI-1.7.0
    """

    geos_version = (3, 3, 0)
    geos_capi_version = (1, 7, 0)

    def __init__(self, dll):
        super(LGEOS330, self).__init__(dll)

        # GEOS 3.3.8 from homebrew has, but doesn't advertise
        # GEOSPolygonize_full. We patch it in explicitly here.
        name = 'GEOSPolygonize_full'
        reentrant = getattr(self._lgeos, name + '_r')
        bound = ftools.partial(reentrant, self.geos_handle)
        bound.__name__ = reentrant.__name__
        setattr(self, name, bound)

        for pred in (self.GEOSisClosed,):
            pred.func.errcheck = errcheck_predicate

        self.methods.update({
            'unary_union': self.GEOSUnaryUnion,
            'is_closed': self.GEOSisClosed,
            'snap': self.GEOSSnap,
        })
        # From 3.3 on, cascaded union is just an alias for unary union.
        self.methods['cascaded_union'] = self.methods['unary_union']
class LGEOS340(LGEOS330):
    """Proxy for GEOS 3.4.0-CAPI-1.8.0
    """

    geos_version = (3, 4, 0)
    geos_capi_version = (1, 8, 0)

    def __init__(self, dll):
        super(LGEOS340, self).__init__(dll)
        # Operations new in GEOS 3.4.
        self.methods.update({
            'delaunay_triangulation': self.GEOSDelaunayTriangulation,
            'nearest_points': self.GEOSNearestPoints,
        })
# Select the richest proxy class supported by the GEOS library version
# detected at import time.
if geos_version >= (3, 4, 0):
    L = LGEOS340
elif geos_version >= (3, 3, 0):
    L = LGEOS330
elif geos_version >= (3, 2, 0):
    L = LGEOS320
elif geos_version >= (3, 1, 1):
    L = LGEOS311
elif geos_version >= (3, 1, 0):
    L = LGEOS310
else:
    L = LGEOS300
# Module-level singleton proxy used by the rest of shapely.
lgeos = L(_lgeos)
def cleanup(proxy):
    # Dropping this reference at interpreter exit lets LGEOSBase.__del__
    # run, which calls finishGEOS() to tear down the GEOS handle.
    del proxy
atexit.register(cleanup, lgeos)
| mit |
Event38/MissionPlanner | Lib/ctypes/macholib/dyld.py | 57 | 5510 | ######################################################################
# This file should be kept compatible with Python 2.3, see PEP 291. #
######################################################################
"""
dyld emulation
"""
import os
from framework import framework_info
from dylib import dylib_info
from itertools import *
__all__ = [
'dyld_find', 'framework_find',
'framework_info', 'dylib_info',
]
# These are the defaults as per man dyld(1)
#
# Framework directories searched when DYLD_FALLBACK_FRAMEWORK_PATH is unset.
DEFAULT_FRAMEWORK_FALLBACK = [
    os.path.expanduser("~/Library/Frameworks"),
    "/Library/Frameworks",
    "/Network/Library/Frameworks",
    "/System/Library/Frameworks",
]
# Plain dylib directories searched when DYLD_FALLBACK_LIBRARY_PATH is unset.
DEFAULT_LIBRARY_FALLBACK = [
    os.path.expanduser("~/lib"),
    "/usr/local/lib",
    "/lib",
    "/usr/lib",
]
def ensure_utf8(s):
    """Not all of PyObjC and Python understand unicode paths very well yet"""
    # NOTE: Python 2 only -- `unicode` does not exist on Python 3.
    # `s` may be None (dyld_find passes executable_path straight through);
    # isinstance(None, unicode) is False, so None is returned unchanged.
    if isinstance(s, unicode):
        return s.encode('utf8')
    return s
def dyld_env(env, var):
    """Return env[var] split on ':', or [] when the variable is unset.

    `env` defaults to os.environ when None.
    """
    if env is None:
        env = os.environ
    value = env.get(var)
    return value.split(':') if value is not None else []
def dyld_image_suffix(env=None):
    """Return DYLD_IMAGE_SUFFIX from env (os.environ when None), or None."""
    return (os.environ if env is None else env).get('DYLD_IMAGE_SUFFIX')
def dyld_framework_path(env=None):
    # Framework directories searched before the framework's install path.
    return dyld_env(env, 'DYLD_FRAMEWORK_PATH')
def dyld_library_path(env=None):
    # Dylib directories searched before the library's install path.
    return dyld_env(env, 'DYLD_LIBRARY_PATH')
def dyld_fallback_framework_path(env=None):
    # Framework directories searched only after everything else fails.
    return dyld_env(env, 'DYLD_FALLBACK_FRAMEWORK_PATH')
def dyld_fallback_library_path(env=None):
    # Dylib directories searched only after everything else fails.
    return dyld_env(env, 'DYLD_FALLBACK_LIBRARY_PATH')
def dyld_image_suffix_search(iterator, env=None):
    """For a potential path iterator, add DYLD_IMAGE_SUFFIX semantics"""
    suffix = dyld_image_suffix(env)
    if suffix is None:
        return iterator

    def _with_suffix():
        # For each candidate, first yield the suffixed variant (inserted
        # before ".dylib" when present, appended otherwise), then the
        # original path.
        for path in iterator:
            if path.endswith('.dylib'):
                yield path[:-len('.dylib')] + suffix + '.dylib'
            else:
                yield path + suffix
            yield path

    return _with_suffix()
def dyld_override_search(name, env=None):
    # If DYLD_FRAMEWORK_PATH is set and this dylib_name is a framework
    # name, try each directory on that path first.
    framework = framework_info(name)
    if framework is not None:
        for directory in dyld_framework_path(env):
            yield os.path.join(directory, framework['name'])
    # Then, if DYLD_LIBRARY_PATH is set, try the bare file name in each of
    # its directories.
    for directory in dyld_library_path(env):
        yield os.path.join(directory, os.path.basename(name))
def dyld_executable_path_search(name, executable_path=None):
    # Expand "@executable_path/..." names relative to the directory of the
    # running binary, when that directory is known.
    prefix = '@executable_path/'
    if executable_path is not None and name.startswith(prefix):
        yield os.path.join(executable_path, name[len(prefix):])
def dyld_default_search(name, env=None):
    # The name itself (its install path) is tried first.
    yield name

    framework = framework_info(name)
    if framework is not None:
        fallback_framework_path = dyld_fallback_framework_path(env)
        for directory in fallback_framework_path:
            yield os.path.join(directory, framework['name'])

    fallback_library_path = dyld_fallback_library_path(env)
    for directory in fallback_library_path:
        yield os.path.join(directory, os.path.basename(name))

    # Built-in fallback directories apply only when the corresponding
    # DYLD_FALLBACK_* variable produced nothing.
    if framework is not None and not fallback_framework_path:
        for directory in DEFAULT_FRAMEWORK_FALLBACK:
            yield os.path.join(directory, framework['name'])

    if not fallback_library_path:
        for directory in DEFAULT_LIBRARY_FALLBACK:
            yield os.path.join(directory, os.path.basename(name))
def dyld_find(name, executable_path=None, env=None):
    """
    Find a library or framework using dyld semantics
    """
    name = ensure_utf8(name)
    executable_path = ensure_utf8(executable_path)
    # Search order mirrors dyld(1): overrides, @executable_path expansion,
    # then the default/fallback locations, each filtered through
    # DYLD_IMAGE_SUFFIX handling.
    candidates = chain(
        dyld_override_search(name, env),
        dyld_executable_path_search(name, executable_path),
        dyld_default_search(name, env),
    )
    for path in dyld_image_suffix_search(candidates, env):
        if os.path.isfile(path):
            return path
    raise ValueError("dylib %s could not be found" % (name,))
def framework_find(fn, executable_path=None, env=None):
    """
    Find a framework using dyld semantics in a very loose manner.
    Will take input such as:
    Python
    Python.framework
    Python.framework/Versions/Current
    """
    try:
        return dyld_find(fn, executable_path=executable_path, env=env)
    # Python 2 syntax: bind the error so it can be re-raised below if the
    # normalized name fails too.
    except ValueError, e:
        pass
    # Normalize the name: append ".framework" if missing, then add the
    # framework binary name, e.g. "Python" -> "Python.framework/Python".
    fmwk_index = fn.rfind('.framework')
    if fmwk_index == -1:
        fmwk_index = len(fn)
        fn += '.framework'
    fn = os.path.join(fn, os.path.basename(fn[:fmwk_index]))
    try:
        return dyld_find(fn, executable_path=executable_path, env=env)
    except ValueError:
        # Report the failure in terms of the name the caller actually gave.
        raise e
def test_dyld_find():
    # Smoke test: only meaningful on a macOS host with the standard
    # /usr/lib and /System/Library/Frameworks layout.
    env = {}
    assert dyld_find('libSystem.dylib') == '/usr/lib/libSystem.dylib'
    assert dyld_find('System.framework/System') == '/System/Library/Frameworks/System.framework/System'
if __name__ == '__main__':
    test_dyld_find()
| gpl-3.0 |
clarkhale/external-storage | iscsi/targetd/vendor/k8s.io/kubernetes/cluster/juju/layers/kubernetes-master/lib/charms/kubernetes/flagmanager.py | 182 | 4599 | #!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charmhelpers.core import unitdata
class FlagManager:
    '''
    FlagManager - A Python class for managing the flags to pass to an
    application without remembering what's been set previously.

    This is a blind class assuming the operator knows what they are doing.
    Each instance of this class should be initialized with the intended
    application to manage flags. Flags are then appended to a data-structure
    and cached in unitdata for later recall.

    The underlying data-provider is backed by a SQLITE database on each unit,
    tracking the dictionary, provided from the 'charmhelpers' python package.
    Summary:
    opts = FlagManager('docker')
    opts.add('bip', '192.168.22.2')
    opts.to_s()
    '''

    def __init__(self, daemon, opts_path=None):
        # `opts_path` is accepted for backward compatibility but unused.
        self.db = unitdata.kv()
        self.daemon = daemon
        if not self.db.get(daemon):
            self.data = {}
        else:
            self.data = self.db.get(daemon)

    def __save(self):
        # Persist the current flag map under the daemon's key.
        self.db.set(self.daemon, self.data)

    def add(self, key, value, strict=False):
        '''
        Adds data to the map of values for the DockerOpts file.
        Supports single values, or "multiopt variables". If you
        have a flag only option, like --tlsverify, set the value
        to None. To preserve the exact value, pass strict
        eg:
        opts.add('label', 'foo')
        opts.add('label', 'foo, bar, baz')
        opts.add('flagonly', None)
        opts.add('cluster-store', 'consul://a:4001,b:4001,c:4001/swarm',
                 strict=True)
        '''
        if strict:
            # Strict values are stored verbatim under a "<key>-strict" key.
            self.data['{}-strict'.format(key)] = value
            self.__save()
            return

        if value:
            values = [x.strip() for x in value.split(',')]
            # handle updates: merge new values into the existing list,
            # skipping duplicates
            if key in self.data and self.data[key] is not None:
                item_data = self.data[key]
                for c in values:
                    c = c.strip()
                    if c not in item_data:
                        item_data.append(c)
                self.data[key] = item_data
            else:
                # handle new
                self.data[key] = values
        else:
            # handle flagonly (e.g. --tlsverify): a bare flag with no value
            self.data[key] = None
        self.__save()

    def remove(self, key, value):
        '''
        Remove a flag value from the DockerOpts manager
        Assuming the data is currently {'foo': ['bar', 'baz']}
        d.remove('foo', 'bar')
        > {'foo': ['baz']}
        :params key:
        :params value:
        '''
        self.data[key].remove(value)
        self.__save()

    def destroy(self, key, strict=False):
        '''
        Destructively remove all values and key from the FlagManager
        Assuming the data is currently {'foo': ['bar', 'baz']}
        d.wipe('foo')
        >{}
        :params key:
        :params strict:
        '''
        # NOTE(review): unlike add()/remove(), destroy() does not persist
        # via __save(); confirm whether that is intentional.
        try:
            if strict:
                self.data.pop('{}-strict'.format(key))
            else:
                # BUG FIX: previously this popped the literal string 'key',
                # so a non-strict destroy never removed the real entry (the
                # resulting KeyError was silently swallowed below).
                self.data.pop(key)
        except KeyError:
            pass

    def to_s(self):
        '''
        Render the flags to a single string, prepared for the Docker
        Defaults file. Typically in /etc/default/docker
        d.to_s()
        > "--foo=bar --foo=baz"
        '''
        flags = []
        for key in self.data:
            if self.data[key] is None:
                # handle flagonly
                flags.append("{}".format(key))
            elif '-strict' in key:
                # handle strict values, and do it in 2 steps.
                # If we rstrip -strict it strips a tailing s
                proper_key = key.rstrip('strict').rstrip('-')
                flags.append("{}={}".format(proper_key, self.data[key]))
            else:
                # handle multiopt and typical flags
                for item in self.data[key]:
                    flags.append("{}={}".format(key, item))
        return ' '.join(flags)
| apache-2.0 |
brookehus/msmbuilder | msmbuilder/tests/test_rmsdfeaturizer.py | 9 | 8866 | import warnings
import mdtraj as md
import numpy as np
from msmbuilder.example_datasets import AlanineDipeptide
from msmbuilder.featurizer import Featurizer
from msmbuilder.featurizer import RMSDFeaturizer
class OldRMSDFeaturizer(Featurizer):
    """Featurizer based on RMSD to a series of reference frames.

    Parameters
    ----------
    trj0 : mdtraj.Trajectory
        Reference trajectory. trj0.n_frames gives the number of features
        in this Featurizer.
    atom_indices : np.ndarray, default=None
        Which atom indices to use during RMSD calculation. If None, MDTraj
        should default to all atoms.

    Notes
    -----
    This was the RMSDFeaturizer until version 3.4
    """

    def __init__(self, trj0, atom_indices=None):
        self.n_features = trj0.n_frames
        self.trj0 = trj0
        self.atom_indices = atom_indices

    def partial_transform(self, traj):
        """Featurize an MD trajectory into a vector space by calculating
        the RMSD to each frame in a reference trajectory.

        Parameters
        ----------
        traj : mdtraj.Trajectory
            A molecular dynamics trajectory to featurize.

        Returns
        -------
        features : np.ndarray, dtype=float, shape=(n_samples, n_features)
            A featurized trajectory is a 2D array of shape
            `(length_of_trajectory x n_features)` where each `features[i]`
            vector is computed by applying the featurization function
            to the `i`th snapshot of the input trajectory.

        See Also
        --------
        transform : simultaneously featurize a collection of MD trajectories
        """
        # One column per reference frame: RMSD of every input frame to it.
        features = np.zeros((traj.n_frames, self.n_features))
        for ref_frame in range(self.n_features):
            features[:, ref_frame] = md.rmsd(
                traj, self.trj0,
                atom_indices=self.atom_indices,
                frame=ref_frame)
        return features
def test_alanine_dipeptide_basic():
    # Integration test: requires the cached AlanineDipeptide dataset.
    # This test takes the rmsd of the 0th set of alanine dipeptide
    # trajectories relative to the 0th frame of the dataset.
    # The test asserts that all rmsd's calculated will be equal
    # to the ones that would be calculated straight from mdtraj.
    trajectories = AlanineDipeptide().get_cached().trajectories
    featurizer = RMSDFeaturizer(trajectories[0][0])
    data = featurizer.transform(trajectories[0:1])
    true_rmsd = md.rmsd(trajectories[0], trajectories[0][0])
    np.testing.assert_array_almost_equal(data[0][:, 0], true_rmsd, decimal=4)
def test_omitting_indices():
    # This test verifies that the result produced when
    # atom_indices are omitted is the same as the result
    # produced when atom_indices is all atom indices.
    trajectories = AlanineDipeptide().get_cached().trajectories
    featurizer_indices = RMSDFeaturizer(trajectories[0][0],
                                        np.arange(trajectories[0].n_atoms))
    data_indices = featurizer_indices.transform(trajectories[0:1])
    featurizer = RMSDFeaturizer(trajectories[0][0])
    data = featurizer.transform(trajectories[0:1])
    np.testing.assert_array_almost_equal(data[0][:, 0],
                                         data_indices[0][:, 0], decimal=4)
def test_different_indices():
    # This test verifies that the rmsd's calculated from
    # different sets of atom indices are not the same,
    # but that the arrays are still the same shape.
    trajectories = AlanineDipeptide().get_cached().trajectories
    n_atoms = trajectories[0].n_atoms
    halfway_point = n_atoms // 2
    featurizer_first_half = RMSDFeaturizer(trajectories[0][0],
                                           np.arange(halfway_point))
    data_first_half = featurizer_first_half.transform(trajectories[0:1])
    featurizer_second_half = RMSDFeaturizer(trajectories[0][0],
                                            np.arange(halfway_point, n_atoms))
    data_second_half = featurizer_second_half.transform(trajectories[0:1])
    assert data_first_half[0].shape == data_second_half[0].shape
    # janky way to show that the arrays shouldn't be equal here
    assert sum(data_first_half[0][:, 0]) != sum(data_second_half[0][:, 0])
def test_two_refs_basic():
    # This test uses the 0th and 1st frames of the 0th set of
    # adp trajectories as the two reference trajectories and
    # ensures that the rmsd of the 0th frame of the dataset with
    # the 0th reference are identical and the 1st frame of the
    # dataset with the 1st reference are identical.
    trajectories = AlanineDipeptide().get_cached().trajectories
    featurizer = RMSDFeaturizer(trajectories[0][0:2])
    data = featurizer.transform(trajectories[0:1])
    true_rmsd = np.zeros((trajectories[0].n_frames, 2))
    for frame in range(2):
        true_rmsd[:, frame] = md.rmsd(trajectories[0], trajectories[0][frame])
    np.testing.assert_almost_equal(data[0][0, 0], data[0][1, 1], decimal=3)
    np.testing.assert_almost_equal(data[0][1, 0], data[0][0, 1], decimal=3)
    np.testing.assert_array_almost_equal(data[0], true_rmsd, decimal=4)
def test_two_refs_omitting_indices():
    # This test verifies that the result produced when
    # atom_indices are omitted is the same as the result
    # produced when atom_indices is all atom indices.
    trajectories = AlanineDipeptide().get_cached().trajectories
    featurizer_indices = RMSDFeaturizer(trajectories[0][0:2],
                                        np.arange(trajectories[0].n_atoms))
    data_indices = featurizer_indices.transform(trajectories[0:1])
    featurizer = RMSDFeaturizer(trajectories[0][0:2])
    data = featurizer.transform(trajectories[0:1])
    np.testing.assert_array_almost_equal(data[0], data_indices[0], decimal=4)
def test_two_refs_different_indices():
    # This test verifies that the rmsd's calculated from
    # different sets of atom indices are not the same,
    # but that the arrays are still the same shape.
    trajectories = AlanineDipeptide().get_cached().trajectories
    n_atoms = trajectories[0].n_atoms
    halfway_point = n_atoms // 2
    featurizer_first_half = RMSDFeaturizer(trajectories[0][0:2],
                                           np.arange(halfway_point))
    data_first_half = featurizer_first_half.transform(trajectories[0:1])
    featurizer_second_half = RMSDFeaturizer(trajectories[0][0:2],
                                            np.arange(halfway_point, n_atoms))
    data_second_half = featurizer_second_half.transform(trajectories[0:1])
    assert data_first_half[0].shape == data_second_half[0].shape
    # janky way to show that the arrays shouldn't be equal here
    assert sum(data_first_half[0][:, 0]) != sum(data_second_half[0][:, 0])
    assert sum(data_first_half[0][:, 1]) != sum(data_second_half[0][:, 1])
def _random_trajs():
    # Build a 101-atom, single-residue topology and three random
    # trajectories: two 100-frame "data" trajectories and a 7-frame
    # reference. The np.random.uniform calls happen in the same order and
    # with the same shapes as before, so results under a fixed seed are
    # unchanged.
    top = md.Topology()
    chain = top.add_chain()
    residue = top.add_residue('HET', chain)
    for _ in range(101):
        top.add_atom('CA', md.element.carbon, residue)

    def _make(n_frames):
        return md.Trajectory(xyz=np.random.uniform(size=(n_frames, 101, 3)),
                             topology=top,
                             time=np.arange(n_frames))

    return _make(100), _make(100), _make(7)
def test_api_still_works_names():
    # Keyword-argument call style: the new featurizer must accept the old
    # `trj0` keyword (with a deprecation warning) and produce the same
    # features as the legacy implementation.
    traj1, traj2, ref = _random_trajs()
    old = OldRMSDFeaturizer(trj0=ref, atom_indices=np.arange(50))
    with warnings.catch_warnings(record=True) as w:
        new = RMSDFeaturizer(trj0=ref, atom_indices=np.arange(50))
    assert "deprecated" in str(w[-1].message)
    assert "trj0" in str(w[-1].message)
    data_old = old.fit_transform([traj1, traj2])
    data_new = new.fit_transform([traj1, traj2])
    for do, dn in zip(data_old, data_new):
        np.testing.assert_array_almost_equal(do, dn)
        assert dn.shape == (100, 7)
def test_api_still_works_order():
    # Positional-argument call style must also match the legacy output.
    traj1, traj2, ref = _random_trajs()
    old = OldRMSDFeaturizer(ref, atom_indices=np.arange(50))
    new = RMSDFeaturizer(ref, atom_indices=np.arange(50))
    data_old = old.fit_transform([traj1, traj2])
    data_new = new.fit_transform([traj1, traj2])
    for do, dn in zip(data_old, data_new):
        np.testing.assert_array_almost_equal(do, dn)
        assert dn.shape == (100, 7)
def test_api_still_works_allframes():
    # Omitting atom_indices entirely must also match the legacy output.
    traj1, traj2, ref = _random_trajs()
    old = OldRMSDFeaturizer(ref)
    new = RMSDFeaturizer(ref)
    data_old = old.fit_transform([traj1, traj2])
    data_new = new.fit_transform([traj1, traj2])
    for do, dn in zip(data_old, data_new):
        np.testing.assert_array_almost_equal(do, dn)
        assert dn.shape == (100, 7)
| lgpl-2.1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.