repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
aeischeid/servo | tests/wpt/css-tests/tools/pywebsocket/src/mod_pywebsocket/stream.py | 673 | 2748 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file exports public symbols.
"""
from mod_pywebsocket._stream_base import BadOperationException
from mod_pywebsocket._stream_base import ConnectionTerminatedException
from mod_pywebsocket._stream_base import InvalidFrameException
from mod_pywebsocket._stream_base import InvalidUTF8Exception
from mod_pywebsocket._stream_base import UnsupportedFrameException
from mod_pywebsocket._stream_hixie75 import StreamHixie75
from mod_pywebsocket._stream_hybi import Frame
from mod_pywebsocket._stream_hybi import Stream
from mod_pywebsocket._stream_hybi import StreamOptions
# These methods are intended to be used by WebSocket client developers to have
# their implementations receive broken data in tests.
from mod_pywebsocket._stream_hybi import create_close_frame
from mod_pywebsocket._stream_hybi import create_header
from mod_pywebsocket._stream_hybi import create_length_header
from mod_pywebsocket._stream_hybi import create_ping_frame
from mod_pywebsocket._stream_hybi import create_pong_frame
from mod_pywebsocket._stream_hybi import create_binary_frame
from mod_pywebsocket._stream_hybi import create_text_frame
from mod_pywebsocket._stream_hybi import create_closing_handshake_body
# vi:sts=4 sw=4 et
| mpl-2.0 |
paulfitz/phantomjs | src/qt/qtwebkit/Tools/BuildSlaveSupport/wait-for-SVN-server.py | 118 | 2670 | #!/usr/bin/env python
#
# Copyright (C) 2006 John Pye
# Copyright (C) 2012 University of Szeged
#
# This script is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
from optparse import OptionParser
import exceptions
import sys
import time
import xml.dom.minidom
import os
import subprocess
def getLatestSVNRevision(SVNServer):
    """Return the latest revision number on SVNServer as a string.

    Runs ``svn log --limit=1 --xml`` against the server and reads the
    ``revision`` attribute of the first <logentry> element. On an XML
    parse failure the raw svn output is printed and the process exits
    with status 1.
    """
    try:
        # stderr is merged into stdout so any svn error text ends up in
        # `response` and gets printed below when parsing fails.
        p = subprocess.Popen(["svn", "log", "--non-interactive", "--verbose", "--xml", "--limit=1", SVNServer], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        response = p.communicate()[0]
        doc = xml.dom.minidom.parseString(response)
        el = doc.getElementsByTagName("logentry")[0]
        return el.getAttribute("revision")
    except xml.parsers.expat.ExpatError, e:  # Python 2 except syntax
        print "FAILED TO PARSE 'svn log' XML:"
        print str(e)
        print "----"
        print "RECEIVED TEXT:"
        print response
        sys.exit(1)
def waitForSVNRevision(SVNServer, revision):
    """Block until SVNServer has a revision >= `revision`.

    If `revision` is missing or not a digit string, just report the
    server's current latest revision and return immediately without
    waiting.
    """
    if not revision or not revision.isdigit():
        latestRevision = int(getLatestSVNRevision(SVNServer))
        print "Latest SVN revision on %s is r%d. Don't wait, because revision argument isn't a valid SVN revision." % (SVNServer, latestRevision)
        return

    revision = int(revision)
    while True:
        latestRevision = int(getLatestSVNRevision(SVNServer))
        if latestRevision < revision:
            # Poll every 5 seconds until the server catches up.
            print "Latest SVN revision on %s is r%d, but we are waiting for r%d. Sleeping for 5 seconds." % (SVNServer, latestRevision, revision)
            time.sleep(5)
        else:
            print "Latest SVN revision on %s is r%d, which is newer or equal than r%d." % (SVNServer, latestRevision, revision)
            break
if __name__ == '__main__':
    # Command-line entry point: -r/--revision is the revision to wait for,
    # -s/--svn-server is the server to poll.
    parser = OptionParser()
    parser.add_option("-r", "--revision", dest="revision", help="SVN revision number")
    parser.add_option("-s", "--svn-server", dest="SVNServer", help="SVN server")
    options, args = parser.parse_args()
    waitForSVNRevision(options.SVNServer, options.revision)
| bsd-3-clause |
jelugbo/ddi | lms/djangoapps/instructor_analytics/basic.py | 13 | 16648 | """
Student and course analytics.
Serve miscellaneous course and student data
"""
from shoppingcart.models import (
PaidCourseRegistration, CouponRedemption, Invoice, CourseRegCodeItem,
OrderTypes, RegistrationCodeRedemption, CourseRegistrationCode
)
from django.contrib.auth.models import User
import xmodule.graders as xmgraders
from django.core.exceptions import ObjectDoesNotExist
STUDENT_FEATURES = ('id', 'username', 'first_name', 'last_name', 'is_staff', 'email')
PROFILE_FEATURES = ('name', 'language', 'location', 'year_of_birth', 'gender',
'level_of_education', 'mailing_address', 'goals', 'meta')
ORDER_ITEM_FEATURES = ('list_price', 'unit_cost', 'order_id')
ORDER_FEATURES = ('purchase_time',)
SALE_FEATURES = ('total_amount', 'company_name', 'company_contact_name', 'company_contact_email', 'recipient_name',
'recipient_email', 'customer_reference_number', 'internal_reference')
SALE_ORDER_FEATURES = ('id', 'company_name', 'company_contact_name', 'company_contact_email', 'purchase_time',
'customer_reference_number', 'recipient_name', 'recipient_email', 'bill_to_street1',
'bill_to_street2', 'bill_to_city', 'bill_to_state', 'bill_to_postalcode',
'bill_to_country', 'order_type',)
AVAILABLE_FEATURES = STUDENT_FEATURES + PROFILE_FEATURES
COURSE_REGISTRATION_FEATURES = ('code', 'course_id', 'created_by', 'created_at')
COUPON_FEATURES = ('course_id', 'percentage_discount', 'description')
def sale_order_record_features(course_id, features):
    """
    Return list of sale orders features as dictionaries.

    sales_records(course_id, ['company_name, total_codes', total_amount])
    would return [
        {'company_name': 'group_A', 'total_codes': '1', total_amount:'total_amount1 in decimal'.}
        {'company_name': 'group_B', 'total_codes': '2', total_amount:'total_amount2 in decimal'.}
        {'company_name': 'group_C', 'total_codes': '3', total_amount:'total_amount3 in decimal'.}
    ]
    """
    # Two sources feed the same report: direct paid registrations and
    # registration-code order items, both restricted to purchased status.
    purchased_courses = PaidCourseRegistration.objects.filter(course_id=course_id, status='purchased').order_by('order')
    purchased_course_reg_codes = CourseRegCodeItem.objects.filter(course_id=course_id, status='purchased').order_by('order')

    def sale_order_info(purchased_course, features):
        """
        convert purchase transactions to dictionary
        """
        sale_order_features = [x for x in SALE_ORDER_FEATURES if x in features]
        course_reg_features = [x for x in COURSE_REGISTRATION_FEATURES if x in features]
        # Extracting order information
        sale_order_dict = dict((feature, getattr(purchased_course.order, feature))
                               for feature in sale_order_features)
        quantity = int(getattr(purchased_course, 'qty'))
        unit_cost = float(getattr(purchased_course, 'unit_cost'))
        sale_order_dict.update({"total_amount": quantity * unit_cost})
        sale_order_dict.update({"logged_in_username": purchased_course.order.user.username})
        sale_order_dict.update({"logged_in_email": purchased_course.order.user.email})
        # Code counts only apply to business (bulk) orders; default to 'N/A'.
        sale_order_dict.update({"total_codes": 'N/A'})
        sale_order_dict.update({'total_used_codes': 'N/A'})
        if getattr(purchased_course.order, 'order_type') == OrderTypes.BUSINESS:
            registration_codes = CourseRegistrationCode.objects.filter(order=purchased_course.order, course_id=course_id)
            sale_order_dict.update({"total_codes": registration_codes.count()})
            sale_order_dict.update({'total_used_codes': RegistrationCodeRedemption.objects.filter(registration_code__in=registration_codes).count()})
            codes = list()
            for reg_code in registration_codes:
                codes.append(reg_code.code)
            # Extracting registration code information
            # NOTE(review): assumes a business order always has at least one
            # registration code -- .get() on an empty queryset would raise.
            obj_course_reg_code = registration_codes.all()[:1].get()
            course_reg_dict = dict((feature, getattr(obj_course_reg_code, feature))
                                   for feature in course_reg_features)
            course_reg_dict['course_id'] = course_id.to_deprecated_string()
            course_reg_dict.update({'codes': ", ".join(codes)})
            sale_order_dict.update(dict(course_reg_dict.items()))
        return sale_order_dict

    csv_data = [sale_order_info(purchased_course, features) for purchased_course in purchased_courses]
    csv_data.extend([sale_order_info(purchased_course_reg_code, features) for purchased_course_reg_code in purchased_course_reg_codes])
    return csv_data
def sale_record_features(course_id, features):
    """
    Return list of sales features as dictionaries.

    sales_records(course_id, ['company_name, total_codes', total_amount])
    would return [
        {'company_name': 'group_A', 'total_codes': '1', total_amount:'total_amount1 in decimal'.}
        {'company_name': 'group_B', 'total_codes': '2', total_amount:'total_amount2 in decimal'.}
        {'company_name': 'group_C', 'total_codes': '3', total_amount:'total_amount3 in decimal'.}
    ]
    """
    sales = Invoice.objects.filter(course_id=course_id)

    def sale_records_info(sale, features):
        """ convert sales records to dictionary """
        sale_features = [x for x in SALE_FEATURES if x in features]
        course_reg_features = [x for x in COURSE_REGISTRATION_FEATURES if x in features]
        # Extracting sale information
        sale_dict = dict((feature, getattr(sale, feature))
                         for feature in sale_features)
        total_used_codes = RegistrationCodeRedemption.objects.filter(registration_code__in=sale.courseregistrationcode_set.all()).count()
        sale_dict.update({"invoice_number": getattr(sale, 'id')})
        sale_dict.update({"total_codes": sale.courseregistrationcode_set.all().count()})
        sale_dict.update({'total_used_codes': total_used_codes})
        codes = list()
        for reg_code in sale.courseregistrationcode_set.all():
            codes.append(reg_code.code)
        # Extracting registration code information
        # NOTE(review): assumes every invoice has at least one registration
        # code -- .get() on an empty queryset would raise DoesNotExist.
        obj_course_reg_code = sale.courseregistrationcode_set.all()[:1].get()
        course_reg_dict = dict((feature, getattr(obj_course_reg_code, feature))
                               for feature in course_reg_features)
        course_reg_dict['course_id'] = course_id.to_deprecated_string()
        course_reg_dict.update({'codes': ", ".join(codes)})
        sale_dict.update(dict(course_reg_dict.items()))
        return sale_dict

    return [sale_records_info(sale, features) for sale in sales]
def purchase_transactions(course_id, features):
    """
    Return list of purchased transactions features as dictionaries.

    purchase_transactions(course_id, ['username, email','created_by', unit_cost])
    would return [
        {'username': 'username1', 'email': 'email1', unit_cost:'cost1 in decimal'.}
        {'username': 'username2', 'email': 'email2', unit_cost:'cost2 in decimal'.}
        {'username': 'username3', 'email': 'email3', unit_cost:'cost3 in decimal'.}
    ]
    """
    purchased_courses = PaidCourseRegistration.objects.filter(course_id=course_id, status='purchased').order_by('user')

    def purchase_transactions_info(purchased_course, features):
        """ convert purchase transactions to dictionary """
        coupon_code_dict = dict()
        student_features = [x for x in STUDENT_FEATURES if x in features]
        order_features = [x for x in ORDER_FEATURES if x in features]
        order_item_features = [x for x in ORDER_ITEM_FEATURES if x in features]
        # Extracting user information
        student_dict = dict((feature, getattr(purchased_course.user, feature))
                            for feature in student_features)
        # Extracting Order information
        order_dict = dict((feature, getattr(purchased_course.order, feature))
                          for feature in order_features)
        # Extracting OrderItem information
        order_item_dict = dict((feature, getattr(purchased_course, feature))
                               for feature in order_item_features)
        order_item_dict.update({"orderitem_id": getattr(purchased_course, 'id')})
        coupon_redemption = CouponRedemption.objects.select_related('coupon').filter(order_id=purchased_course.order_id)
        if coupon_redemption:
            # we format the coupon codes in comma separated way if there are more then one coupon against a order id.
            coupon_codes = list()
            for redemption in coupon_redemption:
                coupon_codes.append(redemption.coupon.code)
            coupon_code_dict = {'coupon_code': ", ".join(coupon_codes)}
        else:
            coupon_code_dict = {'coupon_code': 'None'}
        # NOTE: Python 2 only -- dict.items() returns lists here, so `+`
        # concatenates them before rebuilding a single dict.
        student_dict.update(dict(order_dict.items() + order_item_dict.items() + coupon_code_dict.items()))
        student_dict.update({'course_id': course_id.to_deprecated_string()})
        return student_dict

    return [purchase_transactions_info(purchased_course, features) for purchased_course in purchased_courses]
def enrolled_students_features(course_key, features):
    """
    Return list of student features as dictionaries.

    enrolled_students_features(course_key, ['username', 'first_name'])
    would return [
        {'username': 'username1', 'first_name': 'firstname1'}
        {'username': 'username2', 'first_name': 'firstname2'}
        {'username': 'username3', 'first_name': 'firstname3'}
    ]
    """
    with_cohorts = 'cohort' in features

    students = User.objects.filter(
        courseenrollment__course_id=course_key,
        courseenrollment__is_active=1,
    ).order_by('username').select_related('profile')
    if with_cohorts:
        students = students.prefetch_related('course_groups')

    def extract_student(student, features):
        """ convert student to dictionary """
        wanted_student = [name for name in STUDENT_FEATURES if name in features]
        wanted_profile = [name for name in PROFILE_FEATURES if name in features]
        student_dict = {name: getattr(student, name) for name in wanted_student}
        profile = student.profile
        if profile is not None:
            student_dict.update(
                {name: getattr(profile, name) for name in wanted_profile})
        if with_cohorts:
            # Iterate course_groups.all() rather than .filter(): .filter()
            # issues a fresh query per student and would defeat the
            # prefetch_related() cache built above.
            student_dict['cohort'] = next(
                (group.name for group in student.course_groups.all()
                 if group.course_id == course_key),
                "[unassigned]"
            )
        return student_dict

    return [extract_student(student, features) for student in students]
def coupon_codes_features(features, coupons_list):
    """
    Return list of Coupon Codes as dictionaries.

    coupon_codes_features
    would return [
        {'course_id': 'edX/Open_DemoX/edx_demo_course,, 'discount': '213' ..... }
        {'course_id': 'edX/Open_DemoX/edx_demo_course,, 'discount': '234' ..... }
    ]
    """
    def extract_coupon(coupon, features):
        """ Convert one coupon model instance to a feature dictionary.
        :param coupon: a coupon model instance
        :param features: iterable of requested feature names
        """
        wanted = [name for name in COUPON_FEATURES if name in features]
        coupon_dict = {name: getattr(coupon, name) for name in wanted}
        coupon_dict['code_redeemed_count'] = coupon.couponredemption_set.all().count()
        # 'course_id' is expected among the requested features; serialize
        # it to its deprecated string form for the CSV output.
        coupon_dict['course_id'] = coupon_dict['course_id'].to_deprecated_string()
        return coupon_dict

    return [extract_coupon(coupon, features) for coupon in coupons_list]
def course_registration_features(features, registration_codes, csv_type):
    """
    Return list of Course Registration Codes as dictionaries.

    course_registration_features
    would return [
        {'code': 'code1', 'course_id': 'edX/Open_DemoX/edx_demo_course, ..... }
        {'code': 'code2', 'course_id': 'edX/Open_DemoX/edx_demo_course, ..... }
    ]
    """
    def extract_course_registration(registration_code, features, csv_type):
        """ Convert one registration code to a feature dictionary.
        :param registration_code: a CourseRegistrationCode model instance
        :param features: iterable of requested feature names
        :param csv_type: when not None, look up redemption info (redeemed_by)
        """
        registration_features = [x for x in COURSE_REGISTRATION_FEATURES if x in features]
        course_registration_dict = dict(
            (feature, getattr(registration_code, feature))
            for feature in registration_features)
        # Defaults for codes that have no invoice / have not been redeemed.
        course_registration_dict['company_name'] = None
        course_registration_dict['redeemed_by'] = None
        # Single invoice check (the original tested registration_code.invoice
        # twice in a row for no reason).
        if registration_code.invoice:
            sale_invoice = Invoice.objects.get(id=registration_code.invoice_id)
            course_registration_dict['company_name'] = sale_invoice.company_name
            course_registration_dict['invoice_id'] = sale_invoice.id
            course_registration_dict['purchaser'] = sale_invoice.recipient_name
            course_registration_dict['customer_reference_number'] = sale_invoice.customer_reference_number
            course_registration_dict['internal_reference'] = sale_invoice.internal_reference
        # we have to capture the redeemed_by value in the case of the downloading and spent registration
        # codes csv. In the case of active and generated registration codes the redeemed_by value will be None.
        # They have not been redeemed yet
        if csv_type is not None:
            try:
                redemption = registration_code.registrationcoderedemption_set.get(
                    registration_code=registration_code)
                course_registration_dict['redeemed_by'] = redemption.redeemed_by.email
            except ObjectDoesNotExist:
                pass
        # 'course_id' is expected among the requested features.
        course_registration_dict['course_id'] = course_registration_dict['course_id'].to_deprecated_string()
        return course_registration_dict

    return [extract_course_registration(code, features, csv_type) for code in registration_codes]
def dump_grading_context(course):
    """
    Render information about course grading context
    (e.g. which problems are graded in what assignments)
    Useful for debugging grading_policy.json and policy.json

    Returns an HTML string (the report wrapped in a <pre> block).
    """
    hbar = "{}\n".format("-" * 77)
    msg = hbar
    msg += "Course grader:\n"
    msg += '%s\n' % course.grader.__class__
    graders = {}
    if isinstance(course.grader, xmgraders.WeightedSubsectionsGrader):
        msg += '\n'
        msg += "Graded sections:\n"
        for subgrader, category, weight in course.grader.sections:
            msg += " subgrader=%s, type=%s, category=%s, weight=%s\n"\
                % (subgrader.__class__, subgrader.type, category, weight)
            # Start numbering assignments of this type at 1 (e.g. "HW 01").
            subgrader.index = 1
            graders[subgrader.type] = subgrader
    msg += hbar
    msg += "Listing grading context for course %s\n" % course.id.to_deprecated_string()

    gcontext = course.grading_context
    msg += "graded sections:\n"
    msg += '%s\n' % gcontext['graded_sections'].keys()
    for (gsomething, gsvals) in gcontext['graded_sections'].items():
        msg += "--> Section %s:\n" % (gsomething)
        for sec in gsvals:
            sdesc = sec['section_descriptor']
            frmat = getattr(sdesc, 'format', None)
            aname = ''
            if frmat in graders:
                # Matched by assignment format: label plus running index.
                gform = graders[frmat]
                aname = '%s %02d' % (gform.short_label, gform.index)
                gform.index += 1
            elif sdesc.display_name in graders:
                gform = graders[sdesc.display_name]
                aname = '%s' % gform.short_label
            notes = ''
            if getattr(sdesc, 'score_by_attempt', False):
                notes = ', score by attempt!'
            msg += " %s (format=%s, Assignment=%s%s)\n"\
                % (sdesc.display_name, frmat, aname, notes)
    msg += "all descriptors:\n"
    msg += "length=%d\n" % len(gcontext['all_descriptors'])
    # Escape '<' so descriptor reprs (e.g. "<XBlock ...>") don't break the
    # surrounding HTML. The previous replace('<', '<') was a no-op -- the
    # intended replacement is the '&lt;' entity.
    msg = '<pre>%s</pre>' % msg.replace('<', '&lt;')
    return msg
| agpl-3.0 |
kernow/ansible-modules-core | cloud/openstack/os_security_group_rule.py | 32 | 10923 | #!/usr/bin/python
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2013, Benno Joy <benno@ansible.com>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
DOCUMENTATION = '''
---
module: os_security_group_rule
short_description: Add/Delete rule from an existing security group
extends_documentation_fragment: openstack
version_added: "2.0"
description:
- Add or Remove rule from an existing security group
options:
security_group:
description:
- Name or ID of the security group
required: true
protocol:
description:
- IP protocol
choices: ['tcp', 'udp', 'icmp', None]
default: None
port_range_min:
description:
- Starting port
required: false
default: None
port_range_max:
description:
- Ending port
required: false
default: None
remote_ip_prefix:
description:
- Source IP address(es) in CIDR notation (exclusive with remote_group)
required: false
remote_group:
description:
- Name or ID of the Security group to link (exclusive with
remote_ip_prefix)
required: false
ethertype:
description:
- Must be IPv4 or IPv6, and addresses represented in CIDR must
match the ingress or egress rules. Not all providers support IPv6.
choices: ['IPv4', 'IPv6']
default: IPv4
direction:
description:
- The direction in which the security group rule is applied. Not
all providers support egress.
choices: ['egress', 'ingress']
default: ingress
state:
description:
- Should the resource be present or absent.
choices: [present, absent]
default: present
requirements: ["shade"]
'''
EXAMPLES = '''
# Create a security group rule
- os_security_group_rule:
cloud: mordred
security_group: foo
protocol: tcp
port_range_min: 80
port_range_max: 80
remote_ip_prefix: 0.0.0.0/0
# Create a security group rule for ping
- os_security_group_rule:
cloud: mordred
security_group: foo
protocol: icmp
remote_ip_prefix: 0.0.0.0/0
# Another way to create the ping rule
- os_security_group_rule:
cloud: mordred
security_group: foo
protocol: icmp
port_range_min: -1
port_range_max: -1
remote_ip_prefix: 0.0.0.0/0
# Create a TCP rule covering all ports
- os_security_group_rule:
cloud: mordred
security_group: foo
protocol: tcp
port_range_min: 1
port_range_max: 65535
remote_ip_prefix: 0.0.0.0/0
# Another way to create the TCP rule above (defaults to all ports)
- os_security_group_rule:
cloud: mordred
security_group: foo
protocol: tcp
remote_ip_prefix: 0.0.0.0/0
'''
RETURN = '''
id:
description: Unique rule UUID.
type: string
direction:
description: The direction in which the security group rule is applied.
type: string
sample: 'egress'
ethertype:
description: One of IPv4 or IPv6.
type: string
sample: 'IPv4'
port_range_min:
description: The minimum port number in the range that is matched by
the security group rule.
type: int
sample: 8000
port_range_max:
description: The maximum port number in the range that is matched by
the security group rule.
type: int
sample: 8000
protocol:
description: The protocol that is matched by the security group rule.
type: string
sample: 'tcp'
remote_ip_prefix:
description: The remote IP prefix to be associated with this security group rule.
type: string
sample: '0.0.0.0/0'
security_group_id:
description: The security group ID to associate with this security group rule.
type: string
'''
def _ports_match(protocol, module_min, module_max, rule_min, rule_max):
"""
Capture the complex port matching logic.
The port values coming in for the module might be -1 (for ICMP),
which will work only for Nova, but this is handled by shade. Likewise,
they might be None, which works for Neutron, but not Nova. This too is
handled by shade. Since shade will consistently return these port
values as None, we need to convert any -1 values input to the module
to None here for comparison.
For TCP and UDP protocols, None values for both min and max are
represented as the range 1-65535 for Nova, but remain None for
Neutron. Shade returns the full range when Nova is the backend (since
that is how Nova stores them), and None values for Neutron. If None
values are input to the module for both values, then we need to adjust
for comparison.
"""
# Check if the user is supplying -1 for ICMP.
if protocol == 'icmp':
if module_min and int(module_min) == -1:
module_min = None
if module_max and int(module_max) == -1:
module_max = None
# Check if user is supplying None values for full TCP/UDP port range.
if protocol in ['tcp', 'udp'] and module_min is None and module_max is None:
if (rule_min and int(rule_min) == 1
and rule_max and int(rule_max) == 65535):
# (None, None) == (1, 65535)
return True
# Sanity check to make sure we don't have type comparison issues.
if module_min:
module_min = int(module_min)
if module_max:
module_max = int(module_max)
if rule_min:
rule_min = int(rule_min)
if rule_max:
rule_max = int(rule_max)
return module_min == rule_min and module_max == rule_max
def _find_matching_rule(module, secgroup, remotegroup):
    """
    Find a rule in the group that matches the module parameters.
    :returns: The matching rule dict, or None if no matches.
    """
    params = module.params
    # The non-port attributes a rule must share with the requested rule.
    wanted = (
        params['protocol'],
        params['remote_ip_prefix'],
        params['ethertype'],
        params['direction'],
        remotegroup['id'],
    )
    for rule in secgroup['security_group_rules']:
        candidate = (
            rule['protocol'],
            rule['remote_ip_prefix'],
            rule['ethertype'],
            rule['direction'],
            rule['remote_group_id'],
        )
        if candidate == wanted and _ports_match(
                params['protocol'],
                params['port_range_min'],
                params['port_range_max'],
                rule['port_range_min'],
                rule['port_range_max']):
            return rule
    return None
def _system_state_change(module, secgroup, remotegroup):
    """Return True when applying the module would change the cloud state."""
    if not secgroup:
        # No security group to modify -> nothing would change.
        return False
    rule_exists = _find_matching_rule(module, secgroup, remotegroup)
    state = module.params['state']
    if state == 'present':
        return not rule_exists
    if state == 'absent':
        return bool(rule_exists)
    return False
def main():
    """Ansible module entry point: add or remove one security group rule."""
    argument_spec = openstack_full_argument_spec(
        security_group = dict(required=True),
        # NOTE(Shrews): None is an acceptable protocol value for
        # Neutron, but Nova will balk at this.
        protocol = dict(default=None,
                        choices=[None, 'tcp', 'udp', 'icmp']),
        port_range_min = dict(required=False, type='int'),
        port_range_max = dict(required=False, type='int'),
        remote_ip_prefix = dict(required=False, default=None),
        remote_group = dict(required=False, default=None),
        ethertype = dict(default='IPv4',
                         choices=['IPv4', 'IPv6']),
        direction = dict(default='ingress',
                         choices=['egress', 'ingress']),
        state = dict(default='present',
                     choices=['absent', 'present']),
    )
    module_kwargs = openstack_module_kwargs(
        mutually_exclusive=[
            ['remote_ip_prefix', 'remote_group'],
        ]
    )
    module = AnsibleModule(argument_spec,
                           supports_check_mode=True,
                           **module_kwargs)

    if not HAS_SHADE:
        module.fail_json(msg='shade is required for this module')

    state = module.params['state']
    security_group = module.params['security_group']
    remote_group = module.params['remote_group']
    changed = False

    try:
        cloud = shade.openstack_cloud(**module.params)
        secgroup = cloud.get_security_group(security_group)

        if remote_group:
            remotegroup = cloud.get_security_group(remote_group)
        else:
            # No remote group requested; a None id matches rules that have
            # no remote_group_id set (see _find_matching_rule).
            remotegroup = { 'id' : None }

        if module.check_mode:
            # Check mode: report whether anything would change, touch nothing.
            module.exit_json(changed=_system_state_change(module, secgroup, remotegroup))

        if state == 'present':
            if not secgroup:
                module.fail_json(msg='Could not find security group %s' %
                                 security_group)

            rule = _find_matching_rule(module, secgroup, remotegroup)
            if not rule:
                # No equivalent rule exists yet -- create it.
                rule = cloud.create_security_group_rule(
                    secgroup['id'],
                    port_range_min=module.params['port_range_min'],
                    port_range_max=module.params['port_range_max'],
                    protocol=module.params['protocol'],
                    remote_ip_prefix=module.params['remote_ip_prefix'],
                    remote_group_id=remotegroup['id'],
                    direction=module.params['direction'],
                    ethertype=module.params['ethertype']
                )
                changed = True
            module.exit_json(changed=changed, rule=rule, id=rule['id'])

        if state == 'absent' and secgroup:
            rule = _find_matching_rule(module, secgroup, remotegroup)
            if rule:
                cloud.delete_security_group_rule(rule['id'])
                changed = True
            module.exit_json(changed=changed)

    except shade.OpenStackCloudException as e:
        module.fail_json(msg=str(e))
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
    # Invoke the module entry point when executed by Ansible.
    main()
| gpl-3.0 |
mailjainrahul/cloudify-docker-plugin | docker_plugin/utils.py | 2 | 6574 | # coding=utf-8
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Third-party Imports
from docker.errors import APIError
# Cloudify Imports
from cloudify import ctx
from cloudify.exceptions import RecoverableError, NonRecoverableError
def get_image_id(tag, repository, client):
    """Return the docker image ID whose RepoTags contains repository:tag.

    :param tag: image tag to match
    :param repository: image repository to match
    :param client: docker client
    :raises NonRecoverableError: when listing fails or no image matches
    """
    try:
        image_list = client.images()
    except APIError as e:
        raise NonRecoverableError(
            'Unable to get last created image: {0}'.format(e))
    wanted = '{0}:{1}'.format(repository, tag)
    for image in image_list:
        if wanted in image.get('RepoTags'):
            return image.get('Id')
    raise NonRecoverableError(
        'Could not find an image that matches repository:tag'
        ' {0}:{1}.'.format(repository, tag))
def inspect_container(client):
    """Inspect the container tracked in runtime properties.

    Calls docker inspect with the container id stored in
    ctx.instance.runtime_properties['container_id'].

    :param client: docker client
    :return: inspection dict, or None when no container id is stored
    :raises NonRecoverableError: when the docker API call fails
    """
    container_id = ctx.instance.runtime_properties.get('container_id')
    if container_id is None:
        return None
    try:
        return client.inspect_container(container_id)
    except APIError as e:
        raise NonRecoverableError(
            'Unable to inspect container: {0}'.format(str(e)))
def wait_for_processes(process_names, retry_interval, client):
    """Verify that all named processes are running inside the container.

    The user may provide the node parameter wait_for_processes: a list of
    process names that must appear in `docker top` output before the start
    operation completes. If any is missing, a RecoverableError triggers a
    retry after retry_interval seconds.

    :param process_names: process names to look for
    :param retry_interval: the number of seconds between retries
    :param client: the client. see docker_client.
    """
    ctx.logger.info('Waiting for these processes to finish: '
                    '{}'.format(process_names))
    container = ctx.instance.runtime_properties.get('container_id')
    try:
        top_result = client.top(container)
    except APIError as e:
        raise NonRecoverableError(
            'Unable get container processes from top: {0}'.format(str(e)))

    top_rows = top_result.get('Processes')

    def _is_active(process_name):
        # The last element of each top row is the command line executed.
        return any(process_name in row[-1] for row in top_rows)

    if all(_is_active(name) for name in process_names):
        ctx.logger.info('Container.top(): {0}'.format(top_result))
        return True
    raise RecoverableError(
        'Waiting for all these processes. Retrying...',
        retry_after=retry_interval)
def get_container_dictionary(client):
    """Look this node instance's container up in the Docker daemon.

    The container id is read from
    ctx.instance.runtime_properties['container_id'] and matched against
    the dictionaries returned by the docker containers endpoint.

    :param client: the client. see docker_client.
    :return: the matching container dictionary, or None.
    :raises NonRecoverableError: when listing containers fails.
    """
    container_id = ctx.instance.runtime_properties.get('container_id')
    if container_id is None:
        ctx.logger.debug(
            'Unable to retrieve container dictionary.'
            'ctx container ID value is None')
        return None
    try:
        all_containers = client.containers(all=True)
    except APIError as e:
        raise NonRecoverableError(
            'Unable to list all containers: {0}.'.format(str(e)))
    # Substring match on 'Id' mirrors docker's short-id convention.
    match = next(
        (c for c in all_containers if container_id in c.get('Id')),
        None)
    if match is None:
        ctx.logger.debug(
            'Unable to retrieve container dictionary.'
            'container with ID {} does not exist.'
            .format(container_id))
        return None
    return match
def check_container_status(client):
    """Return the 'Status' field from this instance's container info.

    :param client: the client. see docker_client.
    :return: the status string, or None when the container is not found.
    """
    container_info = get_container_dictionary(client)
    return None if container_info is None \
        else container_info.get('Status', None)
def get_container_id_from_name(name, client):
    """Resolve a container name to its id via the local container list.

    :param name: the name of a container.
    :param client: the client. see docker_client.
    :return: the id of the container whose Names list contains name.
    :raises NonRecoverableError: when no container carries that name.
    """
    for container in client.containers(all=True):
        if name in container.get('Names'):
            return container.get('Id')
    raise NonRecoverableError(
        'No such container: {0}.'.format(name))
def get_top_info(client):
    """Render the container's docker top output as a plain text table.

    The container id is read from
    ctx.instance.runtime_properties['container_id'].

    :param client: docker client
    :return: top output formatted as a simple space/newline table
    :raises NonRecoverableError: when the docker top call fails.
    """
    ctx.logger.info('Getting TOP info of container.')
    container = ctx.instance.runtime_properties.get('container_id')
    try:
        top_dict = client.top(container)
    except APIError as e:
        raise NonRecoverableError(
            'Unable get container processes from top: {0}'.format(str(e)))
    # First row is the column titles, then one row per process.
    rows = [' '.join(top_dict['Titles'])]
    rows.extend(' '.join(process) for process in top_dict['Processes'])
    return '\n'.join(rows)
| apache-2.0 |
YOTOV-LIMITED/kuma | kuma/core/tests/test_helpers.py | 7 | 9094 | # -*- coding: utf-8 -*-
from collections import namedtuple
from datetime import datetime
from babel.dates import format_date, format_time, format_datetime
import bitly_api
import jingo
import mock
from nose.tools import eq_, ok_, assert_raises
from pyquery import PyQuery as pq
import pytz
from soapbox.models import Message
from django.conf import settings
from django.test import RequestFactory
from kuma.core.cache import memcache
from kuma.core.helpers import bitly_shorten, bitly
from kuma.core.tests import KumaTestCase
from kuma.core.urlresolvers import reverse
from kuma.users.tests import UserTestCase
from ..exceptions import DateTimeFormatError
from ..helpers import (timesince, yesno, urlencode,
soapbox_messages, get_soapbox_messages,
datetimeformat, jsonencode, number)
def render(s, context=None):
    """Render the jinja template string ``s`` with ``context`` variables.

    :param s: template source string.
    :param context: optional dict of template variables (defaults to {}).
    :return: the rendered string.
    """
    # Default is None rather than a mutable {} default argument, which
    # would be shared across calls.
    t = jingo.env.from_string(s)
    return t.render(**(context or {}))
class TestHelpers(KumaTestCase):
    """Smoke tests for the number and yesno template helpers."""

    def setUp(self):
        jingo.load_helpers()

    def test_number(self):
        """number() localizes integers and maps None to ''."""
        request = namedtuple('R', 'locale')('en-US')
        context = {'request': request}
        eq_('5,000', number(context, 5000))
        eq_('', number(context, None))

    def test_yesno(self):
        """yesno() maps truthy values to 'Yes' and falsy ones to 'No'."""
        for truthy in (True, 1):
            eq_('Yes', yesno(truthy))
        for falsy in (False, 0):
            eq_('No', yesno(falsy))
class TimesinceTests(KumaTestCase):
    """Tests for the timesince filter"""

    def test_none(self):
        """timesince(None) renders as the empty string."""
        eq_('', timesince(None))

    def test_trunc(self):
        """Only the most significant time unit is reported."""
        then = datetime(2000, 1, 2)
        reference = datetime(2001, 2, 3)
        eq_('1 year ago', timesince(then, now=reference))

    def test_future(self):
        """A date in the future (with now defaulted) renders as ''."""
        eq_('', timesince(datetime(9999, 1, 2)))
class TestUrlEncode(KumaTestCase):

    def test_utf8_urlencode(self):
        """Bug 689056: Unicode strings with non-ASCII characters should not
        throw a KeyError when filtered through URL encoding"""
        s = u"Someguy Dude\xc3\xaas Lastname"
        try:
            urlencode(s)
        except KeyError:
            self.fail("There should be no KeyError")
class TestSoapbox(KumaTestCase):
    """Tests for soapbox message lookup and rendering."""

    def test_global_message(self):
        """A global message matches every URL."""
        msg = Message(message="Global", is_global=True, is_active=True,
                      url="/")
        msg.save()
        for path in ("/", "/en-US/", "/fr/demos/"):
            eq_(msg.message, get_soapbox_messages(path)[0].message)

    def test_subsection_message(self):
        """A non-global message only matches URLs under its own path."""
        msg = Message(message="Derby", is_global=False, is_active=True,
                      url="/demos/devderby")
        msg.save()
        for path in ("/", "/demos", "/en-US/demos"):
            eq_(0, len(get_soapbox_messages(path)))
        eq_(msg.message,
            get_soapbox_messages("/en-US/demos/devderby")[0].message)
        eq_(msg.message,
            get_soapbox_messages("/de/demos/devderby")[0].message)

    def test_message_with_url_is_link(self):
        """URLs inside a message body are rendered as anchor tags."""
        msg = Message(message="Go to http://bit.ly/sample-demo",
                      is_global=True, is_active=True, url="/")
        msg.save()
        ok_('Go to <a href="http://bit.ly/sample-demo">'
            'http://bit.ly/sample-demo</a>' in
            soapbox_messages(get_soapbox_messages("/")))
class TestDateTimeFormat(UserTestCase):
    """Tests for the datetimeformat template helper.

    Expected values are computed with Babel directly, so the assertions
    stay correct for each locale under test.
    """

    def setUp(self):
        super(TestDateTimeFormat, self).setUp()
        home_url = reverse('home')
        request = RequestFactory().get(home_url)
        request.locale = u'en-US'
        request.user = self.user_model.objects.get(username='testuser01')
        self.context = {'request': request}

    def _assert_renders(self, value_returned, value_expected):
        # The helper wraps its output in a <time> element; compare its text.
        eq_(pq(value_returned)('time').text(), value_expected)

    def test_today(self):
        """Expects shortdatetime, format: Today at {time}."""
        date_today = datetime.today()
        expected = 'Today at %s' % format_time(
            date_today, format='short', locale=u'en_US')
        self._assert_renders(
            unicode(datetimeformat(self.context, date_today)), expected)

    def test_locale(self):
        """Expects shortdatetime in French."""
        self.context['request'].locale = u'fr'
        value = datetime.fromordinal(733900)
        expected = format_datetime(value, format='short', locale=u'fr')
        self._assert_renders(datetimeformat(self.context, value), expected)

    def test_default(self):
        """Expects shortdatetime."""
        value = datetime.fromordinal(733900)
        expected = format_datetime(value, format='short', locale=u'en_US')
        self._assert_renders(datetimeformat(self.context, value), expected)

    def test_longdatetime(self):
        """Expects long format."""
        value = datetime.fromordinal(733900)
        tzvalue = pytz.timezone(settings.TIME_ZONE).localize(value)
        expected = format_datetime(tzvalue, format='long', locale=u'en_US')
        self._assert_renders(
            datetimeformat(self.context, value, format='longdatetime'),
            expected)

    def test_date(self):
        """Expects date format."""
        value = datetime.fromordinal(733900)
        expected = format_date(value, locale=u'en_US')
        self._assert_renders(
            datetimeformat(self.context, value, format='date'), expected)

    def test_time(self):
        """Expects time format."""
        value = datetime.fromordinal(733900)
        expected = format_time(value, locale=u'en_US')
        self._assert_renders(
            datetimeformat(self.context, value, format='time'), expected)

    def test_datetime(self):
        """Expects datetime format."""
        value = datetime.fromordinal(733900)
        expected = format_datetime(value, locale=u'en_US')
        self._assert_renders(
            datetimeformat(self.context, value, format='datetime'), expected)

    def test_unknown_format(self):
        """Unknown format raises DateTimeFormatError."""
        assert_raises(DateTimeFormatError, datetimeformat, self.context,
                      datetime.today(), format='unknown')

    def test_invalid_value(self):
        """Passing invalid value raises ValueError."""
        assert_raises(ValueError, datetimeformat, self.context, 'invalid')

    def test_json_helper(self):
        """jsonencode serializes plain Python values to JSON text."""
        eq_('false', jsonencode(False))
        eq_('{"foo": "bar"}', jsonencode({'foo': 'bar'}))

    def test_user_timezone(self):
        """Shows time in user timezone."""
        value = datetime.fromordinal(733900)
        # Choose a user with a non-default timezone.
        user = self.user_model.objects.get(username='admin')
        self.context['request'].user = user
        # Localize in the server timezone, then convert to the user's.
        default_tz = pytz.timezone(settings.TIME_ZONE)
        user_tz = pytz.timezone(user.timezone)
        tzvalue = user_tz.normalize(
            default_tz.localize(value).astimezone(user_tz))
        expected = format_datetime(tzvalue, format='long', locale=u'en_US')
        self._assert_renders(
            datetimeformat(self.context, value, format='longdatetime'),
            expected)
class BitlyTestCase(KumaTestCase):
    """Tests bitly_shorten: it returns the shortened URL on success and
    falls back to the original long URL on any bitly failure."""
    # Patch cache.set so results are not cached between the scenarios
    # below, and patch the bitly client's shorten call itself.
    @mock.patch.object(memcache, 'set') # prevent caching
    @mock.patch.object(bitly, 'shorten')
    def test_bitly_shorten(self, shorten, cache_set):
        long_url = 'http://example.com/long-url'
        short_url = 'http://bit.ly/short-url'
        # the usual case of returning a dict with a URL
        def short_mock(*args, **kwargs):
            return {'url': short_url}
        shorten.side_effect = short_mock
        eq_(bitly_shorten(long_url), short_url)
        shorten.assert_called_with(long_url)
        # in case of a key error (empty response dict), the helper falls
        # back to returning the long URL instead of raising
        def short_mock(*args, **kwargs):
            return {}
        shorten.side_effect = short_mock
        eq_(bitly_shorten(long_url), long_url)
        shorten.assert_called_with(long_url)
        # in case of an upstream error, the long URL is returned as-is
        shorten.side_effect = bitly_api.BitlyError('500', 'fail fail fail')
        eq_(bitly_shorten(long_url), long_url)
| mpl-2.0 |
akaariai/django | tests/humanize_tests/tests.py | 336 | 13163 | from __future__ import unicode_literals
import datetime
from decimal import Decimal
from unittest import skipIf
from django.contrib.humanize.templatetags import humanize
from django.template import Context, Template, defaultfilters
from django.test import TestCase, modify_settings, override_settings
from django.utils import translation
from django.utils.html import escape
from django.utils.timezone import get_fixed_timezone, utc
from django.utils.translation import ugettext as _
try:
import pytz
except ImportError:
pytz = None
# Mock out datetime in some tests so they don't fail occasionally when they
# run too slow. Use a fixed datetime for datetime.now(). DST change in
# America/Chicago (the default time zone) happened on March 11th in 2012.
now = datetime.datetime(2012, 3, 9, 22, 30)
class MockDateTime(datetime.datetime):
    """datetime replacement whose now() always reports the fixed `now`."""

    @classmethod
    def now(cls, tz=None):
        offset = None if tz is None else tz.utcoffset(now)
        if offset is None:
            return now
        # equals now.replace(tzinfo=utc) when tz is UTC
        return now.replace(tzinfo=tz) + offset
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.humanize'})
class HumanizeTests(TestCase):
    """Tests for the django.contrib.humanize template filters."""

    def humanize_tester(self, test_list, result_list, method, normalize_result_func=escape):
        """Render each entry of test_list through the humanize filter
        named by `method` and compare with the matching expected result."""
        for test_content, result in zip(test_list, result_list):
            t = Template('{%% load humanize %%}{{ test_content|%s }}' % method)
            rendered = t.render(Context(locals())).strip()
            self.assertEqual(rendered, normalize_result_func(result),
                             msg="%s test failed, produced '%s', should've produced '%s'" % (method, rendered, result))

    def test_ordinal(self):
        test_list = ('1', '2', '3', '4', '11', '12',
                     '13', '101', '102', '103', '111',
                     'something else', None)
        result_list = ('1st', '2nd', '3rd', '4th', '11th',
                       '12th', '13th', '101st', '102nd', '103rd',
                       '111th', 'something else', None)
        with translation.override('en'):
            self.humanize_tester(test_list, result_list, 'ordinal')

    def test_i18n_html_ordinal(self):
        """Allow html in output on i18n strings"""
        test_list = ('1', '2', '3', '4', '11', '12',
                     '13', '101', '102', '103', '111',
                     'something else', None)
        result_list = ('1<sup>er</sup>', '2<sup>e</sup>', '3<sup>e</sup>', '4<sup>e</sup>',
                       '11<sup>e</sup>', '12<sup>e</sup>', '13<sup>e</sup>', '101<sup>er</sup>',
                       '102<sup>e</sup>', '103<sup>e</sup>', '111<sup>e</sup>', 'something else',
                       'None')
        with translation.override('fr-fr'):
            self.humanize_tester(test_list, result_list, 'ordinal', lambda x: x)

    def test_intcomma(self):
        test_list = (100, 1000, 10123, 10311, 1000000, 1234567.25,
                     '100', '1000', '10123', '10311', '1000000', '1234567.1234567', Decimal('1234567.1234567'),
                     None)
        result_list = ('100', '1,000', '10,123', '10,311', '1,000,000', '1,234,567.25',
                       '100', '1,000', '10,123', '10,311', '1,000,000', '1,234,567.1234567', '1,234,567.1234567',
                       None)
        with translation.override('en'):
            self.humanize_tester(test_list, result_list, 'intcomma')

    def test_l10n_intcomma(self):
        test_list = (100, 1000, 10123, 10311, 1000000, 1234567.25,
                     '100', '1000', '10123', '10311', '1000000', '1234567.1234567', Decimal('1234567.1234567'),
                     None)
        result_list = ('100', '1,000', '10,123', '10,311', '1,000,000', '1,234,567.25',
                       '100', '1,000', '10,123', '10,311', '1,000,000', '1,234,567.1234567', '1,234,567.1234567',
                       None)
        with self.settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=False):
            with translation.override('en'):
                self.humanize_tester(test_list, result_list, 'intcomma')

    def test_intcomma_without_number_grouping(self):
        # Regression for #17414
        with translation.override('ja'), self.settings(USE_L10N=True):
            self.humanize_tester([100], ['100'], 'intcomma')

    def test_intword(self):
        test_list = ('100', '1000000', '1200000', '1290000',
                     '1000000000', '2000000000', '6000000000000',
                     '1300000000000000', '3500000000000000000000',
                     '8100000000000000000000000000000000', None)
        result_list = ('100', '1.0 million', '1.2 million', '1.3 million',
                       '1.0 billion', '2.0 billion', '6.0 trillion',
                       '1.3 quadrillion', '3.5 sextillion',
                       '8.1 decillion', None)
        with translation.override('en'):
            self.humanize_tester(test_list, result_list, 'intword')

    def test_i18n_intcomma(self):
        test_list = (100, 1000, 10123, 10311, 1000000, 1234567.25,
                     '100', '1000', '10123', '10311', '1000000', None)
        result_list = ('100', '1.000', '10.123', '10.311', '1.000.000', '1.234.567,25',
                       '100', '1.000', '10.123', '10.311', '1.000.000', None)
        with self.settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True):
            with translation.override('de'):
                self.humanize_tester(test_list, result_list, 'intcomma')

    def test_i18n_intword(self):
        test_list = ('100', '1000000', '1200000', '1290000',
                     '1000000000', '2000000000', '6000000000000')
        result_list = ('100', '1,0 Million', '1,2 Millionen', '1,3 Millionen',
                       '1,0 Milliarde', '2,0 Milliarden', '6,0 Billionen')
        with self.settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True):
            with translation.override('de'):
                self.humanize_tester(test_list, result_list, 'intword')

    def test_apnumber(self):
        test_list = [str(x) for x in range(1, 11)]
        test_list.append(None)
        result_list = ('one', 'two', 'three', 'four', 'five', 'six',
                       'seven', 'eight', 'nine', '10', None)
        with translation.override('en'):
            self.humanize_tester(test_list, result_list, 'apnumber')

    def test_naturalday(self):
        today = datetime.date.today()
        yesterday = today - datetime.timedelta(days=1)
        tomorrow = today + datetime.timedelta(days=1)
        someday = today - datetime.timedelta(days=10)
        notdate = "I'm not a date value"
        test_list = (today, yesterday, tomorrow, someday, notdate, None)
        someday_result = defaultfilters.date(someday)
        result_list = (_('today'), _('yesterday'), _('tomorrow'),
                       someday_result, "I'm not a date value", None)
        self.humanize_tester(test_list, result_list, 'naturalday')

    def test_naturalday_tz(self):
        today = datetime.date.today()
        tz_one = get_fixed_timezone(-720)
        tz_two = get_fixed_timezone(720)
        # Can be today or yesterday
        date_one = datetime.datetime(today.year, today.month, today.day, tzinfo=tz_one)
        naturalday_one = humanize.naturalday(date_one)
        # Can be today or tomorrow
        date_two = datetime.datetime(today.year, today.month, today.day, tzinfo=tz_two)
        naturalday_two = humanize.naturalday(date_two)
        # As 24h of difference they will never be the same
        self.assertNotEqual(naturalday_one, naturalday_two)

    @skipIf(pytz is None, "this test requires pytz")
    def test_naturalday_uses_localtime(self):
        # Regression for #18504
        # This is 2012-03-08T19:30:00-06:00 in America/Chicago
        dt = datetime.datetime(2012, 3, 9, 1, 30, tzinfo=utc)
        orig_humanize_datetime, humanize.datetime = humanize.datetime, MockDateTime
        try:
            with override_settings(TIME_ZONE="America/Chicago", USE_TZ=True):
                with translation.override('en'):
                    self.humanize_tester([dt], ['yesterday'], 'naturalday')
        finally:
            humanize.datetime = orig_humanize_datetime

    def test_naturaltime(self):
        class naive(datetime.tzinfo):
            def utcoffset(self, dt):
                return None
        test_list = [
            now,
            now - datetime.timedelta(seconds=1),
            now - datetime.timedelta(seconds=30),
            now - datetime.timedelta(minutes=1, seconds=30),
            now - datetime.timedelta(minutes=2),
            now - datetime.timedelta(hours=1, minutes=30, seconds=30),
            now - datetime.timedelta(hours=23, minutes=50, seconds=50),
            now - datetime.timedelta(days=1),
            now - datetime.timedelta(days=500),
            now + datetime.timedelta(seconds=1),
            now + datetime.timedelta(seconds=30),
            now + datetime.timedelta(minutes=1, seconds=30),
            now + datetime.timedelta(minutes=2),
            now + datetime.timedelta(hours=1, minutes=30, seconds=30),
            now + datetime.timedelta(hours=23, minutes=50, seconds=50),
            now + datetime.timedelta(days=1),
            now + datetime.timedelta(days=2, hours=6),
            now + datetime.timedelta(days=500),
            now.replace(tzinfo=naive()),
            now.replace(tzinfo=utc),
        ]
        result_list = [
            'now',
            'a second ago',
            '30\xa0seconds ago',
            'a minute ago',
            '2\xa0minutes ago',
            'an hour ago',
            '23\xa0hours ago',
            '1\xa0day ago',
            '1\xa0year, 4\xa0months ago',
            'a second from now',
            '30\xa0seconds from now',
            'a minute from now',
            '2\xa0minutes from now',
            'an hour from now',
            '23\xa0hours from now',
            '1\xa0day from now',
            '2\xa0days, 6\xa0hours from now',
            '1\xa0year, 4\xa0months from now',
            'now',
            'now',
        ]
        # Because of the DST change, 2 days and 6 hours after the chosen
        # date in naive arithmetic is only 2 days and 5 hours after in
        # aware arithmetic.
        result_list_with_tz_support = result_list[:]
        assert result_list_with_tz_support[-4] == '2\xa0days, 6\xa0hours from now'
        # Bug fix: this line previously used '==' (a no-op comparison
        # statement); it must *assign* the tz-aware expectation.
        result_list_with_tz_support[-4] = '2\xa0days, 5\xa0hours from now'
        orig_humanize_datetime, humanize.datetime = humanize.datetime, MockDateTime
        try:
            with translation.override('en'):
                self.humanize_tester(test_list, result_list, 'naturaltime')
                with override_settings(USE_TZ=True):
                    self.humanize_tester(
                        test_list, result_list_with_tz_support, 'naturaltime')
        finally:
            humanize.datetime = orig_humanize_datetime

    def test_naturaltime_as_documented(self):
        """
        #23340 -- Verify the documented behavior of humanize.naturaltime.
        """
        time_format = '%d %b %Y %H:%M:%S'
        documented_now = datetime.datetime.strptime('17 Feb 2007 16:30:00', time_format)
        test_data = (
            ('17 Feb 2007 16:30:00', 'now'),
            ('17 Feb 2007 16:29:31', '29 seconds ago'),
            ('17 Feb 2007 16:29:00', 'a minute ago'),
            ('17 Feb 2007 16:25:35', '4 minutes ago'),
            ('17 Feb 2007 15:30:29', '59 minutes ago'),
            ('17 Feb 2007 15:30:01', '59 minutes ago'),
            ('17 Feb 2007 15:30:00', 'an hour ago'),
            ('17 Feb 2007 13:31:29', '2 hours ago'),
            ('16 Feb 2007 13:31:29', '1 day, 2 hours ago'),
            ('16 Feb 2007 13:30:01', '1 day, 2 hours ago'),
            ('16 Feb 2007 13:30:00', '1 day, 3 hours ago'),
            ('17 Feb 2007 16:30:30', '30 seconds from now'),
            ('17 Feb 2007 16:30:29', '29 seconds from now'),
            ('17 Feb 2007 16:31:00', 'a minute from now'),
            ('17 Feb 2007 16:34:35', '4 minutes from now'),
            ('17 Feb 2007 17:30:29', 'an hour from now'),
            ('17 Feb 2007 18:31:29', '2 hours from now'),
            ('18 Feb 2007 16:31:29', '1 day from now'),
            ('26 Feb 2007 18:31:29', '1 week, 2 days from now'),
        )

        class DocumentedMockDateTime(datetime.datetime):
            @classmethod
            def now(cls, tz=None):
                if tz is None or tz.utcoffset(documented_now) is None:
                    return documented_now
                else:
                    # NOTE(review): uses module-level `now` for the offset;
                    # presumably documented_now was intended. With a fixed
                    # offset tz the result is the same -- confirm.
                    return documented_now.replace(tzinfo=tz) + tz.utcoffset(now)

        orig_humanize_datetime = humanize.datetime
        humanize.datetime = DocumentedMockDateTime
        try:
            for test_time_string, expected_natural_time in test_data:
                test_time = datetime.datetime.strptime(test_time_string, time_format)
                natural_time = humanize.naturaltime(test_time).replace('\xa0', ' ')
                self.assertEqual(expected_natural_time, natural_time)
        finally:
            humanize.datetime = orig_humanize_datetime
| bsd-3-clause |
lahosken/pants | tests/python/pants_test/backend/codegen/antlr/java/test_antlr_java_gen_integration.py | 8 | 1286 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants_test.pants_run_integration_test import PantsRunIntegrationTest
class AntlrJavaGenIntegrationTest(PantsRunIntegrationTest):
  """Integration tests for ANTLR 3/4 code generation.

  Each test bundles the corresponding example app, runs it with an
  arithmetic expression argument, and checks the evaluated result on
  stdout.
  """
  def test_run_antlr3(self):
    # Bundle the antlr3 example target and evaluate '7*8' with it.
    stdout_data = self.bundle_and_run('examples/src/java/org/pantsbuild/example/antlr3',
                                      'examples.src.java.org.pantsbuild.example.antlr3.antlr3',
                                      bundle_jar_name='antlr3',
                                      args=['7*8'])
    self.assertEquals('56.0', stdout_data.rstrip(), msg="got output:{0}".format(stdout_data))
  def test_run_antlr4(self):
    # Bundle the antlr4 example target and evaluate '7*6' with it.
    stdout_data = self.bundle_and_run('examples/src/java/org/pantsbuild/example/antlr4',
                                      'examples.src.java.org.pantsbuild.example.antlr4.antlr4',
                                      bundle_jar_name='antlr4',
                                      args=['7*6'])
    self.assertEquals('42.0', stdout_data.rstrip(), msg="got output:{0}".format(stdout_data))
| apache-2.0 |
dhermes/google-cloud-python | bigtable/google/cloud/bigtable_admin_v2/proto/table_pb2.py | 2 | 36159 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/bigtable/admin_v2/proto/table.proto
import sys
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name="google/cloud/bigtable/admin_v2/proto/table.proto",
package="google.bigtable.admin.v2",
syntax="proto3",
serialized_pb=_b(
'\n0google/cloud/bigtable/admin_v2/proto/table.proto\x12\x18google.bigtable.admin.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcb\x06\n\x05Table\x12\x0c\n\x04name\x18\x01 \x01(\t\x12J\n\x0e\x63luster_states\x18\x02 \x03(\x0b\x32\x32.google.bigtable.admin.v2.Table.ClusterStatesEntry\x12L\n\x0f\x63olumn_families\x18\x03 \x03(\x0b\x32\x33.google.bigtable.admin.v2.Table.ColumnFamiliesEntry\x12I\n\x0bgranularity\x18\x04 \x01(\x0e\x32\x34.google.bigtable.admin.v2.Table.TimestampGranularity\x1a\xe2\x01\n\x0c\x43lusterState\x12X\n\x11replication_state\x18\x01 \x01(\x0e\x32=.google.bigtable.admin.v2.Table.ClusterState.ReplicationState"x\n\x10ReplicationState\x12\x13\n\x0fSTATE_NOT_KNOWN\x10\x00\x12\x10\n\x0cINITIALIZING\x10\x01\x12\x17\n\x13PLANNED_MAINTENANCE\x10\x02\x12\x19\n\x15UNPLANNED_MAINTENANCE\x10\x03\x12\t\n\x05READY\x10\x04\x1a\x62\n\x12\x43lusterStatesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12;\n\x05value\x18\x02 \x01(\x0b\x32,.google.bigtable.admin.v2.Table.ClusterState:\x02\x38\x01\x1a]\n\x13\x43olumnFamiliesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.bigtable.admin.v2.ColumnFamily:\x02\x38\x01"I\n\x14TimestampGranularity\x12%\n!TIMESTAMP_GRANULARITY_UNSPECIFIED\x10\x00\x12\n\n\x06MILLIS\x10\x01"\\\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\r\n\tNAME_ONLY\x10\x01\x12\x0f\n\x0bSCHEMA_VIEW\x10\x02\x12\x14\n\x10REPLICATION_VIEW\x10\x03\x12\x08\n\x04\x46ULL\x10\x04"A\n\x0c\x43olumnFamily\x12\x31\n\x07gc_rule\x18\x01 \x01(\x0b\x32 .google.bigtable.admin.v2.GcRule"\xd5\x02\n\x06GcRule\x12\x1a\n\x10max_num_versions\x18\x01 \x01(\x05H\x00\x12,\n\x07max_age\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x45\n\x0cintersection\x18\x03 \x01(\x0b\x32-.google.bigtable.admin.v2.GcRule.IntersectionH\x00\x12\x37\n\x05union\x18\x04 \x01(\x0b\x32&.google.bigtable.admin.v2.GcRule.UnionH\x00\x1a?\n\x0cIntersection\x12/\n\x05rules\x18\x01 
\x03(\x0b\x32 .google.bigtable.admin.v2.GcRule\x1a\x38\n\x05Union\x12/\n\x05rules\x18\x01 \x03(\x0b\x32 .google.bigtable.admin.v2.GcRuleB\x06\n\x04rule"\xcf\x02\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x35\n\x0csource_table\x18\x02 \x01(\x0b\x32\x1f.google.bigtable.admin.v2.Table\x12\x17\n\x0f\x64\x61ta_size_bytes\x18\x03 \x01(\x03\x12/\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x64\x65lete_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x37\n\x05state\x18\x06 \x01(\x0e\x32(.google.bigtable.admin.v2.Snapshot.State\x12\x13\n\x0b\x64\x65scription\x18\x07 \x01(\t"5\n\x05State\x12\x13\n\x0fSTATE_NOT_KNOWN\x10\x00\x12\t\n\x05READY\x10\x01\x12\x0c\n\x08\x43REATING\x10\x02\x42\xad\x01\n\x1c\x63om.google.bigtable.admin.v2B\nTableProtoP\x01Z=google.golang.org/genproto/googleapis/bigtable/admin/v2;admin\xaa\x02\x1eGoogle.Cloud.Bigtable.Admin.V2\xca\x02\x1eGoogle\\Cloud\\Bigtable\\Admin\\V2b\x06proto3'
),
dependencies=[
google_dot_api_dot_annotations__pb2.DESCRIPTOR,
google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,
google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
],
)
_TABLE_CLUSTERSTATE_REPLICATIONSTATE = _descriptor.EnumDescriptor(
name="ReplicationState",
full_name="google.bigtable.admin.v2.Table.ClusterState.ReplicationState",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="STATE_NOT_KNOWN", index=0, number=0, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="INITIALIZING", index=1, number=1, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="PLANNED_MAINTENANCE", index=2, number=2, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="UNPLANNED_MAINTENANCE", index=3, number=3, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="READY", index=4, number=4, options=None, type=None
),
],
containing_type=None,
options=None,
serialized_start=533,
serialized_end=653,
)
_sym_db.RegisterEnumDescriptor(_TABLE_CLUSTERSTATE_REPLICATIONSTATE)
_TABLE_TIMESTAMPGRANULARITY = _descriptor.EnumDescriptor(
name="TimestampGranularity",
full_name="google.bigtable.admin.v2.Table.TimestampGranularity",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="TIMESTAMP_GRANULARITY_UNSPECIFIED",
index=0,
number=0,
options=None,
type=None,
),
_descriptor.EnumValueDescriptor(
name="MILLIS", index=1, number=1, options=None, type=None
),
],
containing_type=None,
options=None,
serialized_start=850,
serialized_end=923,
)
_sym_db.RegisterEnumDescriptor(_TABLE_TIMESTAMPGRANULARITY)
_TABLE_VIEW = _descriptor.EnumDescriptor(
name="View",
full_name="google.bigtable.admin.v2.Table.View",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="VIEW_UNSPECIFIED", index=0, number=0, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="NAME_ONLY", index=1, number=1, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="SCHEMA_VIEW", index=2, number=2, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="REPLICATION_VIEW", index=3, number=3, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="FULL", index=4, number=4, options=None, type=None
),
],
containing_type=None,
options=None,
serialized_start=925,
serialized_end=1017,
)
_sym_db.RegisterEnumDescriptor(_TABLE_VIEW)
_SNAPSHOT_STATE = _descriptor.EnumDescriptor(
name="State",
full_name="google.bigtable.admin.v2.Snapshot.State",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="STATE_NOT_KNOWN", index=0, number=0, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="READY", index=1, number=1, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="CREATING", index=2, number=2, options=None, type=None
),
],
containing_type=None,
options=None,
serialized_start=1713,
serialized_end=1766,
)
_sym_db.RegisterEnumDescriptor(_SNAPSHOT_STATE)
_TABLE_CLUSTERSTATE = _descriptor.Descriptor(
name="ClusterState",
full_name="google.bigtable.admin.v2.Table.ClusterState",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="replication_state",
full_name="google.bigtable.admin.v2.Table.ClusterState.replication_state",
index=0,
number=1,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[_TABLE_CLUSTERSTATE_REPLICATIONSTATE],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=427,
serialized_end=653,
)
_TABLE_CLUSTERSTATESENTRY = _descriptor.Descriptor(
name="ClusterStatesEntry",
full_name="google.bigtable.admin.v2.Table.ClusterStatesEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.bigtable.admin.v2.Table.ClusterStatesEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.bigtable.admin.v2.Table.ClusterStatesEntry.value",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=655,
serialized_end=753,
)
_TABLE_COLUMNFAMILIESENTRY = _descriptor.Descriptor(
name="ColumnFamiliesEntry",
full_name="google.bigtable.admin.v2.Table.ColumnFamiliesEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.bigtable.admin.v2.Table.ColumnFamiliesEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.bigtable.admin.v2.Table.ColumnFamiliesEntry.value",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=755,
serialized_end=848,
)
# Machine-generated by protoc — do not edit by hand; regenerate from table.proto.
# Descriptor for google.bigtable.admin.v2.Table; message/enum cross-references
# (message_type/enum_type) are left None here and patched in below, after all
# descriptors exist.
_TABLE = _descriptor.Descriptor(
    name="Table",
    full_name="google.bigtable.admin.v2.Table",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.bigtable.admin.v2.Table.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="cluster_states",
            full_name="google.bigtable.admin.v2.Table.cluster_states",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=3,
            has_default_value=False,
            default_value=[],
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="column_families",
            full_name="google.bigtable.admin.v2.Table.column_families",
            index=2,
            number=3,
            type=11,
            cpp_type=10,
            label=3,
            has_default_value=False,
            default_value=[],
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="granularity",
            full_name="google.bigtable.admin.v2.Table.granularity",
            index=3,
            number=4,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[
        _TABLE_CLUSTERSTATE,
        _TABLE_CLUSTERSTATESENTRY,
        _TABLE_COLUMNFAMILIESENTRY,
    ],
    enum_types=[_TABLE_TIMESTAMPGRANULARITY, _TABLE_VIEW],
    options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=174,
    serialized_end=1017,
)
# Machine-generated by protoc — do not edit by hand; regenerate from table.proto.
# Descriptor for google.bigtable.admin.v2.ColumnFamily (single gc_rule field).
_COLUMNFAMILY = _descriptor.Descriptor(
    name="ColumnFamily",
    full_name="google.bigtable.admin.v2.ColumnFamily",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="gc_rule",
            full_name="google.bigtable.admin.v2.ColumnFamily.gc_rule",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        )
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1019,
    serialized_end=1084,
)
# Machine-generated by protoc — do not edit by hand; regenerate from table.proto.
# Descriptor for the nested GcRule.Intersection message (repeated rules).
_GCRULE_INTERSECTION = _descriptor.Descriptor(
    name="Intersection",
    full_name="google.bigtable.admin.v2.GcRule.Intersection",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="rules",
            full_name="google.bigtable.admin.v2.GcRule.Intersection.rules",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=3,
            has_default_value=False,
            default_value=[],
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        )
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1299,
    serialized_end=1362,
)
# Machine-generated by protoc — do not edit by hand; regenerate from table.proto.
# Descriptor for the nested GcRule.Union message (repeated rules).
_GCRULE_UNION = _descriptor.Descriptor(
    name="Union",
    full_name="google.bigtable.admin.v2.GcRule.Union",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="rules",
            full_name="google.bigtable.admin.v2.GcRule.Union.rules",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=3,
            has_default_value=False,
            default_value=[],
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        )
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1364,
    serialized_end=1420,
)
# Machine-generated by protoc — do not edit by hand; regenerate from table.proto.
# Descriptor for google.bigtable.admin.v2.GcRule. All four fields belong to the
# "rule" oneof; oneof membership is wired up after construction, below.
_GCRULE = _descriptor.Descriptor(
    name="GcRule",
    full_name="google.bigtable.admin.v2.GcRule",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="max_num_versions",
            full_name="google.bigtable.admin.v2.GcRule.max_num_versions",
            index=0,
            number=1,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="max_age",
            full_name="google.bigtable.admin.v2.GcRule.max_age",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="intersection",
            full_name="google.bigtable.admin.v2.GcRule.intersection",
            index=2,
            number=3,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="union",
            full_name="google.bigtable.admin.v2.GcRule.union",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[_GCRULE_INTERSECTION, _GCRULE_UNION],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[
        _descriptor.OneofDescriptor(
            name="rule",
            full_name="google.bigtable.admin.v2.GcRule.rule",
            index=0,
            containing_type=None,
            fields=[],
        )
    ],
    serialized_start=1087,
    serialized_end=1428,
)
# Machine-generated by protoc — do not edit by hand; regenerate from table.proto.
# Descriptor for google.bigtable.admin.v2.Snapshot.
_SNAPSHOT = _descriptor.Descriptor(
    name="Snapshot",
    full_name="google.bigtable.admin.v2.Snapshot",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.bigtable.admin.v2.Snapshot.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="source_table",
            full_name="google.bigtable.admin.v2.Snapshot.source_table",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="data_size_bytes",
            full_name="google.bigtable.admin.v2.Snapshot.data_size_bytes",
            index=2,
            number=3,
            type=3,
            cpp_type=2,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="create_time",
            full_name="google.bigtable.admin.v2.Snapshot.create_time",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="delete_time",
            full_name="google.bigtable.admin.v2.Snapshot.delete_time",
            index=4,
            number=5,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="state",
            full_name="google.bigtable.admin.v2.Snapshot.state",
            index=5,
            number=6,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="description",
            full_name="google.bigtable.admin.v2.Snapshot.description",
            index=6,
            number=7,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[_SNAPSHOT_STATE],
    options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1431,
    serialized_end=1766,
)
# Machine-generated by protoc — do not edit by hand; regenerate from table.proto.
# Post-construction wiring: resolve the message/enum cross-references and oneof
# memberships that were left None above, then register the file descriptor with
# the symbol database.
_TABLE_CLUSTERSTATE.fields_by_name[
    "replication_state"
].enum_type = _TABLE_CLUSTERSTATE_REPLICATIONSTATE
_TABLE_CLUSTERSTATE.containing_type = _TABLE
_TABLE_CLUSTERSTATE_REPLICATIONSTATE.containing_type = _TABLE_CLUSTERSTATE
_TABLE_CLUSTERSTATESENTRY.fields_by_name["value"].message_type = _TABLE_CLUSTERSTATE
_TABLE_CLUSTERSTATESENTRY.containing_type = _TABLE
_TABLE_COLUMNFAMILIESENTRY.fields_by_name["value"].message_type = _COLUMNFAMILY
_TABLE_COLUMNFAMILIESENTRY.containing_type = _TABLE
_TABLE.fields_by_name["cluster_states"].message_type = _TABLE_CLUSTERSTATESENTRY
_TABLE.fields_by_name["column_families"].message_type = _TABLE_COLUMNFAMILIESENTRY
_TABLE.fields_by_name["granularity"].enum_type = _TABLE_TIMESTAMPGRANULARITY
_TABLE_TIMESTAMPGRANULARITY.containing_type = _TABLE
_TABLE_VIEW.containing_type = _TABLE
_COLUMNFAMILY.fields_by_name["gc_rule"].message_type = _GCRULE
_GCRULE_INTERSECTION.fields_by_name["rules"].message_type = _GCRULE
_GCRULE_INTERSECTION.containing_type = _GCRULE
_GCRULE_UNION.fields_by_name["rules"].message_type = _GCRULE
_GCRULE_UNION.containing_type = _GCRULE
# max_age / create_time / delete_time reference well-known types from the
# google.protobuf Duration and Timestamp modules.
_GCRULE.fields_by_name[
    "max_age"
].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_GCRULE.fields_by_name["intersection"].message_type = _GCRULE_INTERSECTION
_GCRULE.fields_by_name["union"].message_type = _GCRULE_UNION
# Attach the four GcRule fields to the "rule" oneof.
_GCRULE.oneofs_by_name["rule"].fields.append(_GCRULE.fields_by_name["max_num_versions"])
_GCRULE.fields_by_name["max_num_versions"].containing_oneof = _GCRULE.oneofs_by_name[
    "rule"
]
_GCRULE.oneofs_by_name["rule"].fields.append(_GCRULE.fields_by_name["max_age"])
_GCRULE.fields_by_name["max_age"].containing_oneof = _GCRULE.oneofs_by_name["rule"]
_GCRULE.oneofs_by_name["rule"].fields.append(_GCRULE.fields_by_name["intersection"])
_GCRULE.fields_by_name["intersection"].containing_oneof = _GCRULE.oneofs_by_name["rule"]
_GCRULE.oneofs_by_name["rule"].fields.append(_GCRULE.fields_by_name["union"])
_GCRULE.fields_by_name["union"].containing_oneof = _GCRULE.oneofs_by_name["rule"]
_SNAPSHOT.fields_by_name["source_table"].message_type = _TABLE
_SNAPSHOT.fields_by_name[
    "create_time"
].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_SNAPSHOT.fields_by_name[
    "delete_time"
].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_SNAPSHOT.fields_by_name["state"].enum_type = _SNAPSHOT_STATE
_SNAPSHOT_STATE.containing_type = _SNAPSHOT
DESCRIPTOR.message_types_by_name["Table"] = _TABLE
DESCRIPTOR.message_types_by_name["ColumnFamily"] = _COLUMNFAMILY
DESCRIPTOR.message_types_by_name["GcRule"] = _GCRULE
DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Table = _reflection.GeneratedProtocolMessageType(
"Table",
(_message.Message,),
dict(
ClusterState=_reflection.GeneratedProtocolMessageType(
"ClusterState",
(_message.Message,),
dict(
DESCRIPTOR=_TABLE_CLUSTERSTATE,
__module__="google.cloud.bigtable.admin_v2.proto.table_pb2",
__doc__="""The state of a table's data in a particular cluster.
Attributes:
replication_state:
(``OutputOnly``) The state of replication for the table in
this cluster.
""",
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.Table.ClusterState)
),
),
ClusterStatesEntry=_reflection.GeneratedProtocolMessageType(
"ClusterStatesEntry",
(_message.Message,),
dict(
DESCRIPTOR=_TABLE_CLUSTERSTATESENTRY,
__module__="google.cloud.bigtable.admin_v2.proto.table_pb2"
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.Table.ClusterStatesEntry)
),
),
ColumnFamiliesEntry=_reflection.GeneratedProtocolMessageType(
"ColumnFamiliesEntry",
(_message.Message,),
dict(
DESCRIPTOR=_TABLE_COLUMNFAMILIESENTRY,
__module__="google.cloud.bigtable.admin_v2.proto.table_pb2"
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.Table.ColumnFamiliesEntry)
),
),
DESCRIPTOR=_TABLE,
__module__="google.cloud.bigtable.admin_v2.proto.table_pb2",
__doc__="""A collection of user data indexed by row, column, and timestamp. Each
table is served using the resources of its parent cluster.
Attributes:
name:
(``OutputOnly``) The unique name of the table. Values are of
the form ``projects/<project>/instances/<instance>/tables/[_a-
zA-Z0-9][-_.a-zA-Z0-9]*``. Views: ``NAME_ONLY``,
``SCHEMA_VIEW``, ``REPLICATION_VIEW``, ``FULL``
cluster_states:
(``OutputOnly``) Map from cluster ID to per-cluster table
state. If it could not be determined whether or not the table
has data in a particular cluster (for example, if its zone is
unavailable), then there will be an entry for the cluster with
UNKNOWN ``replication_status``. Views: ``REPLICATION_VIEW``,
``FULL``
column_families:
(``CreationOnly``) The column families configured for this
table, mapped by column family ID. Views: ``SCHEMA_VIEW``,
``FULL``
granularity:
(``CreationOnly``) The granularity (i.e. ``MILLIS``) at which
timestamps are stored in this table. Timestamps not matching
the granularity will be rejected. If unspecified at creation
time, the value will be set to ``MILLIS``. Views:
``SCHEMA_VIEW``, ``FULL``
""",
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.Table)
),
)
_sym_db.RegisterMessage(Table)
_sym_db.RegisterMessage(Table.ClusterState)
_sym_db.RegisterMessage(Table.ClusterStatesEntry)
_sym_db.RegisterMessage(Table.ColumnFamiliesEntry)
ColumnFamily = _reflection.GeneratedProtocolMessageType(
"ColumnFamily",
(_message.Message,),
dict(
DESCRIPTOR=_COLUMNFAMILY,
__module__="google.cloud.bigtable.admin_v2.proto.table_pb2",
__doc__="""A set of columns within a table which share a common configuration.
Attributes:
gc_rule:
Garbage collection rule specified as a protobuf. Must
serialize to at most 500 bytes. NOTE: Garbage collection
executes opportunistically in the background, and so it's
possible for reads to return a cell even if it matches the
active GC expression for its family.
""",
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.ColumnFamily)
),
)
_sym_db.RegisterMessage(ColumnFamily)
GcRule = _reflection.GeneratedProtocolMessageType(
"GcRule",
(_message.Message,),
dict(
Intersection=_reflection.GeneratedProtocolMessageType(
"Intersection",
(_message.Message,),
dict(
DESCRIPTOR=_GCRULE_INTERSECTION,
__module__="google.cloud.bigtable.admin_v2.proto.table_pb2",
__doc__="""A GcRule which deletes cells matching all of the given rules.
Attributes:
rules:
Only delete cells which would be deleted by every element of
``rules``.
""",
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.GcRule.Intersection)
),
),
Union=_reflection.GeneratedProtocolMessageType(
"Union",
(_message.Message,),
dict(
DESCRIPTOR=_GCRULE_UNION,
__module__="google.cloud.bigtable.admin_v2.proto.table_pb2",
__doc__="""A GcRule which deletes cells matching any of the given rules.
Attributes:
rules:
Delete cells which would be deleted by any element of
``rules``.
""",
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.GcRule.Union)
),
),
DESCRIPTOR=_GCRULE,
__module__="google.cloud.bigtable.admin_v2.proto.table_pb2",
__doc__="""Rule for determining which cells to delete during garbage collection.
Attributes:
rule:
Garbage collection rules.
max_num_versions:
Delete all cells in a column except the most recent N.
max_age:
Delete cells in a column older than the given age. Values must
be at least one millisecond, and will be truncated to
microsecond granularity.
intersection:
Delete cells that would be deleted by every nested rule.
union:
Delete cells that would be deleted by any nested rule.
""",
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.GcRule)
),
)
_sym_db.RegisterMessage(GcRule)
_sym_db.RegisterMessage(GcRule.Intersection)
_sym_db.RegisterMessage(GcRule.Union)
Snapshot = _reflection.GeneratedProtocolMessageType(
"Snapshot",
(_message.Message,),
dict(
DESCRIPTOR=_SNAPSHOT,
__module__="google.cloud.bigtable.admin_v2.proto.table_pb2",
__doc__="""A snapshot of a table at a particular time. A snapshot can be used as a
checkpoint for data restoration or a data source for a new table.
Note: This is a private alpha release of Cloud Bigtable snapshots. This
feature is not currently available to most Cloud Bigtable customers.
This feature might be changed in backward-incompatible ways and is not
recommended for production use. It is not subject to any SLA or
deprecation policy.
Attributes:
name:
(``OutputOnly``) The unique name of the snapshot. Values are
of the form ``projects/<project>/instances/<instance>/clusters
/<cluster>/snapshots/<snapshot>``.
source_table:
(``OutputOnly``) The source table at the time the snapshot was
taken.
data_size_bytes:
(``OutputOnly``) The size of the data in the source table at
the time the snapshot was taken. In some cases, this value may
be computed asynchronously via a background process and a
placeholder of 0 will be used in the meantime.
create_time:
(``OutputOnly``) The time when the snapshot is created.
delete_time:
(``OutputOnly``) The time when the snapshot will be deleted.
The maximum amount of time a snapshot can stay active is 365
days. If 'ttl' is not specified, the default maximum of 365
days will be used.
state:
(``OutputOnly``) The current state of the snapshot.
description:
(``OutputOnly``) Description of the snapshot.
""",
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.Snapshot)
),
)
_sym_db.RegisterMessage(Snapshot)
# Machine-generated by protoc — do not edit by hand; regenerate from table.proto.
# Serialized file-level options (java package, Go import path, C#/PHP
# namespaces) and the map-entry flag ("8\001") for the two map messages.
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(
    descriptor_pb2.FileOptions(),
    _b(
        "\n\034com.google.bigtable.admin.v2B\nTableProtoP\001Z=google.golang.org/genproto/googleapis/bigtable/admin/v2;admin\252\002\036Google.Cloud.Bigtable.Admin.V2\312\002\036Google\\Cloud\\Bigtable\\Admin\\V2"
    ),
)
_TABLE_CLUSTERSTATESENTRY.has_options = True
_TABLE_CLUSTERSTATESENTRY._options = _descriptor._ParseOptions(
    descriptor_pb2.MessageOptions(), _b("8\001")
)
_TABLE_COLUMNFAMILIESENTRY.has_options = True
_TABLE_COLUMNFAMILIESENTRY._options = _descriptor._ParseOptions(
    descriptor_pb2.MessageOptions(), _b("8\001")
)
# @@protoc_insertion_point(module_scope)
| apache-2.0 |
Enchufa2/video-tester | VideoTester/gui.py | 1 | 23890 | # coding=UTF8
## This file is part of VideoTester
## See https://github.com/Enchufa2/video-tester for more information
## Copyright 2011-2016 Iñaki Úcar <iucar@fedoraproject.org>
## This program is published under a GPLv3 license
import wx, wx.aui, pickle, textwrap, logging
import matplotlib as mpl
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as Canvas
from matplotlib.backends.backend_wx import NavigationToolbar2Wx as Toolbar
from gi.repository import Gst, GstVideo, GObject
from . import __version__, VTLOG, VTClient, netifaces, \
supported_codecs, supported_protocols
from .resources import getVTIcon, getVTBitmap
class FuncLog(logging.Handler):
    '''
    Logging handler that appends each formatted record, followed by a
    newline, to the end of a text control widget.
    '''
    def __init__(self, textctrl):
        super(FuncLog, self).__init__()
        self.textctrl = textctrl
    def emit(self, record):
        # Format first, then move the caret to the end and append.
        message = self.format(record)
        self.textctrl.SetInsertionPointEnd()
        self.textctrl.WriteText('%s\n' % message)
class VTframe(wx.Frame):
'''
Main window.
'''
    def __init__(self, conf):
        '''
        Build the main window: menu bar, status bar and the four notebook
        tabs (configuration, log, results, video), wire the event handlers
        and redirect VTLOG output to the log tab.

        :param conf: configuration passed straight to :class:`VTClient`.
        '''
        self.main = VTClient(conf)
        wx.Frame.__init__(self, None)
        self.SetIcon(getVTIcon())
        # Menu Bar
        self.vtmenubar = wx.MenuBar()
        menu = wx.Menu()
        self.m_files = menu.Append(wx.ID_OPEN, '&Open files...', 'Select Pickle files to plot')
        menu.AppendSeparator()
        self.m_exit = menu.Append(wx.ID_EXIT, 'E&xit', 'Exit program')
        self.vtmenubar.Append(menu, '&File')
        menu = wx.Menu()
        self.m_run = menu.Append(wx.ID_REFRESH, '&Run...', 'Run test')
        self.vtmenubar.Append(menu, 'R&un')
        menu = wx.Menu()
        self.m_about = menu.Append(wx.ID_ABOUT, '&About', 'About this program')
        self.vtmenubar.Append(menu, '&Help')
        self.SetMenuBar(self.vtmenubar)
        self.vtstatusbar = self.CreateStatusBar(1, 0)
        # Configuration tab: video/net options plus three groups of measure
        # checkboxes (QoS, bitstream, video quality).
        self.tabs = wx.Notebook(self, -1, style=0)
        self.conf_tab = wx.Panel(self.tabs, -1)
        self.video_label = wx.StaticText(self.conf_tab, -1, 'Choose a video:')
        self.video = wx.Choice(self.conf_tab, -1, choices=[x[0] for x in self.main.videos])
        self.codec_label = wx.StaticText(self.conf_tab, -1, 'Choose a codec:')
        self.codec = wx.Choice(self.conf_tab, -1, choices=supported_codecs.keys())
        self.bitrate_label = wx.StaticText(self.conf_tab, -1, 'Select the bitrate:')
        self.bitrate = wx.Slider(self.conf_tab, -1, self.main.conf['bitrate'], 64, 1024, style=wx.SL_HORIZONTAL | wx.SL_LABELS)
        self.framerate_label = wx.StaticText(self.conf_tab, -1, 'Select the framerate:')
        self.framerate = wx.Slider(self.conf_tab, -1, self.main.conf['framerate'], 1, 100, style=wx.SL_HORIZONTAL | wx.SL_LABELS)
        self.sb_video = wx.StaticBox(self.conf_tab, -1, 'Video options:')
        self.iface_label = wx.StaticText(self.conf_tab, -1, 'Interface:')
        self.iface = wx.Choice(self.conf_tab, -1, choices=netifaces)
        self.ip_label = wx.StaticText(self.conf_tab, -1, 'Server IP:')
        self.ip = wx.TextCtrl(self.conf_tab, -1, self.main.conf['ip'])
        self.port_label = wx.StaticText(self.conf_tab, -1, 'Server port:')
        self.port = wx.TextCtrl(self.conf_tab, -1, str(self.main.port))
        self.protocol = wx.RadioBox(self.conf_tab, -1, 'Protocol:', choices=supported_protocols, majorDimension=3, style=wx.RA_SPECIFY_COLS)
        self.sb_net = wx.StaticBox(self.conf_tab, -1, 'Net options:')
        # Each measure list pairs a config key with its checkbox widget.
        self.qos = []
        self.qos.append(('latency', wx.CheckBox(self.conf_tab, -1, 'Latency')))
        self.qos.append(('delta', wx.CheckBox(self.conf_tab, -1, 'Delta')))
        self.qos.append(('jitter', wx.CheckBox(self.conf_tab, -1, 'Jitter')))
        self.qos.append(('skew', wx.CheckBox(self.conf_tab, -1, 'Skew')))
        self.qos.append(('bandwidth', wx.CheckBox(self.conf_tab, -1, 'Bandwidth')))
        self.qos.append(('plr', wx.CheckBox(self.conf_tab, -1, 'Packet Loss Rate')))
        self.qos.append(('pld', wx.CheckBox(self.conf_tab, -1, 'Packet Loss Distribution')))
        self.sb_qos = wx.StaticBox(self.conf_tab, -1, 'QoS measures:')
        self.bs = []
        self.bs.append(('streameye', wx.CheckBox(self.conf_tab, -1, 'Stream Eye')))
        self.bs.append(('refstreameye', wx.CheckBox(self.conf_tab, -1, 'refStream Eye')))
        self.bs.append(('gop', wx.CheckBox(self.conf_tab, -1, 'GOP size')))
        self.bs.append(('iflr', wx.CheckBox(self.conf_tab, -1, 'I Frame Loss Rate')))
        self.sb_bs = wx.StaticBox(self.conf_tab, -1, 'BitStream measures:')
        self.vq = []
        self.vq.append(('psnr', wx.CheckBox(self.conf_tab, -1, 'PSNR')))
        self.vq.append(('ssim', wx.CheckBox(self.conf_tab, -1, 'SSIM')))
        self.vq.append(('g1070', wx.CheckBox(self.conf_tab, -1, 'G.1070')))
        self.vq.append(('psnrtomos', wx.CheckBox(self.conf_tab, -1, 'PSNRtoMOS')))
        self.vq.append(('miv', wx.CheckBox(self.conf_tab, -1, 'MIV')))
        self.sb_vq = wx.StaticBox(self.conf_tab, -1, 'Video quality measures:')
        # Remaining tabs: log output, result plots and the playback panel.
        self.log_tab = wx.Panel(self.tabs, -1)
        self.log = wx.TextCtrl(self.log_tab, -1, '', style=wx.TE_MULTILINE | wx.TE_READONLY)
        self.results_tab = PlotNotebook(self.tabs)
        self.video_tab = wx.Panel(self.tabs, -1)
        self.player = wx.Panel(self.video_tab, -1)
        self.player_button = wx.Button(self.video_tab, -1, 'Play', name='playvideo', size=(200, 50))
        self.__setProperties()
        self.__doLayout()
        self.__initVideo()
        # Event bindings.
        self.Bind(wx.EVT_MENU, self.onOpen, self.m_files)
        self.Bind(wx.EVT_MENU, self.onExit, self.m_exit)
        self.Bind(wx.EVT_MENU, self.onRun, self.m_run)
        self.Bind(wx.EVT_MENU, self.onAbout, self.m_about)
        self.Bind(wx.EVT_CLOSE, self.onCloseWindow)
        self.player_button.Bind(wx.EVT_BUTTON, self.onPlay)
        # Logging
        console = VTLOG.handlers[0]
        self.hdlr = FuncLog(self.log)
        self.hdlr.setLevel(console.level)
        # 40 == logging.ERROR: quiet the console handler while the GUI
        # handler takes over at the console's previous level.
        console.setLevel(40)
        self.hdlr.setFormatter(console.formatter)
        VTLOG.addHandler(self.hdlr)
def __setProperties(self):
self.SetTitle('Video Tester')
self.SetSize((800, 600))
self.Hide()
self.vtstatusbar.SetStatusWidths([-1])
vtstatusbar_fields = ['VT Client']
for i in range(len(vtstatusbar_fields)):
self.vtstatusbar.SetStatusText(vtstatusbar_fields[i], i)
self.video_label.SetMinSize((160, 17))
self.video.SetMinSize((120, 25))
self.video.SetSelection(zip(*self.main.videos)[0].index(self.main.conf['video']))
self.codec_label.SetMinSize((160, 17))
self.codec.SetMinSize((120, 25))
self.codec.SetSelection(supported_codecs.keys().index(self.main.conf['codec']))
self.bitrate_label.SetMinSize((160, 17))
self.bitrate.SetMinSize((210, 50))
self.framerate_label.SetMinSize((160, 17))
self.framerate.SetMinSize((210, 50))
self.iface_label.SetMinSize((140, 17))
self.iface.SetMinSize((80, 25))
self.iface.SetSelection(netifaces.index(self.main.conf['iface']))
self.ip_label.SetMinSize((140, 17))
self.ip.SetMinSize((150, 25))
self.port_label.SetMinSize((140, 17))
self.protocol.SetSelection(supported_protocols.index(self.main.conf['protocol']))
for name, el in self.qos + self.bs + self.vq:
if name in self.main.conf['qos'] + self.main.conf['bs'] + self.main.conf['vq']:
el.SetValue(True)
self.results_tab.Hide()
self.video_tab.Hide()
    def __doLayout(self):
        '''
        Assemble the sizer hierarchy: a 2x1 grid on the configuration tab
        (video/net option boxes on top, the three measure boxes below),
        plus the log, results and video tabs, and add all four pages to
        the notebook.
        '''
        sizer_body = wx.BoxSizer(wx.VERTICAL)
        sizer_log_tab = wx.BoxSizer(wx.HORIZONTAL)
        sizer_video_tab = wx.BoxSizer(wx.VERTICAL)
        sizer_conf_tab = wx.GridSizer(2, 1, 3, 3)
        sizer_conf_up = wx.GridSizer(1, 2, 0, 0)
        sizer_conf_down = wx.GridSizer(1, 3, 0, 0)
        sizer_conf_tab.Add(sizer_conf_up, 1, wx.EXPAND, 0)
        sizer_conf_tab.Add(sizer_conf_down, 1, wx.EXPAND, 0)
        sizer_video = wx.GridSizer(4, 1, 0, 0)
        sizer_net = wx.GridSizer(4, 1, 0, 0)
        sizer_qos = wx.BoxSizer(wx.VERTICAL)
        sizer_bs = wx.BoxSizer(wx.VERTICAL)
        sizer_vq = wx.BoxSizer(wx.VERTICAL)
        # Static boxes are Lower()ed so they stay behind their contents.
        self.sb_video.Lower()
        sizer_sb_video = wx.StaticBoxSizer(self.sb_video, wx.HORIZONTAL)
        sizer_sb_video.Add(sizer_video, 1, wx.EXPAND | wx.ALL, 10)
        self.sb_net.Lower()
        sizer_sb_net = wx.StaticBoxSizer(self.sb_net, wx.HORIZONTAL)
        sizer_sb_net.Add(sizer_net, 1, wx.EXPAND | wx.ALL, 10)
        self.sb_qos.Lower()
        sizer_sb_qos = wx.StaticBoxSizer(self.sb_qos, wx.HORIZONTAL)
        sizer_sb_qos.Add(sizer_qos, 1, wx.EXPAND | wx.ALL, 10)
        self.sb_bs.Lower()
        sizer_sb_bs = wx.StaticBoxSizer(self.sb_bs, wx.HORIZONTAL)
        sizer_sb_bs.Add(sizer_bs, 1, wx.EXPAND | wx.ALL, 10)
        self.sb_vq.Lower()
        sizer_sb_vq = wx.StaticBoxSizer(self.sb_vq, wx.HORIZONTAL)
        sizer_sb_vq.Add(sizer_vq, 1, wx.EXPAND | wx.ALL, 10)
        # Label/control rows for the video options box.
        sizer_videobox = wx.BoxSizer(wx.HORIZONTAL)
        sizer_videobox.Add(self.video_label, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_videobox.Add(self.video, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_codec = wx.BoxSizer(wx.HORIZONTAL)
        sizer_codec.Add(self.codec_label, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_codec.Add(self.codec, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_bitrate = wx.BoxSizer(wx.HORIZONTAL)
        sizer_bitrate.Add(self.bitrate_label, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_bitrate.Add(self.bitrate, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_framerate = wx.BoxSizer(wx.HORIZONTAL)
        sizer_framerate.Add(self.framerate_label, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_framerate.Add(self.framerate, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_video.Add(sizer_videobox, 1, wx.EXPAND, 0)
        sizer_video.Add(sizer_codec, 1, wx.EXPAND, 0)
        sizer_video.Add(sizer_bitrate, 1, wx.EXPAND, 0)
        sizer_video.Add(sizer_framerate, 1, wx.EXPAND, 0)
        # Label/control rows for the net options box.
        sizer_iface = wx.BoxSizer(wx.HORIZONTAL)
        sizer_iface.Add(self.iface_label, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_iface.Add(self.iface, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_ip = wx.BoxSizer(wx.HORIZONTAL)
        sizer_ip.Add(self.ip_label, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_ip.Add(self.ip, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_port = wx.BoxSizer(wx.HORIZONTAL)
        sizer_port.Add(self.port_label, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_port.Add(self.port, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_net.Add(sizer_iface, 1, wx.EXPAND, 0)
        sizer_net.Add(sizer_ip, 1, wx.EXPAND, 0)
        sizer_net.Add(sizer_port, 1, wx.EXPAND, 0)
        sizer_net.Add(self.protocol, 0, wx.EXPAND, 0)
        # One checkbox per row in each measure box.
        for name, el in self.qos:
            sizer_qos.Add(el, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        for name, el in self.bs:
            sizer_bs.Add(el, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        for name, el in self.vq:
            sizer_vq.Add(el, 0, wx.ALIGN_CENTER_VERTICAL | wx.ADJUST_MINSIZE, 0)
        sizer_conf_up.Add(sizer_sb_video, 1, wx.EXPAND | wx.ALL^wx.BOTTOM, 10)
        sizer_conf_up.Add(sizer_sb_net, 1, wx.EXPAND | wx.ALL^wx.BOTTOM, 10)
        sizer_conf_down.Add(sizer_sb_qos, 1, wx.EXPAND | wx.ALL, 10)
        sizer_conf_down.Add(sizer_sb_bs, 1, wx.EXPAND | wx.ALL, 10)
        sizer_conf_down.Add(sizer_sb_vq, 1, wx.EXPAND | wx.ALL, 10)
        self.conf_tab.SetSizer(sizer_conf_tab)
        sizer_log_tab.Add(self.log, 1, wx.EXPAND | wx.ADJUST_MINSIZE, 0)
        self.log_tab.SetSizer(sizer_log_tab)
        sizer_video_tab.Add(self.player, 1, wx.EXPAND, 0)
        sizer_video_tab.Add(self.player_button, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALL, 30)
        self.video_tab.SetSizer(sizer_video_tab)
        self.video_tab.SetBackgroundColour((0, 0, 0))
        self.tabs.AddPage(self.conf_tab, 'Configuration')
        self.tabs.AddPage(self.log_tab, 'Log')
        self.tabs.AddPage(self.results_tab, 'Results')
        self.tabs.AddPage(self.video_tab, 'Video')
        sizer_body.Add(self.tabs, 1, wx.EXPAND, 0)
        self.SetSizer(sizer_body)
        self.Layout()
        self.Centre()
    def __initVideo(self):
        '''
        Build the comparison playback pipeline: three raw-video file
        sources (labelled "Original", "Coded" and "Received" via
        textoverlay) mixed side by side by a videomixer into an
        xvimagesink. File locations, frame sizes and mixer pane offsets
        are filled in later (see onPlay); only the framerate is fixed here
        from the client configuration.
        '''
        # NOTE(review): the launch string is parsed by Gst.parse_launch, so
        # its exact whitespace/continuation layout is part of the pipeline
        # description — keep it byte-identical when touching this method.
        self.pipeline = Gst.parse_launch(
            'filesrc name=video1 filesrc name=video2 filesrc name=video3 \
            videomixer name=mix ! xvimagesink \
            video1. \
            ! queue ! videoparse framerate=%s/1 name=parser1 \
            ! textoverlay font-desc="Sans 24" text="Original" \
            valignment=top halignment=left shaded-background=true \
            ! videoscale \
            ! mix.sink_1 \
            video2. \
            ! queue ! videoparse framerate=%s/1 name=parser2 \
            ! textoverlay font-desc="Sans 24" text="Coded" \
            valignment=top halignment=left shaded-background=true \
            ! videoscale \
            ! mix.sink_2 \
            video3. \
            ! queue ! videoparse framerate=%s/1 name=parser3 \
            ! textoverlay font-desc="Sans 24" text="Received" \
            valignment=top halignment=left shaded-background=true \
            ! videoscale \
            ! mix.sink_3' % (
                self.main.conf['framerate'],
                self.main.conf['framerate'],
                self.main.conf['framerate']
            ))
        # Watch the bus so messages reach the handlers defined further down
        # the class (onMessage / onSyncMessage).
        bus = self.pipeline.get_bus()
        bus.add_signal_watch()
        bus.enable_sync_message_emission()
        bus.connect('message', self.onMessage)
        bus.connect('sync-message::element', self.onSyncMessage)
    def onExit(self, event):
        '''
        Menu handler: request window close (this fires wx.EVT_CLOSE, which
        is handled by onCloseWindow).
        '''
        self.Close(True)
def onCloseWindow(self, event):
'''
Show a dialog to verify exit.
'''
# dialog to verify exit (including menuExit)
dlg = wx.MessageDialog(self, 'Do you want to exit?', 'Exit', wx.YES_NO | wx.ICON_QUESTION)
result = dlg.ShowModal()
dlg.Destroy()
if result == wx.ID_YES:
try:
self.pipeline.set_state(Gst.State.NULL)
except:
pass
VTLOG.removeHandler(self.hdlr)
self.Destroy() # frame
def onAbout(self, event):
'''
Show *About* dialog.
'''
license = textwrap.dedent('''\
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.''')
info = wx.AboutDialogInfo()
info.SetIcon(getVTIcon())
info.SetName('Video Tester')
info.SetVersion('version ' + __version__)
info.SetDescription('Video Quality Assessment Tool')
info.SetCopyright('(C) 2011-2016 Iñaki Úcar')
info.SetWebSite('https://github.com/Enchufa2/video-tester')
info.SetLicense(license)
info.AddDeveloper('Iñaki Úcar <iucar@fedoraproject.org>')
info.AddDocWriter('Iñaki Úcar <iucar@fedoraproject.org>')
info.AddArtist('Almudena M. Castro <almudena.m.castro@gmail.com>')
wx.AboutBox(info)
def onOpen(self, event):
    '''
    Show *Open files* dialog.

    Loads one or more pickled result files and displays them in the
    *Results* tab.
    '''
    self.video_tab.Hide()
    wildcard = u'Pickle files (*.pkl)|*.pkl'
    dlg = wx.FileDialog(self, u'Open files', '', '', wildcard, wx.FD_MULTIPLE)
    try:
        if dlg.ShowModal() == wx.ID_OK:
            results = []
            for filename in dlg.GetFilenames():
                # Context manager guarantees the file is closed even if
                # unpickling raises.
                with open(dlg.GetDirectory() + '/' + filename, 'rb') as f:
                    results.append(pickle.load(f))
            self.__setResults(results)
            self.tabs.SetSelection(2)
    finally:
        # The original destroyed the dialog only on the OK path,
        # leaking it when the user cancelled.
        dlg.Destroy()
def onRun(self, event):
    '''
    Run VT Client.

    Disables the configuration UI while the measurement runs, then
    plots the returned results (if any) and re-enables the UI.
    '''
    self.conf_tab.Disable()
    self.vtmenubar.Disable()
    self.results_tab.Hide()
    self.video_tab.Hide()
    self.tabs.SetSelection(1)
    self.vtstatusbar.SetStatusText('Running...')
    # Push the widget values into self.main before running.
    self.__setValues()
    # Blocking call; returns (paths, caps, results) on success, falsy
    # on failure.
    ret = self.main.run()
    if ret:
        self.paths, self.caps, results = ret
        self.__setResults(results)
        self.video_tab.Show()
    self.conf_tab.Enable()
    # NOTE(review): Enable is invoked through wx.Window explicitly,
    # presumably to bypass a menubar-specific override -- confirm.
    wx.Window.Enable(self.vtmenubar)
    self.vtstatusbar.SetStatusText('Stopped')
def onPlay(self, event):
    '''
    Play video files.

    Toggles between playing and stopping the comparison pipeline built
    in the constructor (original / coded / received videos).
    '''
    if self.player_button.GetLabel() == 'Play':
        self.player_button.SetLabel('Stop')
        # Point the three file sources at the selected video files.
        video1 = self.pipeline.get_by_name('video1')
        video2 = self.pipeline.get_by_name('video2')
        video3 = self.pipeline.get_by_name('video3')
        video1.props.location = self.paths['original'][1]
        video2.props.location = self.paths['coded'][1]
        video3.props.location = self.paths['received'][1]
        parser1 = self.pipeline.get_by_name('parser1')
        parser2 = self.pipeline.get_by_name('parser2')
        parser3 = self.pipeline.get_by_name('parser3')
        mix = self.pipeline.get_by_name('mix')
        # Lay the streams side by side in the mixer: each pad offset by
        # a multiple of the frame width.
        sink_2 = mix.get_child_by_name('sink_2')
        sink_3 = mix.get_child_by_name('sink_3')
        sink_2.props.xpos = self.caps['width'] * 2
        sink_3.props.xpos = self.caps['width']
        # All three parsers use the negotiated frame geometry.
        parser1.props.width = self.caps['width']
        parser1.props.height = self.caps['height']
        parser2.props.width = self.caps['width']
        parser2.props.height = self.caps['height']
        parser3.props.width = self.caps['width']
        parser3.props.height = self.caps['height']
        self.pipeline.set_state(Gst.State.PLAYING)
    else:
        self.player_button.SetLabel('Play')
        self.pipeline.set_state(Gst.State.NULL)
def onSyncMessage(self, bus, message):
    '''
    Embed the GStreamer video overlay into the *Video* tab when the
    sink asks for a window handle.
    '''
    if GstVideo.is_video_overlay_prepare_window_handle_message(message):
        message.src.set_property('force-aspect-ratio', True)
        message.src.set_window_handle(self.video_tab.GetHandle())
def onMessage(self, bus, message):
    '''
    Bus handler: stop the pipeline and reset the play button on
    end-of-stream or error.
    '''
    if message.type in (Gst.MessageType.EOS, Gst.MessageType.ERROR):
        self.pipeline.set_state(Gst.State.NULL)
        self.player_button.SetLabel('Play')
def __setValues(self):
    '''
    Set configuration options.

    Copies the current widget values into ``self.main`` (the client
    object) before a measurement run.
    '''
    self.main.conf['bitrate'] = int(self.bitrate.GetValue())
    self.main.conf['framerate'] = int(self.framerate.GetValue())
    self.main.conf['video'] = str(self.video.GetStringSelection())
    self.main.conf['codec'] = str(self.codec.GetStringSelection())
    self.main.conf['iface'] = str(self.iface.GetStringSelection())
    self.main.conf['ip'] = str(self.ip.GetValue())
    self.main.port = int(self.port.GetValue())
    self.main.conf['protocol'] = str(self.protocol.GetStringSelection())
    # self.qos / self.bs / self.vq are sequences of (measure name,
    # widget) pairs -- presumably checkboxes, since GetValue() gates
    # inclusion; TODO confirm. Keep only the selected names.
    qos = []
    for name, el in self.qos:
        if el.GetValue():
            qos.append(name)
    self.main.conf['qos'] = qos
    bs = []
    for name, el in self.bs:
        if el.GetValue():
            bs.append(name)
    self.main.conf['bs'] = bs
    vq = []
    for name, el in self.vq:
        if el.GetValue():
            vq.append(name)
    self.main.conf['vq'] = vq
def __setResults(self, results):
    '''
    Plot measures and show *Results* tab.

    Args:
        results: iterable of measure dicts carrying at least ``name``
            and ``type``; the remaining keys depend on ``type``
            ('plot', 'value', 'bar' or 'videoframes').
    '''
    self.results_tab.removePages()
    for measure in results:
        # One notebook tab per measure; add() returns its figure.
        axes = self.results_tab.add(measure['name']).gca()
        if measure['type'] == 'plot':
            # Series in blue, mean in green, min/max levels in red.
            axes.plot(measure['axes'][0], measure['axes'][1], 'b')
            axes.plot(measure['axes'][0], [measure['mean'] for i in measure['axes'][0]], 'g')
            axes.plot(measure['axes'][0], [measure['max'][1] for i in measure['axes'][0]], 'r')
            axes.plot(measure['axes'][0], [measure['min'][1] for i in measure['axes'][0]], 'r')
            axes.set_xlabel(measure['units'][0])
            axes.set_ylabel(measure['units'][1])
        elif measure['type'] == 'value':
            # Single scalar: one bar labelled with the measure name.
            width = 1
            axes.bar([0.5], measure['value'], width=width)
            axes.set_ylabel(measure['units'])
            axes.set_xticks([1])
            axes.set_xlim(0, 2)
            axes.set_xticklabels([measure['name']])
        elif measure['type'] == 'bar':
            axes.bar(measure['axes'][0], measure['axes'][1], width=measure['width'])
            axes.plot(measure['axes'][0], [measure['mean'] for i in measure['axes'][0]], 'g')
            axes.plot(measure['axes'][0], [measure['max'][1] for i in measure['axes'][0]], 'r')
            axes.plot(measure['axes'][0], [measure['min'][1] for i in measure['axes'][0]], 'r')
            axes.set_xlabel(measure['units'][0])
            axes.set_ylabel(measure['units'][1])
        elif measure['type'] == 'videoframes':
            # Per-frame-type counts: B green, P blue, I red.
            axes.bar(measure['axes'][0], measure['axes'][1]['B'], width=1, color='g')
            axes.bar(measure['axes'][0], measure['axes'][1]['P'], width=1, color='b')
            axes.bar(measure['axes'][0], measure['axes'][1]['I'], width=1, color='r')
            axes.set_xlabel(measure['units'][0])
            axes.set_ylabel(measure['units'][1])
    self.results_tab.Show()
class Plot(wx.Panel):
    '''
    Plot panel.

    Wraps a Matplotlib figure, its rendering canvas and the standard
    navigation toolbar in a vertically stacked wx panel.
    '''
    def __init__(self, parent, id=-1, dpi=None, **kwargs):
        wx.Panel.__init__(self, parent, id=id, **kwargs)
        self.figure = mpl.figure.Figure(dpi=dpi, figsize=(2, 2))
        self.canvas = Canvas(self, -1, self.figure)
        self.toolbar = Toolbar(self.canvas)
        self.toolbar.Realize()
        layout = wx.BoxSizer(wx.VERTICAL)
        layout.Add(self.canvas, 1, wx.EXPAND)
        layout.Add(self.toolbar, 0, wx.LEFT | wx.EXPAND)
        self.SetSizer(layout)
class PlotNotebook(wx.Panel):
    '''
    Tab-style plotting panel.

    Hosts one Matplotlib ``Plot`` panel per tab inside an AUI notebook.
    '''
    def __init__(self, parent, id=-1):
        wx.Panel.__init__(self, parent, id=id)
        self.nb = wx.aui.AuiNotebook(self)
        sizer = wx.BoxSizer()
        sizer.Add(self.nb, 1, wx.EXPAND)
        self.SetSizer(sizer)
        # One entry per live notebook page, in tab order.
        self.pages = []

    def add(self, name='plot'):
        '''
        Add a tab.

        Returns the Matplotlib figure of the new page so the caller
        can draw on it.
        '''
        page = Plot(self.nb)
        self.pages.append(page)
        self.nb.AddPage(page, name)
        return page.figure

    def removePages(self):
        '''
        Remove all tabs.
        '''
        # Delete one notebook page per tracked Plot, then reset the
        # tracking list. The original never cleared ``self.pages``, so
        # a second add/remove cycle issued DeletePage for pages that no
        # longer existed.
        for _ in self.pages:
            self.nb.DeletePage(0)
        self.pages = []
class VTApp(wx.App):
    '''
    WxPython application class.
    '''
    def __init__(self, conf):
        # Store the configuration before wx.App.__init__, which in turn
        # triggers OnInit below.
        self.conf = conf
        wx.App.__init__(self)

    def OnInit(self):
        '''
        Create and show the main frame.
        '''
        frame = VTframe(self.conf)
        self.SetTopWindow(frame)
        frame.Show()
        return True
| gpl-3.0 |
h3biomed/ansible | lib/ansible/modules/network/onyx/onyx_vlan.py | 57 | 6466 | #!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: onyx_vlan
version_added: "2.5"
author: "Samer Deeb (@samerd) Alex Tabachnik (@atabachnik)"
short_description: Manage VLANs on Mellanox ONYX network devices
description:
- This module provides declarative management of VLANs
on Mellanox ONYX network devices.
options:
name:
description:
- Name of the VLAN.
vlan_id:
description:
- ID of the VLAN.
aggregate:
description: List of VLANs definitions.
purge:
description:
- Purge VLANs not defined in the I(aggregate) parameter.
default: no
type: bool
state:
description:
- State of the VLAN configuration.
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: configure VLAN ID and name
onyx_vlan:
vlan_id: 20
name: test-vlan
- name: remove configuration
onyx_vlan:
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always.
type: list
sample:
- vlan 20
- name test-vlan
- exit
"""
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.network.common.utils import remove_default_spec
from ansible.module_utils.network.onyx.onyx import BaseOnyxModule
from ansible.module_utils.network.onyx.onyx import show_cmd
class OnyxVlanModule(BaseOnyxModule):
    """Ansible module implementation managing VLANs on Mellanox ONYX
    devices: builds the requested state, reads the device state and
    emits the CLI commands that reconcile the two.
    """
    # When True, VLANs present on the device but absent from the
    # requested configuration are removed.
    _purge = False

    @classmethod
    def _get_element_spec(cls):
        # Argument spec of a single VLAN definition.
        return dict(
            vlan_id=dict(type='int'),
            name=dict(type='str'),
            state=dict(default='present', choices=['present', 'absent']),
        )

    @classmethod
    def _get_aggregate_spec(cls, element_spec):
        # Spec for items inside the 'aggregate' list; there, vlan_id is
        # mandatory.
        aggregate_spec = deepcopy(element_spec)
        aggregate_spec['vlan_id'] = dict(required=True)
        # remove default in aggregate spec, to handle common arguments
        remove_default_spec(aggregate_spec)
        return aggregate_spec

    def init_module(self):
        """ module initialization
        """
        element_spec = self._get_element_spec()
        aggregate_spec = self._get_aggregate_spec(element_spec)
        argument_spec = dict(
            aggregate=dict(type='list', elements='dict',
                           options=aggregate_spec),
            purge=dict(default=False, type='bool'),
        )
        argument_spec.update(element_spec)
        # Exactly one of vlan_id / aggregate must be supplied.
        required_one_of = [['vlan_id', 'aggregate']]
        mutually_exclusive = [['vlan_id', 'aggregate']]
        self._module = AnsibleModule(
            argument_spec=argument_spec,
            required_one_of=required_one_of,
            mutually_exclusive=mutually_exclusive,
            supports_check_mode=True)

    def validate_vlan_id(self, value):
        # Only valid 802.1Q VLAN IDs are accepted.
        if value and not 1 <= int(value) <= 4094:
            self._module.fail_json(msg='vlan id must be between 1 and 4094')

    def get_required_config(self):
        """Build self._required_config from the module parameters,
        handling both the 'aggregate' list and the single-VLAN form.
        """
        self._required_config = list()
        module_params = self._module.params
        aggregate = module_params.get('aggregate')
        self._purge = module_params.get('purge', False)
        if aggregate:
            for item in aggregate:
                # Fill unset per-item keys from the top-level params.
                for key in item:
                    if item.get(key) is None:
                        item[key] = module_params[key]
                self.validate_param_values(item, item)
                req_item = item.copy()
                # vlan_id arrives as a string in aggregate mode.
                req_item['vlan_id'] = int(req_item['vlan_id'])
                self._required_config.append(req_item)
        else:
            params = {
                'vlan_id': module_params['vlan_id'],
                'name': module_params['name'],
                'state': module_params['state'],
            }
            self.validate_param_values(params)
            self._required_config.append(params)

    def _create_vlan_data(self, vlan_id, vlan_data):
        # Newer ONYX API versions wrap the VLAN attributes in a
        # one-element list.
        if self._os_version >= self.ONYX_API_VERSION:
            vlan_data = vlan_data[0]
        return {
            'vlan_id': vlan_id,
            'name': self.get_config_attr(vlan_data, 'Name')
        }

    def _get_vlan_config(self):
        # Raw "show vlan" output keyed by VLAN id.
        return show_cmd(self._module, "show vlan")

    def load_current_config(self):
        # called in base class in run function
        self._os_version = self._get_os_version()
        self._current_config = dict()
        vlan_config = self._get_vlan_config()
        if not vlan_config:
            return
        for vlan_id, vlan_data in iteritems(vlan_config):
            try:
                vlan_id = int(vlan_id)
            except ValueError:
                # Skip non-numeric keys in the "show vlan" output.
                continue
            self._current_config[vlan_id] = \
                self._create_vlan_data(vlan_id, vlan_data)

    def generate_commands(self):
        """Diff required vs. current config and append the CLI commands
        that reconcile them to self._commands.
        """
        req_vlans = set()
        for req_conf in self._required_config:
            state = req_conf['state']
            vlan_id = req_conf['vlan_id']
            if state == 'absent':
                if vlan_id in self._current_config:
                    self._commands.append('no vlan %s' % vlan_id)
            else:
                req_vlans.add(vlan_id)
                self._generate_vlan_commands(vlan_id, req_conf)
        if self._purge:
            # Drop device VLANs that were not requested.
            for vlan_id in self._current_config:
                if vlan_id not in req_vlans:
                    self._commands.append('no vlan %s' % vlan_id)

    def _generate_vlan_commands(self, vlan_id, req_conf):
        curr_vlan = self._current_config.get(vlan_id, {})
        if not curr_vlan:
            # VLAN does not exist on the device yet: create it.
            self._commands.append("vlan %s" % vlan_id)
            self._commands.append("exit")
        req_name = req_conf['name']
        curr_name = curr_vlan.get('name')
        if req_name:
            if req_name != curr_name:
                self._commands.append("vlan %s name %s" % (vlan_id, req_name))
        elif req_name is not None:
            # Empty (but not None) name requests clearing any
            # configured name.
            if curr_name:
                self._commands.append("vlan %s no name" % vlan_id)
def main():
    """Module entry point: delegate to the OnyxVlanModule runner."""
    OnyxVlanModule.main()


if __name__ == '__main__':
    main()
| gpl-3.0 |
hell03610/python-koans | python2/koans/about_proxy_object_project.py | 28 | 4194 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Project: Create a Proxy Class
#
# In this assignment, create a proxy class (one is started for you
# below). You should be able to initialize the proxy object with any
# object. Any attributes called on the proxy object should be forwarded
# to the target object. As each attribute call is sent, the proxy should
# record the name of the attribute sent.
#
# The proxy class is started for you. You will need to add a method
# missing handler and any other supporting methods. The specification
# of the Proxy class is given in the AboutProxyObjectProject koan.
# Note: This is a bit trickier than its Ruby Koans counterpart, but you
# can do it!
from runner.koan import *
class Proxy(object):
    '''
    Record-and-forward proxy (implements the AboutProxyObjectProject
    specification below).

    Wraps an arbitrary target object: every attribute read or write on
    the proxy is recorded by name and then forwarded to the target.
    The recording helpers (``messages``, ``was_called``,
    ``number_of_times_called``) resolve on the proxy itself and are
    therefore neither forwarded nor recorded.
    '''
    def __init__(self, target_object):
        # Recorded attribute names, in access order (reads and writes).
        self._messages = []
        # Initialize '_obj' attribute last: __setattr__ uses its
        # presence to distinguish "still under construction" from
        # "forward this assignment to the target".
        self._obj = target_object

    def __getattr__(self, name):
        # Called only for attributes NOT found on the proxy itself,
        # i.e. everything that should go to the target. Missing target
        # attributes raise AttributeError as usual via getattr().
        self._messages.append(name)
        return getattr(self._obj, name)

    def __setattr__(self, name, value):
        if '_obj' in self.__dict__:
            # Construction finished: record and forward the assignment.
            self._messages.append(name)
            setattr(self._obj, name, value)
        else:
            # Still inside __init__: set on the proxy itself.
            object.__setattr__(self, name, value)

    def messages(self):
        '''Return the list of attribute names sent to the target.'''
        return self._messages

    def was_called(self, name):
        '''Return True if attribute *name* was ever forwarded.'''
        return name in self._messages

    def number_of_times_called(self, name):
        '''Return how many times attribute *name* was forwarded.'''
        return self._messages.count(name)
# The proxy object should pass the following Koan:
#
class AboutProxyObjectProject(Koan):
    '''
    Specification of the Proxy class above, expressed as koans.
    '''
    def test_proxy_method_returns_wrapped_object(self):
        # NOTE: The Television class is defined below
        tv = Proxy(Television())
        self.assertTrue(isinstance(tv, Proxy))

    def test_tv_methods_still_perform_their_function(self):
        tv = Proxy(Television())
        tv.channel = 10
        tv.power()
        self.assertEqual(10, tv.channel)
        self.assertTrue(tv.is_on())

    def test_proxy_records_messages_sent_to_tv(self):
        tv = Proxy(Television())
        tv.power()
        tv.channel = 10
        self.assertEqual(['power', 'channel'], tv.messages())

    def test_proxy_handles_invalid_messages(self):
        tv = Proxy(Television())
        ex = None
        try:
            tv.no_such_method()
        except AttributeError as ex:
            pass
        # NOTE(review): relies on Python 2 scoping -- under Python 3
        # the 'as ex' target is unbound after the except block, so
        # reading 'ex' here would raise NameError instead.
        self.assertEqual(AttributeError, type(ex))

    def test_proxy_reports_methods_have_been_called(self):
        tv = Proxy(Television())
        tv.power()
        tv.power()
        self.assertTrue(tv.was_called('power'))
        self.assertFalse(tv.was_called('channel'))

    def test_proxy_counts_method_calls(self):
        tv = Proxy(Television())
        tv.power()
        tv.channel = 48
        tv.power()
        self.assertEqual(2, tv.number_of_times_called('power'))
        self.assertEqual(1, tv.number_of_times_called('channel'))
        self.assertEqual(0, tv.number_of_times_called('is_on'))

    def test_proxy_can_record_more_than_just_tv_objects(self):
        # The proxy must work with any target, e.g. a plain string.
        proxy = Proxy("Py Ohio 2010")
        result = proxy.upper()
        self.assertEqual("PY OHIO 2010", result)
        result = proxy.split()
        self.assertEqual(["Py", "Ohio", "2010"], result)
        self.assertEqual(['upper', 'split'], proxy.messages())
# ====================================================================
# The following code is to support the testing of the Proxy class. No
# changes should be necessary to anything below this comment.
# Example class using in the proxy testing above.
class Television(object):
    '''
    Minimal television model used to exercise the Proxy class.

    Tracks a selected channel and a power state that toggles between
    'on' and 'off' on each call to ``power``.
    '''
    def __init__(self):
        self._channel = None
        self._power = None

    @property
    def channel(self):
        '''Currently selected channel (None until first set).'''
        return self._channel

    @channel.setter
    def channel(self, value):
        self._channel = value

    def power(self):
        '''Toggle the power state; the first call turns the set on.'''
        self._power = 'off' if self._power == 'on' else 'on'

    def is_on(self):
        '''Return True while the television is powered on.'''
        return self._power == 'on'
# Tests for the Television class. All of theses tests should pass.
class TelevisionTest(Koan):
    '''
    Sanity tests for the Television helper class used by the proxy
    koans above.
    '''
    def test_it_turns_on(self):
        tv = Television()
        tv.power()
        self.assertTrue(tv.is_on())

    def test_it_also_turns_off(self):
        tv = Television()
        tv.power()
        tv.power()
        self.assertFalse(tv.is_on())

    def test_edge_case_on_off(self):
        # An odd number of toggles leaves the set on; one more turns
        # it off again.
        tv = Television()
        tv.power()
        tv.power()
        tv.power()
        self.assertTrue(tv.is_on())
        tv.power()
        self.assertFalse(tv.is_on())

    def test_can_set_the_channel(self):
        tv = Television()
        tv.channel = 11
        self.assertEqual(11, tv.channel)
| mit |
jiejieling/RdsMonitor | src/dataprovider/sqliteprovider.py | 1 | 4256 | import os, sys
from api.util import settings
import contextlib
import sqlite3
import json
class RedisStatsProvider(object):
    """A SQLite based persistence layer to store and fetch Redis stats.
    """

    def __init__(self):
        # Truthy values of the environment flag enable stderr
        # diagnostics on failed writes.
        self.DEBUG = os.environ.get('RdsMonitor_DEBUG', 0)
        stats = settings.settings().get_sqlite_stats_store()
        self.location = stats.get('path', 'db/redislive.dat')
        # SQLite permits a single writer at a time, so each write is
        # retried up to this many times (see _retry_query).
        self.retries = 10
        self.conn = sqlite3.connect(self.location)

    def _log_error(self, operation, server, err):
        """Write one diagnostic line to stderr when DEBUG is enabled.

        Replaces the Python-2-only ``print >>sys.stderr`` used by the
        original (three copies of the same statement).
        """
        if self.DEBUG:
            sys.stderr.write('%s [server = %s] to sqlite %s err:%s\n'
                             % (operation, server, self.location, err))

    def save_memory_info(self, server, timestamp, used, peak):
        """Saves used and peak memory stats.

        Args:
            server (str): The server ID
            timestamp (datetime): The time of the info.
            used (int): Used memory value.
            peak (int): Peak memory value.

        Returns:
            bool: True on success, False otherwise.
        """
        query = "INSERT INTO memory VALUES (?, ?, ?, ?);"
        values = (timestamp, used, peak, server)
        try:
            self._retry_query(query, values)
            return True
        except Exception as e:
            self._log_error('Save memory info', server, e)
            return False

    def save_command_info(self, server, timestamp, total_command):
        """Saves processed-commands stats.

        Args:
            server (str): The server ID
            timestamp (datetime): The time of the info.
            total_command (int): total command value.

        Returns:
            bool: True on success, False otherwise.
        """
        query = "INSERT INTO command VALUES (?, ?, ?);"
        values = (timestamp, total_command, server)
        try:
            self._retry_query(query, values)
            return True
        except Exception as e:
            self._log_error('Save command info', server, e)
            return False

    def save_info_command(self, server, timestamp, info):
        """Save a Redis INFO command dump as JSON.

        Args:
            server (str): id of server
            timestamp (datetime): Timestamp.
            info (dict): The result of a Redis INFO command.

        Returns:
            bool: True on success, False otherwise.
        """
        query = "INSERT INTO info VALUES (?, ?, ?);"
        values = (timestamp, json.dumps(info), server)
        try:
            self._retry_query(query, values)
            return True
        except Exception as e:
            self._log_error('Save info command', server, e)
            return False

    def get_info(self, server):
        """Get the most recent INFO dump stored for the server.

        Args:
            server (str): The server ID
        """
        # The original concatenated the two query parts without a
        # separating space ("server=?ORDER BY"); SQLite tokenized it
        # anyway, but the space is restored here for clarity.
        query = ("SELECT info FROM info WHERE server=? "
                 "ORDER BY datetime DESC LIMIT 1;")
        with contextlib.closing(self.conn.cursor()) as c:
            for r in c.execute(query, (server,)):
                return json.loads(r[0])

    def get_memory_info(self, server, from_date, to_date):
        """Get stats for Memory Consumption between a range of dates.

        Args:
            server (str): The server ID
            from_date (datetime): Get memory info from this date onwards.
            to_date (datetime): Get memory info up to this date.
        """
        query = """SELECT datetime, max, current
                   FROM memory
                   WHERE datetime >= ?
                   AND datetime <= ?
                   AND server = ?;"""
        values = (from_date, to_date, server)
        with contextlib.closing(self.conn.cursor()) as c:
            return [[r[0], r[1], r[2]] for r in c.execute(query, values)]

    def get_command_stats(self, server, from_date, to_date):
        """Get total commands processed in the given time period.

        Args:
            server (str): The server ID
            from_date (datetime): Get data from this date.
            to_date (datetime): Get data to this date.
        """
        # NOTE(review): save_command_info() inserts only three values
        # (datetime, count, server) while this query references four
        # columns (datetime, max, current, server) -- verify that the
        # 'command' table schema actually satisfies both.
        query = """SELECT datetime, max, current
                   FROM command
                   WHERE datetime >= ?
                   AND datetime <= ?
                   AND server = ?;"""
        values = (from_date, to_date, server)
        with contextlib.closing(self.conn.cursor()) as c:
            return [[r[0], r[1], r[2]] for r in c.execute(query, values)]

    def _retry_query(self, query, values=None):
        """Run a SQLite query until it sticks or until we reach the max
        number of retries. Single-threaded writes :(

        If every attempt fails, the failure is silent (no exception is
        raised) -- preserved from the original semantics.

        Args:
            query (str): The query to execute.

        Kwargs:
            values (tuple|dict): Used when the query is parameterized.
        """
        with contextlib.closing(self.conn.cursor()) as cursor:
            completed = False
            counter = 0
            while counter < self.retries and not completed:
                counter += 1
                try:
                    cursor.execute(query, values)
                    self.conn.commit()
                    completed = True
                except Exception:
                    # FIXME: Catch specific exceptions here otherwise it's
                    # likely to mask bugs/issues later. Kept broad to
                    # preserve the original best-effort retry behaviour.
                    pass
| mit |
ROB-Seismology/oq-hazardlib | openquake/hazardlib/tests/tom_test.py | 1 | 2343 | # The Hazard Library
# Copyright (C) 2012 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import numpy
from openquake.hazardlib.tom import PoissonTOM
class PoissonTOMTestCase(unittest.TestCase):
    '''Unit tests for the Poissonian temporal occurrence model.'''

    def test_non_positive_time_span(self):
        # The model requires a strictly positive time span.
        self.assertRaises(ValueError, PoissonTOM, -1)
        self.assertRaises(ValueError, PoissonTOM, 0)

    def test_get_probability_one_or_more_occurrences(self):
        pdf = PoissonTOM(time_span=50)
        # Very high rates saturate the probability at 1.
        self.assertEqual(pdf.get_probability_one_or_more_occurrences(10), 1)
        aae = self.assertAlmostEqual
        aae(pdf.get_probability_one_or_more_occurrences(0.1), 0.9932621)
        aae(pdf.get_probability_one_or_more_occurrences(0.01), 0.39346934)
        pdf = PoissonTOM(time_span=5)
        self.assertEqual(pdf.get_probability_one_or_more_occurrences(8), 1)
        aae = self.assertAlmostEqual
        aae(pdf.get_probability_one_or_more_occurrences(0.1), 0.3934693)
        aae(pdf.get_probability_one_or_more_occurrences(0.01), 0.0487706)

    def test_get_probability_one_occurrence(self):
        pdf = PoissonTOM(time_span=30)
        aae = self.assertAlmostEqual
        aae(pdf.get_probability_one_occurrence(10), 0)
        aae(pdf.get_probability_one_occurrence(0.1), 0.1493612)
        aae(pdf.get_probability_one_occurrence(0.01), 0.2222455)

    def test_sample_number_of_occurrences(self):
        # Empirical mean of Poisson samples should approach
        # rate * time_span; the fixed seed keeps this deterministic.
        time_span = 40
        rate = 0.05
        num_samples = 8000
        tom = PoissonTOM(time_span)
        numpy.random.seed(31)
        # NOTE: 'xrange' makes this Python 2 only.
        mean = sum(tom.sample_number_of_occurrences(rate)
                   for i in xrange(num_samples)) / float(num_samples)
        self.assertAlmostEqual(mean, rate * time_span, delta=1e-3)
| agpl-3.0 |
allefilmskijken/afk | script.module.livestreamer/lib/livestreamer/__init__.py | 10 | 1974 | # coding: utf8
"""Livestreamer extracts streams from various services.
The main compontent of Livestreamer is a command-line utility that
launches the streams in a video player.
An API is also provided that allows direct access to stream data.
Full documentation is available at http://docs.livestreamer.io/.
"""
__title__ = "livestreamer"
__version__ = "1.12.0"
__license__ = "Simplified BSD"
__author__ = "Christopher Rosell"
__copyright__ = "Copyright 2011-2015 Christopher Rosell"
__credits__ = [
"Agustín Carrasco (@asermax)",
"Andrew Bashore (@bashtech)",
"Andy Mikhailenko (@neithere)",
"Athanasios Oikonomou (@athoik)",
"Brian Callahan (@ibara)",
"Che (@chhe)",
"Christopher Rosell (@chrippa)",
"Daniel Miranda (@danielkza)",
"Daniel Wallace (@gtmanfred)",
"David Arvelo (@darvelo)",
"Dominik Dabrowski (@doda)",
"Eric J (@wormeyman)",
"Ethan Jones (@jonesz)",
"Gaspard Jankowiak (@gapato)",
"Jaime Marquínez Ferrándiz (@jaimeMF)",
"Jan Tore Morken (@jantore)",
"John Peterson (@john-peterson)",
"Jon Bergli Heier (@sn4kebite)",
"Kacper (@kasper93)",
"Martin Panter (@vadmium)",
"Max Nordlund (@maxnordlund)",
"Michael Cheah (@cheah)",
"Moritz Blanke",
"Niall McAndrew (@niallm90)",
"Niels Kräupl (@Gamewalker)",
"Pascal Romahn (@skulblakka)",
"Sam Edwards (@dotsam)",
"Stefan Breunig (@breunigs)",
"Suhail Patel (@suhailpatel)",
"Sunaga Takahiro (@sunaga720)",
"Vitaly Evtushenko (@eltiren)",
"Warnar Boekkooi (@boekkooi)",
"@btiom",
"@daslicious",
"@MasterofJOKers",
"@medina",
"@monkeyphysics",
"@nixxquality",
"@papplampe",
"@t0mm0",
"@ToadKing",
"@unintended",
"@wolftankk",
"@yeeeargh"
]
from .api import streams
from .exceptions import (LivestreamerError, PluginError, NoStreamsError,
NoPluginError, StreamError)
from .session import Livestreamer
| gpl-2.0 |
andrewcbennett/iris | lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py | 5 | 49811 | # (C) British Crown Copyright 2014 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for :class:`iris.analysis._regrid.RectilinearRegridder`."""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import numpy as np
from iris.analysis._regrid import RectilinearRegridder as Regridder
from iris.aux_factory import HybridHeightFactory
from iris.coord_systems import GeogCS, OSGB
from iris.coords import AuxCoord, DimCoord
from iris.cube import Cube
from iris.tests import mock
from iris.tests.stock import global_pp, lat_lon_cube, realistic_4d
RESULT_DIR = ('analysis', 'regrid')
# Convenience to access Regridder static method.
regrid = Regridder._regrid
class Test__regrid__linear(tests.IrisTest):
    '''
    Tests for RectilinearRegridder._regrid with the (default) linear
    interpolation method.
    '''
    def setUp(self):
        # Source grid: 60 x-points in [-2, 57], 50 y-points in [0, 49].
        self.x = DimCoord(np.linspace(-2, 57, 60))
        self.y = DimCoord(np.linspace(0, 49, 50))
        self.xs, self.ys = np.meshgrid(self.x.points, self.y.points)
        # Analytic field from which the source data are generated.
        transformation = lambda x, y: x + y ** 2
        # Construct a function which adds dimensions to the 2D data array
        # so that we can test higher dimensional functionality.
        dim_extender = lambda arr: (arr[np.newaxis, ..., np.newaxis] * [1, 2])
        self.data = dim_extender(transformation(self.xs, self.ys))
        # The target grid deliberately extends beyond the source grid,
        # so some target points are extrapolated (expected NaN).
        target_x = np.linspace(-3, 60, 4)
        target_y = np.linspace(0.5, 51, 3)
        self.target_x, self.target_y = np.meshgrid(target_x, target_y)
        #: Expected values, which not quite the analytical value, but
        #: representative of the bilinear interpolation scheme.
        self.expected = np.array([[[[np.nan, np.nan],
                                    [18.5, 37.],
                                    [39.5, 79.],
                                    [np.nan, np.nan]],
                                   [[np.nan, np.nan],
                                    [681.25, 1362.5],
                                    [702.25, 1404.5],
                                    [np.nan, np.nan]],
                                   [[np.nan, np.nan],
                                    [np.nan, np.nan],
                                    [np.nan, np.nan],
                                    [np.nan, np.nan]]]])
        # Indices of the x/y dimensions within self.data.
        self.x_dim = 2
        self.y_dim = 1

    def assert_values(self, values):
        # values is a list of [x, y, [val1, val2]]
        xs, ys, expecteds = zip(*values)
        expecteds = np.array(expecteds)[None, None, ...]
        result = regrid(self.data, self.x_dim, self.y_dim,
                        self.x, self.y,
                        np.array([xs]), np.array([ys]))
        self.assertArrayAllClose(result, expecteds, rtol=1e-04)

        # Check that transposing the input data results in the same values
        ndim = self.data.ndim
        result2 = regrid(self.data.T, ndim - self.x_dim - 1,
                         ndim - self.y_dim - 1,
                         self.x, self.y,
                         np.array([xs]), np.array([ys]))
        self.assertArrayEqual(result.T, result2)

    def test_single_values(self):
        # Check that the values are sensible e.g. (3 + 4**2 == 19)
        # Points just outside the source extent must come back NaN.
        self.assert_values([[3, 4, [19, 38]],
                            [-2, 0, [-2, -4]],
                            [-2.01, 0, [np.nan, np.nan]],
                            [2, -0.01, [np.nan, np.nan]],
                            [57, 0, [57, 114]],
                            [57.01, 0, [np.nan, np.nan]],
                            [57, 49, [2458, 4916]],
                            [57, 49.01, [np.nan, np.nan]]])

    def test_simple_result(self):
        result = regrid(self.data, self.x_dim, self.y_dim,
                        self.x, self.y,
                        self.target_x, self.target_y)
        self.assertArrayEqual(result, self.expected)

    def test_simple_masked(self):
        # Mask everything except an interior patch and check the mask
        # propagates through the regridding.
        data = np.ma.MaskedArray(self.data, mask=True)
        data.mask[:, 1:30, 1:30] = False
        result = regrid(data, self.x_dim, self.y_dim,
                        self.x, self.y,
                        self.target_x, self.target_y)
        expected_mask = np.array([[[[True, True], [True, True],
                                    [True, True], [True, True]],
                                   [[True, True], [False, False],
                                    [True, True], [True, True]],
                                   [[True, True], [True, True],
                                    [True, True], [True, True]]]], dtype=bool)
        expected = np.ma.MaskedArray(self.expected,
                                     mask=expected_mask)
        self.assertMaskedArrayEqual(result, expected)

    def test_simple_masked_no_mask(self):
        # A masked array input (even with mask=False) stays masked.
        data = np.ma.MaskedArray(self.data, mask=False)
        result = regrid(data, self.x_dim, self.y_dim,
                        self.x, self.y,
                        self.target_x, self.target_y)
        self.assertIsInstance(result, np.ma.MaskedArray)

    def test_result_transpose_shape(self):
        ndim = self.data.ndim
        result = regrid(self.data.T, ndim - self.x_dim - 1,
                        ndim - self.y_dim - 1, self.x, self.y,
                        self.target_x, self.target_y)
        self.assertArrayEqual(result, self.expected.T)

    def test_reverse_x_coord(self):
        # Regridding from a decreasing x coordinate must match the
        # result from the increasing one.
        index = [slice(None)] * self.data.ndim
        index[self.x_dim] = slice(None, None, -1)
        result = regrid(self.data[index], self.x_dim,
                        self.y_dim, self.x[::-1], self.y,
                        self.target_x, self.target_y)
        self.assertArrayEqual(result, self.expected)

    def test_circular_x_coord(self):
        # Check that interpolation of a circular src coordinate doesn't result
        # in an out of bounds value.
        self.x.circular = True
        self.x.units = 'degree'
        result = regrid(self.data, self.x_dim, self.y_dim,
                        self.x, self.y, np.array([[58]]),
                        np.array([[0]]))
        self.assertArrayAlmostEqual(result,
                                    np.array([56.80398671, 113.60797342],
                                             ndmin=self.data.ndim))
# Check what happens to NaN values, extrapolated values, and
# masked values.
class Test__regrid__extrapolation_modes(tests.IrisTest):
values_by_method = {'linear': [[np.nan, np.nan, 2, 3, np.nan],
[np.nan, np.nan, 6, 7, np.nan],
[8, 9, 10, 11, np.nan]],
'nearest': [[np.nan, 1, 2, 3, np.nan],
[4, 5, 6, 7, np.nan],
[8, 9, 10, 11, np.nan]]}
extrapolate_values_by_method = {'linear': [[np.nan, np.nan, 2, 3, 4],
[np.nan, np.nan, 6, 7, 8],
[8, 9, 10, 11, 12]],
'nearest': [[np.nan, 1, 2, 3, 3],
[4, 5, 6, 7, 7],
[8, 9, 10, 11, 11]]}
def setUp(self):
self.methods = ('linear', 'nearest')
def _regrid(self, data, method, extrapolation_mode=None):
x = np.arange(4)
y = np.arange(3)
x_coord = DimCoord(x)
y_coord = DimCoord(y)
x_dim, y_dim = 1, 0
grid_x, grid_y = np.meshgrid(np.arange(5), y)
kwargs = dict(method=method)
if extrapolation_mode is not None:
kwargs['extrapolation_mode'] = extrapolation_mode
result = regrid(data, x_dim, y_dim, x_coord, y_coord,
grid_x, grid_y, **kwargs)
return result
def test_default_ndarray(self):
# NaN -> NaN
# Extrapolated -> NaN
data = np.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
for method in self.methods:
result = self._regrid(data, method)
self.assertNotIsInstance(result, np.ma.MaskedArray)
expected = self.values_by_method[method]
self.assertArrayEqual(result, expected)
def test_default_maskedarray(self):
# NaN -> NaN
# Extrapolated -> Masked
# Masked -> Masked
data = np.ma.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
data[2, 3] = np.ma.masked
for method in self.methods:
result = self._regrid(data, method)
self.assertIsInstance(result, np.ma.MaskedArray)
mask = [[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 1, 1]]
values = self.values_by_method[method]
expected = np.ma.MaskedArray(values, mask)
self.assertMaskedArrayEqual(result, expected)
def test_default_maskedarray_none_masked(self):
# NaN -> NaN
# Extrapolated -> Masked
# Masked -> N/A
data = np.ma.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
for method in self.methods:
result = self._regrid(data, method)
self.assertIsInstance(result, np.ma.MaskedArray)
mask = [[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1]]
values = self.values_by_method[method]
expected = np.ma.MaskedArray(values, mask)
self.assertMaskedArrayEqual(result, expected)
def test_default_maskedarray_none_masked_expanded(self):
# NaN -> NaN
# Extrapolated -> Masked
# Masked -> N/A
data = np.ma.arange(12, dtype=np.float).reshape(3, 4)
# Make sure the mask has been expanded
data.mask = False
data[0, 0] = np.nan
for method in self.methods:
result = self._regrid(data, method)
self.assertIsInstance(result, np.ma.MaskedArray)
mask = [[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1]]
values = self.values_by_method[method]
expected = np.ma.MaskedArray(values, mask)
self.assertMaskedArrayEqual(result, expected)
def test_method_ndarray(self):
# NaN -> NaN
# Extrapolated -> linear
data = np.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
for method in self.methods:
result = self._regrid(data, method, 'extrapolate')
self.assertNotIsInstance(result, np.ma.MaskedArray)
expected = self.extrapolate_values_by_method[method]
self.assertArrayEqual(result, expected)
def test_method_maskedarray(self):
# NaN -> NaN
# Extrapolated -> linear
# Masked -> Masked
data = np.ma.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
data[2, 3] = np.ma.masked
for method in self.methods:
result = self._regrid(data, method, 'extrapolate')
self.assertIsInstance(result, np.ma.MaskedArray)
mask = [[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 1, 1]]
values = self.extrapolate_values_by_method[method]
expected = np.ma.MaskedArray(values, mask)
self.assertMaskedArrayEqual(result, expected)
def test_nan_ndarray(self):
# NaN -> NaN
# Extrapolated -> NaN
data = np.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
for method in self.methods:
result = self._regrid(data, method, 'nan')
self.assertNotIsInstance(result, np.ma.MaskedArray)
expected = self.values_by_method[method]
self.assertArrayEqual(result, expected)
def test_nan_maskedarray(self):
# NaN -> NaN
# Extrapolated -> NaN
# Masked -> Masked
data = np.ma.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
data[2, 3] = np.ma.masked
for method in self.methods:
result = self._regrid(data, method, 'nan')
self.assertIsInstance(result, np.ma.MaskedArray)
mask = [[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 1, 0]]
values = self.values_by_method[method]
expected = np.ma.MaskedArray(values, mask)
self.assertMaskedArrayEqual(result, expected)
def test_error_ndarray(self):
# Values irrelevant - the function raises an error.
data = np.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
for method in self.methods:
with self.assertRaisesRegexp(ValueError, 'out of bounds'):
self._regrid(data, method, 'error')
def test_error_maskedarray(self):
# Values irrelevant - the function raises an error.
data = np.ma.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
data[2, 3] = np.ma.masked
for method in self.methods:
with self.assertRaisesRegexp(ValueError, 'out of bounds'):
self._regrid(data, method, 'error')
def test_mask_ndarray(self):
# NaN -> NaN
# Extrapolated -> Masked (this is different from all the other
# modes)
data = np.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
for method in self.methods:
result = self._regrid(data, method, 'mask')
self.assertIsInstance(result, np.ma.MaskedArray)
mask = [[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1]]
values = self.values_by_method[method]
expected = np.ma.MaskedArray(values, mask)
self.assertMaskedArrayEqual(result, expected)
def test_mask_maskedarray(self):
# NaN -> NaN
# Extrapolated -> Masked
# Masked -> Masked
data = np.ma.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
data[2, 3] = np.ma.masked
for method in self.methods:
result = self._regrid(data, method, 'mask')
self.assertIsInstance(result, np.ma.MaskedArray)
mask = [[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 1, 1]]
values = self.values_by_method[method]
expected = np.ma.MaskedArray(values, mask)
self.assertMaskedArrayEqual(result, expected)
def test_nanmask_ndarray(self):
# NaN -> NaN
# Extrapolated -> NaN
data = np.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
for method in self.methods:
result = self._regrid(data, method, 'nanmask')
self.assertNotIsInstance(result, np.ma.MaskedArray)
expected = self.values_by_method[method]
self.assertArrayEqual(result, expected)
def test_nanmask_maskedarray(self):
# NaN -> NaN
# Extrapolated -> Masked
# Masked -> Masked
data = np.ma.arange(12, dtype=np.float).reshape(3, 4)
data[0, 0] = np.nan
data[2, 3] = np.ma.masked
for method in self.methods:
result = self._regrid(data, method, 'nanmask')
self.assertIsInstance(result, np.ma.MaskedArray)
mask = [[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 1, 1]]
values = self.values_by_method[method]
expected = np.ma.MaskedArray(values, mask)
self.assertMaskedArrayEqual(result, expected)
def test_invalid(self):
data = np.arange(12, dtype=np.float).reshape(3, 4)
emsg = 'Invalid extrapolation mode'
for method in self.methods:
with self.assertRaisesRegexp(ValueError, emsg):
self._regrid(data, method, 'BOGUS')
class Test___call____invalid_types(tests.IrisTest):
    """Non-cube sources and grids must be rejected with TypeError."""
    def setUp(self):
        self.cube = lat_lon_cube()
        # Regridder method and extrapolation-mode.
        self.args = ('linear', 'mask')
        self.regridder = Regridder(self.cube, self.cube, *self.args)
    def test_src_as_array(self):
        # A bare ndarray is not acceptable, either at construction time
        # or when calling an existing regridder.
        array_src = np.zeros((3, 4))
        for bad_call in (lambda: Regridder(array_src, self.cube, *self.args),
                         lambda: self.regridder(array_src)):
            with self.assertRaises(TypeError):
                bad_call()
    def test_grid_as_array(self):
        with self.assertRaises(TypeError):
            Regridder(self.cube, np.zeros((3, 4)), *self.args)
    def test_src_as_int(self):
        # A plain int is equally invalid as a source.
        for bad_call in (lambda: Regridder(42, self.cube, *self.args),
                         lambda: self.regridder(42)):
            with self.assertRaises(TypeError):
                bad_call()
    def test_grid_as_int(self):
        with self.assertRaises(TypeError):
            Regridder(self.cube, 42, *self.args)
class Test___call____missing_coords(tests.IrisTest):
    """A cube lacking a latitude and/or longitude coordinate must be
    rejected with ValueError, both at Regridder construction and call time."""
    def setUp(self):
        self.args = ('linear', 'mask')
    def ok_bad(self, coord_names):
        # Deletes the named coords from `bad`.
        # Returns (valid cube, invalid cube) built from the same fixture.
        ok = lat_lon_cube()
        bad = lat_lon_cube()
        for name in coord_names:
            bad.remove_coord(name)
        return ok, bad
    def test_src_missing_lat(self):
        ok, bad = self.ok_bad(['latitude'])
        with self.assertRaises(ValueError):
            Regridder(bad, ok, *self.args)
        # A regridder built from valid cubes must also reject a bad source.
        regridder = Regridder(ok, ok, *self.args)
        with self.assertRaises(ValueError):
            regridder(bad)
    def test_grid_missing_lat(self):
        ok, bad = self.ok_bad(['latitude'])
        with self.assertRaises(ValueError):
            Regridder(ok, bad, *self.args)
    def test_src_missing_lon(self):
        ok, bad = self.ok_bad(['longitude'])
        with self.assertRaises(ValueError):
            Regridder(bad, ok, *self.args)
        regridder = Regridder(ok, ok, *self.args)
        with self.assertRaises(ValueError):
            regridder(bad)
    def test_grid_missing_lon(self):
        ok, bad = self.ok_bad(['longitude'])
        with self.assertRaises(ValueError):
            Regridder(ok, bad, *self.args)
    def test_src_missing_lat_lon(self):
        # Both horizontal coordinates absent.
        ok, bad = self.ok_bad(['latitude', 'longitude'])
        with self.assertRaises(ValueError):
            Regridder(bad, ok, *self.args)
        regridder = Regridder(ok, ok, *self.args)
        with self.assertRaises(ValueError):
            regridder(bad)
    def test_grid_missing_lat_lon(self):
        ok, bad = self.ok_bad(['latitude', 'longitude'])
        with self.assertRaises(ValueError):
            Regridder(ok, bad, *self.args)
class Test___call____not_dim_coord(tests.IrisTest):
    """Latitude/longitude supplied as AuxCoords (rather than DimCoords)
    must be rejected with ValueError."""
    def setUp(self):
        self.args = ('linear', 'mask')
    def ok_bad(self, coord_name):
        # Demotes the named DimCoord on `bad` to an AuxCoord.
        ok = lat_lon_cube()
        bad = lat_lon_cube()
        coord = bad.coord(coord_name)
        dims = bad.coord_dims(coord)
        bad.remove_coord(coord_name)
        aux_coord = AuxCoord.from_coord(coord)
        bad.add_aux_coord(aux_coord, dims)
        return ok, bad
    def test_src_with_aux_lat(self):
        ok, bad = self.ok_bad('latitude')
        with self.assertRaises(ValueError):
            Regridder(bad, ok, *self.args)
        # A regridder built from valid cubes must also reject a bad source.
        regridder = Regridder(ok, ok, *self.args)
        with self.assertRaises(ValueError):
            regridder(bad)
    def test_grid_with_aux_lat(self):
        ok, bad = self.ok_bad('latitude')
        with self.assertRaises(ValueError):
            Regridder(ok, bad, *self.args)
    def test_src_with_aux_lon(self):
        ok, bad = self.ok_bad('longitude')
        with self.assertRaises(ValueError):
            Regridder(bad, ok, *self.args)
        regridder = Regridder(ok, ok, *self.args)
        with self.assertRaises(ValueError):
            regridder(bad)
    def test_grid_with_aux_lon(self):
        ok, bad = self.ok_bad('longitude')
        with self.assertRaises(ValueError):
            Regridder(ok, bad, *self.args)
class Test___call____not_dim_coord_share(tests.IrisTest):
    """Latitude and longitude sharing a single data dimension must be
    rejected with ValueError."""
    def setUp(self):
        self.args = ('linear', 'mask')
    def ok_bad(self):
        # Make lat/lon share a single dimension on `bad`.
        ok = lat_lon_cube()
        bad = lat_lon_cube()
        lat = bad.coord('latitude')
        # Collapse to a 1D cube, then re-attach latitude on the same
        # dimension that longitude occupies.
        bad = bad[0, :lat.shape[0]]
        bad.remove_coord('latitude')
        bad.add_aux_coord(lat, 0)
        return ok, bad
    def test_src_shares_dim(self):
        ok, bad = self.ok_bad()
        with self.assertRaises(ValueError):
            Regridder(bad, ok, *self.args)
        # A regridder built from valid cubes must also reject a bad source.
        regridder = Regridder(ok, ok, *self.args)
        with self.assertRaises(ValueError):
            regridder(bad)
    def test_grid_shares_dim(self):
        ok, bad = self.ok_bad()
        with self.assertRaises(ValueError):
            Regridder(ok, bad, *self.args)
class Test___call____bad_georeference(tests.IrisTest):
    """Checks of coordinate-system handling: missing, partial or
    inconsistent coordinate systems must raise ValueError."""
    def setUp(self):
        self.args = ('linear', 'mask')
    def ok_bad(self, lat_cs, lon_cs):
        # Updates `bad` to use the given coordinate systems.
        ok = lat_lon_cube()
        bad = lat_lon_cube()
        bad.coord('latitude').coord_system = lat_cs
        bad.coord('longitude').coord_system = lon_cs
        return ok, bad
    def test_src_no_cs(self):
        # No coordinate system at all: construction succeeds, but the
        # call fails because the grid still has one.
        ok, bad = self.ok_bad(None, None)
        regridder = Regridder(bad, ok, *self.args)
        with self.assertRaises(ValueError):
            regridder(bad)
    def test_grid_no_cs(self):
        ok, bad = self.ok_bad(None, None)
        regridder = Regridder(ok, bad, *self.args)
        with self.assertRaises(ValueError):
            regridder(ok)
    def test_src_one_cs(self):
        # Only one of lat/lon has a coordinate system - rejected outright.
        ok, bad = self.ok_bad(None, GeogCS(6371000))
        with self.assertRaises(ValueError):
            Regridder(bad, ok, *self.args)
    def test_grid_one_cs(self):
        ok, bad = self.ok_bad(None, GeogCS(6371000))
        with self.assertRaises(ValueError):
            Regridder(ok, bad, *self.args)
    def test_src_inconsistent_cs(self):
        # Lat and lon use different coordinate systems - rejected outright.
        ok, bad = self.ok_bad(GeogCS(6370000), GeogCS(6371000))
        with self.assertRaises(ValueError):
            Regridder(bad, ok, *self.args)
    def test_grid_inconsistent_cs(self):
        ok, bad = self.ok_bad(GeogCS(6370000), GeogCS(6371000))
        with self.assertRaises(ValueError):
            Regridder(ok, bad, *self.args)
class Test___call____bad_angular_units(tests.IrisTest):
    """An angular coordinate in radians (not degrees) must be rejected."""
    def ok_bad(self):
        # Changes the longitude coord to radians on `bad`.
        good = lat_lon_cube()
        radians_cube = lat_lon_cube()
        radians_cube.coord('longitude').units = 'radians'
        return good, radians_cube
    def test_src_radians(self):
        # Construction succeeds; the failure occurs at call time.
        good, radians_cube = self.ok_bad()
        regridder = Regridder(radians_cube, good, 'linear', 'mask')
        with self.assertRaises(ValueError):
            regridder(radians_cube)
    def test_grid_radians(self):
        # A radians target grid is rejected at construction time.
        good, radians_cube = self.ok_bad()
        with self.assertRaises(ValueError):
            Regridder(good, radians_cube, 'linear', 'mask')
def uk_cube():
    """Return a 3x4 test cube on an OSGB grid, carrying a
    'surface_altitude' AuxCoord and a hybrid-height factory."""
    cs = OSGB()
    data = np.arange(12, dtype=np.float32).reshape(3, 4)
    cube = Cube(data)
    cube.add_dim_coord(
        DimCoord(np.arange(3), 'projection_y_coordinate', units='m',
                 coord_system=cs), 0)
    cube.add_dim_coord(
        DimCoord(np.arange(4), 'projection_x_coordinate', units='m',
                 coord_system=cs), 1)
    # Surface altitude is simply 10x the data values.
    surface = AuxCoord(data * 10, 'surface_altitude', units='m')
    cube.add_aux_coord(surface, (0, 1))
    cube.add_aux_factory(HybridHeightFactory(orography=surface))
    return cube
class Test___call____bad_linear_units(tests.IrisTest):
    """A projection coordinate in km (not metres) must be rejected."""
    def ok_bad(self):
        # Defines `bad` with an x coordinate in km.
        good = lat_lon_cube()
        km_cube = uk_cube()
        km_cube.coord(axis='x').units = 'km'
        return good, km_cube
    def test_src_km(self):
        # Construction succeeds; the failure occurs at call time.
        good, km_cube = self.ok_bad()
        regridder = Regridder(km_cube, good, 'linear', 'mask')
        with self.assertRaises(ValueError):
            regridder(km_cube)
    def test_grid_km(self):
        # A km target grid is rejected at construction time.
        good, km_cube = self.ok_bad()
        with self.assertRaises(ValueError):
            Regridder(good, km_cube, 'linear', 'mask')
class Test___call____no_coord_systems(tests.IrisTest):
    # Test behaviour in the absence of any coordinate systems.
    # Regridding must still work when the coordinate *metadata* matches,
    # and must fail with ValueError when it does not.
    def setUp(self):
        self.mode = 'mask'
        self.methods = ('linear', 'nearest')
    def remove_coord_systems(self, cube):
        # Strip the coordinate system from every coordinate in `cube`.
        for coord in cube.coords():
            coord.coord_system = None
    def test_ok(self):
        # Ensure regridding is supported when the coordinate definitions match.
        # NB. We change the coordinate *values* to ensure that does not
        # prevent the regridding operation.
        src = uk_cube()
        self.remove_coord_systems(src)
        grid = src.copy()
        for coord in grid.dim_coords:
            coord.points = coord.points + 1
        for method in self.methods:
            regridder = Regridder(src, grid, method, self.mode)
            result = regridder(src)
            for coord in result.dim_coords:
                self.assertEqual(coord, grid.coord(coord))
            # Shifting the grid by +1 masks the last column and row.
            expected = np.ma.arange(12).reshape((3, 4)) + 5
            expected[:, 3] = np.ma.masked
            expected[2, :] = np.ma.masked
            self.assertMaskedArrayEqual(result.data, expected)
    def test_matching_units(self):
        # Check we are insensitive to the units provided they match.
        # NB. We change the coordinate *values* to ensure that does not
        # prevent the regridding operation.
        src = uk_cube()
        self.remove_coord_systems(src)
        # Move to unusual units (i.e. not metres or degrees).
        for coord in src.dim_coords:
            coord.units = 'feet'
        grid = src.copy()
        for coord in grid.dim_coords:
            coord.points = coord.points + 1
        for method in self.methods:
            regridder = Regridder(src, grid, method, self.mode)
            result = regridder(src)
            for coord in result.dim_coords:
                self.assertEqual(coord, grid.coord(coord))
            expected = np.ma.arange(12).reshape((3, 4)) + 5
            expected[:, 3] = np.ma.masked
            expected[2, :] = np.ma.masked
            self.assertMaskedArrayEqual(result.data, expected)
    def test_different_units(self):
        # Mismatched units (feet vs yards) must raise ValueError.
        src = uk_cube()
        self.remove_coord_systems(src)
        # Move to unusual units (i.e. not metres or degrees).
        for coord in src.coords():
            coord.units = 'feet'
        grid = src.copy()
        grid.coord('projection_y_coordinate').units = 'yards'
        # We change the coordinate *values* to ensure that does not
        # prevent the regridding operation.
        for coord in grid.dim_coords:
            coord.points = coord.points + 1
        for method in self.methods:
            regridder = Regridder(src, grid, method, self.mode)
            emsg = 'matching coordinate metadata'
            with self.assertRaisesRegexp(ValueError, emsg):
                regridder(src)
    def test_coord_metadata_mismatch(self):
        # Check for failure when coordinate definitions differ.
        uk = uk_cube()
        self.remove_coord_systems(uk)
        lat_lon = lat_lon_cube()
        self.remove_coord_systems(lat_lon)
        for method in self.methods:
            regridder = Regridder(uk, lat_lon, method, self.mode)
            with self.assertRaises(ValueError):
                regridder(uk)
class Test___call____extrapolation_modes(tests.IrisTest):
    # Exercises every extrapolation mode against cube sources built from
    # uk_cube(), checking both the regridded data and that the
    # 'surface_altitude' AuxCoord is regridded without being masked.
    #
    # Expected data when extrapolated points are NaN/masked.
    values = [[np.nan, 6, 7, np.nan],
              [9, 10, 11, np.nan],
              [np.nan, np.nan, np.nan, np.nan]]
    # Expected data when extrapolation fills in real values, per method.
    extrapolate_values_by_method = {'linear': [[np.nan, 6, 7, 8],
                                               [9, 10, 11, 12],
                                               [13, 14, 15, 16]],
                                    'nearest': [[np.nan, 6, 7, 7],
                                                [9, 10, 11, 11],
                                                [9, 10, 11, 11]]}
    # Expected regridded 'surface_altitude' points (10x the data values).
    surface_values = [[50, 60, 70, np.nan],
                      [90, 100, 110, np.nan],
                      [np.nan, np.nan, np.nan, np.nan]]
    def setUp(self):
        self.methods = ('linear', 'nearest')
    def _ndarray_cube(self, method):
        # uk_cube() with a single NaN planted at a method-specific index.
        assert method in self.methods
        src = uk_cube()
        index = (0, 0) if method == 'linear' else (1, 1)
        src.data[index] = np.nan
        return src
    def _masked_cube(self, method):
        # As _ndarray_cube, but with masked data and one masked point.
        assert method in self.methods
        src = uk_cube()
        src.data = np.ma.asarray(src.data)
        nan_index = (0, 0) if method == 'linear' else (1, 1)
        mask_index = (2, 3)
        src.data[nan_index] = np.nan
        src.data[mask_index] = np.ma.masked
        return src
    def _regrid(self, src, method, extrapolation_mode='mask'):
        # Regrid onto a grid shifted by +1 in both dimensions, check the
        # surface_altitude coord, and return the regridded data payload.
        grid = src.copy()
        for coord in grid.dim_coords:
            coord.points = coord.points + 1
        regridder = Regridder(src, grid, method, extrapolation_mode)
        result = regridder(src)
        surface = result.coord('surface_altitude').points
        self.assertNotIsInstance(surface, np.ma.MaskedArray)
        self.assertArrayEqual(surface, self.surface_values)
        return result.data
    def test_default_ndarray(self):
        # NaN -> NaN
        # Extrapolated -> Masked
        for method in self.methods:
            src = self._ndarray_cube(method)
            result = self._regrid(src, method)
            self.assertIsInstance(result, np.ma.MaskedArray)
            mask = [[0, 0, 0, 1],
                    [0, 0, 0, 1],
                    [1, 1, 1, 1]]
            expected = np.ma.MaskedArray(self.values, mask)
            self.assertMaskedArrayEqual(result, expected)
    def test_default_maskedarray(self):
        # NaN -> NaN
        # Extrapolated -> Masked
        # Masked -> Masked
        for method in self.methods:
            src = self._masked_cube(method)
            result = self._regrid(src, method)
            self.assertIsInstance(result, np.ma.MaskedArray)
            mask = [[0, 0, 0, 1],
                    [0, 0, 1, 1],
                    [1, 1, 1, 1]]
            expected = np.ma.MaskedArray(self.values, mask)
            self.assertMaskedArrayEqual(result, expected)
    def test_default_maskedarray_none_masked(self):
        # NaN -> NaN
        # Extrapolated -> Masked
        # Masked -> N/A
        for method in self.methods:
            src = uk_cube()
            src.data = np.ma.asarray(src.data)
            index = (0, 0) if method == 'linear' else (1, 1)
            src.data[index] = np.nan
            result = self._regrid(src, method)
            self.assertIsInstance(result, np.ma.MaskedArray)
            mask = [[0, 0, 0, 1],
                    [0, 0, 0, 1],
                    [1, 1, 1, 1]]
            expected = np.ma.MaskedArray(self.values, mask)
            self.assertMaskedArrayEqual(result, expected)
    def test_default_maskedarray_none_masked_expanded(self):
        # NaN -> NaN
        # Extrapolated -> Masked
        # Masked -> N/A
        for method in self.methods:
            src = uk_cube()
            src.data = np.ma.asarray(src.data)
            # Make sure the mask has been expanded
            src.data.mask = False
            index = (0, 0) if method == 'linear' else (1, 1)
            src.data[index] = np.nan
            result = self._regrid(src, method)
            self.assertIsInstance(result, np.ma.MaskedArray)
            mask = [[0, 0, 0, 1],
                    [0, 0, 0, 1],
                    [1, 1, 1, 1]]
            expected = np.ma.MaskedArray(self.values, mask)
            self.assertMaskedArrayEqual(result, expected)
    def test_method_ndarray(self):
        # NaN -> NaN
        # Extrapolated -> linear
        for method in self.methods:
            src = self._ndarray_cube(method)
            result = self._regrid(src, method, 'extrapolate')
            self.assertNotIsInstance(result, np.ma.MaskedArray)
            expected = self.extrapolate_values_by_method[method]
            self.assertArrayEqual(result, expected)
    def test_nan_ndarray(self):
        # NaN -> NaN
        # Extrapolated -> NaN
        for method in self.methods:
            src = self._ndarray_cube(method)
            result = self._regrid(src, method, 'nan')
            self.assertNotIsInstance(result, np.ma.MaskedArray)
            self.assertArrayEqual(result, self.values)
    def test_nan_maskedarray(self):
        # NaN -> NaN
        # Extrapolated -> NaN
        # Masked -> Masked
        for method in self.methods:
            src = self._masked_cube(method)
            result = self._regrid(src, method, 'nan')
            self.assertIsInstance(result, np.ma.MaskedArray)
            mask = [[0, 0, 0, 0],
                    [0, 0, 1, 0],
                    [0, 0, 0, 0]]
            expected = np.ma.MaskedArray(self.values, mask)
            self.assertMaskedArrayEqual(result, expected)
    def test_error_ndarray(self):
        # Values irrelevant - the function raises an error.
        for method in self.methods:
            src = self._ndarray_cube(method)
            with self.assertRaisesRegexp(ValueError, 'out of bounds'):
                self._regrid(src, method, 'error')
    def test_error_maskedarray(self):
        # Values irrelevant - the function raises an error.
        for method in self.methods:
            src = self._masked_cube(method)
            with self.assertRaisesRegexp(ValueError, 'out of bounds'):
                self._regrid(src, method, 'error')
    def test_mask_ndarray(self):
        # NaN -> NaN
        # Extrapolated -> Masked (this is different from all the other
        # modes)
        for method in self.methods:
            src = self._ndarray_cube(method)
            result = self._regrid(src, method, 'mask')
            self.assertIsInstance(result, np.ma.MaskedArray)
            mask = [[0, 0, 0, 1],
                    [0, 0, 0, 1],
                    [1, 1, 1, 1]]
            expected = np.ma.MaskedArray(self.values, mask)
            self.assertMaskedArrayEqual(result, expected)
    def test_mask_maskedarray(self):
        # NaN -> NaN
        # Extrapolated -> Masked
        # Masked -> Masked
        for method in self.methods:
            src = self._masked_cube(method)
            result = self._regrid(src, method, 'mask')
            self.assertIsInstance(result, np.ma.MaskedArray)
            mask = [[0, 0, 0, 1],
                    [0, 0, 1, 1],
                    [1, 1, 1, 1]]
            expected = np.ma.MaskedArray(self.values, mask)
            self.assertMaskedArrayEqual(result, expected)
    def test_nanmask_ndarray(self):
        # NaN -> NaN
        # Extrapolated -> NaN
        for method in self.methods:
            src = self._ndarray_cube(method)
            result = self._regrid(src, method, 'nanmask')
            self.assertNotIsInstance(result, np.ma.MaskedArray)
            self.assertArrayEqual(result, self.values)
    def test_nanmask_maskedarray(self):
        # NaN -> NaN
        # Extrapolated -> Masked
        # Masked -> Masked
        for method in self.methods:
            src = self._masked_cube(method)
            result = self._regrid(src, method, 'nanmask')
            self.assertIsInstance(result, np.ma.MaskedArray)
            mask = [[0, 0, 0, 1],
                    [0, 0, 1, 1],
                    [1, 1, 1, 1]]
            expected = np.ma.MaskedArray(self.values, mask)
            self.assertMaskedArrayEqual(result, expected)
    def test_invalid(self):
        # An unrecognised mode name must raise ValueError.
        src = uk_cube()
        emsg = 'Invalid extrapolation mode'
        for method in self.methods:
            with self.assertRaisesRegexp(ValueError, emsg):
                self._regrid(src, method, 'BOGUS')
@tests.skip_data
class Test___call____rotated_to_lat_lon(tests.IrisTest):
    # Regridding from a rotated-pole source onto regular lat/lon grids,
    # comparing results against stored CML reference files.
    def setUp(self):
        self.src = realistic_4d()[:5, :2, ::40, ::30]
        self.mode = 'mask'
        self.methods = ('linear', 'nearest')
    def test_single_point(self):
        src = self.src[0, 0]
        grid = global_pp()[:1, :1]
        # These coordinate values have been derived by converting the
        # rotated coordinates of src[1, 1] into lat/lon by using cs2cs.
        grid.coord('longitude').points = -3.144870
        grid.coord('latitude').points = 52.406444
        for method in self.methods:
            regridder = Regridder(src, grid, method, self.mode)
            result = regridder(src)
            self.assertEqual(src.data[1, 1], result.data)
    def test_transposed_src(self):
        # The source dimensions are in a non-standard order.
        src = self.src
        src.transpose([3, 1, 2, 0])
        grid = self._grid_subset()
        for method in self.methods:
            regridder = Regridder(src, grid, method, self.mode)
            result = regridder(src)
            # Transpose back so the result matches the reference CML.
            result.transpose([3, 1, 2, 0])
            cml = RESULT_DIR + ('{}_subset.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
    def _grid_subset(self):
        # The destination grid points are entirely contained within the
        # src grid points.
        grid = global_pp()[:4, :5]
        grid.coord('longitude').points = np.linspace(-3.182, -3.06, 5)
        grid.coord('latitude').points = np.linspace(52.372, 52.44, 4)
        return grid
    def test_reversed(self):
        # Reversing either source or grid directions (every combination)
        # must give a result that, re-reversed, matches the reference.
        src = self.src
        grid = self._grid_subset()
        for method in self.methods:
            cml = RESULT_DIR + ('{}_subset.cml'.format(method),)
            regridder = Regridder(src, grid[::-1], method, self.mode)
            result = regridder(src)
            self.assertCMLApproxData(result[:, :, ::-1], cml)
            sample = src[:, :, ::-1]
            regridder = Regridder(sample, grid[::-1], method, self.mode)
            result = regridder(sample)
            self.assertCMLApproxData(result[:, :, ::-1], cml)
            sample = src[:, :, :, ::-1]
            regridder = Regridder(sample, grid[::-1], method, self.mode)
            result = regridder(sample)
            self.assertCMLApproxData(result[:, :, ::-1], cml)
            sample = src[:, :, ::-1, ::-1]
            regridder = Regridder(sample, grid[::-1], method, self.mode)
            result = regridder(sample)
            self.assertCMLApproxData(result[:, :, ::-1], cml)
            regridder = Regridder(src, grid[:, ::-1], method, self.mode)
            result = regridder(src)
            self.assertCMLApproxData(result[:, :, :, ::-1], cml)
            sample = src[:, :, ::-1]
            regridder = Regridder(sample, grid[:, ::-1], method, self.mode)
            result = regridder(sample)
            self.assertCMLApproxData(result[:, :, :, ::-1], cml)
            sample = src[:, :, :, ::-1]
            regridder = Regridder(sample, grid[:, ::-1], method, self.mode)
            result = regridder(sample)
            self.assertCMLApproxData(result[:, :, :, ::-1], cml)
            sample = src[:, :, ::-1, ::-1]
            regridder = Regridder(sample, grid[:, ::-1], method, self.mode)
            result = regridder(sample)
            self.assertCMLApproxData(result[:, :, :, ::-1], cml)
            regridder = Regridder(src, grid[::-1, ::-1], method, self.mode)
            result = regridder(src)
            self.assertCMLApproxData(result[:, :, ::-1, ::-1], cml)
            sample = src[:, :, ::-1]
            regridder = Regridder(sample, grid[::-1, ::-1], method, self.mode)
            result = regridder(sample)
            self.assertCMLApproxData(result[:, :, ::-1, ::-1], cml)
            sample = src[:, :, :, ::-1]
            regridder = Regridder(sample, grid[::-1, ::-1], method, self.mode)
            result = regridder(sample)
            self.assertCMLApproxData(result[:, :, ::-1, ::-1], cml)
            sample = src[:, :, ::-1, ::-1]
            regridder = Regridder(sample, grid[::-1, ::-1], method, self.mode)
            result = regridder(sample)
            self.assertCMLApproxData(result[:, :, ::-1, ::-1], cml)
    def test_grid_subset(self):
        # The destination grid points are entirely contained within the
        # src grid points.
        grid = self._grid_subset()
        for method in self.methods:
            regridder = Regridder(self.src, grid, method, self.mode)
            result = regridder(self.src)
            cml = RESULT_DIR + ('{}_subset.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
    def _big_grid(self):
        # A 5D grid cube whose extra leading dimensions should be ignored.
        grid = self._grid_subset()
        big_grid = Cube(np.zeros((5, 10, 3, 4, 5)))
        big_grid.add_dim_coord(grid.coord('latitude'), 3)
        big_grid.add_dim_coord(grid.coord('longitude'), 4)
        return big_grid
    def test_grid_subset_big(self):
        # Add some extra dimensions to the destination Cube and
        # these should be safely ignored.
        big_grid = self._big_grid()
        for method in self.methods:
            regridder = Regridder(self.src, big_grid, method, self.mode)
            result = regridder(self.src)
            cml = RESULT_DIR + ('{}_subset.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
    def test_grid_subset_big_transposed(self):
        # The order of the grid's dimensions (including the X and Y
        # dimensions) must not affect the result.
        big_grid = self._big_grid()
        big_grid.transpose([4, 0, 3, 1, 2])
        for method in self.methods:
            regridder = Regridder(self.src, big_grid, method, self.mode)
            result = regridder(self.src)
            cml = RESULT_DIR + ('{}_subset.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
    def test_grid_subset_anon(self):
        # Must cope OK with anonymous source dimensions.
        src = self.src
        src.remove_coord('time')
        grid = self._grid_subset()
        for method in self.methods:
            regridder = Regridder(src, grid, method, self.mode)
            result = regridder(src)
            cml = RESULT_DIR + ('{}_subset_anon.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
    def test_grid_subset_missing_data_1(self):
        # The destination grid points are entirely contained within the
        # src grid points AND we have missing data.
        src = self.src
        src.data = np.ma.MaskedArray(src.data)
        src.data[:, :, 0, 0] = np.ma.masked
        grid = self._grid_subset()
        for method in self.methods:
            regridder = Regridder(src, grid, method, self.mode)
            result = regridder(src)
            cml = RESULT_DIR + ('{}_subset_masked_1.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
    def test_grid_subset_missing_data_2(self):
        # The destination grid points are entirely contained within the
        # src grid points AND we have missing data.
        src = self.src
        src.data = np.ma.MaskedArray(src.data)
        src.data[:, :, 1, 2] = np.ma.masked
        grid = self._grid_subset()
        for method in self.methods:
            regridder = Regridder(src, grid, method, self.mode)
            result = regridder(src)
            cml = RESULT_DIR + ('{}_subset_masked_2.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
    def test_grid_partial_overlap(self):
        # The destination grid points are partially contained within the
        # src grid points.
        grid = global_pp()[:4, :4]
        grid.coord('longitude').points = np.linspace(-3.3, -3.06, 4)
        grid.coord('latitude').points = np.linspace(52.377, 52.43, 4)
        for method in self.methods:
            regridder = Regridder(self.src, grid, method, self.mode)
            result = regridder(self.src)
            cml = RESULT_DIR + ('{}_partial_overlap.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
    def test_grid_no_overlap(self):
        # The destination grid points are NOT contained within the
        # src grid points.
        grid = global_pp()[:4, :4]
        grid.coord('longitude').points = np.linspace(-3.3, -3.2, 4)
        grid.coord('latitude').points = np.linspace(52.377, 52.43, 4)
        for method in self.methods:
            regridder = Regridder(self.src, grid, method, self.mode)
            result = regridder(self.src)
            self.assertCMLApproxData(result, RESULT_DIR + ('no_overlap.cml',))
    def test_grid_subset_missing_data_aux(self):
        # The destination grid points are entirely contained within the
        # src grid points AND we have missing data on the aux coordinate.
        src = self.src
        src.coord('surface_altitude').points[1, 2] = np.ma.masked
        grid = self._grid_subset()
        for method in self.methods:
            regridder = Regridder(src, grid, method, self.mode)
            result = regridder(src)
            cml = RESULT_DIR + ('{}_masked_altitude.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
class Test___call____NOP(tests.IrisTest):
    """Regridding onto a grid identical to the source should be a no-op."""
    def setUp(self):
        # The destination grid points are exactly the same as the
        # src grid points.
        self.src = realistic_4d()[:5, :2, ::40, ::30]
        self.grid = self.src.copy()
    def _check_nop(self, method):
        # Regrid with the given method and check the cube is unchanged.
        regridder = Regridder(self.src, self.grid, method, 'mask')
        self.assertEqual(regridder(self.src), self.src)
    def test_nop__linear(self):
        self._check_nop('linear')
    def test_nop__nearest(self):
        self._check_nop('nearest')
@tests.skip_data
class Test___call____circular(tests.IrisTest):
    # Checks that the `circular` attribute of the longitude coordinate is
    # taken from the *grid* cube, for every src/grid combination.
    def setUp(self):
        src = global_pp()[::10, ::10]
        level_height = AuxCoord(0, long_name='level_height', units='m',
                                attributes={'positive': 'up'})
        sigma = AuxCoord(1, long_name='sigma')
        surface_altitude = AuxCoord((src.data - src.data.min()) * 50,
                                    'surface_altitude', units='m')
        src.add_aux_coord(level_height)
        src.add_aux_coord(sigma)
        src.add_aux_coord(surface_altitude, [0, 1])
        hybrid_height = HybridHeightFactory(level_height, sigma,
                                            surface_altitude)
        src.add_aux_factory(hybrid_height)
        self.src = src
        # Destination grid shifted 5 degrees west of the source.
        grid = global_pp()[:4, :4]
        grid.coord('longitude').points = grid.coord('longitude').points - 5
        self.grid = grid
        self.mode = 'mask'
        self.methods = ('linear', 'nearest')
    def test_non_circular(self):
        # Non-circular src -> non-circular grid
        for method in self.methods:
            regridder = Regridder(self.src, self.grid, method, self.mode)
            result = regridder(self.src)
            self.assertFalse(result.coord('longitude').circular)
            cml = RESULT_DIR + ('{}_non_circular.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
    def test_circular_src(self):
        # Circular src -> non-circular grid
        src = self.src
        src.coord('longitude').circular = True
        for method in self.methods:
            regridder = Regridder(src, self.grid, method, self.mode)
            result = regridder(src)
            self.assertFalse(result.coord('longitude').circular)
            cml = RESULT_DIR + ('{}_circular_src.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
    def test_circular_grid(self):
        # Non-circular src -> circular grid
        grid = self.grid
        grid.coord('longitude').circular = True
        for method in self.methods:
            regridder = Regridder(self.src, grid, method, self.mode)
            result = regridder(self.src)
            self.assertTrue(result.coord('longitude').circular)
            cml = RESULT_DIR + ('{}_circular_grid.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
    def test_circular_src_and_grid(self):
        # Circular src -> circular grid
        src = self.src
        src.coord('longitude').circular = True
        grid = self.grid
        grid.coord('longitude').circular = True
        for method in self.methods:
            regridder = Regridder(src, grid, method, self.mode)
            result = regridder(src)
            self.assertTrue(result.coord('longitude').circular)
            cml = RESULT_DIR + ('{}_both_circular.cml'.format(method),)
            self.assertCMLApproxData(result, cml)
if __name__ == '__main__':
tests.main()
| gpl-3.0 |
nvoron23/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/test/test_platform.py | 52 | 3919 | import sys
import os
import unittest
import platform
import subprocess
from test import test_support
class PlatformTest(unittest.TestCase):
def test_architecture(self):
res = platform.architecture()
if hasattr(os, "symlink"):
def test_architecture_via_symlink(self): # issue3762
def get(python):
cmd = [python, '-c',
'import platform; print platform.architecture()']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
return p.communicate()
real = os.path.realpath(sys.executable)
link = os.path.abspath(test_support.TESTFN)
os.symlink(real, link)
try:
self.assertEqual(get(real), get(link))
finally:
os.remove(link)
def test_platform(self):
for aliased in (False, True):
for terse in (False, True):
res = platform.platform(aliased, terse)
def test_system(self):
res = platform.system()
def test_node(self):
res = platform.node()
def test_release(self):
res = platform.release()
def test_version(self):
res = platform.version()
def test_machine(self):
res = platform.machine()
def test_processor(self):
res = platform.processor()
def test_python_implementation(self):
res = platform.python_implementation()
def test_python_version(self):
res1 = platform.python_version()
res2 = platform.python_version_tuple()
self.assertEqual(res1, ".".join(res2))
def test_python_branch(self):
res = platform.python_branch()
def test_python_revision(self):
res = platform.python_revision()
def test_python_build(self):
res = platform.python_build()
def test_python_compiler(self):
res = platform.python_compiler()
def test_system_alias(self):
res = platform.system_alias(
platform.system(),
platform.release(),
platform.version(),
)
def test_uname(self):
res = platform.uname()
self.assert_(any(res))
def test_java_ver(self):
res = platform.java_ver()
if sys.platform == 'java':
self.assert_(all(res))
def test_win32_ver(self):
res = platform.win32_ver()
def test_mac_ver(self):
res = platform.mac_ver()
try:
import gestalt
except ImportError:
have_toolbox_glue = False
else:
have_toolbox_glue = True
if have_toolbox_glue and platform.uname()[0] == 'Darwin':
# We're on a MacOSX system, check that
# the right version information is returned
fd = os.popen('sw_vers', 'r')
real_ver = None
for ln in fd:
if ln.startswith('ProductVersion:'):
real_ver = ln.strip().split()[-1]
break
fd.close()
self.failIf(real_ver is None)
self.assertEquals(res[0], real_ver)
# res[1] claims to contain
# (version, dev_stage, non_release_version)
# That information is no longer available
self.assertEquals(res[1], ('', '', ''))
if sys.byteorder == 'little':
self.assertEquals(res[2], 'i386')
else:
self.assertEquals(res[2], 'PowerPC')
def test_dist(self):
res = platform.dist()
def test_libc_ver(self):
import os
if os.path.isdir(sys.executable) and \
os.path.exists(sys.executable+'.exe'):
# Cygwin horror
executable = executable + '.exe'
res = platform.libc_ver(sys.executable)
def test_main():
    """Run the platform test suite via the regrtest helper."""
    test_support.run_unittest(PlatformTest)
if __name__ == '__main__':
    # Allow running this test module directly as a script.
    test_main()
| apache-2.0 |
aifil/odoo | addons/website_quote/models/payment.py | 13 | 1162 | # -*- coding: utf-8 -*-
from openerp import SUPERUSER_ID
from openerp.osv import orm, fields
class PaymentTransaction(orm.Model):
    """payment.transaction extended with a link to the quoted sale order,
    so a completed online payment can auto-confirm the order."""
    _inherit = 'payment.transaction'
    _columns = {
        # link with the sale order
        'sale_order_id': fields.many2one('sale.order', 'Sale Order'),
    }
    def form_feedback(self, cr, uid, data, acquirer_name, context=None):
        """ Override to confirm the sale order, if defined, and if the transaction
        is done. """
        tx = None
        res = super(PaymentTransaction, self).form_feedback(cr, uid, data, acquirer_name, context=context)
        # fetch the tx, check its state, confirm the potential SO
        # Each acquirer module may provide its own tx lookup hook named
        # '_<acquirer>_form_get_tx_from_data'; use it when available.
        tx_find_method_name = '_%s_form_get_tx_from_data' % acquirer_name
        if hasattr(self, tx_find_method_name):
            tx = getattr(self, tx_find_method_name)(cr, uid, data, context=context)
        # Only confirm draft/sent orders whose acquirer is configured to
        # auto-confirm at payment time; confirmation runs as SUPERUSER.
        if tx and tx.state == 'done' and tx.acquirer_id.auto_confirm == 'at_pay_confirm' and tx.sale_order_id and tx.sale_order_id.state in ['draft', 'sent']:
            self.pool['sale.order'].action_confirm(cr, SUPERUSER_ID, [tx.sale_order_id.id], context=context)
        return res
| gpl-3.0 |
mmalecki/node-gyp | gyp/test/actions/src/subdir1/counter.py | 103 | 1290 | #!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import time

# Usage: counter.py <output-file> [max-count]
# Increments a persistent counter and writes it to <output-file> only when
# the value actually changed, so the build system sees a modified file.
output = sys.argv[1]
persistoutput = "%s.persist" % sys.argv[1]

# Read the previous count; a missing/unreadable persist file means start at 0.
# Catch IOError specifically (the original bare 'except' hid real bugs) and
# close the file handles deterministically.
count = 0
try:
    with open(persistoutput, 'r') as f:
        count = f.read()
except IOError:
    pass
count = int(count) + 1

# Optional second argument caps the counter.
if len(sys.argv) > 2:
    max_count = int(sys.argv[2])
    if count > max_count:
        count = max_count

oldcount = 0
try:
    with open(output, 'r') as f:
        oldcount = f.read()
except IOError:
    pass

# Save the count in a file that is undeclared, and thus hidden, to gyp. We need
# to do this because, prior to running commands, scons deletes any declared
# outputs, so we would lose our count if we just wrote to the given output file.
# (The other option is to use Precious() in the scons generator, but that seems
# too heavy-handed just to support this somewhat unrealistic test case, and
# might lead to unintended side-effects).
with open(persistoutput, 'w') as f:
    f.write('%d' % (count))

# Only write the given output file if the count has changed.
if int(oldcount) != count:
    with open(output, 'w') as f:
        f.write('%d' % (count))

# Sleep so the next run changes the file time sufficiently to make the build
# detect the file as changed.
time.sleep(1)

sys.exit(0)
| mit |
afisher1/volttron-applications | pnnl/FncsVolttronBridge/FNCS_Volttron_Bridge.py | 4 | 11468 | #!env/bin/python
# -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
# Copyright (c) 2015, Battelle Memorial Institute
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation
# are those of the authors and should not be interpreted as representing
# official policies, either expressed or implied, of the FreeBSD
# Project.
#
# This material was prepared as an account of work sponsored by an
# agency of the United States Government. Neither the United States
# Government nor the United States Department of Energy, nor Battelle,
# nor any of their employees, nor any jurisdiction or organization that
# has cooperated in the development of these materials, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness or any
# information, apparatus, product, software, or process disclosed, or
# represents that its use would not infringe privately owned rights.
#
# Reference herein to any specific commercial product, process, or
# service by trade name, trademark, manufacturer, or otherwise does not
# necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors
# expressed herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY
# operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
#}}}
from datetime import datetime
import os
import sys
import json
import gevent
import logging
import warnings
import re
from collections import defaultdict
from gevent.core import callback
from volttron.platform.messaging import headers as headers_mod
from volttron.platform.vip.agent import Agent, PubSub, Core
from volttron.platform.agent import utils
import common
#FNCS inports
import fncs
utils.setup_logging()
_log = logging.getLogger(__name__)
__version__ = "1.0"
def remote_url(**kwargs):
    """Build a Volttron VIP connection URL from keyword parts.

    Expects vip_address, port, server_key, agent_public and agent_secret.
    """
    template = ("{vip_address}:{port}?serverkey={server_key}"
                "&publickey={agent_public}&"
                "secretkey={agent_secret}")
    return template.format(**kwargs)
class FNCS_VOLTTRON_Bridge(Agent):
    """Agent bridging a FNCS co-simulation broker and the Volttron bus.

    Volttron messages under 'fncs/input/...' are forwarded to FNCS, and
    FNCS subscription values are republished under 'fncs/output/...'.
    """
    def __init__(self,
                 simulation_run_time,
                 heartbeat_period,
                 heartbeat_multiplier,
                 fncs_zpl,
                 **kwargs):
        # simulation_run_time: '<number><unit>' string, e.g. '30s', '2h'
        # heartbeat_period: seconds between publish_heartbeat() calls
        # heartbeat_multiplier: scales how far simtime advances per beat
        # fncs_zpl: dict with the FNCS ZPL registration configuration
        super(FNCS_VOLTTRON_Bridge,self).__init__(**kwargs)
        self.simtime = 0
        self.heartbeat_period = heartbeat_period
        self.heartbeat_multiplier = heartbeat_multiplier
        self.fncs_zpl = fncs_zpl
        inTime = 0
        inUnit = ''
        timeMuliplier = 0  # NOTE(review): typo of 'timeMultiplier'; never read.
        # Split e.g. '30m' into number / optional space / unit.
        parsedTime = re.findall(r'(\d+)(\s?)(\D+)', simulation_run_time)
        if len(parsedTime) > 0:
            inTime = int(parsedTime[0][0])
            inUnit = parsedTime[0][2]
            # Map the unit's first letter to a seconds-per-unit factor.
            if 's' in inUnit[0] or 'S' in inUnit[0]:
                timeMultiplier = 1
            elif 'm' in inUnit[0] or 'M' in inUnit[0]:
                timeMultiplier = 60
            elif 'h' in inUnit[0] or 'H' in inUnit[0]:
                timeMultiplier = 3600
            elif 'd' in inUnit[0] or 'D' in inUnit[0]:
                timeMultiplier = 86400
            else:
                warnings.warn("Unknown time unit supplied. Defaulting to seconds.")
                timeMultiplier = 1
        else:
            raise RuntimeError("Unable to parse run time argument. Please provide run time in the following format: #s, #m, #h, #d, or #y.")
        # Total simulated length in seconds.
        self.simlength = inTime*timeMultiplier
        self.simStart = datetime.utcnow()
    def onmessage(self, peer, sender, bus, topic, headers, message):
        """Forward a Volttron 'fncs/input/...' message to the FNCS broker."""
        d = {'topic': topic, 'headers': headers, 'message': message}  # NOTE(review): unused
        # Forward message to FNCS
        if not fncs.is_initialized():
            raise RuntimeError("FNCS connection was terminated. Killing Bridge.")
        fncsmessage = str(message)
        # Strip the Volttron prefix; the remainder is the FNCS topic.
        topic = topic.replace('fncs/input/','')
        fncs.publish(topic, fncsmessage)
        _log.debug('Volttron->FNCS:\nTopic:%s\nMessage:%s\n'%(topic, message))
    @Core.receiver('onstart')
    def start(self, sender, **kwargs):
        """Subscribe to Volttron input topics, register with FNCS and
        start the periodic heartbeat."""
        self.vip.pubsub.subscribe(peer = 'pubsub',
                                  prefix = 'fncs/input/',
                                  #prefix = '',
                                  callback = self.onmessage).get(timeout=5)
        #Register with FNCS
        # Build the ZPL-format registration string from the config dict.
        cfg = "name = {0[name]}\ntime_delta = {0[time_delta]}\nbroker = {0[broker]}\n".format(self.fncs_zpl)
        if 'values' in self.fncs_zpl.keys():
            cfg += "values"
            # NOTE(review): 'defualt' below looks like a typo of 'default'
            # in the generated ZPL key -- confirm against the FNCS ZPL spec.
            for x in self.fncs_zpl['values'].keys():
                cfg += "\n {0}\n topic = {1[topic]}\n defualt = {1[default]}\n type = {1[type]}\n list = {1[list]}".format(x,self.fncs_zpl['values'][x])
        fncs.initialize(cfg)
        if not fncs.is_initialized():
            raise RuntimeError("FNCS connection failed!")
        # Publish once immediately, then on a fixed period.
        self.publish_heartbeat()
        print(self.heartbeat_period)
        self.core.periodic(self.heartbeat_period, self.publish_heartbeat)
    def publish_heartbeat(self):
        '''Send heartbeat message every HEARTBEAT_PERIOD seconds.
        HEARTBEAT_PERIOD is set and can be adjusted in the settings module.
        '''
        now = datetime.utcnow().isoformat(' ') + 'Z'
        nowdate = datetime.utcnow()
        print "publish_heartbeat", now
        timeDiff = nowdate - self.simStart
        # Per-device accumulators for the '.../all' roll-up messages.
        valMap = defaultdict(dict)
        metaMap = defaultdict(dict)
        headers = {headers_mod.TIMESTAMP: now, headers_mod.DATE: now}
        #Tell FNCS we are at our next timestep
        if not fncs.is_initialized():
            raise RuntimeError("FNCS connection was terminated. Killing Bridge.")
        elif self.simtime > self.simlength:
            # Simulation finished: detach from FNCS and stop the agent.
            fncs.finalize()
            self.core.stop()
        elif timeDiff.seconds >= 1:
            self.simtime+=self.heartbeat_period*self.heartbeat_multiplier
            print "fncs.time_request(",self.simtime,") request"
            # Blocks until the broker grants the requested time step.
            self.simtime = fncs.time_request(self.simtime)
            print "fncs.time_request() response", self.simtime
            #Grab Subscriptions from FNCS to publish to Volttron message bus
            subKeys = fncs.get_events()
            if len(subKeys) > 0:
                for x in subKeys:
                    valStr = fncs.get_value(x)
                    #parse message to split value and unit
                    valList = valStr.split(' ')
                    if len(valList) == 1:
                        # Bare value, no unit; coerce to float when possible.
                        val = valList[0]
                        valUnit = '';
                        try:
                            val = float(val)
                        except:
                            pass
                    elif len(valList) == 2:
                        # '<value> <unit>' pair; cast per the configured type.
                        val = valList[0]
                        valUnit = valList[1]
                        if 'int' in self.fncs_zpl['values'][x]['type']:
                            val = int(val)
                        elif 'double' in self.fncs_zpl['values'][x]['type']:
                            val = float(val)
                        elif 'complex' in self.fncs_zpl['values'][x]['type']:
                            raise RuntimeError("complex data type is currently not supported in Volttron.")
                        #TODO: come up with a better way to handle all types that can come in from fncs
                    else:
                        warnings.warn("FNCS message could not be parsed into value and unit. The message will be farwarded to Volttron message bus as is.")
                        val = valStr
                        valUnit = ''
                    fncsmessage = [val, {'units' : '{0}'.format(valUnit), 'tz' : 'UTC', 'type': '{0[type]}'.format(self.fncs_zpl['values'][x])}]
                    fncsTopic = common.FNCS_OUTPUT_PATH(path = 'devices/{0[topic]}'.format(self.fncs_zpl['values'][x])) #fncs/output/devices/topic
                    self.vip.pubsub.publish('pubsub', fncsTopic, headers, fncsmessage).get(timeout=5)
                    _log.debug('FNCS->Volttron:\nTopic:%s\n:Message:%s\n'%(fncsTopic, str(fncsmessage)))
                    # Accumulate per-device points for the '/all' message.
                    device, point = self.fncs_zpl['values'][x]['topic'].rsplit('/', 1)
                    deviceAllTopic = common.FNCS_OUTPUT_PATH(path = 'devices/' + device + '/all')
                    valMap[deviceAllTopic][point] = val
                    metaMap[deviceAllTopic][point] = fncsmessage[1]
                for k in valMap.keys():
                    allMessage = [valMap[k], metaMap[k]]
                    self.vip.pubsub.publish('pubsub', k, headers, allMessage).get(timeout=5)
                    _log.debug('FNCS->Volttron:\nTopic:%s\n:Message:%s\n'%(k, str(allMessage)))
        #Publish heartbeat message to voltron bus
        self.vip.pubsub.publish(
            'pubsub', '{0[name]}/heartbeat'.format(self.fncs_zpl), headers, now).get(timeout=5)
def fncs_bridge(**kwargs):
    """Factory: build a FNCS_VOLTTRON_Bridge from its on-disk config file."""
    config = utils.load_config('FNCS_VOLTTRON_Bridge.config')
    # Optional settings with defaults, then the required sections.
    heartbeat_period = config.get('heartbeat_period', 1)
    heartbeat_multiplier = config.get('heartbeat_multiplier', 1)
    fncs_zpl = config["fncs_zpl"]
    params = config["remote_platform_params"]
    simulation_run_time = config.get("simulation_run_time", "1h")
    bridge = FNCS_VOLTTRON_Bridge(simulation_run_time,
                                  heartbeat_period,
                                  heartbeat_multiplier,
                                  fncs_zpl,
                                  address=remote_url(**params),
                                  identity='FNCS_Volttron_Bridge')
    return bridge
def main():
    '''Main method to start the agent'''
    # Delegate agent lifecycle management to the Volttron VIP helper.
    utils.vip_main(fncs_bridge)
if __name__ == '__main__':
    # Entry point for script
    try:
        sys.exit(main())
    except KeyboardInterrupt:
        # On Ctrl-C, make sure the FNCS broker connection is torn down.
        if fncs.is_initialized():
            fncs.die()
        pass
| bsd-3-clause |
victormwenda/dbcrudgen-cpp | include/google-test/googlemock/scripts/generator/cpp/ast.py | 69 | 62925 | #!/usr/bin/env python
#
# Copyright 2007 Neal Norwitz
# Portions Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate an Abstract Syntax Tree (AST) for C++."""
__author__ = 'nnorwitz@google.com (Neal Norwitz)'
# TODO:
# * Tokens should never be exported, need to convert to Nodes
# (return types, parameters, etc.)
# * Handle static class data for templatized classes
# * Handle casts (both C++ and C-style)
# * Handle conditions and loops (if/else, switch, for, while/do)
#
# TODO much, much later:
# * Handle #define
# * exceptions
try:
# Python 3.x
import builtins
except ImportError:
# Python 2.x
import __builtin__ as builtins
import sys
import traceback
from cpp import keywords
from cpp import tokenize
from cpp import utils
if not hasattr(builtins, 'reversed'):
    # Fallback for Python 2.3 and earlier, which lack builtin reversed().
    def reversed(seq):
        """Yield the items of seq from last to first."""
        index = len(seq) - 1
        while index >= 0:
            yield seq[index]
            index -= 1
if not hasattr(builtins, 'next'):
    # Fallback for Python 2.5 and earlier, which lack builtin next().
    def next(obj):
        """Advance an iterator by calling its legacy next() method."""
        return obj.next()
VISIBILITY_PUBLIC, VISIBILITY_PROTECTED, VISIBILITY_PRIVATE = range(3)
FUNCTION_NONE = 0x00
FUNCTION_CONST = 0x01
FUNCTION_VIRTUAL = 0x02
FUNCTION_PURE_VIRTUAL = 0x04
FUNCTION_CTOR = 0x08
FUNCTION_DTOR = 0x10
FUNCTION_ATTRIBUTE = 0x20
FUNCTION_UNKNOWN_ANNOTATION = 0x40
FUNCTION_THROW = 0x80
FUNCTION_OVERRIDE = 0x100
"""
These are currently unused. Should really handle these properly at some point.
TYPE_MODIFIER_INLINE = 0x010000
TYPE_MODIFIER_EXTERN = 0x020000
TYPE_MODIFIER_STATIC = 0x040000
TYPE_MODIFIER_CONST = 0x080000
TYPE_MODIFIER_REGISTER = 0x100000
TYPE_MODIFIER_VOLATILE = 0x200000
TYPE_MODIFIER_MUTABLE = 0x400000
TYPE_MODIFIER_MAP = {
'inline': TYPE_MODIFIER_INLINE,
'extern': TYPE_MODIFIER_EXTERN,
'static': TYPE_MODIFIER_STATIC,
'const': TYPE_MODIFIER_CONST,
'register': TYPE_MODIFIER_REGISTER,
'volatile': TYPE_MODIFIER_VOLATILE,
'mutable': TYPE_MODIFIER_MUTABLE,
}
"""
_INTERNAL_TOKEN = 'internal'
_NAMESPACE_POP = 'ns-pop'
# TODO(nnorwitz): use this as a singleton for templated_types, etc
# where we don't want to create a new empty dict each time. It is also const.
class _NullDict(object):
__contains__ = lambda self: False
keys = values = items = iterkeys = itervalues = iteritems = lambda self: ()
# TODO(nnorwitz): move AST nodes into a separate module.
class Node(object):
    """Base AST node: a [start, end) span in the token stream."""

    def __init__(self, start, end):
        self.start = start
        self.end = end

    def IsDeclaration(self):
        """Returns bool if this node is a declaration."""
        return False

    def IsDefinition(self):
        """Returns bool if this node is a definition."""
        return False

    def IsExportable(self):
        """Returns bool if this node exportable from a header file."""
        return False

    def Requires(self, node):
        """Does this AST node require the definition of the node passed in?"""
        return False

    def XXX__str__(self):
        return self._StringHelper(self.__class__.__name__, '')

    def _StringHelper(self, name, suffix):
        # Include token positions only when debugging output is enabled.
        if utils.DEBUG:
            return '%s(%d, %d, %s)' % (name, self.start, self.end, suffix)
        return '%s(%s)' % (name, suffix)

    def __repr__(self):
        return str(self)
class Define(Node):
    """A preprocessor #define with its name and replacement text."""

    def __init__(self, start, end, name, definition):
        Node.__init__(self, start, end)
        self.name = name
        self.definition = definition

    def __str__(self):
        body = '%s %s' % (self.name, self.definition)
        return self._StringHelper(self.__class__.__name__, body)
class Include(Node):
    """A preprocessor #include; system is True for <...> includes."""

    def __init__(self, start, end, filename, system):
        Node.__init__(self, start, end)
        self.filename = filename
        self.system = system

    def __str__(self):
        # System headers render as <name>, local headers as "name".
        if self.system:
            fmt = '<%s>'
        else:
            fmt = '"%s"'
        return self._StringHelper(self.__class__.__name__, fmt % self.filename)
class Goto(Node):
    """A goto statement; label is the jump target."""

    def __init__(self, start, end, label):
        Node.__init__(self, start, end)
        self.label = label

    def __str__(self):
        label_text = str(self.label)
        return self._StringHelper(self.__class__.__name__, label_text)
class Expr(Node):
    """An arbitrary expression, kept as its raw token sequence."""

    def __init__(self, start, end, expr):
        Node.__init__(self, start, end)
        self.expr = expr

    def Requires(self, node):
        # TODO(nnorwitz): impl.
        return False

    def __str__(self):
        expr_text = str(self.expr)
        return self._StringHelper(self.__class__.__name__, expr_text)
class Return(Expr):
    """A return statement; the returned expression is in self.expr."""
    pass
class Delete(Expr):
    """A delete expression; the deleted operand is in self.expr."""
    pass
class Friend(Expr):
    """A friend declaration, recorded with its enclosing namespace."""

    def __init__(self, start, end, expr, namespace):
        Expr.__init__(self, start, end, expr)
        # Copy so later namespace-stack mutations don't leak into this node.
        self.namespace = list(namespace)
class Using(Node):
    """A using declaration/directive; names holds the name tokens."""

    def __init__(self, start, end, names):
        Node.__init__(self, start, end)
        self.names = names

    def __str__(self):
        names_text = str(self.names)
        return self._StringHelper(self.__class__.__name__, names_text)
class Parameter(Node):
    """One function parameter: name, Type, and optional default tokens."""

    def __init__(self, start, end, name, parameter_type, default):
        Node.__init__(self, start, end)
        self.name = name
        self.type = parameter_type
        self.default = default

    def Requires(self, node):
        # TODO(nnorwitz): handle namespaces, etc.
        return self.type.name == node.name

    def __str__(self):
        suffix = '%s %s' % (str(self.type), self.name)
        if self.default:
            default_names = [d.name for d in self.default]
            suffix += ' = ' + ''.join(default_names)
        return self._StringHelper(self.__class__.__name__, suffix)
class _GenericDeclaration(Node):
    """Base for declarations that carry a name and a namespace path."""

    def __init__(self, start, end, name, namespace):
        Node.__init__(self, start, end)
        self.name = name
        # Copy: the caller's namespace stack keeps mutating after this.
        self.namespace = list(namespace)

    def FullName(self):
        """Return the name qualified with its namespace, if any."""
        if self.namespace and self.namespace[-1]:
            return '::'.join(self.namespace) + '::' + self.name
        return self.name

    def _TypeStringHelper(self, suffix):
        if self.namespace:
            names = [n or '<anonymous>' for n in self.namespace]
            suffix += ' in ' + '::'.join(names)
        return self._StringHelper(self.__class__.__name__, suffix)
# TODO(nnorwitz): merge with Parameter in some way?
class VariableDeclaration(_GenericDeclaration):
    """A variable declaration with its type and optional initializer."""

    def __init__(self, start, end, name, var_type, initial_value, namespace):
        _GenericDeclaration.__init__(self, start, end, name, namespace)
        self.type = var_type
        self.initial_value = initial_value

    def Requires(self, node):
        # TODO(nnorwitz): handle namespaces, etc.
        return self.type.name == node.name

    def ToString(self):
        """Return a string that tries to reconstitute the variable decl."""
        decl = '%s %s' % (self.type, self.name)
        if self.initial_value:
            decl += ' = ' + self.initial_value
        return decl

    def __str__(self):
        return self._StringHelper(self.__class__.__name__, self.ToString())
class Typedef(_GenericDeclaration):
    """A typedef; alias holds the tokens of the aliased type."""

    def __init__(self, start, end, name, alias, namespace):
        _GenericDeclaration.__init__(self, start, end, name, namespace)
        self.alias = alias

    def IsDefinition(self):
        return True

    def IsExportable(self):
        return True

    def Requires(self, node):
        # TODO(nnorwitz): handle namespaces, etc.
        wanted = node.name
        return any(token is not None and token.name == wanted
                   for token in self.alias)

    def __str__(self):
        return self._TypeStringHelper('%s, %s' % (self.name, self.alias))
class _NestedType(_GenericDeclaration):
    """Base for field-bearing nested types (unions, enums)."""

    def __init__(self, start, end, name, fields, namespace):
        _GenericDeclaration.__init__(self, start, end, name, namespace)
        self.fields = fields

    def IsDefinition(self):
        return True

    def IsExportable(self):
        return True

    def __str__(self):
        return self._TypeStringHelper('%s, {%s}' % (self.name, self.fields))
class Union(_NestedType):
    """A union definition; members are in self.fields."""
    pass
class Enum(_NestedType):
    """An enum definition; enumerators are in self.fields."""
    pass
class Class(_GenericDeclaration):
    """A class declaration or definition (bases/body may be None)."""

    def __init__(self, start, end, name, bases, templated_types, body, namespace):
        _GenericDeclaration.__init__(self, start, end, name, namespace)
        self.bases = bases
        self.body = body
        self.templated_types = templated_types

    def IsDeclaration(self):
        # A forward declaration has neither base list nor body.
        return self.bases is None and self.body is None

    def IsDefinition(self):
        return not self.IsDeclaration()

    def IsExportable(self):
        return not self.IsDeclaration()

    def Requires(self, node):
        # TODO(nnorwitz): handle namespaces, etc.
        for token_list in self.bases or ():
            # TODO(nnorwitz): bases are tokens, do name comparison.
            for token in token_list:
                if token.name == node.name:
                    return True
        # TODO(nnorwitz): search in body too.
        return False

    def __str__(self):
        name = self.name
        if self.templated_types:
            name += '<%s>' % self.templated_types
        return self._TypeStringHelper(
            '%s, %s, %s' % (name, self.bases, self.body))
class Struct(Class):
    """A struct; identical to Class apart from the keyword used."""
    pass
class Function(_GenericDeclaration):
    """A free function declaration or definition."""
    def __init__(self, start, end, name, return_type, parameters,
                 modifiers, templated_types, body, namespace):
        _GenericDeclaration.__init__(self, start, end, name, namespace)
        converter = TypeConverter(namespace)
        # Normalize raw token sequences into Type/Parameter AST nodes.
        self.return_type = converter.CreateReturnType(return_type)
        self.parameters = converter.ToParameters(parameters)
        self.modifiers = modifiers  # bitmask of the FUNCTION_* flags
        self.body = body
        self.templated_types = templated_types
    def IsDeclaration(self):
        return self.body is None
    def IsDefinition(self):
        return self.body is not None
    def IsExportable(self):
        # Static functions and functions inside an anonymous namespace
        # (namespace entry of None) have internal linkage.
        if self.return_type and 'static' in self.return_type.modifiers:
            return False
        return None not in self.namespace
    def Requires(self, node):
        if self.parameters:
            # TODO(nnorwitz): parameters are tokens, do name comparison.
            for p in self.parameters:
                if p.name == node.name:
                    return True
        # TODO(nnorwitz): search in body too.
        return False
    def __str__(self):
        # TODO(nnorwitz): add templated_types.
        suffix = ('%s %s(%s), 0x%02x, %s' %
                  (self.return_type, self.name, self.parameters,
                   self.modifiers, self.body))
        return self._TypeStringHelper(suffix)
class Method(Function):
    """A function declared inside (or qualified by) a class."""
    def __init__(self, start, end, name, in_class, return_type, parameters,
                 modifiers, templated_types, body, namespace):
        Function.__init__(self, start, end, name, return_type, parameters,
                          modifiers, templated_types, body, namespace)
        # TODO(nnorwitz): in_class could also be a namespace which can
        # mess up finding functions properly.
        self.in_class = in_class
class Type(_GenericDeclaration):
    """Type used for any variable (eg class, primitive, struct, etc)."""

    def __init__(self, start, end, name, templated_types, modifiers,
                 reference, pointer, array):
        """
        Args:
          name: str name of main type
          templated_types: [Class (Type?)] template type info between <>
          modifiers: [str] type modifiers (keywords) eg, const, mutable, etc.
          reference, pointer, array: bools
        """
        _GenericDeclaration.__init__(self, start, end, name, [])
        self.templated_types = templated_types
        # An empty name with modifiers present means the last "modifier"
        # is actually the type name.
        if not name and modifiers:
            self.name = modifiers.pop()
        self.modifiers = modifiers
        self.reference = reference
        self.pointer = pointer
        self.array = array

    def __str__(self):
        pieces = []
        if self.modifiers:
            pieces.append(' '.join(self.modifiers))
        name = str(self.name)
        if self.templated_types:
            name += '<%s>' % self.templated_types
        pieces.append(name)
        text = ' '.join(pieces)
        if self.reference:
            text += '&'
        if self.pointer:
            text += '*'
        if self.array:
            text += '[]'
        return self._TypeStringHelper(text)

    # By definition, Is* are always False. A Type can only exist in
    # some sort of variable declaration, parameter, or return value.
    def IsDeclaration(self):
        return False

    def IsDefinition(self):
        return False

    def IsExportable(self):
        return False
class TypeConverter(object):
    """Converts raw token sequences into Type/Parameter AST nodes."""
    def __init__(self, namespace_stack):
        self.namespace_stack = namespace_stack
    def _GetTemplateEnd(self, tokens, start):
        """Return (tokens inside the <...> opened just before start, index
        one past the closing '>'); handles nested templates by counting."""
        count = 1
        end = start
        while 1:
            token = tokens[end]
            end += 1
            if token.name == '<':
                count += 1
            elif token.name == '>':
                count -= 1
                if count == 0:
                    break
        return tokens[start:end-1], end
    def ToType(self, tokens):
        """Convert [Token,...] to [Class(...), ] useful for base classes.
        For example, code like class Foo : public Bar<x, y> { ... };
        the "Bar<x, y>" portion gets converted to an AST.
        Returns:
          [Class(...), ...]
        """
        result = []
        name_tokens = []
        reference = pointer = array = False
        def AddType(templated_types):
            # Partition tokens into name and modifier tokens.
            names = []
            modifiers = []
            for t in name_tokens:
                if keywords.IsKeyword(t.name):
                    modifiers.append(t.name)
                else:
                    names.append(t.name)
            name = ''.join(names)
            if name_tokens:
                result.append(Type(name_tokens[0].start, name_tokens[-1].end,
                                   name, templated_types, modifiers,
                                   reference, pointer, array))
            del name_tokens[:]
        i = 0
        end = len(tokens)
        while i < end:
            token = tokens[i]
            if token.name == '<':
                # Recurse to convert the template argument list.
                new_tokens, new_end = self._GetTemplateEnd(tokens, i+1)
                AddType(self.ToType(new_tokens))
                # If there is a comma after the template, we need to consume
                # that here otherwise it becomes part of the name.
                i = new_end
                reference = pointer = array = False
            elif token.name == ',':
                AddType([])
                reference = pointer = array = False
            elif token.name == '*':
                pointer = True
            elif token.name == '&':
                reference = True
            elif token.name == '[':
                pointer = True
            elif token.name == ']':
                pass
            else:
                name_tokens.append(token)
            i += 1
        if name_tokens:
            # No '<' in the tokens, just a simple name and no template.
            AddType([])
        return result
    def DeclarationToParts(self, parts, needs_name_removed):
        """Split declaration tokens into
        (name, type_name, templated_types, modifiers, default, other_tokens).
        When needs_name_removed is true the declared name is stripped from
        the type tokens (used for parameters and data members)."""
        name = None
        default = []
        if needs_name_removed:
            # Handle default (initial) values properly.
            for i, t in enumerate(parts):
                if t.name == '=':
                    default = parts[i+1:]
                    name = parts[i-1].name
                    if name == ']' and parts[i-2].name == '[':
                        name = parts[i-3].name
                        i -= 1
                    parts = parts[:i-1]
                    break
            else:
                if parts[-1].token_type == tokenize.NAME:
                    name = parts.pop().name
                else:
                    # TODO(nnorwitz): this is a hack that happens for code like
                    # Register(Foo<T>); where it thinks this is a function call
                    # but it's actually a declaration.
                    name = '???'
        modifiers = []
        type_name = []
        other_tokens = []
        templated_types = []
        i = 0
        end = len(parts)
        while i < end:
            p = parts[i]
            if keywords.IsKeyword(p.name):
                modifiers.append(p.name)
            elif p.name == '<':
                templated_tokens, new_end = self._GetTemplateEnd(parts, i+1)
                templated_types = self.ToType(templated_tokens)
                i = new_end - 1
                # Don't add a spurious :: to data members being initialized.
                next_index = i + 1
                if next_index < end and parts[next_index].name == '::':
                    i += 1
            elif p.name in ('[', ']', '='):
                # These are handled elsewhere.
                other_tokens.append(p)
            elif p.name not in ('*', '&', '>'):
                # Ensure that names have a space between them.
                if (type_name and type_name[-1].token_type == tokenize.NAME and
                    p.token_type == tokenize.NAME):
                    type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
                type_name.append(p)
            else:
                other_tokens.append(p)
            i += 1
        type_name = ''.join([t.name for t in type_name])
        return name, type_name, templated_types, modifiers, default, other_tokens
    def ToParameters(self, tokens):
        """Convert the tokens of a parameter list into [Parameter, ...]."""
        if not tokens:
            return []
        result = []
        name = type_name = ''
        type_modifiers = []
        pointer = reference = array = False
        first_token = None
        default = []
        def AddParameter(end):
            # Build one Parameter node from the state accumulated so far.
            if default:
                del default[0]  # Remove flag.
            parts = self.DeclarationToParts(type_modifiers, True)
            (name, type_name, templated_types, modifiers,
             unused_default, unused_other_tokens) = parts
            parameter_type = Type(first_token.start, first_token.end,
                                  type_name, templated_types, modifiers,
                                  reference, pointer, array)
            p = Parameter(first_token.start, end, name,
                          parameter_type, default)
            result.append(p)
        template_count = 0
        for s in tokens:
            if not first_token:
                first_token = s
            # While inside <...>, commas separate template args, not
            # parameters, so just accumulate the tokens.
            if s.name == '<':
                template_count += 1
            elif s.name == '>':
                template_count -= 1
            if template_count > 0:
                type_modifiers.append(s)
                continue
            if s.name == ',':
                AddParameter(s.start)
                name = type_name = ''
                type_modifiers = []
                pointer = reference = array = False
                first_token = None
                default = []
            elif s.name == '*':
                pointer = True
            elif s.name == '&':
                reference = True
            elif s.name == '[':
                array = True
            elif s.name == ']':
                pass  # Just don't add to type_modifiers.
            elif s.name == '=':
                # Got a default value. Add any value (None) as a flag.
                default.append(None)
            elif default:
                default.append(s)
            else:
                type_modifiers.append(s)
        AddParameter(tokens[-1].end)
        return result
    def CreateReturnType(self, return_type_seq):
        """Convert return-type tokens into a single Type node (or None)."""
        if not return_type_seq:
            return None
        start = return_type_seq[0].start
        end = return_type_seq[-1].end
        _, name, templated_types, modifiers, default, other_tokens = \
            self.DeclarationToParts(return_type_seq, False)
        names = [n.name for n in other_tokens]
        reference = '&' in names
        pointer = '*' in names
        array = '[' in names
        return Type(start, end, name, templated_types, modifiers,
                    reference, pointer, array)
    def GetTemplateIndices(self, names):
        # names is a list of strings.
        # Return the slice bounds [start, end) covering '<' .. '>' inclusive.
        start = names.index('<')
        end = len(names) - 1
        while end > 0:
            if names[end] == '>':
                break
            end -= 1
        return start, end+1
class AstBuilder(object):
def __init__(self, token_stream, filename, in_class='', visibility=None,
namespace_stack=[]):
self.tokens = token_stream
self.filename = filename
# TODO(nnorwitz): use a better data structure (deque) for the queue.
# Switching directions of the "queue" improved perf by about 25%.
# Using a deque should be even better since we access from both sides.
self.token_queue = []
self.namespace_stack = namespace_stack[:]
self.in_class = in_class
if in_class is None:
self.in_class_name_only = None
else:
self.in_class_name_only = in_class.split('::')[-1]
self.visibility = visibility
self.in_function = False
self.current_token = None
# Keep the state whether we are currently handling a typedef or not.
self._handling_typedef = False
self.converter = TypeConverter(self.namespace_stack)
def HandleError(self, msg, token):
printable_queue = list(reversed(self.token_queue[-20:]))
sys.stderr.write('Got %s in %s @ %s %s\n' %
(msg, self.filename, token, printable_queue))
def Generate(self):
while 1:
token = self._GetNextToken()
if not token:
break
# Get the next token.
self.current_token = token
# Dispatch on the next token type.
if token.token_type == _INTERNAL_TOKEN:
if token.name == _NAMESPACE_POP:
self.namespace_stack.pop()
continue
try:
result = self._GenerateOne(token)
if result is not None:
yield result
except:
self.HandleError('exception', token)
raise
def _CreateVariable(self, pos_token, name, type_name, type_modifiers,
ref_pointer_name_seq, templated_types, value=None):
reference = '&' in ref_pointer_name_seq
pointer = '*' in ref_pointer_name_seq
array = '[' in ref_pointer_name_seq
var_type = Type(pos_token.start, pos_token.end, type_name,
templated_types, type_modifiers,
reference, pointer, array)
return VariableDeclaration(pos_token.start, pos_token.end,
name, var_type, value, self.namespace_stack)
def _GenerateOne(self, token):
    """Parse one top-level construct starting at *token*.

    Returns an AST node, or None when the construct yields nothing
    (unhandled syntax, skipped preprocessor block, ...).
    """
    if token.token_type == tokenize.NAME:
        if (keywords.IsKeyword(token.name) and
            not keywords.IsBuiltinType(token.name)):
            # Dispatch language keywords (class, struct, typedef, ...) to
            # the matching handle_* method.
            method = getattr(self, 'handle_' + token.name)
            return method()
        elif token.name == self.in_class_name_only:
            # The token name is the same as the class, must be a ctor if
            # there is a paren. Otherwise, it's the return type.
            # Peek ahead to get the next token to figure out which.
            next = self._GetNextToken()
            self._AddBackToken(next)
            if next.token_type == tokenize.SYNTAX and next.name == '(':
                return self._GetMethod([token], FUNCTION_CTOR, None, True)
            # Fall through--handle like any other method.

        # Handle data or function declaration/definition.
        syntax = tokenize.SYNTAX
        temp_tokens, last_token = \
            self._GetVarTokensUpTo(syntax, '(', ';', '{', '[')
        temp_tokens.insert(0, token)
        if last_token.name == '(':
            # If there is an assignment before the paren,
            # this is an expression, not a method.
            expr = bool([e for e in temp_tokens if e.name == '='])
            if expr:
                new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';')
                temp_tokens.append(last_token)
                temp_tokens.extend(new_temp)
                # Synthesize the terminator the code below expects.
                last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)

        if last_token.name == '[':
            # Handle array, this isn't a method, unless it's an operator.
            # TODO(nnorwitz): keep the size somewhere.
            # unused_size = self._GetTokensUpTo(tokenize.SYNTAX, ']')
            temp_tokens.append(last_token)
            if temp_tokens[-2].name == 'operator':
                temp_tokens.append(self._GetNextToken())
            else:
                temp_tokens2, last_token = \
                    self._GetVarTokensUpTo(tokenize.SYNTAX, ';')
                temp_tokens.extend(temp_tokens2)

        if last_token.name == ';':
            # Handle data, this isn't a method.
            parts = self.converter.DeclarationToParts(temp_tokens, True)
            (name, type_name, templated_types, modifiers, default,
             unused_other_tokens) = parts

            t0 = temp_tokens[0]
            names = [t.name for t in temp_tokens]
            if templated_types:
                # Strip the '<...>' span; it is carried in templated_types.
                start, end = self.converter.GetTemplateIndices(names)
                names = names[:start] + names[end:]
            default = ''.join([t.name for t in default])
            return self._CreateVariable(t0, name, type_name, modifiers,
                                        names, templated_types, default)
        if last_token.name == '{':
            self._AddBackTokens(temp_tokens[1:])
            self._AddBackToken(last_token)
            method_name = temp_tokens[0].name
            method = getattr(self, 'handle_' + method_name, None)
            if not method:
                # Must be declaring a variable.
                # TODO(nnorwitz): handle the declaration.
                return None
            return method()
        return self._GetMethod(temp_tokens, 0, None, False)
    elif token.token_type == tokenize.SYNTAX:
        if token.name == '~' and self.in_class:
            # Must be a dtor (probably not in method body).
            token = self._GetNextToken()
            # self.in_class can contain A::Name, but the dtor will only
            # be Name. Make sure to compare against the right value.
            if (token.token_type == tokenize.NAME and
                token.name == self.in_class_name_only):
                return self._GetMethod([token], FUNCTION_DTOR, None, True)
        # TODO(nnorwitz): handle a lot more syntax.
    elif token.token_type == tokenize.PREPROCESSOR:
        # TODO(nnorwitz): handle more preprocessor directives.
        # token starts with a #, so remove it and strip whitespace.
        name = token.name[1:].lstrip()
        if name.startswith('include'):
            # Remove "include".
            name = name[7:].strip()
            assert name
            # Handle #include \<newline> "header-on-second-line.h".
            if name.startswith('\\'):
                name = name[1:].strip()
            assert name[0] in '<"', token
            assert name[-1] in '>"', token
            system = name[0] == '<'
            filename = name[1:-1]
            return Include(token.start, token.end, filename, system)
        if name.startswith('define'):
            # Remove "define".
            name = name[6:].strip()
            assert name
            # Split "NAME VALUE" at the first whitespace run.
            value = ''
            for i, c in enumerate(name):
                if c.isspace():
                    value = name[i:].lstrip()
                    name = name[:i]
                    break
            return Define(token.start, token.end, name, value)
        if name.startswith('if') and name[2:3].isspace():
            condition = name[3:].strip()
            if condition.startswith('0') or condition.startswith('(0)'):
                # '#if 0' block: discard everything up to matching #endif.
                self._SkipIf0Blocks()
    return None
def _GetTokensUpTo(self, expected_token_type, expected_token):
return self._GetVarTokensUpTo(expected_token_type, expected_token)[0]
def _GetVarTokensUpTo(self, expected_token_type, *expected_tokens):
last_token = self._GetNextToken()
tokens = []
while (last_token.token_type != expected_token_type or
last_token.name not in expected_tokens):
tokens.append(last_token)
last_token = self._GetNextToken()
return tokens, last_token
# TODO(nnorwitz): remove _IgnoreUpTo() it shouldn't be necessary.
def _IgnoreUpTo(self, token_type, token):
unused_tokens = self._GetTokensUpTo(token_type, token)
def _SkipIf0Blocks(self):
    """Discard tokens inside '#if 0' ... '#endif', honoring nested #if."""
    count = 1
    while 1:
        token = self._GetNextToken()
        if token.token_type != tokenize.PREPROCESSOR:
            continue

        name = token.name[1:].lstrip()
        if name.startswith('endif'):
            count -= 1
            if count == 0:
                break
        elif name.startswith('if'):
            count += 1

def _GetMatchingChar(self, open_paren, close_paren, GetNextToken=None):
    """Generator yielding tokens up to AND including the balanced closer."""
    if GetNextToken is None:
        GetNextToken = self._GetNextToken
    # Assumes the current token is open_paren and we will consume
    # and return up to the close_paren.
    count = 1
    token = GetNextToken()
    while 1:
        if token.token_type == tokenize.SYNTAX:
            if token.name == open_paren:
                count += 1
            elif token.name == close_paren:
                count -= 1
                if count == 0:
                    break
        yield token
        token = GetNextToken()
    # Emit the final close_paren so callers can see/strip it.
    yield token

def _GetParameters(self):
    # Tokens of a parameter list, including the trailing ')'.
    return self._GetMatchingChar('(', ')')

def GetScope(self):
    # Tokens of a brace-delimited scope, including the trailing '}'.
    return self._GetMatchingChar('{', '}')
def _GetNextToken(self):
if self.token_queue:
return self.token_queue.pop()
return next(self.tokens)
def _AddBackToken(self, token):
    """Push one token back so _GetNextToken() returns it next."""
    if token.whence == tokenize.WHENCE_STREAM:
        token.whence = tokenize.WHENCE_QUEUE
        # The queue is consumed via pop() from the end, so the "front"
        # is index 0; fresh stream tokens are inserted there.
        self.token_queue.insert(0, token)
    else:
        assert token.whence == tokenize.WHENCE_QUEUE, token
        self.token_queue.append(token)

def _AddBackTokens(self, tokens):
    """Push a sequence back in order (first element is read first)."""
    if tokens:
        if tokens[-1].whence == tokenize.WHENCE_STREAM:
            for token in tokens:
                token.whence = tokenize.WHENCE_QUEUE
            # Reversed so pop() yields them in original order.
            self.token_queue[:0] = reversed(tokens)
        else:
            assert tokens[-1].whence == tokenize.WHENCE_QUEUE, tokens
            self.token_queue.extend(reversed(tokens))
def GetName(self, seq=None):
    """Returns ([tokens], next_token_info).

    Collects a (possibly qualified, possibly templated) identifier:
    NAME, '::' and '<...>' runs.  Reads from *seq* when given,
    otherwise from the builder's token stream.
    """
    GetNextToken = self._GetNextToken
    if seq is not None:
        it = iter(seq)
        GetNextToken = lambda: next(it)
    next_token = GetNextToken()
    tokens = []
    last_token_was_name = False
    while (next_token.token_type == tokenize.NAME or
           (next_token.token_type == tokenize.SYNTAX and
            next_token.name in ('::', '<'))):
        # Two NAMEs in a row means the identifier should terminate.
        # It's probably some sort of variable declaration.
        if last_token_was_name and next_token.token_type == tokenize.NAME:
            break
        last_token_was_name = next_token.token_type == tokenize.NAME
        tokens.append(next_token)
        # Handle templated names.
        if next_token.name == '<':
            tokens.extend(self._GetMatchingChar('<', '>', GetNextToken))
            # A closed template argument list ends a "name" unit too.
            last_token_was_name = True
        next_token = GetNextToken()
    return tokens, next_token

def GetMethod(self, modifiers, templated_types):
    """Parse a method/function starting at the current stream position."""
    return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(')
    assert len(return_type_and_name) >= 1
    return self._GetMethod(return_type_and_name, modifiers, templated_types,
                           False)
def _GetMethod(self, return_type_and_name, modifiers, templated_types,
               get_paren):
    """Parse a method/function declaration or definition.

    Args:
        return_type_and_name: tokens read so far (return type + name).
        modifiers: FUNCTION_* bitmask accumulated by the caller.
        templated_types: template parameters dict, or None.
        get_paren: True when the '(' has not been consumed yet
            (ctor/dtor paths).

    Returns a Method or Function node, or a VariableDeclaration for
    data that merely looks like a method (function pointers).
    """
    template_portion = None
    if get_paren:
        token = self._GetNextToken()
        assert token.token_type == tokenize.SYNTAX, token
        if token.name == '<':
            # Handle templatized dtors.
            template_portion = [token]
            template_portion.extend(self._GetMatchingChar('<', '>'))
            token = self._GetNextToken()
        assert token.token_type == tokenize.SYNTAX, token
        assert token.name == '(', token

    name = return_type_and_name.pop()
    # Handle templatized ctors.
    if name.name == '>':
        index = 1
        while return_type_and_name[index].name != '<':
            index += 1
        template_portion = return_type_and_name[index:] + [name]
        del return_type_and_name[index:]
        name = return_type_and_name.pop()
    elif name.name == ']':
        # operator[]: the name spans 'operator' '[' ']'.
        rt = return_type_and_name
        assert rt[-1].name == '[', return_type_and_name
        assert rt[-2].name == 'operator', return_type_and_name
        name_seq = return_type_and_name[-2:]
        del return_type_and_name[-2:]
        name = tokenize.Token(tokenize.NAME, 'operator[]',
                              name_seq[0].start, name.end)
        # Get the open paren so _GetParameters() below works.
        unused_open_paren = self._GetNextToken()

    # TODO(nnorwitz): store template_portion.
    return_type = return_type_and_name
    # 'indices' is only used for its start/end position information.
    indices = name
    if return_type:
        indices = return_type[0]

    # Force ctor for templatized ctors.
    if name.name == self.in_class and not modifiers:
        modifiers |= FUNCTION_CTOR
    parameters = list(self._GetParameters())
    del parameters[-1]              # Remove trailing ')'.

    # Handling operator() is especially weird.
    if name.name == 'operator' and not parameters:
        token = self._GetNextToken()
        assert token.name == '(', token
        parameters = list(self._GetParameters())
        del parameters[-1]          # Remove trailing ')'.

    # Accumulate post-parameter modifiers (const, throw(...), etc.).
    token = self._GetNextToken()
    while token.token_type == tokenize.NAME:
        modifier_token = token
        token = self._GetNextToken()
        if modifier_token.name == 'const':
            modifiers |= FUNCTION_CONST
        elif modifier_token.name == '__attribute__':
            # TODO(nnorwitz): handle more __attribute__ details.
            modifiers |= FUNCTION_ATTRIBUTE
            assert token.name == '(', token
            # Consume everything between the (parens).
            unused_tokens = list(self._GetMatchingChar('(', ')'))
            token = self._GetNextToken()
        elif modifier_token.name == 'throw':
            modifiers |= FUNCTION_THROW
            assert token.name == '(', token
            # Consume everything between the (parens).
            unused_tokens = list(self._GetMatchingChar('(', ')'))
            token = self._GetNextToken()
        elif modifier_token.name == 'override':
            modifiers |= FUNCTION_OVERRIDE
        elif modifier_token.name == modifier_token.name.upper():
            # HACK(nnorwitz): assume that all upper-case names
            # are some macro we aren't expanding.
            modifiers |= FUNCTION_UNKNOWN_ANNOTATION
        else:
            self.HandleError('unexpected token', modifier_token)

    assert token.token_type == tokenize.SYNTAX, token
    # Handle ctor initializers.
    if token.name == ':':
        # TODO(nnorwitz): anything else to handle for initializer list?
        while token.name != ';' and token.name != '{':
            token = self._GetNextToken()

    # Handle pointer to functions that are really data but look
    # like method declarations.
    if token.name == '(':
        if parameters[0].name == '*':
            # name contains the return type.
            name = parameters.pop()
            # parameters contains the name of the data.
            modifiers = [p.name for p in parameters]
            # Already at the ( to open the parameter list.
            function_parameters = list(self._GetMatchingChar('(', ')'))
            del function_parameters[-1]        # Remove trailing ')'.
            # TODO(nnorwitz): store the function_parameters.
            token = self._GetNextToken()
            assert token.token_type == tokenize.SYNTAX, token
            assert token.name == ';', token
            return self._CreateVariable(indices, name.name, indices.name,
                                        modifiers, '', None)
        # At this point, we got something like:
        #  return_type (type::*name_)(params);
        # This is a data member called name_ that is a function pointer.
        # With this code: void (sq_type::*field_)(string&);
        # We get: name=void return_type=[] parameters=sq_type ... field_
        # TODO(nnorwitz): is return_type always empty?
        # TODO(nnorwitz): this isn't even close to being correct.
        # Just put in something so we don't crash and can move on.
        real_name = parameters[-1]
        modifiers = [p.name for p in self._GetParameters()]
        del modifiers[-1]           # Remove trailing ')'.
        return self._CreateVariable(indices, real_name.name, indices.name,
                                    modifiers, '', None)

    if token.name == '{':
        body = list(self.GetScope())
        del body[-1]                # Remove trailing '}'.
    else:
        body = None
        if token.name == '=':
            token = self._GetNextToken()

            if token.name == 'default' or token.name == 'delete':
                # Ignore explicitly defaulted and deleted special members
                # in C++11.
                token = self._GetNextToken()
            else:
                # Handle pure-virtual declarations.
                assert token.token_type == tokenize.CONSTANT, token
                assert token.name == '0', token
                modifiers |= FUNCTION_PURE_VIRTUAL
                token = self._GetNextToken()

        if token.name == '[':
            # TODO(nnorwitz): store tokens and improve parsing.
            # template <typename T, size_t N> char (&ASH(T (&seq)[N]))[N];
            tokens = list(self._GetMatchingChar('[', ']'))
            token = self._GetNextToken()

        assert token.name == ';', (token, return_type_and_name, parameters)

    # Looks like we got a method, not a function.
    if len(return_type) > 2 and return_type[-1].name == '::':
        return_type, in_class = \
            self._GetReturnTypeAndClassName(return_type)
        return Method(indices.start, indices.end, name.name, in_class,
                      return_type, parameters, modifiers, templated_types,
                      body, self.namespace_stack)
    return Function(indices.start, indices.end, name.name, return_type,
                    parameters, modifiers, templated_types, body,
                    self.namespace_stack)
def _GetReturnTypeAndClassName(self, token_seq):
    """Split tokens into (return_type_tokens, class_name_tokens)."""
    # Splitting the return type from the class name in a method
    # can be tricky. For example, Return::Type::Is::Hard::To::Find().
    # Where is the return type and where is the class name?
    # The heuristic used is to pull the last name as the class name.
    # This includes all the templated type info.
    # TODO(nnorwitz): if there is only One name like in the
    # example above, punt and assume the last bit is the class name.

    # Ignore a :: prefix, if exists so we can find the first real name.
    i = 0
    if token_seq[0].name == '::':
        i = 1
    # Ignore a :: suffix, if exists.
    # NOTE(review): this tests index end-1 rather than end; looks like it
    # may be off by one for a trailing '::' -- preserved as-is, confirm.
    end = len(token_seq) - 1
    if token_seq[end-1].name == '::':
        end -= 1

    # Make a copy of the sequence so we can append a sentinel
    # value. This is required because GetName has to have some
    # terminating condition beyond the last name.
    seq_copy = token_seq[i:end]
    seq_copy.append(tokenize.Token(tokenize.SYNTAX, '', 0, 0))
    names = []
    while i < end:
        # Iterate through the sequence parsing out each name.
        new_name, next = self.GetName(seq_copy[i:])
        assert new_name, 'Got empty new_name, next=%s' % next
        # We got a pointer or ref. Add it to the name.
        if next and next.token_type == tokenize.SYNTAX:
            new_name.append(next)
        names.append(new_name)
        i += len(new_name)

    # Now that we have the names, it's time to undo what we did.

    # Remove the sentinel value.
    names[-1].pop()
    # Flatten the token sequence for the return type.
    return_type = [e for seq in names[:-1] for e in seq]
    # The class name is the last name.
    class_name = names[-1]
    return return_type, class_name
# Builtin type keywords carry no declaration information on their own;
# they are consumed as part of the surrounding declaration, so these
# handlers intentionally do nothing.
def handle_bool(self):
    pass

def handle_char(self):
    pass

def handle_int(self):
    pass

def handle_long(self):
    pass

def handle_short(self):
    pass

def handle_double(self):
    pass

def handle_float(self):
    pass

def handle_void(self):
    pass

def handle_wchar_t(self):
    pass

def handle_unsigned(self):
    pass

def handle_signed(self):
    pass
def _GetNestedType(self, ctor):
    """Parse a nested type (union/enum): forward declaration, full body,
    or anonymous type used directly in a variable declaration.

    *ctor* is the node constructor (e.g. Union, Enum).
    """
    name = None
    name_tokens, token = self.GetName()
    if name_tokens:
        name = ''.join([t.name for t in name_tokens])

    # Handle forward declarations.
    if token.token_type == tokenize.SYNTAX and token.name == ';':
        return ctor(token.start, token.end, name, None,
                    self.namespace_stack)

    if token.token_type == tokenize.NAME and self._handling_typedef:
        self._AddBackToken(token)
        return ctor(token.start, token.end, name, None,
                    self.namespace_stack)

    # Must be the type declaration.
    fields = list(self._GetMatchingChar('{', '}'))
    del fields[-1]                  # Remove trailing '}'.
    if token.token_type == tokenize.SYNTAX and token.name == '{':
        next = self._GetNextToken()
        new_type = ctor(token.start, token.end, name, fields,
                        self.namespace_stack)
        # A name means this is an anonymous type and the name
        # is the variable declaration.
        if next.token_type != tokenize.NAME:
            return new_type
        name = new_type
        token = next

    # Must be variable declaration using the type prefixed with keyword.
    assert token.token_type == tokenize.NAME, token
    return self._CreateVariable(token, token.name, name, [], '', None)
def handle_struct(self):
    """Parse a 'struct' use: variable declaration, method returning a
    struct, or a full struct definition (delegated to _GetClass)."""
    # Special case the handling typedef/aliasing of structs here.
    # It would be a pain to handle in the class code.
    name_tokens, var_token = self.GetName()
    if name_tokens:
        next_token = self._GetNextToken()
        is_syntax = (var_token.token_type == tokenize.SYNTAX and
                     var_token.name[0] in '*&')
        is_variable = (var_token.token_type == tokenize.NAME and
                       next_token.name == ';')
        variable = var_token
        if is_syntax and not is_variable:
            variable = next_token
            temp = self._GetNextToken()
            if temp.token_type == tokenize.SYNTAX and temp.name == '(':
                # Handle methods declared to return a struct.
                t0 = name_tokens[0]
                # Synthesize a 'struct' token positioned just before the name.
                struct = tokenize.Token(tokenize.NAME, 'struct',
                                        t0.start-7, t0.start-2)
                type_and_name = [struct]
                type_and_name.extend(name_tokens)
                type_and_name.extend((var_token, next_token))
                return self._GetMethod(type_and_name, 0, None, False)
            assert temp.name == ';', (temp, name_tokens, var_token)
        if is_syntax or (is_variable and not self._handling_typedef):
            modifiers = ['struct']
            type_name = ''.join([t.name for t in name_tokens])
            position = name_tokens[0]
            return self._CreateVariable(position, variable.name, type_name,
                                        modifiers, var_token.name, None)
        name_tokens.extend((var_token, next_token))
        self._AddBackTokens(name_tokens)
    else:
        self._AddBackToken(var_token)
    return self._GetClass(Struct, VISIBILITY_PUBLIC, None)
def handle_union(self):
    # Unions parse like the other nested types.
    return self._GetNestedType(Union)

def handle_enum(self):
    token = self._GetNextToken()
    if not (token.token_type == tokenize.NAME and token.name == 'class'):
        # Plain enum: put the token back.  A scoped 'enum class'
        # consumes the 'class' token here.
        self._AddBackToken(token)
    return self._GetNestedType(Enum)

def handle_auto(self):
    # TODO(nnorwitz): warn about using auto? Probably not since it
    # will be reclaimed and useful for C++0x.
    pass

def handle_register(self):
    pass

def handle_const(self):
    pass

def handle_inline(self):
    pass

def handle_extern(self):
    pass

def handle_static(self):
    pass
def handle_virtual(self):
    """Parse a declaration that begins with 'virtual' (must be a method)."""
    # What follows must be a method.
    token = token2 = self._GetNextToken()
    if token.name == 'inline':
        # HACK(nnorwitz): handle inline dtors by ignoring 'inline'.
        token2 = self._GetNextToken()
    if token2.token_type == tokenize.SYNTAX and token2.name == '~':
        return self.GetMethod(FUNCTION_VIRTUAL + FUNCTION_DTOR, None)
    assert token.token_type == tokenize.NAME or token.name == '::', token
    return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(')  # )
    return_type_and_name.insert(0, token)
    if token2 is not token:
        # Re-insert the token read past 'inline'.
        return_type_and_name.insert(1, token2)
    return self._GetMethod(return_type_and_name, FUNCTION_VIRTUAL,
                           None, False)
def handle_volatile(self):
    pass

def handle_mutable(self):
    pass

def handle_public(self):
    # Access specifiers are only meaningful inside a class body.
    assert self.in_class
    self.visibility = VISIBILITY_PUBLIC

def handle_protected(self):
    assert self.in_class
    self.visibility = VISIBILITY_PROTECTED

def handle_private(self):
    assert self.in_class
    self.visibility = VISIBILITY_PRIVATE

def handle_friend(self):
    # Record the entire friend declaration verbatim as a Friend node.
    tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
    assert tokens
    t0 = tokens[0]
    return Friend(t0.start, t0.end, tokens, self.namespace_stack)
def handle_static_cast(self):
    pass

def handle_const_cast(self):
    pass

def handle_dynamic_cast(self):
    pass

def handle_reinterpret_cast(self):
    pass

def handle_new(self):
    pass

def handle_delete(self):
    # Record the statement's tokens as a Delete node.
    tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
    assert tokens
    return Delete(tokens[0].start, tokens[0].end, tokens)

def handle_typedef(self):
    """Parse a typedef and return a Typedef node."""
    token = self._GetNextToken()
    if (token.token_type == tokenize.NAME and
        keywords.IsKeyword(token.name)):
        # Token must be struct/enum/union/class.
        method = getattr(self, 'handle_' + token.name)
        self._handling_typedef = True
        tokens = [method()]
        self._handling_typedef = False
    else:
        tokens = [token]

    # Get the remainder of the typedef up to the semi-colon.
    tokens.extend(self._GetTokensUpTo(tokenize.SYNTAX, ';'))

    # TODO(nnorwitz): clean all this up.
    assert tokens
    name = tokens.pop()
    indices = name
    if tokens:
        indices = tokens[0]
    if not indices:
        indices = token
    if name.name == ')':
        # HACK(nnorwitz): Handle pointers to functions "properly".
        if (len(tokens) >= 4 and
            tokens[1].name == '(' and tokens[2].name == '*'):
            tokens.append(name)
            name = tokens[3]
    elif name.name == ']':
        # HACK(nnorwitz): Handle arrays properly.
        if len(tokens) >= 2:
            tokens.append(name)
            name = tokens[1]
    new_type = tokens
    if tokens and isinstance(tokens[0], tokenize.Token):
        new_type = self.converter.ToType(tokens)[0]
    return Typedef(indices.start, indices.end, name.name,
                   new_type, self.namespace_stack)

def handle_typeid(self):
    pass  # Not needed yet.

def handle_typename(self):
    pass  # Not needed yet.
def _GetTemplatedTypes(self):
    """Parse '<...>' template parameters into {name: (type, default)}."""
    result = {}
    tokens = list(self._GetMatchingChar('<', '>'))
    len_tokens = len(tokens) - 1    # Ignore trailing '>'.
    i = 0
    while i < len_tokens:
        key = tokens[i].name
        i += 1
        if keywords.IsKeyword(key) or key == ',':
            continue
        type_name = default = None
        if i < len_tokens:
            i += 1
            if tokens[i-1].name == '=':
                # Parameter with a default: "T = Default".
                assert i < len_tokens, '%s %s' % (i, tokens)
                default, unused_next_token = self.GetName(tokens[i:])
                i += len(default)
            else:
                if tokens[i-1].name != ',':
                    # We got something like: Type variable.
                    # Re-adjust the key (variable) and type_name (Type).
                    key = tokens[i-1].name
                    type_name = tokens[i-2]
        result[key] = (type_name, default)
    return result
def handle_template(self):
    """Parse a template declaration: class/struct/friend or a templated
    function/method; templated variables are currently ignored."""
    token = self._GetNextToken()
    assert token.token_type == tokenize.SYNTAX, token
    assert token.name == '<', token
    templated_types = self._GetTemplatedTypes()
    # TODO(nnorwitz): for now, just ignore the template params.
    token = self._GetNextToken()
    if token.token_type == tokenize.NAME:
        if token.name == 'class':
            return self._GetClass(Class, VISIBILITY_PRIVATE, templated_types)
        elif token.name == 'struct':
            return self._GetClass(Struct, VISIBILITY_PUBLIC, templated_types)
        elif token.name == 'friend':
            return self.handle_friend()
    self._AddBackToken(token)
    # Peek ahead to distinguish a function ('(') from a variable (';').
    tokens, last = self._GetVarTokensUpTo(tokenize.SYNTAX, '(', ';')
    tokens.append(last)
    self._AddBackTokens(tokens)
    if last.name == '(':
        return self.GetMethod(FUNCTION_NONE, templated_types)
    # Must be a variable definition.
    return None

def handle_true(self):
    pass  # Nothing to do.

def handle_false(self):
    pass  # Nothing to do.

def handle_asm(self):
    pass  # Not needed yet.

def handle_class(self):
    # Top-level 'class': members default to private visibility.
    return self._GetClass(Class, VISIBILITY_PRIVATE, None)
def _GetBases(self):
    """Parse the base-class list; returns (bases, token) where *token*
    is the '{' opening the class body."""
    # Get base classes.
    bases = []
    while 1:
        token = self._GetNextToken()
        assert token.token_type == tokenize.NAME, token
        # TODO(nnorwitz): store kind of inheritance...maybe.
        if token.name not in ('public', 'protected', 'private'):
            # If inheritance type is not specified, it is private.
            # Just put the token back so we can form a name.
            # TODO(nnorwitz): it would be good to warn about this.
            self._AddBackToken(token)
        else:
            # Check for virtual inheritance.
            token = self._GetNextToken()
            if token.name != 'virtual':
                self._AddBackToken(token)
            else:
                # TODO(nnorwitz): store that we got virtual for this base.
                pass
        base, next_token = self.GetName()
        bases_ast = self.converter.ToType(base)
        assert len(bases_ast) == 1, bases_ast
        bases.append(bases_ast[0])
        assert next_token.token_type == tokenize.SYNTAX, next_token
        if next_token.name == '{':
            token = next_token
            break
        # Support multiple inheritance.
        assert next_token.name == ',', next_token
    return bases, token
def _GetClass(self, class_type, visibility, templated_types):
    """Parse a class/struct: name, bases and body.

    Returns a *class_type* node, a VariableDeclaration for inline
    'class X var;' / anonymous-class variables, or a Method node when
    the 'class' keyword prefixed a method's return type.
    """
    class_name = None
    class_token = self._GetNextToken()
    if class_token.token_type != tokenize.NAME:
        assert class_token.token_type == tokenize.SYNTAX, class_token
        token = class_token
    else:
        # Skip any macro (e.g. storage class specifiers) after the
        # 'class' keyword.
        next_token = self._GetNextToken()
        if next_token.token_type == tokenize.NAME:
            self._AddBackToken(next_token)
        else:
            self._AddBackTokens([class_token, next_token])
        name_tokens, token = self.GetName()
        class_name = ''.join([t.name for t in name_tokens])
    bases = None
    if token.token_type == tokenize.SYNTAX:
        if token.name == ';':
            # Forward declaration.
            return class_type(class_token.start, class_token.end,
                              class_name, None, templated_types, None,
                              self.namespace_stack)
        if token.name in '*&':
            # Inline forward declaration. Could be method or data.
            name_token = self._GetNextToken()
            next_token = self._GetNextToken()
            if next_token.name == ';':
                # Handle data
                modifiers = ['class']
                return self._CreateVariable(class_token, name_token.name,
                                            class_name,
                                            modifiers, token.name, None)
            else:
                # Assume this is a method.
                tokens = (class_token, token, name_token, next_token)
                self._AddBackTokens(tokens)
                return self.GetMethod(FUNCTION_NONE, None)
        if token.name == ':':
            bases, token = self._GetBases()

    body = None
    if token.token_type == tokenize.SYNTAX and token.name == '{':
        assert token.token_type == tokenize.SYNTAX, token
        assert token.name == '{', token

        # Recursively parse the class body with a nested builder.
        ast = AstBuilder(self.GetScope(), self.filename, class_name,
                         visibility, self.namespace_stack)
        body = list(ast.Generate())

        if not self._handling_typedef:
            token = self._GetNextToken()
            if token.token_type != tokenize.NAME:
                assert token.token_type == tokenize.SYNTAX, token
                assert token.name == ';', token
            else:
                # 'class {...} var;' -- the trailing NAME is a variable
                # whose type is the class just parsed.
                new_class = class_type(class_token.start, class_token.end,
                                       class_name, bases, None,
                                       body, self.namespace_stack)

                modifiers = []
                return self._CreateVariable(class_token,
                                            token.name, new_class,
                                            modifiers, token.name, None)
    else:
        if not self._handling_typedef:
            self.HandleError('non-typedef token', token)
        self._AddBackToken(token)

    return class_type(class_token.start, class_token.end, class_name,
                      bases, templated_types, body, self.namespace_stack)
def handle_namespace(self):
    """Parse a namespace; pushes the name onto namespace_stack and plants
    an internal pop token so Generate() unwinds at the closing brace."""
    token = self._GetNextToken()
    # Support anonymous namespaces.
    name = None
    if token.token_type == tokenize.NAME:
        name = token.name
        token = self._GetNextToken()
    self.namespace_stack.append(name)
    assert token.token_type == tokenize.SYNTAX, token
    # Create an internal token that denotes when the namespace is complete.
    internal_token = tokenize.Token(_INTERNAL_TOKEN, _NAMESPACE_POP,
                                    None, None)
    internal_token.whence = token.whence
    if token.name == '=':
        # TODO(nnorwitz): handle aliasing namespaces.
        name, next_token = self.GetName()
        assert next_token.name == ';', next_token
        self._AddBackToken(internal_token)
    else:
        assert token.name == '{', token
        tokens = list(self.GetScope())
        # Replace the trailing } with the internal namespace pop token.
        tokens[-1] = internal_token
        # Handle namespace with nothing in it.
        self._AddBackTokens(tokens)
    return None

def handle_using(self):
    # Record using declarations/directives verbatim.
    tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
    assert tokens
    return Using(tokens[0].start, tokens[0].end, tokens)

def handle_explicit(self):
    assert self.in_class
    # Nothing much to do.
    # TODO(nnorwitz): maybe verify the method name == class name.
    # This must be a ctor.
    return self.GetMethod(FUNCTION_CTOR, None)

def handle_this(self):
    pass  # Nothing to do.

def handle_operator(self):
    # Pull off the next token(s?) and make that part of the method name.
    pass

def handle_sizeof(self):
    pass

def handle_case(self):
    pass

def handle_switch(self):
    pass

def handle_default(self):
    # 'default:' label inside a switch; just consume the colon.
    token = self._GetNextToken()
    assert token.token_type == tokenize.SYNTAX
    assert token.name == ':'

def handle_if(self):
    pass

def handle_else(self):
    pass

def handle_return(self):
    tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
    if not tokens:
        # Bare 'return;'
        return Return(self.current_token.start, self.current_token.end, None)
    return Return(tokens[0].start, tokens[0].end, tokens)

def handle_goto(self):
    tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
    assert len(tokens) == 1, str(tokens)
    return Goto(tokens[0].start, tokens[0].end, tokens[0].name)

def handle_try(self):
    pass  # Not needed yet.

def handle_catch(self):
    pass  # Not needed yet.

def handle_throw(self):
    pass  # Not needed yet.

def handle_while(self):
    pass

def handle_do(self):
    pass

def handle_for(self):
    pass

def handle_break(self):
    self._IgnoreUpTo(tokenize.SYNTAX, ';')

def handle_continue(self):
    self._IgnoreUpTo(tokenize.SYNTAX, ';')
def BuilderFromSource(source, filename):
    """Create an AstBuilder over the token stream of *source*.

    Args:
      source: 'C++ source code'
      filename: 'file1'

    Returns:
      AstBuilder
    """
    token_stream = tokenize.GetTokens(source)
    return AstBuilder(token_stream, filename)
def PrintIndentifiers(filename, should_print):
    """Prints all identifiers for a C++ source file.

    NOTE: the historical misspelling ('Indentifiers') is kept for
    backward compatibility with existing callers.

    Args:
      filename: 'file1'
      should_print: predicate with signature: bool Function(token)
    """
    source = utils.ReadFile(filename, False)
    if source is None:
        sys.stderr.write('Unable to find: %s\n' % filename)
        return

    #print('Processing %s' % actual_filename)
    builder = BuilderFromSource(source, filename)
    try:
        for node in builder.Generate():
            if should_print(node):
                print(node.name)
    except KeyboardInterrupt:
        return
    except Exception:
        # Parsing is best-effort here, so parse failures are ignored --
        # but unlike the old bare 'except:', SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        pass
def PrintAllIndentifiers(filenames, should_print):
    """Prints all identifiers for each C++ source file in filenames.

    Args:
      filenames: ['file1', 'file2', ...]
      should_print: predicate with signature: bool Function(token)
    """
    for source_path in filenames:
        PrintIndentifiers(source_path, should_print)
def main(argv):
    """Parse each file named in argv and, when utils.DEBUG, dump its AST."""
    for filename in argv[1:]:
        source = utils.ReadFile(filename)
        if source is None:
            continue

        print('Processing %s' % filename)
        builder = BuilderFromSource(source, filename)
        try:
            # Materialize eagerly: filter() is lazy on Python 3, so without
            # forcing the generator here, parse errors would escape this
            # try block and only surface during the DEBUG loop below.
            entire_ast = [node for node in builder.Generate() if node]
        except KeyboardInterrupt:
            return
        except:
            # Already printed a warning, print the traceback and continue.
            traceback.print_exc()
        else:
            if utils.DEBUG:
                for ast in entire_ast:
                    print(ast)


if __name__ == '__main__':
    main(sys.argv)
"""
Tests for comprehensive themes.
"""
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.contrib import staticfiles
from openedx.core.djangoapps.theming.tests.test_util import with_comprehensive_theme
from openedx.core.djangolib.testing.utils import skip_unless_cms, skip_unless_lms
from student.tests.factories import UserFactory
@skip_unless_lms
class TestComprehensiveThemeLMS(TestCase):
    """
    Test html, sass and static file overrides for comprehensive themes.
    """
    # NOTE(review): these tests assume the "test-theme" (and "edx.org")
    # theme fixtures referenced by with_comprehensive_theme are available
    # in the test environment -- confirm against the theming test setup.

    def setUp(self):
        """
        Clear static file finders cache and register cleanup methods.
        """
        super(TestComprehensiveThemeLMS, self).setUp()
        self.user = UserFactory()

        # Clear the internal staticfiles caches, to get test isolation.
        staticfiles.finders.get_finder.cache_clear()

    def _login(self):
        """ Log into LMS. """
        self.client.login(username=self.user.username, password='test')

    @with_comprehensive_theme("test-theme")
    def test_footer(self):
        """
        Test that theme footer is used instead of default footer.
        """
        resp = self.client.get('/')
        self.assertEqual(resp.status_code, 200)
        # This string comes from header.html of test-theme
        self.assertContains(resp, "This is a footer for test-theme.")

    @with_comprehensive_theme("edx.org")
    def test_account_settings_hide_nav(self):
        """
        Test that theme header doesn't show marketing site links for Account Settings page.
        """
        self._login()
        account_settings_url = reverse('account_settings')
        response = self.client.get(account_settings_url)
        # Verify that the header navigation links are hidden for the edx.org version
        self.assertNotContains(response, "How it Works")
        self.assertNotContains(response, "Find courses")
        self.assertNotContains(response, "Schools & Partners")

    @with_comprehensive_theme("test-theme")
    def test_logo_image(self):
        """
        Test that theme logo is used instead of default logo.
        """
        result = staticfiles.finders.find('test-theme/images/logo.png')
        self.assertEqual(result, settings.TEST_THEME / 'lms/static/images/logo.png')

    @with_comprehensive_theme("test-theme")
    def test_override_block_in_parent(self):
        """
        Test that theme title is used instead of parent title.
        """
        self._login()
        dashboard_url = reverse('dashboard')
        resp = self.client.get(dashboard_url)
        self.assertEqual(resp.status_code, 200)
        # This string comes from the 'pagetitle' block of the overriding theme.
        self.assertContains(resp, "Overridden Title!")

    @with_comprehensive_theme("test-theme")
    def test_override_block_in_grandparent(self):
        """
        Test that theme title is used instead of parent's parent's title.
        """
        self._login()
        dashboard_url = reverse('dashboard')
        resp = self.client.get(dashboard_url)
        self.assertEqual(resp.status_code, 200)
        # This string comes from the 'bodyextra' block of the overriding theme.
        self.assertContains(resp, "Overriden Body Extra!")

    @with_comprehensive_theme("test-theme")
    def test_parent_content_in_self_inherited_template(self):
        """
        Test that parent's body is present in self inherited template.
        """
        self._login()
        dashboard_url = reverse('dashboard')
        resp = self.client.get(dashboard_url)
        self.assertEqual(resp.status_code, 200)
        # This string comes from the default dashboard.html template.
        self.assertContains(resp, "Explore courses")

    @with_comprehensive_theme("test-theme")
    def test_include_default_template(self):
        """
        Test that theme template can include template which is not part of the theme.
        """
        self._login()
        courses_url = reverse('courses')
        resp = self.client.get(courses_url)
        self.assertEqual(resp.status_code, 200)
        # The courses.html template includes the error-message.html template.
        # Verify that the error message is included in the output.
        self.assertContains(resp, "this module is temporarily unavailable")

    @with_comprehensive_theme("test-theme")
    def test_include_overridden_template(self):
        """
        Test that theme template can include template which is overridden in the active theme.
        """
        self._login()
        courses_url = reverse('courses')
        resp = self.client.get(courses_url)
        self.assertEqual(resp.status_code, 200)
        # The courses.html template includes the info.html file, which is overriden in the theme.
        self.assertContains(resp, "This overrides the courseware/info.html template.")

    @with_comprehensive_theme("test-theme")
    def test_include_custom_template(self):
        """
        Test that theme template can include template which is only present in the theme, but has no standard LMS
        equivalent.
        """
        self._login()
        courses_url = reverse('courses')
        resp = self.client.get(courses_url)
        self.assertEqual(resp.status_code, 200)
        # The courses.html template includes the test-theme.custom.html file.
        # Verify its contents are present in the output.
        self.assertContains(resp, "This is a custom template.")
@skip_unless_cms
class TestComprehensiveThemeCMS(TestCase):
    """
    Test html, sass and static file overrides for comprehensive themes
    on the CMS (Studio) side.
    """

    def setUp(self):
        """
        Clear static file finders cache and register cleanup methods.
        """
        super(TestComprehensiveThemeCMS, self).setUp()
        # Clear the internal staticfiles caches, to get test isolation.
        staticfiles.finders.get_finder.cache_clear()

    @with_comprehensive_theme("test-theme")
    def test_template_override(self):
        """
        Test that theme templates are used instead of default templates.
        """
        # '/signin' is the Studio login page.
        resp = self.client.get('/signin')
        self.assertEqual(resp.status_code, 200)
        # This string comes from login.html of test-theme
        self.assertContains(resp, "Login Page override for test-theme.")
@skip_unless_lms
class TestComprehensiveThemeDisabledLMS(TestCase):
    """
    Test that default static assets are served on the LMS when no
    comprehensive theme is enabled.
    """

    def setUp(self):
        """
        Clear static file finders cache.
        """
        super(TestComprehensiveThemeDisabledLMS, self).setUp()
        # Clear the internal staticfiles caches, to get test isolation.
        staticfiles.finders.get_finder.cache_clear()

    def test_logo(self):
        """
        Test that default logo is picked in case of no comprehensive theme.
        """
        result = staticfiles.finders.find('images/logo.png')
        self.assertEqual(result, settings.REPO_ROOT / 'lms/static/images/logo.png')
@skip_unless_cms
class TestComprehensiveThemeDisabledCMS(TestCase):
    """
    Test default html, sass and static file when no theme is applied
    on the CMS (Studio) side.
    """

    def setUp(self):
        """
        Clear static file finders cache and register cleanup methods.
        """
        super(TestComprehensiveThemeDisabledCMS, self).setUp()
        # Clear the internal staticfiles caches, to get test isolation.
        staticfiles.finders.get_finder.cache_clear()

    def test_template_override(self):
        """
        Test that defaults templates are used when no theme is applied.
        """
        resp = self.client.get('/signin')
        self.assertEqual(resp.status_code, 200)
        # The theme-specific login text must NOT leak into the default page.
        self.assertNotContains(resp, "Login Page override for test-theme.")
@skip_unless_lms
class TestStanfordTheme(TestCase):
    """
    Test html, sass and static file overrides for stanford theme.

    These tests are added to ensure expected behavior after USE_CUSTOM_THEME is removed and
    a new theme 'stanford-style' is added instead.
    """

    def setUp(self):
        """
        Clear static file finders cache and register cleanup methods.
        """
        super(TestStanfordTheme, self).setUp()
        # Clear the internal staticfiles caches, to get test isolation.
        staticfiles.finders.get_finder.cache_clear()

    @with_comprehensive_theme("stanford-style")
    def test_footer(self):
        """
        Test stanford theme footer.
        """
        resp = self.client.get('/')
        self.assertEqual(resp.status_code, 200)
        # This string comes from the stanford-style theme's footer template.
        self.assertContains(resp, "footer overrides for stanford theme go here")

    @with_comprehensive_theme("stanford-style")
    def test_logo_image(self):
        """
        Test custom logo.
        """
        result = staticfiles.finders.find('stanford-style/images/logo.png')
        self.assertEqual(result, settings.REPO_ROOT / 'themes/stanford-style/lms/static/images/logo.png')

    @with_comprehensive_theme("stanford-style")
    def test_favicon_image(self):
        """
        Test correct favicon for custom theme.
        """
        result = staticfiles.finders.find('stanford-style/images/favicon.ico')
        self.assertEqual(result, settings.REPO_ROOT / 'themes/stanford-style/lms/static/images/favicon.ico')

    @with_comprehensive_theme("stanford-style")
    def test_index_page(self):
        """
        Test custom theme overrides for index page.
        """
        resp = self.client.get('/')
        self.assertEqual(resp.status_code, 200)
        # This string comes from the stanford-style theme's index template.
        self.assertContains(resp, "Free courses from <strong>Stanford</strong>")
| agpl-3.0 |
aliyun/oss-ftp | python27/win32/Lib/encodings/shift_jis_2004.py | 816 | 1059 | #
# shift_jis_2004.py: Python Unicode Codec for SHIFT_JIS_2004
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_jp, codecs
import _multibytecodec as mbc
codec = _codecs_jp.getcodec('shift_jis_2004')
class Codec(codecs.Codec):
    # Stateless encode/decode taken directly from the C codec object.
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    codec = codec  # underlying multibyte codec implementation
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    codec = codec  # underlying multibyte codec implementation
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    codec = codec  # underlying multibyte codec implementation
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    codec = codec  # underlying multibyte codec implementation
def getregentry():
    """Return the CodecInfo entry used to register the shift_jis_2004 codec."""
    info = codecs.CodecInfo(
        name='shift_jis_2004',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
    return info
| mit |
phobson/statsmodels | statsmodels/miscmodels/count.py | 29 | 10838 | # -*- coding: utf-8 -*-
"""
Created on Mon Jul 26 08:34:59 2010
Author: josef-pktd
changes:
added offset and zero-inflated version of Poisson
- kind of ok, need better test cases,
- a nan in ZIP bse, need to check hessian calculations
- found error in ZIP loglike
- all tests pass with
Issues
------
* If true model is not zero-inflated then numerical Hessian for ZIP has zeros
for the inflation probability and is not invertible.
-> hessian inverts and bse look ok if row and column are dropped, pinv also works
* GenericMLE: still get somewhere (where?)
"CacheWriteWarning: The attribute 'bse' cannot be overwritten"
* bfgs is too fragile, doesn't come back
* `nm` is slow but seems to work
* need good start_params and their use in genericmle needs to be checked for
consistency, set as attribute or method (called as attribute)
* numerical hessian needs better scaling
* check taking parts out of the loop, e.g. factorial(endog) could be precalculated
"""
from __future__ import print_function
import numpy as np
from scipy import stats
from scipy.misc import factorial
import statsmodels.api as sm
from statsmodels.base.model import GenericLikelihoodModel
def maxabs(arr1, arr2):
    """Return the maximum absolute elementwise difference between two arrays."""
    diff = np.abs(arr1 - arr2)
    return diff.max()
def maxabsrel(arr1, arr2):
    """Return the maximum absolute relative difference, max(|arr2/arr1 - 1|)."""
    rel = arr2 / arr1 - 1
    return np.abs(rel).max()
class NonlinearDeltaCov(object):
    '''Asymptotic covariance by Deltamethod

    the function is designed for 2d array, with rows equal to
    the number of equations and columns equal to the number
    of parameters. 1d params work by chance ?

    fun: R^{m*k} -> R^{m}  where m is number of equations and k is
    the number of parameters.

    equations follow Greene
    '''

    def __init__(self, fun, params, cov_params):
        self.fun = fun                  # vector-valued transformation g(params)
        self.params = params            # point estimate to linearize around
        self.cov_params = cov_params    # covariance of the parameter estimates

    def grad(self, params=None, **kwds):
        # Numerical Jacobian of fun at params (forward differences).
        if params is None:
            params = self.params
        kwds.setdefault('epsilon', 1e-4)
        from statsmodels.tools.numdiff import approx_fprime
        return approx_fprime(params, self.fun, **kwds)

    def cov(self):
        # Delta-method covariance: G * Cov(params) * G'
        g = self.grad()
        covar = np.dot(np.dot(g, self.cov_params), g.T)
        return covar

    def expected(self):
        # rename: misnomer, this is the MLE of the fun
        return self.fun(self.params)

    def wald(self, value):
        # Wald statistic and chi-square p-value for H0: fun(params) == value.
        m = self.expected()
        v = self.cov()
        df = np.size(m)
        diff = m - value
        lmstat = np.dot(np.dot(diff.T, np.linalg.inv(v)), diff)
        return lmstat, stats.chi2.sf(lmstat, df)
class PoissonGMLE(GenericLikelihoodModel):
    '''Maximum Likelihood Estimation of Poisson Model

    This is an example for generic MLE which has the same
    statistical model as discretemod.Poisson.

    Except for defining the negative log-likelihood method, all
    methods and results are generic. Gradients and Hessian
    and all resulting statistics are based on numerical
    differentiation.

    '''

    # copied from discretemod.Poisson
    def nloglikeobs(self, params):
        """
        Negative loglikelihood of Poisson model, by observation.

        Parameters
        ----------
        params : array-like
            The parameters of the model.

        Returns
        -------
        The negative log likelihood of each observation evaluated at `params`

        Notes
        --------
        .. math :: \\ln L=\\sum_{i=1}^{n}\\left[-\\lambda_{i}+y_{i}x_{i}^{\\prime}\\beta-\\ln y_{i}!\\right]
        """
        XB = np.dot(self.exog, params)
        endog = self.endog
        # per observation: lambda_i - y_i * x_i'beta + ln(y_i!)
        return np.exp(XB) - endog*XB + np.log(factorial(endog))

    def predict_distribution(self, exog):
        '''return frozen scipy.stats distribution with mu at estimated prediction
        '''
        # BUG FIX: the original tested ``hasattr(self, result)`` (an undefined
        # name, NameError at runtime) and used an undefined ``params``; use the
        # fitted result's params instead.
        # NOTE(review): assumes the fitting routine attaches a ``result``
        # attribute to the model -- confirm against the fit workflow used.
        if not hasattr(self, 'result'):
            raise ValueError('model has not been fit; no `result` attribute')
        params = self.result.params
        mu = np.exp(np.dot(exog, params))
        return stats.poisson(mu, loc=0)
class PoissonOffsetGMLE(GenericLikelihoodModel):
    '''Maximum Likelihood Estimation of Poisson Model

    This is an example for generic MLE which has the same
    statistical model as discretemod.Poisson but adds offset

    Except for defining the negative log-likelihood method, all
    methods and results are generic. Gradients and Hessian
    and all resulting statistics are based on numerical
    differentiation.

    '''

    def __init__(self, endog, exog=None, offset=None, missing='none', **kwds):
        # let them be none in case user wants to use inheritance
        # idiom fix: ``offset is not None`` instead of ``not offset is None``
        if offset is not None:
            if offset.ndim == 1:
                offset = offset[:,None] #need column
            self.offset = offset.ravel()
        else:
            self.offset = 0.
        super(PoissonOffsetGMLE, self).__init__(endog, exog, missing=missing,
                                                **kwds)

#this was added temporarily for bug-hunting, but shouldn't be needed
#    def loglike(self, params):
#        return -self.nloglikeobs(params).sum(0)

    # original copied from discretemod.Poisson
    def nloglikeobs(self, params):
        """
        Negative loglikelihood of Poisson model, by observation.

        Parameters
        ----------
        params : array-like
            The parameters of the model.

        Returns
        -------
        nloglik : ndarray
            The negative log likelihood of each observation evaluated at
            `params`, with the fixed offset added to the linear predictor.

        Notes
        --------
        .. math :: \\ln L=\\sum_{i=1}^{n}\\left[-\\lambda_{i}+y_{i}x_{i}^{\\prime}\\beta-\\ln y_{i}!\\right]
        """
        XB = self.offset + np.dot(self.exog, params)
        endog = self.endog
        nloglik = np.exp(XB) - endog*XB + np.log(factorial(endog))
        return nloglik
class PoissonZiGMLE(GenericLikelihoodModel):
    '''Maximum Likelihood Estimation of Poisson Model

    This is an example for generic MLE which has the same statistical model
    as discretemod.Poisson but adds offset and zero-inflation.

    Except for defining the negative log-likelihood method, all
    methods and results are generic. Gradients and Hessian
    and all resulting statistics are based on numerical
    differentiation.

    There are numerical problems if there is no zero-inflation.

    '''

    def __init__(self, endog, exog=None, offset=None, missing='none', **kwds):
        # let them be none in case user wants to use inheritance
        super(PoissonZiGMLE, self).__init__(endog, exog, missing=missing,
                **kwds)
        if not offset is None:
            if offset.ndim == 1:
                offset = offset[:,None] #need column
            self.offset = offset.ravel()  #which way?
        else:
            self.offset = 0.

        #TODO: it's not standard pattern to use default exog
        if exog is None:
            self.exog = np.ones((self.nobs,1))
        self.nparams = self.exog.shape[1]
        #what's the shape in regression for exog if only constant
        # last parameter slot is the (transformed) zero-inflation parameter
        self.start_params = np.hstack((np.ones(self.nparams), 0))
        self.cloneattr = ['start_params']
        #needed for t_test and summary
        self.exog_names.append('zi')

    # original copied from discretemod.Poisson
    def nloglikeobs(self, params):
        """
        Negative loglikelihood of zero-inflated Poisson model, by observation.

        Parameters
        ----------
        params : array-like
            The parameters of the model; ``params[:-1]`` are the regression
            coefficients, ``params[-1]`` parameterizes the inflation
            probability via a logistic transform.

        Returns
        -------
        The negative log likelihood of each observation evaluated at `params`

        Notes
        --------
        .. math :: \\ln L=\\sum_{i=1}^{n}\\left[-\\lambda_{i}+y_{i}x_{i}^{\\prime}\\beta-\\ln y_{i}!\\right]
        """
        beta = params[:-1]
        # logistic transform keeps the inflation probability gamm in (0, 1)
        gamm = 1 / (1 + np.exp(params[-1])) #check this
        # replace with np.dot(self.exogZ, gamma)
        #print(np.shape(self.offset), self.exog.shape, beta.shape
        XB = self.offset + np.dot(self.exog, beta)
        endog = self.endog
        # non-zero observations: -log(1-gamm) plus the plain Poisson term
        nloglik = -np.log(1-gamm) + np.exp(XB) - endog*XB + np.log(factorial(endog))
        # observed zeros: mix the inflation mass gamm with the Poisson
        # probability of zero, exp(-lambda_i)
        nloglik[endog==0] = - np.log(gamm + np.exp(-nloglik[endog==0]))
        return nloglik
if __name__ == '__main__':
    # Ad-hoc demo / comparison script; only runs when executed directly.
    # NOTE(review): this demo appears stale -- see the notes inline.

    #Example:
    np.random.seed(98765678)
    nobs = 1000
    rvs = np.random.randn(nobs,6)
    data_exog = rvs
    data_exog = sm.add_constant(data_exog, prepend=False)
    xbeta = 1 + 0.1*rvs.sum(1)
    data_endog = np.random.poisson(np.exp(xbeta))
    #print(data_endog

    # NOTE(review): ``MyPoisson`` is not defined anywhere in this module;
    # presumably ``PoissonGMLE`` was intended -- confirm before running.
    modp = MyPoisson(data_endog, data_exog)
    resp = modp.fit()
    print(resp.params)
    print(resp.bse)

    # NOTE(review): ``statsmodels.discretemod`` is an outdated module path;
    # confirm against the current statsmodels package layout.
    from statsmodels.discretemod import Poisson
    resdp = Poisson(data_endog, data_exog).fit()
    print('\ncompare with discretemod')
    print('compare params')
    print(resdp.params - resp.params)
    print('compare bse')
    print(resdp.bse - resp.bse)

    gmlp = sm.GLM(data_endog, data_exog, family=sm.families.Poisson())
    resgp = gmlp.fit()
    ''' this creates a warning, bug bse is double defined ???
    c:\josef\eclipsegworkspace\statsmodels-josef-experimental-gsoc\scikits\statsmodels\decorators.py:105: CacheWriteWarning: The attribute 'bse' cannot be overwritten
    warnings.warn(errmsg, CacheWriteWarning)
    '''
    print('\ncompare with GLM')
    print('compare params')
    print(resgp.params - resp.params)
    print('compare bse')
    print(resgp.bse - resp.bse)

    lam = np.exp(np.dot(data_exog, resp.params))
    '''mean of Poisson distribution'''
    predmean = stats.poisson.stats(lam,moments='m')
    print(np.max(np.abs(predmean - lam)))

    # Delta-method covariance of the transformed mean at the sample average.
    fun = lambda params: np.exp(np.dot(data_exog.mean(0), params))
    lamcov = NonlinearDeltaCov(fun, resp.params, resdp.cov_params())
    print(lamcov.cov().shape)
    print(lamcov.cov())
    print('analytical')
    xm = data_exog.mean(0)
    print(np.dot(np.dot(xm, resdp.cov_params()), xm.T) * \
            np.exp(2*np.dot(data_exog.mean(0), resp.params)))
    ''' cov of linear transformation of params
    >>> np.dot(np.dot(xm, resdp.cov_params()), xm.T)
    0.00038904130127582825
    >>> resp.cov_params(xm)
    0.00038902428119179394
    >>> np.dot(np.dot(xm, resp.cov_params()), xm.T)
    0.00038902428119179394
    '''
    print(lamcov.wald(1.))
    print(lamcov.wald(2.))
    print(lamcov.wald(2.6))

    do_bootstrap = False
    if do_bootstrap:
        m,s,r = resp.bootstrap(method='newton')
        print(m)
        print(s)
        print(resp.bse)

    print('\ncomparison maxabs, masabsrel')
    print('discr params', maxabs(resdp.params, resp.params), maxabsrel(resdp.params, resp.params))
    print('discr bse ', maxabs(resdp.bse, resp.bse), maxabsrel(resdp.bse, resp.bse))
    print('discr bsejac', maxabs(resdp.bse, resp.bsejac), maxabsrel(resdp.bse, resp.bsejac))
    print('discr bsejhj', maxabs(resdp.bse, resp.bsejhj), maxabsrel(resdp.bse, resp.bsejhj))
    print('')
    print('glm params ', maxabs(resdp.params, resp.params), maxabsrel(resdp.params, resp.params))
    print('glm bse ', maxabs(resdp.bse, resp.bse), maxabsrel(resdp.bse, resp.bse))
| bsd-3-clause |
petteyg/intellij-community | python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_filter.py | 326 | 2107 | # Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Fixer that changes filter(F, X) into list(filter(F, X)).
We avoid the transformation if the filter() call is directly contained
in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or
for V in <>:.
NOTE: This is still not correct if the original code was depending on
filter(F, X) to return a string if X is a string and a tuple if X is a
tuple. That would require type inference, which we don't do. Let
Python 2.6 figure it out.
"""
# Local imports
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, ListComp, in_special_context
class FixFilter(fixer_base.ConditionalFix):
    """Rewrite ``filter(F, X)`` calls for Python 3 semantics.

    Three pattern alternatives are matched: a lambda predicate (becomes a
    list comprehension), ``filter(None, seq)`` (becomes a truthiness
    comprehension), and the general call (gets wrapped in ``list(...)``).
    """
    BM_compatible = True
    PATTERN = """
    filter_lambda=power<
        'filter'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
    >
    |
    power<
        'filter'
        trailer< '(' arglist< none='None' ',' seq=any > ')' >
    >
    |
    power<
        'filter'
        args=trailer< '(' [any] ')' >
    >
    """

    skip_on = "future_builtins.filter"

    def transform(self, node, results):
        # Skip files that already import future_builtins.filter.
        if self.should_skip(node):
            return
        if "filter_lambda" in results:
            # filter(lambda x: expr, seq)  ->  [x for x in seq if expr]
            new = ListComp(results.get("fp").clone(),
                           results.get("fp").clone(),
                           results.get("it").clone(),
                           results.get("xp").clone())

        elif "none" in results:
            # filter(None, seq)  ->  [_f for _f in seq if _f]
            new = ListComp(Name(u"_f"),
                           Name(u"_f"),
                           results["seq"].clone(),
                           Name(u"_f"))

        else:
            # General case: wrap in list(...) unless the call already sits in
            # a context that consumes an iterator (iter(), list(), for V in).
            if in_special_context(node):
                return None
            new = node.clone()
            new.prefix = u""
            new = Call(Name(u"list"), [new])
        new.prefix = node.prefix
        return new
| apache-2.0 |
rgeleta/odoo | addons/l10n_th/__init__.py | 893 | 1045 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
luceatnobis/youtube-dl | youtube_dl/extractor/adobetv.py | 52 | 7359 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
parse_duration,
unified_strdate,
str_to_int,
int_or_none,
float_or_none,
ISO639Utils,
determine_ext,
)
class AdobeTVBaseIE(InfoExtractor):
    # Shared v4 API endpoint used by all tv.adobe.com extractors below.
    _API_BASE_URL = 'http://tv.adobe.com/api/v4/'
class AdobeTVIE(AdobeTVBaseIE):
    """Extract a single episode from tv.adobe.com (optionally language-prefixed)."""
    _VALID_URL = r'https?://tv\.adobe\.com/(?:(?P<language>fr|de|es|jp)/)?watch/(?P<show_urlname>[^/]+)/(?P<id>[^/]+)'

    _TEST = {
        'url': 'http://tv.adobe.com/watch/the-complete-picture-with-julieanne-kost/quick-tip-how-to-draw-a-circle-around-an-object-in-photoshop/',
        'md5': '9bc5727bcdd55251f35ad311ca74fa1e',
        'info_dict': {
            'id': '10981',
            'ext': 'mp4',
            'title': 'Quick Tip - How to Draw a Circle Around an Object in Photoshop',
            'description': 'md5:99ec318dc909d7ba2a1f2b038f7d2311',
            'thumbnail': r're:https?://.*\.jpg$',
            'upload_date': '20110914',
            'duration': 60,
            'view_count': int,
        },
    }

    def _real_extract(self, url):
        language, show_urlname, urlname = re.match(self._VALID_URL, url).groups()
        if not language:
            # URLs without a language prefix serve the English site.
            language = 'en'

        video_data = self._download_json(
            self._API_BASE_URL + 'episode/get/?language=%s&show_urlname=%s&urlname=%s&disclosure=standard' % (language, show_urlname, urlname),
            urlname)['data'][0]

        formats = [{
            'url': source['url'],
            # Prefer the API's quality label; otherwise fall back to a token
            # parsed from the file name (e.g. "...-1080.mp4" -> "1080").
            'format_id': source.get('quality_level') or source['url'].split('-')[-1].split('.')[0] or None,
            'width': int_or_none(source.get('width')),
            'height': int_or_none(source.get('height')),
            'tbr': int_or_none(source.get('video_data_rate')),
        } for source in video_data['videos']]
        self._sort_formats(formats)

        return {
            'id': compat_str(video_data['id']),
            'title': video_data['title'],
            'description': video_data.get('description'),
            'thumbnail': video_data.get('thumbnail'),
            'upload_date': unified_strdate(video_data.get('start_date')),
            'duration': parse_duration(video_data.get('duration')),
            'view_count': str_to_int(video_data.get('playcount')),
            'formats': formats,
        }
class AdobeTVPlaylistBaseIE(AdobeTVBaseIE):
    """Shared pagination logic for the show/channel playlist extractors."""

    def _parse_page_data(self, page_data):
        # Subclasses decide how to pull a URL out of an element
        # via _get_element_url().
        return [self.url_result(self._get_element_url(element_data)) for element_data in page_data]

    def _extract_playlist_entries(self, url, display_id):
        # Fetch page 1 to learn the total page count, then walk the rest.
        page = self._download_json(url, display_id)
        entries = self._parse_page_data(page['data'])
        for page_num in range(2, page['paging']['pages'] + 1):
            entries.extend(self._parse_page_data(
                self._download_json(url + '&page=%d' % page_num, display_id)['data']))
        return entries
class AdobeTVShowIE(AdobeTVPlaylistBaseIE):
    """Extract all episodes of a tv.adobe.com show as a playlist."""
    _VALID_URL = r'https?://tv\.adobe\.com/(?:(?P<language>fr|de|es|jp)/)?show/(?P<id>[^/]+)'

    _TEST = {
        'url': 'http://tv.adobe.com/show/the-complete-picture-with-julieanne-kost',
        'info_dict': {
            'id': '36',
            'title': 'The Complete Picture with Julieanne Kost',
            'description': 'md5:fa50867102dcd1aa0ddf2ab039311b27',
        },
        'playlist_mincount': 136,
    }

    def _get_element_url(self, element_data):
        # Episode elements carry a list of URLs; the first is used.
        return element_data['urls'][0]

    def _real_extract(self, url):
        language, show_urlname = re.match(self._VALID_URL, url).groups()
        if not language:
            language = 'en'
        query = 'language=%s&show_urlname=%s' % (language, show_urlname)

        show_data = self._download_json(self._API_BASE_URL + 'show/get/?%s' % query, show_urlname)['data'][0]

        return self.playlist_result(
            self._extract_playlist_entries(self._API_BASE_URL + 'episode/?%s' % query, show_urlname),
            compat_str(show_data['id']),
            show_data['show_name'],
            show_data['show_description'])
class AdobeTVChannelIE(AdobeTVPlaylistBaseIE):
    """Extract all shows of a channel, optionally filtered by category."""
    _VALID_URL = r'https?://tv\.adobe\.com/(?:(?P<language>fr|de|es|jp)/)?channel/(?P<id>[^/]+)(?:/(?P<category_urlname>[^/]+))?'

    _TEST = {
        'url': 'http://tv.adobe.com/channel/development',
        'info_dict': {
            'id': 'development',
        },
        'playlist_mincount': 96,
    }

    def _get_element_url(self, element_data):
        # Show elements expose a single canonical URL.
        return element_data['url']

    def _real_extract(self, url):
        language, channel_urlname, category_urlname = re.match(self._VALID_URL, url).groups()
        if not language:
            language = 'en'
        query = 'language=%s&channel_urlname=%s' % (language, channel_urlname)
        if category_urlname:
            query += '&category_urlname=%s' % category_urlname

        return self.playlist_result(
            self._extract_playlist_entries(self._API_BASE_URL + 'show/?%s' % query, channel_urlname),
            channel_urlname)
class AdobeTVVideoIE(InfoExtractor):
    """Extract a single embedded video from video.tv.adobe.com."""
    _VALID_URL = r'https?://video\.tv\.adobe\.com/v/(?P<id>\d+)'

    _TEST = {
        # From https://helpx.adobe.com/acrobat/how-to/new-experience-acrobat-dc.html?set=acrobat--get-started--essential-beginners
        'url': 'https://video.tv.adobe.com/v/2456/',
        'md5': '43662b577c018ad707a63766462b1e87',
        'info_dict': {
            'id': '2456',
            'ext': 'mp4',
            'title': 'New experience with Acrobat DC',
            'description': 'New experience with Acrobat DC',
            'duration': 248.667,
        },
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        # Metadata is embedded in the page as a JS object assigned to `bridge`.
        video_data = self._parse_json(self._search_regex(
            r'var\s+bridge\s*=\s*([^;]+);', webpage, 'bridged data'), video_id)

        formats = [{
            'format_id': '%s-%s' % (determine_ext(source['src']), source.get('height')),
            'url': source['src'],
            'width': int_or_none(source.get('width')),
            'height': int_or_none(source.get('height')),
            'tbr': int_or_none(source.get('bitrate')),
        } for source in video_data['sources']]
        self._sort_formats(formats)

        # For both metadata and downloaded files the duration varies among
        # formats. I just pick the max one
        duration = max(filter(None, [
            float_or_none(source.get('duration'), scale=1000)
            for source in video_data['sources']]))

        subtitles = {}
        for translation in video_data.get('translations', []):
            # Prefer the W3C language code; otherwise map the long language
            # name to an ISO 639-1 code.
            lang_id = translation.get('language_w3c') or ISO639Utils.long2short(translation['language_medium'])
            if lang_id not in subtitles:
                subtitles[lang_id] = []
            subtitles[lang_id].append({
                'url': translation['vttPath'],
                'ext': 'vtt',
            })

        return {
            'id': video_id,
            'formats': formats,
            'title': video_data['title'],
            'description': video_data.get('description'),
            'thumbnail': video_data['video'].get('poster'),
            'duration': duration,
            'subtitles': subtitles,
        }
| unlicense |
NvanAdrichem/networkx | networkx/algorithms/connectivity/tests/test_cuts.py | 3 | 10824 | from nose.tools import assert_equal, assert_true, assert_false, assert_raises
import networkx as nx
from networkx.algorithms.connectivity import (minimum_st_edge_cut,
minimum_st_node_cut)
from networkx.algorithms.flow import (edmonds_karp, preflow_push,
shortest_augmenting_path)
from networkx.utils import arbitrary_element
flow_funcs = [edmonds_karp, preflow_push, shortest_augmenting_path]
msg = "Assertion failed in function: {0}"
# Tests for node and edge cutsets
def _generate_no_biconnected(max_attempts=50):
    """Yield random graphs that are connected but not biconnected.

    Raises an Exception after ``max_attempts`` consecutive failures to
    sample a suitable graph.
    """
    attempts = 0
    while True:
        G = nx.fast_gnp_random_graph(100, 0.0575)
        if nx.is_connected(G) and not nx.is_biconnected(G):
            attempts = 0
            yield G
        elif attempts >= max_attempts:
            # BUG FIX: the original applied the % operator to an
            # already-formatted message (``msg % max_attempts`` after
            # ``msg = "..." % attempts``), which raised a TypeError
            # instead of the intended error message.
            raise Exception(
                "Tried %d times: no suitable Graph." % max_attempts)
        else:
            attempts += 1
def test_articulation_points():
    """A minimum node cut of a connected, non-biconnected graph is a single
    articulation point."""
    Ggen = _generate_no_biconnected()
    for flow_func in flow_funcs:
        for i in range(3):
            G = next(Ggen)
            cut = nx.minimum_node_cut(G, flow_func=flow_func)
            assert_true(len(cut) == 1, msg=msg.format(flow_func.__name__))
            assert_true(cut.pop() in set(nx.articulation_points(G)),
                        msg=msg.format(flow_func.__name__))
def test_brandes_erlebach_book():
    """Known cut sizes for the worked example of Brandes & Erlebach."""
    # Figure 1 chapter 7: Connectivity
    # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
    G = nx.Graph()
    G.add_edges_from([(1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 6), (3, 4),
                      (3, 6), (4, 6), (4, 7), (5, 7), (6, 8), (6, 9), (7, 8),
                      (7, 10), (8, 11), (9, 10), (9, 11), (10, 11)])
    for flow_func in flow_funcs:
        kwargs = dict(flow_func=flow_func)
        # edge cutsets
        assert_equal(3, len(nx.minimum_edge_cut(G, 1, 11, **kwargs)),
                     msg=msg.format(flow_func.__name__))
        edge_cut = nx.minimum_edge_cut(G, **kwargs)
        # Node 5 has only two edges
        assert_equal(2, len(edge_cut), msg=msg.format(flow_func.__name__))
        # Removing the cut must disconnect the graph.
        H = G.copy()
        H.remove_edges_from(edge_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
        # node cuts
        assert_equal(set([6, 7]), minimum_st_node_cut(G, 1, 11, **kwargs),
                     msg=msg.format(flow_func.__name__))
        assert_equal(set([6, 7]), nx.minimum_node_cut(G, 1, 11, **kwargs),
                     msg=msg.format(flow_func.__name__))
        node_cut = nx.minimum_node_cut(G, **kwargs)
        assert_equal(2, len(node_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_nodes_from(node_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
def test_white_harary_paper():
    """High adhesion / low cohesion graph: edge cut size 3, node cut {0}."""
    # Figure 1b white and harary (2001)
    # http://eclectic.ss.uci.edu/~drwhite/sm-w23.PDF
    # A graph with high adhesion (edge connectivity) and low cohesion
    # (node connectivity)
    G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4))
    G.remove_node(7)
    for i in range(4,7):
        G.add_edge(0,i)
    G = nx.disjoint_union(G, nx.complete_graph(4))
    G.remove_node(G.order()-1)
    for i in range(7,10):
        G.add_edge(0,i)
    for flow_func in flow_funcs:
        kwargs = dict(flow_func=flow_func)
        # edge cuts
        edge_cut = nx.minimum_edge_cut(G, **kwargs)
        assert_equal(3, len(edge_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_edges_from(edge_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
        # node cuts: node 0 is the single cut vertex joining the cliques
        node_cut = nx.minimum_node_cut(G, **kwargs)
        assert_equal(set([0]), node_cut, msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_nodes_from(node_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
def test_petersen_cutset():
    """The Petersen graph has node and edge connectivity 3."""
    G = nx.petersen_graph()
    for flow_func in flow_funcs:
        kwargs = dict(flow_func=flow_func)
        # edge cuts
        edge_cut = nx.minimum_edge_cut(G, **kwargs)
        assert_equal(3, len(edge_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_edges_from(edge_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
        # node cuts
        node_cut = nx.minimum_node_cut(G, **kwargs)
        assert_equal(3, len(node_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_nodes_from(node_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
def test_octahedral_cutset():
    """The octahedral graph has node and edge connectivity 4."""
    G=nx.octahedral_graph()
    for flow_func in flow_funcs:
        kwargs = dict(flow_func=flow_func)
        # edge cuts
        edge_cut = nx.minimum_edge_cut(G, **kwargs)
        assert_equal(4, len(edge_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_edges_from(edge_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
        # node cuts
        node_cut = nx.minimum_node_cut(G, **kwargs)
        assert_equal(4, len(node_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_nodes_from(node_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
def test_icosahedral_cutset():
    """The icosahedral graph has node and edge connectivity 5."""
    G=nx.icosahedral_graph()
    for flow_func in flow_funcs:
        kwargs = dict(flow_func=flow_func)
        # edge cuts
        edge_cut = nx.minimum_edge_cut(G, **kwargs)
        assert_equal(5, len(edge_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_edges_from(edge_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
        # node cuts
        node_cut = nx.minimum_node_cut(G, **kwargs)
        assert_equal(5, len(node_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_nodes_from(node_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
def test_node_cutset_exception():
    """A disconnected graph has no minimum node cut; an error is expected."""
    G = nx.Graph()
    G.add_edges_from([(1, 2), (3, 4)])
    for flow_func in flow_funcs:
        assert_raises(nx.NetworkXError, nx.minimum_node_cut, G,
                      flow_func=flow_func)
def test_node_cutset_random_graphs():
    """Minimum node cuts of random graphs must match node_connectivity and
    actually disconnect the graph."""
    for flow_func in flow_funcs:
        for i in range(3):
            G = nx.fast_gnp_random_graph(50, 0.25)
            if not nx.is_connected(G):
                # Join every component to the first one so G is connected.
                ccs = iter(nx.connected_components(G))
                start = arbitrary_element(next(ccs))
                G.add_edges_from((start, arbitrary_element(c)) for c in ccs)
            cutset = nx.minimum_node_cut(G, flow_func=flow_func)
            assert_equal(nx.node_connectivity(G), len(cutset),
                         msg=msg.format(flow_func.__name__))
            G.remove_nodes_from(cutset)
            assert_false(nx.is_connected(G), msg=msg.format(flow_func.__name__))
def test_edge_cutset_random_graphs():
    """Minimum edge cuts of random graphs must match edge_connectivity and
    actually disconnect the graph."""
    for flow_func in flow_funcs:
        for i in range(3):
            G = nx.fast_gnp_random_graph(50, 0.25)
            if not nx.is_connected(G):
                # Join every component to the first one so G is connected.
                ccs = iter(nx.connected_components(G))
                start = arbitrary_element(next(ccs))
                G.add_edges_from((start, arbitrary_element(c)) for c in ccs)
            cutset = nx.minimum_edge_cut(G, flow_func=flow_func)
            assert_equal(nx.edge_connectivity(G), len(cutset),
                         msg=msg.format(flow_func.__name__))
            G.remove_edges_from(cutset)
            assert_false(nx.is_connected(G), msg=msg.format(flow_func.__name__))
def test_empty_graphs():
    """Cut functions must refuse graphs with no nodes."""
    for empty in (nx.Graph(), nx.DiGraph()):
        for cut_func in (nx.minimum_node_cut, nx.minimum_edge_cut):
            for flow_func in flow_funcs:
                assert_raises(nx.NetworkXPointlessConcept, cut_func,
                              empty, flow_func=flow_func)
def test_unbounded():
    """A minimum s-t edge cut in K5 has exactly four edges."""
    G = nx.complete_graph(5)
    for flow_func in flow_funcs:
        cut = minimum_st_edge_cut(G, 1, 4, flow_func=flow_func)
        assert_equal(4, len(cut))
def test_missing_source():
    """Both cut interfaces must reject a source node that is not in G."""
    G = nx.path_graph(4)
    for cut_func in (nx.minimum_edge_cut, nx.minimum_node_cut):
        for flow_func in flow_funcs:
            assert_raises(nx.NetworkXError, cut_func, G, 10, 1,
                          flow_func=flow_func)
def test_missing_target():
    """Both cut interfaces must reject a target node that is not in G."""
    G = nx.path_graph(4)
    for cut_func in (nx.minimum_edge_cut, nx.minimum_node_cut):
        for flow_func in flow_funcs:
            assert_raises(nx.NetworkXError, cut_func, G, 1, 10,
                          flow_func=flow_func)
def test_not_weakly_connected():
    """Directed graphs that are not weakly connected must be rejected."""
    G = nx.DiGraph()
    nx.add_path(G, [1, 2, 3])
    nx.add_path(G, [4, 5])
    for cut_func in (nx.minimum_edge_cut, nx.minimum_node_cut):
        for flow_func in flow_funcs:
            assert_raises(nx.NetworkXError, cut_func, G,
                          flow_func=flow_func)
def test_not_connected():
    """Undirected graphs that are not connected must be rejected."""
    G = nx.Graph()
    nx.add_path(G, [1, 2, 3])
    nx.add_path(G, [4, 5])
    for cut_func in (nx.minimum_edge_cut, nx.minimum_node_cut):
        for flow_func in flow_funcs:
            assert_raises(nx.NetworkXError, cut_func, G,
                          flow_func=flow_func)
def tests_min_cut_complete():
    """Both global cut interfaces return a cut of size 4 on K5."""
    G = nx.complete_graph(5)
    for interface_func in (nx.minimum_edge_cut, nx.minimum_node_cut):
        for flow_func in flow_funcs:
            cut = interface_func(G, flow_func=flow_func)
            assert_equal(4, len(cut))
def tests_min_cut_complete_directed():
    """Both global cut interfaces return a cut of size 4 on directed K5."""
    G = nx.complete_graph(5).to_directed()
    for interface_func in (nx.minimum_edge_cut, nx.minimum_node_cut):
        for flow_func in flow_funcs:
            cut = interface_func(G, flow_func=flow_func)
            assert_equal(4, len(cut))
def tests_minimum_st_node_cut():
    """Adjacent s and t in this sparse graph yield an empty node cut."""
    G = nx.Graph()
    G.add_nodes_from([0, 1, 2, 3, 7, 8, 11, 12])
    G.add_edges_from([(7, 11), (1, 11), (1, 12), (12, 8), (0, 1)])
    cut_nodes = minimum_st_node_cut(G, 7, 11)
    assert cut_nodes == []
def test_invalid_auxiliary():
    """Passing a non-auxiliary graph as ``auxiliary`` must raise."""
    complete = nx.complete_graph(5)
    assert_raises(nx.NetworkXError, minimum_st_node_cut, complete, 0, 3,
                  auxiliary=complete)
def test_interface_only_source():
    """Supplying only a source (no target) is an invalid call."""
    G = nx.complete_graph(5)
    for interface_func in (nx.minimum_node_cut, nx.minimum_edge_cut):
        assert_raises(nx.NetworkXError, interface_func, G, s=0)
def test_interface_only_target():
    """Supplying only a target (no source) is an invalid call."""
    G = nx.complete_graph(5)
    for interface_func in (nx.minimum_node_cut, nx.minimum_edge_cut):
        assert_raises(nx.NetworkXError, interface_func, G, t=3)
| bsd-3-clause |
OpenVnmrJ/OpenVnmrJ | src/biopack/myShutil.py | 4 | 1908 | import os.path
import shutil
import os, errno
def copytree(src, dest, symlinks=False):
    """My own copyTree which does not fail if the directory exists.

    Recursively copy a directory tree using copy2().

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied.

    Behavior is meant to be identical to GNU 'cp -R'.

    Args:
        src: path of the source directory tree.
        dest: path of the destination; created if it does not exist.
        symlinks: when True, recreate symlinks instead of copying targets.
    """
    def copyItems(src, dest, symlinks=False):
        """Function that does all the work.

        It is necessary to handle the two 'cp' cases:
        - destination does exist
        - destination does not exist

        See 'cp -R' documentation for more details
        """
        for item in os.listdir(src):
            srcPath = os.path.join(src, item)
            # Check for links first: os.path.isdir() follows symlinks, so a
            # link to a directory would otherwise be copied as a directory
            # even when symlinks=True. The original code also tested the
            # bare name ('item') instead of the full path, which resolved
            # relative to the current working directory.
            if symlinks and os.path.islink(srcPath):
                linkto = os.readlink(srcPath)
                # Create the link *inside* dest, not at dest itself.
                os.symlink(linkto, os.path.join(dest, item))
            elif os.path.isdir(srcPath):
                srcBasename = os.path.basename(srcPath)
                destDirPath = os.path.join(dest, srcBasename)
                if not os.path.exists(destDirPath):
                    os.makedirs(destDirPath)
                # Propagate the symlinks flag into the recursion (the
                # original dropped it, silently copying link targets).
                copyItems(srcPath, destDirPath, symlinks)
            else:
                shutil.copy2(srcPath, dest)

    # case 'cp -R src/ dest/' where dest/ already exists
    if os.path.exists(dest):
        destPath = os.path.join(dest, os.path.basename(src))
        if not os.path.exists(destPath):
            os.makedirs(destPath)
    # case 'cp -R src/ dest/' where dest/ does not exist
    else:
        os.makedirs(dest)
        destPath = dest
    # actually copy the files
    copyItems(src, destPath, symlinks)
| apache-2.0 |
jhuckabee/camlistore | lib/python/camli/schema.py | 21 | 9355 | #!/usr/bin/env python
#
# Camlistore uploader client for Python.
#
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Schema blob library for Camlistore."""
__author__ = 'Brett Slatkin (bslatkin@gmail.com)'
import datetime
import re
import simplejson
__all__ = [
'Error', 'DecodeError', 'SchemaBlob', 'FileCommon', 'File',
'Directory', 'Symlink', 'decode']
class Error(Exception):
  """Root of the exception hierarchy for this module."""
class DecodeError(Error):
  """Raised when the supplied schema blob cannot be decoded."""
# Maps 'camliType' to SchemaBlob sub-classes.
_TYPE_TO_CLASS = {}
def _camel_to_python(name):
"""Converts camelcase to Python case."""
return re.sub(r'([a-z]+)([A-Z])', r'\1_\2', name).lower()
class _SchemaMeta(type):
  """Meta-class for schema blobs.

  Merges ``required_fields``, ``optional_fields`` and ``_serializers``
  from every base class into the class being created, builds the
  JSON-name <-> Python-name translation tables, and registers concrete
  blob classes (those declaring ``type``) in _TYPE_TO_CLASS.
  """

  def __init__(cls, name, bases, dict):
    # NOTE: the 'dict' parameter shadows the builtin; kept to match the
    # standard type.__init__ signature.
    required_fields = set()
    optional_fields = set()
    json_to_python = {}
    python_to_json = {}
    serializers = {}

    def map_name(field):
      # All-lowercase names map to themselves; camelCase JSON names get
      # a snake_case Python alias recorded in both translation tables.
      if field.islower():
        return field
      python_name = _camel_to_python(field)
      json_to_python[field] = python_name
      python_to_json[python_name] = field
      return python_name

    # Walk bases first, then the new class, so the class's own
    # declarations are checked against everything it inherits.
    for klz in bases + (cls,):
      if hasattr(klz, '_json_to_python'):
        json_to_python.update(klz._json_to_python)
      if hasattr(klz, '_python_to_json'):
        python_to_json.update(klz._python_to_json)
      if hasattr(klz, 'required_fields'):
        for field in klz.required_fields:
          field = map_name(field)
          # A field may be declared only once across the hierarchy.
          assert field not in required_fields, (klz, field)
          assert field not in optional_fields, (klz, field)
          required_fields.add(field)
      if hasattr(klz, 'optional_fields'):
        for field in klz.optional_fields:
          field = map_name(field)
          assert field not in required_fields, (klz, field)
          assert field not in optional_fields, (klz, field)
          optional_fields.add(field)
      if hasattr(klz, '_serializers'):
        for field, value in klz._serializers.iteritems():
          field = map_name(field)
          # Serializers may only be attached to declared fields.
          assert (field in required_fields or
                  field in optional_fields), (klz, field)
          if not isinstance(value, _FieldSerializer):
            # A serializer *class* was given; instantiate it per field.
            serializers[field] = value(field)
          else:
            serializers[field] = value

    # Publish the merged metadata on the newly created class.
    setattr(cls, 'required_fields', frozenset(required_fields))
    setattr(cls, 'optional_fields', frozenset(optional_fields))
    setattr(cls, '_serializers', serializers)
    setattr(cls, '_json_to_python', json_to_python)
    setattr(cls, '_python_to_json', python_to_json)
    if hasattr(cls, 'type'):
      # Concrete blob classes declare 'type'; register them so decode()
      # can dispatch on the blob's camliType value.
      _TYPE_TO_CLASS[cls.type] = cls
class SchemaBlob(object):
  """Base-class for schema blobs.

  Each sub-class should have these fields:
    type: Required value of 'camliType'.
    required_fields: Set of required field names.
    optional_fields: Set of optional field names.
    _serializers: Dictionary mapping field names to the _FieldSerializer
      sub-class to use for serializing/deserializing the field's value.
  """

  __metaclass__ = _SchemaMeta

  # Fields that every schema blob must carry.
  required_fields = frozenset([
      'camliVersion',
      'camliType',
  ])
  # Signature-related fields are optional.
  optional_fields = frozenset([
      'camliSigner',
      'camliSig',
  ])
  _serializers = {}

  def __init__(self, blobref):
    """Initializer.

    Args:
      blobref: The blobref of the schema blob.
    """
    self.blobref = blobref
    # Fields present in a parsed blob but unknown to this class; kept so
    # encode() can round-trip them.
    self.unexpected_fields = {}

  @property
  def all_fields(self):
    """Returns the set of all potential fields for this blob."""
    all_fields = set()
    all_fields.update(self.required_fields)
    all_fields.update(self.optional_fields)
    all_fields.update(self.unexpected_fields)
    return all_fields

  def decode(self, blob_bytes, parsed=None):
    """Decodes a schema blob's bytes and unmarshals its fields.

    Args:
      blob_bytes: String with the bytes of the blob.
      parsed: If not None, an already parsed version of the blob bytes. When
        set, the blob_bytes argument is ignored.

    Raises:
      DecodeError if the blob_bytes are bad or the parsed blob is missing
      required fields.
    """
    # Clear any state left behind by a previous decode() call.
    for field in self.all_fields:
      if hasattr(self, field):
        delattr(self, field)
    if parsed is None:
      try:
        parsed = simplejson.loads(blob_bytes)
      except simplejson.JSONDecodeError, e:
        raise DecodeError('Could not parse JSON. %s: %s' % (e.__class__, e))
    for json_name, value in parsed.iteritems():
      name = self._json_to_python.get(json_name, json_name)
      if not (name in self.required_fields or name in self.optional_fields):
        # Unknown field: remember it instead of dropping it.
        self.unexpected_fields[name] = value
        continue
      serializer = self._serializers.get(name)
      if serializer:
        value = serializer.from_json(value)
      setattr(self, name, value)
    # Every required field must have been set above.
    for name in self.required_fields:
      if not hasattr(self, name):
        raise DecodeError('Missing required field: %s' % name)

  def encode(self):
    """Encodes a schema blob's bytes and marshals its fields.

    Returns:
      A UTF-8-encoding plain string containing the encoded blob bytes.
    """
    out = {}
    for python_name in self.all_fields:
      # Only fields actually set on the instance are emitted.
      if not hasattr(self, python_name):
        continue
      value = getattr(self, python_name)
      serializer = self._serializers.get(python_name)
      if serializer:
        value = serializer.to_json(value)
      json_name = self._python_to_json.get(python_name, python_name)
      out[json_name] = value
    return simplejson.dumps(out)
################################################################################
# Serializers for converting JSON fields to/from Python values
class _FieldSerializer(object):
"""Serializes a named field's value to and from JSON."""
def __init__(self, name):
"""Initializer.
Args:
name: The name of the field.
"""
self.name = name
def from_json(self, value):
"""Converts the JSON format of the field to the Python type.
Args:
value: The JSON value.
Returns:
The Python value.
"""
raise NotImplemented('Must implement from_json')
def to_json(self, value):
"""Converts the Python field value to the JSON format of the field.
Args:
value: The Python value.
Returns:
The JSON formatted-value.
"""
raise NotImplemented('Must implement to_json')
class _DateTimeSerializer(_FieldSerializer):
  """Formats ISO 8601 strings to/from datetime.datetime instances."""

  def from_json(self, value):
    # Separate the optional fractional-seconds part; the trailing 'Z'
    # is stripped either way. The fraction is right-padded with zeros
    # and truncated to microsecond precision.
    if '.' in value:
      base, fraction = value.split('.')
      micros = int((fraction[:-1] + '000000')[:6])
    else:
      base, micros = value[:-1], 0
    parsed = datetime.datetime.strptime(base, '%Y-%m-%dT%H:%M:%S')
    return parsed + datetime.timedelta(microseconds=micros)

  def to_json(self, value):
    # isoformat() omits the zone; append 'Z' to mark the value as UTC.
    return value.isoformat() + 'Z'
################################################################################
# Concrete Schema Blobs
class FileCommon(SchemaBlob):
  """Common base-class for all unix-y files."""

  # Nothing beyond SchemaBlob's own fields is mandatory here.
  required_fields = frozenset([])
  # Standard unix file metadata; all optional.
  optional_fields = frozenset([
      'fileName',
      'fileNameBytes',
      'unixPermission',
      'unixOwnerId',
      'unixGroupId',
      'unixGroup',
      'unixXattrs',
      'unixMtime',
      'unixCtime',
      'unixAtime',
  ])
  # Timestamps travel as ISO 8601 strings; convert to datetime objects.
  _serializers = {
      'unixMtime': _DateTimeSerializer,
      'unixCtime': _DateTimeSerializer,
      'unixAtime': _DateTimeSerializer,
  }
class File(FileCommon):
  """A file."""

  # camliType value; registers this class in _TYPE_TO_CLASS via the metaclass.
  type = 'file'
  required_fields = frozenset([
      'size',
      'contentParts',
  ])
  optional_fields = frozenset([
      'inodeRef',
  ])
  _serializers = {}
class Directory(FileCommon):
  """A directory."""

  # camliType value; registers this class in _TYPE_TO_CLASS via the metaclass.
  type = 'directory'
  required_fields = frozenset([
      'entries',
  ])
  optional_fields = frozenset([])
  _serializers = {}
class Symlink(FileCommon):
  """A symlink."""

  # camliType value; registers this class in _TYPE_TO_CLASS via the metaclass.
  type = 'symlink'
  required_fields = frozenset([])
  optional_fields = frozenset([
      'symlinkTarget',
      'symlinkTargetBytes',
  ])
  _serializers = {}
################################################################################
# Helper methods
def decode(blobref, blob_bytes):
  """Decode any schema blob, validating all required fields for its type.

  Args:
    blobref: The blobref of the schema blob.
    blob_bytes: String with the bytes of the blob.

  Returns:
    The decoded SchemaBlob sub-class instance.

  Raises:
    DecodeError: if the JSON is malformed, 'camliType' is absent, or no
      SchemaBlob sub-class is registered for that type.
  """
  try:
    parsed = simplejson.loads(blob_bytes)
  except simplejson.JSONDecodeError, e:
    raise DecodeError('Could not parse JSON. %s: %s' % (e.__class__, e))
  if 'camliType' not in parsed:
    raise DecodeError('Could not find "camliType" field.')
  camli_type = parsed['camliType']
  # _TYPE_TO_CLASS is populated by _SchemaMeta for each concrete blob class.
  blob_class = _TYPE_TO_CLASS.get(camli_type)
  if blob_class is None:
    raise DecodeError(
        'Could not find SchemaBlob sub-class for camliType=%r' % camli_type)
  schema_blob = blob_class(blobref)
  # Pass the already-parsed dict so the JSON is parsed only once.
  schema_blob.decode(None, parsed=parsed)
  return schema_blob
| apache-2.0 |
MiltosD/CEFELRC | lib/python2.7/site-packages/django/contrib/admin/sites.py | 70 | 17393 | import re
from django import http, template
from django.contrib.admin import ModelAdmin, actions
from django.contrib.admin.forms import AdminAuthenticationForm
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.contenttypes import views as contenttype_views
from django.views.decorators.csrf import csrf_protect
from django.db.models.base import ModelBase
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response
from django.utils.functional import update_wrapper
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from django.views.decorators.cache import never_cache
from django.conf import settings
LOGIN_FORM_KEY = 'this_is_the_login_form'
class AlreadyRegistered(Exception):
    """Raised by AdminSite.register() when a model is registered twice."""
class NotRegistered(Exception):
    """Raised by AdminSite.unregister() for a model that was never registered."""
class AdminSite(object):
    """
    An AdminSite object encapsulates an instance of the Django admin application, ready
    to be hooked in to your URLconf. Models are registered with the AdminSite using the
    register() method, and the get_urls() method can then be used to access Django view
    functions that present a full admin interface for the collection of registered
    models.
    """

    # Form/template hooks; sub-classes may override any of these.
    login_form = None
    index_template = None
    app_index_template = None
    login_template = None
    logout_template = None
    password_change_template = None
    password_change_done_template = None

    def __init__(self, name=None, app_name='admin'):
        self._registry = {} # model_class class -> admin_class instance
        self.root_path = None
        if name is None:
            self.name = 'admin'
        else:
            self.name = name
        self.app_name = app_name
        # Site-wide admin actions; _global_actions also remembers the ones
        # later removed via disable_action().
        self._actions = {'delete_selected': actions.delete_selected}
        self._global_actions = self._actions.copy()

    def register(self, model_or_iterable, admin_class=None, **options):
        """
        Registers the given model(s) with the given admin class.

        The model(s) should be Model classes, not instances.

        If an admin class isn't given, it will use ModelAdmin (the default
        admin options). If keyword arguments are given -- e.g., list_display --
        they'll be applied as options to the admin class.

        If a model is already registered, this will raise AlreadyRegistered.

        If a model is abstract, this will raise ImproperlyConfigured.
        """
        if not admin_class:
            admin_class = ModelAdmin

        # Don't import the humongous validation code unless required
        if admin_class and settings.DEBUG:
            from django.contrib.admin.validation import validate
        else:
            validate = lambda model, adminclass: None

        if isinstance(model_or_iterable, ModelBase):
            model_or_iterable = [model_or_iterable]
        for model in model_or_iterable:
            if model._meta.abstract:
                raise ImproperlyConfigured('The model %s is abstract, so it '
                      'cannot be registered with admin.' % model.__name__)

            if model in self._registry:
                raise AlreadyRegistered('The model %s is already registered' % model.__name__)

            # If we got **options then dynamically construct a subclass of
            # admin_class with those **options.
            if options:
                # For reasons I don't quite understand, without a __module__
                # the created class appears to "live" in the wrong place,
                # which causes issues later on.
                options['__module__'] = __name__
                admin_class = type("%sAdmin" % model.__name__, (admin_class,), options)

            # Validate (which might be a no-op)
            validate(admin_class, model)

            # Instantiate the admin class to save in the registry
            self._registry[model] = admin_class(model, self)

    def unregister(self, model_or_iterable):
        """
        Unregisters the given model(s).

        If a model isn't already registered, this will raise NotRegistered.
        """
        if isinstance(model_or_iterable, ModelBase):
            model_or_iterable = [model_or_iterable]
        for model in model_or_iterable:
            if model not in self._registry:
                raise NotRegistered('The model %s is not registered' % model.__name__)
            del self._registry[model]

    def add_action(self, action, name=None):
        """
        Register an action to be available globally.
        """
        name = name or action.__name__
        self._actions[name] = action
        self._global_actions[name] = action

    def disable_action(self, name):
        """
        Disable a globally-registered action. Raises KeyError for invalid names.
        """
        # Only removed from _actions; _global_actions keeps it reachable
        # via get_action().
        del self._actions[name]

    def get_action(self, name):
        """
        Explicitly get a registered global action whether it's enabled or
        not. Raises KeyError for invalid names.
        """
        return self._global_actions[name]

    @property
    def actions(self):
        """
        Get all the enabled actions as an iterable of (name, func).
        """
        return self._actions.iteritems()

    def has_permission(self, request):
        """
        Returns True if the given HttpRequest has permission to view
        *at least one* page in the admin site.
        """
        return request.user.is_active and request.user.is_staff

    def check_dependencies(self):
        """
        Check that all things needed to run the admin have been correctly installed.

        The default implementation checks that LogEntry, ContentType and the
        auth context processor are installed.
        """
        from django.contrib.admin.models import LogEntry
        from django.contrib.contenttypes.models import ContentType

        if not LogEntry._meta.installed:
            raise ImproperlyConfigured("Put 'django.contrib.admin' in your "
                "INSTALLED_APPS setting in order to use the admin application.")
        if not ContentType._meta.installed:
            raise ImproperlyConfigured("Put 'django.contrib.contenttypes' in "
                "your INSTALLED_APPS setting in order to use the admin application.")
        if not ('django.contrib.auth.context_processors.auth' in settings.TEMPLATE_CONTEXT_PROCESSORS or
                'django.core.context_processors.auth' in settings.TEMPLATE_CONTEXT_PROCESSORS):
            raise ImproperlyConfigured("Put 'django.contrib.auth.context_processors.auth' "
                "in your TEMPLATE_CONTEXT_PROCESSORS setting in order to use the admin application.")

    def admin_view(self, view, cacheable=False):
        """
        Decorator to create an admin view attached to this ``AdminSite``. This
        wraps the view and provides permission checking by calling
        ``self.has_permission``.

        You'll want to use this from within ``AdminSite.get_urls()``:

            class MyAdminSite(AdminSite):

                def get_urls(self):
                    from django.conf.urls.defaults import patterns, url

                    urls = super(MyAdminSite, self).get_urls()
                    urls += patterns('',
                        url(r'^my_view/$', self.admin_view(some_view))
                    )
                    return urls

        By default, admin_views are marked non-cacheable using the
        ``never_cache`` decorator. If the view can be safely cached, set
        cacheable=True.
        """
        def inner(request, *args, **kwargs):
            if not self.has_permission(request):
                # Unauthorized: show the admin login form instead.
                return self.login(request)
            return view(request, *args, **kwargs)
        if not cacheable:
            inner = never_cache(inner)
        # We add csrf_protect here so this function can be used as a utility
        # function for any view, without having to repeat 'csrf_protect'.
        if not getattr(view, 'csrf_exempt', False):
            inner = csrf_protect(inner)
        return update_wrapper(inner, view)

    def get_urls(self):
        from django.conf.urls.defaults import patterns, url, include

        if settings.DEBUG:
            self.check_dependencies()

        def wrap(view, cacheable=False):
            # Bind each view through admin_view so it gets permission
            # checking (and CSRF/cache handling) automatically.
            def wrapper(*args, **kwargs):
                return self.admin_view(view, cacheable)(*args, **kwargs)
            return update_wrapper(wrapper, view)

        # Admin-site-wide views.
        urlpatterns = patterns('',
            url(r'^$',
                wrap(self.index),
                name='index'),
            url(r'^logout/$',
                wrap(self.logout),
                name='logout'),
            url(r'^password_change/$',
                wrap(self.password_change, cacheable=True),
                name='password_change'),
            url(r'^password_change/done/$',
                wrap(self.password_change_done, cacheable=True),
                name='password_change_done'),
            url(r'^jsi18n/$',
                wrap(self.i18n_javascript, cacheable=True),
                name='jsi18n'),
            url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$',
                wrap(contenttype_views.shortcut)),
            url(r'^(?P<app_label>\w+)/$',
                wrap(self.app_index),
                name='app_list')
        )

        # Add in each model's views.
        for model, model_admin in self._registry.iteritems():
            urlpatterns += patterns('',
                url(r'^%s/%s/' % (model._meta.app_label, model._meta.module_name),
                    include(model_admin.urls))
            )
        return urlpatterns

    @property
    def urls(self):
        # Three-tuple suitable for include() in a URLconf.
        return self.get_urls(), self.app_name, self.name

    def password_change(self, request):
        """
        Handles the "change password" task -- both form display and validation.
        """
        from django.contrib.auth.views import password_change
        if self.root_path is not None:
            url = '%spassword_change/done/' % self.root_path
        else:
            url = reverse('admin:password_change_done', current_app=self.name)
        defaults = {
            'current_app': self.name,
            'post_change_redirect': url
        }
        if self.password_change_template is not None:
            defaults['template_name'] = self.password_change_template
        return password_change(request, **defaults)

    def password_change_done(self, request, extra_context=None):
        """
        Displays the "success" page after a password change.
        """
        from django.contrib.auth.views import password_change_done
        defaults = {
            'current_app': self.name,
            'extra_context': extra_context or {},
        }
        if self.password_change_done_template is not None:
            defaults['template_name'] = self.password_change_done_template
        return password_change_done(request, **defaults)

    def i18n_javascript(self, request):
        """
        Displays the i18n JavaScript that the Django admin requires.

        This takes into account the USE_I18N setting. If it's set to False, the
        generated JavaScript will be leaner and faster.
        """
        if settings.USE_I18N:
            from django.views.i18n import javascript_catalog
        else:
            from django.views.i18n import null_javascript_catalog as javascript_catalog
        return javascript_catalog(request, packages=['django.conf', 'django.contrib.admin'])

    @never_cache
    def logout(self, request, extra_context=None):
        """
        Logs out the user for the given HttpRequest.

        This should *not* assume the user is already logged in.
        """
        from django.contrib.auth.views import logout
        defaults = {
            'current_app': self.name,
            'extra_context': extra_context or {},
        }
        if self.logout_template is not None:
            defaults['template_name'] = self.logout_template
        return logout(request, **defaults)

    @never_cache
    def login(self, request, extra_context=None):
        """
        Displays the login form for the given HttpRequest.
        """
        from django.contrib.auth.views import login
        context = {
            'title': _('Log in'),
            'root_path': self.root_path,
            'app_path': request.get_full_path(),
            REDIRECT_FIELD_NAME: request.get_full_path(),
        }
        context.update(extra_context or {})
        defaults = {
            'extra_context': context,
            'current_app': self.name,
            'authentication_form': self.login_form or AdminAuthenticationForm,
            'template_name': self.login_template or 'admin/login.html',
        }
        return login(request, **defaults)

    @never_cache
    def index(self, request, extra_context=None):
        """
        Displays the main admin index page, which lists all of the installed
        apps that have been registered in this site.
        """
        app_dict = {}
        user = request.user
        for model, model_admin in self._registry.items():
            app_label = model._meta.app_label
            has_module_perms = user.has_module_perms(app_label)

            if has_module_perms:
                perms = model_admin.get_model_perms(request)

                # Check whether user has any perm for this module.
                # If so, add the module to the model_list.
                if True in perms.values():
                    model_dict = {
                        'name': capfirst(model._meta.verbose_name_plural),
                        'admin_url': mark_safe('%s/%s/' % (app_label, model.__name__.lower())),
                        'perms': perms,
                    }
                    if app_label in app_dict:
                        app_dict[app_label]['models'].append(model_dict)
                    else:
                        app_dict[app_label] = {
                            'name': app_label.title(),
                            'app_url': app_label + '/',
                            'has_module_perms': has_module_perms,
                            'models': [model_dict],
                        }

        # Sort the apps alphabetically.
        app_list = app_dict.values()
        app_list.sort(key=lambda x: x['name'])

        # Sort the models alphabetically within each app.
        for app in app_list:
            app['models'].sort(key=lambda x: x['name'])

        context = {
            'title': _('Site administration'),
            'app_list': app_list,
            'root_path': self.root_path,
        }
        context.update(extra_context or {})
        context_instance = template.RequestContext(request, current_app=self.name)
        return render_to_response(self.index_template or 'admin/index.html', context,
            context_instance=context_instance
        )

    def app_index(self, request, app_label, extra_context=None):
        # Like index(), but for a single app's model list.
        user = request.user
        has_module_perms = user.has_module_perms(app_label)
        app_dict = {}
        for model, model_admin in self._registry.items():
            if app_label == model._meta.app_label:
                if has_module_perms:
                    perms = model_admin.get_model_perms(request)

                    # Check whether user has any perm for this module.
                    # If so, add the module to the model_list.
                    if True in perms.values():
                        model_dict = {
                            'name': capfirst(model._meta.verbose_name_plural),
                            'admin_url': '%s/' % model.__name__.lower(),
                            'perms': perms,
                        }
                        if app_dict:
                            # NOTE(review): the trailing comma makes this
                            # statement a one-element tuple expression;
                            # harmless, but likely unintended.
                            app_dict['models'].append(model_dict),
                        else:
                            # First time around, now that we know there's
                            # something to display, add in the necessary meta
                            # information.
                            app_dict = {
                                'name': app_label.title(),
                                'app_url': '',
                                'has_module_perms': has_module_perms,
                                'models': [model_dict],
                            }
        if not app_dict:
            raise http.Http404('The requested admin page does not exist.')
        # Sort the models alphabetically within each app.
        app_dict['models'].sort(key=lambda x: x['name'])
        context = {
            'title': _('%s administration') % capfirst(app_label),
            'app_list': [app_dict],
            'root_path': self.root_path,
        }
        context.update(extra_context or {})
        context_instance = template.RequestContext(request, current_app=self.name)
        return render_to_response(self.app_index_template or ('admin/%s/app_index.html' % app_label,
            'admin/app_index.html'), context,
            context_instance=context_instance
        )
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
# Models are attached to it via site.register(Model, AdminClass).
site = AdminSite()
| bsd-3-clause |
kiszk/spark | examples/src/main/python/streaming/stateful_network_wordcount.py | 51 | 2274 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
r"""
Counts words in UTF8 encoded, '\n' delimited text received from the
network every second.
Usage: stateful_network_wordcount.py <hostname> <port>
<hostname> and <port> describe the TCP server that Spark Streaming
would connect to receive data.
To run this on your local machine, you need to first run a Netcat server
`$ nc -lk 9999`
and then run the example
`$ bin/spark-submit examples/src/main/python/streaming/stateful_network_wordcount.py \
localhost 9999`
"""
from __future__ import print_function
import sys
from pyspark import SparkContext
from pyspark.streaming import StreamingContext
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: stateful_network_wordcount.py <hostname> <port>", file=sys.stderr)
sys.exit(-1)
sc = SparkContext(appName="PythonStreamingStatefulNetworkWordCount")
ssc = StreamingContext(sc, 1)
ssc.checkpoint("checkpoint")
# RDD with initial state (key, value) pairs
initialStateRDD = sc.parallelize([(u'hello', 1), (u'world', 1)])
def updateFunc(new_values, last_sum):
return sum(new_values) + (last_sum or 0)
lines = ssc.socketTextStream(sys.argv[1], int(sys.argv[2]))
running_counts = lines.flatMap(lambda line: line.split(" "))\
.map(lambda word: (word, 1))\
.updateStateByKey(updateFunc, initialRDD=initialStateRDD)
running_counts.pprint()
ssc.start()
ssc.awaitTermination()
| apache-2.0 |
tumbl3w33d/ansible | test/units/modules/network/fortios/test_fortios_router_policy.py | 21 | 11853 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_router_policy
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Autouse fixture: patch the Connection class used by the module under test."""
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_router_policy.Connection')
    return connection_class_mock


# NOTE(review): this passes the fixture *function* object itself (not a
# mocked connection instance) to FortiOSHandler. The tests below patch the
# handler's methods directly, so the argument is never exercised -- but
# confirm this is intentional.
fos_instance = FortiOSHandler(connection_mock)
def test_router_policy_creation(mocker):
    """A successful 'present' run must POST the translated policy data."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    policy_settings = {
        'action': 'deny',
        'comments': 'test_value_4',
        'dst_negate': 'enable',
        'end_port': '6',
        'end_source_port': '7',
        'gateway': 'test_value_8',
        'output_device': 'test_value_9',
        'protocol': '10',
        'seq_num': '11',
        'src_negate': 'enable',
        'start_port': '13',
        'start_source_port': '14',
        'status': 'enable',
        'tos': 'test_value_16',
        'tos_mask': 'test_value_17'
    }
    input_data = {
        'username': 'admin',
        'state': 'present',
        'router_policy': policy_settings,
        'vdom': 'root'}

    is_error, changed, response = fortios_router_policy.fortios_router(input_data, fos_instance)

    # The module translates snake_case option names into the hyphenated
    # FortiOS attribute names before sending them to the device.
    expected_data = dict((key.replace('_', '-'), value)
                         for key, value in policy_settings.items())

    set_method_mock.assert_called_with('router', 'policy', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_router_policy_creation_fails(mocker):
    """A failing 'present' run must surface the error without reporting change."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    policy_settings = {
        'action': 'deny',
        'comments': 'test_value_4',
        'dst_negate': 'enable',
        'end_port': '6',
        'end_source_port': '7',
        'gateway': 'test_value_8',
        'output_device': 'test_value_9',
        'protocol': '10',
        'seq_num': '11',
        'src_negate': 'enable',
        'start_port': '13',
        'start_source_port': '14',
        'status': 'enable',
        'tos': 'test_value_16',
        'tos_mask': 'test_value_17'
    }
    input_data = {
        'username': 'admin',
        'state': 'present',
        'router_policy': policy_settings,
        'vdom': 'root'}

    is_error, changed, response = fortios_router_policy.fortios_router(input_data, fos_instance)

    # snake_case option names become the hyphenated FortiOS attribute names.
    expected_data = dict((key.replace('_', '-'), value)
                         for key, value in policy_settings.items())

    set_method_mock.assert_called_with('router', 'policy', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_router_policy_removal(mocker):
    """A successful 'absent' run must issue a DELETE for the policy."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)

    policy_settings = {
        'action': 'deny',
        'comments': 'test_value_4',
        'dst_negate': 'enable',
        'end_port': '6',
        'end_source_port': '7',
        'gateway': 'test_value_8',
        'output_device': 'test_value_9',
        'protocol': '10',
        'seq_num': '11',
        'src_negate': 'enable',
        'start_port': '13',
        'start_source_port': '14',
        'status': 'enable',
        'tos': 'test_value_16',
        'tos_mask': 'test_value_17'
    }
    input_data = {
        'username': 'admin',
        'state': 'absent',
        'router_policy': policy_settings,
        'vdom': 'root'}

    is_error, changed, response = fortios_router_policy.fortios_router(input_data, fos_instance)

    delete_method_mock.assert_called_with('router', 'policy', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_router_policy_deletion_fails(mocker):
    """A failed DELETE (HTTP 500) surfaces as an error with no change."""
    mock_schema = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    mock_delete = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete',
        return_value={'status': 'error', 'http_method': 'POST', 'http_status': 500})

    policy = {
        'action': 'deny',
        'comments': 'test_value_4',
        'dst_negate': 'enable',
        'end_port': '6',
        'end_source_port': '7',
        'gateway': 'test_value_8',
        'output_device': 'test_value_9',
        'protocol': '10',
        'seq_num': '11',
        'src_negate': 'enable',
        'start_port': '13',
        'start_source_port': '14',
        'status': 'enable',
        'tos': 'test_value_16',
        'tos_mask': 'test_value_17',
    }
    module_args = {
        'username': 'admin',
        'state': 'absent',
        'router_policy': policy,
        'vdom': 'root',
    }

    is_error, changed, response = fortios_router_policy.fortios_router(
        module_args, fos_instance)

    mock_delete.assert_called_with('router', 'policy', mkey=ANY, vdom='root')
    mock_schema.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_router_policy_idempotent(mocker):
    """A 404 from a DELETE-style set is treated as idempotent: no error, no change."""
    mock_schema = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    mock_set = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'error', 'http_method': 'DELETE', 'http_status': 404})

    policy = {
        'action': 'deny',
        'comments': 'test_value_4',
        'dst_negate': 'enable',
        'end_port': '6',
        'end_source_port': '7',
        'gateway': 'test_value_8',
        'output_device': 'test_value_9',
        'protocol': '10',
        'seq_num': '11',
        'src_negate': 'enable',
        'start_port': '13',
        'start_source_port': '14',
        'status': 'enable',
        'tos': 'test_value_16',
        'tos_mask': 'test_value_17',
    }
    module_args = {
        'username': 'admin',
        'state': 'present',
        'router_policy': policy,
        'vdom': 'root',
    }

    is_error, changed, response = fortios_router_policy.fortios_router(
        module_args, fos_instance)

    # The module converts snake_case option names to the API's dashed keys.
    expected_data = {
        'action': 'deny',
        'comments': 'test_value_4',
        'dst-negate': 'enable',
        'end-port': '6',
        'end-source-port': '7',
        'gateway': 'test_value_8',
        'output-device': 'test_value_9',
        'protocol': '10',
        'seq-num': '11',
        'src-negate': 'enable',
        'start-port': '13',
        'start-source-port': '14',
        'status': 'enable',
        'tos': 'test_value_16',
        'tos-mask': 'test_value_17',
    }
    mock_set.assert_called_with('router', 'policy', data=expected_data, vdom='root')
    mock_schema.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_router_policy_filter_foreign_attributes(mocker):
    """Unknown attributes in the task input are stripped before hitting the API."""
    mock_schema = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    mock_set = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    policy = {
        # Not part of the module schema; must not reach the API payload.
        'random_attribute_not_valid': 'tag',
        'action': 'deny',
        'comments': 'test_value_4',
        'dst_negate': 'enable',
        'end_port': '6',
        'end_source_port': '7',
        'gateway': 'test_value_8',
        'output_device': 'test_value_9',
        'protocol': '10',
        'seq_num': '11',
        'src_negate': 'enable',
        'start_port': '13',
        'start_source_port': '14',
        'status': 'enable',
        'tos': 'test_value_16',
        'tos_mask': 'test_value_17',
    }
    module_args = {
        'username': 'admin',
        'state': 'present',
        'router_policy': policy,
        'vdom': 'root',
    }

    is_error, changed, response = fortios_router_policy.fortios_router(
        module_args, fos_instance)

    expected_data = {
        'action': 'deny',
        'comments': 'test_value_4',
        'dst-negate': 'enable',
        'end-port': '6',
        'end-source-port': '7',
        'gateway': 'test_value_8',
        'output-device': 'test_value_9',
        'protocol': '10',
        'seq-num': '11',
        'src-negate': 'enable',
        'start-port': '13',
        'start-source-port': '14',
        'status': 'enable',
        'tos': 'test_value_16',
        'tos-mask': 'test_value_17',
    }
    mock_set.assert_called_with('router', 'policy', data=expected_data, vdom='root')
    mock_schema.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
| gpl-3.0 |
tarvip/skytools | python/skytools/sockutil.py | 3 | 4066 | """Various low-level utility functions for sockets."""
import os
import socket
import sys

# fcntl is POSIX-only.  On platforms without it (e.g. Windows) the import is
# skipped and set_nonblocking()/set_cloexec() will raise NameError if called.
try:
    import fcntl
except ImportError:
    pass

# Public API of this module.  (The original file assigned __all__ twice with
# the same value; the duplicate has been removed.)
__all__ = ['set_tcp_keepalive', 'set_nonblocking', 'set_cloexec']
def set_tcp_keepalive(fd, keepalive = True,
                      tcp_keepidle = 4 * 60,
                      tcp_keepcnt = 4,
                      tcp_keepintvl = 15):
    """Enable (or disable) TCP keepalive on a socket.

    ``fd`` may be a socket object, any object with a ``fileno()`` method,
    or a raw file descriptor.  Unix-domain sockets are silently skipped,
    as are platforms lacking ``SO_KEEPALIVE``/``fromfd`` support.

    OS defaults for SO_KEEPALIVE=1:

    - Linux: (7200, 9, 75) - can configure all.
    - MacOS: (7200, 8, 75) - can configure only tcp_keepidle.
    - Win32: (7200, 5|10, 1) - can configure tcp_keepidle and tcp_keepintvl.

    Our defaults: idle=240s, count=4, interval=15s.

    >>> import socket
    >>> s = socket.socket()
    >>> set_tcp_keepalive(s)
    """
    # Nothing we can do on platforms without the required primitives.
    if not hasattr(socket, 'SO_KEEPALIVE') or not hasattr(socket, 'fromfd'):
        return

    # Normalize the argument into a socket object.
    if isinstance(fd, socket.SocketType):
        sock = fd
    else:
        if hasattr(fd, 'fileno'):
            fd = fd.fileno()
        sock = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM)

    # AF_UNIX sockets report a string address; keepalive is TCP-only.
    if type(sock.getsockname()) != type(()):
        return

    if not keepalive:
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 0)
        return
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

    # Fine-tuning knobs are platform-specific; probe for each one.
    opt_keepcnt = getattr(socket, 'TCP_KEEPCNT', None)
    opt_keepintvl = getattr(socket, 'TCP_KEEPINTVL', None)
    opt_keepidle = getattr(socket, 'TCP_KEEPIDLE', None)
    opt_keepalive = getattr(socket, 'TCP_KEEPALIVE', None)
    opt_keepalive_vals = getattr(socket, 'SIO_KEEPALIVE_VALS', None)
    if opt_keepidle is None and opt_keepalive is None and sys.platform == 'darwin':
        # MacOS exposes the idle time under a constant the stdlib may lack.
        opt_keepalive = 0x10

    if opt_keepcnt is not None:
        sock.setsockopt(socket.IPPROTO_TCP, opt_keepcnt, tcp_keepcnt)
    if opt_keepintvl is not None:
        sock.setsockopt(socket.IPPROTO_TCP, opt_keepintvl, tcp_keepintvl)
    if opt_keepidle is not None:
        sock.setsockopt(socket.IPPROTO_TCP, opt_keepidle, tcp_keepidle)
    elif opt_keepalive is not None:
        sock.setsockopt(socket.IPPROTO_TCP, opt_keepalive, tcp_keepidle)
    elif opt_keepalive_vals is not None:
        # Windows ioctl takes (onoff, idle_ms, interval_ms).
        sock.ioctl(opt_keepalive_vals,
                   (1, tcp_keepidle * 1000, tcp_keepintvl * 1000))
def set_nonblocking(fd, onoff=True):
    """Set, clear or query the O_NONBLOCK flag on a descriptor.

    Passing ``onoff=None`` only reports the current state and changes
    nothing.  Real socket objects should use their own ``.setblocking()``
    method; this helper exists for plain fds where that is unavailable,
    e.g. pipes from the 'subprocess' module.

    >>> import socket
    >>> s = socket.socket()
    >>> set_nonblocking(s, None)
    False
    >>> set_nonblocking(s, 1)
    >>> set_nonblocking(s, None)
    True
    """
    current = fcntl.fcntl(fd, fcntl.F_GETFL)
    if onoff is None:
        return (current & os.O_NONBLOCK) > 0
    updated = (current | os.O_NONBLOCK) if onoff else (current & ~os.O_NONBLOCK)
    fcntl.fcntl(fd, fcntl.F_SETFL, updated)
def set_cloexec(fd, onoff=True):
    """Set, clear or query the FD_CLOEXEC flag on a descriptor.

    Passing ``onoff=None`` only reports the current state and changes
    nothing.  Some libraries set the flag automatically (eg. libpq);
    others do not.  Note that since Python 3.4 (PEP 446) descriptors
    created by the stdlib are non-inheritable by default, so a freshly
    opened file or socket typically already has FD_CLOEXEC set.
    """
    current = fcntl.fcntl(fd, fcntl.F_GETFD)
    if onoff is None:
        return (current & fcntl.FD_CLOEXEC) > 0
    updated = (current | fcntl.FD_CLOEXEC) if onoff else (current & ~fcntl.FD_CLOEXEC)
    fcntl.fcntl(fd, fcntl.F_SETFD, updated)
# Run the embedded doctests when this module is executed directly.
if __name__ == '__main__':
    import doctest
    doctest.testmod()
| isc |
chiviak/CouchPotatoServer | libs/dateutil/rrule.py | 214 | 41036 | """
Copyright (c) 2003-2010 Gustavo Niemeyer <gustavo@niemeyer.net>
This module offers extensions to the standard Python
datetime module.
"""
__license__ = "Simplified BSD"
import itertools
import datetime
import calendar
try:
    import _thread
except ImportError:
    import thread as _thread  # Python 2 fallback name.
import sys

from six import advance_iterator, integer_types

__all__ = ["rrule", "rruleset", "rrulestr",
           "YEARLY", "MONTHLY", "WEEKLY", "DAILY",
           "HOURLY", "MINUTELY", "SECONDLY",
           "MO", "TU", "WE", "TH", "FR", "SA", "SU"]

# Precomputed per-day lookup tables for leap (366) and non-leap (365) years:
#   M*MASK     - month number of each day of the year
#   MDAY*MASK  - day-of-month (1..31) of each day of the year
#   NMDAY*MASK - negative day-of-month (-31..-1) of each day of the year
#   M*RANGE    - cumulative day offsets where each month starts
#   WDAYMASK   - repeating weekday cycle, long enough for any year + 7 days
# Every mask is 7 days longer to handle cross-year weekly periods.
M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30+
                 [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7)
M365MASK = list(M366MASK)
M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32))
MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
MDAY365MASK = list(MDAY366MASK)
M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0))
NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
NMDAY365MASK = list(NMDAY366MASK)
M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366)
M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365)
WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55
# Strip Feb 29 out of the 365-day variants, then freeze them.
del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31]
MDAY365MASK = tuple(MDAY365MASK)
M365MASK = tuple(M365MASK)

# Frequency constants, ordered from coarsest to finest.
(YEARLY,
 MONTHLY,
 WEEKLY,
 DAILY,
 HOURLY,
 MINUTELY,
 SECONDLY) = list(range(7))

# Imported on demand.
easter = None
parser = None
class weekday(object):
    """A weekday specifier, optionally carrying an occurrence number ``n``.

    ``weekday`` is 0 (Monday) .. 6 (Sunday).  ``n`` selects the n-th
    occurrence within a period, e.g. ``FR(-1)`` is "last Friday";
    ``n == 0`` is rejected as meaningless.
    """
    __slots__ = ["weekday", "n"]

    def __init__(self, weekday, n=None):
        if n == 0:
            raise ValueError("Can't create weekday with n == 0")
        self.weekday = weekday
        self.n = n

    def __call__(self, n):
        # Supports the MO(+2) construction idiom; reuse self when n is unchanged.
        if n == self.n:
            return self
        else:
            return self.__class__(self.weekday, n)

    def __eq__(self, other):
        try:
            if self.weekday != other.weekday or self.n != other.n:
                return False
        except AttributeError:
            return False
        return True

    def __hash__(self):
        # Defined alongside __eq__ so instances remain usable as dict keys
        # and set members under Python 3, where defining __eq__ alone
        # disables hashing.  Consistent with __eq__: equal objects hash equal.
        return hash((self.weekday, self.n))

    def __repr__(self):
        s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
        if not self.n:
            return s
        else:
            return "%s(%+d)" % (s, self.n)

# Singleton instances for the plain (n=None) weekdays.
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple([weekday(x) for x in range(7)])
class rrulebase(object):
    """Shared iteration/caching machinery for rrule and rruleset.

    Subclasses implement ``_iter()``; this base provides optional result
    caching, indexing, membership tests and the before/after/between
    convenience queries.
    """

    def __init__(self, cache=False):
        if cache:
            # Lazily-filled cache shared by all iterators over this object.
            # The lock serializes refills of the single underlying generator.
            self._cache = []
            self._cache_lock = _thread.allocate_lock()
            self._cache_gen = self._iter()
            self._cache_complete = False
        else:
            self._cache = None
            self._cache_complete = False
        self._len = None  # total number of occurrences, known once exhausted

    def __iter__(self):
        if self._cache_complete:
            return iter(self._cache)
        elif self._cache is None:
            return self._iter()
        else:
            return self._iter_cached()

    def _iter_cached(self):
        """Yield from the cache, extending it in chunks of 10 as needed."""
        i = 0
        gen = self._cache_gen
        cache = self._cache
        acquire = self._cache_lock.acquire
        release = self._cache_lock.release
        while gen:
            if i == len(cache):
                acquire()
                if self._cache_complete:
                    break
                try:
                    for j in range(10):
                        cache.append(advance_iterator(gen))
                except StopIteration:
                    self._cache_gen = gen = None
                    self._cache_complete = True
                    break
                release()
            yield cache[i]
            i += 1
        # Another iterator may have completed the cache; drain the rest.
        while i < self._len:
            yield cache[i]
            i += 1

    def __getitem__(self, item):
        if self._cache_complete:
            return self._cache[item]
        elif isinstance(item, slice):
            if item.step and item.step < 0:
                # Negative strides require the full materialized list.
                return list(iter(self))[item]
            else:
                return list(itertools.islice(self,
                                             item.start or 0,
                                             item.stop or sys.maxsize,
                                             item.step or 1))
        elif item >= 0:
            gen = iter(self)
            try:
                for i in range(item+1):
                    res = advance_iterator(gen)
            except StopIteration:
                raise IndexError
            return res
        else:
            # Negative index: only resolvable by exhausting the iterator.
            return list(iter(self))[item]

    def __contains__(self, item):
        if self._cache_complete:
            return item in self._cache
        else:
            # Occurrences are produced in ascending order, so we can stop
            # as soon as we pass the probe value.
            for i in self:
                if i == item:
                    return True
                elif i > item:
                    return False
        return False

    # __len__() introduces a large performance penality.
    def count(self):
        """Return the number of occurrences (exhausts the rule once)."""
        if self._len is None:
            for x in self: pass
        return self._len

    def before(self, dt, inc=False):
        """Return the last occurrence before ``dt`` (or <= if ``inc``)."""
        if self._cache_complete:
            gen = self._cache
        else:
            gen = self
        last = None
        if inc:
            for i in gen:
                if i > dt:
                    break
                last = i
        else:
            for i in gen:
                if i >= dt:
                    break
                last = i
        return last

    def after(self, dt, inc=False):
        """Return the first occurrence after ``dt`` (or >= if ``inc``)."""
        if self._cache_complete:
            gen = self._cache
        else:
            gen = self
        if inc:
            for i in gen:
                if i >= dt:
                    return i
        else:
            for i in gen:
                if i > dt:
                    return i
        return None

    def between(self, after, before, inc=False):
        """Return all occurrences strictly between ``after`` and ``before``
        (boundaries included when ``inc`` is true)."""
        if self._cache_complete:
            gen = self._cache
        else:
            gen = self
        started = False
        l = []
        if inc:
            for i in gen:
                if i > before:
                    break
                elif not started:
                    if i >= after:
                        started = True
                        l.append(i)
                else:
                    l.append(i)
        else:
            for i in gen:
                if i >= before:
                    break
                elif not started:
                    if i > after:
                        started = True
                        l.append(i)
                else:
                    l.append(i)
        return l
class rrule(rrulebase):
    """A recurrence rule, modelled on the iCalendar RRULE (RFC 2445).

    Construction normalizes all BY* options into canonical tuples stored
    on ``self._*``; ``_iter()`` then generates occurrences by scanning
    candidate day sets year-by-year (or month/week/day) and filtering.
    """

    def __init__(self, freq, dtstart=None,
                 interval=1, wkst=None, count=None, until=None, bysetpos=None,
                 bymonth=None, bymonthday=None, byyearday=None, byeaster=None,
                 byweekno=None, byweekday=None,
                 byhour=None, byminute=None, bysecond=None,
                 cache=False):
        super(rrule, self).__init__(cache)
        global easter
        # Normalize dtstart to a datetime with zeroed microseconds.
        if not dtstart:
            dtstart = datetime.datetime.now().replace(microsecond=0)
        elif not isinstance(dtstart, datetime.datetime):
            dtstart = datetime.datetime.fromordinal(dtstart.toordinal())
        else:
            dtstart = dtstart.replace(microsecond=0)
        self._dtstart = dtstart
        self._tzinfo = dtstart.tzinfo
        self._freq = freq
        self._interval = interval
        self._count = count
        if until and not isinstance(until, datetime.datetime):
            until = datetime.datetime.fromordinal(until.toordinal())
        self._until = until
        # Week start: default from the locale's calendar module setting.
        if wkst is None:
            self._wkst = calendar.firstweekday()
        elif isinstance(wkst, integer_types):
            self._wkst = wkst
        else:
            self._wkst = wkst.weekday
        if bysetpos is None:
            self._bysetpos = None
        elif isinstance(bysetpos, integer_types):
            if bysetpos == 0 or not (-366 <= bysetpos <= 366):
                raise ValueError("bysetpos must be between 1 and 366, "
                                 "or between -366 and -1")
            self._bysetpos = (bysetpos,)
        else:
            self._bysetpos = tuple(bysetpos)
            for pos in self._bysetpos:
                if pos == 0 or not (-366 <= pos <= 366):
                    raise ValueError("bysetpos must be between 1 and 366, "
                                     "or between -366 and -1")
        # If no day-selecting option was given, derive one from dtstart,
        # per RFC 2445 semantics.
        if not (byweekno or byyearday or bymonthday or
                byweekday is not None or byeaster is not None):
            if freq == YEARLY:
                if not bymonth:
                    bymonth = dtstart.month
                bymonthday = dtstart.day
            elif freq == MONTHLY:
                bymonthday = dtstart.day
            elif freq == WEEKLY:
                byweekday = dtstart.weekday()
        # bymonth
        if not bymonth:
            self._bymonth = None
        elif isinstance(bymonth, integer_types):
            self._bymonth = (bymonth,)
        else:
            self._bymonth = tuple(bymonth)
        # byyearday
        if not byyearday:
            self._byyearday = None
        elif isinstance(byyearday, integer_types):
            self._byyearday = (byyearday,)
        else:
            self._byyearday = tuple(byyearday)
        # byeaster
        if byeaster is not None:
            if not easter:
                from dateutil import easter
            if isinstance(byeaster, integer_types):
                self._byeaster = (byeaster,)
            else:
                self._byeaster = tuple(byeaster)
        else:
            self._byeaster = None
        # bymonthay
        # Positive and negative month days are tracked separately.
        if not bymonthday:
            self._bymonthday = ()
            self._bynmonthday = ()
        elif isinstance(bymonthday, integer_types):
            if bymonthday < 0:
                self._bynmonthday = (bymonthday,)
                self._bymonthday = ()
            else:
                self._bymonthday = (bymonthday,)
                self._bynmonthday = ()
        else:
            self._bymonthday = tuple([x for x in bymonthday if x > 0])
            self._bynmonthday = tuple([x for x in bymonthday if x < 0])
        # byweekno
        if byweekno is None:
            self._byweekno = None
        elif isinstance(byweekno, integer_types):
            self._byweekno = (byweekno,)
        else:
            self._byweekno = tuple(byweekno)
        # byweekday / bynweekday
        # Plain weekdays and "n-th weekday" specifiers are kept apart;
        # the n is only meaningful for YEARLY/MONTHLY frequencies.
        if byweekday is None:
            self._byweekday = None
            self._bynweekday = None
        elif isinstance(byweekday, integer_types):
            self._byweekday = (byweekday,)
            self._bynweekday = None
        elif hasattr(byweekday, "n"):
            if not byweekday.n or freq > MONTHLY:
                self._byweekday = (byweekday.weekday,)
                self._bynweekday = None
            else:
                self._bynweekday = ((byweekday.weekday, byweekday.n),)
                self._byweekday = None
        else:
            self._byweekday = []
            self._bynweekday = []
            for wday in byweekday:
                if isinstance(wday, integer_types):
                    self._byweekday.append(wday)
                elif not wday.n or freq > MONTHLY:
                    self._byweekday.append(wday.weekday)
                else:
                    self._bynweekday.append((wday.weekday, wday.n))
            self._byweekday = tuple(self._byweekday)
            self._bynweekday = tuple(self._bynweekday)
            if not self._byweekday:
                self._byweekday = None
            elif not self._bynweekday:
                self._bynweekday = None
        # byhour
        if byhour is None:
            if freq < HOURLY:
                self._byhour = (dtstart.hour,)
            else:
                self._byhour = None
        elif isinstance(byhour, integer_types):
            self._byhour = (byhour,)
        else:
            self._byhour = tuple(byhour)
        # byminute
        if byminute is None:
            if freq < MINUTELY:
                self._byminute = (dtstart.minute,)
            else:
                self._byminute = None
        elif isinstance(byminute, integer_types):
            self._byminute = (byminute,)
        else:
            self._byminute = tuple(byminute)
        # bysecond
        if bysecond is None:
            if freq < SECONDLY:
                self._bysecond = (dtstart.second,)
            else:
                self._bysecond = None
        elif isinstance(bysecond, integer_types):
            self._bysecond = (bysecond,)
        else:
            self._bysecond = tuple(bysecond)

        # For frequencies coarser than HOURLY the set of times-of-day is
        # fixed and can be precomputed once.
        if self._freq >= HOURLY:
            self._timeset = None
        else:
            self._timeset = []
            for hour in self._byhour:
                for minute in self._byminute:
                    for second in self._bysecond:
                        self._timeset.append(
                                datetime.time(hour, minute, second,
                                              tzinfo=self._tzinfo))
            self._timeset.sort()
            self._timeset = tuple(self._timeset)

    def _iter(self):
        """Generate the occurrences of this rule in ascending order."""
        year, month, day, hour, minute, second, weekday, yearday, _ = \
            self._dtstart.timetuple()

        # Some local variables to speed things up a bit
        freq = self._freq
        interval = self._interval
        wkst = self._wkst
        until = self._until
        bymonth = self._bymonth
        byweekno = self._byweekno
        byyearday = self._byyearday
        byweekday = self._byweekday
        byeaster = self._byeaster
        bymonthday = self._bymonthday
        bynmonthday = self._bynmonthday
        bysetpos = self._bysetpos
        byhour = self._byhour
        byminute = self._byminute
        bysecond = self._bysecond

        ii = _iterinfo(self)
        ii.rebuild(year, month)

        # Pick the candidate-day generator matching the frequency.
        getdayset = {YEARLY:ii.ydayset,
                     MONTHLY:ii.mdayset,
                     WEEKLY:ii.wdayset,
                     DAILY:ii.ddayset,
                     HOURLY:ii.ddayset,
                     MINUTELY:ii.ddayset,
                     SECONDLY:ii.ddayset}[freq]

        if freq < HOURLY:
            timeset = self._timeset
        else:
            gettimeset = {HOURLY:ii.htimeset,
                          MINUTELY:ii.mtimeset,
                          SECONDLY:ii.stimeset}[freq]
            if ((freq >= HOURLY and
                 self._byhour and hour not in self._byhour) or
                (freq >= MINUTELY and
                 self._byminute and minute not in self._byminute) or
                (freq >= SECONDLY and
                 self._bysecond and second not in self._bysecond)):
                timeset = ()
            else:
                timeset = gettimeset(hour, minute, second)

        total = 0
        count = self._count
        while True:
            # Get dayset with the right frequency
            dayset, start, end = getdayset(year, month, day)

            # Do the "hard" work ;-)
            # Cross out candidate days rejected by any BY* constraint.
            filtered = False
            for i in dayset[start:end]:
                if ((bymonth and ii.mmask[i] not in bymonth) or
                    (byweekno and not ii.wnomask[i]) or
                    (byweekday and ii.wdaymask[i] not in byweekday) or
                    (ii.nwdaymask and not ii.nwdaymask[i]) or
                    (byeaster and not ii.eastermask[i]) or
                    ((bymonthday or bynmonthday) and
                     ii.mdaymask[i] not in bymonthday and
                     ii.nmdaymask[i] not in bynmonthday) or
                    (byyearday and
                     ((i < ii.yearlen and i+1 not in byyearday
                                      and -ii.yearlen+i not in byyearday) or
                      (i >= ii.yearlen and i+1-ii.yearlen not in byyearday
                                       and -ii.nextyearlen+i-ii.yearlen
                                           not in byyearday)))):
                    dayset[i] = None
                    filtered = True

            # Output results
            if bysetpos and timeset:
                # BYSETPOS selects positions within this period's full
                # (day, time) cross product.
                poslist = []
                for pos in bysetpos:
                    if pos < 0:
                        daypos, timepos = divmod(pos, len(timeset))
                    else:
                        daypos, timepos = divmod(pos-1, len(timeset))
                    try:
                        i = [x for x in dayset[start:end]
                             if x is not None][daypos]
                        time = timeset[timepos]
                    except IndexError:
                        pass
                    else:
                        date = datetime.date.fromordinal(ii.yearordinal+i)
                        res = datetime.datetime.combine(date, time)
                        if res not in poslist:
                            poslist.append(res)
                poslist.sort()
                for res in poslist:
                    if until and res > until:
                        self._len = total
                        return
                    elif res >= self._dtstart:
                        total += 1
                        yield res
                        if count:
                            count -= 1
                            if not count:
                                self._len = total
                                return
            else:
                for i in dayset[start:end]:
                    if i is not None:
                        date = datetime.date.fromordinal(ii.yearordinal+i)
                        for time in timeset:
                            res = datetime.datetime.combine(date, time)
                            if until and res > until:
                                self._len = total
                                return
                            elif res >= self._dtstart:
                                total += 1
                                yield res
                                if count:
                                    count -= 1
                                    if not count:
                                        self._len = total
                                        return

            # Handle frequency and interval
            # Advance to the next period; fixday defers month/year carry of
            # the day counter until the end of the loop body.
            fixday = False
            if freq == YEARLY:
                year += interval
                if year > datetime.MAXYEAR:
                    self._len = total
                    return
                ii.rebuild(year, month)
            elif freq == MONTHLY:
                month += interval
                if month > 12:
                    div, mod = divmod(month, 12)
                    month = mod
                    year += div
                    if month == 0:
                        month = 12
                        year -= 1
                    if year > datetime.MAXYEAR:
                        self._len = total
                        return
                ii.rebuild(year, month)
            elif freq == WEEKLY:
                if wkst > weekday:
                    day += -(weekday+1+(6-wkst))+self._interval*7
                else:
                    day += -(weekday-wkst)+self._interval*7
                weekday = wkst
                fixday = True
            elif freq == DAILY:
                day += interval
                fixday = True
            elif freq == HOURLY:
                if filtered:
                    # Jump to one iteration before next day
                    hour += ((23-hour)//interval)*interval
                while True:
                    hour += interval
                    div, mod = divmod(hour, 24)
                    if div:
                        hour = mod
                        day += div
                        fixday = True
                    if not byhour or hour in byhour:
                        break
                timeset = gettimeset(hour, minute, second)
            elif freq == MINUTELY:
                if filtered:
                    # Jump to one iteration before next day
                    minute += ((1439-(hour*60+minute))//interval)*interval
                while True:
                    minute += interval
                    div, mod = divmod(minute, 60)
                    if div:
                        minute = mod
                        hour += div
                        div, mod = divmod(hour, 24)
                        if div:
                            hour = mod
                            day += div
                            fixday = True
                            filtered = False
                    if ((not byhour or hour in byhour) and
                        (not byminute or minute in byminute)):
                        break
                timeset = gettimeset(hour, minute, second)
            elif freq == SECONDLY:
                if filtered:
                    # Jump to one iteration before next day
                    second += (((86399-(hour*3600+minute*60+second))
                                //interval)*interval)
                while True:
                    second += self._interval
                    div, mod = divmod(second, 60)
                    if div:
                        second = mod
                        minute += div
                        div, mod = divmod(minute, 60)
                        if div:
                            minute = mod
                            hour += div
                            div, mod = divmod(hour, 24)
                            if div:
                                hour = mod
                                day += div
                                fixday = True
                    if ((not byhour or hour in byhour) and
                        (not byminute or minute in byminute) and
                        (not bysecond or second in bysecond)):
                        break
                timeset = gettimeset(hour, minute, second)

            if fixday and day > 28:
                # Carry the accumulated day counter over month/year
                # boundaries.
                daysinmonth = calendar.monthrange(year, month)[1]
                if day > daysinmonth:
                    while day > daysinmonth:
                        day -= daysinmonth
                        month += 1
                        if month == 13:
                            month = 1
                            year += 1
                            if year > datetime.MAXYEAR:
                                self._len = total
                                return
                        daysinmonth = calendar.monthrange(year, month)[1]
                    ii.rebuild(year, month)
class _iterinfo(object):
    """Per-(year, month) precomputed lookup tables used by rrule._iter().

    ``rebuild`` refreshes the masks whenever the iteration crosses into a
    new year or month; the remaining methods materialize candidate day
    sets and time sets for each frequency.
    """
    __slots__ = ["rrule", "lastyear", "lastmonth",
                 "yearlen", "nextyearlen", "yearordinal", "yearweekday",
                 "mmask", "mrange", "mdaymask", "nmdaymask",
                 "wdaymask", "wnomask", "nwdaymask", "eastermask"]

    def __init__(self, rrule):
        for attr in self.__slots__:
            setattr(self, attr, None)
        self.rrule = rrule

    def rebuild(self, year, month):
        # Every mask is 7 days longer to handle cross-year weekly periods.
        rr = self.rrule
        if year != self.lastyear:
            self.yearlen = 365+calendar.isleap(year)
            self.nextyearlen = 365+calendar.isleap(year+1)
            firstyday = datetime.date(year, 1, 1)
            self.yearordinal = firstyday.toordinal()
            self.yearweekday = firstyday.weekday()

            wday = datetime.date(year, 1, 1).weekday()
            if self.yearlen == 365:
                self.mmask = M365MASK
                self.mdaymask = MDAY365MASK
                self.nmdaymask = NMDAY365MASK
                self.wdaymask = WDAYMASK[wday:]
                self.mrange = M365RANGE
            else:
                self.mmask = M366MASK
                self.mdaymask = MDAY366MASK
                self.nmdaymask = NMDAY366MASK
                self.wdaymask = WDAYMASK[wday:]
                self.mrange = M366RANGE

            if not rr._byweekno:
                self.wnomask = None
            else:
                # Build a per-day mask of the requested ISO-style week
                # numbers (relative to the rule's week-start day).
                self.wnomask = [0]*(self.yearlen+7)
                #no1wkst = firstwkst = self.wdaymask.index(rr._wkst)
                no1wkst = firstwkst = (7-self.yearweekday+rr._wkst)%7
                if no1wkst >= 4:
                    no1wkst = 0
                    # Number of days in the year, plus the days we got
                    # from last year.
                    wyearlen = self.yearlen+(self.yearweekday-rr._wkst)%7
                else:
                    # Number of days in the year, minus the days we
                    # left in last year.
                    wyearlen = self.yearlen-no1wkst
                div, mod = divmod(wyearlen, 7)
                numweeks = div+mod//4
                for n in rr._byweekno:
                    if n < 0:
                        n += numweeks+1
                    if not (0 < n <= numweeks):
                        continue
                    if n > 1:
                        i = no1wkst+(n-1)*7
                        if no1wkst != firstwkst:
                            i -= 7-firstwkst
                    else:
                        i = no1wkst
                    for j in range(7):
                        self.wnomask[i] = 1
                        i += 1
                        if self.wdaymask[i] == rr._wkst:
                            break
                if 1 in rr._byweekno:
                    # Check week number 1 of next year as well
                    # TODO: Check -numweeks for next year.
                    i = no1wkst+numweeks*7
                    if no1wkst != firstwkst:
                        i -= 7-firstwkst
                    if i < self.yearlen:
                        # If week starts in next year, we
                        # don't care about it.
                        for j in range(7):
                            self.wnomask[i] = 1
                            i += 1
                            if self.wdaymask[i] == rr._wkst:
                                break
                if no1wkst:
                    # Check last week number of last year as
                    # well. If no1wkst is 0, either the year
                    # started on week start, or week number 1
                    # got days from last year, so there are no
                    # days from last year's last week number in
                    # this year.
                    if -1 not in rr._byweekno:
                        lyearweekday = datetime.date(year-1, 1, 1).weekday()
                        lno1wkst = (7-lyearweekday+rr._wkst)%7
                        lyearlen = 365+calendar.isleap(year-1)
                        if lno1wkst >= 4:
                            lno1wkst = 0
                            lnumweeks = 52+(lyearlen+
                                           (lyearweekday-rr._wkst)%7)%7//4
                        else:
                            lnumweeks = 52+(self.yearlen-no1wkst)%7//4
                    else:
                        lnumweeks = -1
                    if lnumweeks in rr._byweekno:
                        for i in range(no1wkst):
                            self.wnomask[i] = 1

        if (rr._bynweekday and
            (month != self.lastmonth or year != self.lastyear)):
            # Resolve "n-th weekday of period" specifiers into absolute
            # day-of-year positions for the relevant month range(s).
            ranges = []
            if rr._freq == YEARLY:
                if rr._bymonth:
                    for month in rr._bymonth:
                        ranges.append(self.mrange[month-1:month+1])
                else:
                    ranges = [(0, self.yearlen)]
            elif rr._freq == MONTHLY:
                ranges = [self.mrange[month-1:month+1]]
            if ranges:
                # Weekly frequency won't get here, so we may not
                # care about cross-year weekly periods.
                self.nwdaymask = [0]*self.yearlen
                for first, last in ranges:
                    last -= 1
                    for wday, n in rr._bynweekday:
                        if n < 0:
                            i = last+(n+1)*7
                            i -= (self.wdaymask[i]-wday)%7
                        else:
                            i = first+(n-1)*7
                            i += (7-self.wdaymask[i]+wday)%7
                        if first <= i <= last:
                            self.nwdaymask[i] = 1

        if rr._byeaster:
            self.eastermask = [0]*(self.yearlen+7)
            eyday = easter.easter(year).toordinal()-self.yearordinal
            for offset in rr._byeaster:
                self.eastermask[eyday+offset] = 1

        self.lastyear = year
        self.lastmonth = month

    def ydayset(self, year, month, day):
        """Candidate days for YEARLY frequency: every day of the year."""
        return list(range(self.yearlen)), 0, self.yearlen

    def mdayset(self, year, month, day):
        """Candidate days for MONTHLY frequency: the days of one month."""
        set = [None]*self.yearlen
        start, end = self.mrange[month-1:month+1]
        for i in range(start, end):
            set[i] = i
        return set, start, end

    def wdayset(self, year, month, day):
        """Candidate days for WEEKLY frequency: one week starting at day."""
        # We need to handle cross-year weeks here.
        set = [None]*(self.yearlen+7)
        i = datetime.date(year, month, day).toordinal()-self.yearordinal
        start = i
        for j in range(7):
            set[i] = i
            i += 1
            #if (not (0 <= i < self.yearlen) or
            #    self.wdaymask[i] == self.rrule._wkst):
            # This will cross the year boundary, if necessary.
            if self.wdaymask[i] == self.rrule._wkst:
                break
        return set, start, i

    def ddayset(self, year, month, day):
        """Candidate days for DAILY (and finer) frequency: one day."""
        set = [None]*self.yearlen
        i = datetime.date(year, month, day).toordinal()-self.yearordinal
        set[i] = i
        return set, i, i+1

    def htimeset(self, hour, minute, second):
        """All times in one hour matching byminute/bysecond."""
        set = []
        rr = self.rrule
        for minute in rr._byminute:
            for second in rr._bysecond:
                set.append(datetime.time(hour, minute, second,
                                         tzinfo=rr._tzinfo))
        set.sort()
        return set

    def mtimeset(self, hour, minute, second):
        """All times in one minute matching bysecond."""
        set = []
        rr = self.rrule
        for second in rr._bysecond:
            set.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo))
        set.sort()
        return set

    def stimeset(self, hour, minute, second):
        """A single exact time for SECONDLY frequency."""
        return (datetime.time(hour, minute, second,
                tzinfo=self.rrule._tzinfo),)
class rruleset(rrulebase):
    """A set of recurrence rules/dates with exclusion rules/dates.

    Iteration merges all inclusion sources in ascending order, dropping
    any occurrence matched by an exclusion source, and de-duplicating.
    """

    class _genitem(object):
        """Peekable wrapper around one generator, sortable by its current
        value; removes itself from ``genlist`` when exhausted."""

        def __init__(self, genlist, gen):
            try:
                self.dt = advance_iterator(gen)
                genlist.append(self)
            except StopIteration:
                # Empty source: never joins the merge list.
                pass
            self.genlist = genlist
            self.gen = gen

        def __next__(self):
            try:
                self.dt = advance_iterator(self.gen)
            except StopIteration:
                self.genlist.remove(self)
        next = __next__

        def __lt__(self, other):
            return self.dt < other.dt

        def __gt__(self, other):
            return self.dt > other.dt

        def __eq__(self, other):
            return self.dt == other.dt

        def __ne__(self, other):
            return self.dt != other.dt

    def __init__(self, cache=False):
        super(rruleset, self).__init__(cache)
        self._rrule = []
        self._rdate = []
        self._exrule = []
        self._exdate = []

    def rrule(self, rrule):
        """Include all occurrences of the given rrule."""
        self._rrule.append(rrule)

    def rdate(self, rdate):
        """Include a single datetime."""
        self._rdate.append(rdate)

    def exrule(self, exrule):
        """Exclude all occurrences of the given rrule."""
        self._exrule.append(exrule)

    def exdate(self, exdate):
        """Exclude a single datetime."""
        self._exdate.append(exdate)

    def _iter(self):
        # Merge lists: smallest current value always at index 0 thanks to
        # the sort after each advance.
        rlist = []
        self._rdate.sort()
        self._genitem(rlist, iter(self._rdate))
        for gen in [iter(x) for x in self._rrule]:
            self._genitem(rlist, gen)
        rlist.sort()
        exlist = []
        self._exdate.sort()
        self._genitem(exlist, iter(self._exdate))
        for gen in [iter(x) for x in self._exrule]:
            self._genitem(exlist, gen)
        exlist.sort()
        lastdt = None
        total = 0
        while rlist:
            ritem = rlist[0]
            if not lastdt or lastdt != ritem.dt:
                # Advance exclusions up to the candidate, then emit the
                # candidate unless it is excluded.
                while exlist and exlist[0] < ritem:
                    advance_iterator(exlist[0])
                    exlist.sort()
                if not exlist or ritem != exlist[0]:
                    total += 1
                    yield ritem.dt
                lastdt = ritem.dt
            advance_iterator(ritem)
            rlist.sort()
        self._len = total
class _rrulestr(object):
_freq_map = {"YEARLY": YEARLY,
"MONTHLY": MONTHLY,
"WEEKLY": WEEKLY,
"DAILY": DAILY,
"HOURLY": HOURLY,
"MINUTELY": MINUTELY,
"SECONDLY": SECONDLY}
_weekday_map = {"MO":0,"TU":1,"WE":2,"TH":3,"FR":4,"SA":5,"SU":6}
def _handle_int(self, rrkwargs, name, value, **kwargs):
rrkwargs[name.lower()] = int(value)
def _handle_int_list(self, rrkwargs, name, value, **kwargs):
rrkwargs[name.lower()] = [int(x) for x in value.split(',')]
_handle_INTERVAL = _handle_int
_handle_COUNT = _handle_int
_handle_BYSETPOS = _handle_int_list
_handle_BYMONTH = _handle_int_list
_handle_BYMONTHDAY = _handle_int_list
_handle_BYYEARDAY = _handle_int_list
_handle_BYEASTER = _handle_int_list
_handle_BYWEEKNO = _handle_int_list
_handle_BYHOUR = _handle_int_list
_handle_BYMINUTE = _handle_int_list
_handle_BYSECOND = _handle_int_list
def _handle_FREQ(self, rrkwargs, name, value, **kwargs):
rrkwargs["freq"] = self._freq_map[value]
def _handle_UNTIL(self, rrkwargs, name, value, **kwargs):
global parser
if not parser:
from dateutil import parser
try:
rrkwargs["until"] = parser.parse(value,
ignoretz=kwargs.get("ignoretz"),
tzinfos=kwargs.get("tzinfos"))
except ValueError:
raise ValueError("invalid until date")
def _handle_WKST(self, rrkwargs, name, value, **kwargs):
rrkwargs["wkst"] = self._weekday_map[value]
def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwarsg):
l = []
for wday in value.split(','):
for i in range(len(wday)):
if wday[i] not in '+-0123456789':
break
n = wday[:i] or None
w = wday[i:]
if n: n = int(n)
l.append(weekdays[self._weekday_map[w]](n))
rrkwargs["byweekday"] = l
_handle_BYDAY = _handle_BYWEEKDAY
def _parse_rfc_rrule(self, line,
dtstart=None,
cache=False,
ignoretz=False,
tzinfos=None):
if line.find(':') != -1:
name, value = line.split(':')
if name != "RRULE":
raise ValueError("unknown parameter name")
else:
value = line
rrkwargs = {}
for pair in value.split(';'):
name, value = pair.split('=')
name = name.upper()
value = value.upper()
try:
getattr(self, "_handle_"+name)(rrkwargs, name, value,
ignoretz=ignoretz,
tzinfos=tzinfos)
except AttributeError:
raise ValueError("unknown parameter '%s'" % name)
except (KeyError, ValueError):
raise ValueError("invalid '%s': %s" % (name, value))
return rrule(dtstart=dtstart, cache=cache, **rrkwargs)
def _parse_rfc(self, s,
dtstart=None,
cache=False,
unfold=False,
forceset=False,
compatible=False,
ignoretz=False,
tzinfos=None):
global parser
if compatible:
forceset = True
unfold = True
s = s.upper()
if not s.strip():
raise ValueError("empty string")
if unfold:
lines = s.splitlines()
i = 0
while i < len(lines):
line = lines[i].rstrip()
if not line:
del lines[i]
elif i > 0 and line[0] == " ":
lines[i-1] += line[1:]
del lines[i]
else:
i += 1
else:
lines = s.split()
if (not forceset and len(lines) == 1 and
(s.find(':') == -1 or s.startswith('RRULE:'))):
return self._parse_rfc_rrule(lines[0], cache=cache,
dtstart=dtstart, ignoretz=ignoretz,
tzinfos=tzinfos)
else:
rrulevals = []
rdatevals = []
exrulevals = []
exdatevals = []
for line in lines:
if not line:
continue
if line.find(':') == -1:
name = "RRULE"
value = line
else:
name, value = line.split(':', 1)
parms = name.split(';')
if not parms:
raise ValueError("empty property name")
name = parms[0]
parms = parms[1:]
if name == "RRULE":
for parm in parms:
raise ValueError("unsupported RRULE parm: "+parm)
rrulevals.append(value)
elif name == "RDATE":
for parm in parms:
if parm != "VALUE=DATE-TIME":
raise ValueError("unsupported RDATE parm: "+parm)
rdatevals.append(value)
elif name == "EXRULE":
for parm in parms:
raise ValueError("unsupported EXRULE parm: "+parm)
exrulevals.append(value)
elif name == "EXDATE":
for parm in parms:
if parm != "VALUE=DATE-TIME":
raise ValueError("unsupported RDATE parm: "+parm)
exdatevals.append(value)
elif name == "DTSTART":
for parm in parms:
raise ValueError("unsupported DTSTART parm: "+parm)
if not parser:
from dateutil import parser
dtstart = parser.parse(value, ignoretz=ignoretz,
tzinfos=tzinfos)
else:
raise ValueError("unsupported property: "+name)
if (forceset or len(rrulevals) > 1 or
rdatevals or exrulevals or exdatevals):
if not parser and (rdatevals or exdatevals):
from dateutil import parser
set = rruleset(cache=cache)
for value in rrulevals:
set.rrule(self._parse_rfc_rrule(value, dtstart=dtstart,
ignoretz=ignoretz,
tzinfos=tzinfos))
for value in rdatevals:
for datestr in value.split(','):
set.rdate(parser.parse(datestr,
ignoretz=ignoretz,
tzinfos=tzinfos))
for value in exrulevals:
set.exrule(self._parse_rfc_rrule(value, dtstart=dtstart,
ignoretz=ignoretz,
tzinfos=tzinfos))
for value in exdatevals:
for datestr in value.split(','):
set.exdate(parser.parse(datestr,
ignoretz=ignoretz,
tzinfos=tzinfos))
if compatible and dtstart:
set.rdate(dtstart)
return set
else:
return self._parse_rfc_rrule(rrulevals[0],
dtstart=dtstart,
cache=cache,
ignoretz=ignoretz,
tzinfos=tzinfos)
    def __call__(self, s, **kwargs):
        """Parse recurrence string *s*; keyword args go to _parse_rfc()."""
        return self._parse_rfc(s, **kwargs)
# Module-level singleton: call rrulestr("RRULE:...") to parse RFC strings.
rrulestr = _rrulestr()
# vim:ts=4:sw=4:et
| gpl-3.0 |
clips/pattern | pattern/text/nl/inflect.py | 1 | 16691 | #### PATTERN | NL | INFLECT ########################################################################
# -*- coding: utf-8 -*-
# Copyright (c) 2010 University of Antwerp, Belgium
# Author: Tom De Smedt <tom@organisms.be>
# License: BSD (see LICENSE.txt for details).
####################################################################################################
# Regular expressions-based rules for Dutch word inflection:
# - pluralization and singularization of nouns,
# - conjugation of verbs,
# - predicative and attributive of adjectives.
# Accuracy (measured on CELEX Dutch morphology word forms):
# 79% for pluralize()
# 91% for singularize()
# 90% for Verbs.find_lemma()
# 88% for Verbs.find_lexeme()
# 99% for predicative()
# 99% for attributive()
from __future__ import unicode_literals
from __future__ import division
from builtins import str, bytes, dict, int
from builtins import map, zip, filter
from builtins import object, range
import os
import sys
import re
try:
MODULE = os.path.dirname(os.path.realpath(__file__))
except:
MODULE = ""
sys.path.insert(0, os.path.join(MODULE, "..", "..", "..", ".."))
from pattern.text import Verbs as _Verbs
from pattern.text import (
INFINITIVE, PRESENT, PAST, FUTURE,
FIRST, SECOND, THIRD,
SINGULAR, PLURAL, SG, PL,
PROGRESSIVE,
PARTICIPLE
)
sys.path.pop(0)
# Part-of-speech tags used throughout this module (Penn-style).
VERB, NOUN, ADJECTIVE, ADVERB = "VB", "NN", "JJ", "RB"

VOWELS = ("a", "e", "i", "o", "u")
re_vowel = re.compile(r"a|e|i|o|u|y", re.I)
# NOTE: "y" counts as a vowel in re_vowel but not in is_vowel().
is_vowel = lambda ch: ch in VOWELS

#### PLURALIZE ######################################################################################

# Irregular pluralization classes: nouns taking -en, -ën, -eren or -deren.
plural_irregular_en = set(("dag", "dak", "dal", "pad", "vat", "weg"))
plural_irregular_een = set(("fee", "genie", "idee", "orgie", "ree"))
plural_irregular_eren = set(("blad", "ei", "gelid", "gemoed", "kalf", "kind", "lied", "rad", "rund"))
plural_irregular_deren = set(("hoen", "been"))

# Fully irregular plurals that no suffix rule can produce.
plural_irregular = {
    "centrum": "centra",
    "escargot": "escargots",
    "gedrag": "gedragingen",
    "gelid": "gelederen",
    "kaars": "kaarsen",
    "kleed": "kleren",
    "koe": "koeien",
    "lam": "lammeren",
    "museum": "museums",
    "stad": "steden",
    "stoel": "stoelen",
    "vlo": "vlooien"
}


def pluralize(word, pos=NOUN, custom={}):
    """ Returns the plural of a given word.
        For example: stad => steden.
        The custom dictionary is for user-defined replacements; it is only
        read, never modified (so the shared default {} is safe).
        For any part of speech other than NOUN the lowercased word is
        returned unchanged.
    """
    if word in custom:  # direct membership test; no need for .keys()
        return custom[word]
    w = word.lower()
    if pos == NOUN:
        if w in plural_irregular_en:     # dag => dagen
            return w + "en"
        if w in plural_irregular_een:    # fee => feeën
            return w + "ën"
        if w in plural_irregular_eren:   # blad => bladeren
            return w + "eren"
        if w in plural_irregular_deren:  # been => beenderen
            return w + "deren"
        if w in plural_irregular:
            return plural_irregular[w]
        # Words ending in -icus get -ici: academicus => academici
        if w.endswith("icus"):
            return w[:-2] + "i"
        # Words ending in -s usually get -sen: les => lessen.
        if w.endswith(("es", "as", "nis", "ris", "vis")):
            return w + "sen"
        # Words ending in -s usually get -zen: huis => huizen.
        if w.endswith("s") and not w.endswith(("us", "ts", "mens")):
            return w[:-1] + "zen"
        # Words ending in -f usually get -ven: brief => brieven.
        if w.endswith("f"):
            return w[:-1] + "ven"
        # Words ending in -um get -ums: museum => museums.
        if w.endswith("um"):
            return w + "s"
        # Words ending in unstressed -ee or -ie get -ën: bacterie => bacteriën
        if w.endswith("ie"):
            return w + "s"
        if w.endswith(("ee", "ie")):
            return w[:-1] + "ën"
        # Words ending in -heid get -heden: mogelijkheid => mogelijkheden
        if w.endswith("heid"):
            return w[:-4] + "heden"
        # Words ending in -e -el -em -en -er -ie get -s: broer => broers.
        if w.endswith(("é", "e", "el", "em", "en", "er", "eu", "ie", "ue", "ui", "eau", "ah")):
            return w + "s"
        # Words ending in a vowel get 's: auto => auto's.
        # (Parentheses added to make the original and/or precedence explicit.)
        if w.endswith(VOWELS) or (w.endswith("y") and not w.endswith("e")):
            return w + "'s"
        # Words ending in -or always get -en: motor => motoren.
        if w.endswith("or"):
            return w + "en"
        # Words ending in -ij get -en: boerderij => boerderijen.
        if w.endswith("ij"):
            return w + "en"
        # Words ending in two consonants get -en: hand => handen.
        if len(w) > 1 and not is_vowel(w[-1]) and not is_vowel(w[-2]):
            return w + "en"
        # Words ending in one consonant with a short sound: fles => flessen.
        if len(w) > 2 and not is_vowel(w[-1]) and not is_vowel(w[-3]):
            return w + w[-1] + "en"
        # Words ending in one consonant with a long sound: raam => ramen.
        if len(w) > 2 and not is_vowel(w[-1]) and w[-2] == w[-3]:
            return w[:-2] + w[-1] + "en"
        return w + "en"
    return w
#### SINGULARIZE ###################################################################################
# Inverse of the irregular-plural table above.
singular_irregular = dict((v, k) for k, v in plural_irregular.items())
def singularize(word, pos=NOUN, custom={}):
    """ Returns the singular of a given plural noun.
        For example: steden => stad.
        The custom dictionary is for user-defined replacements; it is only
        read, never modified. Non-nouns are returned lowercased, unchanged.
    """
    if word in custom.keys():
        return custom[word]
    w = word.lower()
    if pos == NOUN and w in singular_irregular:
        return singular_irregular[w]
    if pos == NOUN and w.endswith(("ën", "en", "s", "i")):
        # auto's => auto
        if w.endswith("'s"):
            return w[:-2]
        # broers => broer
        if w.endswith("s"):
            return w[:-1]
        # academici => academicus
        if w.endswith("ici"):
            return w[:-1] + "us"
        # feeën => fee
        if w.endswith("ën") and w[:-2] in plural_irregular_een:
            return w[:-2]
        # bacteriën => bacterie
        if w.endswith("ën"):
            return w[:-2] + "e"
        # mogelijkheden => mogelijkheid
        if w.endswith("heden"):
            return w[:-5] + "heid"
        # artikelen => artikel
        if w.endswith("elen") and not w.endswith("delen"):
            return w[:-2]
        # chinezen => chinees
        if w.endswith("ezen"):
            return w[:-4] + "ees"
        # neven => neef
        if w.endswith("even") and len(w) > 4 and not is_vowel(w[-5]):
            return w[:-4] + "eef"
        if w.endswith("en"):
            # Strip -en, then restore the stem (the rules below mirror
            # the spelling changes applied by pluralize()).
            w = w[:-2]
            # ogen => oog
            if w in ("og", "om", "ur"):
                return w[:-1] + w[-2] + w[-1]
            # hoenderen => hoen
            if w.endswith("der") and w[:-3] in plural_irregular_deren:
                return w[:-3]
            # eieren => ei
            if w.endswith("er") and w[:-2] in plural_irregular_eren:
                return w[:-2]
            # dagen => dag (not daag)
            if w in plural_irregular_en:
                return w
            # huizen => huis
            if w.endswith("z"):
                return w[:-1] + "s"
            # brieven => brief
            if w.endswith("v"):
                return w[:-1] + "f"
            # motoren => motor
            if w.endswith("or"):
                return w
            # flessen => fles
            if len(w) > 1 and not is_vowel(w[-1]) and w[-1] == w[-2]:
                return w[:-1]
            # baarden => baard
            if len(w) > 1 and not is_vowel(w[-1]) and not is_vowel(w[-2]):
                return w
            # boerderijen => boerderij
            if w.endswith("ij"):
                return w
            # idealen => ideaal
            if w.endswith(("eal", "ean", "eol", "ial", "ian", "iat", "iol")):
                return w[:-1] + w[-2] + w[-1]
            # ramen => raam
            if len(w) > 2 and not is_vowel(w[-1]) and is_vowel(w[-2]) and not is_vowel(w[-3]):
                return w[:-1] + w[-2] + w[-1]
            return w
    return w
#### VERB CONJUGATION ##############################################################################
class Verbs(_Verbs):
    """Dutch verb conjugation, backed by the nl-verbs.txt lexicon with
    rule-based fallbacks (find_lemma / find_lexeme) for unknown verbs."""

    def __init__(self):
        # format maps the columns in nl-verbs.txt to tense slots; default
        # fills slots missing from the lexicon with the closest available
        # form (see inline comments).
        _Verbs.__init__(self, os.path.join(MODULE, "nl-verbs.txt"),
            language = "nl",
              format = [0, 1, 2, 3, 7, 8, 17, 18, 19, 23, 25, 24, 16, 9, 10, 11, 15, 33, 26, 27, 28, 32],
             default = {
                 1: 0,  2: 0,  3: 0,  7: 0,   # present singular
                 4: 7,  5: 7,  6: 7,          # present plural
                17: 25, 18: 25, 19: 25, 23: 25,  # past singular
                20: 23, 21: 23, 22: 23,          # past plural
                 9: 16, 10: 16, 11: 16, 15: 16,  # present singular negated
                12: 15, 13: 15, 14: 15,          # present plural negated
                26: 33, 27: 33, 28: 33,          # past singular negated
                29: 32, 30: 32, 31: 32, 32: 33   # past plural negated
            })

    def load(self):
        """Load the lexicon and patch ambiguous inverse mappings."""
        _Verbs.load(self)
        self._inverse["was"] = "zijn" # Instead of "wassen".
        self._inverse["waren"] = "zijn"
        self._inverse["zagen"] = "zien"
        self._inverse["wist"] = "weten"
        self._inverse["zou"] = "zullen"

    def find_lemma(self, verb):
        """ Returns the base form of the given inflected verb, using a rule-based approach.
            This is problematic if a verb ending in -e is given in the past tense or gerund.
        """
        v = verb.lower()
        # Common prefixes: op-bouwen and ver-bouwen inflect like bouwen.
        for prefix in ("aan", "be", "her", "in", "mee", "ont", "op", "over", "uit", "ver"):
            if v.startswith(prefix) and v[len(prefix):] in self.inflections:
                return prefix + self.inflections[v[len(prefix):]]
        # Present participle -end: hengelend, knippend.
        if v.endswith("end"):
            b = v[:-3]
        # Past singular -de or -te: hengelde, knipte.
        elif v.endswith(("de", "det", "te", "tet")):
            b = v[:-2]
        # Past plural -den or -ten: hengelden, knipten.
        # (endswith() below is called with a single string; the trailing
        # comma belongs to the call, it does not form a tuple.)
        elif v.endswith(("chten"),):
            b = v[:-2]
        elif v.endswith(("den", "ten")) and len(v) > 3 and is_vowel(v[-4]):
            b = v[:-2]
        elif v.endswith(("den", "ten")):
            b = v[:-3]
        # Past participle ge- and -d or -t: gehengeld, geknipt.
        elif v.endswith(("d", "t")) and v.startswith("ge"):
            b = v[2:-1]
        # Present 2nd or 3rd singular: wordt, denkt, snakt, wacht.
        elif v.endswith(("cht"),):
            b = v
        elif v.endswith(("dt", "bt", "gt", "kt", "mt", "pt", "wt", "xt", "aait", "ooit")):
            b = v[:-1]
        elif v.endswith("t") and len(v) > 2 and not is_vowel(v[-2]):
            b = v[:-1]
        elif v.endswith("en") and len(v) > 3:
            # Already looks like an infinitive.
            return v
        else:
            b = v
        # hengel => hengelen (and not hengellen)
        if len(b) > 2 and b.endswith(("el", "nder", "om", "tter")) and not is_vowel(b[-3]):
            pass
        # Long vowel followed by -f or -s: geef => geven.
        elif len(b) > 2 and not is_vowel(b[-1]) and is_vowel(b[-2]) and is_vowel(b[-3])\
          or b.endswith(("ijf", "erf"),):
            if b.endswith("f"):
                b = b[:-1] + "v"
            if b.endswith("s"):
                b = b[:-1] + "z"
            if b[-2] == b[-3]:
                b = b[:-2] + b[-1]
        # Short vowel followed by consonant: snak => snakken.
        elif len(b) > 1 and not is_vowel(b[-1]) and is_vowel(b[-2]) and not b.endswith(("er", "ig")):
            b = b + b[-1]
        b = b + "en"
        b = b.replace("vven", "ven") # omgevven => omgeven
        b = b.replace("zzen", "zen") # genezzen => genezen
        b = b.replace("aen", "aan") # doorgaen => doorgaan
        return b

    def find_lexeme(self, verb):
        """ For a regular verb (base form), returns the forms using a rule-based approach.
        """
        v = verb.lower()
        # Stem = infinitive minus -en.
        b = b0 = re.sub("en$", "", v)
        # zweven => zweef, graven => graaf
        if b.endswith("v"):
            b = b[:-1] + "f"
        if b.endswith("z"):
            b = b[:-1] + "s"
        # Vowels with a long sound are doubled, we need to guess how it sounds:
        if len(b) > 2 and not is_vowel(b[-1]) and is_vowel(b[-2]) and not is_vowel(b[-3]):
            if not v.endswith(("elen", "deren", "keren", "nderen", "tteren")):
                b = b[:-1] + b[-2] + b[-1]
        # pakk => pak
        if len(b) > 1 and not is_vowel(b[-1]) and b[-1] == b[-2]:
            b = b[:-1]
        # Present tense gets -t:
        sg = not b.endswith("t") and b + "t" or b
        # Past tense ending in a consonant in "xtc-koffieshop" gets -t, otherwise -d:
        dt = b0 and b0[-1] in "xtckfshp" and "t" or (not b.endswith("d") and "d" or "")
        # Past tense -e and handle common irregular inflections:
        p = b + dt + "e"
        for suffix, irregular in (("erfde", "ierf"), ("ijfde", "eef"), ("ingde", "ong"), ("inkte", "onk")):
            if p.endswith(suffix):
                p = p[:-len(suffix)] + irregular; break
        # Past participle: ge-:
        pp = re.sub("tt$", "t", "ge" + b + dt)
        pp = pp.startswith(("geop", "gein", "geaf")) and pp[2:4] + "ge" + pp[4:] or pp # geopstart => opgestart
        pp = pp.startswith(("gever", "gebe", "gege")) and pp[2:] or pp
        # The order below matches the tense slots expected by the base class.
        return [v, b, sg, sg, v, b0 + "end", p, p, p, b + dt + "en", p, pp]
# Module-level singleton plus convenience aliases for its bound methods.
verbs = Verbs()
conjugate, lemma, lexeme, tenses = \
    verbs.conjugate, verbs.lemma, verbs.lexeme, verbs.tenses
#### ATTRIBUTIVE & PREDICATIVE #####################################################################
# Adjectives whose attributive form is irregular, i.e. would be mangled
# by the suffix rules in attributive() below.
adjective_attributive = {
    "civiel": "civiele",
    "complex": "complexe",
    "enkel": "enkele",
    "grof": "grove",
    "half": "halve",
    "luttel": "luttele",
    "mobiel": "mobiele",
    "parijs": "parijse",
    "ruw": "ruwe",
    "simpel": "simpele",
    "stabiel": "stabiele",
    "steriel": "steriele",
    "subtiel": "subtiele",
    "teer": "tere"
}
def attributive(adjective):
    """ For a predicative adjective, returns the attributive form (lowercase).
        In Dutch, the attributive is formed with -e: "fel" => "felle kritiek".
    """
    w = adjective.lower()
    if w in adjective_attributive:
        return adjective_attributive[w]
    # Already ends in -e: nothing to do.
    if w.endswith("e"):
        return w
    # Comparatives and superlatives: groter => grotere, grootst => grootste.
    if w.endswith(("er", "st")) and len(w) > 4:
        return w + "e"
    # chinees => chinese (long vowel reduced before -se).
    if w.endswith("ees"):
        return w[:-2] + w[-1] + "e"
    if w.endswith("el") and len(w) > 2 and not is_vowel(w[-3]):
        return w + "e"
    if w.endswith("ig"):
        return w + "e"
    # Long vowel (or -ij-) before final consonant: spelling changes
    # mirror those in pluralize(): f => v, s => z, double vowel reduced.
    if len(w) > 2 and (not is_vowel(w[-1]) and is_vowel(w[-2]) and is_vowel(w[-3]) or w[:-1].endswith("ij")):
        if w.endswith("f"):
            w = w[:-1] + "v"
        if w.endswith("s"):
            w = w[:-1] + "z"
        if w[-2] == w[-3]:
            w = w[:-2] + w[-1]
    # Short vowel before final consonant: consonant doubled (fel => felle).
    elif len(w) > 1 and is_vowel(w[-2]) and w.endswith(tuple("bdfgklmnprst")):
        w = w + w[-1]
    return w + "e"
# Predicative lookup is the inverse of the attributive table, plus a few
# adjectives that are identical in both forms.
adjective_predicative = dict((v, k) for k, v in adjective_attributive.items())
adjective_predicative.update({
    "moe": "moe",
    "taboe": "taboe",
    "voldoende": "voldoende"
})
def predicative(adjective):
    """ Returns the predicative adjective (lowercase).
        In Dutch, the attributive form preceding a noun is common:
        "rake opmerking" => "raak", "straffe uitspraak" => "straf", "dwaze blik" => "dwaas".
    """
    w = adjective.lower()
    if w in adjective_predicative:
        return adjective_predicative[w]
    # Superlatives/comparatives: beste => best, grotere => grotere[:-1].
    if w.endswith("ste"):
        return w[:-1]
    if w.endswith("ere"):
        return w[:-1]
    if w.endswith("bele"):
        return w[:-1]
    # Restore the long vowel: tale => taal, brave => braaf, boze => boos.
    if w.endswith("le") and len(w) > 2 and is_vowel(w[-3]) and not w.endswith(("eule", "oele")):
        return w[:-2] + w[-3] + "l"
    if w.endswith("ve") and len(w) > 2 and is_vowel(w[-3]) and not w.endswith(("euve", "oeve", "ieve")):
        return w[:-2] + w[-3] + "f"
    if w.endswith("ze") and len(w) > 2 and is_vowel(w[-3]) and not w.endswith(("euze", "oeze", "ieze")):
        return w[:-2] + w[-3] + "s"
    # Undo the f => v and s => z spelling changes.
    if w.endswith("ve"):
        return w[:-2] + "f"
    if w.endswith("ze"):
        return w[:-2] + "s"
    if w.endswith("e") and len(w) > 2:
        # felle => fel (doubled consonant reduced).
        if not is_vowel(w[-2]) and w[-2] == w[-3]:
            return w[:-2]
        # dwaze-like stems: restore the long vowel (rake => raak).
        if len(w) > 3 and not is_vowel(w[-2]) and is_vowel(w[-3]) and w[-3] != "i" and not is_vowel(w[-4]):
            return w[:-2] + w[-3] + w[-2]
        return w[:-1]
    return w
| bsd-3-clause |
ajdawson/eofs | lib/eofs/tests/utils.py | 1 | 3384 | """Utilities for testing the `eofs` package."""
# (c) Copyright 2013-2016 Andrew Dawson. All Rights Reserved.
#
# This file is part of eofs.
#
# eofs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# eofs is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License
# along with eofs. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function) # noqa
import numpy as np
try:
from iris.cube import Cube
except ImportError:
pass
def _close(a, b, rtol=1e-05, atol=1e-08):
    """Check if two values are close.

    Uses the same asymmetric scheme as numpy.isclose:
    ``|a - b| <= atol + rtol * |b|`` (the tolerance scales with *b*).
    """
    return abs(a - b) <= (atol + rtol * abs(b))


def __tomasked(*args):
    """Convert cdms2 variables or iris cubes to masked arrays.

    The conversion is safe, so if non-variables/cubes are passed they
    are just returned.
    """
    def __asma(a):
        try:
            # isinstance (rather than an exact type check) also accepts
            # subclasses of iris.cube.Cube.
            if isinstance(a, Cube):
                # Retrieve the data from the cube.
                a = a.data
        except NameError:
            # iris is not installed, so ``Cube`` is undefined; fall through.
            pass
        try:
            # Retrieve data from cdms variable.
            a = a.asma()
        except AttributeError:
            # The input is already an array or masked array, either extracted
            # from an iris cube, or was like that to begin with.
            pass
        return a
    return [__asma(a) for a in args]


def error(a, b):
    """Compute the error between two arrays.

    Returns the mean elementwise absolute difference between *a* and *b*,
    normalized by the range of *b*.  NOTE: the original docstring called
    this the RMSD, but ``np.sqrt((a - b)**2)`` applies the square root
    elementwise *before* the mean, so this is the normalized mean absolute
    deviation, not a root-mean-square.  Behavior is kept unchanged because
    test tolerances elsewhere are calibrated against it.
    """
    a, b = __tomasked(a, b)
    return np.sqrt((a - b)**2).mean() / (np.max(b) - np.min(b))
def sign_adjustments(eofset, refeofset):
    """Sign adjustments for EOFs/PCs.

    Create a matrix of sign weights used for adjusting the sign of a set
    of EOFs or PCs to the sign of a reference set.

    The first dimension is assumed to be modes.

    **Arguments:**

    *eofset*
        Set of EOFs.

    *refeofset*
        Reference set of EOFs.

    **Returns:** an array of +1/-1 weights of shape ``(nmodes, 1, ...)``,
    broadcastable against *eofset*.

    """
    if eofset.shape != refeofset.shape:
        raise ValueError('input set has different shape from reference set')
    eofset, refeofset = __tomasked(eofset, refeofset)
    nmodes = eofset.shape[0]
    signs = np.ones([nmodes])
    shape = [nmodes] + [1] * (eofset.ndim - 1)
    # Flatten all non-mode dimensions.  The builtin ``int`` replaces
    # ``np.int``, an alias removed in NumPy 1.24 (AttributeError there).
    eofset = eofset.reshape([nmodes, np.prod(eofset.shape[1:], dtype=int)])
    refeofset = refeofset.reshape([nmodes,
                                   np.prod(refeofset.shape[1:],
                                           dtype=int)])
    for mode in range(nmodes):
        i = 0
        try:
            # Find the first element that is significantly nonzero and
            # unmasked in both sets; fall back to index 0 if none exists.
            while _close(eofset[mode, i], 0.) or \
                    _close(refeofset[mode, i], 0.) \
                    or np.ma.is_masked(eofset[mode, i]) or \
                    np.ma.is_masked(refeofset[mode, i]):
                i += 1
        except IndexError:
            i = 0
        if np.sign(eofset[mode, i]) != np.sign(refeofset[mode, i]):
            signs[mode] = -1
    return signs.reshape(shape)
if __name__ == '__main__':
pass
| gpl-3.0 |
fusionapp/documint | documint/extproc/neon.py | 1 | 2744 | from functools import partial
from tempfile import mkstemp
from twisted.python.filepath import FilePath
from documint.errors import RemoteExternalProcessError
from documint.extproc.common import getProcessOutput, sanitizePaths, which
_neonBinary = partial(which, 'clj-neon')
def failingPDFSign(*a, **kw):
    """
    Fail to sign anything.

    Drop-in stand-in for C{signPDF} used when the signing backend is not
    configured; accepts (and ignores) any arguments and always raises
    L{RemoteExternalProcessError}.
    """
    raise RemoteExternalProcessError('PDF signing is not correctly configured')
def signPDF(data, keystorePath, keystorePassword, reason, location,
            signaturePage=None, fields=None, privateKeyPassword=None,
            imagePath=None, rectangle=None):
    """
    Digitally sign a PDF.

    Invokes the external ``clj-neon`` binary; the unsigned input is
    written to a temporary file which is removed when the process ends.

    @param data: Unsigned PDF bytes.
    @type data: L{str}

    @param keystorePath: The path to the Java Keystore.
    @type keystorePath: L{twisted.python.filepath.FilePath}

    @param keystorePassword: The Java Keystore password.
    @type keystorePassword: L{str}

    @param reason: The reason for signing the PDF.
    @type reason: L{str}

    @param location: The location the PDF was signed.
    @type location: L{str}

    @param signaturePage: Path to signature page.
    @type signaturePage: L{FilePath} or L{None}

    @param fields: Mapping of signature page field names and values.
    @type fields: L{dict}

    @param privateKeyPassword: The password for the private key contained in
        the Java Keystore.
    @type privateKeyPassword: L{str} or L{None}

    @param imagePath: The path to an image to stamp on the PDF.
    @type imagePath: L{twisted.python.filepath.FilePath}

    @param rectangle: The size of the signature rectangle. eg:
        [LX1,LY1,UX2,UY2]
    @type rectangle: L{list} of L{str}

    @return: A deferred resulting in the signed PDF content as a byte string or
        a L{diamond.error.ExternalProcessError}.
    """
    tempPath = FilePath(mkstemp()[1])
    tempPath.setContent(data)
    def _cleanup(result):
        # Always remove the temp file, on success or failure.
        tempPath.remove()
        return result
    keystorePath, inputPath = sanitizePaths([keystorePath, tempPath])
    args = [inputPath,
            '-',
            keystorePath,
            '--keystore-pass', keystorePassword,
            '--reason', reason,
            '--location', location]
    if privateKeyPassword:
        args.extend(['--password', privateKeyPassword])
    if imagePath:
        # NOTE(review): unlike keystorePath/tempPath above, imagePath is not
        # run through sanitizePaths() and is passed without ``.path`` (compare
        # signaturePage below) — confirm callers pass a plain path string.
        args.extend(['--signature-image', imagePath])
    if rectangle:
        args.extend(['--signature-rect', ','.join(rectangle)])
    if signaturePage:
        args.extend(['--signature-page', signaturePage.path])
    if fields:
        # Python 2 API (iteritems); this module targets py2/Twisted.
        for k, v in fields.iteritems():
            args.extend(['--field', '%s:%s' % (k, v)])
    d = getProcessOutput(_neonBinary(), args)
    d.addBoth(_cleanup)
    return d
| mit |
hmit/livestreamer | src/livestreamer/plugins/tga.py | 31 | 2354 | #coding: utf-8
import re
from livestreamer.plugin import Plugin
from livestreamer.plugin.api import http, validate
from livestreamer.plugin.api.utils import parse_query
from livestreamer.stream import HLSStream, HTTPStream
# Endpoints for channel lookup and stream resolution.
CHANNEL_INFO_URL = "http://api.plu.cn/tga/streams/%s"
STREAM_INFO_URL = "http://info.zb.qq.com/?cnlid=%d&cmd=2&stream=%d&system=1&sdtfrom=113"

# Raw string avoids invalid "\." / "\/" escape-sequence warnings on
# Python 3.6+; the pattern itself is unchanged.
_url_re = re.compile(r"http://star\.longzhu\.(?:tv|com)/(m\/)?(?P<domain>[a-z0-9]+)")

# Validates the channel-info JSON and extracts its "data" payload.
_channel_schema = validate.Schema(
    {
        "data": validate.any(None, {
            "channel": validate.any(None, {
                "id": validate.all(
                    validate.text,
                    validate.transform(int)
                ),
                "vid": int
            })
        })
    },
    validate.get("data")
)

# Validates the QQ stream-info JSON and extracts the playback URL.
_qq_schema = validate.Schema({
    validate.optional("playurl"): validate.url(scheme="http")
    },
    validate.get("playurl")
)

STREAM_WEIGHTS = {
    "middle": 540,
    "source": 1080
}
class Tga(Plugin):
    """Plugin for star.longzhu.tv / star.longzhu.com (TGA) live streams."""

    @classmethod
    def can_handle_url(cls, url):
        # Note: first parameter renamed from the misleading "self" — this
        # is a classmethod.
        return _url_re.match(url)

    @classmethod
    def stream_weight(cls, stream):
        if stream in STREAM_WEIGHTS:
            return STREAM_WEIGHTS[stream], "tga"
        return Plugin.stream_weight(stream)

    def _get_channel_id(self, domain):
        """Resolve the QQ channel id for *domain*.

        Returns False when the channel is unknown or has no valid id.
        """
        channel_info = http.get(CHANNEL_INFO_URL % str(domain))
        info = http.json(channel_info, schema=_channel_schema)
        if info is None:
            return False
        cnid = info['channel']['vid']
        if cnid <= 0:
            return False
        return cnid

    def _get_qq_stream_url(self, cnid, weight=1):
        """Look up the playback URL for channel *cnid*.

        weight=1 requests the flash (HTTP) stream, weight=2 the mobile
        (HLS) stream.
        """
        qq_stream_url = http.get(STREAM_INFO_URL % (int(cnid), int(weight)))
        return http.json(qq_stream_url, schema=_qq_schema)

    def _get_streams(self):
        """Yield the available live streams for self.url."""
        match = _url_re.match(self.url)
        domain = match.group('domain')
        cnid = self._get_channel_id(domain)
        # Identity check against the False sentinel (cnid is otherwise a
        # positive int); the original "== False" comparison was non-idiomatic.
        if cnid is False:
            return
        flash_stream = HTTPStream(self.session, self._get_qq_stream_url(cnid, 1))
        if flash_stream:
            yield "live", flash_stream
        mobile_stream = HLSStream(self.session, self._get_qq_stream_url(cnid, 2))
        if mobile_stream:
            yield "live_http", mobile_stream


__plugin__ = Tga
| bsd-2-clause |
charbeljc/server-tools | fetchmail_attach_from_folder/match_algorithm/email_exact.py | 54 | 2397 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2013 Therp BV (<http://therp.nl>)
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from .base import base
from openerp.tools.safe_eval import safe_eval
from openerp.tools.mail import email_split
class email_exact(base):
    '''Search for exactly the mailadress as noted in the email'''
    name = 'Exact mailadress'
    required_fields = ['model_field', 'mail_field']

    def _get_mailaddresses(self, conf, mail_message):
        # Collect every address found in the configured header fields,
        # normalised to lower case.
        addresses = []
        for field in conf.mail_field.split(','):
            if field in mail_message:
                addresses.extend(email_split(mail_message[field]))
        return [address.lower() for address in addresses]

    def _get_mailaddress_search_domain(
            self, conf, mail_message, operator='=', values=None):
        # Build an OR-ed search domain over all extracted addresses,
        # extended with the user-configured extra domain.  When no address
        # can be extracted, return a domain that matches nothing.
        addresses = values or self._get_mailaddresses(conf, mail_message)
        if not addresses:
            return [(0, '=', 1)]
        or_operators = ['|'] * (len(addresses) - 1)
        leaves = [(conf.model_field, operator, address)
                  for address in addresses]
        return or_operators + leaves + safe_eval(conf.domain or '[]')

    def search_matches(self, cr, uid, conf, mail_message, mail_message_org):
        # Look up candidate records of the configured model that match any
        # of the message's addresses.
        model = conf.pool.get(conf.model_id.model)
        domain = self._get_mailaddress_search_domain(conf, mail_message)
        return model.search(cr, uid, domain, order=conf.model_order)
| agpl-3.0 |
endlessm/chromium-browser | third_party/catapult/third_party/pyasn1_modules/pyasn1_modules/rfc1155.py | 24 | 2683 | #
# This file is part of pyasn1-modules software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pyasn1/license.html
#
# SNMPv1 message syntax
#
# ASN.1 source from:
# http://www.ietf.org/rfc/rfc1155.txt
#
# Sample captures from:
# http://wiki.wireshark.org/SampleCaptures/
#
from pyasn1.type import constraint
from pyasn1.type import namedtype
from pyasn1.type import tag
from pyasn1.type import univ
class ObjectName(univ.ObjectIdentifier):
    """OID naming a managed object (RFC 1155 ObjectName)."""
    pass


class SimpleSyntax(univ.Choice):
    """RFC 1155 SimpleSyntax: primitive ASN.1 value types."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('number', univ.Integer()),
        namedtype.NamedType('string', univ.OctetString()),
        namedtype.NamedType('object', univ.ObjectIdentifier()),
        namedtype.NamedType('empty', univ.Null())
    )


class IpAddress(univ.OctetString):
    """4-octet IPv4 address, [APPLICATION 0] IMPLICIT OCTET STRING."""
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0)
    )
    # NOTE(review): refers to univ.Integer.subtypeSpec although this class
    # derives from OctetString — matches the upstream pyasn1-modules source.
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(
        4, 4
    )


class NetworkAddress(univ.Choice):
    """RFC 1155 NetworkAddress; only the internet (IPv4) family exists."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('internet', IpAddress())
    )


class Counter(univ.Integer):
    """Monotonic 32-bit counter, [APPLICATION 1], wraps at 2**32 - 1."""
    tagSet = univ.Integer.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 1)
    )
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        0, 4294967295
    )


class Gauge(univ.Integer):
    """Non-wrapping 32-bit gauge, [APPLICATION 2]."""
    tagSet = univ.Integer.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 2)
    )
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        0, 4294967295
    )


class TimeTicks(univ.Integer):
    """Time in hundredths of a second since an epoch, [APPLICATION 3]."""
    tagSet = univ.Integer.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 3)
    )
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        0, 4294967295
    )


class Opaque(univ.OctetString):
    """Arbitrarily encoded data passed through uninterpreted, [APPLICATION 4]."""
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 4)
    )


class ApplicationSyntax(univ.Choice):
    """RFC 1155 ApplicationSyntax: SNMP application-wide value types."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('address', NetworkAddress()),
        namedtype.NamedType('counter', Counter()),
        namedtype.NamedType('gauge', Gauge()),
        namedtype.NamedType('ticks', TimeTicks()),
        namedtype.NamedType('arbitrary', Opaque())
    )


class ObjectSyntax(univ.Choice):
    """RFC 1155 ObjectSyntax: the value of any managed object."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('simple', SimpleSyntax()),
        namedtype.NamedType('application-wide', ApplicationSyntax())
    )
| bsd-3-clause |
fsmi/odie-server | db/odie.py | 3 | 1337 | #! /usr/bin/env python3
import config
from odie import sqla, Column
class OrderDocument(sqla.Model):
    """Association table linking an Order to its Documents, with position."""
    __tablename__ = 'order_documents'
    __table_args__ = config.odie_table_args
    # 0-based position of the document within the order.
    index = Column(sqla.Integer, primary_key=True)
    order_id = Column(sqla.Integer, sqla.ForeignKey('odie.orders.id'), primary_key=True)
    document_id = Column(sqla.ForeignKey('documents.documents.id', ondelete='CASCADE'), primary_key=True)
    order = sqla.relationship('Order', back_populates='items')
    document = sqla.relationship('Document', lazy='joined')
class Order(sqla.Model):
    """A named, timestamped order over an ordered list of documents."""
    __tablename__ = 'orders'
    __table_args__ = config.odie_table_args
    id = Column(sqla.Integer, primary_key=True)
    name = Column(sqla.String(256))
    creation_time = Column(sqla.DateTime(timezone=True), server_default=sqla.func.now())
    items = sqla.relationship('OrderDocument', cascade='all', order_by=OrderDocument.index, lazy='subquery',
                              back_populates='order')
    def __init__(self, name, document_ids, creation_time=None):
        """Create an order containing document_ids in the given sequence."""
        self.name = name
        self.creation_time = creation_time
        # Instantiating the association objects links them to this order via
        # the relationship backref; no explicit append is needed.
        for idx, doc in enumerate(document_ids):
            OrderDocument(order=self, document_id=doc, index=idx)
    @property
    def documents(self):
        """The Document objects of this order, in their stored order."""
        return [item.document for item in self.items]
| mit |
luhanhan/horizon | horizon/test/tests/exceptions.py | 54 | 1695 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.encoding import force_text
from horizon import exceptions
from horizon.test import helpers as test
class HandleTests(test.TestCase):
    """Tests for horizon.exceptions.handle()."""

    def test_handle_translated(self):
        """handle() must cope with non-ASCII (translated) exception text."""
        translated_unicode = u'\u30b3\u30f3\u30c6\u30ca\u30fc\u304c' \
                             u'\u7a7a\u3067\u306f\u306a\u3044\u305f' \
                             u'\u3081\u3001\u524a\u9664\u3067\u304d' \
                             u'\u307e\u305b\u3093\u3002'
        # Japanese translation of:
        # 'Because the container is not empty, it can not be deleted.'
        expected = ['error', force_text(translated_unicode), '']
        req = self.request
        req.META['HTTP_X_REQUESTED_WITH'] = 'XMLHttpRequest'
        try:
            raise exceptions.Conflict(translated_unicode)
        except exceptions.Conflict:
            exceptions.handle(req)
        # The real test here is to make sure the handle method doesn't throw a
        # UnicodeEncodeError, but making sure the message is correct could be
        # useful as well.
        self.assertItemsEqual(req.horizon['async_messages'], [expected])
proversity-org/edx-platform | lms/djangoapps/courseware/tests/test_microsites.py | 4 | 14676 | """
Tests related to the Site Configuration feature
"""
import pytest
from bs4 import BeautifulSoup
from contextlib import contextmanager
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from mock import patch
from nose.plugins.attrib import attr
from six import text_type
from course_modes.models import CourseMode
from courseware.tests.helpers import LoginEnrollmentTestCase
from xmodule.course_module import CATALOG_VISIBILITY_CATALOG_AND_ABOUT, CATALOG_VISIBILITY_NONE
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
@attr(shard=1)
class TestSites(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
This is testing of the Site Configuration feature
"""
STUDENT_INFO = [('view@test.com', 'foo'), ('view2@test.com', 'foo')]
ENABLED_SIGNALS = ['course_published']
    @classmethod
    def setUpClass(cls):
        """Create the course fixtures shared by all tests in this class."""
        super(TestSites, cls).setUpClass()
        # Course belonging to the configured test site (org='TestSiteX').
        cls.course = CourseFactory.create(
            display_name='Robot_Super_Course',
            org='TestSiteX',
            emit_signals=True,
        )
        cls.chapter0 = ItemFactory.create(parent_location=cls.course.location, display_name='Overview')
        cls.chapter9 = ItemFactory.create(parent_location=cls.course.location, display_name='factory_chapter')
        cls.section0 = ItemFactory.create(parent_location=cls.chapter0.location, display_name='Welcome')
        cls.section9 = ItemFactory.create(parent_location=cls.chapter9.location, display_name='factory_section')
        # Course outside the configured site (different org) — must not
        # appear on the site's homepage.
        cls.course_outside_site = CourseFactory.create(
            display_name='Robot_Course_Outside_Site',
            org='FooX',
            emit_signals=True,
        )
        # have a course which explicitly sets visibility in catalog to False
        cls.course_hidden_visibility = CourseFactory.create(
            display_name='Hidden_course',
            org='TestSiteX',
            catalog_visibility=CATALOG_VISIBILITY_NONE,
            emit_signals=True,
        )
        # have a course which explicitly sets visibility in catalog and about to true
        cls.course_with_visibility = CourseFactory.create(
            display_name='visible_course',
            org='TestSiteX',
            course="foo",
            catalog_visibility=CATALOG_VISIBILITY_CATALOG_AND_ABOUT,
            emit_signals=True,
        )
    def setup_users(self):
        """Create and activate one user account per entry in STUDENT_INFO."""
        # Create student accounts and activate them.
        for i in range(len(self.STUDENT_INFO)):
            email, password = self.STUDENT_INFO[i]
            username = 'u{0}'.format(i)
            self.create_account(username, email, password)
            self.activate_user(email)
@override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
def test_site_anonymous_homepage_content(self):
"""
Verify that the homepage, when accessed via a Site domain, returns
HTML that reflects the Site branding elements
"""
resp = self.client.get('/', HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEqual(resp.status_code, 200)
# assert various branding definitions on this Site
# as per the configuration and Site overrides
self.assertContains(resp, 'This is a Test Site Overlay') # Overlay test message
self.assertContains(resp, 'test_site/images/header-logo.png') # logo swap
self.assertContains(resp, 'test_site/css/test_site') # css override
self.assertContains(resp, 'Test Site') # page title
# assert that test course display name is visible
self.assertContains(resp, 'Robot_Super_Course')
# assert that test course with 'visible_in_catalog' to True is showing up
self.assertContains(resp, 'visible_course')
# assert that test course that is outside current configured site is not visible
self.assertNotContains(resp, 'Robot_Course_Outside_Site')
# assert that a course that has visible_in_catalog=False is not visible
self.assertNotContains(resp, 'Hidden_course')
# assert that footer template has been properly overriden on homepage
self.assertContains(resp, 'This is a Test Site footer')
# assert that the edX partners section is not in the HTML
self.assertNotContains(resp, '<section class="university-partners university-partners2x6">')
# assert that the edX partners tag line is not in the HTML
self.assertNotContains(resp, 'Explore free courses from')
def test_no_configuration_anonymous_homepage_content(self):
"""
Make sure we see the right content on the homepage if there is no site configuration defined.
"""
resp = self.client.get('/')
self.assertEqual(resp.status_code, 200)
# assert various branding definitions on this Site ARE NOT VISIBLE
self.assertNotContains(resp, 'This is a Test Site Overlay') # Overlay test message
self.assertNotContains(resp, 'test_site/images/header-logo.png') # logo swap
self.assertNotContains(resp, 'test_site/css/test_site') # css override
self.assertNotContains(resp, '<title>Test Site</title>') # page title
# assert that test course display name IS NOT VISIBLE
self.assertNotContains(resp, 'Robot_Super_Course')
# assert that test course that is outside site IS VISIBLE
self.assertContains(resp, 'Robot_Course_Outside_Site')
# assert that footer template has been properly overriden on homepage
self.assertNotContains(resp, 'This is a Test Site footer')
@override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
@pytest.mark.django111_expected_failure
def test_site_homepage_course_max(self):
"""
Verify that the number of courses displayed on the homepage honors
the HOMEPAGE_COURSE_MAX setting.
"""
@contextmanager
def homepage_course_max_site_config(limit):
"""Temporarily set the microsite HOMEPAGE_COURSE_MAX setting to desired value."""
with patch.dict(settings.MICROSITE_CONFIGURATION, {
'test_site': dict(
settings.MICROSITE_CONFIGURATION['test_site'],
HOMEPAGE_COURSE_MAX=limit,
)
}):
yield
def assert_displayed_course_count(response, expected_count):
"""Assert that the number of courses displayed matches the expectation."""
soup = BeautifulSoup(response.content, 'html.parser')
courses = soup.find_all(class_='course')
self.assertEqual(len(courses), expected_count)
# By default the number of courses on the homepage is not limited.
# We should see both courses and no link to all courses.
resp = self.client.get('/', HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEqual(resp.status_code, 200)
assert_displayed_course_count(resp, 2)
self.assertNotContains(resp, 'View all Courses')
# With the limit set to 5, we should still see both courses and no link to all courses.
with homepage_course_max_site_config(5):
resp = self.client.get('/', HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEqual(resp.status_code, 200)
assert_displayed_course_count(resp, 2)
self.assertNotContains(resp, 'View all Courses')
# With the limit set to 2, we should still see both courses and no link to all courses.
with homepage_course_max_site_config(2):
resp = self.client.get('/', HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEqual(resp.status_code, 200)
assert_displayed_course_count(resp, 2)
self.assertNotContains(resp, 'View all Courses')
# With the limit set to 1, we should only see one course.
# We should also see the link to all courses.
with homepage_course_max_site_config(1):
resp = self.client.get('/', HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEqual(resp.status_code, 200)
assert_displayed_course_count(resp, 1)
self.assertContains(resp, 'View all Courses')
# If no site configuration is set, the limit falls back to settings.HOMEPAGE_COURSE_MAX.
with override_settings(HOMEPAGE_COURSE_MAX=1):
resp = self.client.get('/', HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEqual(resp.status_code, 200)
assert_displayed_course_count(resp, 1)
self.assertContains(resp, 'View all Courses')
# Site configuration takes precedence over settings when both are set.
with homepage_course_max_site_config(2), override_settings(HOMEPAGE_COURSE_MAX=1):
resp = self.client.get('/', HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEqual(resp.status_code, 200)
assert_displayed_course_count(resp, 2)
self.assertNotContains(resp, 'View all Courses')
@override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
def test_site_anonymous_copyright_content(self):
"""
Verify that the copyright, when accessed via a Site domain, returns
the expected 200 response
"""
resp = self.client.get('/copyright', HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEqual(resp.status_code, 200)
self.assertContains(resp, 'This is a copyright page for an Open edX site.')
def test_not_site_anonymous_copyright_content(self):
"""
Verify that the copyright page does not exist if we are not in a configured site.
"""
resp = self.client.get('/copyright')
self.assertEqual(resp.status_code, 404)
def test_no_redirect_on_homepage_when_no_enrollments(self):
"""
Verify that a user going to homepage will not redirect if he/she has no course enrollments
"""
self.setup_users()
email, password = self.STUDENT_INFO[0]
self.login(email, password)
resp = self.client.get(reverse('root'), HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEquals(resp.status_code, 200)
def test_no_redirect_on_homepage_when_has_enrollments(self):
"""
Verify that a user going to homepage will not redirect to dashboard if he/she has
a course enrollment
"""
self.setup_users()
email, password = self.STUDENT_INFO[0]
self.login(email, password)
self.enroll(self.course, True)
resp = self.client.get(reverse('root'), HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEquals(resp.status_code, 200)
def test_site_course_enrollment(self):
"""
Enroll user in a course scoped in a Site and one course outside of a Site
and make sure that they are only visible in the right Dashboards
"""
self.setup_users()
email, password = self.STUDENT_INFO[1]
self.login(email, password)
self.enroll(self.course, True)
self.enroll(self.course_outside_site, True)
# Access the site dashboard and make sure the right courses appear
resp = self.client.get(reverse('dashboard'), HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertContains(resp, 'Robot_Super_Course')
self.assertNotContains(resp, 'Robot_Course_Outside_Site')
# Now access the non-site dashboard and make sure the right courses appear
resp = self.client.get(reverse('dashboard'))
self.assertNotContains(resp, 'Robot_Super_Course')
self.assertContains(resp, 'Robot_Course_Outside_Site')
def test_site_course_custom_tabs(self):
"""
Enroll user in a course scoped in a Site and make sure that
template with tabs is overridden
"""
self.setup_users()
email, password = self.STUDENT_INFO[1]
self.login(email, password)
self.enroll(self.course, True)
resp = self.client.get(reverse('courseware', args=[text_type(self.course.id)]),
HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertContains(resp, 'Test Site Tab:')
@override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
def test_visible_about_page_settings(self):
"""
Make sure the Site is honoring the visible_about_page permissions that is
set in configuration
"""
url = reverse('about_course', args=[text_type(self.course_with_visibility.id)])
resp = self.client.get(url, HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEqual(resp.status_code, 200)
url = reverse('about_course', args=[text_type(self.course_hidden_visibility.id)])
resp = self.client.get(url, HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEqual(resp.status_code, 404)
@override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
def test_paid_course_registration(self):
"""
Make sure that Site overrides on the ENABLE_SHOPPING_CART and
ENABLE_PAID_COURSE_ENROLLMENTS are honored
"""
course_mode = CourseMode(
course_id=self.course_with_visibility.id,
mode_slug=CourseMode.DEFAULT_MODE_SLUG,
mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
min_price=10,
)
course_mode.save()
# first try on the non site, which
# should pick up the global configuration (where ENABLE_PAID_COURSE_REGISTRATIONS = False)
url = reverse('about_course', args=[text_type(self.course_with_visibility.id)])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Enroll in {}".format(self.course_with_visibility.id.course), resp.content)
self.assertNotIn("Add {} to Cart ($10)".format(self.course_with_visibility.id.course), resp.content)
# now try on the site
url = reverse('about_course', args=[text_type(self.course_with_visibility.id)])
resp = self.client.get(url, HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertEqual(resp.status_code, 200)
self.assertNotIn("Enroll in {}".format(self.course_with_visibility.id.course), resp.content)
self.assertIn("Add {} to Cart <span>($10 USD)</span>".format(
self.course_with_visibility.id.course
), resp.content)
self.assertIn('$("#add_to_cart_post").click', resp.content)
| agpl-3.0 |
artefactual/archivematica-history | src/archivematicaCommon/lib/externals/pyes/exceptions.py | 2 | 3109 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .utils import EqualityComparableUsingAttributeDictionary
__author__ = 'Alberto Paro'
# Names re-exported by ``from pyes.exceptions import *``; every exception
# class defined below should be listed here.
__all__ = ['NoServerAvailable',
           "QueryError",
           "NotFoundException",
           "AlreadyExistsException",
           "IndexAlreadyExistsException",
           "IndexMissingException",
           "SearchPhaseExecutionException",
           "InvalidQuery",
           "InvalidParameterQuery",
           "InvalidParameter",
           "QueryParameterError",
           "ScriptFieldsError",
           "ReplicationShardOperationFailedException",
           "ClusterBlockException",
           "MapperParsingException",
           "ElasticSearchException",
           'ReduceSearchPhaseException',
           "VersionConflictEngineException",
           'DocumentAlreadyExistsEngineException',
           "DocumentAlreadyExistsException",
           "TypeMissingException",
           "BulkOperationException"
           ]
class NoServerAvailable(Exception):
    """Raised when no server is available to serve the request."""
    pass
class InvalidQuery(Exception):
    """Base error for a query that is not valid."""
    pass
class InvalidParameterQuery(InvalidQuery):
    """Raised when a query is built with an invalid parameter."""
    pass
class QueryError(Exception):
    """Generic error raised while building or running a query."""
    pass
class QueryParameterError(Exception):
    """Raised for an invalid query parameter."""
    pass
class ScriptFieldsError(Exception):
    """Raised for an error in a script_fields definition."""
    pass
class InvalidParameter(Exception):
    """Raised when an invalid parameter is supplied."""
    pass
class ElasticSearchException(Exception):
    """Generic error parsed from an ElasticSearch error return.

    Raised directly when no more specific subclass is appropriate; all
    the server-side error subclasses below derive from it.  Besides the
    error message it records the HTTP ``status``, the decoded ``result``
    body and the originating ``request``.
    """

    def __init__(self, error, status=None, result=None, request=None):
        super(ElasticSearchException, self).__init__(error)
        # Keep the full response context available to callers.
        self.request = request
        self.result = result
        self.status = status
class ElasticSearchIllegalArgumentException(ElasticSearchException):
    """Server reported an illegal argument."""
    pass
class IndexMissingException(ElasticSearchException):
    """The referenced index does not exist."""
    pass
class NotFoundException(ElasticSearchException):
    """The requested item was not found."""
    pass
class AlreadyExistsException(ElasticSearchException):
    """The item being created already exists."""
    pass
class IndexAlreadyExistsException(AlreadyExistsException):
    """The index being created already exists."""
    pass
class SearchPhaseExecutionException(ElasticSearchException):
    """A failure occurred during the search phase of a query."""
    pass
class ReplicationShardOperationFailedException(ElasticSearchException):
    """An operation failed on a replication shard."""
    pass
class ClusterBlockException(ElasticSearchException):
    """The operation was rejected because of a cluster block."""
    pass
class MapperParsingException(ElasticSearchException):
    """The server failed to parse a mapping definition."""
    pass
class ReduceSearchPhaseException(ElasticSearchException):
    """A failure occurred during the reduce phase of a search."""
    pass
class VersionConflictEngineException(ElasticSearchException):
    """A document version conflict was detected."""
    pass
class DocumentAlreadyExistsEngineException(ElasticSearchException):
    """Engine-level error: the document being created already exists."""
    pass
class DocumentAlreadyExistsException(ElasticSearchException):
    """The document being created already exists."""
    pass
class TypeMissingException(ElasticSearchException):
    """The referenced document type does not exist."""
    pass
class BulkOperationException(ElasticSearchException, EqualityComparableUsingAttributeDictionary):
    """Raised when at least one operation in a bulk request failed.

    ``errors`` holds the failed operations; ``bulk_result`` is the full
    bulk response they came from.
    """

    def __init__(self, errors, bulk_result):
        super(BulkOperationException, self).__init__(
            u"At least one operation in the bulk request has failed: %s" % errors)
        self.errors = errors
        self.bulk_result = bulk_result
| agpl-3.0 |
draugiskisprendimai/odoo | addons/sale/report/sale_report.py | 71 | 5981 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.osv import fields, osv
class sale_report(osv.osv):
    """Read-only sales-order statistics model, backed by a SQL view.

    ``_auto = False`` means no table is created by the ORM; ``init()``
    (re)creates the view from the SQL assembled by ``_select``, ``_from``
    and ``_group_by``.
    """
    _name = "sale.report"
    _description = "Sales Orders Statistics"
    _auto = False
    _rec_name = 'date'
    # All columns are read-only: they are populated by the view, never written.
    _columns = {
        'date': fields.datetime('Date Order', readonly=True),  # TDE FIXME master: rename into date_order
        'date_confirm': fields.date('Date Confirm', readonly=True),
        'product_id': fields.many2one('product.product', 'Product', readonly=True),
        'product_uom': fields.many2one('product.uom', 'Unit of Measure', readonly=True),
        'product_uom_qty': fields.float('# of Qty', readonly=True),
        'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
        'company_id': fields.many2one('res.company', 'Company', readonly=True),
        'user_id': fields.many2one('res.users', 'Salesperson', readonly=True),
        'price_total': fields.float('Total Price', readonly=True),
        'delay': fields.float('Commitment Delay', digits=(16,2), readonly=True),
        'categ_id': fields.many2one('product.category','Category of Product', readonly=True),
        'nbr': fields.integer('# of Lines', readonly=True),  # TDE FIXME master: rename into nbr_lines
        'state': fields.selection([
            ('cancel', 'Cancelled'),
            ('draft', 'Draft'),
            ('confirmed', 'Confirmed'),
            ('exception', 'Exception'),
            ('done', 'Done')], 'Order Status', readonly=True),
        'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', readonly=True),
        'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
        'section_id': fields.many2one('crm.case.section', 'Sales Team'),
    }
    _order = 'date desc'

    def _select(self):
        """Return the SELECT clause of the view, including a CTE that
        resolves the currency rate valid at each order's date."""
        select_str = """
             WITH currency_rate (currency_id, rate, date_start, date_end) AS (
                SELECT r.currency_id, r.rate, r.name AS date_start,
                    (SELECT name FROM res_currency_rate r2
                     WHERE r2.name > r.name AND
                           r2.currency_id = r.currency_id
                     ORDER BY r2.name ASC
                     LIMIT 1) AS date_end
                FROM res_currency_rate r
                )
             SELECT min(l.id) as id,
                    l.product_id as product_id,
                    t.uom_id as product_uom,
                    sum(l.product_uom_qty / u.factor * u2.factor) as product_uom_qty,
                    sum(l.product_uom_qty * l.price_unit / cr.rate * (100.0-l.discount) / 100.0) as price_total,
                    count(*) as nbr,
                    s.date_order as date,
                    s.date_confirm as date_confirm,
                    s.partner_id as partner_id,
                    s.user_id as user_id,
                    s.company_id as company_id,
                    extract(epoch from avg(date_trunc('day',s.date_confirm)-date_trunc('day',s.create_date)))/(24*60*60)::decimal(16,2) as delay,
                    l.state,
                    t.categ_id as categ_id,
                    s.pricelist_id as pricelist_id,
                    s.project_id as analytic_account_id,
                    s.section_id as section_id
        """
        return select_str

    def _from(self):
        """Return the FROM clause: order lines joined to orders, products,
        units of measure and the currency_rate CTE."""
        from_str = """
                sale_order_line l
                      join sale_order s on (l.order_id=s.id)
                        left join product_product p on (l.product_id=p.id)
                            left join product_template t on (p.product_tmpl_id=t.id)
                    left join product_uom u on (u.id=l.product_uom)
                    left join product_uom u2 on (u2.id=t.uom_id)
                    left join product_pricelist pp on (s.pricelist_id = pp.id)
                    join currency_rate cr on (cr.currency_id = pp.currency_id and
                        cr.date_start <= coalesce(s.date_order, now()) and
                        (cr.date_end is null or cr.date_end > coalesce(s.date_order, now())))
        """
        return from_str

    def _group_by(self):
        """Return the GROUP BY clause matching the non-aggregated columns
        of _select()."""
        group_by_str = """
            GROUP BY l.product_id,
                    l.order_id,
                    t.uom_id,
                    t.categ_id,
                    s.date_order,
                    s.date_confirm,
                    s.partner_id,
                    s.user_id,
                    s.company_id,
                    l.state,
                    s.pricelist_id,
                    s.project_id,
                    s.section_id
        """
        return group_by_str

    def init(self, cr):
        """(Re)create the backing SQL view for this model."""
        # self._table = sale_report
        tools.drop_view_if_exists(cr, self._table)
        cr.execute("""CREATE or REPLACE VIEW %s as (
            %s
            FROM ( %s )
            %s
            )""" % (self._table, self._select(), self._from(), self._group_by()))
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
abomyi/django | tests/template_tests/syntax_tests/test_cycle.py | 104 | 5516 | from django.template import TemplateSyntaxError
from django.test import SimpleTestCase
from ..utils import setup
class CycleTagTests(SimpleTestCase):
    """Tests for the ``{% cycle %}`` template tag.

    Each ``@setup`` decorator registers the template under test; the
    numbered names (cycle01, cycle05, ...) follow the historical suite
    numbering, so gaps are intentional.
    """
    # Malformed invocations must fail at template-parse time.
    @setup({'cycle01': '{% cycle a %}'})
    def test_cycle01(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('cycle01')
    @setup({'cycle05': '{% cycle %}'})
    def test_cycle05(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('cycle05')
    @setup({'cycle06': '{% cycle a %}'})
    def test_cycle06(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('cycle06')
    @setup({'cycle07': '{% cycle a,b,c as foo %}{% cycle bar %}'})
    def test_cycle07(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('cycle07')
    # Named cycles ("as abc") can be advanced by referencing the name.
    @setup({'cycle10': "{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}"})
    def test_cycle10(self):
        output = self.engine.render_to_string('cycle10')
        self.assertEqual(output, 'ab')
    @setup({'cycle11': "{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}{% cycle abc %}"})
    def test_cycle11(self):
        output = self.engine.render_to_string('cycle11')
        self.assertEqual(output, 'abc')
    @setup({'cycle12': "{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}{% cycle abc %}{% cycle abc %}"})
    def test_cycle12(self):
        output = self.engine.render_to_string('cycle12')
        # Wraps around after the last value.
        self.assertEqual(output, 'abca')
    @setup({'cycle13': "{% for i in test %}{% cycle 'a' 'b' %}{{ i }},{% endfor %}"})
    def test_cycle13(self):
        output = self.engine.render_to_string('cycle13', {'test': list(range(5))})
        self.assertEqual(output, 'a0,b1,a2,b3,a4,')
    @setup({'cycle14': '{% cycle one two as foo %}{% cycle foo %}'})
    def test_cycle14(self):
        output = self.engine.render_to_string('cycle14', {'one': '1', 'two': '2'})
        self.assertEqual(output, '12')
    @setup({'cycle15': '{% for i in test %}{% cycle aye bee %}{{ i }},{% endfor %}'})
    def test_cycle15(self):
        output = self.engine.render_to_string('cycle15', {'test': list(range(5)), 'aye': 'a', 'bee': 'b'})
        self.assertEqual(output, 'a0,b1,a2,b3,a4,')
    @setup({'cycle16': '{% cycle one|lower two as foo %}{% cycle foo %}'})
    def test_cycle16(self):
        # Filters are applied to cycle arguments.
        output = self.engine.render_to_string('cycle16', {'one': 'A', 'two': '2'})
        self.assertEqual(output, 'a2')
    # "silent" advances the cycle without emitting output.
    @setup({'cycle17': "{% cycle 'a' 'b' 'c' as abc silent %}"
                       "{% cycle abc %}{% cycle abc %}{% cycle abc %}{% cycle abc %}"})
    def test_cycle17(self):
        output = self.engine.render_to_string('cycle17')
        self.assertEqual(output, '')
    @setup({'cycle18': "{% cycle 'a' 'b' 'c' as foo invalid_flag %}"})
    def test_cycle18(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('cycle18')
    @setup({'cycle19': "{% cycle 'a' 'b' as silent %}{% cycle silent %}"})
    def test_cycle19(self):
        # "silent" used as a cycle *name* must not trigger silent mode.
        output = self.engine.render_to_string('cycle19')
        self.assertEqual(output, 'ab')
    # Autoescaping of cycle values.
    @setup({'cycle20': '{% cycle one two as foo %} & {% cycle foo %}'})
    def test_cycle20(self):
        output = self.engine.render_to_string('cycle20', {'two': 'C & D', 'one': 'A & B'})
        self.assertEqual(output, 'A &amp; B &amp; C &amp; D')
    @setup({'cycle21': '{% filter force_escape %}'
                       '{% cycle one two as foo %} & {% cycle foo %}{% endfilter %}'})
    def test_cycle21(self):
        output = self.engine.render_to_string('cycle21', {'two': 'C & D', 'one': 'A & B'})
        self.assertEqual(output, 'A &amp;amp; B &amp; C &amp;amp; D')
    @setup({'cycle22': "{% for x in values %}{% cycle 'a' 'b' 'c' as abc silent %}{{ x }}{% endfor %}"})
    def test_cycle22(self):
        output = self.engine.render_to_string('cycle22', {'values': [1, 2, 3, 4]})
        self.assertEqual(output, '1234')
    @setup({'cycle23': "{% for x in values %}"
                       "{% cycle 'a' 'b' 'c' as abc silent %}{{ abc }}{{ x }}{% endfor %}"})
    def test_cycle23(self):
        # A silent cycle still updates the context variable it is bound to.
        output = self.engine.render_to_string('cycle23', {'values': [1, 2, 3, 4]})
        self.assertEqual(output, 'a1b2c3a4')
    @setup({
        'cycle24': "{% for x in values %}"
                   "{% cycle 'a' 'b' 'c' as abc silent %}{% include 'included-cycle' %}{% endfor %}",
        'included-cycle': '{{ abc }}',
    })
    def test_cycle24(self):
        output = self.engine.render_to_string('cycle24', {'values': [1, 2, 3, 4]})
        self.assertEqual(output, 'abca')
    @setup({'cycle25': '{% cycle a as abc %}'})
    def test_cycle25(self):
        output = self.engine.render_to_string('cycle25', {'a': '<'})
        self.assertEqual(output, '&lt;')
    @setup({'cycle26': '{% cycle a b as ab %}{% cycle ab %}'})
    def test_cycle26(self):
        output = self.engine.render_to_string('cycle26', {'a': '<', 'b': '>'})
        self.assertEqual(output, '&lt;&gt;')
    @setup({'cycle27': '{% autoescape off %}{% cycle a b as ab %}{% cycle ab %}{% endautoescape %}'})
    def test_cycle27(self):
        output = self.engine.render_to_string('cycle27', {'a': '<', 'b': '>'})
        self.assertEqual(output, '<>')
    @setup({'cycle28': '{% cycle a|safe b as ab %}{% cycle ab %}'})
    def test_cycle28(self):
        output = self.engine.render_to_string('cycle28', {'a': '<', 'b': '>'})
        self.assertEqual(output, '<&gt;')
| bsd-3-clause |
PetarV-/facejack | get_face/face_detect_cv3.py | 1 | 4500 | import cv2
import operator
import numpy as np
import pickle
import requests
import threading
# Serializes server round-trips: push_to_server() acquires this lock before
# starting a worker thread, and dispact_and_update() releases it in its
# ``finally`` block, so at most one request is in flight at a time.
myl = threading.Lock()
def dispact_and_update(img, hack, base_im, x, y, w, h):
    """Worker: POST a pickled face crop to the auth server and display the verdict.

    NOTE(review): the name misspells "dispatch"; kept because
    push_to_server() references it by this name.

    img: request body bytes (a pickled image, see caller).
    hack: bool forwarded as the 'hack' query parameter.
    base_im, x, y, w, h: source frame and face bounding box, used to draw
        the green (allowed) / red (denied) preview rectangle.

    Always releases the module lock ``myl`` (acquired by push_to_server)
    when done, even if the request or display fails.
    """
    try:
        myurl = "http://facejack.westeurope.cloudapp.azure.com:5001/imsend"
        headers = {
            'content-type': "application/x-www-form-urlencoded",
            'cache-control': "no-cache"
        }
        r = requests.post(url=myurl, data=img, headers=headers, params={'hack': str(hack)}).json()
        # Server answers with an 'authentication' field; anything else counts as denied.
        reply = 'authentication' in r and r['authentication'] == "ALLOWED"
        disp_face = cv2.resize(base_im[y:y + h, x:x + w], (224, 224), 0, 0, cv2.INTER_LANCZOS4)
        if reply:
            cv2.rectangle(disp_face, (0, 0), (222, 222), (0, 255, 0), 2)  # green: allowed
        else:
            cv2.rectangle(disp_face, (0, 0), (222, 222), (0, 0, 255), 2)  # red: denied
        cv2.imshow("Face", disp_face)
    finally:
        myl.release()
def push_to_server(img, hack, base_im, x, y, w, h):
    """Run dispact_and_update() on a background daemon thread.

    Acquires the module lock ``myl`` before starting the worker; the
    worker releases it in its ``finally`` block, so at most one request
    is in flight at a time and a second call blocks here until the
    previous request completes.
    """
    t = threading.Thread(target=dispact_and_update, args=(img, hack, base_im, x, y, w, h))
    # Thread.setDaemon() is deprecated (removal planned); assign the
    # ``daemon`` attribute directly instead — same behavior.
    t.daemon = True
    myl.acquire()
    t.start()
def main():
    """Capture webcam frames, track the largest face, and after ~20 stable
    frames send the face crop to the auth server (via push_to_server).

    Keys: 'q' quits, 'h' toggles "hack" mode (forwarded to the server and
    shown as an overlay).
    """
    cascPath = "get-face/haarcascade_frontalface_default.xml"
    # NOTE(review): this immediately overwrites the path above; the first
    # assignment is dead. Presumably one of the two is the deployed layout.
    cascPath = "haarcascade_frontalface_default.xml"
    # Create the haar cascade
    hack = False
    faceCascade = cv2.CascadeClassifier(cascPath)
    # Read the image
    vidcap = cv2.VideoCapture(0)
    # A face must reappear within this many pixels (per axis) to count as
    # the same face across frames.
    location_persistence_tolerance = 100
    # (x, y, w, h, consecutive_frame_count) of the face being tracked.
    last_seen = None
    while vidcap.isOpened():
        q=False
        retval, image= vidcap.read()
        # image = cv2.flip(img, 1)
        gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        # Detect faces in the image
        faces = faceCascade.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(60, 60)
            # flags = cv2.CV_HAAR_SCALE_IMAGE
        )
        # print("Found {0} faces!".format(len(faces)))
        red = (0, 0, 255)
        green = (0, 255, 0)
        # Draw a rectangle around the facesh
        if len(faces) > 0:
            # get largest face
            # NOTE(review): itemgetter(2) keys on width only, so "largest"
            # here means widest, not largest area — confirm intent.
            (x, y, w, h) = max(faces, key=(lambda f: operator.itemgetter(2)(f)))
            # check persistance
            if last_seen:
                lx, ly, lw, lh, count = last_seen
                if abs(x - lx) < location_persistence_tolerance and abs(y - ly) < location_persistence_tolerance:
                    last_seen = x, y, w, h, count+1
                    # Only upload once the face has been stable for >20 frames.
                    if count > 20:
                        sub_face = cv2.resize(image[y:y + h, x:x + w], (224,224), 0, 0, cv2.INTER_LANCZOS4)
                        s_face = cv2.cvtColor(sub_face, cv2.COLOR_BGR2RGB)
                        cv2.imshow("Face", sub_face)
                        # Reset the counter so uploads are spaced out.
                        last_seen = x, y, w, h, 1
                        dat = pickle.dumps(s_face)
                        # print(dat)
                        push_to_server(dat, hack, image, x, y, w, h)
                        # r = requests.post(url = myurl, data=dat, headers=headers, params={'hack': str(hack)}).json()
                        #
                        # reply = 'authentication' in r and r['authentication'] == "ALLOWED"
                        # disp_face = cv2.resize(image[y:y + h, x:x + w], (224,224), 0, 0, cv2.INTER_LANCZOS4)
                        # if reply:
                        #     cv2.rectangle(disp_face,(0,0), (222,222), (0,255,0), 2)
                        # else:
                        #     cv2.rectangle(disp_face, (0, 0), (222, 222), (0,0,255), 2)
                        # cv2.imshow("Face", disp_face)
                else:
                    # Face jumped too far: restart tracking.
                    last_seen= None
            else:
                last_seen = x, y, w, h, 1
            cv2.rectangle(image, (x, y), (x + w, y + h), green, 2)
            mx, my = x,y
            # Tracked face is outlined green, all other detections red.
            for (x, y, w, h) in faces:
                if mx ==x and my==y:
                    cv2.rectangle(image, (x, y), (x + w, y + h), green, 2)
                else:
                    cv2.rectangle(image, (x, y), (x + w, y + h), red, 2)
        # print(last_seen)
        key_press = (cv2.waitKey(2) & 0xFF)
        if key_press == ord('q'):
            q = True
        elif key_press == ord('h'):
            hack = not hack
        if hack:
            # print("hack")
            cv2.putText(image, 'HACK ON', (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (255,255,128) )
        cv2.imshow("FaceJACK", image)
        if q:
            break
    vidcap.release()
    cv2.destroyAllWindows()
if __name__ == "__main__":
main() | mit |
moreati/django | django/utils/http.py | 285 | 9978 | from __future__ import unicode_literals
import base64
import calendar
import datetime
import re
import sys
import unicodedata
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils import six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.functional import allow_lazy
from django.utils.six.moves.urllib.parse import (
quote, quote_plus, unquote, unquote_plus, urlencode as original_urlencode,
urlparse,
)
# Matches one (optionally weak, "W/"-prefixed) quoted etag; group 1 is the
# etag value with its surrounding quotes stripped.
ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')

# Lower-cased month abbreviations, indexed 0-11, for HTTP date parsing.
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
# Regex fragments for the three HTTP date formats (RFC 2616 section 3.3.1).
__D = r'(?P<day>\d{2})'
__D2 = r'(?P<day>[ \d]\d)'
__M = r'(?P<mon>\w{3})'
__Y = r'(?P<year>\d{4})'
__Y2 = r'(?P<year>\d{2})'
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
RFC1123_DATE = re.compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
RFC850_DATE = re.compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
ASCTIME_DATE = re.compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))

# RFC 3986 general and sub delimiter characters.
RFC3986_GENDELIMS = str(":/?#[]@")
RFC3986_SUBDELIMS = str("!$&'()*+,;=")
# Default ports, used by same_origin() when a URL omits the port.
PROTOCOL_TO_PORT = {
    'http': 80,
    'https': 443,
}
def urlquote(url, safe='/'):
    """
    Unicode-aware version of urllib.quote(): the url is UTF-8 encoded
    before quoting. The result can safely be passed to a subsequent
    iri_to_uri() call without double-quoting occurring.
    """
    quoted = quote(force_str(url), force_str(safe))
    return force_text(quoted)
urlquote = allow_lazy(urlquote, six.text_type)
def urlquote_plus(url, safe=''):
    """
    Unicode-aware version of urllib.quote_plus(): the url is UTF-8 encoded
    before quoting. The result can safely be passed to a subsequent
    iri_to_uri() call without double-quoting occurring.
    """
    quoted = quote_plus(force_str(url), force_str(safe))
    return force_text(quoted)
urlquote_plus = allow_lazy(urlquote_plus, six.text_type)
def urlunquote(quoted_url):
    """
    Wrapper around urllib.unquote() that undoes
    django.utils.http.urlquote().
    """
    raw = unquote(force_str(quoted_url))
    return force_text(raw)
urlunquote = allow_lazy(urlunquote, six.text_type)
def urlunquote_plus(quoted_url):
    """
    Wrapper around urllib.unquote_plus() that undoes
    django.utils.http.urlquote_plus().
    """
    raw = unquote_plus(force_str(quoted_url))
    return force_text(raw)
urlunquote_plus = allow_lazy(urlunquote_plus, six.text_type)
def urlencode(query, doseq=0):
    """
    Unicode-aware version of urllib.urlencode(): keys and values are cast
    to UTF-8 encoded strings before being encoded as per normal.
    Accepts a MultiValueDict, a mapping, or a sequence of pairs.
    """
    if isinstance(query, MultiValueDict):
        query = query.lists()
    elif hasattr(query, 'items'):
        query = query.items()
    pairs = []
    for key, value in query:
        if isinstance(value, (list, tuple)):
            coerced = [force_str(item) for item in value]
        else:
            coerced = force_str(value)
        pairs.append((force_str(key), coerced))
    return original_urlencode(pairs, doseq)
def cookie_date(epoch_seconds=None):
    """
    Format a timestamp for Netscape's cookie standard:
    'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.

    epoch_seconds is a float of seconds since the epoch in UTC (such as
    returned by time.time()); None means the current time.
    """
    rfc1123 = formatdate(epoch_seconds)
    # Splice dashes into the RFC 1123 form ('Wdy, DD Mon YYYY HH:MM:SS ...')
    # and pin the timezone label to GMT.
    return '{0}-{1}-{2} GMT'.format(rfc1123[:7], rfc1123[8:11], rfc1123[12:25])
def http_date(epoch_seconds=None):
    """
    Format a timestamp in the RFC 1123 date format required by HTTP
    (RFC 2616 section 3.3.1): 'Wdy, DD Mon YYYY HH:MM:SS GMT'.

    epoch_seconds is a float of seconds since the epoch in UTC (such as
    returned by time.time()); None means the current time.
    """
    # formatdate(usegmt=True) already emits exactly the required format.
    return formatdate(epoch_seconds, usegmt=True)
def parse_http_date(date):
    """
    Parses a date format as specified by HTTP RFC2616 section 3.3.1.
    The three formats allowed by the RFC are accepted, even if only the first
    one is still in widespread use.
    Returns an integer expressed in seconds since the epoch, in UTC.
    Raises ValueError for any string not matching one of the three formats.
    """
    # emails.Util.parsedate does the job for RFC1123 dates; unfortunately
    # RFC2616 makes it mandatory to support RFC850 dates too. So we roll
    # our own RFC-compliant parsing.
    for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
        m = regex.match(date)
        if m is not None:
            break
    else:
        # for/else: no regex matched at all.
        raise ValueError("%r is not in a valid HTTP date format" % date)
    try:
        year = int(m.group('year'))
        if year < 100:
            # RFC850 uses two-digit years; pivot at 70 (00-69 -> 2000s,
            # 70-99 -> 1900s).
            if year < 70:
                year += 2000
            else:
                year += 1900
        month = MONTHS.index(m.group('mon').lower()) + 1
        day = int(m.group('day'))
        hour = int(m.group('hour'))
        min = int(m.group('min'))
        sec = int(m.group('sec'))
        result = datetime.datetime(year, month, day, hour, min, sec)
        return calendar.timegm(result.utctimetuple())
    except Exception:
        # Re-raise anything (e.g. an out-of-range date) as ValueError,
        # preserving the original traceback.
        six.reraise(ValueError, ValueError("%r is not a valid date" % date), sys.exc_info()[2])
def parse_http_date_safe(date):
    """
    Same as parse_http_date, but returns None instead of raising when the
    input is invalid.
    """
    try:
        return parse_http_date(date)
    except Exception:
        return None
# Base 36 functions: useful for generating compact URLs
def base36_to_int(s):
    """
    Converts a base 36 string to an ``int``. Raises ``ValueError`` if the
    input won't fit into an int.
    """
    # To prevent overconsumption of server resources, reject any
    # base36 string that is long than 13 base36 digits (13 digits
    # is sufficient to base36-encode any 64-bit integer)
    if len(s) > 13:
        raise ValueError("Base36 input too large")
    value = int(s, 36)
    # ... then do a final check that the value will fit into an int to avoid
    # returning a long (#15067). The long type was removed in Python 3.
    # (sys.maxint only exists on Python 2, hence the six.PY2 guard.)
    if six.PY2 and value > sys.maxint:
        raise ValueError("Base36 input too large")
    return value
def int_to_base36(i):
    """
    Converts a non-negative integer to a base36 string.
    Raises ValueError for negative input (and, on Python 2, for values
    larger than sys.maxint); TypeError on Python 2 for non-integers.
    """
    char_set = '0123456789abcdefghijklmnopqrstuvwxyz'
    if i < 0:
        raise ValueError("Negative base36 conversion input.")
    if six.PY2:
        # Python 2 only: mirror the int/long distinction checks.
        if not isinstance(i, six.integer_types):
            raise TypeError("Non-integer base36 conversion input.")
        if i > sys.maxint:
            raise ValueError("Base36 conversion input too large.")
    if i < 36:
        # Single-digit fast path.
        return char_set[i]
    b36 = ''
    # Repeated divmod builds the digits from least to most significant.
    while i != 0:
        i, n = divmod(i, 36)
        b36 = char_set[n] + b36
    return b36
def urlsafe_base64_encode(s):
    """
    URL-safe base64-encode the bytestring *s*, dropping the trailing
    padding ('=') and any trailing newline.
    """
    encoded = base64.urlsafe_b64encode(s)
    return encoded.rstrip(b'\n=')
def urlsafe_base64_decode(s):
    """
    Decode a URL-safe base64 string, restoring any trailing '=' padding that
    urlsafe_base64_encode stripped.  Raises ValueError on malformed input.
    """
    data = force_bytes(s)
    padded = data.ljust(len(data) + len(data) % 4, b'=')
    try:
        return base64.urlsafe_b64decode(padded)
    except (LookupError, BinasciiError) as e:
        raise ValueError(e)
def parse_etags(etag_str):
    """
    Split an If-Match / If-None-Match header value into a list of etags
    (RFC 2616), with surrounding double quotes removed and \<CHAR> escapes
    undone.
    """
    found = ETAG_MATCH.findall(etag_str)
    if not found:
        # Malformed header: treat the whole string as a single opaque etag.
        return [etag_str]
    return [tag.encode('ascii').decode('unicode_escape') for tag in found]
def quote_etag(etag):
    """Return *etag* wrapped in double quotes, escaping backslashes and quotes."""
    escaped = etag.replace('\\', '\\\\').replace('"', '\\"')
    return '"%s"' % escaped
def same_origin(url1, url2):
    """
    Return True when both URLs share scheme, hostname and port (explicit port
    or, when absent, the scheme's default from PROTOCOL_TO_PORT).
    """
    parsed1, parsed2 = urlparse(url1), urlparse(url2)
    try:
        origin1 = (parsed1.scheme, parsed1.hostname,
                   parsed1.port or PROTOCOL_TO_PORT[parsed1.scheme])
        origin2 = (parsed2.scheme, parsed2.hostname,
                   parsed2.port or PROTOCOL_TO_PORT[parsed2.scheme])
    except (ValueError, KeyError):
        # Invalid port value or unknown scheme: not comparable, so not same.
        return False
    return origin1 == origin2
def is_safe_url(url, host=None):
    """
    Return ``True`` when *url* is a safe redirection target: it stays on
    *host* (or is host-relative) and uses http/https (or no scheme).
    Empty or missing URLs are always unsafe.
    """
    if url is not None:
        url = url.strip()
    if not url:
        return False
    # Chrome normalises backslashes to forward slashes; mirror that.
    url = url.replace('\\', '/')
    # Chrome considers any URL with more than two leading slashes absolute,
    # but urlparse does not, so reject three slashes outright.
    if url.startswith('///'):
        return False
    parts = urlparse(url)
    # Reject "http:///example.com": scheme present but no netloc.  urlparse
    # sees example.com as a path component, Chrome would treat it as the
    # host, so allowing it would be unsafe.
    if parts.scheme and not parts.netloc:
        return False
    # Leading control characters can make browsers treat the URL as
    # scheme-relative, so refuse them.
    if unicodedata.category(url[0])[0] == 'C':
        return False
    host_ok = not parts.netloc or parts.netloc == host
    scheme_ok = not parts.scheme or parts.scheme in ['http', 'https']
    return host_ok and scheme_ok
| bsd-3-clause |
Zac-HD/home-assistant | homeassistant/components/sensor/systemmonitor.py | 6 | 6797 | """
Support for monitoring the local system.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.systemmonitor/
"""
import logging
import os
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_RESOURCES, STATE_OFF, STATE_ON, STATE_UNKNOWN, CONF_TYPE)
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt_util
REQUIREMENTS = ['psutil==5.1.3']
_LOGGER = logging.getLogger(__name__)
# Map of sensor type key -> [friendly name, unit of measurement, mdi icon].
SENSOR_TYPES = {
    'disk_free': ['Disk Free', 'GiB', 'mdi:harddisk'],
    'disk_use': ['Disk Use', 'GiB', 'mdi:harddisk'],
    'disk_use_percent': ['Disk Use', '%', 'mdi:harddisk'],
    'ipv4_address': ['IPv4 address', '', 'mdi:server-network'],
    'ipv6_address': ['IPv6 address', '', 'mdi:server-network'],
    'last_boot': ['Last Boot', '', 'mdi:clock'],
    'load_15m': ['Average Load (15m)', '', 'mdi:memory'],
    'load_1m': ['Average Load (1m)', '', 'mdi:memory'],
    'load_5m': ['Average Load (5m)', '', 'mdi:memory'],
    'memory_free': ['RAM Free', 'MiB', 'mdi:memory'],
    'memory_use': ['RAM Use', 'MiB', 'mdi:memory'],
    'memory_use_percent': ['RAM Use', '%', 'mdi:memory'],
    'network_in': ['Received', 'MiB', 'mdi:server-network'],
    'network_out': ['Sent', 'MiB', 'mdi:server-network'],
    'packets_in': ['Packets received', ' ', 'mdi:server-network'],
    'packets_out': ['Packets sent', ' ', 'mdi:server-network'],
    'process': ['Process', ' ', 'mdi:memory'],
    'processor_use': ['CPU Use', '%', 'mdi:memory'],
    'since_last_boot': ['Since Last Boot', '', 'mdi:clock'],
    'swap_free': ['Swap Free', 'GiB', 'mdi:harddisk'],
    'swap_use': ['Swap Use', 'GiB', 'mdi:harddisk'],
    'swap_use_percent': ['Swap Use', '%', 'mdi:harddisk'],
}
# Each configured resource is a dict with a required 'type' (one of the
# SENSOR_TYPES keys) and an optional string 'arg' (mount point, interface
# name, process name, ... depending on the type).
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_RESOURCES, default=['disk_use']):
        vol.All(cv.ensure_list, [vol.Schema({
            vol.Required(CONF_TYPE): vol.In(SENSOR_TYPES),
            vol.Optional('arg'): cv.string,
        })])
})
# Index into the per-NIC tuples returned by psutil.net_io_counters().
IO_COUNTER = {
    'network_out': 0,
    'network_in': 1,
    'packets_out': 2,
    'packets_in': 3,
}
# Index into psutil.net_if_addrs() entries -- presumably 0 is the IPv4 and
# 1 the IPv6 address entry; verify against the psutil version in use.
IF_ADDRS = {
    'ipv4_address': 0,
    'ipv6_address': 1,
}
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the system monitor sensors."""
    sensors = []
    for resource in config[CONF_RESOURCES]:
        # Default the optional type-specific argument to an empty string.
        if 'arg' not in resource:
            resource['arg'] = ''
        sensors.append(
            SystemMonitorSensor(resource[CONF_TYPE], resource['arg']))
    add_devices(sensors)
class SystemMonitorSensor(Entity):
    """Implementation of a system monitor sensor."""
    def __init__(self, sensor_type, argument=''):
        """Initialize the sensor.

        sensor_type -- one of the SENSOR_TYPES keys.
        argument -- type-specific argument (e.g. mount point for disk_*,
        interface name for network_*, process name for 'process'); may be
        empty.
        """
        self._name = '{} {}'.format(SENSOR_TYPES[sensor_type][0], argument)
        self.argument = argument
        self.type = sensor_type
        self._state = None
        self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
        # Populate an initial value right away instead of waiting for the
        # first scheduled update.
        self.update()
    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name.rstrip()
    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        return SENSOR_TYPES[self.type][2]
    @property
    def state(self):
        """Return the state of the device."""
        return self._state
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._unit_of_measurement
    def update(self):
        """Get the latest system information."""
        # Imported lazily: psutil is an extra REQUIREMENT installed after
        # the module itself is importable.
        import psutil
        if self.type == 'disk_use_percent':
            self._state = psutil.disk_usage(self.argument).percent
        elif self.type == 'disk_use':
            # Bytes -> GiB, rounded to one decimal place.
            self._state = round(psutil.disk_usage(self.argument).used /
                                1024**3, 1)
        elif self.type == 'disk_free':
            self._state = round(psutil.disk_usage(self.argument).free /
                                1024**3, 1)
        elif self.type == 'memory_use_percent':
            self._state = psutil.virtual_memory().percent
        elif self.type == 'memory_use':
            # "Used" is computed as total minus available, in MiB.
            self._state = round((psutil.virtual_memory().total -
                                 psutil.virtual_memory().available) /
                                1024**2, 1)
        elif self.type == 'memory_free':
            self._state = round(psutil.virtual_memory().available / 1024**2, 1)
        elif self.type == 'swap_use_percent':
            self._state = psutil.swap_memory().percent
        elif self.type == 'swap_use':
            self._state = round(psutil.swap_memory().used / 1024**3, 1)
        elif self.type == 'swap_free':
            self._state = round(psutil.swap_memory().free / 1024**3, 1)
        elif self.type == 'processor_use':
            self._state = round(psutil.cpu_percent(interval=None))
        elif self.type == 'process':
            # ON if any running process name contains self.argument.
            if any(self.argument in l.name() for l in psutil.process_iter()):
                self._state = STATE_ON
            else:
                self._state = STATE_OFF
        elif self.type == 'network_out' or self.type == 'network_in':
            # Per-interface byte counters; self.argument is the NIC name.
            counters = psutil.net_io_counters(pernic=True)
            if self.argument in counters:
                counter = counters[self.argument][IO_COUNTER[self.type]]
                self._state = round(counter / 1024**2, 1)
            else:
                self._state = STATE_UNKNOWN
        elif self.type == 'packets_out' or self.type == 'packets_in':
            counters = psutil.net_io_counters(pernic=True)
            if self.argument in counters:
                self._state = counters[self.argument][IO_COUNTER[self.type]]
            else:
                self._state = STATE_UNKNOWN
        elif self.type == 'ipv4_address' or self.type == 'ipv6_address':
            addresses = psutil.net_if_addrs()
            if self.argument in addresses:
                self._state = addresses[self.argument][IF_ADDRS[self.type]][1]
            else:
                self._state = STATE_UNKNOWN
        elif self.type == 'last_boot':
            # Boot timestamp rendered as a local ISO date string.
            self._state = dt_util.as_local(
                dt_util.utc_from_timestamp(psutil.boot_time())
            ).date().isoformat()
        elif self.type == 'since_last_boot':
            # Uptime as a timedelta.
            self._state = dt_util.utcnow() - dt_util.utc_from_timestamp(
                psutil.boot_time())
        elif self.type == 'load_1m':
            self._state = os.getloadavg()[0]
        elif self.type == 'load_5m':
            self._state = os.getloadavg()[1]
        elif self.type == 'load_15m':
            self._state = os.getloadavg()[2]
| apache-2.0 |
nathanielbecker/business-contacter-django-app | myproject/cookie_app/migrations/0030_historicalinitial_borr_list_page_historicalmore_data_page.py | 1 | 11726 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
    """Auto-generated schema migration.

    Creates two audit-trail tables, HistoricalInitial_Borr_List_Page and
    HistoricalMore_Data_Page, which shadow the live models field-for-field
    and add history_id/history_date/history_type/history_user bookkeeping
    columns.  NOTE(review): the history_* columns look like the
    django-simple-history pattern -- verify before hand-editing, and prefer
    regenerating over manual changes to auto-generated migrations.
    """
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('cookie_app', '0029_auto_20150130_0936'),
    ]
    operations = [
        # Shadow table for Initial_Borr_List_Page change history.
        migrations.CreateModel(
            name='HistoricalInitial_Borr_List_Page',
            fields=[
                ('id', models.IntegerField(verbose_name='ID', db_index=True, auto_created=True, blank=True)),
                ('AtoZ_ID', models.CharField(default='0000000', max_length=150, null=True, verbose_name='id')),
                ('BankName', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Lender Name')),
                ('Business_Name', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Company Name')),
                ('checkins', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Facebook Checkins')),
                ('Est_Rent_Annual_Expense', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Estimated Rent')),
                ('executivedetails', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Executive Name')),
                ('final_phone', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Validated Phone')),
                ('first_loan_date', models.CharField(default='0000000', max_length=150, null=True, verbose_name='First Loan Date')),
                ('Franchise', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Franchise')),
                ('InitialInterestRate', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Interest Rate')),
                ('last_loan_date', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Last Loan Date')),
                ('last_loan_end', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Last Loan End')),
                ('likes', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Facebook Likes')),
                ('link', models.URLField(default='0000000', max_length=150, null=True, verbose_name='Facebook Link')),
                ('final_employees', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Employees (est.)')),
                ('Main_Line_of_Business', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Industry')),
                ('Manufacturer', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Manufacturer')),
                ('NaicsDescription', models.CharField(default='0000000', max_length=150, null=True, verbose_name='NAICS Description')),
                ('num_loans', models.CharField(default='0000000', max_length=150, null=True, verbose_name='SBA Loans (2007+)')),
                ('ownsrentsindicator', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Own/Rent')),
                ('Physical_City', models.CharField(default='0000000', max_length=150, null=True, verbose_name='City')),
                ('Physical_State', models.CharField(default='0000000', max_length=150, null=True, verbose_name='State')),
                ('Physical_ZIP', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Zip')),
                ('rating', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Yelp Rating')),
                ('Revenue_Yr', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Revenue (est.)')),
                ('review_count', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Yelp Review Count')),
                ('Square_Footage', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Square Footage')),
                ('street3', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Address')),
                ('sum_loans', models.CharField(default='0000000', max_length=150, null=True, verbose_name='SBA Loan vol. (2007+)')),
                ('Website', models.URLField(default='0000000', max_length=150, null=True, verbose_name='Website')),
                ('X2013_Employees', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Employees (2013 est.)')),
                ('X2013_Revenue_Yr', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Revenue (2013 est.)')),
                ('final_yr_incorporated', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Year of Incorporation')),
                ('url', models.URLField(default='0000000', max_length=150, null=True, verbose_name='Yelp Link')),
                ('centile', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Loan Likelihood Score')),
                ('Created_id', models.IntegerField(db_index=True, null=True, blank=True)),
                ('FindMoreData', models.BooleanField(default=False, verbose_name='Find More Information')),
                ('FollowUp', models.BooleanField(default=False, verbose_name='Contact Business')),
                ('history_id', models.AutoField(serialize=False, primary_key=True)),
                ('history_date', models.DateTimeField()),
                ('history_type', models.CharField(max_length=1, choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')])),
                ('history_user', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
                'verbose_name': 'historical initial_ borr_ list_ page',
            },
            bases=(models.Model,),
        ),
        # Shadow table for More_Data_Page change history (same columns,
        # different field order as emitted by the generator).
        migrations.CreateModel(
            name='HistoricalMore_Data_Page',
            fields=[
                ('id', models.IntegerField(verbose_name='ID', db_index=True, auto_created=True, blank=True)),
                ('FindMoreData', models.BooleanField(default=False, verbose_name='Find More Information')),
                ('FollowUp', models.BooleanField(default=False, verbose_name='Contact Business')),
                ('AtoZ_ID', models.CharField(default='0000000', max_length=150, null=True, verbose_name='id')),
                ('BankName', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Lender Name')),
                ('Business_Name', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Company Name')),
                ('checkins', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Facebook Checkins')),
                ('Est_Rent_Annual_Expense', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Estimated Rent')),
                ('executivedetails', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Executive Name')),
                ('final_phone', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Validated Phone')),
                ('first_loan_date', models.CharField(default='0000000', max_length=150, null=True, verbose_name='First Loan Date')),
                ('Franchise', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Franchise')),
                ('InitialInterestRate', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Interest Rate')),
                ('last_loan_date', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Last Loan Date')),
                ('last_loan_end', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Last Loan End')),
                ('likes', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Facebook Likes')),
                ('link', models.URLField(default='0000000', max_length=150, null=True, verbose_name='Facebook Link')),
                ('final_employees', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Employees (est.)')),
                ('Main_Line_of_Business', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Industry')),
                ('Manufacturer', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Manufacturer')),
                ('NaicsDescription', models.CharField(default='0000000', max_length=150, null=True, verbose_name='NAICS Description')),
                ('num_loans', models.CharField(default='0000000', max_length=150, null=True, verbose_name='SBA Loans (2007+)')),
                ('ownsrentsindicator', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Own/Rent')),
                ('Physical_City', models.CharField(default='0000000', max_length=150, null=True, verbose_name='City')),
                ('Physical_State', models.CharField(default='0000000', max_length=150, null=True, verbose_name='State')),
                ('Physical_ZIP', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Zip')),
                ('rating', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Yelp Rating')),
                ('Revenue_Yr', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Revenue (est.)')),
                ('review_count', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Yelp Review Count')),
                ('Square_Footage', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Square Footage')),
                ('street3', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Address')),
                ('sum_loans', models.CharField(default='0000000', max_length=150, null=True, verbose_name='SBA Loan vol. (2007+)')),
                ('Website', models.URLField(default='0000000', max_length=150, null=True, verbose_name='Website')),
                ('X2013_Employees', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Employees (2013 est.)')),
                ('X2013_Revenue_Yr', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Revenue (2013 est.)')),
                ('final_yr_incorporated', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Year of Incorporation')),
                ('url', models.URLField(default='0000000', max_length=150, null=True, verbose_name='Yelp Link')),
                ('centile', models.CharField(default='0000000', max_length=150, null=True, verbose_name='Loan Likelihood Score')),
                ('Created_id', models.IntegerField(db_index=True, null=True, blank=True)),
                ('history_id', models.AutoField(serialize=False, primary_key=True)),
                ('history_date', models.DateTimeField()),
                ('history_type', models.CharField(max_length=1, choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')])),
                ('history_user', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
                'verbose_name': 'historical more_ data_ page',
            },
            bases=(models.Model,),
        ),
    ]
| apache-2.0 |
HarryElSuzio/ShaniXBMCWork | script.video.F4mProxy/lib/utils/pem.py | 92 | 3667 | # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
from .compat import *
import binascii
#This code is shared with tackpy (somewhat), so I'd rather make minimal
#changes, and preserve the use of a2b_base64 throughout.
def dePem(s, name):
    """Extract and base64-decode the payload of the first PEM block in *s*.

    The block must be delimited by "-----BEGIN <name>-----" and
    "-----END <name>-----" markers, e.g. for name="CERTIFICATE"::

        -----BEGIN CERTIFICATE-----
        MIIBXDCCAUSgAwIBAgIBADANBgkqhkiG9w0BAQUFADAPMQ0wCwYDVQQDEwRUQUNL
        ...
        KoZIhvcNAQEFBQADAwA5kw==
        -----END CERTIFICATE-----

    Raises SyntaxError when either marker is missing or the payload is not
    valid base64.
    """
    prefix = "-----BEGIN %s-----" % name
    postfix = "-----END %s-----" % name
    start = s.find(prefix)
    if start == -1:
        raise SyntaxError("Missing PEM prefix")
    end = s.find(postfix, start + len(prefix))
    if end == -1:
        raise SyntaxError("Missing PEM postfix")
    payload = s[start + len(prefix):end]
    return a2b_base64(payload)  # may raise SyntaxError on bad base64
def dePemList(s, name):
    """Decode every PEM block named *name* found in *s*.

    Arbitrary text may surround the blocks; each block is delimited by
    "-----BEGIN <name>-----" / "-----END <name>-----" markers.  Returns an
    ordered list of decoded payload bytearrays, empty when no block is
    present.  Raises SyntaxError when a BEGIN marker has no matching END
    marker or a payload is not valid base64.
    """
    prefix = "-----BEGIN %s-----" % name
    postfix = "-----END %s-----" % name
    payloads = []
    while True:
        start = s.find(prefix)
        if start == -1:
            return payloads
        end = s.find(postfix, start + len(prefix))
        if end == -1:
            raise SyntaxError("Missing PEM postfix")
        body = s[start + len(prefix):end]
        payloads.append(a2b_base64(body))  # may raise SyntaxError
        # Continue scanning after the END marker.
        s = s[end + len(postfix):]
def pem(b, name):
    """Encode a payload bytearray into a PEM string.

    The input is base64 encoded, wrapped to 64-character lines and framed by
    "-----BEGIN <name>-----" / "-----END <name>-----" markers, e.g. for
    name="CERTIFICATE"::

        -----BEGIN CERTIFICATE-----
        MIIBXDCCAUSgAwIBAgIBADANBgkqhkiG9w0BAQUFADAPMQ0wCwYDVQQDEwRUQUNL
        ...
        KoZIhvcNAQEFBQADAwA5kw==
        -----END CERTIFICATE-----
    """
    # b2a_base64 returns bytes; decode once so the str concatenation below
    # works on Python 3 as well (the original str + bytes mix raised
    # TypeError there).
    s1 = b2a_base64(b).decode('ascii')[:-1]  # remove terminating \n
    s2 = ""
    while s1:
        s2 += s1[:64] + "\n"
        s1 = s1[64:]
    s = ("-----BEGIN %s-----\n" % name) + s2 + \
        ("-----END %s-----\n" % name)
    return s
def pemSniff(inStr, name):
    """Return True if *inStr* contains a PEM BEGIN marker for *name*."""
    marker = "-----BEGIN %s-----" % name
    return marker in inStr
| gpl-2.0 |
google-code/arisgames | zxing-master/cpp/scons/scons-local-2.0.0.final.0/SCons/Variables/BoolVariable.py | 34 | 3062 | """engine.SCons.Variables.BoolVariable
This file defines the option type for SCons implementing true/false values.
Usage example:
opts = Variables()
opts.Add(BoolVariable('embedded', 'build for an embedded system', 0))
...
if env['embedded'] == 1:
...
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/BoolVariable.py 5023 2010/06/14 22:05:46 scons"
__all__ = ['BoolVariable',]
import SCons.Errors
# Strings accepted (case-insensitively) as boolean option values.
__true_strings = ('y', 'yes', 'true', 't', '1', 'on', 'all')
__false_strings = ('n', 'no', 'false', 'f', '0', 'off', 'none')


def _text2bool(val):
    """
    Convert *val* to True or False according to the 'truth' it expresses.

    Membership in __true_strings / __false_strings is tested after
    lower-casing; any other string raises ValueError.

    This is usable as 'converter' for SCons' Variables.
    """
    normalized = val.lower()
    if normalized in __true_strings:
        return True
    if normalized in __false_strings:
        return False
    raise ValueError("Invalid value for boolean option: %s" % val)
def _validator(key, val, env):
    """
    Validate that env[key] holds a genuine boolean (True or False).

    Usable as 'validator' for SCons' Variables; raises
    SCons.Errors.UserError for anything else.
    """
    if env[key] not in (True, False):
        raise SCons.Errors.UserError(
            'Invalid value for boolean option %s: %s' % (key, env[key]))
def BoolVariable(key, help, default):
    """
    Describe a boolean build option for opts.Add().

    Returns the (key, help, default, validator, converter) tuple with the
    boolean validator/converter defined above attached and the help text
    extended with '(yes|no)' to show the valid values.
    """
    extended_help = '%s (yes|no)' % help
    return (key, extended_help, default, _validator, _text2bool)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit |
2014c2g4/w16b_test | static/Brython3.1.3-20150514-095342/Lib/site-packages/pygame/base.py | 603 | 4652 | #!/usr/bin/env python
## https://bitbucket.org/pygame/pygame/raw/2383b8ab0e2273bc83c545ab9c18fee1f3459c64/pygame/base.py
'''Pygame core routines
Contains the core routines that are used by the rest of the
pygame modules. Its routines are merged directly into the pygame
namespace. This mainly includes the auto-initialization `init` and
`quit` routines.
There is a small module named `locals` that also gets merged into
this namespace. This contains all the constants needed by pygame.
Object constructors also get placed into this namespace, you can
call functions like `Rect` and `Surface` to create objects of
that type. As a convenience, you can import the members of
pygame.locals directly into your module's namespace with::
from pygame.locals import *
Most of the pygame examples do this if you'd like to take a look.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import atexit
import sys
#import SDL
_quitfunctions = []
class error(RuntimeError):
    # Generic pygame exception; kept lowercase for compatibility with the
    # original pygame C extension's pygame.error.
    pass
def init():
    '''Autoinitialize all imported pygame modules.

    Every imported module exposing a callable ``__PYGAMEinit__`` hook is
    given a chance to initialize itself; the video subsystem is initialized
    first.  Failures are counted rather than raised, so it is safe to call
    this as often as you like.  Most platforms require the display module to
    be initialized before others; initializing by hand instead of via this
    routine leaves that ordering up to you.

    :rtype: int, int
    :return: (count_passed, count_failed)
    '''
    passed = 0
    failed = 0
    if _video_autoinit():
        passed += 1
    else:
        failed += 1
    for module in sys.modules.values():
        hook = getattr(module, '__PYGAMEinit__', None)
        if callable(hook):
            try:
                hook()
                passed += 1
            except:
                failed += 1
    return passed, failed
def register_quit(func):
    '''Register *func* to be called when pygame quits.

    Callbacks run on a 'last in, first out' basis.
    '''
    _quitfunctions.append(func)
def _video_autoquit():
    # The SDL binding is stubbed out in this port (see the commented-out
    # "import SDL" at the top of the file), so the original body raised
    # NameError every time quit()/the atexit hook ran.  Mirror
    # _video_autoinit and make this a no-op, keeping the real calls for
    # reference.
    #if SDL.SDL_WasInit(SDL.SDL_INIT_VIDEO):
    #    SDL.SDL_QuitSubSystem(SDL.SDL_INIT_VIDEO)
    pass
def _video_autoinit():
    # SDL video initialization is stubbed out in this port; always report
    # success.  The real calls are kept below for reference.
    return 1
    #if not SDL.SDL_WasInit(SDL.SDL_INIT_VIDEO):
    #    SDL.SDL_InitSubSystem(SDL.SDL_INIT_VIDEO)
    #    SDL.SDL_EnableUNICODE(1)
    #return 1
def _atexit_quit():
    # Drain registered quit callbacks in LIFO order, then shut the video
    # subsystem down.
    while _quitfunctions:
        _quitfunctions.pop()()
    _video_autoquit()
    #SDL.SDL_Quit()
def get_sdl_version():
    '''Get the version of the linked SDL runtime.

    SDL is stubbed out in this port, so every component is None.

    :rtype: int, int, int
    :return: major, minor, patch
    '''
    #v = SDL.SDL_Linked_Version()
    #return v.major, v.minor, v.patch
    return (None, None, None)
def quit():
    '''Uninitialize all pygame modules.

    Runs every registered quit callback, even for modules you initialized by
    hand.  All modules are uninitialized automatically when the program
    exits, so this is only needed when your program keeps running after it
    is done with pygame.
    '''
    _atexit_quit()
def get_error():
    '''Get the current SDL error message.

    SDL is stubbed out in this port, so this always returns the empty
    string.

    :rtype: str
    '''
    #return SDL.SDL_GetError()
    return ''
def _rgba_from_obj(obj):
    """Normalize a color given as a tuple/list into an (r, g, b, a) value.

    A 3-sequence gets an implicit alpha of 255 (components coerced to int);
    a 1-sequence is unwrapped and normalized recursively; a 4-sequence is
    returned unchanged.  Anything else yields None.
    """
    # isinstance instead of the original `type(obj) in (tuple, list)` so
    # tuple/list subclasses are accepted too (backward compatible).
    if not isinstance(obj, (tuple, list)):
        return None
    if len(obj) == 1:
        return _rgba_from_obj(obj[0])
    elif len(obj) == 3:
        return (int(obj[0]), int(obj[1]), int(obj[2]), 255)
    elif len(obj) == 4:
        return obj
    else:
        return None
atexit.register(_atexit_quit)
| gpl-3.0 |
GdZ/scriptfile | software/googleAppEngine/lib/django_1_2/django/test/client.py | 43 | 18903 | import urllib
from urlparse import urlparse, urlunparse, urlsplit
import sys
import os
import re
import mimetypes
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from django.conf import settings
from django.contrib.auth import authenticate, login
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import WSGIRequest
from django.core.signals import got_request_exception
from django.http import SimpleCookie, HttpRequest, QueryDict
from django.template import TemplateDoesNotExist
from django.test import signals
from django.utils.functional import curry
from django.utils.encoding import smart_str
from django.utils.http import urlencode
from django.utils.importlib import import_module
from django.utils.itercompat import is_iterable
from django.db import transaction, close_connection
from django.test.utils import ContextList
# Fixed boundary string used when composing multipart/form-data bodies.
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
# Extracts the charset parameter from a Content-Type header value.
CONTENT_TYPE_RE = re.compile('.*; charset=([\w\d-]+);?')
class FakePayload(object):
    """
    Wraps StringIO to emulate reading from a network socket: the data cannot
    be seeked, and no more than the declared content length may be consumed.
    This keeps views exercised by the test client honest about what would
    work in Real Life.
    """
    def __init__(self, content):
        self.__content = StringIO(content)
        self.__len = len(content)

    def read(self, num_bytes=None):
        # Reading with no size means "the rest" (or a single probe byte when
        # already exhausted), mirroring how the original computed it.
        if num_bytes is None:
            num_bytes = self.__len or 1
        assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
        chunk = self.__content.read(num_bytes)
        self.__len -= num_bytes
        return chunk
class ClientHandler(BaseHandler):
    """
    A HTTP Handler that can be used for testing purposes.
    Uses the WSGI interface to compose requests, but returns
    the raw HttpResponse object
    """
    def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
        # enforce_csrf_checks: when False, each request is flagged so that
        # CsrfViewMiddleware skips validation (see __call__ below).
        self.enforce_csrf_checks = enforce_csrf_checks
        super(ClientHandler, self).__init__(*args, **kwargs)
    def __call__(self, environ):
        from django.conf import settings
        from django.core import signals
        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._request_middleware is None:
            self.load_middleware()
        signals.request_started.send(sender=self.__class__)
        try:
            request = WSGIRequest(environ)
            # sneaky little hack so that we can easily get round
            # CsrfViewMiddleware. This makes life easier, and is probably
            # required for backwards compatibility with external tests against
            # admin views.
            request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
            response = self.get_response(request)
            # Apply response middleware.
            for middleware_method in self._response_middleware:
                response = middleware_method(request, response)
            response = self.apply_response_fixes(request, response)
        finally:
            # request_finished normally closes the DB connection; disconnect
            # that receiver so the test's connection (and transaction) stays
            # open, fire the signal, then restore the receiver for real use.
            signals.request_finished.disconnect(close_connection)
            signals.request_finished.send(sender=self.__class__)
            signals.request_finished.connect(close_connection)
        return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
    """
    Stores templates and contexts that are rendered.
    """
    # Lazily create the accumulators on first use, then append.
    rendered_templates = store.setdefault('template', [])
    rendered_templates.append(template)
    rendered_contexts = store.setdefault('context', ContextList())
    rendered_contexts.append(context)
def encode_multipart(boundary, data):
    """
    Encodes multipart POST data from a dictionary of form values.

    The key will be used as the form data name; the value will be transmitted
    as content. If the value is a file, the contents of the file will be sent
    as an application/octet-stream; otherwise, str(value) will be sent.
    """
    def to_str(s):
        return smart_str(s, settings.DEFAULT_CHARSET)

    def is_file(thing):
        # Not by any means perfect, but good enough for our purposes.
        return hasattr(thing, "read") and callable(thing.read)

    def plain_value_lines(key, value):
        # The four lines that transmit one non-file form value.
        return [
            '--' + boundary,
            'Content-Disposition: form-data; name="%s"' % to_str(key),
            '',
            to_str(value)
        ]

    # Each bit of the multipart form data could be either a form value or a
    # file, or a *list* of form values and/or files. Remember that HTTP field
    # names can be duplicated!
    lines = []
    for key, value in data.items():
        if is_file(value):
            lines.extend(encode_file(boundary, key, value))
        elif not isinstance(value, basestring) and is_iterable(value):
            for item in value:
                if is_file(item):
                    lines.extend(encode_file(boundary, key, item))
                else:
                    lines.extend(plain_value_lines(key, item))
        else:
            lines.extend(plain_value_lines(key, value))
    lines.extend([
        '--' + boundary + '--',
        '',
    ])
    return '\r\n'.join(lines)
def encode_file(boundary, key, file):
    """
    Encodes a single uploaded file as a list of multipart/form-data lines,
    guessing its content type from the filename (falling back to
    application/octet-stream).
    """
    def encode(value):
        return smart_str(value, settings.DEFAULT_CHARSET)
    content_type = mimetypes.guess_type(file.name)[0]
    if content_type is None:
        content_type = 'application/octet-stream'
    disposition = 'Content-Disposition: form-data; name="%s"; filename="%s"' \
        % (encode(key), encode(os.path.basename(file.name)))
    return [
        '--' + boundary,
        disposition,
        'Content-Type: %s' % content_type,
        '',
        file.read()
    ]
class Client(object):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, **defaults):
self.handler = ClientHandler(enforce_csrf_checks)
self.defaults = defaults
self.cookies = SimpleCookie()
self.exc_info = None
self.errors = StringIO()
def store_exc_info(self, **kwargs):
"""
Stores exceptions when they are generated by a view.
"""
self.exc_info = sys.exc_info()
def _session(self):
"""
Obtains the current session variables.
"""
if 'django.contrib.sessions' in settings.INSTALLED_APPS:
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME, None)
if cookie:
return engine.SessionStore(cookie.value)
return {}
session = property(_session)
def _get_path(self, parsed):
# If there are parameters, add them
if parsed[3]:
return urllib.unquote(parsed[2] + ";" + parsed[3])
else:
return urllib.unquote(parsed[2])
def request(self, **request):
"""
The master request method. Composes the environment dictionary
and passes to the handler, returning the result of the handler.
Assumes defaults for the query environment, which can be overridden
using the arguments to the request.
"""
environ = {
'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
'PATH_INFO': '/',
'QUERY_STRING': '',
'REMOTE_ADDR': '127.0.0.1',
'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'SERVER_NAME': 'testserver',
'SERVER_PORT': '80',
'SERVER_PROTOCOL': 'HTTP/1.1',
'wsgi.version': (1,0),
'wsgi.url_scheme': 'http',
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': False,
'wsgi.run_once': False,
}
environ.update(self.defaults)
environ.update(request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = curry(store_rendered_templates, data)
signals.template_rendered.connect(on_template_render, dispatch_uid="template-render")
# Capture exceptions created by the handler.
got_request_exception.connect(self.store_exc_info, dispatch_uid="request-exception")
try:
try:
response = self.handler(environ)
except TemplateDoesNotExist, e:
# If the view raises an exception, Django will attempt to show
# the 500.html template. If that template is not available,
# we should ignore the error in favor of re-raising the
# underlying exception that caused the 500 error. Any other
# template found to be missing during view error handling
# should be reported as-is.
if e.args != ('500.html',):
raise
# Look for a signalled exception, clear the current context
# exception data, then re-raise the signalled exception.
# Also make sure that the signalled exception is cleared from
# the local cache!
if self.exc_info:
exc_info = self.exc_info
self.exc_info = None
raise exc_info[1], None, exc_info[2]
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
# If there was only one template rendered (the most likely case),
# flatten the list to a single element.
for detail in ('template', 'context'):
if data.get(detail):
if len(data[detail]) == 1:
setattr(response, detail, data[detail][0]);
else:
setattr(response, detail, data[detail])
else:
setattr(response, detail, None)
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
finally:
signals.template_rendered.disconnect(dispatch_uid="template-render")
got_request_exception.disconnect(dispatch_uid="request-exception")
def get(self, path, data={}, follow=False, **extra):
"""
Requests a response from the server using GET.
"""
parsed = urlparse(path)
r = {
'CONTENT_TYPE': 'text/html; charset=utf-8',
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': urlencode(data, doseq=True) or parsed[4],
'REQUEST_METHOD': 'GET',
'wsgi.input': FakePayload('')
}
r.update(extra)
response = self.request(**r)
if follow:
response = self._handle_redirects(response, **extra)
return response
def post(self, path, data={}, content_type=MULTIPART_CONTENT,
follow=False, **extra):
"""
Requests a response from the server using POST.
"""
if content_type is MULTIPART_CONTENT:
post_data = encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
post_data = smart_str(data, encoding=charset)
parsed = urlparse(path)
r = {
'CONTENT_LENGTH': len(post_data),
'CONTENT_TYPE': content_type,
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': parsed[4],
'REQUEST_METHOD': 'POST',
'wsgi.input': FakePayload(post_data),
}
r.update(extra)
response = self.request(**r)
if follow:
response = self._handle_redirects(response, **extra)
return response
def head(self, path, data={}, follow=False, **extra):
"""
Request a response from the server using HEAD.
"""
parsed = urlparse(path)
r = {
'CONTENT_TYPE': 'text/html; charset=utf-8',
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': urlencode(data, doseq=True) or parsed[4],
'REQUEST_METHOD': 'HEAD',
'wsgi.input': FakePayload('')
}
r.update(extra)
response = self.request(**r)
if follow:
response = self._handle_redirects(response, **extra)
return response
def options(self, path, data={}, follow=False, **extra):
"""
Request a response from the server using OPTIONS.
"""
parsed = urlparse(path)
r = {
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': urlencode(data, doseq=True) or parsed[4],
'REQUEST_METHOD': 'OPTIONS',
'wsgi.input': FakePayload('')
}
r.update(extra)
response = self.request(**r)
if follow:
response = self._handle_redirects(response, **extra)
return response
def put(self, path, data={}, content_type=MULTIPART_CONTENT,
follow=False, **extra):
"""
Send a resource to the server using PUT.
"""
if content_type is MULTIPART_CONTENT:
post_data = encode_multipart(BOUNDARY, data)
else:
post_data = data
# Make `data` into a querystring only if it's not already a string. If
# it is a string, we'll assume that the caller has already encoded it.
query_string = None
if not isinstance(data, basestring):
query_string = urlencode(data, doseq=True)
parsed = urlparse(path)
r = {
'CONTENT_LENGTH': len(post_data),
'CONTENT_TYPE': content_type,
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': query_string or parsed[4],
'REQUEST_METHOD': 'PUT',
'wsgi.input': FakePayload(post_data),
}
r.update(extra)
response = self.request(**r)
if follow:
response = self._handle_redirects(response, **extra)
return response
def delete(self, path, data={}, follow=False, **extra):
"""
Send a DELETE request to the server.
"""
parsed = urlparse(path)
r = {
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': urlencode(data, doseq=True) or parsed[4],
'REQUEST_METHOD': 'DELETE',
'wsgi.input': FakePayload('')
}
r.update(extra)
response = self.request(**r)
if follow:
response = self._handle_redirects(response, **extra)
return response
def login(self, **credentials):
"""
Sets the Client to appear as if it has successfully logged into a site.
Returns True if login is possible; False if the provided credentials
are incorrect, or the user is inactive, or if the sessions framework is
not available.
"""
user = authenticate(**credentials)
if user and user.is_active \
and 'django.contrib.sessions' in settings.INSTALLED_APPS:
engine = import_module(settings.SESSION_ENGINE)
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
request.session = engine.SessionStore()
login(request, user)
# Save the session values.
request.session.save()
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
'max-age': None,
'path': '/',
'domain': settings.SESSION_COOKIE_DOMAIN,
'secure': settings.SESSION_COOKIE_SECURE or None,
'expires': None,
}
self.cookies[session_cookie].update(cookie_data)
return True
else:
return False
def logout(self):
"""
Removes the authenticated user's cookies and session object.
Causes the authenticated user to be logged out.
"""
session = import_module(settings.SESSION_ENGINE).SessionStore()
session_cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
if session_cookie:
session.delete(session_key=session_cookie.value)
self.cookies = SimpleCookie()
def _handle_redirects(self, response, **extra):
"Follows any redirects by requesting responses from the server using GET."
response.redirect_chain = []
while response.status_code in (301, 302, 303, 307):
url = response['Location']
scheme, netloc, path, query, fragment = urlsplit(url)
redirect_chain = response.redirect_chain
redirect_chain.append((url, response.status_code))
if scheme:
extra['wsgi.url_scheme'] = scheme
# The test client doesn't handle external links,
# but since the situation is simulated in test_client,
# we fake things here by ignoring the netloc portion of the
# redirected URL.
response = self.get(path, QueryDict(query), follow=False, **extra)
response.redirect_chain = redirect_chain
# Prevent loops
if response.redirect_chain[-1] in response.redirect_chain[0:-1]:
break
return response
| mit |
darkshark007/PoGoCollection | Data/species.py | 1 | 86630 | import moves
class Species:
    """
    A Pokemon species record: stats, typing, evolution links and move
    pools, looked up by name from the raw game-master data.
    """

    def __init__(self, name):
        """
        Populate this instance from the raw record for ``name``.

        If the name is unknown, only ``self.Id`` is set (to -1) so that
        callers can detect the failed lookup.
        """
        species_data = get_species_data_from_species(name)
        # ``is None`` rather than ``== None`` (identity check for the
        # not-found sentinel).
        if species_data is None:
            self.Id = -1
            return
        self.Name = species_data[SPECIES_KEYS.Name]
        self.Family = species_data[SPECIES_KEYS.Family]
        self.Id = species_data[SPECIES_KEYS.Id]
        self.HP = species_data[SPECIES_KEYS.HP]
        self.Attack = species_data[SPECIES_KEYS.Attack]
        self.Defense = species_data[SPECIES_KEYS.Defense]
        self.Min_CP = species_data[SPECIES_KEYS.Min_CP]
        self.Max_CP = species_data[SPECIES_KEYS.Max_CP]
        self.Type1 = species_data[SPECIES_KEYS.Type1]
        self.Type2 = species_data[SPECIES_KEYS.Type2]
        self.Evolves_Into = species_data[SPECIES_KEYS.Evolves_Into]
        self.Evolves_From = species_data[SPECIES_KEYS.Evolves_From]
        self.Quick_Moves = species_data[SPECIES_KEYS.Quick_Moves]
        self.Charge_Moves = species_data[SPECIES_KEYS.Charge_Moves]

    @staticmethod
    def _get_species_from_species_name(name):
        """
        Build and return a Species for ``name``.

        Bug fix: the original called ``Species()`` without the required
        ``name`` argument (a TypeError at runtime) and returned nothing.
        """
        return Species(name)
class SPECIES_KEYS:
    """Positional indices for the fields of a RAW_SPECIES_DATA row."""
    (Name,
     Family,
     Id,
     HP,
     Attack,
     Defense,
     Min_CP,
     Max_CP,
     Type1,
     Type2,
     Evolves_Into,
     Evolves_From,
     Quick_Moves,
     Charge_Moves) = range(14)
# Adapted from the GAME_MASTER_FILE Json Output at:
# https://github.com/BrunnerLivio/pokemongo-game-master
# https://raw.githubusercontent.com/pokemongo-dev-contrib/pokemongo-game-master/master/versions/latest/GAME_MASTER.json
RAW_SPECIES_DATA = [
["Bulbasaur","1",1,90,118,118,764,988,"Grass","Poison",["Ivysaur"],"",["Vine Whip", "Tackle"],["Sludge Bomb", "Seed Bomb", "Power Whip"]],
["Ivysaur","1",2,120,151,151,1278,1563,"Grass","Poison",["Venusaur"],"Bulbasaur",["Razor Leaf", "Vine Whip"],["Sludge Bomb", "Solar Beam", "Power Whip"]],
["Venusaur","1",3,160,198,198,2216,2586,"Grass","Poison",[""],"Ivysaur",["Razor Leaf", "Vine Whip"],["Sludge Bomb", "Petal Blizzard", "Solar Beam"]],
["Charmander","4",4,78,116,96,631,837,"Fire","",["Charmeleon"],"",["Ember", "Scratch"],["Flame Charge", "Flame Burst", "Flamethrower"]],
["Charmeleon","4",5,116,158,129,1215,1494,"Fire","",["Charizard"],"Charmander",["Ember", "Fire Fang"],["Fire Punch", "Flame Burst", "Flamethrower"]],
["Charizard","4",6,156,223,176,2324,2705,"Fire","Flying",[""],"Charmeleon",["Fire Spin", "Air Slash"],["Fire Blast", "Dragon Claw", "Overheat"]],
["Squirtle","7",7,88,94,122,612,814,"Water","",["Wartortle"],"",["Bubble", "Tackle"],["Aqua Jet", "Aqua Tail", "Water Pulse"]],
["Wartortle","7",8,118,126,155,1071,1333,"Water","",["Blastoise"],"Squirtle",["Water Gun", "Bite"],["Aqua Jet", "Ice Beam", "Hydro Pump"]],
["Blastoise","7",9,158,171,210,1959,2308,"Water","",[""],"Wartortle",["Water Gun", "Bite"],["Flash Cannon", "Ice Beam", "Hydro Pump"]],
["Caterpie","10",10,90,55,62,258,395,"Bug","",["Metapod"],"",["Bug Bite", "Tackle"],["Struggle"]],
["Metapod","10",11,100,45,94,274,422,"Bug","",["Butterfree"],"Caterpie",["Bug Bite", "Tackle"],["Struggle"]],
["Butterfree","10",12,120,167,151,1414,1713,"Bug","Flying",[""],"Metapod",["Struggle Bug", "Confusion"],["Bug Buzz", "Psychic", "Signal Beam"]],
["Weedle","13",13,80,63,55,262,400,"Bug","Poison",["Kakuna"],"",["Bug Bite", "Poison Sting"],["Struggle"]],
["Kakuna","13",14,90,46,86,254,395,"Bug","Poison",["Beedrill"],"Weedle",["Bug Bite", "Poison Sting"],["Struggle"]],
["Beedrill","13",15,130,169,150,1484,1790,"Bug","Poison",[""],"Kakuna",["Infestation", "Poison Jab"],["Sludge Bomb", "Aerial Ace", "X-Scissor"]],
["Pidgey","16",16,80,85,76,416,584,"Normal","Flying",["Pidgeotto"],"",["Quick Attack", "Tackle"],["Twister", "Aerial Ace", "Air Cutter"]],
["Pidgeotto","16",17,126,117,108,858,1093,"Normal","Flying",["Pidgeot"],"Pidgey",["Wing Attack", "Steel Wing"],["Twister", "Aerial Ace", "Air Cutter"]],
["Pidgeot","16",18,166,166,157,1685,2008,"Normal","Flying",[""],"Pidgeotto",["Air Slash", "Steel Wing"],["Hurricane", "Aerial Ace", "Brave Bird"]],
["Rattata","19",19,60,103,70,419,592,"Normal","",["Raticate"],"",["Tackle", "Quick Attack"],["Dig", "Hyper Fang", "Body Slam"]],
["Rattata_alola","19_alola",19,60,103,70,419,592,"Dark","Normal",["Raticate"],"",["Tackle", "Quick Attack"],["Crunch", "Hyper Fang", "Shadow Ball"]],
["Rattata_normal","19_normal",19,60,103,70,419,592,"Normal","",["Raticate"],"",["Tackle", "Quick Attack"],["Dig", "Hyper Fang", "Body Slam"]],
["Raticate","19",20,110,161,144,1274,1560,"Normal","",[""],"Rattata",["Bite", "Quick Attack"],["Dig", "Hyper Fang", "Hyper Beam"]],
["Raticate_alola","19_alola",20,150,135,159,1311,1598,"Dark","Normal",[""],"Rattata",["Bite", "Quick Attack"],["Crunch", "Hyper Fang", "Hyper Beam"]],
["Raticate_normal","19_normal",20,110,161,144,1274,1560,"Normal","",[""],"Rattata",["Bite", "Quick Attack"],["Dig", "Hyper Fang", "Hyper Beam"]],
["Spearow","21",21,80,112,61,492,678,"Normal","Flying",["Fearow"],"",["Peck", "Quick Attack"],["Aerial Ace", "Drill Peck", "Sky Attack"]],
["Fearow","21",22,130,182,135,1516,1827,"Normal","Flying",[""],"Spearow",["Peck", "Steel Wing"],["Aerial Ace", "Drill Run", "Sky Attack"]],
["Ekans","23",23,70,110,102,584,784,"Poison","",["Arbok"],"",["Poison Sting", "Acid"],["Wrap", "Poison Fang", "Sludge Bomb"]],
["Arbok","23",24,120,167,158,1446,1749,"Poison","",[""],"Ekans",["Bite", "Acid"],["Dark Pulse", "Sludge Wave", "Gunk Shot"]],
["Pikachu","25",25,70,112,101,592,793,"Electric","",["Raichu"],"Pichu",["Thunder Shock", "Quick Attack"],["Discharge", "Thunderbolt", "Wild Charge"]],
["Raichu","25",26,120,193,165,1708,2039,"Electric","",[""],"Pikachu",["Volt Switch", "Spark"],["Brick Break", "Thunder Punch", "Wild Charge"]],
["Raichu_alola","25_alola",26,120,201,172,1816,2158,"Electric","Psychic",[""],"Pikachu",["Volt Switch", "Spark"],["Psychic", "Thunder Punch", "Wild Charge"]],
["Raichu_normal","25_normal",26,120,193,165,1708,2039,"Electric","",[""],"Pikachu",["Volt Switch", "Spark"],["Brick Break", "Thunder Punch", "Wild Charge"]],
["Sandshrew","27",27,100,126,145,954,1203,"Ground","",["Sandslash"],"",["Scratch", "Mud Shot"],["Dig", "Rock Slide", "Sand Tomb"]],
["Sandshrew_alola","27_alola",27,100,125,154,975,1227,"Ice","Steel",["Sandslash"],"",["Metal Claw", "Powder Snow"],["Blizzard", "Gyro Ball", "Night Slash"]],
["Sandshrew_normal","27_normal",27,100,126,145,954,1203,"Ground","",["Sandslash"],"",["Scratch", "Mud Shot"],["Dig", "Rock Slide", "Sand Tomb"]],
["Sandslash","27",28,150,182,202,1992,2344,"Ground","",[""],"Sandshrew",["Metal Claw", "Mud Shot"],["Earthquake", "Rock Tomb", "Bulldoze"]],
["Sandslash_alola","27_alola",28,150,177,221,2027,2383,"Ice","Steel",[""],"Sandshrew",["Metal Claw", "Powder Snow"],["Blizzard", "Gyro Ball", "Bulldoze"]],
["Sandslash_normal","27_normal",28,150,182,202,1992,2344,"Ground","",[""],"Sandshrew",["Metal Claw", "Mud Shot"],["Earthquake", "Rock Tomb", "Bulldoze"]],
["Nidoran F","29",29,110,86,94,550,741,"Poison","",["Nidorina"],"",["Bite", "Poison Sting"],["Poison Fang", "Body Slam", "Sludge Bomb"]],
["Nidorina","29",30,140,117,126,977,1227,"Poison","",["Nidoqueen"],"Nidoran F",["Bite", "Poison Sting"],["Poison Fang", "Dig", "Sludge Bomb"]],
["Nidoqueen","29",31,180,180,174,2003,2354,"Poison","Ground",[""],"Nidorina",["Poison Jab", "Bite"],["Earthquake", "Sludge Wave", "Stone Edge"]],
["Nidoran M","32",32,92,105,76,552,744,"Poison","",["Nidorino"],"",["Peck", "Poison Sting"],["Horn Attack", "Body Slam", "Sludge Bomb"]],
["Nidorino","32",33,122,137,112,1007,1261,"Poison","",["Nidoking"],"Nidoran M",["Poison Jab", "Poison Sting"],["Horn Attack", "Dig", "Sludge Bomb"]],
["Nidoking","32",34,162,204,157,2046,2403,"Poison","Ground",[""],"Nidorino",["Poison Jab", "Iron Tail"],["Earthquake", "Sludge Wave", "Megahorn"]],
["Clefairy","35",35,140,107,116,857,1093,"Fairy","",["Clefable"],"Cleffa",["Pound", "Zen Headbutt"],["Disarming Voice", "Body Slam", "Moonblast"]],
["Clefable","35",36,190,178,171,2018,2370,"Fairy","",[""],"Clefairy",["Charge Beam", "Zen Headbutt"],["Dazzling Gleam", "Psychic", "Moonblast"]],
["Vulpix","37",37,76,96,122,581,779,"Fire","",["Ninetales"],"",["Quick Attack", "Ember"],["Body Slam", "Flamethrower", "Flame Charge"]],
["Vulpix_alola","37_alola",37,76,96,122,581,779,"Ice","",["Ninetales"],"",["Zen Headbutt", "Powder Snow"],["Dark Pulse", "Ice Beam", "Blizzard"]],
["Vulpix_normal","37_normal",37,76,96,122,581,779,"Fire","",["Ninetales"],"",["Quick Attack", "Ember"],["Body Slam", "Flamethrower", "Flame Charge"]],
["Ninetales","37",38,146,169,204,1834,2173,"Fire","",[""],"Vulpix",["Feint Attack", "Fire Spin"],["Heat Wave", "Overheat", "Solar Beam"]],
["Ninetales_alola","37_alola",38,146,170,207,1859,2200,"Ice","Fairy",[""],"Vulpix",["Feint Attack", "Powder Snow"],["Dazzling Gleam", "Ice Beam", "Blizzard"]],
["Ninetales_normal","37_normal",38,146,169,204,1834,2173,"Fire","",[""],"Vulpix",["Feint Attack", "Fire Spin"],["Heat Wave", "Overheat", "Solar Beam"]],
["Jigglypuff","39",39,230,80,44,506,718,"Normal","Fairy",["Wigglytuff"],"Igglybuff",["Pound", "Feint Attack"],["Disarming Voice", "Gyro Ball", "Dazzling Gleam"]],
["Wigglytuff","39",40,280,156,93,1583,1919,"Normal","Fairy",[""],"Jigglypuff",["Pound", "Feint Attack"],["Dazzling Gleam", "Hyper Beam", "Play Rough"]],
["Zubat","41",41,80,83,76,407,573,"Poison","Flying",["Golbat"],"",["Quick Attack", "Bite"],["Poison Fang", "Air Cutter", "Swift"]],
["Golbat","41",42,150,161,153,1534,1843,"Poison","Flying",["Crobat"],"Zubat",["Wing Attack", "Bite"],["Shadow Ball", "Air Cutter", "Poison Fang"]],
["Oddish","43",43,90,131,116,841,1077,"Grass","Poison",["Gloom"],"",["Razor Leaf", "Acid"],["Seed Bomb", "Sludge Bomb", "Moonblast"]],
["Gloom","43",44,120,153,139,1242,1523,"Grass","Poison",["Vileplume","Bellossom"],"Oddish",["Razor Leaf", "Acid"],["Petal Blizzard", "Sludge Bomb", "Moonblast"]],
["Vileplume","43",45,150,202,170,2029,2384,"Grass","Poison",[""],"Gloom",["Razor Leaf", "Acid"],["Petal Blizzard", "Solar Beam", "Moonblast"]],
["Paras","46",46,70,121,99,633,842,"Bug","Grass",["Parasect"],"",["Scratch", "Bug Bite"],["Cross Poison", "X-Scissor", "Seed Bomb"]],
["Parasect","46",47,120,165,146,1373,1669,"Bug","Grass",[""],"Paras",["Struggle Bug", "Fury Cutter"],["Cross Poison", "X-Scissor", "Solar Beam"]],
["Venonat","48",48,120,100,102,695,909,"Bug","Poison",["Venomoth"],"",["Bug Bite", "Confusion"],["Poison Fang", "Psybeam", "Signal Beam"]],
["Venomoth","48",49,140,179,150,1631,1951,"Bug","Poison",[""],"Venonat",["Infestation", "Confusion"],["Silver Wind", "Psychic", "Bug Buzz"]],
["Diglett","50",50,20,109,88,287,468,"Ground","",["Dugtrio"],"",["Mud Slap", "Scratch"],["Dig", "Mud Bomb", "Rock Tomb"]],
["Diglett_alola","50_alola",50,20,109,89,289,470,"Ground","Steel",["Dugtrio"],"",["Mud Slap", "Metal Claw"],["Dig", "Mud Bomb", "Rock Tomb"]],
["Diglett_normal","50_normal",50,20,109,88,287,468,"Ground","",["Dugtrio"],"",["Mud Slap", "Scratch"],["Dig", "Mud Bomb", "Rock Tomb"]],
["Dugtrio","50",51,70,167,147,1065,1343,"Ground","",[""],"Diglett",["Sucker Punch", "Mud Slap"],["Earthquake", "Mud Bomb", "Stone Edge"]],
["Dugtrio_alola","50_alola",51,70,201,148,1286,1599,"Ground","Steel",[""],"Diglett",["Metal Claw", "Mud Slap"],["Earthquake", "Mud Bomb", "Iron Head"]],
["Dugtrio_normal","50_normal",51,70,167,147,1065,1343,"Ground","",[""],"Diglett",["Sucker Punch", "Mud Slap"],["Earthquake", "Mud Bomb", "Stone Edge"]],
["Meowth","52",52,80,92,81,465,642,"Normal","",["Persian"],"",["Scratch", "Bite"],["Night Slash", "Dark Pulse", "Foul Play"]],
["Meowth_alola","52_alola",52,80,99,81,501,684,"Dark","",["Persian"],"",["Scratch", "Bite"],["Night Slash", "Dark Pulse", "Foul Play"]],
["Meowth_normal","52_normal",52,80,92,81,465,642,"Normal","",["Persian"],"",["Scratch", "Bite"],["Night Slash", "Dark Pulse", "Foul Play"]],
["Persian","52",53,130,150,139,1268,1550,"Normal","",[""],"Meowth",["Scratch", "Feint Attack"],["Foul Play", "Power Gem", "Play Rough"]],
["Persian_alola","52_alola",53,130,158,139,1336,1626,"Dark","",[""],"Meowth",["Scratch", "Feint Attack"],["Foul Play", "Dark Pulse", "Play Rough"]],
["Persian_normal","52_normal",53,130,150,139,1268,1550,"Normal","",[""],"Meowth",["Scratch", "Feint Attack"],["Foul Play", "Power Gem", "Play Rough"]],
["Psyduck","54",54,100,122,96,751,973,"Water","",["Golduck"],"",["Water Gun", "Zen Headbutt"],["Psybeam", "Aqua Tail", "Cross Chop"]],
["Golduck","54",55,160,191,163,1940,2287,"Water","",[""],"Psyduck",["Water Gun", "Confusion"],["Psychic", "Hydro Pump", "Ice Beam"]],
["Mankey","56",56,80,148,87,776,1009,"Fighting","",["Primeape"],"",["Karate Chop", "Scratch"],["Cross Chop", "Low Sweep", "Brick Break"]],
["Primeape","56",57,130,207,144,1781,2120,"Fighting","",[""],"Mankey",["Low Kick", "Counter"],["Close Combat", "Low Sweep", "Night Slash"]],
["Growlithe","58",58,110,136,96,879,1118,"Fire","",["Arcanine"],"",["Ember", "Bite"],["Flame Wheel", "Body Slam", "Flamethrower"]],
["Arcanine","58",59,180,227,166,2468,2859,"Fire","",[""],"Growlithe",["Fire Fang", "Snarl"],["Fire Blast", "Wild Charge", "Crunch"]],
["Poliwag","60",60,80,101,82,514,700,"Water","",["Poliwhirl"],"",["Bubble", "Mud Shot"],["Bubble Beam", "Mud Bomb", "Body Slam"]],
["Poliwhirl","60",61,130,130,130,1063,1322,"Water","",["Poliwrath","Politoed"],"Poliwag",["Bubble", "Mud Shot"],["Water Pulse", "Mud Bomb", "Bubble Beam"]],
["Poliwrath","60",62,180,182,187,2100,2459,"Water","Fighting",[""],"Poliwhirl",["Bubble", "Rock Smash"],["Hydro Pump", "Dynamic Punch", "Ice Punch"]],
["Abra","63",63,50,195,103,880,1156,"Psychic","",["Kadabra"],"",["Zen Headbutt", "Charge Beam"],["Psyshock", "Signal Beam", "Shadow Ball"]],
["Kadabra","63",64,80,232,138,1533,1873,"Psychic","",["Alakazam"],"Abra",["Psycho Cut", "Confusion"],["Psybeam", "Dazzling Gleam", "Shadow Ball"]],
["Alakazam","63",65,110,271,194,2490,2907,"Psychic","",[""],"Kadabra",["Psycho Cut", "Confusion"],["Futuresight", "Focus Blast", "Shadow Ball"]],
["Machop","66",66,140,137,88,956,1208,"Fighting","",["Machoke"],"",["Rock Smash", "Karate Chop"],["Low Sweep", "Brick Break", "Cross Chop"]],
["Machoke","66",67,160,177,130,1605,1923,"Fighting","",["Machamp"],"Machop",["Low Kick", "Karate Chop"],["Submission", "Brick Break", "Dynamic Punch"]],
["Machamp","66",68,180,234,162,2513,2909,"Fighting","",[""],"Machoke",["Bullet Punch", "Counter"],["Heavy Slam", "Dynamic Punch", "Close Combat"]],
["Bellsprout","69",69,100,139,64,699,923,"Grass","Poison",["Weepinbell"],"",["Vine Whip", "Acid"],["Power Whip", "Sludge Bomb", "Wrap"]],
["Weepinbell","69",70,130,172,95,1202,1485,"Grass","Poison",["Victreebel"],"Bellsprout",["Bullet Seed", "Acid"],["Power Whip", "Sludge Bomb", "Seed Bomb"]],
["Victreebel","69",71,160,207,138,1934,2285,"Grass","Poison",[""],"Weepinbell",["Razor Leaf", "Acid"],["Leaf Blade", "Sludge Bomb", "Solar Beam"]],
["Tentacool","72",72,80,97,182,736,963,"Water","Poison",["Tentacruel"],"",["Bubble", "Poison Sting"],["Bubble Beam", "Water Pulse", "Wrap"]],
["Tentacruel","72",73,160,166,237,2033,2390,"Water","Poison",[""],"Tentacool",["Acid", "Poison Jab"],["Hydro Pump", "Sludge Wave", "Blizzard"]],
["Geodude","74",74,80,132,163,948,1202,"Rock","Ground",["Graveler"],"",["Rock Throw", "Tackle"],["Rock Slide", "Rock Tomb", "Dig"]],
["Geodude_alola","74_alola",74,80,132,163,948,1202,"Rock","Electric",["Graveler"],"",["Rock Throw", "Volt Switch"],["Rock Slide", "Rock Tomb", "Thunderbolt"]],
["Geodude_normal","74_normal",74,80,132,163,948,1202,"Rock","Ground",["Graveler"],"",["Rock Throw", "Tackle"],["Rock Slide", "Rock Tomb", "Dig"]],
["Graveler","74",75,110,164,196,1514,1828,"Rock","Ground",["Golem"],"Geodude",["Rock Throw", "Mud Slap"],["Dig", "Stone Edge", "Rock Blast"]],
["Graveler_alola","74_alola",75,110,164,196,1514,1828,"Rock","Electric",["Golem"],"Geodude",["Rock Throw", "Volt Switch"],["Thunderbolt", "Stone Edge", "Rock Blast"]],
["Graveler_normal","74_normal",75,110,164,196,1514,1828,"Rock","Ground",["Golem"],"Geodude",["Rock Throw", "Mud Slap"],["Dig", "Stone Edge", "Rock Blast"]],
["Golem","74",76,160,211,229,2540,2937,"Rock","Ground",[""],"Graveler",["Rock Throw", "Mud Slap"],["Stone Edge", "Rock Blast", "Earthquake"]],
["Golem_alola","74_alola",76,160,211,229,2540,2937,"Rock","Electric",[""],"Graveler",["Rock Throw", "Volt Switch"],["Stone Edge", "Rock Blast", "Wild Charge"]],
["Golem_normal","74_normal",76,160,211,229,2540,2937,"Rock","Ground",[""],"Graveler",["Rock Throw", "Mud Slap"],["Stone Edge", "Rock Blast", "Earthquake"]],
["Ponyta","77",77,100,170,132,1228,1513,"Fire","",["Rapidash"],"",["Tackle", "Ember"],["Flame Charge", "Flame Wheel", "Stomp"]],
["Rapidash","77",78,130,207,167,1918,2268,"Fire","",[""],"Ponyta",["Low Kick", "Fire Spin"],["Fire Blast", "Drill Run", "Heat Wave"]],
["Slowpoke","79",79,180,109,109,960,1212,"Water","Psychic",["Slowbro","Slowking"],"",["Water Gun", "Confusion"],["Water Pulse", "Psyshock", "Psychic"]],
["Slowbro","79",80,190,177,194,2137,2499,"Water","Psychic",[""],"Slowpoke",["Water Gun", "Confusion"],["Water Pulse", "Psychic", "Ice Beam"]],
["Magnemite","81",81,50,165,128,830,1091,"Electric","Steel",["Magneton"],"",["Spark", "Thunder Shock"],["Discharge", "Magnet Bomb", "Thunderbolt"]],
["Magneton","81",82,100,223,182,1892,2253,"Electric","Steel",[""],"Magnemite",["Spark", "Charge Beam"],["Zap Cannon", "Magnet Bomb", "Flash Cannon"]],
["Farfetchd","83",83,104,124,118,864,1099,"Normal","Flying",[""],"",["Air Slash", "Fury Cutter"],["Aerial Ace", "Air Cutter", "Leaf Blade"]],
["Doduo","84",84,70,158,88,780,1018,"Normal","Flying",["Dodrio"],"",["Peck", "Quick Attack"],["Drill Peck", "Aerial Ace", "Brave Bird"]],
["Dodrio","84",85,120,218,145,1808,2154,"Normal","Flying",[""],"Doduo",["Feint Attack", "Steel Wing"],["Drill Peck", "Aerial Ace", "Brave Bird"]],
["Seel","86",86,130,85,128,689,905,"Water","",["Dewgong"],"",["Ice Shard", "Lick"],["Aurora Beam", "Icy Wind", "Aqua Tail"]],
["Dewgong","86",87,180,139,184,1591,1908,"Water","Ice",[""],"Seel",["Frost Breath", "Iron Tail"],["Aurora Beam", "Water Pulse", "Blizzard"]],
["Grimer","88",88,160,135,90,1019,1279,"Poison","",["Muk"],"",["Poison Jab", "Mud Slap"],["Sludge", "Mud Bomb", "Sludge Bomb"]],
["Grimer_alola","88_alola",88,160,135,90,1019,1279,"Poison","Dark",["Muk"],"",["Poison Jab", "Bite"],["Crunch", "Gunk Shot", "Sludge Bomb"]],
["Grimer_normal","88_normal",88,160,135,90,1019,1279,"Poison","",["Muk"],"",["Poison Jab", "Mud Slap"],["Sludge", "Mud Bomb", "Sludge Bomb"]],
["Muk","88",89,210,190,184,2349,2728,"Poison","",[""],"Grimer",["Infestation", "Poison Jab"],["Dark Pulse", "Gunk Shot", "Sludge Wave"]],
["Muk_alola","88_alola",89,210,190,184,2349,2728,"Poison","Dark",[""],"Grimer",["Bite", "Poison Jab"],["Dark Pulse", "Gunk Shot", "Sludge Wave"]],
["Muk_normal","88_normal",89,210,190,184,2349,2728,"Poison","",[""],"Grimer",["Infestation", "Poison Jab"],["Dark Pulse", "Gunk Shot", "Sludge Wave"]],
["Shellder","90",90,60,116,168,732,965,"Water","",["Cloyster"],"",["Ice Shard", "Tackle"],["Bubble Beam", "Water Pulse", "Icy Wind"]],
["Cloyster","90",91,100,186,323,2102,2492,"Water","Ice",[""],"Shellder",["Frost Breath", "Ice Shard"],["Aurora Beam", "Hydro Pump", "Avalanche"]],
["Gastly","92",92,60,186,70,758,1009,"Ghost","Poison",["Haunter"],"",["Lick", "Astonish"],["Night Shade", "Dark Pulse", "Sludge Bomb"]],
["Haunter","92",93,90,223,112,1408,1728,"Ghost","Poison",["Gengar"],"Gastly",["Shadow Claw", "Astonish"],["Shadow Punch", "Dark Pulse", "Sludge Bomb"]],
["Gengar","92",94,120,261,156,2246,2637,"Ghost","Poison",[""],"Haunter",["Sucker Punch", "Hex"],["Shadow Ball", "Focus Blast", "Sludge Bomb"]],
["Onix","95",95,70,85,288,759,1009,"Rock","Ground",["Steelix"],"",["Rock Throw", "Tackle"],["Sand Tomb", "Stone Edge", "Heavy Slam"]],
["Drowzee","96",96,120,89,158,770,999,"Psychic","",["Hypno"],"",["Pound", "Confusion"],["Psybeam", "Psyshock", "Psychic"]],
["Hypno","96",97,170,144,215,1731,2063,"Psychic","",[""],"Drowzee",["Zen Headbutt", "Confusion"],["Futuresight", "Psychic", "Focus Blast"]],
["Krabby","98",98,60,181,156,1101,1396,"Water","",["Kingler"],"",["Bubble", "Mud Shot"],["Vice Grip", "Bubble Beam", "Water Pulse"]],
["Kingler","98",99,110,240,214,2316,2713,"Water","",[""],"Krabby",["Bubble", "Metal Claw"],["Vice Grip", "X-Scissor", "Water Pulse"]],
["Voltorb","100",100,80,109,114,654,863,"Electric","",["Electrode"],"",["Spark", "Tackle"],["Discharge", "Thunderbolt", "Gyro Ball"]],
["Electrode","100",101,120,173,179,1594,1913,"Electric","",[""],"Voltorb",["Spark", "Volt Switch"],["Discharge", "Thunderbolt", "Hyper Beam"]],
["Exeggcute","102",102,120,107,140,872,1110,"Grass","Psychic",["Exeggutor"],"",["Confusion", "Bullet Seed"],["Seed Bomb", "Psychic", "Ancient Power"]],
["Exeggutor","102",103,190,233,158,2539,2937,"Grass","Psychic",[""],"Exeggcute",["Bullet Seed", "Extrasensory"],["Seed Bomb", "Psychic", "Solar Beam"]],
["Exeggutor_alola","102_alola",103,190,230,158,2506,2902,"Grass","Dragon",[""],"Exeggcute",["Bullet Seed", "Dragon Tail"],["Seed Bomb", "Dragon Pulse", "Solar Beam"]],
["Exeggutor_normal","102_normal",103,190,233,158,2539,2937,"Grass","Psychic",[""],"Exeggcute",["Bullet Seed", "Extrasensory"],["Seed Bomb", "Psychic", "Solar Beam"]],
["Cubone","104",104,100,90,165,727,950,"Ground","",["Marowak"],"",["Mud Slap", "Rock Smash"],["Bone Club", "Dig", "Bulldoze"]],
["Marowak","104",105,120,144,200,1403,1703,"Ground","",[""],"Cubone",["Mud Slap", "Rock Smash"],["Bone Club", "Dig", "Earthquake"]],
["Marowak_alola","104_alola",105,120,144,200,1403,1703,"Fire","Ghost",[""],"Cubone",["Hex", "Rock Smash"],["Bone Club", "Shadow Ball", "Fire Blast"]],
["Marowak_normal","104_normal",105,120,144,200,1403,1703,"Ground","",[""],"Cubone",["Mud Slap", "Rock Smash"],["Bone Club", "Dig", "Earthquake"]],
["Hitmonlee","236",106,100,224,211,2046,2423,"Fighting","",[""],"Tyrogue",["Low Kick", "Rock Smash"],["Close Combat", "Low Sweep", "Stone Edge"]],
["Hitmonchan","236",107,100,193,212,1767,2113,"Fighting","",[""],"Tyrogue",["Bullet Punch", "Counter"],["Fire Punch", "Ice Punch", "Thunder Punch", "Close Combat"]],
["Lickitung","108",108,180,108,137,1066,1332,"Normal","",[""],"",["Lick", "Zen Headbutt"],["Hyper Beam", "Stomp", "Power Whip"]],
["Koffing","109",109,80,119,164,857,1099,"Poison","",["Weezing"],"",["Tackle", "Infestation"],["Sludge", "Sludge Bomb", "Dark Pulse"]],
["Weezing","109",110,130,174,221,1855,2199,"Poison","",[""],"Koffing",["Tackle", "Infestation"],["Sludge Bomb", "Shadow Ball", "Dark Pulse"]],
["Rhyhorn","111",111,160,140,157,1395,1691,"Ground","Rock",["Rhydon"],"",["Mud Slap", "Rock Smash"],["Bulldoze", "Horn Attack", "Stomp"]],
["Rhydon","111",112,210,222,206,2904,3324,"Ground","Rock",[""],"Rhyhorn",["Mud Slap", "Rock Smash"],["Surf", "Earthquake", "Stone Edge"]],
["Chansey","113",113,500,60,176,1119,1479,"Normal","",["Blissey"],"",["Pound", "Zen Headbutt"],["Psychic", "Hyper Beam", "Dazzling Gleam"]],
["Tangela","114",114,130,183,205,1879,2224,"Grass","",[""],"",["Vine Whip", "Infestation"],["Grass Knot", "Sludge Bomb", "Solar Beam"]],
["Kangaskhan","115",115,210,181,165,2119,2481,"Normal","",[""],"",["Mud Slap", "Low Kick"],["Crunch", "Earthquake", "Outrage"]],
["Horsea","116",116,60,129,125,702,928,"Water","",["Seadra"],"",["Water Gun", "Bubble"],["Bubble Beam", "Dragon Pulse", "Flash Cannon"]],
["Seadra","116",117,110,187,182,1664,1993,"Water","",["Kingdra"],"Horsea",["Water Gun", "Dragon Breath"],["Aurora Beam", "Dragon Pulse", "Hydro Pump"]],
["Goldeen","118",118,90,123,115,787,1014,"Water","",["Seaking"],"",["Peck", "Mud Shot"],["Water Pulse", "Horn Attack", "Aqua Tail"]],
["Seaking","118",119,160,175,154,1727,2055,"Water","",[""],"Goldeen",["Peck", "Waterfall"],["Ice Beam", "Water Pulse", "Megahorn"]],
["Staryu","120",120,60,137,112,706,933,"Water","",["Starmie"],"",["Tackle", "Water Gun"],["Swift", "Bubble Beam", "Power Gem"]],
["Starmie","120",121,120,210,184,1962,2319,"Water","Psychic",[""],"Staryu",["Hidden Power", "Water Gun"],["Hydro Pump", "Power Gem", "Psychic"]],
["Mr Mime","122",122,80,192,233,1648,1998,"Psychic","Fairy",[""],"",["Confusion", "Zen Headbutt"],["Psybeam", "Psychic", "Shadow Ball"]],
["Scyther","123",123,140,218,170,2115,2481,"Bug","Flying",["Scizor"],"",["Fury Cutter", "Air Slash"],["Night Slash", "X-Scissor", "Aerial Ace"]],
["Jynx","124",124,130,223,182,2157,2530,"Ice","Psychic",[""],"Smoochum",["Frost Breath", "Confusion"],["Draining Kiss", "Avalanche", "Psyshock"]],
["Electabuzz","125",125,130,198,173,1867,2212,"Electric","",[""],"Elekid",["Thunder Shock", "Low Kick"],["Thunder Punch", "Thunderbolt", "Thunder"]],
["Magmar","126",126,130,206,169,1920,2270,"Fire","",[""],"Magby",["Ember", "Karate Chop"],["Fire Blast", "Fire Punch", "Flamethrower"]],
["Pinsir","127",127,130,238,197,2395,2790,"Bug","",[""],"",["Rock Smash", "Bug Bite"],["Vice Grip", "X-Scissor", "Close Combat"]],
["Tauros","128",128,150,198,197,2141,2505,"Normal","",[""],"",["Tackle", "Zen Headbutt"],["Horn Attack", "Iron Head", "Earthquake"]],
["Magikarp","129",129,40,29,102,116,222,"Water","",["Gyarados"],"",["Splash"],["Struggle"]],
["Gyarados","129",130,190,237,197,2884,3304,"Water","Flying",[""],"Magikarp",["Bite", "Waterfall"],["Hydro Pump", "Crunch", "Outrage"]],
["Lapras","131",131,260,165,180,2245,2621,"Water","Ice",[""],"",["Frost Breath", "Water Gun"],["Hydro Pump", "Surf", "Blizzard"]],
["Ditto","132",132,96,91,91,535,723,"Normal","",[""],"",["Transform"],["Struggle"]],
["Eevee","133",133,110,104,121,754,975,"Normal","",["Vaporeon","Jolteon","Flareon","Espeon","Umbreon"],"",["Quick Attack", "Tackle"],["Dig", "Swift"]],
["Vaporeon","133",134,260,205,177,2766,3179,"Water","",[""],"Eevee",["Water Gun"],["Water Pulse", "Hydro Pump", "Aqua Tail"]],
["Jolteon","133",135,130,232,201,2359,2749,"Electric","",[""],"Eevee",["Thunder Shock", "Volt Switch"],["Discharge", "Thunderbolt", "Thunder"]],
["Flareon","133",136,130,246,204,2519,2925,"Fire","",[""],"Eevee",["Ember", "Fire Spin"],["Fire Blast", "Flamethrower", "Overheat"]],
["Porygon","137",137,130,153,139,1293,1579,"Normal","",["Porygon2"],"",["Charge Beam", "Hidden Power"],["Solar Beam", "Hyper Beam", "Zap Cannon"]],
["Omanyte","138",138,70,155,174,1076,1355,"Rock","Water",["Omastar"],"",["Water Gun", "Mud Shot"],["Ancient Power", "Bubble Beam", "Rock Blast"]],
["Omastar","138",139,140,207,227,2321,2704,"Rock","Water",[""],"Omanyte",["Mud Shot", "Water Gun"],["Ancient Power", "Hydro Pump", "Rock Blast"]],
["Kabuto","140",140,60,148,162,917,1181,"Rock","Water",["Kabutops"],"",["Scratch", "Mud Shot"],["Ancient Power", "Aqua Jet", "Rock Tomb"]],
["Kabutops","140",141,120,220,203,2159,2535,"Rock","Water",[""],"Kabuto",["Mud Shot", "Rock Smash"],["Ancient Power", "Water Pulse", "Stone Edge"]],
["Aerodactyl","142",142,160,221,164,2251,2627,"Rock","Flying",[""],"",["Steel Wing", "Bite"],["Ancient Power", "Iron Head", "Hyper Beam"]],
["Snorlax","143",143,320,190,190,2946,3379,"Normal","",[""],"",["Zen Headbutt", "Lick"],["Heavy Slam", "Hyper Beam", "Earthquake"]],
["Articuno","144",144,180,192,249,2556,2954,"Ice","Flying",[""],"",["Frost Breath"],["Ice Beam", "Icy Wind", "Blizzard"]],
["Zapdos","145",145,180,253,188,2927,3354,"Electric","Flying",[""],"",["Charge Beam"],["Zap Cannon", "Thunderbolt", "Thunder"]],
["Moltres","146",146,180,251,184,2873,3296,"Fire","Flying",[""],"",["Fire Spin"],["Fire Blast", "Heat Wave", "Overheat"]],
["Dratini","147",147,82,119,94,657,866,"Dragon","",["Dragonair"],"",["Dragon Breath", "Iron Tail"],["Wrap", "Twister", "Aqua Tail"]],
["Dragonair","147",148,122,163,138,1330,1621,"Dragon","",["Dragonite"],"Dratini",["Dragon Breath", "Iron Tail"],["Wrap", "Aqua Tail", "Dragon Pulse"]],
["Dragonite","147",149,182,263,201,3164,3607,"Dragon","Flying",[""],"Dragonair",["Dragon Tail", "Steel Wing"],["Hurricane", "Hyper Beam", "Outrage"]],
["Mewtwo","150",150,193,300,182,3536,4010,"Psychic","",[""],"",["Psycho Cut", "Confusion"],["Psychic", "Thunderbolt", "Ice Beam", "Focus Blast", "Flamethrower"]],
["Mew","151",151,200,210,210,2707,3112,"Psychic","",[""],"",["Pound", "Steel Wing", "Charge Beam", "Shadow Claw", "Volt Switch", "Struggle Bug", "Frost Breath", "Dragon Tail", "Infestation", "Poison Jab", "Rock Smash", "Snarl", "Cut", "Waterfall"],["Psychic", "Ancient Power", "Dragon Claw", "Psyshock", "Ice Beam", "Blizzard", "Hyper Beam", "Solar Beam", "Thunderbolt", "Thunder", "Psychic", "Ancient Power", "Dragon Claw", "Psyshock", "Ice Beam", "Blizzard", "Hyper Beam", "Solar Beam", "Thunderbolt", "Thunder", "Flame Charge", "Low Sweep", "Overheat", "Focus Blast", "Energy Ball", "Stone Edge", "Gyro Ball", "Bulldoze", "Rock Slide", "Grass Knot", "Flash Cannon", "Wild Charge", "Dark Pulse", "Dazzling Gleam", "Surf"]],
["Chikorita","152",152,90,92,122,606,807,"Grass","",["Bayleef"],"",["Vine Whip", "Tackle"],["Energy Ball", "Grass Knot", "Body Slam"]],
["Bayleef","152",153,120,122,155,1046,1305,"Grass","",["Meganium"],"Chikorita",["Razor Leaf", "Tackle"],["Energy Ball", "Grass Knot", "Ancient Power"]],
["Meganium","152",154,160,168,202,1899,2243,"Grass","",[""],"Bayleef",["Razor Leaf", "Vine Whip"],["Petal Blizzard", "Solar Beam", "Earthquake"]],
["Cyndaquil","155",155,78,116,96,631,837,"Fire","",["Quilava"],"",["Ember", "Tackle"],["Flame Charge", "Swift", "Flamethrower"]],
["Quilava","155",156,116,158,129,1215,1494,"Fire","",["Typhlosion"],"Cyndaquil",["Ember", "Tackle"],["Flame Charge", "Dig", "Flamethrower"]],
["Typhlosion","155",157,156,223,176,2324,2705,"Fire","",[""],"Quilava",["Ember", "Shadow Claw"],["Fire Blast", "Overheat", "Solar Beam"]],
["Totodile","158",158,100,117,116,792,1019,"Water","",["Croconaw"],"",["Water Gun", "Scratch"],["Crunch", "Aqua Jet", "Water Pulse"]],
["Croconaw","158",159,130,150,151,1321,1610,"Water","",["Feraligatr"],"Totodile",["Water Gun", "Scratch"],["Crunch", "Ice Punch", "Water Pulse"]],
["Feraligatr","158",160,170,205,197,2359,2740,"Water","",[""],"Croconaw",["Waterfall", "Bite"],["Crunch", "Hydro Pump", "Ice Beam"]],
["Sentret","161",161,70,79,77,364,522,"Normal","",["Furret"],"",["Scratch", "Quick Attack"],["Dig", "Brick Break", "Grass Knot"]],
["Furret","161",162,170,148,130,1383,1679,"Normal","",[""],"Sentret",["Quick Attack", "Sucker Punch"],["Dig", "Brick Break", "Hyper Beam"]],
["Hoothoot","163",163,120,67,101,463,645,"Normal","Flying",["Noctowl"],"",["Feint Attack", "Peck"],["Aerial Ace", "Sky Attack", "Night Shade"]],
["Noctowl","163",164,200,145,179,1725,2055,"Normal","Flying",[""],"Hoothoot",["Wing Attack", "Extrasensory"],["Psychic", "Sky Attack", "Night Shade"]],
["Ledyba","165",165,80,72,142,482,668,"Bug","Flying",["Ledian"],"",["Tackle", "Bug Bite"],["Silver Wind", "Swift", "Aerial Ace"]],
["Ledian","165",166,110,107,209,1020,1284,"Bug","Flying",[""],"Ledyba",["Struggle Bug", "Bug Bite"],["Bug Buzz", "Silver Wind", "Aerial Ace"]],
["Spinarak","167",167,80,105,73,504,690,"Bug","Poison",["Ariados"],"",["Poison Sting", "Bug Bite"],["Night Slash", "Signal Beam", "Cross Poison"]],
["Ariados","167",168,140,161,128,1355,1648,"Bug","Poison",[""],"Spinarak",["Poison Sting", "Infestation"],["Shadow Sneak", "Megahorn", "Cross Poison"]],
["Crobat","41",169,170,194,178,2122,2484,"Poison","Flying",[""],"Golbat",["Air Slash", "Bite"],["Shadow Ball", "Air Cutter", "Sludge Bomb"]],
["Chinchou","170",170,150,106,106,840,1075,"Water","Electric",["Lanturn"],"",["Bubble", "Spark"],["Water Pulse", "Thunderbolt", "Bubble Beam"]],
["Lanturn","170",171,250,146,146,1754,2091,"Water","Electric",[""],"Chinchou",["Water Gun", "Charge Beam"],["Hydro Pump", "Thunderbolt", "Thunder"]],
["Pichu","25",172,40,77,63,243,379,"Electric","",["Pikachu"],"",["Thunder Shock"],["Thunderbolt", "Disarming Voice", "Thunder Punch"]],
["Cleffa","35",173,100,75,91,450,625,"Fairy","",["Clefairy"],"",["Pound", "Zen Headbutt"],["Grass Knot", "Psyshock", "Signal Beam"]],
["Igglybuff","39",174,180,69,34,339,516,"Normal","Fairy",["Jigglypuff"],"",["Pound", "Feint Attack"],["Wild Charge", "Shadow Ball", "Psychic"]],
["Togepi","175",175,70,67,116,379,544,"Fairy","",["Togetic"],"",["Hidden Power", "Peck"],["Ancient Power", "Psyshock", "Dazzling Gleam"]],
["Togetic","175",176,110,139,191,1267,1554,"Fairy","Flying",[""],"Togepi",["Extrasensory", "Hidden Power"],["Ancient Power", "Dazzling Gleam", "Aerial Ace"]],
["Natu","177",177,80,134,89,711,931,"Psychic","Flying",["Xatu"],"",["Peck", "Quick Attack"],["Night Shade", "Psyshock", "Drill Peck"]],
["Xatu","177",178,130,192,146,1663,1989,"Psychic","Flying",[""],"Natu",["Air Slash", "Feint Attack"],["Ominous Wind", "Futuresight", "Aerial Ace"]],
["Mareep","179",179,110,114,82,681,893,"Electric","",["Flaaffy"],"",["Tackle", "Thunder Shock"],["Body Slam", "Thunderbolt", "Discharge"]],
["Flaaffy","179",180,140,145,112,1142,1412,"Electric","",["Ampharos"],"Mareep",["Tackle", "Charge Beam"],["Power Gem", "Thunderbolt", "Discharge"]],
["Ampharos","179",181,180,211,172,2335,2714,"Electric","",[""],"Flaaffy",["Charge Beam", "Volt Switch"],["Zap Cannon", "Focus Blast", "Thunder"]],
["Bellossom","43",182,150,169,189,1789,2123,"Grass","",[""],"Gloom",["Razor Leaf", "Acid"],["Leaf Blade", "Petal Blizzard", "Dazzling Gleam"]],
["Marill","183",183,140,37,93,265,423,"Water","Fairy",["Azumarill"],"Azurill",["Tackle", "Bubble"],["Bubble Beam", "Aqua Tail", "Body Slam"]],
["Azumarill","183",184,200,112,152,1228,1513,"Water","Fairy",[""],"Marill",["Rock Smash", "Bubble"],["Play Rough", "Hydro Pump", "Ice Beam"]],
["Sudowoodo","185",185,140,167,198,1748,2080,"Rock","",[""],"",["Rock Throw", "Counter"],["Stone Edge", "Earthquake", "Rock Slide"]],
["Politoed","60",186,180,174,192,2034,2388,"Water","",[""],"Poliwhirl",["Mud Shot", "Bubble"],["Hydro Pump", "Blizzard", "Surf"]],
["Hoppip","187",187,70,67,101,354,512,"Grass","Flying",["Skiploom"],"",["Tackle", "Bullet Seed"],["Grass Knot", "Dazzling Gleam", "Seed Bomb"]],
["Skiploom","187",188,110,91,127,676,888,"Grass","Flying",["Jumpluff"],"Hoppip",["Tackle", "Bullet Seed"],["Grass Knot", "Dazzling Gleam", "Energy Ball"]],
["Jumpluff","187",189,150,118,197,1275,1564,"Grass","Flying",[""],"Skiploom",["Infestation", "Bullet Seed"],["Energy Ball", "Dazzling Gleam", "Solar Beam"]],
["Aipom","190",190,110,136,112,949,1196,"Normal","",[""],"",["Scratch", "Astonish"],["Low Sweep", "Swift", "Aerial Ace"]],
["Sunkern","191",191,60,55,55,198,319,"Grass","",["Sunflora"],"",["Razor Leaf", "Cut"],["Energy Ball", "Grass Knot", "Seed Bomb"]],
["Sunflora","191",192,150,185,148,1733,2063,"Grass","",[""],"Sunkern",["Razor Leaf", "Bullet Seed"],["Solar Beam", "Petal Blizzard", "Sludge Bomb"]],
["Yanma","193",193,130,154,94,1070,1336,"Bug","Flying",[""],"",["Quick Attack", "Wing Attack"],["Ancient Power", "Aerial Ace", "Silver Wind"]],
["Wooper","194",194,110,75,75,428,600,"Water","Ground",["Quagsire"],"",["Water Gun", "Mud Shot"],["Mud Bomb", "Dig", "Body Slam"]],
["Quagsire","194",195,190,152,152,1624,1943,"Water","Ground",[""],"Wooper",["Water Gun", "Mud Shot"],["Sludge Bomb", "Earthquake", "Stone Edge"]],
["Espeon","133",196,130,261,194,2607,3022,"Psychic","",[""],"Eevee",["Confusion", "Zen Headbutt"],["Psybeam", "Psychic", "Futuresight"]],
["Umbreon","133",197,190,126,250,1727,2067,"Dark","",[""],"Eevee",["Feint Attack", "Snarl"],["Dark Pulse", "Foul Play"]],
["Murkrow","198",198,120,175,87,1124,1402,"Dark","Flying",[""],"",["Peck", "Feint Attack"],["Drill Peck", "Foul Play", "Dark Pulse"]],
["Slowking","79",199,190,177,194,2137,2499,"Water","Psychic",[""],"Slowpoke",["Water Gun", "Confusion"],["Blizzard", "Psychic", "Fire Blast"]],
["Misdreavus","200",200,120,167,167,1487,1794,"Ghost","",[""],"",["Astonish", "Hex"],["Shadow Sneak", "Dark Pulse", "Ominous Wind"]],
["Unown","201",201,96,136,91,799,1030,"Psychic","",[""],"",["Hidden Power"],["Struggle"]],
["Wobbuffet","202",202,380,60,106,757,1031,"Psychic","",[""],"Wynaut",["Counter", "Splash"],["Mirror Coat"]],
["Girafarig","203",203,140,182,133,1562,1876,"Normal","Psychic",[""],"",["Tackle", "Confusion"],["Psychic", "Thunderbolt", "Mirror Coat"]],
["Pineco","204",204,100,108,146,820,1052,"Bug","",["Forretress"],"",["Tackle", "Bug Bite"],["Gyro Ball", "Rock Tomb", "Sand Tomb"]],
["Forretress","204",205,150,161,242,1929,2279,"Bug","Steel",[""],"Pineco",["Bug Bite", "Struggle Bug"],["Heavy Slam", "Earthquake", "Rock Tomb"]],
["Dunsparce","206",206,200,131,131,1333,1627,"Normal","",[""],"",["Bite", "Astonish"],["Dig", "Rock Slide", "Drill Run"]],
["Gligar","207",207,130,143,204,1464,1771,"Ground","Flying",[""],"",["Fury Cutter", "Wing Attack"],["Dig", "Aerial Ace", "Night Slash"]],
["Steelix","95",208,150,148,333,2080,2456,"Steel","Ground",[""],"Onix",["Iron Tail", "Dragon Tail"],["Earthquake", "Heavy Slam", "Crunch"]],
["Snubbull","209",209,120,137,89,890,1132,"Fairy","",["Granbull"],"",["Tackle", "Bite"],["Crunch", "Dazzling Gleam", "Brick Break"]],
["Granbull","209",210,180,212,137,2094,2458,"Fairy","",[""],"Snubbull",["Bite", "Snarl"],["Crunch", "Play Rough", "Close Combat"]],
["Qwilfish","211",211,130,184,148,1605,1924,"Water","Poison",[""],"",["Poison Sting", "Water Gun"],["Aqua Tail", "Ice Beam", "Sludge Wave"]],
["Scizor","123",212,140,236,191,2427,2821,"Bug","Steel",[""],"Scyther",["Bullet Punch", "Fury Cutter"],["X-Scissor", "Iron Head", "Night Slash"]],
["Shuckle","213",213,40,17,396,134,302,"Bug","Rock",[""],"",["Struggle Bug", "Rock Throw"],["Rock Blast", "Stone Edge", "Gyro Ball"]],
["Heracross","214",214,160,234,189,2559,2959,"Bug","Fighting",[""],"",["Counter", "Struggle Bug"],["Megahorn", "Close Combat", "Earthquake"]],
["Sneasel","215",215,110,189,157,1562,1881,"Dark","Ice",[""],"",["Ice Shard", "Feint Attack"],["Avalanche", "Ice Punch", "Foul Play"]],
["Teddiursa","216",216,120,142,93,943,1192,"Normal","",["Ursaring"],"",["Scratch", "Lick"],["Cross Chop", "Crunch", "Play Rough"]],
["Ursaring","216",217,180,236,144,2390,2780,"Normal","",[""],"Teddiursa",["Metal Claw", "Counter"],["Close Combat", "Hyper Beam", "Play Rough"]],
["Slugma","218",218,80,118,71,559,756,"Fire","",["Magcargo"],"",["Ember", "Rock Throw"],["Flame Burst", "Flame Charge", "Rock Slide"]],
["Magcargo","218",219,100,139,209,1264,1554,"Fire","Rock",[""],"Slugma",["Ember", "Rock Throw"],["Heat Wave", "Overheat", "Stone Edge"]],
["Swinub","220",220,100,90,74,487,668,"Ice","Ground",["Piloswine"],"",["Tackle", "Powder Snow"],["Icy Wind", "Body Slam", "Rock Slide"]],
["Piloswine","220",221,200,181,147,1952,2300,"Ice","Ground",[""],"Swinub",["Ice Shard", "Powder Snow"],["Avalanche", "Bulldoze", "Stone Edge"]],
["Corsola","222",222,110,118,156,972,1223,"Water","Rock",[""],"",["Tackle", "Bubble"],["Rock Blast", "Power Gem", "Bubble Beam"]],
["Remoraid","223",223,70,127,69,555,754,"Water","",["Octillery"],"",["Water Gun", "Mud Shot"],["Aurora Beam", "Water Pulse", "Rock Blast"]],
["Octillery","223",224,150,197,141,1802,2139,"Water","",[""],"Remoraid",["Water Gun", "Mud Shot"],["Gunk Shot", "Water Pulse", "Aurora Beam"]],
["Delibird","225",225,90,128,90,724,944,"Ice","Flying",[""],"",["Present"],["Ice Punch", "Icy Wind", "Aerial Ace"]],
["Mantine","226",226,130,148,260,1711,2047,"Water","Flying",[""],"",["Bubble", "Wing Attack"],["Water Pulse", "Ice Beam", "Aerial Ace"]],
["Skarmory","227",227,130,148,260,1711,2047,"Steel","Flying",[""],"",["Steel Wing", "Air Slash"],["Brave Bird", "Sky Attack", "Flash Cannon"]],
["Houndour","228",228,90,152,93,874,1118,"Dark","Fire",["Houndoom"],"",["Feint Attack", "Ember"],["Crunch", "Flamethrower", "Dark Pulse"]],
["Houndoom","228",229,150,224,159,2176,2547,"Dark","Fire",[""],"Houndour",["Snarl", "Fire Fang"],["Crunch", "Fire Blast", "Foul Play"]],
["Kingdra","116",230,150,194,194,2081,2441,"Water","Dragon",[""],"Seadra",["Waterfall", "Dragon Breath"],["Hydro Pump", "Blizzard", "Outrage"]],
["Phanpy","231",231,180,107,107,934,1183,"Ground","",["Donphan"],"",["Tackle", "Rock Smash"],["Bulldoze", "Rock Slide", "Body Slam"]],
["Donphan","231",232,180,214,214,2641,3043,"Ground","",[""],"Phanpy",["Tackle", "Counter"],["Earthquake", "Heavy Slam", "Play Rough"]],
["Porygon2","137",233,170,198,183,2196,2564,"Normal","",[""],"Porygon",["Hidden Power", "Charge Beam"],["Solar Beam", "Hyper Beam", "Zap Cannon"]],
["Stantler","234",234,146,192,132,1676,2003,"Normal","",[""],"",["Tackle", "Zen Headbutt"],["Stomp", "Wild Charge", "Megahorn"]],
["Smeargle","235",235,110,40,88,247,392,"Normal","",[""],"",["Tackle"],["Struggle"]],
["Tyrogue","236",236,70,64,64,269,407,"Fighting","",["Hitmonlee","Hitmonchan","Hitmontop"],"",["Rock Smash", "Tackle"],["Brick Break", "Rock Slide", "Low Sweep"]],
["Hitmontop","236",237,100,173,214,1591,1919,"Fighting","",[""],"Tyrogue",["Rock Smash", "Counter"],["Close Combat", "Gyro Ball", "Stone Edge"]],
["Smoochum","124",238,90,153,116,983,1239,"Ice","Psychic",["Jynx"],"",["Powder Snow", "Pound"],["Ice Beam", "Ice Punch", "Psyshock"]],
["Elekid","125",239,90,135,110,844,1080,"Electric","",["Electabuzz"],"",["Thunder Shock", "Low Kick"],["Thunder Punch", "Brick Break", "Discharge"]],
["Magby","126",240,90,151,108,936,1186,"Fire","",["Magmar"],"",["Ember", "Karate Chop"],["Brick Break", "Fire Punch", "Flame Burst"]],
["Miltank","241",241,190,157,211,1977,2328,"Normal","",[""],"",["Tackle", "Zen Headbutt"],["Stomp", "Body Slam", "Gyro Ball"]],
["Blissey","113",242,510,129,229,2773,3241,"Normal","",[""],"Chansey",["Pound", "Zen Headbutt"],["Psychic", "Hyper Beam", "Dazzling Gleam"]],
["Raikou","243",243,180,241,210,2947,3373,"Electric","",[""],"",["Thunder Shock", "Volt Switch"],["Thunder", "Thunderbolt", "Wild Charge"]],
["Entei","244",244,230,235,176,2974,3401,"Fire","",[""],"",["Fire Spin", "Fire Fang"],["Flamethrower", "Fire Blast", "Overheat"]],
["Suicune","245",245,200,180,235,2454,2843,"Water","",[""],"",["Extrasensory", "Snarl"],["Hydro Pump", "Bubble Beam", "Water Pulse"]],
["Larvitar","246",246,100,115,93,697,911,"Rock","Ground",["Pupitar"],"",["Bite", "Rock Smash"],["Stomp", "Crunch", "Ancient Power"]],
["Pupitar","246",247,140,155,133,1330,1619,"Rock","Ground",["Tyranitar"],"Larvitar",["Bite", "Rock Smash"],["Dig", "Crunch", "Ancient Power"]],
["Tyranitar","246",248,200,251,212,3251,3696,"Rock","Dark",[""],"Pupitar",["Bite", "Iron Tail"],["Fire Blast", "Crunch", "Stone Edge"]],
["Lugia","249",249,212,193,323,3176,3624,"Psychic","Flying",[""],"",["Extrasensory", "Dragon Tail"],["Sky Attack", "Hydro Pump", "Futuresight"]],
["Ho-oh","250",250,193,239,274,3457,3917,"Fire","Flying",[""],"",["Extrasensory", "Steel Wing"],["Brave Bird", "Fire Blast", "Solar Beam"]],
["Celebi","251",251,200,210,210,2707,3112,"Psychic","Grass",[""],"",["Confusion", "Charge Beam"],["Hyper Beam", "Psychic", "Dazzling Gleam"]],
["Treecko","252",252,80,124,104,711,929,"Grass","",["Grovyle"],"",["Pound", "Bullet Seed"],["Energy Ball", "Aerial Ace", "Grass Knot"]],
["Grovyle","252",253,100,172,130,1233,1518,"Grass","",["Sceptile"],"Treecko",["Quick Attack", "Bullet Seed"],["Leaf Blade", "Aerial Ace", "Grass Knot"]],
["Sceptile","252",254,140,223,180,2226,2602,"Grass","",[""],"Grovyle",["Fury Cutter", "Bullet Seed"],["Leaf Blade", "Aerial Ace", "Earthquake"]],
["Torchic","255",255,90,130,92,744,966,"Fire","",["Combusken"],"",["Scratch", "Ember"],["Flame Charge", "Flamethrower", "Rock Tomb"]],
["Combusken","255",256,120,163,115,1204,1483,"Fire","Fighting",["Blaziken"],"Torchic",["Peck", "Ember"],["Flame Charge", "Flamethrower", "Rock Slide"]],
["Blaziken","255",257,160,240,141,2267,2650,"Fire","Fighting",[""],"Combusken",["Counter", "Fire Spin"],["Focus Blast", "Overheat", "Brave Bird"]],
["Mudkip","258",258,100,126,93,764,988,"Water","",["Marshtomp"],"",["Tackle", "Water Gun"],["Dig", "Sludge", "Stomp"]],
["Marshtomp","258",259,140,156,133,1339,1629,"Water","Ground",["Swampert"],"Mudkip",["Mud Shot", "Water Gun"],["Mud Bomb", "Sludge", "Surf"]],
["Swampert","258",260,200,208,175,2447,2835,"Water","Ground",[""],"Marshtomp",["Mud Shot", "Water Gun"],["Earthquake", "Sludge Wave", "Surf"]],
["Poochyena","261",261,70,96,63,401,568,"Dark","",["Mightyena"],"",["Tackle", "Snarl"],["Crunch", "Dig", "Poison Fang"]],
["Mightyena","261",262,140,171,137,1489,1795,"Dark","",[""],"Poochyena",["Bite", "Fire Fang"],["Crunch", "Play Rough", "Poison Fang"]],
["Zigzagoon","263",263,76,58,80,284,426,"Normal","",["Linoone"],"",["Tackle", "Rock Smash"],["Dig", "Grass Knot", "Thunderbolt"]],
["Linoone","263",264,156,142,128,1262,1544,"Normal","",[""],"Zigzagoon",["Shadow Claw", "Tackle"],["Dig", "Grass Knot", "Thunder"]],
["Wurmple","265",265,90,75,61,349,505,"Bug","",["Silcoon","Cascoon"],"",["Tackle", "Bug Bite"],["Struggle"]],
["Silcoon","265",266,100,60,91,360,520,"Bug","",["Beautifly"],"Wurmple",["Poison Sting", "Bug Bite"],["Struggle"]],
["Beautifly","265",267,120,189,98,1289,1584,"Bug","Flying",[""],"Silcoon",["Struggle Bug", "Infestation"],["Silver Wind", "Air Cutter", "Bug Buzz"]],
["Cascoon","265",268,100,60,91,360,520,"Bug","",["Dustox"],"Wurmple",["Poison Sting", "Bug Bite"],["Struggle"]],
["Dustox","265",269,120,98,172,885,1129,"Bug","Poison",[""],"Cascoon",["Struggle Bug", "Confusion"],["Silver Wind", "Sludge Bomb", "Bug Buzz"]],
["Lotad","270",270,80,71,86,370,529,"Water","Grass",["Lombre"],"",["Water Gun", "Razor Leaf"],["Bubble Beam", "Energy Ball"]],
["Lombre","270",271,120,112,128,873,1109,"Water","Grass",["Ludicolo"],"Lotad",["Bubble", "Razor Leaf"],["Bubble Beam", "Ice Beam", "Grass Knot"]],
["Ludicolo","270",272,160,173,191,1902,2245,"Water","Grass",[""],"Lombre",["Bubble", "Razor Leaf"],["Hydro Pump", "Blizzard", "Solar Beam"]],
["Seedot","273",273,80,71,86,370,529,"Grass","",["Nuzleaf"],"",["Bullet Seed", "Quick Attack"],["Energy Ball", "Grass Knot", "Foul Play"]],
["Nuzleaf","273",274,140,134,78,880,1125,"Grass","Dark",["Shiftry"],"Seedot",["Razor Leaf", "Feint Attack"],["Leaf Blade", "Grass Knot", "Foul Play"]],
["Shiftry","273",275,180,200,121,1856,2202,"Grass","Dark",[""],"Nuzleaf",["Razor Leaf", "Feint Attack"],["Leaf Blade", "Hurricane", "Foul Play"]],
["Taillow","276",276,80,106,61,465,646,"Normal","Flying",["Swellow"],"",["Peck", "Quick Attack"],["Aerial Ace"]],
["Swellow","276",277,120,185,130,1453,1760,"Normal","Flying",[""],"Taillow",["Wing Attack", "Steel Wing"],["Aerial Ace", "Brave Bird", "Sky Attack"]],
["Wingull","278",278,80,106,61,465,646,"Water","Flying",["Pelipper"],"",["Water Gun", "Quick Attack"],["Water Pulse", "Air Cutter", "Ice Beam"]],
["Pelipper","278",279,120,175,189,1657,1983,"Water","Flying",[""],"Wingull",["Water Gun", "Wing Attack"],["Hydro Pump", "Hurricane", "Blizzard"]],
["Ralts","280",280,56,79,63,295,440,"Psychic","Fairy",["Kirlia"],"",["Confusion", "Charge Beam"],["Psyshock", "Disarming Voice", "Shadow Sneak"]],
["Kirlia","280",281,76,117,100,641,849,"Psychic","Fairy",["Gardevoir"],"Ralts",["Confusion", "Charge Beam"],["Psychic", "Disarming Voice", "Shadow Sneak"]],
["Gardevoir","280",282,136,237,220,2578,2986,"Psychic","Fairy",[""],"Kirlia",["Confusion", "Charge Beam"],["Psychic", "Dazzling Gleam", "Shadow Ball"]],
["Surskit","283",283,80,93,97,515,700,"Bug","Water",["Masquerain"],"",["Bubble", "Bug Bite"],["Aqua Jet", "Bubble Beam", "Signal Beam"]],
["Masquerain","283",284,140,192,161,1813,2150,"Bug","Flying",[""],"Surskit",["Infestation", "Air Slash"],["Air Cutter", "Ominous Wind", "Silver Wind"]],
["Shroomish","285",285,120,74,110,534,727,"Grass","",["Breloom"],"",["Tackle", "Bullet Seed"],["Seed Bomb", "Grass Knot", "Energy Ball"]],
["Breloom","285",286,120,241,153,2054,2425,"Grass","Fighting",[""],"Shroomish",["Counter", "Bullet Seed"],["Dynamic Punch", "Seed Bomb", "Sludge Bomb"]],
["Slakoth","287",287,120,104,104,730,948,"Normal","",["Vigoroth"],"",["Yawn"],["Body Slam", "Night Slash", "Brick Break"]],
["Vigoroth","287",288,160,159,159,1595,1909,"Normal","",["Slaking"],"Slakoth",["Scratch", "Counter"],["Body Slam", "Bulldoze", "Brick Break"]],
["Slaking","287",289,273,290,183,4077,4581,"Normal","",[""],"Vigoroth",["Yawn"],["Hyper Beam", "Play Rough", "Earthquake"]],
["Nincada","290",290,62,80,153,490,679,"Bug","Ground",["Ninjask","Shedinja"],"",["Scratch", "Bug Bite"],["Night Slash", "Bug Buzz", "Aerial Ace"]],
["Ninjask","290",291,122,196,114,1453,1764,"Bug","Flying",[""],"Nincada",["Fury Cutter", "Metal Claw"],["Shadow Ball", "Bug Buzz", "Aerial Ace"]],
["Shedinja","290",292,2,153,80,121,424,"Bug","Ghost",[""],"Nincada",["Bite", "Struggle Bug"],["Shadow Sneak", "Aerial Ace", "Dig"]],
["Whismur","293",293,128,92,42,424,607,"Normal","",["Loudred"],"",["Pound", "Astonish"],["Stomp", "Disarming Voice", "Flamethrower"]],
["Loudred","293",294,168,134,81,983,1242,"Normal","",["Exploud"],"Whismur",["Bite", "Rock Smash"],["Stomp", "Disarming Voice", "Flamethrower"]],
["Exploud","293",295,208,179,142,1935,2283,"Normal","",[""],"Loudred",["Bite", "Astonish"],["Crunch", "Disarming Voice", "Fire Blast"]],
["Makuhita","296",296,144,99,54,549,751,"Fighting","",["Hariyama"],"",["Rock Smash", "Tackle"],["Heavy Slam", "Low Sweep", "Cross Chop"]],
["Hariyama","296",297,288,209,114,2382,2785,"Fighting","",[""],"Makuhita",["Counter", "Bullet Punch"],["Heavy Slam", "Close Combat", "Dynamic Punch"]],
["Azurill","183",298,100,36,71,190,319,"Normal","Fairy",["Marill"],"",["Splash", "Bubble"],["Bubble Beam", "Ice Beam", "Body Slam"]],
["Nosepass","299",299,60,82,236,613,837,"Rock","",[""],"",["Rock Throw", "Spark"],["Rock Blast", "Rock Slide", "Thunderbolt"]],
["Skitty","300",300,100,84,84,484,664,"Normal","",["Delcatty"],"",["Feint Attack", "Tackle"],["Dig", "Disarming Voice", "Wild Charge"]],
["Delcatty","300",301,140,132,132,1128,1395,"Normal","",[""],"Skitty",["Feint Attack", "Zen Headbutt"],["Play Rough", "Disarming Voice", "Wild Charge"]],
["Sableye","302",302,100,141,141,1053,1314,"Dark","Ghost",[""],"",["Shadow Claw", "Feint Attack"],["Power Gem", "Foul Play", "Shadow Sneak"]],
["Mawile","303",303,100,155,155,1213,1495,"Steel","Fairy",[""],"",["Bite", "Astonish"],["Play Rough", "Vice Grip", "Iron Head"]],
["Aron","304",304,100,121,168,986,1241,"Steel","Rock",["Lairon"],"",["Tackle", "Metal Claw"],["Iron Head", "Rock Tomb", "Body Slam"]],
["Lairon","304",305,120,158,240,1686,2019,"Steel","Rock",["Aggron"],"Aron",["Metal Claw", "Iron Tail"],["Body Slam", "Rock Slide", "Heavy Slam"]],
["Aggron","304",306,140,198,314,2611,3025,"Steel","Rock",[""],"Lairon",["Dragon Tail", "Iron Tail"],["Thunder", "Stone Edge", "Heavy Slam"]],
["Meditite","307",307,60,78,107,393,559,"Fighting","Psychic",["Medicham"],"",["Confusion", "Rock Smash"],["Ice Punch", "Psyshock", "Low Sweep"]],
["Medicham","307",308,120,121,152,1027,1284,"Fighting","Psychic",[""],"Meditite",["Psycho Cut", "Counter"],["Ice Punch", "Psychic", "Dynamic Punch"]],
["Electrike","309",309,80,123,78,611,815,"Electric","",["Manectric"],"",["Quick Attack", "Spark"],["Thunderbolt", "Discharge", "Swift"]],
["Manectric","309",310,140,215,127,1803,2146,"Electric","",[""],"Electrike",["Snarl", "Charge Beam"],["Thunder", "Wild Charge", "Flame Burst"]],
["Plusle","311",311,120,167,147,1395,1693,"Electric","",[""],"",["Spark", "Quick Attack"],["Thunderbolt", "Discharge", "Swift"]],
["Minun","312",312,120,147,167,1309,1597,"Electric","",[""],"",["Spark", "Quick Attack"],["Thunderbolt", "Discharge", "Swift"]],
["Volbeat","313",313,130,143,171,1341,1632,"Bug","",[""],"",["Struggle Bug", "Tackle"],["Signal Beam", "Bug Buzz", "Thunderbolt"]],
["Illumise","314",314,130,143,171,1341,1632,"Bug","",[""],"",["Struggle Bug", "Tackle"],["Silver Wind", "Bug Buzz", "Dazzling Gleam"]],
["Roselia","315",315,100,186,148,1423,1731,"Grass","Poison",[""],"",["Poison Jab", "Razor Leaf"],["Petal Blizzard", "Sludge Bomb", "Dazzling Gleam"]],
["Gulpin","316",316,140,80,99,592,794,"Poison","",["Swalot"],"",["Pound", "Rock Smash"],["Sludge", "Gunk Shot", "Ice Beam"]],
["Swalot","316",317,200,140,159,1570,1885,"Poison","",[""],"Gulpin",["Rock Smash", "Infestation"],["Gunk Shot", "Sludge Bomb", "Ice Beam"]],
["Carvanha","318",318,90,171,39,637,881,"Water","Dark",["Sharpedo"],"",["Bite", "Snarl"],["Aqua Jet", "Crunch", "Poison Fang"]],
["Sharpedo","318",319,140,243,83,1647,2000,"Water","Dark",[""],"Carvanha",["Bite", "Waterfall"],["Hydro Pump", "Crunch", "Poison Fang"]],
["Wailmer","320",320,260,136,68,1137,1435,"Water","",["Wailord"],"",["Splash", "Water Gun"],["Heavy Slam", "Water Pulse", "Body Slam"]],
["Wailord","320",321,340,175,87,1893,2274,"Water","",[""],"Wailmer",["Zen Headbutt", "Water Gun"],["Surf", "Blizzard", "Hyper Beam"]],
["Numel","322",322,120,119,82,742,964,"Fire","Ground",["Camerupt"],"",["Ember", "Tackle"],["Bulldoze", "Heat Wave", "Stomp"]],
["Camerupt","322",323,140,194,139,1702,2031,"Fire","Ground",[""],"Numel",["Ember", "Rock Smash"],["Earthquake", "Overheat", "Solar Beam"]],
["Torkoal","324",324,140,151,234,1719,2051,"Fire","",[""],"",["Fire Spin", "Ember"],["Overheat", "Solar Beam", "Earthquake"]],
["Spoink","325",325,120,125,145,1037,1294,"Psychic","",["Grumpig"],"",["Splash", "Zen Headbutt"],["Psybeam", "Shadow Ball", "Mirror Coat"]],
["Grumpig","325",326,160,171,211,1976,2326,"Psychic","",[""],"Spoink",["Charge Beam", "Extrasensory"],["Psychic", "Shadow Ball", "Mirror Coat"]],
["Spinda","327",327,120,116,116,860,1095,"Normal","",[""],"",["Sucker Punch", "Psycho Cut"],["Dig", "Rock Tomb", "Icy Wind"]],
["Trapinch","328",328,90,162,78,853,1100,"Ground","",["Vibrava"],"",["Mud Shot", "Struggle Bug"],["Sand Tomb", "Dig", "Crunch"]],
["Vibrava","328",329,100,134,99,838,1073,"Ground","Dragon",["Flygon"],"Trapinch",["Mud Shot", "Dragon Breath"],["Sand Tomb", "Bulldoze", "Bug Buzz"]],
["Flygon","328",330,160,205,168,2114,2476,"Ground","Dragon",[""],"Vibrava",["Mud Shot", "Dragon Tail"],["Earthquake", "Dragon Claw", "Stone Edge"]],
["Cacnea","331",331,100,156,74,844,1088,"Grass","",["Cacturne"],"",["Poison Sting", "Sucker Punch"],["Grass Knot", "Brick Break", "Seed Bomb"]],
["Cacturne","331",332,140,221,115,1763,2107,"Grass","Dark",[""],"Cacnea",["Poison Jab", "Sucker Punch"],["Dark Pulse", "Dynamic Punch", "Grass Knot"]],
["Swablu","333",333,90,76,139,534,727,"Normal","Flying",["Altaria"],"",["Peck", "Astonish"],["Disarming Voice", "Aerial Ace", "Ice Beam"]],
["Altaria","333",334,150,141,208,1566,1882,"Dragon","Flying",[""],"Swablu",["Peck", "Dragon Breath"],["Sky Attack", "Dazzling Gleam", "Dragon Pulse"]],
["Zangoose","335",335,146,222,124,1878,2230,"Normal","",[""],"",["Fury Cutter", "Shadow Claw"],["Close Combat", "Night Slash", "Dig"]],
["Seviper","336",336,146,196,118,1618,1942,"Poison","",[""],"",["Poison Jab", "Iron Tail"],["Poison Fang", "Crunch", "Wrap"]],
["Lunatone","337",337,180,178,163,1917,2261,"Rock","Psychic",[""],"",["Rock Throw", "Confusion"],["Psychic", "Rock Slide", "Moonblast"]],
["Solrock","338",338,180,178,163,1917,2261,"Rock","Psychic",[""],"",["Rock Throw", "Confusion"],["Psychic", "Rock Slide", "Solar Beam"]],
["Barboach","339",339,100,93,83,532,721,"Water","Ground",["Whiscash"],"",["Water Gun", "Mud Shot"],["Aqua Tail", "Ice Beam", "Mud Bomb"]],
["Whiscash","339",340,220,151,142,1678,2005,"Water","Ground",[""],"Barboach",["Water Gun", "Mud Shot"],["Water Pulse", "Blizzard", "Mud Bomb"]],
["Corphish","341",341,86,141,113,874,1115,"Water","",["Crawdaunt"],"",["Bubble", "Rock Smash"],["Vice Grip", "Bubble Beam", "Aqua Jet"]],
["Crawdaunt","341",342,126,224,156,1975,2334,"Water","Dark",[""],"Corphish",["Waterfall", "Snarl"],["Vice Grip", "Bubble Beam", "Night Slash"]],
["Baltoy","343",343,80,77,131,495,681,"Ground","Psychic",["Claydol"],"",["Confusion", "Extrasensory"],["Gyro Ball", "Psybeam", "Dig"]],
["Claydol","343",344,120,140,236,1482,1794,"Ground","Psychic",[""],"Baltoy",["Extrasensory", "Confusion"],["Gyro Ball", "Psychic", "Earthquake"]],
["Lileep","345",345,132,105,154,941,1189,"Rock","Grass",["Cradily"],"",["Acid", "Infestation"],["Grass Knot", "Mirror Coat", "Ancient Power"]],
["Cradily","345",346,172,152,198,1764,2096,"Rock","Grass",[""],"Lileep",["Acid", "Infestation"],["Grass Knot", "Bulldoze", "Stone Edge"]],
["Anorith","347",347,90,176,100,1050,1320,"Rock","Bug",["Armaldo"],"",["Struggle Bug", "Scratch"],["Cross Poison", "Aqua Jet", "Ancient Power"]],
["Armaldo","347",348,150,222,183,2313,2694,"Rock","Bug",[""],"Anorith",["Fury Cutter", "Struggle Bug"],["Cross Poison", "Water Pulse", "Rock Blast"]],
["Feebas","349",349,40,29,102,116,222,"Water","",["Milotic"],"",["Splash", "Tackle"],["Mirror Coat"]],
["Milotic","349",350,190,192,242,2589,2988,"Water","",[""],"Feebas",["Waterfall", "Dragon Tail"],["Surf", "Blizzard", "Hyper Beam"]],
["Castform","351",351,140,139,139,1219,1496,"Normal","",[""],"",["Tackle", "Hex"],["Hurricane", "Energy Ball"]],
["Castform_normal","351_normal",351,140,139,139,1219,1496,"Normal","",[""],"",["Tackle", "Hex"],["Hurricane", "Energy Ball"]],
["Castform_rainy","351_rainy",351,140,139,139,1219,1496,"Water","",[""],"",["Water Gun", "Tackle"],["Hydro Pump", "Thunder"]],
["Castform_snowy","351_snowy",351,140,139,139,1219,1496,"Ice","",[""],"",["Powder Snow", "Tackle"],["Blizzard", "Ice Beam"]],
["Castform_sunny","351_sunny",351,140,139,139,1219,1496,"Fire","",[""],"",["Ember", "Tackle"],["Fire Blast", "Solar Beam"]],
["Kecleon","352",352,120,161,212,1615,1938,"Normal","",[""],"",["Lick", "Sucker Punch"],["Foul Play", "Flamethrower", "Thunder", "Ice Beam", "Aerial Ace", "Shadow Sneak"]],
["Shuppet","353",353,88,138,66,661,879,"Ghost","",["Banette"],"",["Feint Attack", "Astonish"],["Ominous Wind", "Night Shade", "Shadow Sneak"]],
["Banette","353",354,128,218,127,1748,2088,"Ghost","",[""],"Shuppet",["Hex", "Shadow Claw"],["Shadow Ball", "Dazzling Gleam", "Thunder"]],
["Duskull","355",355,40,70,162,354,527,"Ghost","",["Dusclops"],"",["Hex", "Astonish"],["Ominous Wind", "Night Shade", "Shadow Sneak"]],
["Dusclops","355",356,80,124,234,1067,1344,"Ghost","",[""],"Duskull",["Hex", "Feint Attack"],["Shadow Punch", "Ice Punch", "Fire Punch"]],
["Tropius","357",357,198,136,165,1546,1859,"Grass","Flying",[""],"",["Air Slash", "Razor Leaf"],["Stomp", "Aerial Ace", "Leaf Blade"]],
["Chimecho","358",358,150,175,174,1778,2110,"Psychic","",[""],"",["Extrasensory", "Astonish"],["Energy Ball", "Shadow Ball", "Psyshock"]],
["Absol","359",359,130,246,120,1932,2297,"Dark","",[""],"",["Psycho Cut", "Snarl"],["Dark Pulse", "Thunder", "Megahorn"]],
["Wynaut","202",360,190,41,86,329,506,"Psychic","",["Wobbuffet"],"",["Splash", "Counter"],["Mirror Coat"]],
["Snorunt","361",361,100,95,95,582,778,"Ice","",["Glalie"],"",["Powder Snow", "Hex"],["Avalanche", "Icy Wind", "Shadow Ball"]],
["Glalie","361",362,160,162,162,1640,1959,"Ice","",[""],"Snorunt",["Ice Shard", "Frost Breath"],["Avalanche", "Gyro Ball", "Shadow Ball"]],
["Spheal","363",363,140,95,90,670,882,"Ice","Water",["Sealeo"],"",["Water Gun", "Rock Smash"],["Aurora Beam", "Body Slam", "Water Pulse"]],
["Sealeo","363",364,180,137,132,1328,1618,"Ice","Water",["Walrein"],"Spheal",["Water Gun", "Powder Snow"],["Aurora Beam", "Body Slam", "Water Pulse"]],
["Walrein","363",365,220,182,176,2252,2625,"Ice","Water",[""],"Sealeo",["Waterfall", "Frost Breath"],["Blizzard", "Earthquake", "Water Pulse"]],
["Clamperl","366",366,70,133,149,854,1099,"Water","",["Huntail","Gorebyss"],"",["Water Gun"],["Body Slam", "Ice Beam", "Water Pulse"]],
["Huntail","366",367,110,197,194,1810,2155,"Water","",[""],"Clamperl",["Water Gun", "Bite"],["Crunch", "Ice Beam", "Aqua Tail"]],
["Gorebyss","366",368,110,211,194,1938,2297,"Water","",[""],"Clamperl",["Water Gun", "Confusion"],["Draining Kiss", "Psychic", "Water Pulse"]],
["Relicanth","369",369,200,162,234,2204,2576,"Water","Rock",[""],"",["Water Gun", "Zen Headbutt"],["Ancient Power", "Aqua Tail", "Hydro Pump"]],
["Luvdisc","370",370,86,81,134,546,740,"Water","",[""],"",["Water Gun", "Splash"],["Draining Kiss", "Water Pulse", "Aqua Jet"]],
["Bagon","371",371,90,134,107,827,1060,"Dragon","",["Shelgon"],"",["Bite", "Ember"],["Flamethrower", "Twister", "Crunch"]],
["Shelgon","371",372,130,172,179,1650,1972,"Dragon","",["Salamence"],"Bagon",["Ember", "Dragon Breath"],["Flamethrower", "Dragon Pulse", "Twister"]],
["Salamence","371",373,190,277,168,3113,3557,"Dragon","Flying",[""],"Shelgon",["Dragon Tail", "Fire Fang"],["Fire Blast", "Hydro Pump", "Draco Meteor"]],
["Beldum","374",374,80,96,141,641,850,"Steel","Psychic",["Metang"],"",["Take Down"],["Struggle"]],
["Metang","374",375,120,138,185,1293,1581,"Steel","Psychic",["Metagross"],"Beldum",["Zen Headbutt", "Metal Claw"],["Psychic", "Gyro Ball", "Psyshock"]],
["Metagross","374",376,160,257,247,3213,3663,"Steel","Psychic",[""],"Metang",["Bullet Punch", "Zen Headbutt"],["Psychic", "Flash Cannon", "Earthquake"]],
["Regirock","377",377,160,179,356,2687,3109,"Rock","",[""],"",["Rock Throw", "Rock Smash"],["Stone Edge", "Zap Cannon", "Focus Blast"]],
["Regice","378",378,160,179,356,2687,3109,"Ice","",[""],"",["Frost Breath", "Rock Smash"],["Blizzard", "Earthquake", "Focus Blast"]],
["Registeel","379",379,160,143,285,1920,2277,"Steel","",[""],"",["Metal Claw", "Rock Smash"],["Flash Cannon", "Hyper Beam", "Focus Blast"]],
["Latias","380",380,160,228,268,2969,3401,"Dragon","Psychic",[""],"",["Dragon Breath", "Zen Headbutt"],["Psychic", "Outrage", "Thunder"]],
["Latios","381",381,160,268,228,3219,3670,"Dragon","Psychic",[""],"",["Dragon Breath", "Zen Headbutt"],["Psychic", "Dragon Claw", "Solar Beam"]],
["Kyogre","382",382,182,270,251,3630,4103,"Water","",[""],"",["Waterfall"],["Hydro Pump", "Blizzard", "Thunder"]],
["Groudon","383",383,182,270,251,3630,4103,"Ground","",[""],"",["Mud Shot", "Dragon Tail"],["Earthquake", "Fire Blast", "Solar Beam"]],
["Rayquaza","384",384,191,284,170,3219,3671,"Dragon","Flying",[""],"",["Air Slash", "Dragon Tail"],["Outrage", "Aerial Ace", "Ancient Power"]],
["Jirachi","385",385,200,210,210,2707,3112,"Steel","Psychic",[""],"",["Confusion", "Charge Beam"],["Dazzling Gleam", "Psychic", "Doom Desire"]],
["Deoxys","386",386,100,345,115,2327,2768,"Psychic","",[""],"",["Zen Headbutt", "Charge Beam"],["Psycho Boost", "Zap Cannon", "Hyper Beam"]],
["Deoxys_attack","386_attack",386,100,414,46,1766,2260,"Psychic","",[""],"",["Zen Headbutt", "Poison Jab"],["Psycho Boost", "Zap Cannon", "Dark Pulse"]],
["Deoxys_defense","386_defense",386,100,144,330,1645,1992,"Psychic","",[""],"",["Zen Headbutt", "Counter"],["Psycho Boost", "Zap Cannon", "Rock Slide"]],
["Deoxys_normal","386_normal",386,100,345,115,2327,2768,"Psychic","",[""],"",["Zen Headbutt", "Charge Beam"],["Psycho Boost", "Thunderbolt", "Hyper Beam"]],
["Deoxys_speed","386_speed",386,100,230,218,2136,2522,"Psychic","",[""],"",["Zen Headbutt", "Charge Beam"],["Psycho Boost", "Zap Cannon", "Swift"]],
["Turtwig","387",387,146,119,110,948,1195,"Grass","",["Grotle"],"",["Tackle", "Razor Leaf"],["Energy Ball", "Seed Bomb", "Body Slam"]],
["Grotle","387",388,181,157,143,1588,1903,"Grass","",["Torterra"],"Turtwig",["Bite", "Razor Leaf"],["Energy Ball", "Solar Beam", "Body Slam"]],
["Torterra","387",389,216,202,188,2560,2955,"Grass","Ground",[""],"Grotle",["Bite", "Razor Leaf"],["Stone Edge", "Solar Beam", "Earthquake"]],
["Chimchar","390",390,127,113,86,742,964,"Fire","",["Monferno"],"",["Ember", "Scratch"],["Flame Wheel", "Flamethrower", "Flame Charge"]],
["Monferno","390",391,162,158,105,1296,1585,"Fire","Fighting",["Infernape"],"Chimchar",["Ember", "Rock Smash"],["Flame Wheel", "Flamethrower", "Low Sweep"]],
["Infernape","390",392,183,222,151,2321,2702,"Fire","Fighting",[""],"Monferno",["Fire Spin", "Rock Smash"],["Solar Beam", "Flamethrower", "Close Combat"]],
["Piplup","393",393,142,112,102,847,1082,"Water","",["Prinplup"],"",["Bubble", "Pound"],["Bubble Beam", "Drill Peck", "Icy Wind"]],
["Prinplup","393",394,162,150,139,1415,1713,"Water","",["Empoleon"],"Piplup",["Bubble", "Metal Claw"],["Bubble Beam", "Hydro Pump", "Icy Wind"]],
["Empoleon","393",395,197,210,186,2528,2921,"Water","Steel",[""],"Prinplup",["Waterfall", "Metal Claw"],["Hydro Pump", "Blizzard", "Flash Cannon"]],
["Starly","396",396,120,101,58,530,724,"Normal","Flying",["Staravia"],"",["Tackle", "Quick Attack"],["Aerial Ace", "Brave Bird"]],
["Staravia","396",397,146,142,94,1046,1308,"Normal","Flying",["Staraptor"],"Starly",["Wing Attack", "Quick Attack"],["Aerial Ace", "Brave Bird", "Heat Wave"]],
["Staraptor","396",398,198,234,140,2450,2845,"Normal","Flying",[""],"Staravia",["Wing Attack", "Quick Attack"],["Brave Bird", "Heat Wave", "Close Combat"]],
["Bidoof","399",399,153,80,73,531,726,"Normal","",["Bibarel"],"",["Tackle", "Take Down"],["Hyper Fang", "Crunch", "Grass Knot"]],
["Bibarel","399",400,188,162,119,1524,1836,"Normal","Water",[""],"Bidoof",["Water Gun", "Take Down"],["Hyper Fang", "Hyper Beam", "Surf"]],
["Kricketot","401",401,114,45,74,259,404,"Bug","",["Kricketune"],"",["Struggle Bug", "Bug Bite"],["Struggle"]],
["Kricketune","401",402,184,160,100,1365,1665,"Bug","",[""],"Kricketot",["Struggle Bug", "Fury Cutter"],["Bug Buzz", "X-Scissor", "Aerial Ace"]],
["Shinx","403",403,128,117,64,666,882,"Electric","",["Luxio"],"",["Tackle", "Spark"],["Discharge", "Thunderbolt", "Swift"]],
["Luxio","403",404,155,159,95,1213,1496,"Electric","",["Luxray"],"Shinx",["Spark", "Bite"],["Thunderbolt", "Wild Charge", "Crunch"]],
["Luxray","403",405,190,232,156,2512,2909,"Electric","",[""],"Luxio",["Spark", "Snarl"],["Hyper Beam", "Wild Charge", "Crunch"]],
["Budew","315",406,120,91,109,654,862,"Grass","Poison",["Roselia"],"",["Hidden Power", "Razor Leaf"],["Energy Ball", "Grass Knot"]],
["Roserade","315",407,155,243,185,2588,2992,"Grass","Poison",[""],"Roselia",["Poison Jab", "Razor Leaf"],["Solar Beam", "Sludge Bomb", "Dazzling Gleam"]],
["Cranidos","408",408,167,218,71,1493,1833,"Rock","Rock",["Rampardos"],"",["Zen Headbutt", "Take Down"],["Rock Tomb", "Ancient Power", "Bulldoze"]],
["Rampardos","408",409,219,295,109,2867,3321,"Rock","Rock",[""],"Cranidos",["Zen Headbutt", "Smack Down"],["Rock Slide", "Outrage", "Flamethrower"]],
["Shieldon","410",410,102,76,195,674,897,"Rock","Steel",["Bastiodon"],"",["Tackle", "Iron Tail"],["Rock Tomb", "Ancient Power", "Heavy Slam"]],
["Bastiodon","410",411,155,94,286,1244,1550,"Rock","Steel",[""],"Shieldon",["Smack Down", "Iron Tail"],["Stone Edge", "Flamethrower", "Flash Cannon"]],
["Burmy","412",412,120,53,83,332,491,"Bug","",["Wormadam","Wormadam","Wormadam","Mothim"],"",["Tackle", "Quick Attack"],["Struggle"]],
["Wormadam","412",413,155,141,180,1481,1786,"Bug","Grass",[""],"Burmy",["Tackle", "Bug Bite"],["Struggle"]],
["Wormadam_plant","412_plant",413,155,141,180,1481,1786,"Bug","Grass",[""],"Burmy",["Confusion", "Bug Bite"],["Psybeam", "Energy Ball", "Bug Buzz"]],
["Wormadam_sandy","412_sandy",413,155,127,175,1315,1605,"Bug","Ground",[""],"Burmy",["Confusion", "Bug Bite"],["Psybeam", "Bulldoze", "Bug Buzz"]],
["Wormadam_trash","412_trash",413,155,141,180,1481,1786,"Bug","Steel",[""],"Burmy",["Confusion", "Bug Bite"],["Psybeam", "Iron Head", "Bug Buzz"]],
["Mothim","412",414,172,185,98,1510,1828,"Bug","Flying",[""],"",["Air Slash", "Bug Bite"],["Psybeam", "Aerial Ace", "Bug Buzz"]],
["Combee","415",415,102,59,83,341,498,"Bug","Flying",["Vespiquen"],"Mothim",["Bug Bite"],["Bug Buzz"]],
["Vespiquen","415",416,172,149,190,1694,2019,"Bug","Flying",[""],"Mothim",["Bug Bite", "Poison Sting"],["Bug Buzz", "Power Gem", "X-Scissor"]],
["Pachirisu","417",417,155,94,172,965,1222,"Electric","",[""],"",["Spark", "Volt Switch"],["Thunder", "Thunderbolt", "Thunder Punch"]],
["Buizel","418",418,146,132,67,821,1062,"Water","",["Floatzel"],"",["Water Gun", "Quick Attack"],["Aqua Jet", "Water Pulse", "Swift"]],
["Floatzel","418",419,198,221,114,2088,2460,"Water","",[""],"Buizel",["Water Gun", "Waterfall"],["Aqua Jet", "Hydro Pump", "Swift"]],
["Cherubi","420",420,128,108,92,737,957,"Grass","",["Cherrim","Cherrim"],"",["Tackle", "Bullet Seed"],["Power Whip", "Petal Blizzard", "Seed Bomb"]],
["Cherrim","420",421,172,170,153,1734,2062,"Grass","",[""],"Cherubi",["Razor Leaf", "Bullet Seed"],["Power Whip", "Hyper Beam", "Solar Beam"]],
["Cherrim_overcast","420_overcast",421,172,170,153,1734,2062,"Grass","",[""],"Cherubi",["Razor Leaf", "Bullet Seed"],["Power Whip", "Hyper Beam", "Solar Beam"]],
["Cherrim_sunny","420_sunny",421,172,170,153,1734,2062,"Grass","",[""],"Cherubi",["Razor Leaf", "Bullet Seed"],["Power Whip", "Hyper Beam", "Solar Beam"]],
["Shellos","422",422,183,103,105,898,1144,"Water","",["Gastrodon"],"",["Mud Slap", "Hidden Power"],["Water Pulse", "Mud Bomb", "Body Slam"]],
["Shellos_east_sea","422",422,183,103,105,898,1144,"Water","",["Gastrodon"],"",["Mud Slap", "Hidden Power"],["Water Pulse", "Mud Bomb", "Body Slam"]],
["Shellos_west_sea","422",422,183,103,105,898,1144,"Water","",["Gastrodon"],"",["Mud Slap", "Hidden Power"],["Water Pulse", "Mud Bomb", "Body Slam"]],
["Gastrodon","422",423,244,169,143,1985,2341,"Water","Ground",[""],"",["Mud Slap", "Hidden Power"],["Water Pulse", "Earthquake", "Body Slam"]],
["Gastrodon_east_sea","422",423,244,169,143,1985,2341,"Water","Ground",[""],"Shellos",["Mud Slap", "Hidden Power"],["Water Pulse", "Earthquake", "Body Slam"]],
["Gastrodon_west_sea","422",423,244,169,143,1985,2341,"Water","Ground",[""],"Shellos",["Mud Slap", "Hidden Power"],["Water Pulse", "Earthquake", "Body Slam"]],
["Ambipom","190",424,181,205,143,2074,2435,"Normal","",[""],"Aipom",["Scratch", "Astonish"],["Low Sweep", "Hyper Beam", "Aerial Ace"]],
["Drifloon","425",425,207,117,80,947,1205,"Ghost","Flying",["Drifblim"],"",["Hex", "Astonish"],["Ominous Wind", "Icy Wind", "Shadow Ball"]],
["Drifblim","425",426,312,180,102,2019,2399,"Ghost","Flying",[""],"Drifloon",["Hex", "Astonish"],["Ominous Wind", "Icy Wind", "Shadow Ball"]],
["Buneary","427",427,146,130,105,1012,1267,"Normal","",["Lopunny"],"",["Pound", "Quick Attack"],["Fire Punch", "Swift"]],
["Lopunny","427",428,163,156,194,1744,2074,"Normal","",[""],"Buneary",["Pound", "Low Kick"],["Fire Punch", "Hyper Beam", "Focus Blast"]],
["Mismagius","200",429,155,211,187,2259,2634,"Ghost","",[""],"Misdreavus",["Sucker Punch", "Hex"],["Shadow Ball", "Dark Pulse", "Dazzling Gleam"]],
["Honchkrow","198",430,225,243,103,2326,2731,"Dark","Flying",[""],"Murkrow",["Peck", "Snarl"],["Brave Bird", "Psychic", "Dark Pulse"]],
["Glameow","431",431,135,109,82,721,940,"Normal","",["Purugly"],"",["Scratch", "Quick Attack"],["Play Rough", "Thunderbolt", "Aerial Ace"]],
["Purugly","431",432,174,172,133,1645,1967,"Normal","",[""],"Glameow",["Scratch", "Shadow Claw"],["Play Rough", "Thunder", "Aerial Ace"]],
["Chingling","358",433,128,114,94,786,1013,"Psychic","",["Chimecho"],"",["Zen Headbutt", "Astonish"],["Wrap", "Shadow Ball", "Psyshock"]],
["Stunky","434",434,160,121,90,913,1159,"Poison","Dark",["Skuntank"],"",["Scratch", "Bite"],["Crunch", "Flamethrower", "Sludge Bomb"]],
["Skuntank","434",435,230,184,132,2016,2375,"Poison","Dark",[""],"Stunky",["Poison Jab", "Bite"],["Crunch", "Flamethrower", "Sludge Bomb"]],
["Bronzor","436",436,149,43,154,409,607,"Steel","Psychic",["Bronzong"],"",["Tackle", "Confusion"],["Gyro Ball", "Psyshock", "Heavy Slam"]],
["Bronzong","436",437,167,161,213,1910,2255,"Steel","Psychic",[""],"Bronzor",["Feint Attack", "Confusion"],["Flash Cannon", "Psychic", "Heavy Slam"]],
["Bonsly","185",438,137,124,133,1052,1311,"Rock","",["Sudowoodo"],"",["Rock Throw", "Counter"],["Rock Tomb", "Bulldoze", "Rock Slide"]],
["Mime_jr","122_jr",439,85,125,142,863,1103,"Psychic","Fairy",["Mr_mime"],"",["Confusion", "Pound"],["Psybeam", "Psychic", "Psyshock"]],
["Happiny","113",440,225,25,77,206,373,"Normal","",["Chansey"],"",["Pound", "Zen Headbutt"],["Psychic", "Dazzling Gleam"]],
["Chatot","441",441,183,183,91,1485,1804,"Normal","Flying",[""],"",["Peck", "Steel Wing"],["Night Shade", "Sky Attack", "Heat Wave"]],
["Spiritomb","442",442,137,169,199,1755,2087,"Ghost","Dark",[""],"",["Feint Attack", "Sucker Punch"],["Shadow Sneak", "Ominous Wind", "Shadow Ball"]],
["Gible","443",443,151,124,84,878,1120,"Dragon","Ground",["Gabite"],"",["Take Down", "Mud Shot"],["Dig", "Twister", "Body Slam"]],
["Gabite","443",444,169,172,125,1572,1887,"Dragon","Ground",["Garchomp"],"Gible",["Take Down", "Mud Shot"],["Dig", "Twister", "Flamethrower"]],
["Garchomp","443",445,239,261,193,3526,3990,"Dragon","Ground",[""],"Gible",["Dragon Tail", "Mud Shot"],["Outrage", "Earthquake", "Fire Blast"]],
["Munchlax","143",446,286,137,117,1576,1905,"Normal","",["Snorlax"],"",["Tackle", "Lick"],["Gunk Shot", "Body Slam", "Bulldoze"]],
["Riolu","448",447,120,127,78,772,1000,"Fighting","",["Lucario"],"",["Counter", "Quick Attack"],["Brick Break", "Low Sweep", "Cross Chop"]],
["Lucario","448",448,172,236,144,2336,2722,"Fighting","Steel",[""],"Riolu",["Counter", "Bullet Punch"],["Flash Cannon", "Shadow Ball", "Close Combat"]],
["Hippopotas","449",449,169,124,118,1101,1367,"Ground","",["Hippowdon"],"",["Tackle", "Bite"],["Dig", "Rock Tomb", "Body Slam"]],
["Hippowdon","449",450,239,201,191,2701,3107,"Ground","",[""],"Hippopotas",["Fire Fang", "Bite"],["Earthquake", "Stone Edge", "Body Slam"]],
["Skorupi","451",451,120,93,151,787,1016,"Poison","Bug",["Drapion"],"",["Poison Sting", "Infestation"],["Cross Poison", "Aqua Tail", "Sludge Bomb"]],
["Drapion","451",452,172,180,202,2110,2470,"Poison","Dark",[""],"Skorupi",["Poison Sting", "Infestation"],["Crunch", "Aqua Tail", "Sludge Bomb"]],
["Croagunk","453",453,134,116,76,736,959,"Poison","Fighting",["Toxicroak"],"",["Poison Sting", "Poison Jab"],["Brick Break", "Low Sweep", "Sludge Bomb"]],
["Toxicroak","453",454,195,211,133,2137,2506,"Poison","Fighting",[""],"Croagunk",["Poison Jab", "Counter"],["Dynamic Punch", "Mud Bomb", "Sludge Bomb"]],
["Carnivine","455",455,179,187,136,1835,2174,"Grass","",[""],"",["Bite", "Vine Whip"],["Power Whip", "Energy Ball", "Crunch"]],
["Finneon","456",456,135,96,116,755,978,"Water","",["Lumineon"],"",["Pound", "Water Gun"],["Water Pulse", "Ice Beam", "Silver Wind"]],
["Lumineon","456",457,170,142,170,1518,1827,"Water","",[""],"",["Waterfall", "Water Gun"],["Water Pulse", "Blizzard", "Silver Wind"]],
["Mantyke","226",458,128,105,179,999,1257,"Water","Flying",["Mantine"],"",["Bubble", "Tackle"],["Water Pulse", "Ice Beam", "Aerial Ace"]],
["Snover","459",459,155,115,105,922,1167,"Ice","Grass",["Abomasnow"],"",["Powder Snow", "Ice Shard"],["Ice Beam", "Energy Ball", "Stomp"]],
["Abomasnow","459",460,207,178,158,2024,2379,"Ice","Grass",[""],"Snover",["Powder Snow", "Razor Leaf"],["Blizzard", "Energy Ball", "Outrage"]],
["Weavile","215",461,172,243,171,2621,3026,"Dark","Ice",[""],"Sneasel",["Ice Shard", "Feint Attack"],["Avalanche", "Focus Blast", "Foul Play"]],
["Magnezone","81",462,172,238,205,2811,3227,"Electric","Steel",[""],"Magneton",["Spark", "Charge Beam"],["Zap Cannon", "Wild Charge", "Flash Cannon"]],
["Lickilicky","108",463,242,161,181,2119,2484,"Normal","",[""],"Lickitung",["Lick", "Zen Headbutt"],["Hyper Beam", "Earthquake", "Solar Beam"]],
["Rhyperior","111",464,251,241,190,3310,3760,"Ground","Rock",[""],"Rhydon",["Mud Slap", "Smack Down"],["Surf", "Earthquake", "Stone Edge"]],
["Tangrowth","114",465,225,207,184,2649,3051,"Grass","",[""],"Tangela",["Vine Whip", "Infestation"],["Ancient Power", "Sludge Bomb", "Solar Beam"]],
["Electivire","125",466,181,249,163,2690,3101,"Electric","",[""],"Electabuzz",["Thunder Shock", "Low Kick"],["Thunder Punch", "Wild Charge", "Thunder"]],
["Magmortar","126",467,181,247,172,2741,3155,"Fire","",[""],"Magmar",["Fire Spin", "Karate Chop"],["Brick Break", "Fire Punch", "Fire Blast"]],
["Togekiss","175",468,198,225,217,2933,3355,"Fairy","Flying",[""],"Togetic",["Air Slash", "Hidden Power"],["Ancient Power", "Dazzling Gleam", "Aerial Ace"]],
["Yanmega","193",469,200,231,156,2566,2967,"Bug","Flying",[""],"Yanma",["Bug Bite", "Wing Attack"],["Ancient Power", "Aerial Ace", "Bug Buzz"]],
["Leafeon","133",470,163,216,219,2567,2965,"Grass","",[""],"Eevee",["Razor Leaf", "Quick Attack"],["Solar Beam", "Leaf Blade", "Energy Ball"]],
["Glaceon","133",471,163,238,205,2736,3149,"Ice","",[""],"Eevee",["Ice Shard", "Frost Breath"],["Avalanche", "Icy Wind", "Ice Beam"]],
["Gliscor","207",472,181,185,222,2332,2711,"Ground","Flying",[""],"Gligar",["Fury Cutter", "Wing Attack"],["Earthquake", "Aerial Ace", "Night Slash"]],
["Mamoswine","220",473,242,247,146,2920,3352,"Ice","Ground",[""],"Piloswine",["Mud Slap", "Powder Snow"],["Avalanche", "Bulldoze", "Stone Edge"]],
["Porygon_z","137_z",474,198,264,150,2861,3290,"Normal","",[""],"Porygon2",["Hidden Power", "Charge Beam"],["Solar Beam", "Hyper Beam", "Zap Cannon"]],
["Gallade","280",475,169,237,195,2706,3115,"Psychic","Fighting",[""],"Kirlia",["Confusion", "Low Kick"],["Close Combat", "Psychic", "Leaf Blade"]],
["Probopass","299",476,155,135,275,1753,2095,"Rock","Steel",[""],"Nosepass",["Rock Throw", "Spark"],["Magnet Bomb", "Rock Slide", "Thunderbolt"]],
["Dusknoir","355",477,128,180,254,2041,2405,"Ghost","",[""],"Dusclops",["Hex", "Astonish"],["Ominous Wind", "Psychic", "Dark Pulse"]],
["Froslass","361",478,172,171,150,1727,2055,"Ice","Ghost",[""],"Snorunt",["Powder Snow", "Hex"],["Avalanche", "Crunch", "Shadow Ball"]],
["Rotom","479",479,137,204,219,2222,2598,"Electric","Ghost",[""],"",["Tackle", "Thunder Shock"],["Ominous Wind", "Thunderbolt", "Thunder"]],
["Rotom_fan","479_fan",479,137,204,219,2222,2598,"Electric","Flying",[""],"",["Tackle", "Air Slash"],["Ominous Wind", "Thunderbolt", "Thunder"]],
["Rotom_frost","479_frost",479,137,204,219,2222,2598,"Electric","Ice",[""],"",["Tackle", "Thunder Shock"],["Blizzard", "Thunderbolt", "Thunder"]],
["Rotom_heat","479_heat",479,137,204,219,2222,2598,"Electric","Fire",[""],"",["Tackle", "Thunder Shock"],["Overheat", "Thunderbolt", "Thunder"]],
["Rotom_mow","479_mow",479,137,204,219,2222,2598,"Electric","Grass",[""],"",["Tackle", "Thunder Shock"],["Ominous Wind", "Thunderbolt", "Thunder"]],
["Rotom_normal","479_normal",479,137,204,219,2222,2598,"Electric","Ghost",[""],"",["Tackle", "Thunder Shock"],["Ominous Wind", "Thunderbolt", "Thunder"]],
["Rotom_wash","479_wash",479,137,185,159,1717,2045,"Electric","Water",[""],"",["Tackle", "Thunder Shock"],["Hydro Pump", "Thunderbolt", "Thunder"]],
["Uxie","480",480,181,156,270,2169,2542,"Psychic","",[""],"",["Confusion", "Extrasensory"],["Futuresight", "Swift", "Thunder"]],
["Mesprit","481",481,190,212,212,2676,3080,"Psychic","",[""],"",["Confusion", "Extrasensory"],["Futuresight", "Swift", "Blizzard"]],
["Azelf","482",482,181,270,151,2807,3233,"Psychic","",[""],"",["Confusion", "Extrasensory"],["Futuresight", "Swift", "Fire Blast"]],
["Dialga","483",483,205,275,211,3597,4067,"Steel","Dragon",[""],"",["Dragon Breath", "Metal Claw"],["Draco Meteor", "Iron Head", "Thunder"]],
["Palkia","484",484,189,280,215,3550,4019,"Water","Dragon",[""],"",["Dragon Breath", "Dragon Tail"],["Draco Meteor", "Fire Blast", "Hydro Pump"]],
["Heatran","485",485,209,251,213,3331,3781,"Fire","Steel",[""],"",["Fire Spin", "Bug Bite"],["Fire Blast", "Iron Head", "Stone Edge"]],
["Regigigas","486",486,221,287,210,3889,4377,"Normal","",[""],"",["Zen Headbutt", "Rock Smash"],["Stone Edge", "Avalanche", "Heavy Slam"]],
["Giratina","487",487,284,187,225,2973,3403,"Ghost","Dragon",[""],"",["Dragon Breath", "Shadow Claw"],["Dragon Claw", "Ancient Power", "Shadow Sneak"]],
["Giratina_altered","487_altered",487,284,187,225,2973,3403,"Ghost","Dragon",[""],"",["Dragon Breath", "Shadow Claw"],["Dragon Claw", "Ancient Power", "Shadow Sneak"]],
["Giratina_origin","487_origin",487,284,225,187,3261,3710,"Ghost","Dragon",[""],"",["Dragon Breath", "Shadow Claw"],["Dragon Claw", "Ancient Power", "Shadow Sneak"]],
["Cresselia","488",488,260,152,258,2476,2878,"Psychic","",[""],"",["Psycho Cut", "Confusion"],["Aurora Beam", "Moonblast", "Futuresight"]],
["Phione","489",489,190,162,162,1787,2120,"Water","",["Manaphy"],"",["Waterfall", "Bubble"],["Bubble Beam", "Water Pulse", "Surf"]],
["Manaphy","490",490,225,210,210,2871,3288,"Water","",[""],"Phione",["Waterfall", "Bubble"],["Bubble Beam", "Psychic", "Surf"]],
["Darkrai","491",491,172,285,198,3308,3766,"Dark","",[""],"",["Snarl", "Feint Attack"],["Focus Blast", "Shadow Ball", "Dark Pulse"]],
["Shaymin","492",492,225,261,166,3172,3618,"Grass","",[""],"",["Hidden Power", "Zen Headbutt"],["Energy Ball", "Grass Knot", "Solar Beam"]],
["Shaymin_land","492_land",492,225,261,166,3172,3618,"Grass","",[""],"",["Hidden Power", "Zen Headbutt"],["Energy Ball", "Grass Knot", "Solar Beam"]],
["Shaymin_sky","492_sky",492,225,210,210,2871,3288,"Grass","Flying",[""],"",["Hidden Power", "Zen Headbutt"],["Energy Ball", "Grass Knot", "Solar Beam"]],
["Arceus","493",493,237,238,238,3555,4018,"Normal","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_bug","493_bug",493,237,238,238,3555,4018,"Bug","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_dark","493_dark",493,237,238,238,3555,4018,"Dark","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_dragon","493_dragon",493,237,238,238,3555,4018,"Dragon","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_electric","493_electric",493,237,238,238,3555,4018,"Electric","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_fairy","493_fairy",493,237,238,238,3555,4018,"Fairy","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_fighting","493_fighting",493,237,238,238,3555,4018,"Fighting","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_fire","493_fire",493,237,238,238,3555,4018,"Fire","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_flying","493_flying",493,237,238,238,3555,4018,"Flying","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_ghost","493_ghost",493,237,238,238,3555,4018,"Ghost","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_grass","493_grass",493,237,238,238,3555,4018,"Grass","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_ground","493_ground",493,237,238,238,3555,4018,"Ground","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_ice","493_ice",493,237,238,238,3555,4018,"Ice","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_normal","493_normal",493,237,238,238,3555,4018,"Normal","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_poison","493_poison",493,237,238,238,3555,4018,"Poison","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_psychic","493_psychic",493,237,238,238,3555,4018,"Psychic","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_rock","493_rock",493,237,238,238,3555,4018,"Rock","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_steel","493_steel",493,237,238,238,3555,4018,"Steel","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
["Arceus_water","493_water",493,237,238,238,3555,4018,"Water","",[""],"",["Iron Tail", "Shadow Claw"],["Futuresight", "Hyper Beam", "Outrage"]],
]
# National Pokedex numbers of the legendary/mythical species (gens I-III)
# present in the raw species table above.
LEGENDARY_SPECIES_IDS = [
    144, # Articuno
    145, # Zapdos
    146, # Moltres
    150, # Mewtwo
    151, # Mew
    243, # Raikou
    244, # Entei
    245, # Suicune
    249, # Lugia
    250, # Ho-oh
    251, # Celebi
    377, # Regirock
    378, # Regice
    379, # Registeel
    380, # Latias
    381, # Latios
    382, # Kyogre
    383, # Groudon
    384, # Rayquaza
    385, # Jirachi
    386, # Deoxys
]
def __VALIDATE_POKEMON_MOVES():
    """Sanity-check the species table at import time.

    Every quick/charge move name listed for a species must resolve to a
    known move; any misspelling is reported on stdout.
    """
    #print("Validating pokemon moves...")
    for row in RAW_SPECIES_DATA:
        pkmn_name = row[SPECIES_KEYS.Name]
        for move_name in row[SPECIES_KEYS.Quick_Moves]:
            if moves._get_basic_move_by_name(move_name) is None:
                print("Invalid move "+move_name+" for PKMN "+pkmn_name)
        for move_name in row[SPECIES_KEYS.Charge_Moves]:
            if moves._get_charge_move_by_name(move_name) is None:
                print("Invalid move "+move_name+" for PKMN "+pkmn_name)
__VALIDATE_POKEMON_MOVES()
def get_id_from_species(name):
    """Return the numeric id of the species called *name* (case-insensitive).

    Returns -1 when no species by that name exists in the table.
    """
    wanted = name.lower()
    for row in RAW_SPECIES_DATA:
        if row[SPECIES_KEYS.Name].lower() == wanted:
            return int(row[SPECIES_KEYS.Id])
    return -1
def get_species_data_from_species(name):
    """Return the raw data row for the species called *name*.

    The comparison is case-insensitive; None is returned when no
    species by that name exists in the table.
    """
    wanted = name.lower()
    return next(
        (row for row in RAW_SPECIES_DATA
         if row[SPECIES_KEYS.Name].lower() == wanted),
        None)
| gpl-3.0 |
MITPERG/oilsands-mop | env/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/sbcharsetprober.py | 2927 | 4793 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .charsetprober import CharSetProber
from .compat import wrap_ord
# Character orders below this value count as "frequent" characters and
# participate in the two-character sequence (precedence-matrix) lookup.
SAMPLE_SIZE = 64
# Minimum number of observed sequences before feed() may take an early
# accept/reject shortcut.
SB_ENOUGH_REL_THRESHOLD = 1024
# Confidence above which the prober declares a positive match early.
POSITIVE_SHORTCUT_THRESHOLD = 0.95
# Confidence below which the prober rules this charset out early.
NEGATIVE_SHORTCUT_THRESHOLD = 0.05
# Orders at or above this value are excluded from the character count
# (they map to symbols rather than letters in the language models).
SYMBOL_CAT_ORDER = 250
# Number of sequence-likelihood categories in the precedence matrices.
NUMBER_OF_SEQ_CAT = 4
# Index of the most-likely ("positive") sequence category.
POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1
#NEGATIVE_CAT = 0
class SingleByteCharSetProber(CharSetProber):
    """Prober for one single-byte charset described by a language model.

    The model supplies a char-to-order map and a flat precedence matrix
    of two-character sequence likelihood categories; confidence is
    derived from how often observed character pairs land in the most
    likely ("positive") category.
    """

    def __init__(self, model, reversed=False, nameProber=None):
        CharSetProber.__init__(self)
        self._mModel = model
        # TRUE if we need to reverse every pair in the model lookup
        self._mReversed = reversed
        # Optional auxiliary prober for name decision
        self._mNameProber = nameProber
        self.reset()

    def reset(self):
        """Reset all per-stream statistics so the prober can be reused."""
        CharSetProber.reset(self)
        # char order of last character (255 = "no previous character")
        self._mLastOrder = 255
        self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
        self._mTotalSeqs = 0
        self._mTotalChar = 0
        # characters that fall in our sampling range
        self._mFreqChar = 0

    def get_charset_name(self):
        """Return the detected charset name (deferring to the name prober)."""
        if self._mNameProber:
            return self._mNameProber.get_charset_name()
        else:
            return self._mModel['charsetName']

    def feed(self, aBuf):
        """Consume a chunk of bytes, updating sequence statistics.

        Returns the current detection state; may shortcut to eFoundIt /
        eNotMe once enough sequences have been seen.
        """
        if not self._mModel['keepEnglishLetter']:
            aBuf = self.filter_without_english_letters(aBuf)
        aLen = len(aBuf)
        if not aLen:
            return self.get_state()
        for c in aBuf:
            order = self._mModel['charToOrderMap'][wrap_ord(c)]
            if order < SYMBOL_CAT_ORDER:
                self._mTotalChar += 1
            if order < SAMPLE_SIZE:
                self._mFreqChar += 1
                if self._mLastOrder < SAMPLE_SIZE:
                    self._mTotalSeqs += 1
                    if not self._mReversed:
                        i = (self._mLastOrder * SAMPLE_SIZE) + order
                        model = self._mModel['precedenceMatrix'][i]
                    else:  # reverse the order of the letters in the lookup
                        i = (order * SAMPLE_SIZE) + self._mLastOrder
                        model = self._mModel['precedenceMatrix'][i]
                    self._mSeqCounters[model] += 1
            self._mLastOrder = order

        if self.get_state() == constants.eDetecting:
            if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
                cf = self.get_confidence()
                if cf > POSITIVE_SHORTCUT_THRESHOLD:
                    if constants._debug:
                        # Bug fix: the implicitly concatenated literals were
                        # missing a separating space ("...we have awinner").
                        sys.stderr.write('%s confidence = %s, we have a '
                                         'winner\n' %
                                         (self._mModel['charsetName'], cf))
                    self._mState = constants.eFoundIt
                elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
                    if constants._debug:
                        # Bug fix: missing space between concatenated literals
                        # and "threshhold" typo in the debug message.
                        sys.stderr.write('%s confidence = %s, below negative '
                                         'shortcut threshold %s\n' %
                                         (self._mModel['charsetName'], cf,
                                          NEGATIVE_SHORTCUT_THRESHOLD))
                    self._mState = constants.eNotMe

        return self.get_state()

    def get_confidence(self):
        """Return confidence in [0.01, 0.99] from the sequence statistics."""
        r = 0.01
        if self._mTotalSeqs > 0:
            r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs
                 / self._mModel['mTypicalPositiveRatio'])
            r = r * self._mFreqChar / self._mTotalChar
            if r >= 1.0:
                r = 0.99
        return r
| mit |
vidartf/hyperspy | hyperspy/drawing/_widgets/scalebar.py | 2 | 5335 | # -*- coding: utf-8 -*-
# Copyright 2007-2016 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
from hyperspy.misc.math_tools import closest_nice_number
class ScaleBar(object):

    """A scale bar (line plus length label) drawn on a matplotlib axes."""

    def __init__(self, ax, units, pixel_size=None, color='white',
                 position=None, max_size_ratio=0.25, lw=2, length=None,
                 animated=False):
        """Add a scale bar to an image.

        Parameters
        ----------
        ax : matplotlib axes
            The axes where to draw the scale bar.
        units : string
        pixel_size : {None, float}
            If None the axes of the image are supposed to be calibrated.
            Otherwise the pixel size must be specified.
        color : a valid matplotlib color
        position : {None, (float, float)}
            If None the position is automatically determined.
        max_size_ratio : float
            The maximum size of the scale bar in respect to the
            length of the x axis.
        lw : int
            The line width.
        length : {None, float}
            If None the length is automatically calculated using the
            max_size_ratio.
        """
        self.animated = animated
        self.ax = ax
        self.units = units
        self.pixel_size = pixel_size
        self.xmin, self.xmax = ax.get_xlim()
        self.ymin, self.ymax = ax.get_ylim()
        self.text = None
        self.line = None
        self.tex_bold = False
        if length is None:
            self.calculate_size(max_size_ratio=max_size_ratio)
        else:
            self.length = length
        if position is None:
            self.position = self.calculate_line_position()
        else:
            self.position = position
        self.calculate_text_position()
        self.plot_scale(line_width=lw)
        self.set_color(color)

    def get_units_string(self):
        """Return the label text, e.g. ``$5\\,$nm``; TeX-bold if enabled."""
        if self.tex_bold is True:
            # Bug fix: the original `(self.units[0] and self.units[-1]) == '$'`
            # evaluated to `self.units[-1] == '$'` (the `and` of two non-empty
            # strings is the second string), so only the trailing character
            # was checked. Require *both* delimiters.
            if self.units[0] == '$' and self.units[-1] == '$':
                return r'$\mathbf{%g\,%s}$' % \
                    (self.length, self.units[1:-1])
            else:
                return r'$\mathbf{%g\,}$\textbf{%s}' % \
                    (self.length, self.units)
        else:
            return r'$%g\,$%s' % (self.length, self.units)

    def calculate_line_position(self, pad=0.05):
        """Return a default anchor near the lower-left corner of the axes."""
        return ((1 - pad) * self.xmin + pad * self.xmax,
                (1 - pad) * self.ymin + pad * self.ymax)

    def calculate_text_position(self, pad=1 / 100.):
        """Place the label centered above the bar, offset by *pad* of y-range."""
        ps = self.pixel_size if self.pixel_size is not None else 1
        x1, y1 = self.position
        x2, y2 = x1 + self.length / ps, y1
        self.text_position = ((x1 + x2) / 2.,
                              y2 + (self.ymax - self.ymin) / ps * pad)

    def calculate_size(self, max_size_ratio=0.25):
        """Pick a "nice" bar length no larger than *max_size_ratio* of x-range."""
        ps = self.pixel_size if self.pixel_size is not None else 1
        size = closest_nice_number(ps * (self.xmax - self.xmin) *
                                   max_size_ratio)
        self.length = size

    def remove(self):
        """Remove the bar's line and text artists from the axes, if present."""
        if self.line is not None:
            self.ax.lines.remove(self.line)
        if self.text is not None:
            self.ax.texts.remove(self.text)

    def plot_scale(self, line_width=1):
        """(Re)draw the scale bar line and its label on the axes."""
        self.remove()
        ps = self.pixel_size if self.pixel_size is not None else 1
        x1, y1 = self.position
        x2, y2 = x1 + self.length / ps, y1
        self.line, = self.ax.plot([x1, x2], [y1, y2],
                                  linestyle='-',
                                  lw=line_width,
                                  animated=self.animated)
        self.text = self.ax.text(*self.text_position,
                                 s=self.get_units_string(),
                                 ha='center',
                                 size='medium',
                                 animated=self.animated)
        # Drawing can change the view limits; restore the originals.
        self.ax.set_xlim(self.xmin, self.xmax)
        self.ax.set_ylim(self.ymin, self.ymax)
        self.ax.figure.canvas.draw()

    def _set_position(self, x, y):
        """Move the bar's anchor to (x, y) and redraw."""
        self.position = x, y
        self.calculate_text_position()
        self.plot_scale(line_width=self.line.get_linewidth())

    def set_color(self, c):
        """Apply color *c* to both the line and the label."""
        self.line.set_color(c)
        self.text.set_color(c)
        self.ax.figure.canvas.draw_idle()

    def set_length(self, length):
        """Set an explicit bar length and redraw, preserving the color."""
        color = self.line.get_color()
        self.length = length
        # Bug fix: this used to call self.calculate_scale_size(), a method
        # that does not exist (AttributeError). Recomputing the size here
        # would also clobber the explicitly requested length, so the call
        # is simply dropped.
        self.calculate_text_position()
        self.plot_scale(line_width=self.line.get_linewidth())
        self.set_color(color)

    def set_tex_bold(self):
        """Switch the label to TeX-bold rendering and redraw the text."""
        self.tex_bold = True
        self.text.set_text(self.get_units_string())
        self.ax.figure.canvas.draw_idle()
| gpl-3.0 |
Boldie/gourmet | gourmet/plugins/import_export/mastercook_import_plugin/mastercook_importer.py | 6 | 9029 | import gourmet.importers.importer as importer
import xml.sax, re, os.path
from gourmet.gdebug import debug
from gourmet.importers.xml_importer import unquoteattr
import gourmet.importers.xml_importer as xml_importer
from gettext import gettext as _
class Mx2Cleaner:

    """Fix up the not-quite-well-formed XML that MasterCook exports.

    .mx2 files can contain stray XML prologs / DOCTYPE declarations
    mid-stream and attribute values with unescaped quote characters.
    cleanup() rewrites a file line by line, stripping the former,
    re-quoting the latter, and decoding each line to unicode.
    """

    def __init__ (self):
        # Junk declarations to strip wherever they appear (raw strings so
        # the regex backslashes survive; the old non-raw literals relied
        # on invalid string escapes).
        self.regs_to_toss = [r"<\?xml[^?]+\?>", r"<!DOCTYPE[^>]+>"]
        # Single alternation matching any of the above.
        self.toss_regexp = "(" + "|".join(self.regs_to_toss) + ")"
        self.toss_regexp = re.compile(self.toss_regexp)
        # A tag whose attribute value contains a quote or angle bracket,
        # i.e. one MasterCook failed to escape properly.
        self.attr_regexp = '(<[^>]+?)"([^=]+["<>][^=]+)"'
        self.attr_regexp = re.compile(self.attr_regexp)
        # Candidate encodings tried in order when decoding each line.
        self.encodings = ['cp1252','iso8859','ascii','latin_1','cp850','utf-8']

    def cleanup (self, infile, outfile):
        """Write a cleaned-up, decoded copy of *infile* to *outfile*."""
        # Context managers guarantee the handles are closed even when a
        # line fails to decode (the old code leaked them on error).
        with open(infile, 'r') as inf:
            with open(outfile, 'w') as outf:
                for l in inf.readlines():
                    l = self.toss_regs(l)
                    l = self.fix_attrs(l)
                    l = self.encode(l)
                    outf.write(l)

    def toss_regs (self, instr):
        """Remove the first junk declaration found in *instr*, if any."""
        m = self.toss_regexp.search(instr)
        if m:
            outstr = instr[0:m.start()] + instr[m.end():]
            debug('Converted "%s" to "%s"'%(instr,outstr),1)
            return outstr
        else:
            return instr

    def fix_attrs (self, instr):
        """Re-quote every malformed attribute value in *instr*."""
        match = self.attr_regexp.search(instr)
        outstr = ""
        while match:
            outstr = outstr + instr[0:match.start()]
            pre, badattr = match.groups()
            outstr = outstr + pre
            outstr = outstr + xml.sax.saxutils.quoteattr(badattr)
            debug('Fixed broken attribute: %s -> %s'%(instr,outstr),0)
            instr = instr[match.end():]
            match = self.attr_regexp.search(instr)
        outstr = outstr + instr
        return outstr

    def encode (self, l):
        """Decode *l* to unicode, trying each candidate encoding in turn.

        Raises Exception when none of the encodings can decode the line.
        """
        for e in self.encodings:
            try:
                return l.decode(e)
            # Only decoding failures should move us on to the next
            # candidate; the old bare `except:` hid real bugs too.
            except (UnicodeError, LookupError):
                debug('Could not decode as %s'%e,2)
        raise Exception("Could not encode %s" % l)
class MastercookXMLHandler (xml_importer.RecHandler):

    """SAX content handler that turns MasterCook XML elements into recipes.

    Text content is accumulated via named buffers: each element handler
    registers an attribute name in self.bufs on start, characters()
    appends incoming text to every registered buffer, and the handler
    consumes its buffer on end.
    """

    def __init__ (self, parent_thread=None,conv=None):
        debug('MastercookXMLHandler starting',0)
        # NOTE(review): conv is not forwarded to RecHandler here, though
        # Importer.__init__ below does receive it -- confirm intentional.
        xml_importer.RecHandler.__init__(self,parent_thread=parent_thread,conv=None)
        self.total = 0      # recipes seen (for progress reporting)
        self.recs_done = 0
        # Elements we handle, mapped to the attributes we read from them.
        self.elements = {
            'mx2':['source','date'],
            #'Summ':[],
            'Nam':[],
            'RcpE':['name'],
            'RTxt':[],
            'Serv':['qty'],
            'PropT':['elapsed'],
            'IngR':['name','unit','qty'],
            'IPrp':[],
            #'DirS':[],
            'DirT':[],
            'Desc':[],
            'Srce':[],
            'Note':[],
            'CatT':[],
            'Yield':['unit','qty'],
            }
        self.current_elements = []
        # Names of attributes on self that characters() appends text to.
        self.bufs = []
        xml.sax.ContentHandler.__init__(self)
        importer.Importer.__init__(self,conv=conv)

    def grabattr (self, attrs, name, default=''):
        """Return the unquoted value of attribute *name*, or *default*."""
        return unquoteattr(attrs.get(name,default))

    def startElement (self, name, attrs):
        self.in_mixed=0
        # `has_key` is Python 2 only; `in` behaves identically on dicts.
        if name not in self.elements:
            debug('Unhandled element: %s'%name,0)
            return
        else:
            self.current_elements = [name] + self.current_elements
            handler = self._get_handler(name)
            handler(start=True,attrs=attrs)

    def endElement (self, name):
        if name not in self.elements:
            return
        else:
            self.current_elements.remove(name)
            handler = self._get_handler(name)
            handler(end=True)

    def endDocument (self):
        self.emit('progress',1,_("Mastercook import finished."))

    def _get_handler (self, name):
        """Return the bound <name>_handler method for element *name*."""
        return getattr(self,'%s_handler'%name)

    def mx2_handler (self, start=False, end=False, attrs=None):
        if start:
            pass

    def characters (self, ch):
        """Append incoming character data to every registered buffer."""
        debug('adding to %s bufs: %s'%(len(self.bufs),ch),0)
        for buf in self.bufs:
            setattr(self,buf,getattr(self,buf)+ch)

    def Nam_handler (self, start=False, end=False, attrs=None):
        if start:
            # we simply count recipes so that we can
            # indicate progress.
            self.total += 1

    def RcpE_handler (self, start=False, end=False, attrs=None):
        """A recipe element: open a new recipe / commit the finished one."""
        if start:
            self.start_rec()
            #if self.source:
            #    self.rec['source']=self.source
            if attrs:
                self.rec['title']=self.grabattr(attrs,'name')
        if end:
            if 'yield' in self.rec:
                # The yield (qty, unit) pair has no dedicated field, so it
                # is appended to the instructions before committing.
                self._add_to_instructions("\nYield: %s %s"%self.rec['yield'])
                del self.rec['yield']
            self.commit_rec()

    def RTxt_handler (self, start=False, end=False, attrs=None):
        if start:
            self.cdata_buf = ""
            self.bufs.append('cdata_buf')
        if end:
            self.bufs.remove('cdata_buf')

    def Serv_handler (self, start=False, end=False, attrs=None):
        if attrs:
            self.rec['servings']=self.grabattr(attrs,'qty')

    def Yield_handler (self, start=False, end=False, attrs=None):
        if attrs:
            self.rec['yield']=(self.grabattr(attrs,'qty'),self.grabattr(attrs,'unit'))

    def CatT_handler (self, start=False, end=False, attrs=None):
        """Accumulate a category name; categories are space-joined."""
        if start:
            self.catbuf = ""
            self.bufs.append('catbuf')
        if end:
            self.bufs.remove('catbuf')
            self.catbuf = self.catbuf.strip()
            if 'category' in self.rec:
                self.rec['category']=self.rec['category']+" "+self.catbuf
            else:
                self.rec['category']=xml.sax.saxutils.unescape(self.catbuf)

    def IngR_handler (self, start=False, end=False, attrs=None):
        """An ingredient: name/qty/unit come from attributes."""
        if attrs:
            self.start_ing()
            self.item = self.grabattr(attrs,'name')
            self.add_amt(self.grabattr(attrs,'qty'))
            self.ing['unit']=self.grabattr(attrs,'unit')
        if end:
            # MasterCook marks ingredient-group headers as "=== name ===".
            if self.item.startswith("==="):
                self.group = self.item[4:-4]
            else:
                self.add_item(self.item)
                debug(
                    "item=%s, unit=%s"%(self.item,self.ing['unit']),
                    0
                    )
                self.commit_ing()

    def _add_to_instructions (self, buf):
        """Append *buf* (unescaped) to the recipe's instructions."""
        debug('adding to instructions: %s'%buf,0)
        if 'instructions' in self.rec:
            self.rec['instructions'] = self.rec['instructions'] + "\n%s"%xml.sax.saxutils.unescape(buf)
        else:
            self.rec['instructions'] = xml.sax.saxutils.unescape(buf)

    def DirT_handler (self, start=False, end=False, attrs=None):
        if start:
            self.dbuf = ""
            self.bufs.append('dbuf')
        if end:
            self.bufs.remove('dbuf')
            self._add_to_instructions(self.dbuf.strip())

    # this also gets added to instructions
    Desc_handler = DirT_handler

    def Note_handler (self, start=False, end=False, attrs=None):
        """Notes accumulate into the recipe's modifications field."""
        if start:
            self.dbuf = ""
            self.bufs.append('dbuf')
        if end:
            self.bufs.remove('dbuf')
            buf = xml.sax.saxutils.unescape(self.dbuf.strip())
            if 'modifications' in self.rec:
                self.rec['modifications'] = self.rec['modifications'] + "\n%s"%buf
            else:
                self.rec['modifications'] = buf

    def IPrp_handler (self, start=False, end=False, attrs=None):
        """Ingredient preparation text is appended to the item name."""
        if start:
            self.ipbuf = ""
            self.bufs.append('ipbuf')
        if end:
            self.item += "; %s"%xml.sax.saxutils.unescape(self.ipbuf.strip())
            self.bufs.remove('ipbuf')

    def Srce_handler (self, start=False, end=False, attrs=None):
        if start:
            self.srcbuf = ""
            self.bufs.append('srcbuf')
        if end:
            self.rec['source']=self.srcbuf.strip()
            self.bufs.remove('srcbuf')
class MastercookImporter (xml_importer.Converter):
    """Importer for Mastercook .mx2 files.

    Runs the raw XML through Mx2Cleaner before parsing, since Mastercook
    exports are not always well-formed.
    """
    def __init__ (self, filename):
        """Set up a Converter that fires MastercookXMLHandler per <RcpE>."""
        xml_importer.Converter.__init__(
            self,
            recHandler=MastercookXMLHandler,
            recMarker='<RcpE',
            filename=filename,
            )
    def pre_run (self):
        """Tidy the XML into a sibling ".gourmetcleaned" file and point
        self.fn at the cleaned copy before parsing begins."""
        self.emit('progress', 0.03, _("Tidying up XML"))
        cleaner = Mx2Cleaner()
        root, extension = os.path.splitext(self.fn)
        cleaned = root + ".gourmetcleaned" + extension
        cleaner.cleanup(self.fn, cleaned)
        debug('Cleaned up file saved to %s' % cleaned, 1)
        # Remember the original path, then parse the cleaned copy instead.
        self.orig_fn = self.fn
        self.fn = cleaned
| gpl-2.0 |
jorik041/stackprinter | app/tests/mock_sepyresults.py | 2 | 13647 | # -*- coding: utf-8 -*-
EMPTY_QUESTIONS="""
{
"total": 0,
"page": 1,
"pagesize": 1,
"questions": []
}
"""
EMPTY_ANSWERS="""
{
"total": 0,
"page": 1,
"pagesize": 1,
"answers": []
}
"""
QUESTION="""
{
"total": 1,
"page": 1,
"pagesize": 1,
"questions": [
{
"tags": [
"c#",
"datetime"
],
"answer_count": 27,
"accepted_answer_id": 1404,
"favorite_count": 61,
"question_timeline_url": "/questions/9/timeline",
"question_comments_url": "/questions/9/comments",
"question_answers_url": "/questions/9/answers",
"question_id": 9,
"owner": {
"user_id": 1,
"user_type": "moderator",
"display_name": "Jeff Atwood",
"reputation": 13942,
"email_hash": "51d623f33f8b83095db84ff35e15dbe8"
},
"creation_date": 1217547659,
"last_edit_date": 1279702437,
"last_activity_date": 1279702437,
"up_vote_count": 110,
"down_vote_count": 4,
"view_count": 19681,
"score": 106,
"community_owned": false,
"title": "How do I calculate someone's age in C#?"
}
]
}
"""
ANSWER="""
{
"total": 1,
"page": 1,
"pagesize": 30,
"answers": [
{
"answer_id": 22,
"accepted": false,
"answer_comments_url": "/answers/22/comments",
"question_id": 20,
"owner": {
"user_id": 4,
"user_type": "moderator",
"display_name": "Jeff Atwood",
"reputation": 292,
"email_hash": "51d623f33f8b83095db84ff35e15dbe8"
},
"creation_date": 1274322376,
"last_edit_date": 1274327738,
"last_activity_date": 1274327738,
"up_vote_count": 3,
"down_vote_count": 0,
"view_count": 77,
"score": 3,
"community_owned": false,
"title": "This is the right answer"
}
]
}
"""
ANSWERS="""
{
"total": 3,
"page": 1,
"pagesize": 30,
"answers": [
{
"answer_id": 443,
"accepted": false,
"answer_comments_url": "/answers/443/comments",
"question_id": 1,
"owner": {
"user_id": 1172,
"user_type": "registered",
"display_name": "slf",
"reputation": 101,
"email_hash": "938b7f6d039125f37271fd42104fbf37"
},
"creation_date": 1275406590,
"last_activity_date": 1275406590,
"up_vote_count": 1,
"down_vote_count": 0,
"view_count": 4283,
"score": 1,
"community_owned": true,
"title": "API Documentation and Help",
"body": "test"
},
{
"answer_id": 369,
"accepted": false,
"answer_comments_url": "/answers/369/comments",
"question_id": 1,
"owner": {
"user_id": 14,
"user_type": "registered",
"display_name": "code poet",
"reputation": 5420,
"email_hash": "df4a7fbd8a054fd6193ca0ee62952f1f"
},
"creation_date": 1275224160,
"last_edit_date": 1275234003,
"last_activity_date": 1275234003,
"up_vote_count": 6,
"down_vote_count": 0,
"view_count": 4283,
"score": 6,
"community_owned": true,
"title": "API Documentation and Help",
"body": "test"
},
{
"answer_id": 336,
"accepted": false,
"answer_comments_url": "/answers/336/comments",
"question_id": 1,
"owner": {
"user_id": 14,
"user_type": "registered",
"display_name": "code poet",
"reputation": 5420,
"email_hash": "df4a7fbd8a054fd6193ca0ee62952f1f"
},
"creation_date": 1275051317,
"last_edit_date": 1275221998,
"last_activity_date": 1275221998,
"up_vote_count": 0,
"down_vote_count": 0,
"view_count": 4283,
"score": 0,
"community_owned": true,
"title": "API Documentation and Help",
"body": "test"
}
]
}
"""
QUESTIONS="""
{
"total": 2,
"page": 1,
"pagesize": 30,
"questions": [
{
"tags": [
"teaching"
],
"answer_count": 9,
"favorite_count": 2,
"question_timeline_url": "/questions/83/timeline",
"question_comments_url": "/questions/83/comments",
"question_answers_url": "/questions/83/answers",
"question_id": 83,
"owner": {
"user_id": 24,
"user_type": "registered",
"display_name": "David McGraw",
"reputation": 168,
"email_hash": "840264c45b706fb829d406e9c61a1889"
},
"creation_date": 1279656260,
"last_edit_date": 1279668012,
"last_activity_date": 1279683109,
"up_vote_count": 7,
"down_vote_count": 0,
"view_count": 83,
"score": 7,
"community_owned": false,
"title": "What are some good ways to get children excited about math?"
},
{
"tags": [
"soft-question",
"teaching",
"philosophical"
],
"answer_count": 11,
"accepted_answer_id": 168,
"favorite_count": 1,
"question_timeline_url": "/questions/154/timeline",
"question_comments_url": "/questions/154/comments",
"question_answers_url": "/questions/154/answers",
"question_id": 154,
"owner": {
"user_id": 113,
"user_type": "registered",
"display_name": "Neil Mayhew",
"reputation": 28,
"email_hash": "e8f67f35200cf319e30329dddd412b6d"
},
"creation_date": 1279664256,
"last_edit_date": 1279717657,
"last_activity_date": 1279717657,
"up_vote_count": 4,
"down_vote_count": 2,
"view_count": 147,
"score": 2,
"community_owned": false,
"title": "Are complex numbers quantities?"
}
]
}
"""
USERS = """
{
"total": 1,
"page": 1,
"pagesize": 30,
"users": [
{
"user_id": 130929,
"user_type": "registered",
"creation_date": 1246352548,
"display_name": "systempuntoout",
"reputation": 4751,
"email_hash": "0db359dc9dcb63067f6d7c8ad856e019",
"age": 34,
"last_access_date": 1280530077,
"website_url": "http://systempuntoout.blogspot.com",
"location": "Italy",
"about_me": "about_me",
"question_count": 70,
"answer_count": 210,
"view_count": 296,
"up_vote_count": 1171,
"down_vote_count": 50,
"accept_rate": 98,
"association_id": "459c805a-d71a-4d07-9809-14e503777b30",
"user_questions_url": "/users/130929/questions",
"user_answers_url": "/users/130929/answers",
"user_favorites_url": "/users/130929/favorites",
"user_tags_url": "/users/130929/tags",
"user_badges_url": "/users/130929/badges",
"user_timeline_url": "/users/130929/timeline",
"user_mentioned_url": "/users/130929/mentioned",
"user_comments_url": "/users/130929/comments",
"user_reputation_url": "/users/130929/reputation",
"badge_counts": {
"gold": 1,
"silver": 4,
"bronze": 21
}
}
]
}
"""
TAGS="""
{
"total": 72,
"page": 1,
"pagesize": 70,
"tags": [
{
"name": "python",
"count": 30358,
"fulfills_required": false
},
{
"name": "wxpython",
"count": 592,
"fulfills_required": false
},
{
"name": "ironpython",
"count": 437,
"fulfills_required": false
},
{
"name": "python-3",
"count": 362,
"fulfills_required": false
},
{
"name": "pythonic",
"count": 194,
"fulfills_required": false
},
{
"name": "mod-python",
"count": 126,
"fulfills_required": false
},
{
"name": "ipython",
"count": 78,
"fulfills_required": false
},
{
"name": "boost-python",
"count": 60,
"fulfills_required": false
},
{
"name": "python-2.6",
"count": 59,
"fulfills_required": false
},
{
"name": "cpython",
"count": 41,
"fulfills_required": false
},
{
"name": "pdb-python",
"count": 37,
"fulfills_required": false
},
{
"name": "python-2.5",
"count": 35,
"fulfills_required": false
},
{
"name": "python-c-api",
"count": 35,
"fulfills_required": false
},
{
"name": "python-2.x",
"count": 30,
"fulfills_required": false
},
{
"name": "python-datamodel",
"count": 26,
"fulfills_required": false
},
{
"name": "pythonpath",
"count": 21,
"fulfills_required": false
},
{
"name": "python-module",
"count": 20,
"fulfills_required": false
},
{
"name": "python-2.4",
"count": 17,
"fulfills_required": false
},
{
"name": "ironpython-studio",
"count": 14,
"fulfills_required": false
},
{
"name": "mysql-python",
"count": 12,
"fulfills_required": false
},
{
"name": "python-interpreter",
"count": 12,
"fulfills_required": false
},
{
"name": "python-imaging-library",
"count": 11,
"fulfills_required": false
},
{
"name": "python-decorators",
"count": 11,
"fulfills_required": false
},
{
"name": "pythoncard",
"count": 10,
"fulfills_required": false
},
{
"name": "python-c-extension",
"count": 7,
"fulfills_required": false
},
{
"name": "python-embedding",
"count": 7,
"fulfills_required": false
},
{
"name": "python-import",
"count": 7,
"fulfills_required": false
},
{
"name": "python-multithreading",
"count": 7,
"fulfills_required": false
},
{
"name": "parallel-python",
"count": 7,
"fulfills_required": false
},
{
"name": "python-3.1",
"count": 6,
"fulfills_required": false
},
{
"name": "biopython",
"count": 6,
"fulfills_required": false
},
{
"name": "python-modules",
"count": 5,
"fulfills_required": false
},
{
"name": "vpython",
"count": 5,
"fulfills_required": false
},
{
"name": "python-egg-cache",
"count": 4,
"fulfills_required": false
},
{
"name": "pythonce",
"count": 4,
"fulfills_required": false
},
{
"name": "dnspython",
"count": 4,
"fulfills_required": false
},
{
"name": "bpython",
"count": 4,
"fulfills_required": false
},
{
"name": "activepython",
"count": 4,
"fulfills_required": false
},
{
"name": "gdata-python-client",
"count": 3,
"fulfills_required": false
},
{
"name": "pythonw",
"count": 3,
"fulfills_required": false
},
{
"name": "python-socketserver",
"count": 3,
"fulfills_required": false
},
{
"name": "python-twitter",
"count": 3,
"fulfills_required": false
},
{
"name": "python-mode",
"count": 3,
"fulfills_required": false
},
{
"name": "python-path",
"count": 3,
"fulfills_required": false
},
{
"name": "python-nose",
"count": 2,
"fulfills_required": false
},
{
"name": "python-openid",
"count": 2,
"fulfills_required": false
},
{
"name": "python-idle",
"count": 2,
"fulfills_required": false
},
{
"name": "python-visual",
"count": 2,
"fulfills_required": false
},
{
"name": "python-svn-swig-api",
"count": 2,
"fulfills_required": false
},
{
"name": "python-install",
"count": 2,
"fulfills_required": false
},
{
"name": "python-interactive",
"count": 2,
"fulfills_required": false
},
{
"name": "python-2.1",
"count": 2,
"fulfills_required": false
},
{
"name": "p4python",
"count": 2,
"fulfills_required": false
},
{
"name": "python.h",
"count": 1,
"fulfills_required": false
},
{
"name": "python-2.3",
"count": 1,
"fulfills_required": false
},
{
"name": "python3k",
"count": 1,
"fulfills_required": false
},
{
"name": "python-buffer",
"count": 1,
"fulfills_required": false
},
{
"name": "python-2.7",
"count": 1,
"fulfills_required": false
},
{
"name": "firepython",
"count": 1,
"fulfills_required": false
},
{
"name": "advanced-python",
"count": 1,
"fulfills_required": false
},
{
"name": "no-python",
"count": 1,
"fulfills_required": false
},
{
"name": "plpython",
"count": 1,
"fulfills_required": false
},
{
"name": "portable-python",
"count": 1,
"fulfills_required": false
},
{
"name": "pure-python",
"count": 1,
"fulfills_required": false
},
{
"name": "python-log-viewer",
"count": 1,
"fulfills_required": false
},
{
"name": "python-memcached",
"count": 1,
"fulfills_required": false
},
{
"name": "python-ide",
"count": 1,
"fulfills_required": false
},
{
"name": "python-design-patterns",
"count": 1,
"fulfills_required": false
},
{
"name": "python-exceptions",
"count": 1,
"fulfills_required": false
},
{
"name": "python-gtkmozembed",
"count": 1,
"fulfills_required": false
}
]
}
"""
| bsd-3-clause |
vladmm/intellij-community | python/helpers/profiler/thriftpy3/protocol/TProtocolDecorator.py | 44 | 1543 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from thriftpy3.protocol.TProtocol import TProtocolBase
from types import *
class TProtocolDecorator():
    """Forwards every attribute access to a wrapped protocol object.

    Function-like members of the wrapped protocol are routed through
    ``_wrap`` so that subclasses can intercept protocol calls; plain data
    attributes are returned directly.
    """
    def __init__(self, protocol):
        # NOTE(review): the original code constructs (and discards) a
        # TProtocolBase around the wrapped protocol; preserved as-is.
        TProtocolBase(protocol)
        self.protocol = protocol
    def __getattr__(self, name):
        """Proxy attribute lookup to the wrapped protocol.

        Raises AttributeError if the wrapped protocol lacks the attribute.
        """
        if hasattr(self.protocol, name):
            member = getattr(self.protocol, name)
            # Bug fix: the original tested ``type(member) in [MethodType,
            # UnboundMethodType, ...]``, but UnboundMethodType does not exist
            # in Python 3's ``types`` module, so this line raised NameError.
            if isinstance(member, (MethodType, FunctionType, LambdaType,
                                   BuiltinFunctionType, BuiltinMethodType)):
                return lambda *args, **kwargs: self._wrap(member, args, kwargs)
            else:
                return member
        raise AttributeError(name)
    def _wrap(self, func, args, kwargs):
        """Invoke *func*; plain functions get the wrapped protocol as self."""
        if isinstance(func, MethodType):
            result = func(*args, **kwargs)
        else:
            result = func(self.protocol, *args, **kwargs)
        return result
| apache-2.0 |
KWARC/mwetoolkit | bin/tail.py | 1 | 6014 | #!/usr/bin/python
# -*- coding:UTF-8 -*-
# ###############################################################################
#
# Copyright 2010-2014 Carlos Ramisch, Vitor De Araujo, Silvio Ricardo Cordeiro,
# Sandra Castellanos
#
# tail.py is part of mwetoolkit
#
# mwetoolkit is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# mwetoolkit is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with mwetoolkit. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
"""
Prints the last N entities of a list. Works like the "tail" command in
the unix platform, only it takes a file in xml format as input.
This script is DTD independent, that is, it might be called on a corpus
file, on a list of candidates or on a dictionary.
For more information, call the script with no parameter and read the
usage instructions.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from libs.util import read_options, treat_options_simplest, verbose, error
from libs import filetype
################################################################################
# GLOBALS
usage_string = """Usage:
python {program} OPTIONS <input-file>
The <input-file> must be in one of the filetype
formats accepted by the `--from` switch.
OPTIONS may be:
-n OR --number
Number of entities that you want to print out. Default value is 10.
--from <input-filetype-ext>
Force conversion from given filetype extension.
(By default, file type is automatically detected):
{descriptions.input[ALL]}
--to <output-filetype-ext>
Convert input to given filetype extension.
(By default, keeps input in original format):
{descriptions.output[ALL]}
{common_options}
"""
limit = 10
input_filetype_ext = None
output_filetype_ext = None
################################################################################
class TailPrinterHandler(filetype.ChainedInputHandler):
    """For each entity in the file, prints it if the limit is still not
    achieved. No buffering as in tail, this is not necessary here.
    """
    def __init__(self, limit):
        # `entity_buffer` is a circular buffer holding the last `limit`
        # (entity, info) pairs; `entity_counter` counts all entities seen.
        self.limit = limit
        self.entity_counter = 0
        self.entity_buffer = [None] * limit
    def before_file(self, fileobj, info={}):
        # Lazily create the output printer on the first file, reusing it for
        # subsequent files.  NOTE(review): assumes the base class initializes
        # self.chain to None -- confirm in filetype.ChainedInputHandler.
        if self.chain is None:
            self.chain = self.make_printer(info, output_filetype_ext)
        self.chain.before_file(fileobj, info)
    def _fallback_entity(self, entity, info={}):
        """For each entity in the corpus, puts it in a circular buffer. This is
        necessary because we do not know the total number of lines, so we always
        keep the last n lines in the global buffer.
        @param entity A subclass of `Ngram` that is being read from the XM.
        """
        # With limit == 0 nothing is stored, but we still count entities.
        if self.limit > 0:
            self.entity_buffer[self.entity_counter % self.limit] = (entity, info)
        self.entity_counter += 1
    def after_file(self, fileobj, info={}):
        """After we read all the XML file, we can finally be sure about which lines
        need to be printed. Those correspond exactly to the N last lines added
        to the buffer.
        """
        for i in range(min(self.limit, self.entity_counter)):
            #pdb.set_trace()
            # entity_buffer is a circular buffer. In order to print the entities in
            # the correct order, we go from the cell immediately after the last one
            # stored in the buffer (position entity_counter) until the until the
            # last one stored in the buffer (position entity_counter-1). If there
            # are less entities in the file than the limit, this padding is not
            # needed and we simply go from 0 until entity_counter-1
            index = (self.entity_counter + i) % min(self.limit, self.entity_counter)
            if self.entity_buffer[index] != None:
                entity, info = self.entity_buffer[index]
                self.chain.handle(entity, info)
            else:
                break
        self.chain.after_file(fileobj, info)
################################################################################
def treat_options(opts, arg, n_arg, usage_string):
    """Callback function that handles the command line options of this script.

    @param opts The options parsed by getopts. Ignored.
    @param arg The argument list parsed by getopts.
    @param n_arg The number of arguments expected for this script.
    """
    global limit
    global entity_buffer
    global input_filetype_ext
    global output_filetype_ext
    treat_options_simplest(opts, arg, n_arg, usage_string)
    for (option, value) in opts:
        if option == "--from":
            input_filetype_ext = value
        elif option == "--to":
            output_filetype_ext = value
        elif option in ("-n", "--number"):
            try:
                limit = int(value)
                entity_buffer = [None] * limit
                # Negative counts make no sense; reuse the ValueError path.
                if limit < 0:
                    raise ValueError
            except ValueError:
                error("You must provide a positive "
                      "integer value as argument of -n option.")
        else:
            raise Exception("Bad arg: " + option)
################################################################################
# MAIN SCRIPT
# Parse the command-line options (-n/--number, --from, --to) and stream the
# input file(s) through the tail handler, which keeps only the last N entities.
args = read_options("n:", ["from=", "to=", "number="], treat_options, -1, usage_string)
filetype.parse(args, TailPrinterHandler(limit), input_filetype_ext)
| gpl-3.0 |
chrisblock/selenium | py/test/selenium/webdriver/common/click_tests.py | 39 | 1423 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
@pytest.fixture(autouse=True)
def loadPage(pages):
    # Every test in this module starts from the clicks.html test page.
    pages.load("clicks.html")
def testCanClickOnALinkThatOverflowsAndFollowIt(driver):
    """A link that overflows its container must still be clickable and
    navigate to the target page."""
    overflow_link = driver.find_element(By.ID, "overflowLink")
    overflow_link.click()
    wait = WebDriverWait(driver, 3)
    wait.until(EC.title_is("XHTML Test Page"))
def testClickingALinkMadeUpOfNumbersIsHandledCorrectly(driver):
    """Link text consisting only of digits must be matched and followed."""
    numeric_link = driver.find_element(By.LINK_TEXT, "333333")
    numeric_link.click()
    wait = WebDriverWait(driver, 3)
    wait.until(EC.title_is("XHTML Test Page"))
| apache-2.0 |
jn7163/django | django/db/backends/mysql/schema.py | 166 | 4774 | from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.models import NOT_PROVIDED
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
    """MySQL-specific schema editor.

    Overrides the base SQL templates with MySQL's dialect (RENAME TABLE,
    MODIFY/CHANGE column forms, DROP INDEX/FOREIGN KEY) and adapts a few
    operations where MySQL's DDL behaviour differs.
    """
    # MySQL uses RENAME TABLE and MODIFY/CHANGE instead of the standard
    # ALTER TABLE ... RENAME / ALTER COLUMN forms.
    sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s"
    sql_alter_column_null = "MODIFY %(column)s %(type)s NULL"
    sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL"
    sql_alter_column_type = "MODIFY %(column)s %(type)s"
    sql_rename_column = "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s"
    sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
    sql_create_fk = (
        "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY "
        "(%(column)s) REFERENCES %(to_table)s (%(to_column)s)"
    )
    sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
    sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
    alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
    alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
    sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
    sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
    def quote_value(self, value):
        """Escape *value* for inlining into SQL using MySQLdb's converters."""
        # Inner import to allow module to fail to load gracefully
        import MySQLdb.converters
        return MySQLdb.escape(value, MySQLdb.converters.conversions)
    def skip_default(self, field):
        """
        MySQL doesn't accept default values for TEXT and BLOB types, and
        implicitly treats these columns as nullable.
        """
        db_type = field.db_type(self.connection)
        return (
            db_type is not None and
            db_type.lower() in {
                'tinyblob', 'blob', 'mediumblob', 'longblob',
                'tinytext', 'text', 'mediumtext', 'longtext',
            }
        )
    def add_field(self, model, field):
        """Add the column, then backfill a one-off default for the TEXT/BLOB
        types that cannot carry a DEFAULT clause (see skip_default)."""
        super(DatabaseSchemaEditor, self).add_field(model, field)
        # Simulate the effect of a one-off default.
        if self.skip_default(field) and field.default not in {None, NOT_PROVIDED}:
            effective_default = self.effective_default(field)
            self.execute('UPDATE %(table)s SET %(column)s = %%s' % {
                'table': self.quote_name(model._meta.db_table),
                'column': self.quote_name(field.column),
            }, [effective_default])
    def _model_indexes_sql(self, model):
        """Like the base implementation, but skips explicit FK indexes on
        InnoDB tables, where MySQL creates them automatically."""
        storage = self.connection.introspection.get_storage_engine(
            self.connection.cursor(), model._meta.db_table
        )
        if storage == "InnoDB":
            for field in model._meta.local_fields:
                if field.db_index and not field.unique and field.get_internal_type() == "ForeignKey":
                    # Temporary setting db_index to False (in memory) to disable
                    # index creation for FKs (index automatically created by MySQL)
                    field.db_index = False
        return super(DatabaseSchemaEditor, self)._model_indexes_sql(model)
    def _delete_composed_index(self, model, fields, *args):
        """
        MySQL can remove an implicit FK index on a field when that field is
        covered by another index like a unique_together. "covered" here means
        that the more complex index starts like the simpler one.
        http://bugs.mysql.com/bug.php?id=37910 / Django ticket #24757
        We check here before removing the [unique|index]_together if we have to
        recreate a FK index.
        """
        first_field = model._meta.get_field(fields[0])
        if first_field.get_internal_type() == 'ForeignKey':
            constraint_names = self._constraint_names(model, [first_field.column], index=True)
            if not constraint_names:
                # The FK's implicit index is gone; recreate it before dropping
                # the composed index that was covering it.
                self.execute(self._create_index_sql(model, [first_field], suffix=""))
        return super(DatabaseSchemaEditor, self)._delete_composed_index(model, fields, *args)
    def _set_field_new_type_null_status(self, field, new_type):
        """
        Keep the null property of the old field. If it has changed, it will be
        handled separately.
        """
        # MODIFY/CHANGE restate the whole column definition, so NULL-ness must
        # be carried over explicitly or MySQL would reset it.
        if field.null:
            new_type += " NULL"
        else:
            new_type += " NOT NULL"
        return new_type
    def _alter_column_type_sql(self, table, old_field, new_field, new_type):
        """Append the old field's NULL/NOT NULL to the type before altering."""
        new_type = self._set_field_new_type_null_status(old_field, new_type)
        return super(DatabaseSchemaEditor, self)._alter_column_type_sql(table, old_field, new_field, new_type)
    def _rename_field_sql(self, table, old_field, new_field, new_type):
        """Append the old field's NULL/NOT NULL to the type before renaming."""
        new_type = self._set_field_new_type_null_status(old_field, new_type)
        return super(DatabaseSchemaEditor, self)._rename_field_sql(table, old_field, new_field, new_type)
| bsd-3-clause |
xiami9916057/volatility | volatility/plugins/overlays/windows/vista_sp12_x86_syscalls.py | 45 | 39669 | # Volatility
# Copyright (c) 2008-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
syscalls = [
[
'NtAcceptConnectPort', # 0x0
'NtAccessCheck', # 0x1
'NtAccessCheckAndAuditAlarm', # 0x2
'NtAccessCheckByType', # 0x3
'NtAccessCheckByTypeAndAuditAlarm', # 0x4
'NtAccessCheckByTypeResultList', # 0x5
'NtAccessCheckByTypeResultListAndAuditAlarm', # 0x6
'NtAccessCheckByTypeResultListAndAuditAlarmByHandle', # 0x7
'NtAddAtom', # 0x8
'NtAddBootEntry', # 0x9
'NtAddDriverEntry', # 0xa
'NtAdjustGroupsToken', # 0xb
'NtAdjustPrivilegesToken', # 0xc
'NtAlertResumeThread', # 0xd
'NtAlertThread', # 0xe
'NtAllocateLocallyUniqueId', # 0xf
'NtAllocateUserPhysicalPages', # 0x10
'NtAllocateUuids', # 0x11
'NtAllocateVirtualMemory', # 0x12
'NtAlpcAcceptConnectPort', # 0x13
'NtAlpcCancelMessage', # 0x14
'NtAlpcConnectPort', # 0x15
'NtAlpcCreatePort', # 0x16
'NtAlpcCreatePortSection', # 0x17
'NtAlpcCreateResourceReserve', # 0x18
'NtAlpcCreateSectionView', # 0x19
'NtAlpcCreateSecurityContext', # 0x1a
'NtAlpcDeletePortSection', # 0x1b
'NtAlpcDeleteResourceReserve', # 0x1c
'NtAlpcDeleteSectionView', # 0x1d
'NtAlpcDeleteSecurityContext', # 0x1e
'NtAlpcDisconnectPort', # 0x1f
'NtAlpcImpersonateClientOfPort', # 0x20
'NtAlpcOpenSenderProcess', # 0x21
'NtAlpcOpenSenderThread', # 0x22
'NtAlpcQueryInformation', # 0x23
'NtAlpcQueryInformationMessage', # 0x24
'NtAlpcRevokeSecurityContext', # 0x25
'NtAlpcSendWaitReceivePort', # 0x26
'NtAlpcSetInformation', # 0x27
'NtApphelpCacheControl', # 0x28
'NtAreMappedFilesTheSame', # 0x29
'NtAssignProcessToJobObject', # 0x2a
'NtCallbackReturn', # 0x2b
'NtCancelDeviceWakeupRequest', # 0x2c
'NtCancelIoFile', # 0x2d
'NtCancelTimer', # 0x2e
'NtClearEvent', # 0x2f
'NtClose', # 0x30
'NtCloseObjectAuditAlarm', # 0x31
'NtCompactKeys', # 0x32
'NtCompareTokens', # 0x33
'NtCompleteConnectPort', # 0x34
'NtCompressKey', # 0x35
'NtConnectPort', # 0x36
'NtContinue', # 0x37
'NtCreateDebugObject', # 0x38
'NtCreateDirectoryObject', # 0x39
'NtCreateEvent', # 0x3a
'NtCreateEventPair', # 0x3b
'NtCreateFile', # 0x3c
'NtCreateIoCompletion', # 0x3d
'NtCreateJobObject', # 0x3e
'NtCreateJobSet', # 0x3f
'NtCreateKey', # 0x40
'NtCreateKeyTransacted', # 0x41
'NtCreateMailslotFile', # 0x42
'NtCreateMutant', # 0x43
'NtCreateNamedPipeFile', # 0x44
'NtCreatePrivateNamespace', # 0x45
'NtCreatePagingFile', # 0x46
'NtCreatePort', # 0x47
'NtCreateProcess', # 0x48
'NtCreateProcessEx', # 0x49
'NtCreateProfile', # 0x4a
'NtCreateSection', # 0x4b
'NtCreateSemaphore', # 0x4c
'NtCreateSymbolicLinkObject', # 0x4d
'NtCreateThread', # 0x4e
'NtCreateTimer', # 0x4f
'NtCreateToken', # 0x50
'NtCreateTransaction', # 0x51
'NtOpenTransaction', # 0x52
'NtQueryInformationTransaction', # 0x53
'NtQueryInformationTransactionManager', # 0x54
'NtPrePrepareEnlistment', # 0x55
'NtPrepareEnlistment', # 0x56
'NtCommitEnlistment', # 0x57
'NtReadOnlyEnlistment', # 0x58
'NtRollbackComplete', # 0x59
'NtRollbackEnlistment', # 0x5a
'NtCommitTransaction', # 0x5b
'NtRollbackTransaction', # 0x5c
'NtPrePrepareComplete', # 0x5d
'NtPrepareComplete', # 0x5e
'NtCommitComplete', # 0x5f
'NtSinglePhaseReject', # 0x60
'NtSetInformationTransaction', # 0x61
'NtSetInformationTransactionManager', # 0x62
'NtSetInformationResourceManager', # 0x63
'NtCreateTransactionManager', # 0x64
'NtOpenTransactionManager', # 0x65
'NtRenameTransactionManager', # 0x66
'NtRollforwardTransactionManager', # 0x67
'NtRecoverEnlistment', # 0x68
'NtRecoverResourceManager', # 0x69
'NtRecoverTransactionManager', # 0x6a
'NtCreateResourceManager', # 0x6b
'NtOpenResourceManager', # 0x6c
'NtGetNotificationResourceManager', # 0x6d
'NtQueryInformationResourceManager', # 0x6e
'NtCreateEnlistment', # 0x6f
'NtOpenEnlistment', # 0x70
'NtSetInformationEnlistment', # 0x71
'NtQueryInformationEnlistment', # 0x72
'NtCreateWaitablePort', # 0x73
'NtDebugActiveProcess', # 0x74
'NtDebugContinue', # 0x75
'NtDelayExecution', # 0x76
'NtDeleteAtom', # 0x77
'NtDeleteBootEntry', # 0x78
'NtDeleteDriverEntry', # 0x79
'NtDeleteFile', # 0x7a
'NtDeleteKey', # 0x7b
'NtDeletePrivateNamespace', # 0x7c
'NtDeleteObjectAuditAlarm', # 0x7d
'NtDeleteValueKey', # 0x7e
'NtDeviceIoControlFile', # 0x7f
'NtDisplayString', # 0x80
'NtDuplicateObject', # 0x81
'NtDuplicateToken', # 0x82
'NtEnumerateBootEntries', # 0x83
'NtEnumerateDriverEntries', # 0x84
'NtEnumerateKey', # 0x85
'NtEnumerateSystemEnvironmentValuesEx', # 0x86
'NtEnumerateTransactionObject', # 0x87
'NtEnumerateValueKey', # 0x88
'NtExtendSection', # 0x89
'NtFilterToken', # 0x8a
'NtFindAtom', # 0x8b
'NtFlushBuffersFile', # 0x8c
'NtFlushInstructionCache', # 0x8d
'NtFlushKey', # 0x8e
'NtFlushProcessWriteBuffers', # 0x8f
'NtFlushVirtualMemory', # 0x90
'NtFlushWriteBuffer', # 0x91
'NtFreeUserPhysicalPages', # 0x92
'NtFreeVirtualMemory', # 0x93
'NtFreezeRegistry', # 0x94
'NtFreezeTransactions', # 0x95
'NtFsControlFile', # 0x96
'NtGetContextThread', # 0x97
'NtGetDevicePowerState', # 0x98
'NtGetNlsSectionPtr', # 0x99
'NtGetPlugPlayEvent', # 0x9a
'NtGetWriteWatch', # 0x9b
'NtImpersonateAnonymousToken', # 0x9c
'NtImpersonateClientOfPort', # 0x9d
'NtImpersonateThread', # 0x9e
'NtInitializeNlsFiles', # 0x9f
'NtInitializeRegistry', # 0xa0
'NtInitiatePowerAction', # 0xa1
'NtIsProcessInJob', # 0xa2
'NtIsSystemResumeAutomatic', # 0xa3
'NtListenPort', # 0xa4
'NtLoadDriver', # 0xa5
'NtLoadKey', # 0xa6
'NtLoadKey2', # 0xa7
'NtLoadKeyEx', # 0xa8
'NtLockFile', # 0xa9
'NtLockProductActivationKeys', # 0xaa
'NtLockRegistryKey', # 0xab
'NtLockVirtualMemory', # 0xac
'NtMakePermanentObject', # 0xad
'NtMakeTemporaryObject', # 0xae
'NtMapUserPhysicalPages', # 0xaf
'NtMapUserPhysicalPagesScatter', # 0xb0
'NtMapViewOfSection', # 0xb1
'NtModifyBootEntry', # 0xb2
'NtModifyDriverEntry', # 0xb3
'NtNotifyChangeDirectoryFile', # 0xb4
'NtNotifyChangeKey', # 0xb5
'NtNotifyChangeMultipleKeys', # 0xb6
'NtOpenDirectoryObject', # 0xb7
'NtOpenEvent', # 0xb8
'NtOpenEventPair', # 0xb9
'NtOpenFile', # 0xba
'NtOpenIoCompletion', # 0xbb
'NtOpenJobObject', # 0xbc
'NtOpenKey', # 0xbd
'NtOpenKeyTransacted', # 0xbe
'NtOpenMutant', # 0xbf
'NtOpenPrivateNamespace', # 0xc0
'NtOpenObjectAuditAlarm', # 0xc1
'NtOpenProcess', # 0xc2
'NtOpenProcessToken', # 0xc3
'NtOpenProcessTokenEx', # 0xc4
'NtOpenSection', # 0xc5
'NtOpenSemaphore', # 0xc6
'NtOpenSession', # 0xc7
'NtOpenSymbolicLinkObject', # 0xc8
'NtOpenThread', # 0xc9
'NtOpenThreadToken', # 0xca
'NtOpenThreadTokenEx', # 0xcb
'NtOpenTimer', # 0xcc
'NtPlugPlayControl', # 0xcd
'NtPowerInformation', # 0xce
'NtPrivilegeCheck', # 0xcf
'NtPrivilegeObjectAuditAlarm', # 0xd0
'NtPrivilegedServiceAuditAlarm', # 0xd1
'NtProtectVirtualMemory', # 0xd2
'NtPulseEvent', # 0xd3
'NtQueryAttributesFile', # 0xd4
'NtQueryBootEntryOrder', # 0xd5
'NtQueryBootOptions', # 0xd6
'NtQueryDebugFilterState', # 0xd7
'NtQueryDefaultLocale', # 0xd8
'NtQueryDefaultUILanguage', # 0xd9
'NtQueryDirectoryFile', # 0xda
'NtQueryDirectoryObject', # 0xdb
'NtQueryDriverEntryOrder', # 0xdc
'NtQueryEaFile', # 0xdd
'NtQueryEvent', # 0xde
'NtQueryFullAttributesFile', # 0xdf
'NtQueryInformationAtom', # 0xe0
'NtQueryInformationFile', # 0xe1
'NtQueryInformationJobObject', # 0xe2
'NtQueryInformationPort', # 0xe3
'NtQueryInformationProcess', # 0xe4
'NtQueryInformationThread', # 0xe5
'NtQueryInformationToken', # 0xe6
'NtQueryInstallUILanguage', # 0xe7
'NtQueryIntervalProfile', # 0xe8
'NtQueryIoCompletion', # 0xe9
'NtQueryKey', # 0xea
'NtQueryMultipleValueKey', # 0xeb
'NtQueryMutant', # 0xec
'NtQueryObject', # 0xed
'NtQueryOpenSubKeys', # 0xee
'NtQueryOpenSubKeysEx', # 0xef
'NtQueryPerformanceCounter', # 0xf0
'NtQueryQuotaInformationFile', # 0xf1
'NtQuerySection', # 0xf2
'NtQuerySecurityObject', # 0xf3
'NtQuerySemaphore', # 0xf4
'NtQuerySymbolicLinkObject', # 0xf5
'NtQuerySystemEnvironmentValue', # 0xf6
'NtQuerySystemEnvironmentValueEx', # 0xf7
'NtQuerySystemInformation', # 0xf8
'NtQuerySystemTime', # 0xf9
'NtQueryTimer', # 0xfa
'NtQueryTimerResolution', # 0xfb
'NtQueryValueKey', # 0xfc
'NtQueryVirtualMemory', # 0xfd
'NtQueryVolumeInformationFile', # 0xfe
'NtQueueApcThread', # 0xff
'NtRaiseException', # 0x100
'NtRaiseHardError', # 0x101
'NtReadFile', # 0x102
'NtReadFileScatter', # 0x103
'NtReadRequestData', # 0x104
'NtReadVirtualMemory', # 0x105
'NtRegisterThreadTerminatePort', # 0x106
'NtReleaseMutant', # 0x107
'NtReleaseSemaphore', # 0x108
'NtRemoveIoCompletion', # 0x109
'NtRemoveProcessDebug', # 0x10a
'NtRenameKey', # 0x10b
'NtReplaceKey', # 0x10c
'NtReplacePartitionUnit', # 0x10d
'NtReplyPort', # 0x10e
'NtReplyWaitReceivePort', # 0x10f
'NtReplyWaitReceivePortEx', # 0x110
'NtReplyWaitReplyPort', # 0x111
'NtRequestDeviceWakeup', # 0x112
'NtRequestPort', # 0x113
'NtRequestWaitReplyPort', # 0x114
'NtRequestWakeupLatency', # 0x115
'NtResetEvent', # 0x116
'NtResetWriteWatch', # 0x117
'NtRestoreKey', # 0x118
'NtResumeProcess', # 0x119
'NtResumeThread', # 0x11a
'NtSaveKey', # 0x11b
'NtSaveKeyEx', # 0x11c
'NtSaveMergedKeys', # 0x11d
'NtSecureConnectPort', # 0x11e
'NtSetBootEntryOrder', # 0x11f
'NtSetBootOptions', # 0x120
'NtSetContextThread', # 0x121
'NtSetDebugFilterState', # 0x122
'NtSetDefaultHardErrorPort', # 0x123
'NtSetDefaultLocale', # 0x124
'NtSetDefaultUILanguage', # 0x125
'NtSetDriverEntryOrder', # 0x126
'NtSetEaFile', # 0x127
'NtSetEvent', # 0x128
'NtSetEventBoostPriority', # 0x129
'NtSetHighEventPair', # 0x12a
'NtSetHighWaitLowEventPair', # 0x12b
'NtSetInformationDebugObject', # 0x12c
'NtSetInformationFile', # 0x12d
'NtSetInformationJobObject', # 0x12e
'NtSetInformationKey', # 0x12f
'NtSetInformationObject', # 0x130
'NtSetInformationProcess', # 0x131
'NtSetInformationThread', # 0x132
'NtSetInformationToken', # 0x133
'NtSetIntervalProfile', # 0x134
'NtSetIoCompletion', # 0x135
'NtSetLdtEntries', # 0x136
'NtSetLowEventPair', # 0x137
'NtSetLowWaitHighEventPair', # 0x138
'NtSetQuotaInformationFile', # 0x139
'NtSetSecurityObject', # 0x13a
'NtSetSystemEnvironmentValue', # 0x13b
'NtSetSystemEnvironmentValueEx', # 0x13c
'NtSetSystemInformation', # 0x13d
'NtSetSystemPowerState', # 0x13e
'NtSetSystemTime', # 0x13f
'NtSetThreadExecutionState', # 0x140
'NtSetTimer', # 0x141
'NtSetTimerResolution', # 0x142
'NtSetUuidSeed', # 0x143
'NtSetValueKey', # 0x144
'NtSetVolumeInformationFile', # 0x145
'NtShutdownSystem', # 0x146
'NtSignalAndWaitForSingleObject', # 0x147
'NtStartProfile', # 0x148
'NtStopProfile', # 0x149
'NtSuspendProcess', # 0x14a
'NtSuspendThread', # 0x14b
'NtSystemDebugControl', # 0x14c
'NtTerminateJobObject', # 0x14d
'NtTerminateProcess', # 0x14e
'NtTerminateThread', # 0x14f
'NtTestAlert', # 0x150
'NtThawRegistry', # 0x151
'NtThawTransactions', # 0x152
'NtTraceEvent', # 0x153
'NtTraceControl', # 0x154
'NtTranslateFilePath', # 0x155
'NtUnloadDriver', # 0x156
'NtUnloadKey', # 0x157
'NtUnloadKey2', # 0x158
'NtUnloadKeyEx', # 0x159
'NtUnlockFile', # 0x15a
'NtUnlockVirtualMemory', # 0x15b
'NtUnmapViewOfSection', # 0x15c
'NtVdmControl', # 0x15d
'NtWaitForDebugEvent', # 0x15e
'NtWaitForMultipleObjects', # 0x15f
'NtWaitForSingleObject', # 0x160
'NtWaitHighEventPair', # 0x161
'NtWaitLowEventPair', # 0x162
'NtWriteFile', # 0x163
'NtWriteFileGather', # 0x164
'NtWriteRequestData', # 0x165
'NtWriteVirtualMemory', # 0x166
'NtYieldExecution', # 0x167
'NtCreateKeyedEvent', # 0x168
'NtOpenKeyedEvent', # 0x169
'NtReleaseKeyedEvent', # 0x16a
'NtWaitForKeyedEvent', # 0x16b
'NtQueryPortInformationProcess', # 0x16c
'NtGetCurrentProcessorNumber', # 0x16d
'NtWaitForMultipleObjects32', # 0x16e
'NtGetNextProcess', # 0x16f
'NtGetNextThread', # 0x170
'NtCancelIoFileEx', # 0x171
'NtCancelSynchronousIoFile', # 0x172
'NtRemoveIoCompletionEx', # 0x173
'NtRegisterProtocolAddressInformation', # 0x174
'NtPropagationComplete', # 0x175
'NtPropagationFailed', # 0x176
'NtCreateWorkerFactory', # 0x177
'NtReleaseWorkerFactoryWorker', # 0x178
'NtWaitForWorkViaWorkerFactory', # 0x179
'NtSetInformationWorkerFactory', # 0x17a
'NtQueryInformationWorkerFactory', # 0x17b
'NtWorkerFactoryWorkerReady', # 0x17c
'NtShutdownWorkerFactory', # 0x17d
'NtCreateThreadEx', # 0x17e
'NtCreateUserProcess', # 0x17f
'NtQueryLicenseValue', # 0x180
'NtMapCMFModule', # 0x181
'NtIsUILanguageComitted', # 0x182
'NtFlushInstallUILanguage', # 0x183
'NtGetMUIRegistryInfo', # 0x184
'NtAcquireCMFViewOwnership', # 0x185
'NtReleaseCMFViewOwnership', # 0x186
],
[
'NtGdiAbortDoc', # 0x0
'NtGdiAbortPath', # 0x1
'NtGdiAddFontResourceW', # 0x2
'NtGdiAddRemoteFontToDC', # 0x3
'NtGdiAddFontMemResourceEx', # 0x4
'NtGdiRemoveMergeFont', # 0x5
'NtGdiAddRemoteMMInstanceToDC', # 0x6
'NtGdiAlphaBlend', # 0x7
'NtGdiAngleArc', # 0x8
'NtGdiAnyLinkedFonts', # 0x9
'NtGdiFontIsLinked', # 0xa
'NtGdiArcInternal', # 0xb
'NtGdiBeginPath', # 0xc
'NtGdiBitBlt', # 0xd
'NtGdiCancelDC', # 0xe
'NtGdiCheckBitmapBits', # 0xf
'NtGdiCloseFigure', # 0x10
'NtGdiClearBitmapAttributes', # 0x11
'NtGdiClearBrushAttributes', # 0x12
'NtGdiColorCorrectPalette', # 0x13
'NtGdiCombineRgn', # 0x14
'NtGdiCombineTransform', # 0x15
'NtGdiComputeXformCoefficients', # 0x16
'NtGdiConfigureOPMProtectedOutput', # 0x17
'NtGdiConsoleTextOut', # 0x18
'NtGdiConvertMetafileRect', # 0x19
'NtGdiCreateBitmap', # 0x1a
'NtGdiCreateClientObj', # 0x1b
'NtGdiCreateColorSpace', # 0x1c
'NtGdiCreateColorTransform', # 0x1d
'NtGdiCreateCompatibleBitmap', # 0x1e
'NtGdiCreateCompatibleDC', # 0x1f
'NtGdiCreateDIBBrush', # 0x20
'NtGdiCreateDIBitmapInternal', # 0x21
'NtGdiCreateDIBSection', # 0x22
'NtGdiCreateEllipticRgn', # 0x23
'NtGdiCreateHalftonePalette', # 0x24
'NtGdiCreateHatchBrushInternal', # 0x25
'NtGdiCreateMetafileDC', # 0x26
'NtGdiCreateOPMProtectedOutputs', # 0x27
'NtGdiCreatePaletteInternal', # 0x28
'NtGdiCreatePatternBrushInternal', # 0x29
'NtGdiCreatePen', # 0x2a
'NtGdiCreateRectRgn', # 0x2b
'NtGdiCreateRoundRectRgn', # 0x2c
'NtGdiCreateServerMetaFile', # 0x2d
'NtGdiCreateSolidBrush', # 0x2e
'NtGdiD3dContextCreate', # 0x2f
'NtGdiD3dContextDestroy', # 0x30
'NtGdiD3dContextDestroyAll', # 0x31
'NtGdiD3dValidateTextureStageState', # 0x32
'NtGdiD3dDrawPrimitives2', # 0x33
'NtGdiDdGetDriverState', # 0x34
'NtGdiDdAddAttachedSurface', # 0x35
'NtGdiDdAlphaBlt', # 0x36
'NtGdiDdAttachSurface', # 0x37
'NtGdiDdBeginMoCompFrame', # 0x38
'NtGdiDdBlt', # 0x39
'NtGdiDdCanCreateSurface', # 0x3a
'NtGdiDdCanCreateD3DBuffer', # 0x3b
'NtGdiDdColorControl', # 0x3c
'NtGdiDdCreateDirectDrawObject', # 0x3d
'NtGdiDdCreateSurface', # 0x3e
'NtGdiDdCreateD3DBuffer', # 0x3f
'NtGdiDdCreateMoComp', # 0x40
'NtGdiDdCreateSurfaceObject', # 0x41
'NtGdiDdDeleteDirectDrawObject', # 0x42
'NtGdiDdDeleteSurfaceObject', # 0x43
'NtGdiDdDestroyMoComp', # 0x44
'NtGdiDdDestroySurface', # 0x45
'NtGdiDdDestroyD3DBuffer', # 0x46
'NtGdiDdEndMoCompFrame', # 0x47
'NtGdiDdFlip', # 0x48
'NtGdiDdFlipToGDISurface', # 0x49
'NtGdiDdGetAvailDriverMemory', # 0x4a
'NtGdiDdGetBltStatus', # 0x4b
'NtGdiDdGetDC', # 0x4c
'NtGdiDdGetDriverInfo', # 0x4d
'NtGdiDdGetDxHandle', # 0x4e
'NtGdiDdGetFlipStatus', # 0x4f
'NtGdiDdGetInternalMoCompInfo', # 0x50
'NtGdiDdGetMoCompBuffInfo', # 0x51
'NtGdiDdGetMoCompGuids', # 0x52
'NtGdiDdGetMoCompFormats', # 0x53
'NtGdiDdGetScanLine', # 0x54
'NtGdiDdLock', # 0x55
'NtGdiDdLockD3D', # 0x56
'NtGdiDdQueryDirectDrawObject', # 0x57
'NtGdiDdQueryMoCompStatus', # 0x58
'NtGdiDdReenableDirectDrawObject', # 0x59
'NtGdiDdReleaseDC', # 0x5a
'NtGdiDdRenderMoComp', # 0x5b
'NtGdiDdResetVisrgn', # 0x5c
'NtGdiDdSetColorKey', # 0x5d
'NtGdiDdSetExclusiveMode', # 0x5e
'NtGdiDdSetGammaRamp', # 0x5f
'NtGdiDdCreateSurfaceEx', # 0x60
'NtGdiDdSetOverlayPosition', # 0x61
'NtGdiDdUnattachSurface', # 0x62
'NtGdiDdUnlock', # 0x63
'NtGdiDdUnlockD3D', # 0x64
'NtGdiDdUpdateOverlay', # 0x65
'NtGdiDdWaitForVerticalBlank', # 0x66
'NtGdiDvpCanCreateVideoPort', # 0x67
'NtGdiDvpColorControl', # 0x68
'NtGdiDvpCreateVideoPort', # 0x69
'NtGdiDvpDestroyVideoPort', # 0x6a
'NtGdiDvpFlipVideoPort', # 0x6b
'NtGdiDvpGetVideoPortBandwidth', # 0x6c
'NtGdiDvpGetVideoPortField', # 0x6d
'NtGdiDvpGetVideoPortFlipStatus', # 0x6e
'NtGdiDvpGetVideoPortInputFormats', # 0x6f
'NtGdiDvpGetVideoPortLine', # 0x70
'NtGdiDvpGetVideoPortOutputFormats', # 0x71
'NtGdiDvpGetVideoPortConnectInfo', # 0x72
'NtGdiDvpGetVideoSignalStatus', # 0x73
'NtGdiDvpUpdateVideoPort', # 0x74
'NtGdiDvpWaitForVideoPortSync', # 0x75
'NtGdiDvpAcquireNotification', # 0x76
'NtGdiDvpReleaseNotification', # 0x77
'NtGdiDxgGenericThunk', # 0x78
'NtGdiDeleteClientObj', # 0x79
'NtGdiDeleteColorSpace', # 0x7a
'NtGdiDeleteColorTransform', # 0x7b
'NtGdiDeleteObjectApp', # 0x7c
'NtGdiDescribePixelFormat', # 0x7d
'NtGdiDestroyOPMProtectedOutput', # 0x7e
'NtGdiGetPerBandInfo', # 0x7f
'NtGdiDoBanding', # 0x80
'NtGdiDoPalette', # 0x81
'NtGdiDrawEscape', # 0x82
'NtGdiEllipse', # 0x83
'NtGdiEnableEudc', # 0x84
'NtGdiEndDoc', # 0x85
'NtGdiEndPage', # 0x86
'NtGdiEndPath', # 0x87
'NtGdiEnumFontChunk', # 0x88
'NtGdiEnumFontClose', # 0x89
'NtGdiEnumFontOpen', # 0x8a
'NtGdiEnumObjects', # 0x8b
'NtGdiEqualRgn', # 0x8c
'NtGdiEudcLoadUnloadLink', # 0x8d
'NtGdiExcludeClipRect', # 0x8e
'NtGdiExtCreatePen', # 0x8f
'NtGdiExtCreateRegion', # 0x90
'NtGdiExtEscape', # 0x91
'NtGdiExtFloodFill', # 0x92
'NtGdiExtGetObjectW', # 0x93
'NtGdiExtSelectClipRgn', # 0x94
'NtGdiExtTextOutW', # 0x95
'NtGdiFillPath', # 0x96
'NtGdiFillRgn', # 0x97
'NtGdiFlattenPath', # 0x98
'NtGdiFlush', # 0x99
'NtGdiForceUFIMapping', # 0x9a
'NtGdiFrameRgn', # 0x9b
'NtGdiFullscreenControl', # 0x9c
'NtGdiGetAndSetDCDword', # 0x9d
'NtGdiGetAppClipBox', # 0x9e
'NtGdiGetBitmapBits', # 0x9f
'NtGdiGetBitmapDimension', # 0xa0
'NtGdiGetBoundsRect', # 0xa1
'NtGdiGetCertificate', # 0xa2
'NtGdiGetCertificateSize', # 0xa3
'NtGdiGetCharABCWidthsW', # 0xa4
'NtGdiGetCharacterPlacementW', # 0xa5
'NtGdiGetCharSet', # 0xa6
'NtGdiGetCharWidthW', # 0xa7
'NtGdiGetCharWidthInfo', # 0xa8
'NtGdiGetColorAdjustment', # 0xa9
'NtGdiGetColorSpaceforBitmap', # 0xaa
'NtGdiGetCOPPCompatibleOPMInformation', # 0xab
'NtGdiGetDCDword', # 0xac
'NtGdiGetDCforBitmap', # 0xad
'NtGdiGetDCObject', # 0xae
'NtGdiGetDCPoint', # 0xaf
'NtGdiGetDeviceCaps', # 0xb0
'NtGdiGetDeviceGammaRamp', # 0xb1
'NtGdiGetDeviceCapsAll', # 0xb2
'NtGdiGetDIBitsInternal', # 0xb3
'NtGdiGetETM', # 0xb4
'NtGdiGetEudcTimeStampEx', # 0xb5
'NtGdiGetFontData', # 0xb6
'NtGdiGetFontResourceInfoInternalW', # 0xb7
'NtGdiGetGlyphIndicesW', # 0xb8
'NtGdiGetGlyphIndicesWInternal', # 0xb9
'NtGdiGetGlyphOutline', # 0xba
'NtGdiGetOPMInformation', # 0xbb
'NtGdiGetKerningPairs', # 0xbc
'NtGdiGetLinkedUFIs', # 0xbd
'NtGdiGetMiterLimit', # 0xbe
'NtGdiGetMonitorID', # 0xbf
'NtGdiGetNearestColor', # 0xc0
'NtGdiGetNearestPaletteIndex', # 0xc1
'NtGdiGetObjectBitmapHandle', # 0xc2
'NtGdiGetOPMRandomNumber', # 0xc3
'NtGdiGetOutlineTextMetricsInternalW', # 0xc4
'NtGdiGetPath', # 0xc5
'NtGdiGetPixel', # 0xc6
'NtGdiGetRandomRgn', # 0xc7
'NtGdiGetRasterizerCaps', # 0xc8
'NtGdiGetRealizationInfo', # 0xc9
'NtGdiGetRegionData', # 0xca
'NtGdiGetRgnBox', # 0xcb
'NtGdiGetServerMetaFileBits', # 0xcc
'NtGdiGetSpoolMessage', # 0xcd
'NtGdiGetStats', # 0xce
'NtGdiGetStockObject', # 0xcf
'NtGdiGetStringBitmapW', # 0xd0
'NtGdiGetSuggestedOPMProtectedOutputArraySize', # 0xd1
'NtGdiGetSystemPaletteUse', # 0xd2
'NtGdiGetTextCharsetInfo', # 0xd3
'NtGdiGetTextExtent', # 0xd4
'NtGdiGetTextExtentExW', # 0xd5
'NtGdiGetTextFaceW', # 0xd6
'NtGdiGetTextMetricsW', # 0xd7
'NtGdiGetTransform', # 0xd8
'NtGdiGetUFI', # 0xd9
'NtGdiGetEmbUFI', # 0xda
'NtGdiGetUFIPathname', # 0xdb
'NtGdiGetEmbedFonts', # 0xdc
'NtGdiChangeGhostFont', # 0xdd
'NtGdiAddEmbFontToDC', # 0xde
'NtGdiGetFontUnicodeRanges', # 0xdf
'NtGdiGetWidthTable', # 0xe0
'NtGdiGradientFill', # 0xe1
'NtGdiHfontCreate', # 0xe2
'NtGdiIcmBrushInfo', # 0xe3
'NtGdiInit', # 0xe4
'NtGdiInitSpool', # 0xe5
'NtGdiIntersectClipRect', # 0xe6
'NtGdiInvertRgn', # 0xe7
'NtGdiLineTo', # 0xe8
'NtGdiMakeFontDir', # 0xe9
'NtGdiMakeInfoDC', # 0xea
'NtGdiMaskBlt', # 0xeb
'NtGdiModifyWorldTransform', # 0xec
'NtGdiMonoBitmap', # 0xed
'NtGdiMoveTo', # 0xee
'NtGdiOffsetClipRgn', # 0xef
'NtGdiOffsetRgn', # 0xf0
'NtGdiOpenDCW', # 0xf1
'NtGdiPatBlt', # 0xf2
'NtGdiPolyPatBlt', # 0xf3
'NtGdiPathToRegion', # 0xf4
'NtGdiPlgBlt', # 0xf5
'NtGdiPolyDraw', # 0xf6
'NtGdiPolyPolyDraw', # 0xf7
'NtGdiPolyTextOutW', # 0xf8
'NtGdiPtInRegion', # 0xf9
'NtGdiPtVisible', # 0xfa
'NtGdiQueryFonts', # 0xfb
'NtGdiQueryFontAssocInfo', # 0xfc
'NtGdiRectangle', # 0xfd
'NtGdiRectInRegion', # 0xfe
'NtGdiRectVisible', # 0xff
'NtGdiRemoveFontResourceW', # 0x100
'NtGdiRemoveFontMemResourceEx', # 0x101
'NtGdiResetDC', # 0x102
'NtGdiResizePalette', # 0x103
'NtGdiRestoreDC', # 0x104
'NtGdiRoundRect', # 0x105
'NtGdiSaveDC', # 0x106
'NtGdiScaleViewportExtEx', # 0x107
'NtGdiScaleWindowExtEx', # 0x108
'NtGdiSelectBitmap', # 0x109
'NtGdiSelectBrush', # 0x10a
'NtGdiSelectClipPath', # 0x10b
'NtGdiSelectFont', # 0x10c
'NtGdiSelectPen', # 0x10d
'NtGdiSetBitmapAttributes', # 0x10e
'NtGdiSetBitmapBits', # 0x10f
'NtGdiSetBitmapDimension', # 0x110
'NtGdiSetBoundsRect', # 0x111
'NtGdiSetBrushAttributes', # 0x112
'NtGdiSetBrushOrg', # 0x113
'NtGdiSetColorAdjustment', # 0x114
'NtGdiSetColorSpace', # 0x115
'NtGdiSetDeviceGammaRamp', # 0x116
'NtGdiSetDIBitsToDeviceInternal', # 0x117
'NtGdiSetFontEnumeration', # 0x118
'NtGdiSetFontXform', # 0x119
'NtGdiSetIcmMode', # 0x11a
'NtGdiSetLinkedUFIs', # 0x11b
'NtGdiSetMagicColors', # 0x11c
'NtGdiSetMetaRgn', # 0x11d
'NtGdiSetMiterLimit', # 0x11e
'NtGdiGetDeviceWidth', # 0x11f
'NtGdiMirrorWindowOrg', # 0x120
'NtGdiSetLayout', # 0x121
'NtGdiSetOPMSigningKeyAndSequenceNumbers', # 0x122
'NtGdiSetPixel', # 0x123
'NtGdiSetPixelFormat', # 0x124
'NtGdiSetRectRgn', # 0x125
'NtGdiSetSystemPaletteUse', # 0x126
'NtGdiSetTextJustification', # 0x127
'NtGdiSetupPublicCFONT', # 0x128
'NtGdiSetVirtualResolution', # 0x129
'NtGdiSetSizeDevice', # 0x12a
'NtGdiStartDoc', # 0x12b
'NtGdiStartPage', # 0x12c
'NtGdiStretchBlt', # 0x12d
'NtGdiStretchDIBitsInternal', # 0x12e
'NtGdiStrokeAndFillPath', # 0x12f
'NtGdiStrokePath', # 0x130
'NtGdiSwapBuffers', # 0x131
'NtGdiTransformPoints', # 0x132
'NtGdiTransparentBlt', # 0x133
'NtGdiUnloadPrinterDriver', # 0x134
'NtGdiUnmapMemFont', # 0x135
'NtGdiUnrealizeObject', # 0x136
'NtGdiUpdateColors', # 0x137
'NtGdiWidenPath', # 0x138
'NtUserActivateKeyboardLayout', # 0x139
'NtUserAddClipboardFormatListener', # 0x13a
'NtUserAlterWindowStyle', # 0x13b
'NtUserAssociateInputContext', # 0x13c
'NtUserAttachThreadInput', # 0x13d
'NtUserBeginPaint', # 0x13e
'NtUserBitBltSysBmp', # 0x13f
'NtUserBlockInput', # 0x140
'NtUserBuildHimcList', # 0x141
'NtUserBuildHwndList', # 0x142
'NtUserBuildNameList', # 0x143
'NtUserBuildPropList', # 0x144
'NtUserCallHwnd', # 0x145
'NtUserCallHwndLock', # 0x146
'NtUserCallHwndOpt', # 0x147
'NtUserCallHwndParam', # 0x148
'NtUserCallHwndParamLock', # 0x149
'NtUserCallMsgFilter', # 0x14a
'NtUserCallNextHookEx', # 0x14b
'NtUserCallNoParam', # 0x14c
'NtUserCallOneParam', # 0x14d
'NtUserCallTwoParam', # 0x14e
'NtUserChangeClipboardChain', # 0x14f
'NtUserChangeDisplaySettings', # 0x150
'NtUserCheckAccessForIntegrityLevel', # 0x151
'NtUserCheckDesktopByThreadId', # 0x152
'NtUserCheckWindowThreadDesktop', # 0x153
'NtUserCheckImeHotKey', # 0x154
'NtUserCheckMenuItem', # 0x155
'NtUserChildWindowFromPointEx', # 0x156
'NtUserClipCursor', # 0x157
'NtUserCloseClipboard', # 0x158
'NtUserCloseDesktop', # 0x159
'NtUserCloseWindowStation', # 0x15a
'NtUserConsoleControl', # 0x15b
'NtUserConvertMemHandle', # 0x15c
'NtUserCopyAcceleratorTable', # 0x15d
'NtUserCountClipboardFormats', # 0x15e
'NtUserCreateAcceleratorTable', # 0x15f
'NtUserCreateCaret', # 0x160
'NtUserCreateDesktopEx', # 0x161
'NtUserCreateInputContext', # 0x162
'NtUserCreateLocalMemHandle', # 0x163
'NtUserCreateWindowEx', # 0x164
'NtUserCreateWindowStation', # 0x165
'NtUserDdeInitialize', # 0x166
'NtUserDeferWindowPos', # 0x167
'NtUserDefSetText', # 0x168
'NtUserDeleteMenu', # 0x169
'NtUserDestroyAcceleratorTable', # 0x16a
'NtUserDestroyCursor', # 0x16b
'NtUserDestroyInputContext', # 0x16c
'NtUserDestroyMenu', # 0x16d
'NtUserDestroyWindow', # 0x16e
'NtUserDisableThreadIme', # 0x16f
'NtUserDispatchMessage', # 0x170
'NtUserDoSoundConnect', # 0x171
'NtUserDoSoundDisconnect', # 0x172
'NtUserDragDetect', # 0x173
'NtUserDragObject', # 0x174
'NtUserDrawAnimatedRects', # 0x175
'NtUserDrawCaption', # 0x176
'NtUserDrawCaptionTemp', # 0x177
'NtUserDrawIconEx', # 0x178
'NtUserDrawMenuBarTemp', # 0x179
'NtUserEmptyClipboard', # 0x17a
'NtUserEnableMenuItem', # 0x17b
'NtUserEnableScrollBar', # 0x17c
'NtUserEndDeferWindowPosEx', # 0x17d
'NtUserEndMenu', # 0x17e
'NtUserEndPaint', # 0x17f
'NtUserEnumDisplayDevices', # 0x180
'NtUserEnumDisplayMonitors', # 0x181
'NtUserEnumDisplaySettings', # 0x182
'NtUserEvent', # 0x183
'NtUserExcludeUpdateRgn', # 0x184
'NtUserFillWindow', # 0x185
'NtUserFindExistingCursorIcon', # 0x186
'NtUserFindWindowEx', # 0x187
'NtUserFlashWindowEx', # 0x188
'NtUserFrostCrashedWindow', # 0x189
'NtUserGetAltTabInfo', # 0x18a
'NtUserGetAncestor', # 0x18b
'NtUserGetAppImeLevel', # 0x18c
'NtUserGetAsyncKeyState', # 0x18d
'NtUserGetAtomName', # 0x18e
'NtUserGetCaretBlinkTime', # 0x18f
'NtUserGetCaretPos', # 0x190
'NtUserGetClassInfoEx', # 0x191
'NtUserGetClassName', # 0x192
'NtUserGetClipboardData', # 0x193
'NtUserGetClipboardFormatName', # 0x194
'NtUserGetClipboardOwner', # 0x195
'NtUserGetClipboardSequenceNumber', # 0x196
'NtUserGetClipboardViewer', # 0x197
'NtUserGetClipCursor', # 0x198
'NtUserGetComboBoxInfo', # 0x199
'NtUserGetControlBrush', # 0x19a
'NtUserGetControlColor', # 0x19b
'NtUserGetCPD', # 0x19c
'NtUserGetCursorFrameInfo', # 0x19d
'NtUserGetCursorInfo', # 0x19e
'NtUserGetDC', # 0x19f
'NtUserGetDCEx', # 0x1a0
'NtUserGetDoubleClickTime', # 0x1a1
'NtUserGetForegroundWindow', # 0x1a2
'NtUserGetGuiResources', # 0x1a3
'NtUserGetGUIThreadInfo', # 0x1a4
'NtUserGetIconInfo', # 0x1a5
'NtUserGetIconSize', # 0x1a6
'NtUserGetImeHotKey', # 0x1a7
'NtUserGetImeInfoEx', # 0x1a8
'NtUserGetInternalWindowPos', # 0x1a9
'NtUserGetKeyboardLayoutList', # 0x1aa
'NtUserGetKeyboardLayoutName', # 0x1ab
'NtUserGetKeyboardState', # 0x1ac
'NtUserGetKeyNameText', # 0x1ad
'NtUserGetKeyState', # 0x1ae
'NtUserGetListBoxInfo', # 0x1af
'NtUserGetMenuBarInfo', # 0x1b0
'NtUserGetMenuIndex', # 0x1b1
'NtUserGetMenuItemRect', # 0x1b2
'NtUserGetMessage', # 0x1b3
'NtUserGetMouseMovePointsEx', # 0x1b4
'NtUserGetObjectInformation', # 0x1b5
'NtUserGetOpenClipboardWindow', # 0x1b6
'NtUserGetPriorityClipboardFormat', # 0x1b7
'NtUserGetProcessWindowStation', # 0x1b8
'NtUserGetRawInputBuffer', # 0x1b9
'NtUserGetRawInputData', # 0x1ba
'NtUserGetRawInputDeviceInfo', # 0x1bb
'NtUserGetRawInputDeviceList', # 0x1bc
'NtUserGetRegisteredRawInputDevices', # 0x1bd
'NtUserGetScrollBarInfo', # 0x1be
'NtUserGetSystemMenu', # 0x1bf
'NtUserGetThreadDesktop', # 0x1c0
'NtUserGetThreadState', # 0x1c1
'NtUserGetTitleBarInfo', # 0x1c2
'NtUserGetUpdatedClipboardFormats', # 0x1c3
'NtUserGetUpdateRect', # 0x1c4
'NtUserGetUpdateRgn', # 0x1c5
'NtUserGetWindowDC', # 0x1c6
'NtUserGetWindowPlacement', # 0x1c7
'NtUserGetWOWClass', # 0x1c8
'NtUserGhostWindowFromHungWindow', # 0x1c9
'NtUserHardErrorControl', # 0x1ca
'NtUserHideCaret', # 0x1cb
'NtUserHiliteMenuItem', # 0x1cc
'NtUserHungWindowFromGhostWindow', # 0x1cd
'NtUserImpersonateDdeClientWindow', # 0x1ce
'NtUserInitialize', # 0x1cf
'NtUserInitializeClientPfnArrays', # 0x1d0
'NtUserInitTask', # 0x1d1
'NtUserInternalGetWindowText', # 0x1d2
'NtUserInternalGetWindowIcon', # 0x1d3
'NtUserInvalidateRect', # 0x1d4
'NtUserInvalidateRgn', # 0x1d5
'NtUserIsClipboardFormatAvailable', # 0x1d6
'NtUserKillTimer', # 0x1d7
'NtUserLoadKeyboardLayoutEx', # 0x1d8
'NtUserLockWindowStation', # 0x1d9
'NtUserLockWindowUpdate', # 0x1da
'NtUserLockWorkStation', # 0x1db
'NtUserLogicalToPhysicalPoint', # 0x1dc
'NtUserMapVirtualKeyEx', # 0x1dd
'NtUserMenuItemFromPoint', # 0x1de
'NtUserMessageCall', # 0x1df
'NtUserMinMaximize', # 0x1e0
'NtUserMNDragLeave', # 0x1e1
'NtUserMNDragOver', # 0x1e2
'NtUserModifyUserStartupInfoFlags', # 0x1e3
'NtUserMoveWindow', # 0x1e4
'NtUserNotifyIMEStatus', # 0x1e5
'NtUserNotifyProcessCreate', # 0x1e6
'NtUserNotifyWinEvent', # 0x1e7
'NtUserOpenClipboard', # 0x1e8
'NtUserOpenDesktop', # 0x1e9
'NtUserOpenInputDesktop', # 0x1ea
'NtUserOpenThreadDesktop', # 0x1eb
'NtUserOpenWindowStation', # 0x1ec
'NtUserPaintDesktop', # 0x1ed
'NtUserPaintMonitor', # 0x1ee
'NtUserPeekMessage', # 0x1ef
'NtUserPhysicalToLogicalPoint', # 0x1f0
'NtUserPostMessage', # 0x1f1
'NtUserPostThreadMessage', # 0x1f2
'NtUserPrintWindow', # 0x1f3
'NtUserProcessConnect', # 0x1f4
'NtUserQueryInformationThread', # 0x1f5
'NtUserQueryInputContext', # 0x1f6
'NtUserQuerySendMessage', # 0x1f7
'NtUserQueryWindow', # 0x1f8
'NtUserRealChildWindowFromPoint', # 0x1f9
'NtUserRealInternalGetMessage', # 0x1fa
'NtUserRealWaitMessageEx', # 0x1fb
'NtUserRedrawWindow', # 0x1fc
'NtUserRegisterClassExWOW', # 0x1fd
'NtUserRegisterErrorReportingDialog', # 0x1fe
'NtUserRegisterUserApiHook', # 0x1ff
'NtUserRegisterHotKey', # 0x200
'NtUserRegisterRawInputDevices', # 0x201
'NtUserRegisterTasklist', # 0x202
'NtUserRegisterWindowMessage', # 0x203
'NtUserRemoveClipboardFormatListener', # 0x204
'NtUserRemoveMenu', # 0x205
'NtUserRemoveProp', # 0x206
'NtUserResolveDesktop', # 0x207
'NtUserResolveDesktopForWOW', # 0x208
'NtUserSBGetParms', # 0x209
'NtUserScrollDC', # 0x20a
'NtUserScrollWindowEx', # 0x20b
'NtUserSelectPalette', # 0x20c
'NtUserSendInput', # 0x20d
'NtUserSetActiveWindow', # 0x20e
'NtUserSetAppImeLevel', # 0x20f
'NtUserSetCapture', # 0x210
'NtUserSetClassLong', # 0x211
'NtUserSetClassWord', # 0x212
'NtUserSetClipboardData', # 0x213
'NtUserSetClipboardViewer', # 0x214
'NtUserSetConsoleReserveKeys', # 0x215
'NtUserSetCursor', # 0x216
'NtUserSetCursorContents', # 0x217
'NtUserSetCursorIconData', # 0x218
'NtUserSetFocus', # 0x219
'NtUserSetImeHotKey', # 0x21a
'NtUserSetImeInfoEx', # 0x21b
'NtUserSetImeOwnerWindow', # 0x21c
'NtUserSetInformationProcess', # 0x21d
'NtUserSetInformationThread', # 0x21e
'NtUserSetInternalWindowPos', # 0x21f
'NtUserSetKeyboardState', # 0x220
'NtUserSetMenu', # 0x221
'NtUserSetMenuContextHelpId', # 0x222
'NtUserSetMenuDefaultItem', # 0x223
'NtUserSetMenuFlagRtoL', # 0x224
'NtUserSetObjectInformation', # 0x225
'NtUserSetParent', # 0x226
'NtUserSetProcessWindowStation', # 0x227
'NtUserGetProp', # 0x228
'NtUserSetProp', # 0x229
'NtUserSetScrollInfo', # 0x22a
'NtUserSetShellWindowEx', # 0x22b
'NtUserSetSysColors', # 0x22c
'NtUserSetSystemCursor', # 0x22d
'NtUserSetSystemMenu', # 0x22e
'NtUserSetSystemTimer', # 0x22f
'NtUserSetThreadDesktop', # 0x230
'NtUserSetThreadLayoutHandles', # 0x231
'NtUserSetThreadState', # 0x232
'NtUserSetTimer', # 0x233
'NtUserSetProcessDPIAware', # 0x234
'NtUserSetWindowFNID', # 0x235
'NtUserSetWindowLong', # 0x236
'NtUserSetWindowPlacement', # 0x237
'NtUserSetWindowPos', # 0x238
'NtUserSetWindowRgn', # 0x239
'NtUserGetWindowRgnEx', # 0x23a
'NtUserSetWindowRgnEx', # 0x23b
'NtUserSetWindowsHookAW', # 0x23c
'NtUserSetWindowsHookEx', # 0x23d
'NtUserSetWindowStationUser', # 0x23e
'NtUserSetWindowWord', # 0x23f
'NtUserSetWinEventHook', # 0x240
'NtUserShowCaret', # 0x241
'NtUserShowScrollBar', # 0x242
'NtUserShowWindow', # 0x243
'NtUserShowWindowAsync', # 0x244
'NtUserSoundSentry', # 0x245
'NtUserSwitchDesktop', # 0x246
'NtUserSystemParametersInfo', # 0x247
'NtUserTestForInteractiveUser', # 0x248
'NtUserThunkedMenuInfo', # 0x249
'NtUserThunkedMenuItemInfo', # 0x24a
'NtUserToUnicodeEx', # 0x24b
'NtUserTrackMouseEvent', # 0x24c
'NtUserTrackPopupMenuEx', # 0x24d
'NtUserCalcMenuBar', # 0x24e
'NtUserPaintMenuBar', # 0x24f
'NtUserTranslateAccelerator', # 0x250
'NtUserTranslateMessage', # 0x251
'NtUserUnhookWindowsHookEx', # 0x252
'NtUserUnhookWinEvent', # 0x253
'NtUserUnloadKeyboardLayout', # 0x254
'NtUserUnlockWindowStation', # 0x255
'NtUserUnregisterClass', # 0x256
'NtUserUnregisterUserApiHook', # 0x257
'NtUserUnregisterHotKey', # 0x258
'NtUserUpdateInputContext', # 0x259
'NtUserUpdateInstance', # 0x25a
'NtUserUpdateLayeredWindow', # 0x25b
'NtUserGetLayeredWindowAttributes', # 0x25c
'NtUserSetLayeredWindowAttributes', # 0x25d
'NtUserUpdatePerUserSystemParameters', # 0x25e
'NtUserUserHandleGrantAccess', # 0x25f
'NtUserValidateHandleSecure', # 0x260
'NtUserValidateRect', # 0x261
'NtUserValidateTimerCallback', # 0x262
'NtUserVkKeyScanEx', # 0x263
'NtUserWaitForInputIdle', # 0x264
'NtUserWaitForMsgAndEvent', # 0x265
'NtUserWaitMessage', # 0x266
'NtUserWin32PoolAllocationStats', # 0x267
'NtUserWindowFromPhysicalPoint', # 0x268
'NtUserWindowFromPoint', # 0x269
'NtUserYieldTask', # 0x26a
'NtUserRemoteConnect', # 0x26b
'NtUserRemoteRedrawRectangle', # 0x26c
'NtUserRemoteRedrawScreen', # 0x26d
'NtUserRemoteStopScreenUpdates', # 0x26e
'NtUserCtxDisplayIOCtl', # 0x26f
'NtUserRegisterSessionPort', # 0x270
'NtUserUnregisterSessionPort', # 0x271
'NtUserUpdateWindowTransform', # 0x272
'NtUserDwmStartRedirection', # 0x273
'NtUserDwmStopRedirection', # 0x274
'NtUserDwmHintDxUpdate', # 0x275
'NtUserDwmGetDxRgn', # 0x276
'NtUserGetWindowMinimizeRect', # 0x277
'NtGdiEngAssociateSurface', # 0x278
'NtGdiEngCreateBitmap', # 0x279
'NtGdiEngCreateDeviceSurface', # 0x27a
'NtGdiEngCreateDeviceBitmap', # 0x27b
'NtGdiEngCreatePalette', # 0x27c
'NtGdiEngComputeGlyphSet', # 0x27d
'NtGdiEngCopyBits', # 0x27e
'NtGdiEngDeletePalette', # 0x27f
'NtGdiEngDeleteSurface', # 0x280
'NtGdiEngEraseSurface', # 0x281
'NtGdiEngUnlockSurface', # 0x282
'NtGdiEngLockSurface', # 0x283
'NtGdiEngBitBlt', # 0x284
'NtGdiEngStretchBlt', # 0x285
'NtGdiEngPlgBlt', # 0x286
'NtGdiEngMarkBandingSurface', # 0x287
'NtGdiEngStrokePath', # 0x288
'NtGdiEngFillPath', # 0x289
'NtGdiEngStrokeAndFillPath', # 0x28a
'NtGdiEngPaint', # 0x28b
'NtGdiEngLineTo', # 0x28c
'NtGdiEngAlphaBlend', # 0x28d
'NtGdiEngGradientFill', # 0x28e
'NtGdiEngTransparentBlt', # 0x28f
'NtGdiEngTextOut', # 0x290
'NtGdiEngStretchBltROP', # 0x291
'NtGdiXLATEOBJ_cGetPalette', # 0x292
'NtGdiXLATEOBJ_iXlate', # 0x293
'NtGdiXLATEOBJ_hGetColorTransform', # 0x294
'NtGdiCLIPOBJ_bEnum', # 0x295
'NtGdiCLIPOBJ_cEnumStart', # 0x296
'NtGdiCLIPOBJ_ppoGetPath', # 0x297
'NtGdiEngDeletePath', # 0x298
'NtGdiEngCreateClip', # 0x299
'NtGdiEngDeleteClip', # 0x29a
'NtGdiBRUSHOBJ_ulGetBrushColor', # 0x29b
'NtGdiBRUSHOBJ_pvAllocRbrush', # 0x29c
'NtGdiBRUSHOBJ_pvGetRbrush', # 0x29d
'NtGdiBRUSHOBJ_hGetColorTransform', # 0x29e
'NtGdiXFORMOBJ_bApplyXform', # 0x29f
'NtGdiXFORMOBJ_iGetXform', # 0x2a0
'NtGdiFONTOBJ_vGetInfo', # 0x2a1
'NtGdiFONTOBJ_pxoGetXform', # 0x2a2
'NtGdiFONTOBJ_cGetGlyphs', # 0x2a3
'NtGdiFONTOBJ_pifi', # 0x2a4
'NtGdiFONTOBJ_pfdg', # 0x2a5
'NtGdiFONTOBJ_pQueryGlyphAttrs', # 0x2a6
'NtGdiFONTOBJ_pvTrueTypeFontFile', # 0x2a7
'NtGdiFONTOBJ_cGetAllGlyphHandles', # 0x2a8
'NtGdiSTROBJ_bEnum', # 0x2a9
'NtGdiSTROBJ_bEnumPositionsOnly', # 0x2aa
'NtGdiSTROBJ_bGetAdvanceWidths', # 0x2ab
'NtGdiSTROBJ_vEnumStart', # 0x2ac
'NtGdiSTROBJ_dwGetCodePage', # 0x2ad
'NtGdiPATHOBJ_vGetBounds', # 0x2ae
'NtGdiPATHOBJ_bEnum', # 0x2af
'NtGdiPATHOBJ_vEnumStart', # 0x2b0
'NtGdiPATHOBJ_vEnumStartClipLines', # 0x2b1
'NtGdiPATHOBJ_bEnumClipLines', # 0x2b2
'NtGdiGetDhpdev', # 0x2b3
'NtGdiEngCheckAbort', # 0x2b4
'NtGdiHT_Get8BPPFormatPalette', # 0x2b5
'NtGdiHT_Get8BPPMaskPalette', # 0x2b6
'NtGdiUpdateTransform', # 0x2b7
'NtGdiSetPUMPDOBJ', # 0x2b8
'NtGdiBRUSHOBJ_DeleteRbrush', # 0x2b9
'NtGdiUMPDEngFreeUserMem', # 0x2ba
'NtGdiDrawStream', # 0x2bb
'NtGdiDwmGetDirtyRgn', # 0x2bc
'NtGdiDwmGetSurfaceData', # 0x2bd
'NtGdiDdDDICreateAllocation', # 0x2be
'NtGdiDdDDIQueryResourceInfo', # 0x2bf
'NtGdiDdDDIOpenResource', # 0x2c0
'NtGdiDdDDIDestroyAllocation', # 0x2c1
'NtGdiDdDDISetAllocationPriority', # 0x2c2
'NtGdiDdDDIQueryAllocationResidency', # 0x2c3
'NtGdiDdDDICreateDevice', # 0x2c4
'NtGdiDdDDIDestroyDevice', # 0x2c5
'NtGdiDdDDICreateContext', # 0x2c6
'NtGdiDdDDIDestroyContext', # 0x2c7
'NtGdiDdDDICreateSynchronizationObject', # 0x2c8
'NtGdiDdDDIDestroySynchronizationObject', # 0x2c9
'NtGdiDdDDIWaitForSynchronizationObject', # 0x2ca
'NtGdiDdDDISignalSynchronizationObject', # 0x2cb
'NtGdiDdDDIGetRuntimeData', # 0x2cc
'NtGdiDdDDIQueryAdapterInfo', # 0x2cd
'NtGdiDdDDILock', # 0x2ce
'NtGdiDdDDIUnlock', # 0x2cf
'NtGdiDdDDIGetDisplayModeList', # 0x2d0
'NtGdiDdDDISetDisplayMode', # 0x2d1
'NtGdiDdDDIGetMultisampleMethodList', # 0x2d2
'NtGdiDdDDIPresent', # 0x2d3
'NtGdiDdDDIRender', # 0x2d4
'NtGdiDdDDIOpenAdapterFromDeviceName', # 0x2d5
'NtGdiDdDDIOpenAdapterFromHdc', # 0x2d6
'NtGdiDdDDICloseAdapter', # 0x2d7
'NtGdiDdDDIGetSharedPrimaryHandle', # 0x2d8
'NtGdiDdDDIEscape', # 0x2d9
'NtGdiDdDDIQueryStatistics', # 0x2da
'NtGdiDdDDISetVidPnSourceOwner', # 0x2db
'NtGdiDdDDIGetPresentHistory', # 0x2dc
'NtGdiDdDDICreateOverlay', # 0x2dd
'NtGdiDdDDIUpdateOverlay', # 0x2de
'NtGdiDdDDIFlipOverlay', # 0x2df
'NtGdiDdDDIDestroyOverlay', # 0x2e0
'NtGdiDdDDIWaitForVerticalBlankEvent', # 0x2e1
'NtGdiDdDDISetGammaRamp', # 0x2e2
'NtGdiDdDDIGetDeviceState', # 0x2e3
'NtGdiDdDDICreateDCFromMemory', # 0x2e4
'NtGdiDdDDIDestroyDCFromMemory', # 0x2e5
'NtGdiDdDDISetContextSchedulingPriority', # 0x2e6
'NtGdiDdDDIGetContextSchedulingPriority', # 0x2e7
'NtGdiDdDDISetProcessSchedulingPriorityClass', # 0x2e8
'NtGdiDdDDIGetProcessSchedulingPriorityClass', # 0x2e9
'NtGdiDdDDIReleaseProcessVidPnSourceOwners', # 0x2ea
'NtGdiDdDDIGetScanLine', # 0x2eb
'NtGdiDdDDISetQueuedLimit', # 0x2ec
'NtGdiDdDDIPollDisplayChildren', # 0x2ed
'NtGdiDdDDIInvalidateActiveVidPn', # 0x2ee
'NtGdiDdDDICheckOcclusion', # 0x2ef
'NtGdiDdDDIWaitForIdle', # 0x2f0
'NtGdiDdDDICheckMonitorPowerState', # 0x2f1
'NtGdiDdDDICheckExclusiveOwnership', # 0x2f2
'NtGdiDdDDISetDisplayPrivateDriverFormat', # 0x2f3
'NtGdiDdDDISharedPrimaryLockNotification', # 0x2f4
'NtGdiDdDDISharedPrimaryUnLockNotification', # 0x2f5
'DxgStubEnableDirectDrawRedirection', # 0x2f6
'DxgStubDeleteDirectDrawObject', # 0x2f7
'NtGdiGetNumberOfPhysicalMonitors', # 0x2f8
'NtGdiGetPhysicalMonitors', # 0x2f9
'NtGdiGetPhysicalMonitorDescription', # 0x2fa
'NtGdiDestroyPhysicalMonitor', # 0x2fb
'NtGdiDDCCIGetVCPFeature', # 0x2fc
'NtGdiDDCCISetVCPFeature', # 0x2fd
'NtGdiDDCCISaveCurrentSettings', # 0x2fe
'NtGdiDDCCIGetCapabilitiesStringLength', # 0x2ff
'NtGdiDDCCIGetCapabilitiesString', # 0x300
'NtGdiDDCCIGetTimingReport', # 0x301
'NtUserSetMirrorRendering', # 0x302
'NtUserShowSystemCursor', # 0x303
],
]
| gpl-2.0 |
stephanerobert/fake-switches | tests/dell/test_configure_vlan.py | 4 | 3779 | # Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.dell import enable, configuring_vlan, \
assert_running_config_contains_in_order, unconfigure_vlan, \
assert_interface_configuration
from tests.util.protocol_util import with_protocol, ProtocolTest, SshTester, TelnetTester
class DellConfigureVlanTest(ProtocolTest):
    """Protocol-level VLAN configuration tests against the fake Dell switch.

    Each test drives the CLI through ``t`` (write a command, then assert the
    exact prompt/output lines the switch must emit).  The order and byte
    content of every ``readln``/``read`` expectation is significant.

    This class is abstract: ``__test__ = False`` keeps collectors from
    running it directly; the SSH/Telnet subclasses below enable it.
    """
    # Not collected directly; concrete subclasses set __test__ = True.
    __test__ = False
    tester_class = SshTester
    test_switch = "dell"
    @with_protocol
    def test_configuring_a_vlan(self, t):
        """Adding then removing VLAN 1234 must be reflected in running-config."""
        enable(t)
        configuring_vlan(t, 1234)
        # After creation, the vlan database lists both the default VLAN 1
        # and the new 1234.
        assert_running_config_contains_in_order(t, [
            "vlan database",
            "vlan 1,1234",
            "exit"
        ])
        unconfigure_vlan(t, 1234)
        # After removal only the default VLAN remains.
        assert_running_config_contains_in_order(t, [
            "vlan database",
            "vlan 1",
            "exit"
        ])
    @with_protocol
    def test_unconfiguring_a_vlan_failing(self, t):
        """Removing a VLAN that was never created must print a warning, not crash."""
        enable(t)
        t.write("configure")
        t.readln("")
        t.read("my_switch(config)#")
        t.write("vlan database")
        t.readln("")
        t.read("my_switch(config-vlan)#")
        # VLAN 3899 does not exist; the switch warns but stays in vlan mode.
        t.write("no vlan 3899")
        t.readln("Warning: The use of large numbers of VLANs or interfaces may cause significant")
        t.readln("delays in applying the configuration.")
        t.readln("")
        t.readln("")
        t.readln("These VLANs do not exist: 3899.")
        t.readln("")
        t.read("my_switch(config-vlan)#")
        t.write("exit")
        t.readln("")
        t.read("my_switch(config)#")
        t.write("exit")
        t.readln("")
        t.read("my_switch#")
    @with_protocol
    def test_configuring_a_vlan_name(self, t):
        """Exercise the `name` command validation on a VLAN interface."""
        enable(t)
        configuring_vlan(t, 1234)
        t.write("configure")
        t.readln("")
        t.read("my_switch(config)#")
        t.write("interface vlan 1234")
        t.readln("")
        t.read("my_switch(config-if-vlan1234)#")
        # `name` with no argument: incomplete command.
        t.write("name")
        t.readln("")
        t.readln("Command not found / Incomplete command. Use ? to list commands.")
        t.readln("")
        t.read("my_switch(config-if-vlan1234)#")
        # `name` with two tokens: invalid input.
        t.write("name one two")
        t.readln(" ^")
        t.readln("% Invalid input detected at '^' marker.")
        t.readln("")
        t.read("my_switch(config-if-vlan1234)#")
        # 33 characters: one over the limit, rejected.
        t.write("name this-name-is-too-long-buddy-buddy")
        t.readln("Name must be 32 characters or less.")
        t.readln("")
        t.read("my_switch(config-if-vlan1234)#")
        # Exactly 32 characters: accepted.
        t.write("name this-name-is-too-long-buddy-budd")
        t.readln("")
        t.read("my_switch(config-if-vlan1234)#")
        t.write("exit")
        t.readln("")
        t.read("my_switch(config)#")
        t.write("exit")
        t.readln("")
        t.read("my_switch#")
        # The accepted name is persisted, quoted, in the interface config.
        assert_interface_configuration(t, "vlan 1234", [
            "interface vlan 1234",
            "name \"this-name-is-too-long-buddy-budd\"",
            "exit",
        ])
        unconfigure_vlan(t, 1234)
class DellConfigureVlanSshTest(DellConfigureVlanTest):
    """Concrete variant: run the shared VLAN tests over SSH."""
    # Enable collection of the inherited tests.
    __test__ = True
    tester_class = SshTester
class DellConfigureVlanTelnetTest(DellConfigureVlanTest):
    """Concrete variant: run the shared VLAN tests over Telnet."""
    # Enable collection of the inherited tests.
    __test__ = True
    tester_class = TelnetTester
| apache-2.0 |
Lujeni/ansible | test/units/modules/network/f5/test_bigip_gtm_monitor_tcp.py | 38 | 6969 | # -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_gtm_monitor_tcp import ApiParameters
from library.modules.bigip_gtm_monitor_tcp import ModuleParameters
from library.modules.bigip_gtm_monitor_tcp import ModuleManager
from library.modules.bigip_gtm_monitor_tcp import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_gtm_monitor_tcp import ApiParameters
from ansible.modules.network.f5.bigip_gtm_monitor_tcp import ModuleParameters
from ansible.modules.network.f5.bigip_gtm_monitor_tcp import ModuleManager
from ansible.modules.network.f5.bigip_gtm_monitor_tcp import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
# Directory holding the JSON fixtures used by these tests.
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
# Cache of already-loaded fixtures, keyed by resolved file path.
fixture_data = {}
def load_fixture(name):
    """Load fixture *name* from the fixtures directory, caching the result.

    The file content is parsed as JSON when possible; if parsing fails the
    raw text is returned unchanged.
    """
    path = os.path.join(fixture_path, name)
    if path in fixture_data:
        return fixture_data[path]
    with open(path) as handle:
        contents = handle.read()
    try:
        contents = json.loads(contents)
    except Exception:
        # Not JSON; keep the raw string.
        pass
    fixture_data[path] = contents
    return contents
class TestParameters(unittest.TestCase):
    """Verify the parameter adapter classes of bigip_gtm_monitor_tcp."""
    def test_module_parameters(self):
        """ModuleParameters must normalize user-supplied (string) values."""
        args = dict(
            name='foo',
            parent='/Common/my-tcp',
            send='the send string',
            receive='the receive string',
            ip='1.1.1.1',
            port='80',
            interval='10',
            timeout='20',
            ignore_down_response=True,
            transparent=False,
            probe_timeout='30',
            reverse=True
        )
        p = ModuleParameters(params=args)
        assert p.name == 'foo'
        assert p.parent == '/Common/my-tcp'
        assert p.send == 'the send string'
        assert p.receive == 'the receive string'
        # ip + port are combined into the BIG-IP "destination" form.
        assert p.destination == '1.1.1.1:80'
        assert p.ip == '1.1.1.1'
        # String inputs are coerced to integers.
        assert p.port == 80
        assert p.interval == 10
        assert p.timeout == 20
        assert p.ignore_down_response is True
        assert p.transparent is False
        assert p.probe_timeout == 30
        assert p.reverse is True
    def test_api_parameters(self):
        """ApiParameters must expose values from a raw REST API payload."""
        args = load_fixture('load_gtm_monitor_tcp_1.json')
        p = ApiParameters(params=args)
        assert p.name == 'foo'
        assert p.parent == '/Common/tcp'
        assert p.send == 'the send string'
        assert p.receive == 'the receive string'
        assert p.destination == '1.1.1.1:80'
        assert p.ip == '1.1.1.1'
        assert p.port == 80
        assert p.interval == 30
        assert p.timeout == 120
        assert p.ignore_down_response is False
        assert p.transparent is True
        assert p.probe_timeout == 5
        assert p.reverse is True
class TestManager(unittest.TestCase):
    """Exercise ModuleManager.exec_module with all device I/O mocked out."""
    def setUp(self):
        self.spec = ArgumentSpec()
        # Patch module_provisioned at whichever import path is in use
        # (standalone "library" layout first, then the Ansible tree).
        try:
            self.p1 = patch('library.modules.bigip_gtm_monitor_tcp.module_provisioned')
            self.m1 = self.p1.start()
            self.m1.return_value = True
        except Exception:
            self.p1 = patch('ansible.modules.network.f5.bigip_gtm_monitor_tcp.module_provisioned')
            self.m1 = self.p1.start()
            self.m1.return_value = True
    def tearDown(self):
        # Undo the setUp patch so other test classes are unaffected.
        self.p1.stop()
    def test_create_monitor(self, *args):
        """Creating a monitor that does not exist yet reports changed=True."""
        set_module_args(dict(
            name='foo',
            ip='10.10.10.10',
            port=80,
            interval=20,
            timeout=30,
            provider=dict(
                server='localhost',
                password='password',
                user='admin'
            )
        ))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        # exists: False before create, True afterwards.
        mm.exists = Mock(side_effect=[False, True])
        mm.create_on_device = Mock(return_value=True)
        mm.module_provisioned = Mock(return_value=True)
        results = mm.exec_module()
        assert results['changed'] is True
    def test_change_ip(self, *args):
        """Updating the IP of an existing monitor reports the new IP."""
        set_module_args(dict(
            name='foo',
            ip='10.10.10.10',
            port=80,
            interval=20,
            timeout=30,
            provider=dict(
                server='localhost',
                password='password',
                user='admin'
            )
        ))
        # Device currently has the fixture's values (ip 1.1.1.1).
        current = ApiParameters(params=load_fixture('load_gtm_monitor_tcp_1.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(side_effect=[True, True])
        mm.update_on_device = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.module_provisioned = Mock(return_value=True)
        results = mm.exec_module()
        assert results['changed'] is True
        assert results['ip'] == '10.10.10.10'
    def test_change_ignore_down_response(self, *args):
        """Flipping ignore_down_response on an existing monitor is reported."""
        set_module_args(dict(
            name='foo',
            ignore_down_response=True,
            provider=dict(
                server='localhost',
                password='password',
                user='admin'
            )
        ))
        # Fixture has ignore_down_response False, so this is a change.
        current = ApiParameters(params=load_fixture('load_gtm_monitor_tcp_1.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(side_effect=[True, True])
        mm.update_on_device = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.module_provisioned = Mock(return_value=True)
        results = mm.exec_module()
        assert results['changed'] is True
        assert results['ignore_down_response'] is True
| gpl-3.0 |
rcsanchez97/mongo-c-driver | build/evergreen_config_generator/functions.py | 3 | 3172 | # Copyright 2018-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import OrderedDict as OD
from textwrap import dedent
from evergreen_config_generator import ConfigObject
def func(func_name, **kwargs):
    """Return an Evergreen "call function" mapping.

    The mapping has the function name under ``func`` and, when keyword
    variables are supplied, a ``vars`` mapping with keys in sorted order.
    """
    call = OD()
    call['func'] = func_name
    if kwargs:
        call['vars'] = OD(sorted(kwargs.items()))
    return call
def bootstrap(VERSION='latest', TOPOLOGY=None, **kwargs):
    """Return the 'bootstrap mongo-orchestration' function call.

    TOPOLOGY is included in the vars only when truthy.  Merging it into
    kwargs is equivalent to the previous duplicated two-branch call,
    because ``func`` sorts the vars mapping regardless of how the
    keyword arguments were passed.
    """
    if TOPOLOGY:
        kwargs['TOPOLOGY'] = TOPOLOGY
    return func('bootstrap mongo-orchestration',
                VERSION=VERSION,
                **kwargs)
def run_tests(URI=None, **kwargs):
    """Return the 'run tests' function call, including URI only when set.

    Folding URI into kwargs removes the duplicated call; ``func`` sorts
    the vars mapping, so the result is identical to the old two-branch
    version.
    """
    if URI:
        kwargs['URI'] = URI
    return func('run tests', **kwargs)
def s3_put(remote_file, project_path=True, **kwargs):
    """Return an ``s3.put`` command uploading to *remote_file*.

    When *project_path* is true the remote file is placed under the
    Evergreen ``${project}/`` prefix.  Extra keyword arguments are merged
    into (and may override) the default params.
    """
    if project_path:
        remote_file = '${project}/' + remote_file
    params = OD()
    params['aws_key'] = '${aws_key}'
    params['aws_secret'] = '${aws_secret}'
    params['remote_file'] = remote_file
    params['bucket'] = 'mciuploads'
    params['permissions'] = 'public-read'
    params.update(kwargs)
    return OD([('command', 's3.put'), ('params', params)])
def strip_lines(s):
    """Return *s* with empty and whitespace-only lines removed."""
    kept = [line for line in s.split('\n') if line.strip()]
    return '\n'.join(kept)
def shell_exec(script, test=True, errexit=True, xtrace=False, silent=False,
               continue_on_err=False, working_dir=None, background=False):
    """Return a ``shell.exec`` command that runs *script* under bash.

    ``set -o errexit`` / ``set -o xtrace`` prologues are prepended when
    requested, and the script body is dedented with blank lines dropped.
    Optional params (silent, working_dir, continue_on_err, background) are
    emitted only when set, preserving the mapping's key order.
    """
    prologue = []
    if errexit:
        prologue.append('set -o errexit\n')
    if xtrace:
        prologue.append('set -o xtrace\n')
    body = ''.join(prologue) + dedent(strip_lines(script))
    command = OD([('command', 'shell.exec')])
    if test:
        command['type'] = 'test'
    params = OD()
    command['params'] = params
    if silent:
        params['silent'] = True
    if working_dir is not None:
        params['working_dir'] = working_dir
    if continue_on_err:
        params['continue_on_err'] = True
    if background:
        params['background'] = True
    params['shell'] = 'bash'
    params['script'] = body
    return command
def targz_pack(target, source_dir, *include):
    """Return an ``archive.targz_pack`` command mapping."""
    params = OD()
    params['target'] = target
    params['source_dir'] = source_dir
    params['include'] = list(include)
    return OD([('command', 'archive.targz_pack'), ('params', params)])
class Function(ConfigObject):
    """A named Evergreen function: an ordered sequence of commands.

    Serializes (via ``to_dict``) to the list of command mappings that
    were passed to the constructor, in order.
    """
    def __init__(self, *commands):
        super(Function, self).__init__()
        # Commands are stored in the order they were given.
        self.commands = commands
    def to_dict(self):
        # Emit a plain list so the YAML/config generator can dump it.
        return list(self.commands)
XtinaSchelin/isfdb-calibre | isfdb-plugin/config.py | 1 | 3031 | #!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2015, Xtina Schelin <xtina.schelin@gmail.com>'
__docformat__ = 'restructuredtext en'
try:
from PyQt5 import Qt as QtGui
except ImportError:
from PyQt4 import QtGui
try:
from PyQt5.Qt import QLabel, QGridLayout, Qt, QGroupBox, QCheckBox
except ImportError:
from PyQt4.Qt import QLabel, QGridLayout, Qt, QGroupBox, QCheckBox
from calibre.gui2.metadata.config import ConfigWidget as DefaultConfigWidget
from calibre.utils.config import JSONConfig
# Name of the option group inside the plugin's JSON preference store.
STORE_NAME = 'Options'
# Keys within the store.
KEY_MAX_DOWNLOADS = 'maxDownloads'      # max title/author matches to evaluate
KEY_APPEND_CONTENTS = 'appendContents'  # append a Contents section to comments
# Values used until the user saves preferences for the first time.
DEFAULT_STORE_VALUES = {
    KEY_MAX_DOWNLOADS: 1,
    KEY_APPEND_CONTENTS: False
}
# This is where all preferences for this plugin will be stored.
plugin_prefs = JSONConfig('plugins/ISFDB')
# Set defaults.
plugin_prefs.defaults[STORE_NAME] = DEFAULT_STORE_VALUES
class ConfigWidget(DefaultConfigWidget):
    """Configuration widget for the ISFDB metadata source plugin.

    Presents a spin box for the maximum number of title/author search
    matches and a checkbox controlling whether the Contents section is
    appended to comments; ``commit`` persists both to ``plugin_prefs``.
    """
    def __init__(self, plugin):
        DefaultConfigWidget.__init__(self, plugin)
        c = plugin_prefs[STORE_NAME]
        other_group_box = QGroupBox('Other options', self)
        self.l.addWidget(other_group_box, self.l.rowCount(), 0, 1, 2)
        other_group_box_layout = QGridLayout()
        other_group_box.setLayout(other_group_box_layout)
        # Maximum # of title/author searches to review.
        max_label = QLabel('Maximum title/author search matches to evaluate (1 = fastest):', self)
        max_label.setToolTip('ISFDB doesn\'t always have links to large covers for every ISBN\n'
                             'of the same book. Increasing this value will take effect when doing\n'
                             'title/author searches to consider more ISBN editions.\n\n'
                             'This will increase the potential likelihood of getting a larger cover,\n'
                             'though does not guarantee it.')
        other_group_box_layout.addWidget(max_label, 0, 0, 1, 1)
        self.max_downloads_spin = QtGui.QSpinBox(self)
        self.max_downloads_spin.setMinimum(1)
        self.max_downloads_spin.setMaximum(5)
        self.max_downloads_spin.setProperty('value', c.get(KEY_MAX_DOWNLOADS, DEFAULT_STORE_VALUES[KEY_MAX_DOWNLOADS]))
        other_group_box_layout.addWidget(self.max_downloads_spin, 0, 1, 1, 1)
        other_group_box_layout.setColumnStretch(2, 1)
        # Contents field, if possible.
        self.contents_checkbox = QCheckBox('Append Contents if available to comments', self)
        self.contents_checkbox.setToolTip('Choosing this option will write the Contents section to the comments\n'
                                          'field, if such a section exists.')
        self.contents_checkbox.setChecked(c.get(KEY_APPEND_CONTENTS, DEFAULT_STORE_VALUES[KEY_APPEND_CONTENTS]))
        other_group_box_layout.addWidget(self.contents_checkbox, 2, 0, 1, 3)
    def commit(self):
        """Persist the widget state into the plugin's JSON preference store."""
        DefaultConfigWidget.commit(self)
        new_prefs = {}
        # BUGFIX: the old code wrapped the value in `unicode(...)`, which is
        # a NameError under Python 3 / PyQt5 (the import path this file tries
        # first).  QSpinBox.value() already returns an int.
        new_prefs[KEY_MAX_DOWNLOADS] = int(self.max_downloads_spin.value())
        new_prefs[KEY_APPEND_CONTENTS] = self.contents_checkbox.checkState() == Qt.Checked
        plugin_prefs[STORE_NAME] = new_prefs
| mit |
40223117cda/2015cdaw13 | static/Brython3.1.0-20150301-090019/Lib/unittest/suite.py | 748 | 9715 | """TestSuite"""
import sys
from . import case
from . import util
__unittest = True
def _call_if_exists(parent, attr):
func = getattr(parent, attr, lambda: None)
func()
class BaseTestSuite(object):
    """A simple test suite that doesn't provide class or module shared fixtures.
    """
    def __init__(self, tests=()):
        # Tests are kept in insertion order; run() iterates them in order.
        self._tests = []
        self.addTests(tests)
    def __repr__(self):
        return "<%s tests=%s>" % (util.strclass(self.__class__), list(self))
    def __eq__(self, other):
        # Suites compare equal when they are the same class and hold the
        # same tests in the same order.
        if not isinstance(other, self.__class__):
            return NotImplemented
        return list(self) == list(other)
    def __ne__(self, other):
        return not self == other
    def __iter__(self):
        return iter(self._tests)
    def countTestCases(self):
        # Recursive: nested suites report the total of their leaves.
        cases = 0
        for test in self:
            cases += test.countTestCases()
        return cases
    def addTest(self, test):
        # sanity checks
        if not callable(test):
            raise TypeError("{} is not callable".format(repr(test)))
        # Reject test *classes*; callers must pass instances.
        if isinstance(test, type) and issubclass(test,
                                                 (case.TestCase, TestSuite)):
            raise TypeError("TestCases and TestSuites must be instantiated "
                            "before passing them to addTest()")
        self._tests.append(test)
    def addTests(self, tests):
        # A string is iterable but is almost certainly a caller mistake.
        if isinstance(tests, str):
            raise TypeError("tests must be an iterable of tests, not a string")
        for test in tests:
            self.addTest(test)
    def run(self, result):
        # Run each test, honouring result.shouldStop (e.g. failfast).
        for test in self:
            if result.shouldStop:
                break
            test(result)
        return result
    def __call__(self, *args, **kwds):
        # A suite is callable like a test case: suite(result).
        return self.run(*args, **kwds)
    def debug(self):
        """Run the tests without collecting errors in a TestResult"""
        for test in self:
            test.debug()
class TestSuite(BaseTestSuite):
    """A test suite is a composite test consisting of a number of TestCases.

    For use, create an instance of TestSuite, then add test case instances.
    When all tests have been added, the suite can be passed to a test
    runner, such as TextTestRunner. It will run the individual test cases
    in the order in which they were added, aggregating the results. When
    subclassing, do not forget to call the base class constructor.
    """
    def run(self, result, debug=False):
        # topLevel is True only for the outermost suite; that suite is
        # responsible for the final class/module teardown.
        topLevel = False
        if getattr(result, '_testRunEntered', False) is False:
            result._testRunEntered = topLevel = True
        for test in self:
            if result.shouldStop:
                break
            if _isnotsuite(test):
                # A real test case: manage class/module fixtures around it.
                self._tearDownPreviousClass(test, result)
                self._handleModuleFixture(test, result)
                self._handleClassSetUp(test, result)
                result._previousTestClass = test.__class__
                # Skip tests whose class or module setup already failed.
                if (getattr(test.__class__, '_classSetupFailed', False) or
                    getattr(result, '_moduleSetUpFailed', False)):
                    continue
            if not debug:
                test(result)
            else:
                test.debug()
        if topLevel:
            # Outermost run: flush any pending teardown.
            self._tearDownPreviousClass(None, result)
            self._handleModuleTearDown(result)
            result._testRunEntered = False
        return result
    def debug(self):
        """Run the tests without collecting errors in a TestResult"""
        debug = _DebugResult()
        self.run(debug, True)
    ################################
    def _handleClassSetUp(self, test, result):
        # Run setUpClass when we transition into a new test class.
        previousClass = getattr(result, '_previousTestClass', None)
        currentClass = test.__class__
        if currentClass == previousClass:
            return
        if result._moduleSetUpFailed:
            return
        if getattr(currentClass, "__unittest_skip__", False):
            return
        try:
            currentClass._classSetupFailed = False
        except TypeError:
            # test may actually be a function
            # so its class will be a builtin-type
            pass
        setUpClass = getattr(currentClass, 'setUpClass', None)
        if setUpClass is not None:
            _call_if_exists(result, '_setupStdout')
            try:
                setUpClass()
            except Exception as e:
                # In debug mode, propagate; otherwise record as an error
                # attached to a synthetic "setUpClass (...)" test.
                if isinstance(result, _DebugResult):
                    raise
                currentClass._classSetupFailed = True
                className = util.strclass(currentClass)
                errorName = 'setUpClass (%s)' % className
                self._addClassOrModuleLevelException(result, e, errorName)
            finally:
                _call_if_exists(result, '_restoreStdout')
    def _get_previous_module(self, result):
        # Module name of the previously-run test class, or None.
        previousModule = None
        previousClass = getattr(result, '_previousTestClass', None)
        if previousClass is not None:
            previousModule = previousClass.__module__
        return previousModule
    def _handleModuleFixture(self, test, result):
        # Run setUpModule when we transition into a new module
        # (tearing down the previous module first).
        previousModule = self._get_previous_module(result)
        currentModule = test.__class__.__module__
        if currentModule == previousModule:
            return
        self._handleModuleTearDown(result)
        result._moduleSetUpFailed = False
        try:
            module = sys.modules[currentModule]
        except KeyError:
            return
        setUpModule = getattr(module, 'setUpModule', None)
        if setUpModule is not None:
            _call_if_exists(result, '_setupStdout')
            try:
                setUpModule()
            except Exception as e:
                if isinstance(result, _DebugResult):
                    raise
                result._moduleSetUpFailed = True
                errorName = 'setUpModule (%s)' % currentModule
                self._addClassOrModuleLevelException(result, e, errorName)
            finally:
                _call_if_exists(result, '_restoreStdout')
    def _addClassOrModuleLevelException(self, result, exception, errorName):
        # Record a fixture-level exception against a synthetic test
        # (_ErrorHolder); SkipTest becomes a skip rather than an error.
        error = _ErrorHolder(errorName)
        addSkip = getattr(result, 'addSkip', None)
        if addSkip is not None and isinstance(exception, case.SkipTest):
            addSkip(error, str(exception))
        else:
            result.addError(error, sys.exc_info())
    def _handleModuleTearDown(self, result):
        # Run tearDownModule for the previous module, if its setup succeeded.
        previousModule = self._get_previous_module(result)
        if previousModule is None:
            return
        if result._moduleSetUpFailed:
            return
        try:
            module = sys.modules[previousModule]
        except KeyError:
            return
        tearDownModule = getattr(module, 'tearDownModule', None)
        if tearDownModule is not None:
            _call_if_exists(result, '_setupStdout')
            try:
                tearDownModule()
            except Exception as e:
                if isinstance(result, _DebugResult):
                    raise
                errorName = 'tearDownModule (%s)' % previousModule
                self._addClassOrModuleLevelException(result, e, errorName)
            finally:
                _call_if_exists(result, '_restoreStdout')
    def _tearDownPreviousClass(self, test, result):
        # Run tearDownClass for the previous class when moving to a new one
        # (or at the end of the run, when test is None).
        previousClass = getattr(result, '_previousTestClass', None)
        currentClass = test.__class__
        if currentClass == previousClass:
            return
        if getattr(previousClass, '_classSetupFailed', False):
            return
        if getattr(result, '_moduleSetUpFailed', False):
            return
        if getattr(previousClass, "__unittest_skip__", False):
            return
        tearDownClass = getattr(previousClass, 'tearDownClass', None)
        if tearDownClass is not None:
            _call_if_exists(result, '_setupStdout')
            try:
                tearDownClass()
            except Exception as e:
                if isinstance(result, _DebugResult):
                    raise
                className = util.strclass(previousClass)
                errorName = 'tearDownClass (%s)' % className
                self._addClassOrModuleLevelException(result, e, errorName)
            finally:
                _call_if_exists(result, '_restoreStdout')
class _ErrorHolder(object):
"""
Placeholder for a TestCase inside a result. As far as a TestResult
is concerned, this looks exactly like a unit test. Used to insert
arbitrary errors into a test suite run.
"""
# Inspired by the ErrorHolder from Twisted:
# http://twistedmatrix.com/trac/browser/trunk/twisted/trial/runner.py
# attribute used by TestResult._exc_info_to_string
failureException = None
def __init__(self, description):
self.description = description
def id(self):
return self.description
def shortDescription(self):
return None
def __repr__(self):
return "<ErrorHolder description=%r>" % (self.description,)
def __str__(self):
return self.id()
def run(self, result):
# could call result.addError(...) - but this test-like object
# shouldn't be run anyway
pass
def __call__(self, result):
return self.run(result)
def countTestCases(self):
return 0
def _isnotsuite(test):
"A crude way to tell apart testcases and suites with duck-typing"
try:
iter(test)
except TypeError:
return True
return False
class _DebugResult(object):
    "Used by the TestSuite to hold previous class when running in debug."
    # Minimal stand-in for a TestResult: only the attributes that
    # TestSuite.run reads/writes.
    _previousTestClass = None
    _moduleSetUpFailed = False
    shouldStop = False
| gpl-3.0 |
caveman-dick/ansible | test/units/playbook/test_playbook.py | 119 | 2239 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook import Playbook
from ansible.vars.manager import VariableManager
from units.mock.loader import DictDataLoader
class TestPlaybook(unittest.TestCase):
    """Tests for Playbook loading/parsing using an in-memory fake loader."""
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def test_empty_playbook(self):
        # Constructing a Playbook with no file must not raise.
        fake_loader = DictDataLoader({})
        p = Playbook(loader=fake_loader)
    def test_basic_playbook(self):
        # A minimal one-play playbook loads and yields its plays.
        fake_loader = DictDataLoader({
            "test_file.yml": """
            - hosts: all
            """,
        })
        p = Playbook.load("test_file.yml", loader=fake_loader)
        plays = p.get_plays()
    def test_bad_playbook_files(self):
        fake_loader = DictDataLoader({
            # represents a playbook which is not a list of plays
            "bad_list.yml": """
            foo: bar
            """,
            # represents a playbook where a play entry is mis-formatted
            "bad_entry.yml": """
            -
              - "This should be a mapping..."
            """,
        })
        vm = VariableManager()
        # Both malformed files must raise a parser error, not load silently.
        self.assertRaises(AnsibleParserError, Playbook.load, "bad_list.yml", vm, fake_loader)
        self.assertRaises(AnsibleParserError, Playbook.load, "bad_entry.yml", vm, fake_loader)
| gpl-3.0 |
olologin/scikit-learn | sklearn/linear_model/tests/test_base.py | 83 | 15089 | # Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Fabian Pedregosa <fabian.pedregosa@inria.fr>
#
# License: BSD 3 clause
import numpy as np
from scipy import sparse
from scipy import linalg
from itertools import product
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import ignore_warnings
from sklearn.linear_model.base import LinearRegression
from sklearn.linear_model.base import _preprocess_data
from sklearn.linear_model.base import sparse_center_data, center_data
from sklearn.linear_model.base import _rescale_data
from sklearn.utils import check_random_state
from sklearn.utils.testing import assert_greater
from sklearn.datasets.samples_generator import make_sparse_uncorrelated
from sklearn.datasets.samples_generator import make_regression
rng = np.random.RandomState(0)
def test_linear_regression():
    """LinearRegression recovers the exact line on a trivial dataset."""
    # Two points on the line y = x: slope 1, intercept 0.
    sample_X = [[1], [2]]
    sample_y = [1, 2]
    model = LinearRegression()
    model.fit(sample_X, sample_y)
    assert_array_almost_equal(model.coef_, [1])
    assert_array_almost_equal(model.intercept_, [0])
    assert_array_almost_equal(model.predict(sample_X), [1, 2])
    # Degenerate single-sample input: coefficient and intercept collapse to 0.
    degenerate_X = [[1]]
    degenerate_y = [0]
    model = LinearRegression()
    model.fit(degenerate_X, degenerate_y)
    assert_array_almost_equal(model.coef_, [0])
    assert_array_almost_equal(model.intercept_, [0])
    assert_array_almost_equal(model.predict(degenerate_X), [0])
def test_linear_regression_sample_weights():
    """Weighted fits must match the closed-form weighted least squares."""
    # TODO: loop over sparse data as well
    rng = np.random.RandomState(0)
    # It would not work with under-determined systems
    for n_samples, n_features in ((6, 5), ):
        y = rng.randn(n_samples)
        X = rng.randn(n_samples, n_features)
        sample_weight = 1.0 + rng.rand(n_samples)
        for intercept in (True, False):
            # LinearRegression with explicit sample_weight
            reg = LinearRegression(fit_intercept=intercept)
            reg.fit(X, y, sample_weight=sample_weight)
            coefs1 = reg.coef_
            inter1 = reg.intercept_
            assert_equal(reg.coef_.shape, (X.shape[1], ))  # sanity checks
            assert_greater(reg.score(X, y), 0.5)
            # Closed form of the weighted least square
            # theta = (X^T W X)^(-1) * X^T W y
            W = np.diag(sample_weight)
            if intercept is False:
                X_aug = X
            else:
                # Prepend a column of ones so the intercept is theta[0].
                dummy_column = np.ones(shape=(n_samples, 1))
                X_aug = np.concatenate((dummy_column, X), axis=1)
            coefs2 = linalg.solve(X_aug.T.dot(W).dot(X_aug),
                                  X_aug.T.dot(W).dot(y))
            if intercept is False:
                assert_array_almost_equal(coefs1, coefs2)
            else:
                assert_array_almost_equal(coefs1, coefs2[1:])
                assert_almost_equal(inter1, coefs2[0])
def test_raises_value_error_if_sample_weights_greater_than_1d():
    # Sample weights must be either scalar or 1D
    # NOTE(review): despite its name, this test only checks that valid
    # (1D / scalar) weights are accepted; it never asserts that >1D
    # weights raise ValueError. The raising half of the test appears to
    # be missing — confirm against upstream and add an assert_raises
    # block if LinearRegression.fit indeed rejects 2D weights.
    n_sampless = [2, 3]
    n_featuress = [3, 2]
    for n_samples, n_features in zip(n_sampless, n_featuress):
        X = rng.randn(n_samples, n_features)
        y = rng.randn(n_samples)
        sample_weights_OK = rng.randn(n_samples) ** 2 + 1
        sample_weights_OK_1 = 1.
        sample_weights_OK_2 = 2.
        reg = LinearRegression()
        # make sure the "OK" sample weights actually work
        reg.fit(X, y, sample_weights_OK)
        reg.fit(X, y, sample_weights_OK_1)
        reg.fit(X, y, sample_weights_OK_2)
def test_fit_intercept():
    """coef_ shape/ndim must not depend on whether an intercept is fitted."""
    X2 = np.array([[0.38349978, 0.61650022],
                   [0.58853682, 0.41146318]])
    X3 = np.array([[0.27677969, 0.70693172, 0.01628859],
                   [0.08385139, 0.20692515, 0.70922346]])
    y = np.array([1, 1])
    # Fit every (design matrix, intercept flag) combination once.
    fitted = {
        (key, intercept): LinearRegression(fit_intercept=intercept).fit(X, y)
        for key, X in (('X2', X2), ('X3', X3))
        for intercept in (False, True)
    }
    assert_equal(fitted[('X2', True)].coef_.shape,
                 fitted[('X2', False)].coef_.shape)
    assert_equal(fitted[('X3', True)].coef_.shape,
                 fitted[('X3', False)].coef_.shape)
    assert_equal(fitted[('X2', False)].coef_.ndim,
                 fitted[('X3', False)].coef_.ndim)
def test_linear_regression_sparse(random_state=0):
    """LinearRegression must also work on sparse design matrices."""
    rng_local = check_random_state(random_state)
    for _ in range(10):
        n = 100
        # Identity design matrix: the solution is y itself.
        X = sparse.eye(n, n)
        beta = rng_local.rand(n)
        y = X * beta[:, np.newaxis]
        ols = LinearRegression()
        ols.fit(X, y.ravel())
        assert_array_almost_equal(beta, ols.coef_ + ols.intercept_)
        assert_array_almost_equal(ols.predict(X) - y.ravel(), 0)
def test_linear_regression_multiple_outcome(random_state=0):
    """Fitting two identical targets must match two single-target fits."""
    X, y = make_regression(random_state=random_state)
    # Stack the same target twice: shape (n_samples, 2).
    stacked_targets = np.vstack((y, y)).T
    n_features = X.shape[1]
    model = LinearRegression(fit_intercept=True)
    model.fit(X, stacked_targets)
    assert_equal(model.coef_.shape, (2, n_features))
    multi_pred = model.predict(X)
    # Refit on the single target and compare predictions column-wise.
    model.fit(X, y)
    single_pred = model.predict(X)
    assert_array_almost_equal(np.vstack((single_pred, single_pred)).T,
                              multi_pred, decimal=3)
def test_linear_regression_sparse_multiple_outcome(random_state=0):
    """Multi-output fits on sparse data must match single-output fits."""
    random_state = check_random_state(random_state)
    X, y = make_sparse_uncorrelated(random_state=random_state)
    X = sparse.coo_matrix(X)
    # Duplicate the target so Y has two identical columns.
    Y = np.vstack((y, y)).T
    n_features = X.shape[1]
    ols = LinearRegression()
    ols.fit(X, Y)
    # One coefficient row per output.
    assert_equal(ols.coef_.shape, (2, n_features))
    Y_pred = ols.predict(X)
    # Refit on the single target; predictions must agree column-wise.
    ols.fit(X, y.ravel())
    y_pred = ols.predict(X)
    assert_array_almost_equal(np.vstack((y_pred, y_pred)).T, Y_pred, decimal=3)
def test_preprocess_data():
    """_preprocess_data centering/scaling for all fit_intercept/normalize combos."""
    n_samples = 200
    n_features = 2
    X = rng.rand(n_samples, n_features)
    y = rng.rand(n_samples)
    expected_X_mean = np.mean(X, axis=0)
    expected_X_norm = np.std(X, axis=0) * np.sqrt(X.shape[0])
    expected_y_mean = np.mean(y, axis=0)
    # No intercept, no normalize: data passes through untouched.
    Xt, yt, X_mean, y_mean, X_norm = \
        _preprocess_data(X, y, fit_intercept=False, normalize=False)
    assert_array_almost_equal(X_mean, np.zeros(n_features))
    assert_array_almost_equal(y_mean, 0)
    assert_array_almost_equal(X_norm, np.ones(n_features))
    assert_array_almost_equal(Xt, X)
    assert_array_almost_equal(yt, y)
    # Intercept only: X and y are centered, no scaling.
    Xt, yt, X_mean, y_mean, X_norm = \
        _preprocess_data(X, y, fit_intercept=True, normalize=False)
    assert_array_almost_equal(X_mean, expected_X_mean)
    assert_array_almost_equal(y_mean, expected_y_mean)
    assert_array_almost_equal(X_norm, np.ones(n_features))
    assert_array_almost_equal(Xt, X - expected_X_mean)
    assert_array_almost_equal(yt, y - expected_y_mean)
    # Intercept + normalize: X is centered and scaled, y is centered.
    Xt, yt, X_mean, y_mean, X_norm = \
        _preprocess_data(X, y, fit_intercept=True, normalize=True)
    assert_array_almost_equal(X_mean, expected_X_mean)
    assert_array_almost_equal(y_mean, expected_y_mean)
    assert_array_almost_equal(X_norm, expected_X_norm)
    assert_array_almost_equal(Xt, (X - expected_X_mean) / expected_X_norm)
    assert_array_almost_equal(yt, y - expected_y_mean)
def test_preprocess_data_multioutput():
    """y centering in _preprocess_data must handle multi-output targets."""
    n_samples = 200
    n_features = 3
    n_outputs = 2
    X = rng.rand(n_samples, n_features)
    y = rng.rand(n_samples, n_outputs)
    expected_y_mean = np.mean(y, axis=0)
    # Check both the dense and the sparse (CSC) code paths.
    args = [X, sparse.csc_matrix(X)]
    for X in args:
        # No intercept: y passes through.
        _, yt, _, y_mean, _ = _preprocess_data(X, y, fit_intercept=False,
                                               normalize=False)
        assert_array_almost_equal(y_mean, np.zeros(n_outputs))
        assert_array_almost_equal(yt, y)
        # With intercept: y is centered per output column.
        _, yt, _, y_mean, _ = _preprocess_data(X, y, fit_intercept=True,
                                               normalize=False)
        assert_array_almost_equal(y_mean, expected_y_mean)
        assert_array_almost_equal(yt, y - y_mean)
        # normalize affects X only, so y centering is unchanged.
        _, yt, _, y_mean, _ = _preprocess_data(X, y, fit_intercept=True,
                                               normalize=True)
        assert_array_almost_equal(y_mean, expected_y_mean)
        assert_array_almost_equal(yt, y - y_mean)
def test_preprocess_data_weighted():
    """With sample_weight, centering uses the weighted means."""
    n_samples = 200
    n_features = 2
    X = rng.rand(n_samples, n_features)
    y = rng.rand(n_samples)
    sample_weight = rng.rand(n_samples)
    expected_X_mean = np.average(X, axis=0, weights=sample_weight)
    expected_y_mean = np.average(y, axis=0, weights=sample_weight)
    # XXX: if normalize=True, should we expect a weighted standard deviation?
    #      Currently not weighted, but calculated with respect to weighted mean
    expected_X_norm = (np.sqrt(X.shape[0]) *
                       np.mean((X - expected_X_mean) ** 2, axis=0) ** .5)
    # Weighted centering, no scaling.
    Xt, yt, X_mean, y_mean, X_norm = \
        _preprocess_data(X, y, fit_intercept=True, normalize=False,
                         sample_weight=sample_weight)
    assert_array_almost_equal(X_mean, expected_X_mean)
    assert_array_almost_equal(y_mean, expected_y_mean)
    assert_array_almost_equal(X_norm, np.ones(n_features))
    assert_array_almost_equal(Xt, X - expected_X_mean)
    assert_array_almost_equal(yt, y - expected_y_mean)
    # Weighted centering plus (unweighted) scaling about the weighted mean.
    Xt, yt, X_mean, y_mean, X_norm = \
        _preprocess_data(X, y, fit_intercept=True, normalize=True,
                         sample_weight=sample_weight)
    assert_array_almost_equal(X_mean, expected_X_mean)
    assert_array_almost_equal(y_mean, expected_y_mean)
    assert_array_almost_equal(X_norm, expected_X_norm)
    assert_array_almost_equal(Xt, (X - expected_X_mean) / expected_X_norm)
    assert_array_almost_equal(yt, y - expected_y_mean)
def test_sparse_preprocess_data_with_return_mean():
    """Sparse X: with return_mean=True the means are reported, but X itself
    is never centered (Xt stays equal to X) -- only scaling touches X."""
    n_samples = 200
    n_features = 2
    # random_state not supported yet in sparse.rand
    X = sparse.rand(n_samples, n_features, density=.5)  # , random_state=rng
    X = X.tolil()
    y = rng.rand(n_samples)
    XA = X.toarray()
    expected_X_norm = np.std(XA, axis=0) * np.sqrt(X.shape[0])

    # No intercept: everything passes through unchanged.
    Xt, yt, X_mean, y_mean, X_norm = \
        _preprocess_data(X, y, fit_intercept=False, normalize=False,
                         return_mean=True)
    assert_array_almost_equal(X_mean, np.zeros(n_features))
    assert_array_almost_equal(y_mean, 0)
    assert_array_almost_equal(X_norm, np.ones(n_features))
    assert_array_almost_equal(Xt.A, XA)
    assert_array_almost_equal(yt, y)

    # Intercept: means are returned, but sparse X is NOT centered.
    Xt, yt, X_mean, y_mean, X_norm = \
        _preprocess_data(X, y, fit_intercept=True, normalize=False,
                         return_mean=True)
    assert_array_almost_equal(X_mean, np.mean(XA, axis=0))
    assert_array_almost_equal(y_mean, np.mean(y, axis=0))
    assert_array_almost_equal(X_norm, np.ones(n_features))
    assert_array_almost_equal(Xt.A, XA)
    assert_array_almost_equal(yt, y - np.mean(y, axis=0))

    # Intercept + normalize: X is divided column-wise by its norms.
    Xt, yt, X_mean, y_mean, X_norm = \
        _preprocess_data(X, y, fit_intercept=True, normalize=True,
                         return_mean=True)
    assert_array_almost_equal(X_mean, np.mean(XA, axis=0))
    assert_array_almost_equal(y_mean, np.mean(y, axis=0))
    assert_array_almost_equal(X_norm, expected_X_norm)
    assert_array_almost_equal(Xt.A, XA / expected_X_norm)
    assert_array_almost_equal(yt, y - np.mean(y, axis=0))
def test_csr_preprocess_data():
    """_preprocess_data must keep CSR input in CSR format."""
    X, y = make_regression()
    X[X < 2.5] = 0.0
    X_csr = sparse.csr_matrix(X)
    X_out, y, _, _, _ = _preprocess_data(X_csr, y, True)
    assert_equal(X_out.getformat(), 'csr')
def test_rescale_data():
    """_rescale_data must scale rows of X and entries of y by sqrt(weight)."""
    n_samples, n_features = 200, 2
    # NOTE: keep the rng calls in this order -- the module-level rng is a
    # shared RandomState, so reordering would change the drawn values.
    sample_weight = 1.0 + rng.rand(n_samples)
    X = rng.rand(n_samples, n_features)
    y = rng.rand(n_samples)

    got_X, got_y = _rescale_data(X, y, sample_weight)

    sqrt_w = np.sqrt(sample_weight)
    assert_array_almost_equal(got_X, X * sqrt_w[:, np.newaxis])
    assert_array_almost_equal(got_y, y * sqrt_w)
@ignore_warnings  # all deprecation warnings
def test_deprecation_center_data():
    """Deprecated (sparse_)center_data must agree with _preprocess_data.

    Compares results over the full grid of fit_intercept / normalize / copy /
    sample_weight combinations, for both dense and sparse X.
    """
    n_samples = 200
    n_features = 2

    w = 1.0 + rng.rand(n_samples)
    X = rng.rand(n_samples, n_features)
    y = rng.rand(n_samples)

    # BUG FIX: itertools.product returns a one-shot iterator; the original
    # code exhausted it in the dense loop below, so the sparse-case loop
    # silently ran zero iterations.  Materialize it so both loops iterate.
    param_grid = list(product([True, False], [True, False], [True, False],
                              [None, w]))

    # Dense cases: deprecated center_data vs. _preprocess_data.
    for (fit_intercept, normalize, copy, sample_weight) in param_grid:
        XX = X.copy()  # such that we can try copy=False as well
        X1, y1, X1_mean, X1_var, y1_mean = \
            center_data(XX, y, fit_intercept=fit_intercept,
                        normalize=normalize, copy=copy,
                        sample_weight=sample_weight)
        XX = X.copy()
        X2, y2, X2_mean, X2_var, y2_mean = \
            _preprocess_data(XX, y, fit_intercept=fit_intercept,
                             normalize=normalize, copy=copy,
                             sample_weight=sample_weight)
        assert_array_almost_equal(X1, X2)
        assert_array_almost_equal(y1, y2)
        assert_array_almost_equal(X1_mean, X2_mean)
        assert_array_almost_equal(X1_var, X2_var)
        assert_array_almost_equal(y1_mean, y2_mean)

    # Sparse cases
    X = sparse.csr_matrix(X)

    for (fit_intercept, normalize, copy, sample_weight) in param_grid:
        X1, y1, X1_mean, X1_var, y1_mean = \
            center_data(X, y, fit_intercept=fit_intercept, normalize=normalize,
                        copy=copy, sample_weight=sample_weight)
        X2, y2, X2_mean, X2_var, y2_mean = \
            _preprocess_data(X, y, fit_intercept=fit_intercept,
                             normalize=normalize, copy=copy,
                             sample_weight=sample_weight, return_mean=False)
        assert_array_almost_equal(X1.toarray(), X2.toarray())
        assert_array_almost_equal(y1, y2)
        assert_array_almost_equal(X1_mean, X2_mean)
        assert_array_almost_equal(X1_var, X2_var)
        assert_array_almost_equal(y1_mean, y2_mean)

    # Deprecated sparse_center_data vs. _preprocess_data(return_mean=True).
    for (fit_intercept, normalize) in product([True, False], [True, False]):
        X1, y1, X1_mean, X1_var, y1_mean = \
            sparse_center_data(X, y, fit_intercept=fit_intercept,
                               normalize=normalize)
        X2, y2, X2_mean, X2_var, y2_mean = \
            _preprocess_data(X, y, fit_intercept=fit_intercept,
                             normalize=normalize, return_mean=True)
        assert_array_almost_equal(X1.toarray(), X2.toarray())
        assert_array_almost_equal(y1, y2)
        assert_array_almost_equal(X1_mean, X2_mean)
        assert_array_almost_equal(X1_var, X2_var)
        assert_array_almost_equal(y1_mean, y2_mean)
| bsd-3-clause |
wisonwang/django-lfs | lfs/manage/templatetags/lfs_manage_tags.py | 4 | 2940 | # django imports
from django import template
from django.template import RequestContext
from django.template.loader import render_to_string
# lfs imports
from lfs.catalog.models import Category
from lfs.manufacturer.models import Manufacturer
register = template.Library()
@register.inclusion_tag('manage/category/category_filter.html', takes_context=True)
def category_filter(context, css_class="", name="category"):
    """Returns the categories of the shop for management purposes.

    The css_class attribute is used for different ajax based requests in
    different views.
    """
    request = context.get("request")
    # Currently selected category id (stored as a string in the session).
    selected = request.session.get("product_filters", {}).get("category")

    categories = []
    for category in Category.objects.filter(parent=None):
        # Child categories are pre-rendered to an HTML string by the helper.
        children = category_filter_children(request, category, name)
        categories.append({
            "id": category.id,
            "name": category.name,
            "children": children,
            "selected": str(category.id) == selected,
        })

    result = {"categories": categories, "css_class": css_class, "name": name, "selected": selected}
    return result
# NOTE: The reason why not to use another inclusion_tag is that the request is
# not available within an inclusion_tag if one inclusion_tag is called by
# another. (Don't know why yet.)
def category_filter_children(request, category, name="category_filter", level=1):
    """Returns the children of the given category as HTML.

    Recurses depth-first; each level prefixes the category name with
    indentation (5 characters per level) so nesting is visible when rendered.
    """
    categories = []
    for category in category.category_set.all():
        children = category_filter_children(request, category, name, level + 1)
        categories.append({
            "id": category.id,
            # Indent proportionally to the nesting depth.
            "name": "%s%s" % (" " * level * 5, category.name),
            "children": children,
            "level": level,
            "selected": str(category.id) == request.session.get("product_filters", {}).get("category")
        })

    result = render_to_string("manage/category/category_filter_children.html", RequestContext(request, {
        "categories": categories
    }))

    return result
@register.inclusion_tag('manage/manufacturers/manufacturer_filter.html', takes_context=True)
def manufacturer_filter(context, css_class="", name="manufacturer"):
    """Returns the manufacturers of the shop for management purposes.

    The css_class attribute is used for different ajax based requests in
    different views.
    """
    request = context.get("request")
    # Currently selected manufacturer id (stored as a string in the session).
    selected = request.session.get("product_filters", {}).get("manufacturer")

    manufacturers = []
    for manufacturer in Manufacturer.objects.all():
        manufacturers.append({
            "id": manufacturer.id,
            "name": manufacturer.name,
            "selected": str(manufacturer.id) == selected,
        })

    result = {"manufacturers": manufacturers, "css_class": css_class, "name": name, "selected": selected}
    return result
worldcompany/djangoembed | oembed/parsers/html.py | 4 | 2313 | import re
from BeautifulSoup import BeautifulSoup # use BS to parse HTML (it's easy!)
import oembed
from oembed.constants import OEMBED_BLOCK_ELEMENTS, URL_RE, STANDALONE_URL_RE
from oembed.exceptions import OEmbedException
from oembed.parsers.base import BaseParser
from oembed.parsers.text import TextParser, TextBlockParser
class HTMLParser(BaseParser):
    """
    Use BeautifulSoup for easy html processing.

    Finds URLs in the text nodes of an HTML fragment (skipping anything
    already inside an <a> tag) and delegates their rendering/extraction to
    a TextBlockParser.  Python 2 only (uses `unicode` and old BeautifulSoup).
    """
    def parse_data(self, text, maxwidth, maxheight, template_dir, context,
                   urlize_all_links):
        # Replace every URL found in a text node (outside <a> tags) with its
        # rendered oembed replacement.  Stand-alone URLs (see is_standalone)
        # use the caller's template_dir; all others use the 'inline' set.
        block_parser = TextBlockParser()
        original_template_dir = template_dir

        soup = BeautifulSoup(text)

        for user_url in soup.findAll(text=re.compile(URL_RE)):
            if not self.inside_a(user_url):
                if self.is_standalone(user_url):
                    template_dir = original_template_dir
                else:
                    template_dir = 'inline'
                replacement = block_parser.parse(
                    str(user_url),
                    maxwidth,
                    maxheight,
                    template_dir,
                    context,
                    urlize_all_links
                )
                user_url.replaceWith(replacement)

        return unicode(soup)

    def is_standalone(self, soupie):
        # A URL is "standalone" when the whole text node matches the
        # standalone pattern and its direct parent is a block-level element.
        if re.match(STANDALONE_URL_RE, soupie):
            if soupie.parent.name in OEMBED_BLOCK_ELEMENTS:
                return True
        return False

    def inside_a(self, soupie):
        # Walk up the tree: True if any ancestor is an <a> tag (the URL is
        # already a link and must not be replaced).
        parent = soupie.parent
        while parent is not None:
            if parent.name == 'a':
                return True
            parent = parent.parent
        return False

    def extract_urls(self, text):
        # Collect URLs from all text nodes that are not inside an <a> tag,
        # de-duplicated while preserving first-seen order (set for O(1)
        # membership, list for ordering).
        block_parser = TextBlockParser()
        soup = BeautifulSoup(text)
        urls = set()
        url_list = []
        for user_url in soup.findAll(text=re.compile(URL_RE)):
            if not self.inside_a(user_url):
                block_urls = block_parser.extract_urls(unicode(user_url))
                for url in block_urls:
                    if url not in urls:
                        url_list.append(url)
                        urls.add(url)
        return url_list
| mit |
gg7/diamond | src/collectors/http/test/testhttp.py | 31 | 1545 | #!/usr/bin/python
# coding=utf-8
##########################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from http import HttpCollector
##########################################################################
class TestHttpCollector(CollectorTestCase):
    """Tests for the Diamond HttpCollector (Python 2 / urllib2 era)."""

    def setUp(self):
        # Build the collector with a virtual-host header and one URL to
        # fetch; the handler argument is None (publish is mocked in tests).
        config = get_collector_config('HttpCollector', {
            'req_vhost': 'www.my_server.com',
            'req_url': ['http://www.my_server.com/']
        })
        self.collector = HttpCollector(config, None)

    def test_import(self):
        # Sanity check: the collector class imported successfully.
        self.assertTrue(HttpCollector)

    @patch.object(Collector, 'publish')
    def test_should_work_with_real_data(self, publish_mock):
        # Serve a canned fixture instead of hitting the network, then check
        # that the expected response-size metric is published.
        patch_urlopen = patch('urllib2.urlopen', Mock(
            return_value=self.getFixture('index')))

        patch_urlopen.start()
        self.collector.collect()
        patch_urlopen.stop()

        metrics = {
            'http__www_my_server_com_.size': 150,
        }

        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['path'])
        self.assertPublishedMany([publish_mock,
                                  ], metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
| mit |
svetlyak40wt/pymongo-bongo | test/test_attributed.py | 1 | 2252 | # This code is licensed under the New BSD License
# 2009, Alexander Artemenko <svetlyak.40wt@gmail.com>
# For other contacts, visit http://aartemenko.com
import unittest
from mongobongo.attributed import AttributedDict, \
AttributedList
class AttrDictTests(unittest.TestCase):
    """AttributedDict must proxy an underlying dict through attribute access
    (reads and writes), wrapping nested dicts/lists in proxies too.

    NOTE: the ``iter(...).next()`` / ``iteritems()`` calls are Python 2
    idioms; this test module targets Python 2.
    """

    def test_attr_dict_just_a_proxy(self):
        # Attribute reads/writes go straight through to the wrapped dict.
        d = {'blah': 'minor', 'one': 1}
        a = AttributedDict(d)
        self.assertEqual('minor', a.blah)
        self.assertEqual(1, a.one)

        a.blah = 'shvah'
        self.assertEqual('shvah', a.blah)
        self.assertEqual('shvah', d['blah'])

    def test_attr_dict_returns_inner_dicts_as_proxies(self):
        # Writes through a nested proxy reach the original inner dict.
        d = {'ddd': {'b': 2, 'c': 3}}
        a = AttributedDict(d)
        a.ddd.b = 5
        self.assertEqual(5, a.ddd.b)
        self.assertEqual(5, d['ddd']['b'])

    def test_attr_dict_returns_inner_lists_as_proxies(self):
        # Lists inside the dict are wrapped, so element attribute access works.
        d = {'ddd': [{'b': 2}, {'c': 3}]}
        a = AttributedDict(d)
        a.ddd[0].b = 5
        self.assertEqual(5, a.ddd[0].b)
        self.assertEqual(5, d['ddd'][0]['b'])

    def test_supports_iteration_by_keys(self):
        # Iterating the proxy yields the same keys as the plain dict.
        d = {'ddd': {'b': 2}}
        a = AttributedDict(d)
        self.assertEqual('ddd', iter(d).next())
        self.assertEqual('ddd', iter(a).next())

    def test_supports_iteration_by_items(self):
        # iteritems() on the proxy yields wrapped values, so attribute
        # access works where the plain dict needs subscripting.
        d = {'ddd': {'b': 2}}
        a = AttributedDict(d)
        self.assertEqual(2, d.iteritems().next()[1]['b'])
        self.assertEqual(2, a.iteritems().next()[1].b)
class AttrListTests(unittest.TestCase):
    """AttributedList must proxy an underlying list, wrapping contained
    dicts/lists so attribute access works on the elements.

    NOTE: ``iter(...).next()`` is a Python 2 idiom; this module targets
    Python 2.
    """

    def test_attr_list_just_a_proxy(self):
        # Writes through the proxied element reach the original dict.
        l = [{'b': 2}]
        al = AttributedList(l)
        self.assertEqual(2, al[0].b)
        al[0].b = 123
        self.assertEqual(123, al[0].b)

    def test_attr_list_returns_other_lists_as_proxies(self):
        # Nested lists are wrapped too, so indexing chains keep working.
        l = [[{'b': 2}]]
        al = AttributedList(l)
        self.assertEqual(2, al[0][0].b)
        al[0][0].b = 123
        self.assertEqual(123, al[0][0].b)

    def test_have_eq(self):
        # The proxy compares equal to its underlying list.
        l = [[{'b': 2}]]
        al = AttributedList(l)
        self.assertEqual(l, al)

    def test_supports_iteration(self):
        # Iteration yields proxied elements with attribute access.
        l = [{'b': 2}]
        al = AttributedList(l)
        self.assertEqual(2, iter(al).next().b)
| bsd-3-clause |
andreaslarssonublox/mbed | tools/export/atmelstudio.py | 4 | 2539 | """
mbed SDK
Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import uuid
from exporters import Exporter
from os.path import splitext, basename, dirname
class AtmelStudio(Exporter):
    """Exporter that generates Atmel Studio 6.2 project files (.atsln and
    .cppproj) for the supported Atmel SAM targets, using GCC_ARM."""

    NAME = 'AtmelStudio'
    TOOLCHAIN = 'GCC_ARM'

    # Targets this exporter can produce projects for.
    TARGETS = [
        'SAMD21J18A',
        'SAMR21G18A',
        'SAMD21G18A',
        'SAML21J18A',
        'SAMG55J19',
    ]

    DOT_IN_RELATIVE_PATH = True

    MBED_CONFIG_HEADER_SUPPORTED = True

    def generate(self):
        """Render the solution (.atsln) and project (.cppproj) files from
        the collected build resources."""
        source_files = []
        dirs = []
        # Collect assembly/C/C++ sources; source[2:] strips the leading
        # './' from each relative path.
        for r_type in ['s_sources', 'c_sources', 'cpp_sources']:
            r = getattr(self.resources, r_type)
            if r:
                for source in r:
                    source_files.append(source[2:])
                    dirs.append(dirname(source[2:]))

        # Unique, order-preserving list of non-empty source folders.
        source_folders = []
        for e in dirs:
            if e and e not in source_folders:
                source_folders.append(e)

        # Library names without extension and without the 'lib' prefix.
        libraries = []
        for lib in self.resources.libraries:
            l, _ = splitext(basename(lib))
            libraries.append(l[3:])

        # Atmel Studio expects brace-wrapped, upper-cased GUIDs.
        solution_uuid = '{' + str(uuid.uuid4()) + '}'
        project_uuid = '{' + str(uuid.uuid4()) + '}'

        ctx = {
            'target': self.target,
            'name': self.project_name,
            'source_files': source_files,
            'source_folders': source_folders,
            'object_files': self.resources.objects,
            'include_paths': self.resources.inc_dirs,
            'library_paths': self.resources.lib_dirs,
            'linker_script': self.resources.linker_script,
            'libraries': libraries,
            'symbols': self.toolchain.get_symbols(),
            'solution_uuid': solution_uuid.upper(),
            'project_uuid': project_uuid.upper()
        }
        ctx.update(self.flags)
        # (Removed a dead local: `target = self.target.lower()` was computed
        # but never used.)
        self.gen_file('atmelstudio6_2.atsln.tmpl', ctx, '%s.atsln' % self.project_name)
        self.gen_file('atmelstudio6_2.cppproj.tmpl', ctx, '%s.cppproj' % self.project_name)
| apache-2.0 |
josesanch/django-oscar | src/oscar/apps/address/south_migrations/0012_auto__del_index_country_iso_3166_1_a3__chg_field_country_iso_3166_1_nu.py | 6 | 7643 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from oscar.core.compat import AUTH_USER_MODEL, AUTH_USER_MODEL_NAME
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: drop two Country indexes and turn iso_3166_1_numeric into
        a CharField(max_length=3) with '' as default."""
        # Removing index on 'Country', fields ['iso_3166_1_a3']
        db.delete_index(u'address_country', ['iso_3166_1_a3'])

        # Changing field 'Country.iso_3166_1_numeric'
        # (PositiveSmallIntegerField -> CharField; NULLs become '')
        db.alter_column(u'address_country', 'iso_3166_1_numeric', self.gf('django.db.models.fields.CharField')(default='', max_length=3))

        # Removing index on 'Country', fields ['iso_3166_1_numeric']
        db.delete_index(u'address_country', ['iso_3166_1_numeric'])
    def backwards(self, orm):
        """Revert: recreate both indexes and restore iso_3166_1_numeric to a
        nullable PositiveSmallIntegerField (conversion of non-numeric
        strings back to integers is database-dependent)."""
        # Adding index on 'Country', fields ['iso_3166_1_numeric']
        db.create_index(u'address_country', ['iso_3166_1_numeric'])

        # Adding index on 'Country', fields ['iso_3166_1_a3']
        db.create_index(u'address_country', ['iso_3166_1_a3'])

        # Changing field 'Country.iso_3166_1_numeric'
        db.alter_column(u'address_country', 'iso_3166_1_numeric', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True))
models = {
u'address.country': {
'Meta': {'ordering': "('-display_order', 'name')", 'object_name': 'Country'},
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
'iso_3166_1_numeric': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'address.useraddress': {
'Meta': {'ordering': "['-num_orders']", 'unique_together': "(('user', 'hash'),)", 'object_name': 'UserAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['address.Country']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_default_for_billing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_default_for_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'num_orders': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'phone_number': ('oscar.models.fields.PhoneNumberField', [], {'max_length': '128', 'blank': 'True'}),
'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'blank': 'True'}),
'search_text': ('django.db.models.fields.TextField', [], {}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'addresses'", 'to': u"orm['{0}']".format(AUTH_USER_MODEL)})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
AUTH_USER_MODEL: {
'Meta': {'object_name': AUTH_USER_MODEL_NAME},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['address'] | bsd-3-clause |
jlspyaozhongkai/Uter | third_party_backup/Python-2.7.9/Tools/scripts/pysource.py | 94 | 3846 | #!/usr/bin/env python
"""\
List python source files.
There are three functions to check whether a file is a Python source, listed
here with increasing complexity:
- has_python_ext() checks whether a file name ends in '.py[w]'.
- look_like_python() checks whether the file is not binary and either has
the '.py[w]' extension or the first line contains the word 'python'.
- can_be_compiled() checks whether the file can be compiled by compile().
The file also must be of appropriate size - not bigger than a megabyte.
walk_python_files() recursively lists all Python files under the given directories.
"""
__author__ = "Oleg Broytmann, Georg Brandl"
__all__ = ["has_python_ext", "looks_like_python", "can_be_compiled", "walk_python_files"]
import os, re
binary_re = re.compile('[\x00-\x08\x0E-\x1F\x7F]')
debug = False
def print_debug(msg):
    """Print *msg* only when the module-level ``debug`` flag is set."""
    if debug: print msg
def _open(fullpath):
    """Open *fullpath* for reading, or return None when it cannot or should
    not be read: stat/permission error, larger than 1 MiB, or open failure
    (special files).  Python 2 only (old except syntax, 'rU' mode)."""
    try:
        size = os.stat(fullpath).st_size
    except OSError, err:  # Permission denied - ignore the file
        print_debug("%s: permission denied: %s" % (fullpath, err))
        return None

    if size > 1024*1024:  # too big
        print_debug("%s: the file is too big: %d bytes" % (fullpath, size))
        return None

    try:
        # 'rU' = universal-newline text mode (Python 2).
        return open(fullpath, 'rU')
    except IOError, err:  # Access denied, or a special file - ignore it
        print_debug("%s: access denied: %s" % (fullpath, err))
        return None
def has_python_ext(fullpath):
    """Return True if *fullpath* ends with a Python extension (.py or .pyw)."""
    return fullpath.endswith((".py", ".pyw"))
def looks_like_python(fullpath):
    """Return True if the file is readable text and looks like Python source.

    A file qualifies when its first line contains no binary control
    characters and either the name has a .py/.pyw extension or the first
    line contains the word 'python' (e.g. a shebang in an extensionless
    CGI script)."""
    infile = _open(fullpath)
    if infile is None:
        return False

    line = infile.readline()
    infile.close()

    if binary_re.search(line):
        # file appears to be binary
        print_debug("%s: appears to be binary" % fullpath)
        return False

    if fullpath.endswith(".py") or fullpath.endswith(".pyw"):
        return True
    elif "python" in line:
        # disguised Python script (e.g. CGI)
        return True

    return False
def can_be_compiled(fullpath):
    """Return True if the file's whole contents compile with compile()
    (i.e. a syntax check, not an import).  Python 2 except syntax."""
    infile = _open(fullpath)
    if infile is None:
        return False

    code = infile.read()
    infile.close()

    try:
        compile(code, fullpath, "exec")
    except Exception, err:
        print_debug("%s: cannot compile: %s" % (fullpath, err))
        return False

    return True
def walk_python_files(paths, is_python=looks_like_python, exclude_dirs=None):
    """\
    Recursively yield all Python source files below the given paths.

    paths: a list of files and/or directories to be checked.
    is_python: a function that takes a file name and checks whether it is a
               Python source file
    exclude_dirs: a list of directory base names that should be excluded in
                  the search
    """
    # Avoid a mutable default argument; fall back to "exclude nothing".
    if exclude_dirs is None:
        exclude_dirs=[]

    for path in paths:
        print_debug("testing: %s" % path)
        if os.path.isfile(path):
            if is_python(path):
                yield path
        elif os.path.isdir(path):
            print_debug(" it is a directory")
            for dirpath, dirnames, filenames in os.walk(path):
                # Prune excluded directories in place so os.walk does not
                # descend into them.
                for exclude in exclude_dirs:
                    if exclude in dirnames:
                        dirnames.remove(exclude)
                for filename in filenames:
                    fullpath = os.path.join(dirpath, filename)
                    print_debug("testing: %s" % fullpath)
                    if is_python(fullpath):
                        yield fullpath
        else:
            # Neither a regular file nor a directory (socket, fifo, ...).
            print_debug(" unknown type")
if __name__ == "__main__":
    # Two simple examples/tests: first with the cheap name/first-line check,
    # then with the stricter compile() check.  (Python 2 print statements.)
    for fullpath in walk_python_files(['.']):
        print fullpath
    print "----------"
    for fullpath in walk_python_files(['.'], is_python=can_be_compiled):
        print fullpath
| gpl-3.0 |
NamelessRom/android_kernel_lge_p880 | tools/perf/python/twatch.py | 3213 | 1338 | #! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
cpus = perf.cpu_map()
threads = perf.thread_map()
evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
wakeup_events = 1, sample_period = 1,
sample_id_all = 1,
sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU | perf.SAMPLE_TID)
evsel.open(cpus = cpus, threads = threads);
evlist = perf.evlist(cpus, threads)
evlist.add(evsel)
evlist.mmap()
while True:
evlist.poll(timeout = -1)
for cpu in cpus:
event = evlist.read_on_cpu(cpu)
if not event:
continue
print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
event.sample_pid,
event.sample_tid),
print event
if __name__ == '__main__':
main()
| gpl-2.0 |
elijah513/django | tests/one_to_one/tests.py | 21 | 19217 | from __future__ import unicode_literals
from django.db import IntegrityError, connection, transaction
from django.test import TestCase
from .models import (
Bar, Director, Favorites, HiddenPointer, ManualPrimaryKey, MultiModel,
Place, RelatedModel, Restaurant, School, Target, UndergroundBar, Waiter,
)
class OneToOneTests(TestCase):
def setUp(self):
self.p1 = Place.objects.create(name='Demon Dogs', address='944 W. Fullerton')
self.p2 = Place.objects.create(name='Ace Hardware', address='1013 N. Ashland')
self.r1 = Restaurant.objects.create(place=self.p1, serves_hot_dogs=True, serves_pizza=False)
self.b1 = Bar.objects.create(place=self.p1, serves_cocktails=False)
def test_getter(self):
# A Restaurant can access its place.
self.assertEqual(repr(self.r1.place), '<Place: Demon Dogs the place>')
# A Place can access its restaurant, if available.
self.assertEqual(repr(self.p1.restaurant), '<Restaurant: Demon Dogs the restaurant>')
# p2 doesn't have an associated restaurant.
with self.assertRaisesMessage(Restaurant.DoesNotExist, 'Place has no restaurant'):
self.p2.restaurant
# The exception raised on attribute access when a related object
# doesn't exist should be an instance of a subclass of `AttributeError`
# refs #21563
self.assertFalse(hasattr(self.p2, 'restaurant'))
def test_setter(self):
# Set the place using assignment notation. Because place is the primary
# key on Restaurant, the save will create a new restaurant
self.r1.place = self.p2
self.r1.save()
self.assertEqual(repr(self.p2.restaurant), '<Restaurant: Ace Hardware the restaurant>')
self.assertEqual(repr(self.r1.place), '<Place: Ace Hardware the place>')
self.assertEqual(self.p2.pk, self.r1.pk)
# Set the place back again, using assignment in the reverse direction.
self.p1.restaurant = self.r1
self.assertEqual(repr(self.p1.restaurant), '<Restaurant: Demon Dogs the restaurant>')
r = Restaurant.objects.get(pk=self.p1.id)
self.assertEqual(repr(r.place), '<Place: Demon Dogs the place>')
def test_manager_all(self):
# Restaurant.objects.all() just returns the Restaurants, not the Places.
self.assertQuerysetEqual(Restaurant.objects.all(), [
'<Restaurant: Demon Dogs the restaurant>',
])
# Place.objects.all() returns all Places, regardless of whether they
# have Restaurants.
self.assertQuerysetEqual(Place.objects.order_by('name'), [
'<Place: Ace Hardware the place>',
'<Place: Demon Dogs the place>',
])
def test_manager_get(self):
def assert_get_restaurant(**params):
self.assertEqual(repr(Restaurant.objects.get(**params)),
'<Restaurant: Demon Dogs the restaurant>')
assert_get_restaurant(place__id__exact=self.p1.pk)
assert_get_restaurant(place__id=self.p1.pk)
assert_get_restaurant(place__exact=self.p1.pk)
assert_get_restaurant(place__exact=self.p1)
assert_get_restaurant(place=self.p1.pk)
assert_get_restaurant(place=self.p1)
assert_get_restaurant(pk=self.p1.pk)
assert_get_restaurant(place__pk__exact=self.p1.pk)
assert_get_restaurant(place__pk=self.p1.pk)
assert_get_restaurant(place__name__startswith="Demon")
def assert_get_place(**params):
self.assertEqual(repr(Place.objects.get(**params)),
'<Place: Demon Dogs the place>')
assert_get_place(restaurant__place__exact=self.p1.pk)
assert_get_place(restaurant__place__exact=self.p1)
assert_get_place(restaurant__place__pk=self.p1.pk)
assert_get_place(restaurant__exact=self.p1.pk)
assert_get_place(restaurant__exact=self.r1)
assert_get_place(restaurant__pk=self.p1.pk)
assert_get_place(restaurant=self.p1.pk)
assert_get_place(restaurant=self.r1)
assert_get_place(id__exact=self.p1.pk)
assert_get_place(pk=self.p1.pk)
def test_foreign_key(self):
# Add a Waiter to the Restaurant.
w = self.r1.waiter_set.create(name='Joe')
self.assertEqual(repr(w), '<Waiter: Joe the waiter at Demon Dogs the restaurant>')
# Query the waiters
def assert_filter_waiters(**params):
self.assertQuerysetEqual(Waiter.objects.filter(**params), [
'<Waiter: Joe the waiter at Demon Dogs the restaurant>'
])
assert_filter_waiters(restaurant__place__exact=self.p1.pk)
assert_filter_waiters(restaurant__place__exact=self.p1)
assert_filter_waiters(restaurant__place__pk=self.p1.pk)
assert_filter_waiters(restaurant__exact=self.r1.pk)
assert_filter_waiters(restaurant__exact=self.r1)
assert_filter_waiters(restaurant__pk=self.r1.pk)
assert_filter_waiters(restaurant=self.r1.pk)
assert_filter_waiters(restaurant=self.r1)
assert_filter_waiters(id__exact=w.pk)
assert_filter_waiters(pk=w.pk)
# Delete the restaurant; the waiter should also be removed
r = Restaurant.objects.get(pk=self.r1.pk)
r.delete()
self.assertEqual(Waiter.objects.count(), 0)
def test_multiple_o2o(self):
# One-to-one fields still work if you create your own primary key
o1 = ManualPrimaryKey(primary_key="abc123", name="primary")
o1.save()
o2 = RelatedModel(link=o1, name="secondary")
o2.save()
# You can have multiple one-to-one fields on a model, too.
x1 = MultiModel(link1=self.p1, link2=o1, name="x1")
x1.save()
self.assertEqual(repr(o1.multimodel), '<MultiModel: Multimodel x1>')
# This will fail because each one-to-one field must be unique (and
# link2=o1 was used for x1, above).
mm = MultiModel(link1=self.p2, link2=o1, name="x1")
with self.assertRaises(IntegrityError):
with transaction.atomic():
mm.save()
def test_unsaved_object(self):
"""
#10811 -- Assigning an unsaved object to a OneToOneField
should raise an exception.
"""
place = Place(name='User', address='London')
msg = "save() prohibited to prevent data loss due to unsaved related object 'place'."
with self.assertRaisesMessage(ValueError, msg):
Restaurant.objects.create(place=place, serves_hot_dogs=True, serves_pizza=False)
def test_reverse_relationship_cache_cascade(self):
"""
Regression test for #9023: accessing the reverse relationship shouldn't
result in a cascading delete().
"""
bar = UndergroundBar.objects.create(place=self.p1, serves_cocktails=False)
# The bug in #9023: if you access the one-to-one relation *before*
# setting to None and deleting, the cascade happens anyway.
self.p1.undergroundbar
bar.place.name = 'foo'
bar.place = None
bar.save()
self.p1.delete()
self.assertEqual(Place.objects.all().count(), 1)
self.assertEqual(UndergroundBar.objects.all().count(), 1)
def test_create_models_m2m(self):
"""
Regression test for #1064 and #1506
Check that we create models via the m2m relation if the remote model
has a OneToOneField.
"""
f = Favorites(name='Fred')
f.save()
f.restaurants = [self.r1]
self.assertQuerysetEqual(
f.restaurants.all(),
['<Restaurant: Demon Dogs the restaurant>']
)
    def test_reverse_object_cache(self):
        """
        Regression test for #7173

        Check that the name of the cache for the reverse object is correct.
        """
        # Both reverse descriptors must resolve to the fixture objects.
        self.assertEqual(self.p1.restaurant, self.r1)
        self.assertEqual(self.p1.bar, self.b1)
    def test_related_object_cache(self):
        """ Regression test for #6886 (the related-object cache) """
        # Look up the objects again so that we get "fresh" objects
        p = Place.objects.get(name="Demon Dogs")
        r = p.restaurant
        # Accessing the related object again returns the exactly same object
        self.assertIs(p.restaurant, r)
        # But if we kill the cache, we get a new object
        # (_restaurant_cache is the descriptor's internal cache attribute)
        del p._restaurant_cache
        self.assertIsNot(p.restaurant, r)
        # Reassigning the Restaurant object results in an immediate cache update
        # We can't use a new Restaurant because that'll violate one-to-one, but
        # with a new *instance* the is test below will fail if #6886 regresses.
        r2 = Restaurant.objects.get(pk=r.pk)
        p.restaurant = r2
        self.assertIs(p.restaurant, r2)
        # Assigning None succeeds if field is null=True.
        ug_bar = UndergroundBar.objects.create(place=p, serves_cocktails=False)
        ug_bar.place = None
        self.assertIsNone(ug_bar.place)
        # Assigning None fails: Place.restaurant is null=False
        self.assertRaises(ValueError, setattr, p, 'restaurant', None)
        # You also can't assign an object of the wrong type here
        self.assertRaises(ValueError, setattr, p, 'restaurant', p)
        # Creation using keyword argument should cache the related object.
        p = Place.objects.get(name="Demon Dogs")
        r = Restaurant(place=p)
        self.assertIs(r.place, p)
        # Creation using keyword argument and unsaved related instance (#8070).
        p = Place()
        r = Restaurant(place=p)
        self.assertTrue(r.place is p)
        # Creation using attname keyword argument and an id will cause the related
        # object to be fetched.
        p = Place.objects.get(name="Demon Dogs")
        r = Restaurant(place_id=p.id)
        self.assertIsNot(r.place, p)
        self.assertEqual(r.place, p)
def test_filter_one_to_one_relations(self):
"""
Regression test for #9968
filtering reverse one-to-one relations with primary_key=True was
misbehaving. We test both (primary_key=True & False) cases here to
prevent any reappearance of the problem.
"""
Target.objects.create()
self.assertQuerysetEqual(
Target.objects.filter(pointer=None),
['<Target: Target object>']
)
self.assertQuerysetEqual(
Target.objects.exclude(pointer=None),
[]
)
self.assertQuerysetEqual(
Target.objects.filter(second_pointer=None),
['<Target: Target object>']
)
self.assertQuerysetEqual(
Target.objects.exclude(second_pointer=None),
[]
)
    def test_reverse_object_does_not_exist_cache(self):
        """
        Regression for #13839 and #17439.

        DoesNotExist on a reverse one-to-one relation is cached.
        """
        p = Place(name='Zombie Cats', address='Not sure')
        p.save()
        # First access hits the database once...
        with self.assertNumQueries(1):
            with self.assertRaises(Restaurant.DoesNotExist):
                p.restaurant
        # ...and the negative result is cached, so no further query runs.
        with self.assertNumQueries(0):
            with self.assertRaises(Restaurant.DoesNotExist):
                p.restaurant
    def test_reverse_object_cached_when_related_is_accessed(self):
        """
        Regression for #13839 and #17439.

        The target of a one-to-one relation is cached
        when the origin is accessed through the reverse relation.
        """
        # Use a fresh object without caches
        r = Restaurant.objects.get(pk=self.r1.pk)
        p = r.place
        # Fetching r.place must have populated p's reverse cache too.
        with self.assertNumQueries(0):
            self.assertEqual(p.restaurant, r)
    def test_related_object_cached_when_reverse_is_accessed(self):
        """
        Regression for #13839 and #17439.

        The origin of a one-to-one relation is cached
        when the target is accessed through the reverse relation.
        """
        # Use a fresh object without caches
        p = Place.objects.get(pk=self.p1.pk)
        r = p.restaurant
        # Fetching p.restaurant must have populated r's forward cache too.
        with self.assertNumQueries(0):
            self.assertEqual(r.place, p)
    def test_reverse_object_cached_when_related_is_set(self):
        """
        Regression for #13839 and #17439.

        The target of a one-to-one relation is always cached.
        """
        p = Place(name='Zombie Cats', address='Not sure')
        p.save()
        # Assigning the forward side must update the reverse cache on p.
        self.r1.place = p
        self.r1.save()
        with self.assertNumQueries(0):
            self.assertEqual(p.restaurant, self.r1)
    def test_reverse_object_cached_when_related_is_unset(self):
        """
        Regression for #13839 and #17439.

        The target of a one-to-one relation is always cached.
        """
        b = UndergroundBar(place=self.p1, serves_cocktails=True)
        b.save()
        with self.assertNumQueries(0):
            self.assertEqual(self.p1.undergroundbar, b)
        # Unsetting the relation must also update the cached reverse side:
        # the cache now records "does not exist" without hitting the DB.
        b.place = None
        b.save()
        with self.assertNumQueries(0):
            with self.assertRaises(UndergroundBar.DoesNotExist):
                self.p1.undergroundbar
    def test_get_reverse_on_unsaved_object(self):
        """
        Regression for #18153 and #19089.

        Accessing the reverse relation on an unsaved object
        always raises an exception.
        """
        p = Place()
        # When there's no instance of the origin of the one-to-one
        with self.assertNumQueries(0):
            with self.assertRaises(UndergroundBar.DoesNotExist):
                p.undergroundbar
        UndergroundBar.objects.create()
        # When there's one instance of the origin
        # (p.undergroundbar used to return that instance)
        with self.assertNumQueries(0):
            with self.assertRaises(UndergroundBar.DoesNotExist):
                p.undergroundbar
        # Several instances of the origin are only possible if database allows
        # inserting multiple NULL rows for a unique constraint
        if connection.features.supports_nullable_unique_constraints:
            UndergroundBar.objects.create()
            # When there are several instances of the origin
            with self.assertNumQueries(0):
                with self.assertRaises(UndergroundBar.DoesNotExist):
                    p.undergroundbar
    def test_set_reverse_on_unsaved_object(self):
        """
        Writing to the reverse relation on an unsaved object
        is impossible too.
        """
        p = Place()
        b = UndergroundBar.objects.create()
        # The assignment must fail eagerly, before any query is issued.
        msg = (
            'Cannot assign "<UndergroundBar: UndergroundBar object>": "Place" '
            'instance isn\'t saved in the database.'
        )
        with self.assertNumQueries(0):
            with self.assertRaisesMessage(ValueError, msg):
                p.undergroundbar = b
    def test_nullable_o2o_delete(self):
        """Deleting the target of a nullable OneToOneField must not delete
        rows that were unlinked (place_id set to None) beforehand."""
        u = UndergroundBar.objects.create(place=self.p1)
        u.place_id = None
        u.save()
        self.p1.delete()
        # The bar survives the Place delete and its FK is NULL.
        self.assertTrue(UndergroundBar.objects.filter(pk=u.pk).exists())
        self.assertIsNone(UndergroundBar.objects.get(pk=u.pk).place)
def test_hidden_accessor(self):
"""
When a '+' ending related name is specified no reverse accessor should
be added to the related model.
"""
self.assertFalse(
hasattr(Target, HiddenPointer._meta.get_field('target').remote_field.get_accessor_name())
)
    def test_related_object(self):
        """One-to-one traversal uses the base manager, so related objects
        stay reachable even when hidden by a custom default manager."""
        public_school = School.objects.create(is_public=True)
        public_director = Director.objects.create(school=public_school, is_temp=False)
        private_school = School.objects.create(is_public=False)
        private_director = Director.objects.create(school=private_school, is_temp=True)
        # Only one school is available via all() due to the custom default manager.
        self.assertQuerysetEqual(
            School.objects.all(),
            ["<School: School object>"]
        )
        # Only one director is available via all() due to the custom default manager.
        self.assertQuerysetEqual(
            Director.objects.all(),
            ["<Director: Director object>"]
        )
        self.assertEqual(public_director.school, public_school)
        self.assertEqual(public_school.director, public_director)
        # Make sure the base manager is used so that the related objects
        # is still accessible even if the default manager doesn't normally
        # allow it.
        self.assertEqual(private_director.school, private_school)
        # Make sure the base manager is used so that an student can still access
        # its related school even if the default manager doesn't normally
        # allow it.
        self.assertEqual(private_school.director, private_director)
        # If the manager is marked "use_for_related_fields", it'll get used instead
        # of the "bare" queryset. Usually you'd define this as a property on the class,
        # but this approximates that in a way that's easier in tests.
        School.objects.use_for_related_fields = True
        try:
            private_director = Director._base_manager.get(pk=private_director.pk)
            self.assertRaises(School.DoesNotExist, lambda: private_director.school)
        finally:
            # Always restore the class-level flag so later tests aren't affected.
            School.objects.use_for_related_fields = False
        Director.objects.use_for_related_fields = True
        try:
            private_school = School._base_manager.get(pk=private_school.pk)
            self.assertRaises(Director.DoesNotExist, lambda: private_school.director)
        finally:
            Director.objects.use_for_related_fields = False
def test_hasattr_related_object(self):
# The exception raised on attribute access when a related object
# doesn't exist should be an instance of a subclass of `AttributeError`
# refs #21563
self.assertFalse(hasattr(Director(), 'director'))
self.assertFalse(hasattr(School(), 'school'))
    def test_update_one_to_one_pk(self):
        """QuerySet.update() on a OneToOneField must be visible after a
        refresh_from_db() on a previously-loaded instance."""
        p1 = Place.objects.create()
        p2 = Place.objects.create()
        r1 = Restaurant.objects.create(place=p1)
        r2 = Restaurant.objects.create(place=p2)
        w = Waiter.objects.create(restaurant=r1)
        Waiter.objects.update(restaurant=r2)
        w.refresh_from_db()
        self.assertEqual(w.restaurant, r2)
    def test_rel_pk_subquery(self):
        """Subqueries over a OneToOneField primary key must resolve, whether
        spelled via the FK attname or via pk__in."""
        r = Restaurant.objects.first()
        q1 = Restaurant.objects.filter(place_id=r.pk)
        # Test that subquery using primary key and a query against the
        # same model works correctly.
        q2 = Restaurant.objects.filter(place_id__in=q1)
        self.assertQuerysetEqual(q2, [r], lambda x: x)
        # Test that subquery using 'pk__in' instead of 'place_id__in' work, too.
        q2 = Restaurant.objects.filter(
            pk__in=Restaurant.objects.filter(place__id=r.place.pk)
        )
        self.assertQuerysetEqual(q2, [r], lambda x: x)
def test_rel_pk_exact(self):
r = Restaurant.objects.first()
r2 = Restaurant.objects.filter(pk__exact=r).first()
self.assertEqual(r, r2)
| bsd-3-clause |
cuilishen/cuilishenMissionPlanner | Lib/dummy_threading.py | 321 | 2804 | """Faux ``threading`` version using ``dummy_thread`` instead of ``thread``.
The module ``_dummy_threading`` is added to ``sys.modules`` in order
to not have ``threading`` considered imported. Had ``threading`` been
directly imported it would have made all subsequent imports succeed
regardless of whether ``thread`` was available which is not desired.
"""
from sys import modules as sys_modules

import dummy_thread

# Declaring now so as to not have to nest ``try``s to get proper clean-up.
holding_thread = False
holding_threading = False
holding__threading_local = False

try:
    # Could have checked if ``thread`` was not in sys.modules and gone
    # a different route, but decided to mirror technique used with
    # ``threading`` below.
    if 'thread' in sys_modules:
        held_thread = sys_modules['thread']
        holding_thread = True
    # Must have some module named ``thread`` that implements its API
    # in order to initially import ``threading``.
    sys_modules['thread'] = sys_modules['dummy_thread']

    if 'threading' in sys_modules:
        # If ``threading`` is already imported, might as well prevent
        # trying to import it more than needed by saving it if it is
        # already imported before deleting it.
        held_threading = sys_modules['threading']
        holding_threading = True
        del sys_modules['threading']

    if '_threading_local' in sys_modules:
        # If ``_threading_local`` is already imported, might as well prevent
        # trying to import it more than needed by saving it if it is
        # already imported before deleting it.
        held__threading_local = sys_modules['_threading_local']
        holding__threading_local = True
        del sys_modules['_threading_local']

    # This import happens while ``thread`` points at ``dummy_thread``, so
    # ``threading`` is built on top of the dummy implementation.
    import threading

    # Need a copy of the code kept somewhere...
    sys_modules['_dummy_threading'] = sys_modules['threading']
    del sys_modules['threading']
    sys_modules['_dummy__threading_local'] = sys_modules['_threading_local']
    del sys_modules['_threading_local']
    from _dummy_threading import *
    from _dummy_threading import __all__

finally:
    # Put back ``threading`` if we overwrote earlier

    if holding_threading:
        sys_modules['threading'] = held_threading
        del held_threading
    del holding_threading

    # Put back ``_threading_local`` if we overwrote earlier

    if holding__threading_local:
        sys_modules['_threading_local'] = held__threading_local
        del held__threading_local
    del holding__threading_local

    # Put back ``thread`` if we overwrote, else del the entry we made

    if holding_thread:
        sys_modules['thread'] = held_thread
        del held_thread
    else:
        del sys_modules['thread']
    del holding_thread

    # Strip the temporary names so the module namespace exposes only the
    # re-exported ``_dummy_threading`` API.
    del dummy_thread
    del sys_modules
| gpl-3.0 |
stanchan/jenkins-job-builder | tests/duplicates/test_duplicates.py | 23 | 1543 | # Joint copyright:
# - Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from testtools import TestCase, ExpectedException
from testscenarios.testcase import TestWithScenarios
from jenkins_jobs.errors import JenkinsJobsException
from tests.base import SingleJobTestCase
from tests.base import get_scenarios
from tests.base import mock
class TestCaseModuleDuplicates(TestWithScenarios, TestCase,
                               SingleJobTestCase):
    """Scenario-driven test: fixtures prefixed ``exception_`` must raise a
    ``Duplicate ...`` JenkinsJobsException; all others must pass normally."""
    fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
    scenarios = get_scenarios(fixtures_path)

    @mock.patch('jenkins_jobs.builder.logger', autospec=True)
    def test_yaml_snippet(self, mock_logger):
        # The fixture filename encodes the expected outcome.
        if os.path.basename(self.in_filename).startswith("exception_"):
            with ExpectedException(JenkinsJobsException, "^Duplicate .*"):
                super(TestCaseModuleDuplicates, self).test_yaml_snippet()
        else:
            super(TestCaseModuleDuplicates, self).test_yaml_snippet()
| apache-2.0 |
qitan/SOMS | deploy/templatetags/myinclusion.py | 1 | 3599 | #!/usr/bin/env python
# coding: utf8
'''
@author: qitan
@contact: qqing_lai@hotmail.com
@file: myinclusion.py
@time: 2017/3/30 15:32
@desc:
'''
from django import template
from django.db.models import Q
from deploy.models import *
from userauth.models import *
register = template.Library()
def show_single_minions(pk, user_type):
    """Render the host list for the single-item file rollback page.

    pk: primary key of the current user.
    user_type: truthy for privileged users, who see every active salt host;
        otherwise only hosts of the user's departments are listed.

    Returns a context dict with 'tgt_list', a sorted, de-duplicated list of
    hostnames.
    """
    if user_type:
        tgt_list = [i['hostname'] for i in SaltHost.objects.filter(status=True).values('hostname')]
    else:
        tgt_list = [i['hostname'] for d in User.objects.get(pk=pk).department.all() for i in
                    d.host_department_set.values('hostname')]
    # set() removes duplicate hostnames; sorted() already returns a list.
    return {'tgt_list': sorted(set(tgt_list))}
register.inclusion_tag('tag_single_minions.html')(show_single_minions)
def show_groups(pk, user_type):
    """Render all salt groups visible to the user, for the remote command,
    module deployment and file management pages.

    pk: primary key of the current user.
    user_type: truthy for privileged users, who see every group; otherwise
        only groups of the user's departments are listed.

    Returns a context dict with 'group_dict', a sorted list of
    (groupname, nickname) pairs.
    """
    if user_type:
        group_dict = {i['groupname']: i['nickname'] for i in SaltGroup.objects.values('groupname', 'nickname')}
    else:
        group_dict = {i['groupname']: i['nickname'] for d in User.objects.get(pk=pk).department.all()
                      for i in d.saltgroup_department_set.values('groupname', 'nickname')}
    # dict items are already unique by key, so no extra set() pass is needed.
    return {'group_dict': sorted(group_dict.items())}
register.inclusion_tag('tag_user_departments.html')(show_groups)
def show_modules(u, user_type):
    """
    Render all modules visible to the user on the module deployment page.

    u: the current User instance.
    user_type: truthy for privileged users, who see every uploaded module.
    """
    if user_type:
        module_list = ModuleUpload.objects.all()
    else:
        # Modules created by the user, plus publicly visible ones (visible=2).
        module_visible_list = [{'pk': i.pk, 'name': i.name, 'module': i.module, 'remark': i.remark}
                               for i in ModuleUpload.objects.filter(Q(user=u) | Q(visible=2))]
        # Modules belonging to any of the user's groups.
        module_user_group_list = [{'pk': i.pk, 'name': i.name, 'module': i.module, 'remark': i.remark}
                                  for g in User.objects.get(pk=u.pk).group.all() for i in
                                  ModuleUpload.objects.filter(user_group=g)]
        # Merge the two lists, skipping duplicates already present.
        module_list = module_visible_list + [i for i in module_user_group_list if i not in module_visible_list]
    return {'module_list': module_list}
register.inclusion_tag('tag_modules.html')(show_modules)
def show_user_group_minions(pk, user_type, list_type):
    """
    Render the host list for remote command, module deployment and file
    upload pages.

    pk: primary key of the current user.
    user_type: truthy for privileged users, who see every active salt host;
        otherwise only hosts of the user's groups are listed.
    list_type: passed through to the template unchanged.
    """
    if user_type:
        tgt_list = [i['hostname'] for i in SaltHost.objects.filter(status=True).values('hostname')]
    else:
        tgt_list = [i['hostname'] for g in User.objects.get(pk=pk).group.all() for i in
                    SaltHost.objects.filter(user_group=g).values('hostname')]
    return {'tgt_list': sorted(list(set(tgt_list))), 'list_type': list_type}
register.inclusion_tag('tag_user_group_minions.html')(show_user_group_minions)
def show_user_group_groups(pk, user_type):
    """
    Render the salt groups of the user's groups for remote command, module
    deployment and file management pages.

    pk: primary key of the current user.
    user_type: truthy for privileged users, who see every group.

    Returns a context dict with 'group_dict', a sorted list of
    (group id, nickname) pairs.
    """
    group_dict = {}
    if user_type:
        group_dict = {i['id']: i['nickname'] for i in SaltGroup.objects.values('id', 'nickname')}
    else:
        group_dict = {i['id']: i['nickname'] for g in User.objects.get(pk=pk).group.all()
                      for i in SaltGroup.objects.filter(user_group=g).values('id', 'nickname')}
    return {'group_dict': sorted(list(set(group_dict.items())))}
register.inclusion_tag('tag_user_group_groups.html')(show_user_group_groups)
| gpl-3.0 |
zhaodelong/django | django/db/backends/base/introspection.py | 370 | 7363 | from collections import namedtuple
from django.utils import six
# Structure returned by DatabaseIntrospection.get_table_list()
# ('type' is a one-character code; get_names() below treats 't' as a table)
TableInfo = namedtuple('TableInfo', ['name', 'type'])

# Structure returned by the DB-API cursor.description interface (PEP 249)
FieldInfo = namedtuple('FieldInfo',
    'name type_code display_size internal_size precision scale null_ok')
class BaseDatabaseIntrospection(object):
    """
    This class encapsulates all backend-specific introspection utilities
    """
    # Maps backend type codes to Django field class names; filled in by
    # each backend subclass.
    data_types_reverse = {}

    def __init__(self, connection):
        # connection: the database wrapper this introspection instance serves.
        self.connection = connection

    def get_field_type(self, data_type, description):
        """Hook for a database backend to use the cursor description to
        match a Django field type to a database column.

        For Oracle, the column data_type on its own is insufficient to
        distinguish between a FloatField and IntegerField, for example."""
        return self.data_types_reverse[data_type]

    def table_name_converter(self, name):
        """Apply a conversion to the name for the purposes of comparison.

        The default table name converter is for case sensitive comparison.
        """
        return name

    def column_name_converter(self, name):
        """
        Apply a conversion to the column name for the purposes of comparison.

        Uses table_name_converter() by default.
        """
        return self.table_name_converter(name)

    def table_names(self, cursor=None, include_views=False):
        """
        Returns a list of names of all tables that exist in the database.
        The returned table list is sorted by Python's default sorting. We
        do NOT use database's ORDER BY here to avoid subtle differences
        in sorting order between databases.
        """
        def get_names(cursor):
            # Type code 't' marks plain tables; anything else (views) is
            # included only when include_views is set.
            return sorted(ti.name for ti in self.get_table_list(cursor)
                          if include_views or ti.type == 't')
        # Open a cursor only if the caller didn't provide one.
        if cursor is None:
            with self.connection.cursor() as cursor:
                return get_names(cursor)
        return get_names(cursor)

    def get_table_list(self, cursor):
        """
        Returns an unsorted list of TableInfo named tuples of all tables and
        views that exist in the database.
        """
        raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_table_list() method')

    def django_table_names(self, only_existing=False, include_views=True):
        """
        Returns a list of all table names that have associated Django models and
        are in INSTALLED_APPS.

        If only_existing is True, the resulting list will only include the tables
        that actually exist in the database.
        """
        # Imported locally to avoid import-time circular dependencies.
        from django.apps import apps
        from django.db import router
        tables = set()
        for app_config in apps.get_app_configs():
            for model in router.get_migratable_models(app_config, self.connection.alias):
                if not model._meta.managed:
                    continue
                tables.add(model._meta.db_table)
                # M2M fields contribute their join tables as well.
                tables.update(f.m2m_db_table() for f in model._meta.local_many_to_many)
        tables = list(tables)
        if only_existing:
            existing_tables = self.table_names(include_views=include_views)
            tables = [
                t
                for t in tables
                if self.table_name_converter(t) in existing_tables
            ]
        return tables

    def installed_models(self, tables):
        "Returns a set of all models represented by the provided list of table names."
        from django.apps import apps
        from django.db import router
        all_models = []
        for app_config in apps.get_app_configs():
            all_models.extend(router.get_migratable_models(app_config, self.connection.alias))
        # Compare names through the converter so case handling matches the
        # backend's conventions.
        tables = list(map(self.table_name_converter, tables))
        return {
            m for m in all_models
            if self.table_name_converter(m._meta.db_table) in tables
        }

    def sequence_list(self):
        "Returns a list of information about all DB sequences for all models in all apps."
        from django.apps import apps
        from django.db import models, router
        sequence_list = []
        for app_config in apps.get_app_configs():
            for model in router.get_migratable_models(app_config, self.connection.alias):
                if not model._meta.managed:
                    continue
                if model._meta.swapped:
                    continue
                for f in model._meta.local_fields:
                    if isinstance(f, models.AutoField):
                        sequence_list.append({'table': model._meta.db_table, 'column': f.column})
                        break  # Only one AutoField is allowed per model, so don't bother continuing.
                for f in model._meta.local_many_to_many:
                    # If this is an m2m using an intermediate table,
                    # we don't need to reset the sequence.
                    if f.remote_field.through is None:
                        sequence_list.append({'table': f.m2m_db_table(), 'column': None})
        return sequence_list

    def get_key_columns(self, cursor, table_name):
        """
        Backends can override this to return a list of (column_name, referenced_table_name,
        referenced_column_name) for all key columns in given table.
        """
        raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_key_columns() method')

    def get_primary_key_column(self, cursor, table_name):
        """
        Returns the name of the primary key column for the given table.
        """
        # Each item is a (column_name, infodict) pair from get_indexes().
        for column in six.iteritems(self.get_indexes(cursor, table_name)):
            if column[1]['primary_key']:
                return column[0]
        return None

    def get_indexes(self, cursor, table_name):
        """
        Returns a dictionary of indexed fieldname -> infodict for the given
        table, where each infodict is in the format:
            {'primary_key': boolean representing whether it's the primary key,
             'unique': boolean representing whether it's a unique index}
        Only single-column indexes are introspected.
        """
        raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_indexes() method')

    def get_constraints(self, cursor, table_name):
        """
        Retrieves any constraints or keys (unique, pk, fk, check, index)
        across one or more columns.

        Returns a dict mapping constraint names to their attributes,
        where attributes is a dict with keys:
         * columns: List of columns this covers
         * primary_key: True if primary key, False otherwise
         * unique: True if this is a unique constraint, False otherwise
         * foreign_key: (table, column) of target, or None
         * check: True if check constraint, False otherwise
         * index: True if index, False otherwise.

        Some backends may return special constraint names that don't exist
        if they don't name constraints of a certain type (e.g. SQLite)
        """
        raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_constraints() method')
| bsd-3-clause |
DolphinDream/sverchok | nodes/analyzer/bbox_mk3.py | 1 | 7158 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
from itertools import product
import bpy
from bpy.props import BoolVectorProperty, EnumProperty
from mathutils import Matrix
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import dataCorrect, updateNode
class SvBBoxNodeMk3(bpy.types.Node, SverchCustomTreeNode):
    """
    Triggers: Bbox 2D or 3D
    Tooltip: Get vertices bounding box (vertices, sizes, center)
    """
    bl_idname = 'SvBBoxNodeMk3'
    bl_label = 'Bounding box'
    bl_icon = 'NONE'
    sv_icon = 'SV_BOUNDING_BOX'

    def update_sockets(self, context):
        """Show/hide the nine Min/Max/Size output sockets according to the
        toggle properties and the chosen 2D/3D mode, then trigger an update."""
        bools = [self.min_list, self.max_list, self.size_list]
        # '2D' -> 2, '3D' -> 3: number of axes currently in play.
        dims = int(self.box_dimensions[0])
        for i in range(3):
            for j in range(3):
                # Sockets 0-3 are Vertices/Edges/Mean/Center; the limit
                # sockets start at index 4, grouped Min, Max, Size.
                out_index = 4 + j + 3*i
                hidden = self.outputs[out_index].hide_safe
                if bools[i][j] and j < dims:
                    if hidden:
                        self.outputs[out_index].hide_safe = False
                else:
                    self.outputs[out_index].hide_safe = True
        updateNode(self, context)

    # Per-axis toggles for exposing the Min/Max/Size sockets.
    min_list: BoolVectorProperty(
        name='Min', description="Show Minimum values sockets", size=3, update=update_sockets)
    max_list: BoolVectorProperty(
        name='Max', description="Show Maximun values sockets", size=3, update=update_sockets)
    size_list: BoolVectorProperty(
        name='Size', description="Show Size values sockets", size=3, update=update_sockets)
    implentation_modes = [
        ("2D", "2D", "Outputs Rectangle over XY plane", 0),
        ("3D", "3D", "Outputs standard bounding box", 1)]
    box_dimensions: EnumProperty(
        name='Implementation', items=implentation_modes,
        description='Choose calculation method',
        default="3D", update=update_sockets)

    def draw_buttons(self, context, layout):
        """Draw the 2D/3D selector and the 3x3 grid of Min/Max/Size toggles."""
        layout .prop(self, 'box_dimensions', expand=True)
        col = layout.column(align=True)
        titles = ["Min", "Max", "Size"]
        prop = ['min_list', 'max_list', 'size_list']
        dims = int(self.box_dimensions[0])
        for i in range(3):
            row = col.row(align=True)
            row.label(text=titles[i])
            row2 = row.row(align=True)
            for j in range(dims):
                row2 .prop(self, prop[i], index=j, text='XYZ'[j], toggle=True)

    def sv_init(self, context):
        """Create the input socket and the 13 output sockets (4 fixed + 9
        per-axis limit sockets), then sync visibility."""
        son = self.outputs.new
        self.inputs.new('SvVerticesSocket', 'Vertices')
        son('SvVerticesSocket', 'Vertices')
        son('SvStringsSocket', 'Edges')
        son('SvVerticesSocket', 'Mean')
        son('SvMatrixSocket', 'Center')
        titles = ['Min', 'Max', 'Size']
        for j in range(3):
            for i in range(3):
                son('SvStringsSocket', titles[j] + ' ' + 'XYZ'[i])
        self.update_sockets(context)

    def migrate_from(self, old_node):
        # Older node versions stored the mode under 'dimensions'.
        self.box_dimensions = old_node.dimensions

    def generate_matrix(self, maxmin, dims, to_2d):
        """Build a transform matrix centered on the box with the box extents
        on the diagonal; in 2D mode Z is centered at 0 with scale 1."""
        center = [(u+v)*.5 for u, v in maxmin[:dims]]
        scale = [(u-v) for u, v in maxmin[:dims]]
        if to_2d:
            center += [0]
            scale += [1]
        mat = Matrix.Translation(center)
        for i, sca in enumerate(scale):
            mat[i][i] = sca
        return mat

    def generate_mean(self, verts, dims, to_2d):
        """Return the per-axis average of verts (Z forced to 0 in 2D mode),
        wrapped in a list."""
        avr = list(map(sum, zip(*verts)))
        avr = [n/len(verts) for n in avr[:dims]]
        if to_2d:
            avr += [0]
        return [avr]

    def process(self):
        """Compute bounding box vertices/edges, mean, center matrix and the
        per-axis min/max/size values for every incoming vertex list."""
        if not self.inputs['Vertices'].is_linked:
            return
        if not any(s.is_linked for s in self.outputs):
            return
        has_mat_out = bool(self.outputs['Center'].is_linked)
        has_mean = bool(self.outputs['Mean'].is_linked)
        has_vert_out = bool(self.outputs['Vertices'].is_linked)
        verts = self.inputs['Vertices'].sv_get(deepcopy=False)
        verts = dataCorrect(verts, nominal_dept=2)
        has_limits = any(s.is_linked for s in self.outputs[4:])

        if verts:
            verts_out = []
            edges_out = []
            edges = [
                (0, 1), (1, 3), (3, 2), (2, 0),  # bottom edges
                (4, 5), (5, 7), (7, 6), (6, 4),  # top edges
                (0, 4), (1, 5), (2, 6), (3, 7)  # sides
            ]
            mat_out = []
            mean_out = []
            min_vals = [[], [], []]
            max_vals = [[], [], []]
            size_vals = [[], [], []]
            to_2d = self.box_dimensions == '2D'
            dims = int(self.box_dimensions[0])

            for vec in verts:
                if has_mat_out or has_vert_out or has_limits:
                    # maxmin[i] is (max, min) for axis i.
                    maxmin = list(zip(map(max, *vec), map(min, *vec)))
                if has_vert_out:
                    # product() enumerates the 8 box corners; the reversals
                    # restore XYZ ordering and the conventional corner order.
                    out = list(product(*reversed(maxmin)))
                    v_out = [l[::-1] for l in out[::-1]]
                    if to_2d:
                        # 2D: keep the 4 bottom corners flattened onto Z=0.
                        verts_out.append([[v[0], v[1], 0] for v in v_out[:4]])
                        edges = edges[:4]
                    else:
                        verts_out.append(v_out)
                    edges_out.append(edges)
                if has_mat_out:
                    mat_out.append(self.generate_matrix(maxmin, dims, to_2d))
                if has_mean:
                    mean_out.append(self.generate_mean(vec, dims, to_2d))
                if has_limits:
                    for i in range(dims):
                        min_vals[i].append([maxmin[i][1]])
                        max_vals[i].append([maxmin[i][0]])
                        size_vals[i].append([maxmin[i][0] - maxmin[i][1]])

            if has_vert_out:
                self.outputs['Vertices'].sv_set(verts_out)
            if self.outputs['Edges'].is_linked:
                self.outputs['Edges'].sv_set(edges_out)
            if has_mean:
                self.outputs['Mean'].sv_set(mean_out)
            if self.outputs['Center'].is_linked:
                self.outputs['Center'].sv_set(mat_out)
            vals = [min_vals, max_vals, size_vals]
            for j in range(3):
                for i, socket in enumerate(self.outputs[4+3*j:7+3*j]):
                    if socket.is_linked:
                        socket.sv_set(vals[j][i])
def register():
    # Standard Blender add-on hook: make the node class available.
    bpy.utils.register_class(SvBBoxNodeMk3)
def unregister():
    # Standard Blender add-on hook: remove the node class on unload.
    bpy.utils.unregister_class(SvBBoxNodeMk3)
| gpl-3.0 |
JohnWinter/ThinkStats2 | code/analytic.py | 69 | 6265 | """This file contains code used in "Think Stats",
by Allen B. Downey, available from greenteapress.com
Copyright 2010 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function
import math
import numpy as np
import pandas
import nsfg
import thinkplot
import thinkstats2
def ParetoMedian(xmin, alpha):
    """Computes the median of a Pareto distribution.

    xmin: scale parameter (minimum possible value)
    alpha: shape parameter

    returns: float, xmin * 2**(1/alpha)
    """
    # Use 1.0/alpha: this module still targets Python 2 (see the
    # print_function __future__ import), where 1/alpha with an integer
    # alpha would truncate to 0 and always return xmin.
    return xmin * pow(2, 1.0 / alpha)
def MakeExpoCdf():
    """Generates a plot of the exponential CDF.

    Draws three curves with rate parameters lambda = 2, 1, 0.5 and saves
    the figure as analytic_expo_cdf.
    """
    thinkplot.PrePlot(3)
    for lam in [2.0, 1, 0.5]:
        xs, ps = thinkstats2.RenderExpoCdf(lam, 0, 3.0, 50)
        label = r'$\lambda=%g$' % lam
        thinkplot.Plot(xs, ps, label=label)

    thinkplot.Save(root='analytic_expo_cdf',
                   title='Exponential CDF',
                   xlabel='x',
                   ylabel='CDF')
def ReadBabyBoom(filename='babyboom.dat'):
    """Reads the babyboom data.

    filename: string path to the fixed-width data file

    returns: DataFrame with columns time, sex, weight_g, minutes
    """
    # (name, start, end, type) column spec for the fixed-width file;
    # positions are 1-based and inclusive in the source documentation.
    var_info = [
        ('time', 1, 8, int),
        ('sex', 9, 16, int),
        ('weight_g', 17, 24, int),
        ('minutes', 25, 32, int),
    ]
    columns = ['name', 'start', 'end', 'type']
    variables = pandas.DataFrame(var_info, columns=columns)
    # Convert inclusive end positions to exclusive slice bounds.
    variables.end += 1
    dct = thinkstats2.FixedWidthVariables(variables, index_base=1)

    # The first 59 lines of the file are header/commentary.
    df = dct.ReadFixedWidth(filename, skiprows=59)
    return df
def MakeBabyBoom():
    """Plot CDF of interarrival time on log and linear scales.

    Saves the two-panel figure as analytic_interarrivals.
    """
    # compute the interarrival times
    df = ReadBabyBoom()
    diffs = df.minutes.diff()
    cdf = thinkstats2.Cdf(diffs, label='actual')

    # Left panel: plain CDF on a linear scale.
    thinkplot.PrePlot(cols=2)
    thinkplot.Cdf(cdf)
    thinkplot.Config(xlabel='minutes',
                     ylabel='CDF',
                     legend=False)

    # Right panel: complementary CDF on a log y-scale (a straight line
    # here is the signature of an exponential distribution).
    thinkplot.SubPlot(2)
    thinkplot.Cdf(cdf, complement=True)
    thinkplot.Config(xlabel='minutes',
                     ylabel='CCDF',
                     yscale='log',
                     legend=False)

    thinkplot.Save(root='analytic_interarrivals',
                   legend=False)
def MakeParetoCdf():
    """Generates a plot of the Pareto CDF.

    Draws three curves with shape parameters alpha = 2, 1, 0.5 (fixed
    xmin = 0.5) and saves the figure as analytic_pareto_cdf.
    """
    xmin = 0.5

    thinkplot.PrePlot(3)
    for alpha in [2.0, 1.0, 0.5]:
        xs, ps = thinkstats2.RenderParetoCdf(xmin, alpha, 0, 10.0, n=100)
        thinkplot.Plot(xs, ps, label=r'$\alpha=%g$' % alpha)

    thinkplot.Save(root='analytic_pareto_cdf',
                   title='Pareto CDF',
                   xlabel='x',
                   ylabel='CDF')
def MakeParetoCdf2():
    """Generates a plot of the CDF of height in Pareto World.

    Uses xmin=100 cm and alpha=1.7; saves the figure as
    analytic_pareto_height.
    """
    xmin = 100
    alpha = 1.7
    xs, ps = thinkstats2.RenderParetoCdf(xmin, alpha, 0, 1000.0, n=100)
    thinkplot.Plot(xs, ps)

    thinkplot.Save(root='analytic_pareto_height',
                   title='Pareto CDF',
                   xlabel='height (cm)',
                   ylabel='CDF',
                   legend=False)
def MakeNormalCdf():
    """Generates a plot of the normal CDF.

    Draws three curves with paired (mu, sigma) parameters and saves the
    figure as analytic_normal_cdf.
    """
    thinkplot.PrePlot(3)

    mus = [1.0, 2.0, 3.0]
    sigmas = [0.5, 0.4, 0.3]

    for mu, sigma in zip(mus, sigmas):
        xs, ps = thinkstats2.RenderNormalCdf(mu=mu, sigma=sigma,
                                             low=-1.0, high=4.0)
        label = r'$\mu=%g$, $\sigma=%g$' % (mu, sigma)
        thinkplot.Plot(xs, ps, label=label)

    thinkplot.Save(root='analytic_normal_cdf',
                   title='Normal CDF',
                   xlabel='x',
                   ylabel='CDF',
                   loc=2)
def MakeNormalModel(weights):
    """Plot the CDF of birthweights with a normal model.

    weights: sequence of birth weights in pounds
    """
    # estimate parameters: trimming outliers yields a better fit
    mu, var = thinkstats2.TrimmedMeanVar(weights, p=0.01)
    print('Mean, Var', mu, var)

    # plot the model
    sigma = math.sqrt(var)
    print('Sigma', sigma)
    xs, ps = thinkstats2.RenderNormalCdf(mu, sigma, low=0, high=12.5)

    # Light gray so the model sits behind the data curve.
    thinkplot.Plot(xs, ps, label='model', color='0.8')

    # plot the data
    cdf = thinkstats2.Cdf(weights, label='data')

    thinkplot.PrePlot(1)
    thinkplot.Cdf(cdf)
    thinkplot.Save(root='analytic_birthwgt_model',
                   title='Birth weights',
                   xlabel='birth weight (lbs)',
                   ylabel='CDF')
def MakeExampleNormalPlot():
    """Generates a sample normal probability plot.

    Draws three random normal samples with different (mu, sigma) and saves
    the figure as analytic_normal_prob_example.
    """
    n = 1000
    thinkplot.PrePlot(3)

    mus = [0, 1, 5]
    sigmas = [1, 1, 2]
    for mu, sigma in zip(mus, sigmas):
        sample = np.random.normal(mu, sigma, n)
        xs, ys = thinkstats2.NormalProbability(sample)
        label = '$\mu=%d$, $\sigma=%d$' % (mu, sigma)
        thinkplot.Plot(xs, ys, label=label)

    thinkplot.Save(root='analytic_normal_prob_example',
                   title='Normal probability plot',
                   xlabel='standard normal sample',
                   ylabel='sample values')
def MakeNormalPlot(weights, term_weights):
    """Generate a normal probability plot of birth weights."""
    mean, var = thinkstats2.TrimmedMeanVar(weights, p=0.01)
    std = math.sqrt(var)

    # Reference line for a perfectly normal sample; drawn first so the
    # data series render on top of it.
    line_xs = [-4, 4]
    fit_xs, fit_ys = thinkstats2.FitLine(line_xs, mean, std)
    thinkplot.Plot(fit_xs, fit_ys, linewidth=4, color='0.8')

    thinkplot.PrePlot(2)
    for sample, label in [(weights, 'all live'), (term_weights, 'full term')]:
        sample_xs, sample_ys = thinkstats2.NormalProbability(sample)
        thinkplot.Plot(sample_xs, sample_ys, label=label)

    thinkplot.Save(root='analytic_birthwgt_normal',
                   title='Normal probability plot',
                   xlabel='Standard deviations from mean',
                   ylabel='Birth weight (lbs)')
def main():
    """Regenerate every figure for the analytic-distributions chapter."""
    thinkstats2.RandomSeed(18)
    MakeExampleNormalPlot()

    # make the analytic CDF figures
    MakeExpoCdf()
    MakeBabyBoom()
    MakeParetoCdf()
    MakeParetoCdf2()
    MakeNormalCdf()

    # check the distribution of birth weights against a normal model
    pregnancies = nsfg.ReadFemPreg()
    term_pregnancies = pregnancies[pregnancies.prglngth >= 37]
    all_weights = pregnancies.totalwgt_lb.dropna()
    term_weights = term_pregnancies.totalwgt_lb.dropna()
    MakeNormalModel(all_weights)
    MakeNormalPlot(all_weights, term_weights)
if __name__ == "__main__":
main()
| gpl-3.0 |
hellhovnd/django | django/templatetags/cache.py | 118 | 2303 | from __future__ import unicode_literals
from django.core.cache.utils import make_template_fragment_key
from django.template import Library, Node, TemplateSyntaxError, VariableDoesNotExist
from django.core.cache import cache
register = Library()
class CacheNode(Node):
    """Template node that caches its rendered contents.

    The cache key is derived from the fragment name and the resolved
    vary-on values; the timeout comes from a template variable resolved
    at render time.
    """

    def __init__(self, nodelist, expire_time_var, fragment_name, vary_on):
        self.nodelist = nodelist
        self.expire_time_var = expire_time_var
        self.fragment_name = fragment_name
        self.vary_on = vary_on

    def render(self, context):
        # Resolve and validate the timeout.
        try:
            timeout = self.expire_time_var.resolve(context)
        except VariableDoesNotExist:
            raise TemplateSyntaxError('"cache" tag got an unknown variable: %r' % self.expire_time_var.var)
        try:
            timeout = int(timeout)
        except (ValueError, TypeError):
            raise TemplateSyntaxError('"cache" tag got a non-integer timeout value: %r' % timeout)

        # Build the cache key from the fragment name and vary-on values.
        key_parts = [var.resolve(context) for var in self.vary_on]
        cache_key = make_template_fragment_key(self.fragment_name, key_parts)

        fragment = cache.get(cache_key)
        if fragment is None:
            fragment = self.nodelist.render(context)
            cache.set(cache_key, fragment, timeout)
        return fragment
@register.tag('cache')
def do_cache(parser, token):
    """
    This will cache the contents of a template fragment for a given amount
    of time.

    Usage::

        {% load cache %}
        {% cache [expire_time] [fragment_name] %}
            .. some expensive processing ..
        {% endcache %}

    This tag also supports varying by a list of arguments::

        {% load cache %}
        {% cache [expire_time] [fragment_name] [var1] [var2] .. %}
            .. some expensive processing ..
        {% endcache %}

    Each unique set of arguments will result in a unique cache entry.
    """
    nodelist = parser.parse(('endcache',))
    parser.delete_first_token()
    bits = token.split_contents()
    if len(bits) < 3:
        raise TemplateSyntaxError("'%r' tag requires at least 2 arguments." % bits[0])
    expire_time = parser.compile_filter(bits[1])
    fragment_name = bits[2]  # fragment_name can't be a variable.
    vary_on = [parser.compile_filter(bit) for bit in bits[3:]]
    return CacheNode(nodelist, expire_time, fragment_name, vary_on)
| bsd-3-clause |
moylop260/odoo-dev | addons/base_report_designer/plugin/openerp_report_designer/bin/script/AddAttachment.py | 384 | 11148 | #########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer d29583@groovegarden.com
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
import os
import uno
import unohelper
import xmlrpclib
import base64
from com.sun.star.task import XJobExecutor
if __name__<>"package":
from lib.gui import *
from lib.error import ErrorDialog
from lib.tools import *
from LoginTest import *
from lib.rpc import *
database="test"
uid = 3
class AddAttachment(unohelper.Base, XJobExecutor ):
    """OpenOffice job that uploads the current document (as .sxw or PDF)
    to an OpenERP server as an ir.attachment record.

    Python 2 code (uses the old `<>` operator); runs inside OpenOffice
    via the UNO bridge.
    """

    # Label shown in the "resource type" listbox -> attached file format.
    Kind = {
        'PDF' : 'pdf',
        'OpenOffice': 'sxw',
    }

    def __init__(self, ctx):
        """Build and show the dialog; blocks in doModalDialog()."""
        self.ctx = ctx
        self.module = "openerp_report"
        self.version = "0.1"
        LoginTest()
        if not loginstatus and __name__=="package":
            exit(1)
        self.aSearchResult = []
        desktop=getDesktop()
        oDoc2 = desktop.getCurrentComponent()
        docinfo=oDoc2.getDocumentInfo()
        global passwd
        self.password = passwd
        global url
        self.sock=RPCSession(url)
        # NOTE(review): user fields 2 and 3 appear to hold a record id and
        # model name saved by an earlier step — confirm against the other
        # plugin scripts.  When both are set, only a resource type needs
        # to be chosen; otherwise show the full model/record search dialog.
        if docinfo.getUserFieldValue(2) <> "" and docinfo.getUserFieldValue(3) <> "":
            self.win = DBModalDialog(60, 50, 180, 70, "Add Attachment to Server")
            self.win.addFixedText("lblResourceType", 2 , 5, 100, 10, "Select Appropriate Resource Type:")
            self.win.addComboListBox("lstResourceType", -2, 25, 176, 15,True)
            self.win.addButton('btnOkWithoutInformation', -2 , -5, 25 , 15,'OK' ,actionListenerProc = self.btnOkWithoutInformation_clicked )
        else:
            self.win = DBModalDialog(60, 50, 180, 190, "Add Attachment to Server")
            self.win.addFixedText("lblModuleName",2 , 9, 42, 20, "Select Module:")
            self.win.addComboListBox("lstmodel", -2, 5, 134, 15,True)
            self.lstModel = self.win.getControl( "lstmodel" )
            self.dModel = {}  # display name -> model technical name
            # Open a new connexion to the server
            ids = self.sock.execute(database, uid, self.password, 'ir.module.module', 'search', [('name','=','base_report_model'),('state', '=', 'installed')])
            if not len(ids):
                # If the module 'base_report_model' is not installed, use the default model
                self.dModel = {
                    "Partner":'res.partner',
                }
            else:
                # Build the display-name -> model map from base.report.model records.
                ids =self.sock.execute(database, uid, self.password, 'base.report.model' , 'search', [])
                res = self.sock.execute(database, uid, self.password, 'base.report.model' , 'read', ids, ['name','model_id'])
                models = self.sock.execute(database, uid, self.password, 'ir.model' , 'read', map(lambda x:x['model_id'][0], res), ['model'])
                models = dict(map(lambda x:(x['id'],x['model']), models))
                self.dModel = dict(map(lambda x: (x['name'],models[x['model_id'][0]]), res))
            for item in self.dModel.keys():
                self.lstModel.addItem(item, self.lstModel.getItemCount())
            self.win.addFixedText("lblSearchName",2 , 25, 60, 10, "Enter Search String:")
            self.win.addEdit("txtSearchName", 2, 35, 149, 15,)
            self.win.addButton('btnSearch', -2 , 35, 25 , 15,'Search' ,actionListenerProc = self.btnSearch_clicked )
            self.win.addFixedText("lblSearchRecord", 2 , 55, 60, 10, "Search Result:")
            self.win.addComboListBox("lstResource", -2, 65, 176, 70, False )
            self.lstResource = self.win.getControl( "lstResource" )
            self.win.addFixedText("lblResourceType", 2 , 137, 100, 20, "Select Appropriate Resource Type:")
            self.win.addComboListBox("lstResourceType", -2, 147, 176, 15,True )
            self.win.addButton('btnOkWithInformation', -2 , -5, 25 , 15,'OK' ,actionListenerProc = self.btnOkWithInformation_clicked )
        # Controls common to both dialog variants.
        self.lstResourceType = self.win.getControl( "lstResourceType" )
        for kind in self.Kind.keys():
            self.lstResourceType.addItem( kind, self.lstResourceType.getItemCount() )
        self.win.addButton('btnCancel', -2 - 27 , -5 , 30 , 15, 'Cancel' ,actionListenerProc = self.btnCancel_clicked )
        self.win.doModalDialog("lstResourceType", self.Kind.keys()[0])

    def btnSearch_clicked(self, oActionEvent):
        """Run name_search on the selected model and fill the result list."""
        modelSelectedItem = self.win.getListBoxSelectedItem("lstmodel")
        if modelSelectedItem == "":
            return
        desktop=getDesktop()
        oDoc2 = desktop.getCurrentComponent()
        docinfo=oDoc2.getDocumentInfo()
        self.aSearchResult =self.sock.execute( database, uid, self.password, self.dModel[modelSelectedItem], 'name_search', self.win.getEditText("txtSearchName"))
        self.win.removeListBoxItems("lstResource", 0, self.win.getListBoxItemCount("lstResource"))
        if self.aSearchResult == []:
            ErrorDialog("No search result found.", "", "Search Error.")
            return
        for result in self.aSearchResult:
            # result is an (id, display_name) pair from name_search
            self.lstResource.addItem(result[1],result[0])

    def _send_attachment(self, name, data, res_model, res_id):
        """Create an ir.attachment record on the server; return its id."""
        desktop = getDesktop()
        oDoc2 = desktop.getCurrentComponent()
        docinfo = oDoc2.getDocumentInfo()
        params = {
            'name': name,
            'datas': base64.encodestring( data ),
            'datas_fname': name,
            'res_model' : res_model,
            'res_id' : int(res_id),
        }
        return self.sock.execute( database, uid, self.password, 'ir.attachment', 'create', params )

    def send_attachment(self, model, resource_id):
        """Save/convert the current document and attach it to the given record.

        Returns the new attachment id, or None on error (an error dialog
        is shown to the user).
        """
        desktop = getDesktop()
        oDoc2 = desktop.getCurrentComponent()
        docinfo = oDoc2.getDocumentInfo()
        if oDoc2.getURL() == "":
            ErrorDialog("You should save your file.", "", "Saving Error.")
            return None
        url = oDoc2.getURL()
        if self.Kind[self.win.getListBoxSelectedItem("lstResourceType")] == "pdf":
            # strip the "file://" prefix before converting
            url = self.doc2pdf(url[7:])
            if url == None:
                ErrorDialog( "Problem in creating PDF.", "", "PDF Error.")
                return None
        url = url[7:]
        data = read_data_from_file( get_absolute_file_path( url ) )
        return self._send_attachment( os.path.basename( url ), data, model, resource_id )

    def btnOkWithoutInformation_clicked(self, oActionEvent):
        """OK handler for the short dialog (model/record taken from user fields)."""
        desktop = getDesktop()
        oDoc2 = desktop.getCurrentComponent()
        docinfo = oDoc2.getDocumentInfo()
        if self.win.getListBoxSelectedItem("lstResourceType") == "":
            ErrorDialog("You have to select a resource type.", "", "Selection Error." )
            return
        res = self.send_attachment( docinfo.getUserFieldValue(3), docinfo.getUserFieldValue(2) )
        self.win.endExecute()

    def btnOkWithInformation_clicked(self, oActionEvent):
        """OK handler for the full dialog (model/record chosen by the user)."""
        if self.win.getListBoxSelectedItem("lstResourceType") == "":
            ErrorDialog( "You have to select a resource type.", "", "Selection Error." )
            return
        if self.win.getListBoxSelectedItem("lstResource") == "" or self.win.getListBoxSelectedItem("lstmodel") == "":
            ErrorDialog("You have to select Model and Resource.", "", "Selection Error.")
            return
        resourceid = None
        # map the selected display name back to its record id
        for s in self.aSearchResult:
            if s[1] == self.win.getListBoxSelectedItem("lstResource"):
                resourceid = s[0]
                break
        if resourceid == None:
            ErrorDialog("No resource is selected.", "", "Resource Error." )
            return
        res = self.send_attachment( self.dModel[self.win.getListBoxSelectedItem('lstmodel')], resourceid )
        self.win.endExecute()

    def btnCancel_clicked(self, oActionEvent):
        """Close the dialog without doing anything."""
        self.win.endExecute()

    def doc2pdf(self, strFile):
        """Export `strFile` to PDF next to the original; return the PDF's
        file:// URL, or None if no export filter matched."""
        oDoc = None
        strFilterSubName = ''
        strUrl = convertToURL( strFile )
        desktop = getDesktop()
        oDoc = desktop.loadComponentFromURL( strUrl, "_blank", 0, Array(self._MakePropertyValue("Hidden",True)))
        if oDoc:
            strFilterSubName = ""
            # select appropriate filter
            if oDoc.supportsService("com.sun.star.presentation.PresentationDocument"):
                strFilterSubName = "impress_pdf_Export"
            elif oDoc.supportsService("com.sun.star.sheet.SpreadsheetDocument"):
                strFilterSubName = "calc_pdf_Export"
            elif oDoc.supportsService("com.sun.star.text.WebDocument"):
                strFilterSubName = "writer_web_pdf_Export"
            elif oDoc.supportsService("com.sun.star.text.GlobalDocument"):
                strFilterSubName = "writer_globaldocument_pdf_Export"
            elif oDoc.supportsService("com.sun.star.text.TextDocument"):
                strFilterSubName = "writer_pdf_Export"
            elif oDoc.supportsService("com.sun.star.drawing.DrawingDocument"):
                strFilterSubName = "draw_pdf_Export"
            elif oDoc.supportsService("com.sun.star.formula.FormulaProperties"):
                strFilterSubName = "math_pdf_Export"
            elif oDoc.supportsService("com.sun.star.chart.ChartDocument"):
                strFilterSubName = "chart_pdf_Export"
            else:
                pass
            filename = len(strFilterSubName) > 0 and convertToURL( os.path.splitext( strFile )[0] + ".pdf" ) or None
            if len(strFilterSubName) > 0:
                oDoc.storeToURL( filename, Array(self._MakePropertyValue("FilterName", strFilterSubName ),self._MakePropertyValue("CompressMode", "1" )))
            oDoc.close(True)
        # Can be None if len(strFilterSubName) <= 0
        # NOTE(review): if loadComponentFromURL returned a falsy document,
        # 'filename' is unbound here — confirm callers always pass a
        # loadable file.
        return filename

    def _MakePropertyValue(self, cName="", uValue=u""):
        """Build a com.sun.star.beans.PropertyValue struct for UNO calls."""
        oPropertyValue = createUnoStruct( "com.sun.star.beans.PropertyValue" )
        if cName:
            oPropertyValue.Name = cName
        if uValue:
            oPropertyValue.Value = uValue
        return oPropertyValue
if __name__<>"package" and __name__=="__main__":
AddAttachment(None)
elif __name__=="package":
g_ImplementationHelper.addImplementation( AddAttachment, "org.openoffice.openerp.report.addattachment", ("com.sun.star.task.Job",),)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
TheBits/translateapi | app.py | 1 | 2601 | import hug
import requests
import falcon
from yandex_translate import YandexTranslate
from mstranslator import Translator
from envparse import env
env.read_envfile('keys')
class TransltrTranslator(object):
    """Backend using the free transltr.org HTTP API (Russian source)."""

    # longest text this backend accepts
    MAX_TEXT = 100

    def translate(self, text, translate_to):
        """Translate `text` from Russian into `translate_to`."""
        response = requests.post(
            'http://www.transltr.org/api/translate',
            data={
                "text": text,
                "from": "ru",
                "to": translate_to,
            },
            headers={'Accept': 'application/json'},
        )
        return response.json()['translationText']

    def languages(self):
        """Return the list of supported target language codes."""
        response = requests.get(
            'http://www.transltr.org/api/getlanguagesfortranslate',
            headers={'Accept': 'application/json'},
        )
        return [entry['languageCode'] for entry in response.json()]
class MicrosoftTranslator(object):
    """Backend using the Microsoft Translator service (Russian source)."""

    # longest text this backend accepts
    MAX_TEXT = 10000

    def __init__(self):
        api_key = env('MSFT_TRANSLATOR')
        self.translator = Translator(api_key)

    def translate(self, text, translate_to):
        """Translate `text` from Russian into `translate_to`."""
        return self.translator.translate(text, lang_from='ru',
                                         lang_to=translate_to)

    def languages(self):
        """Return the list of supported target language codes."""
        return self.translator.get_langs()
class YandexTranslator(object):
    """Backend using the Yandex Translate service (Russian source)."""

    # longest text this backend accepts
    MAX_TEXT = 10000

    def __init__(self):
        self.translator = YandexTranslate(env('YANDEX_KEY'))

    def translate(self, text, translate_to):
        """Translate `text` from Russian into `translate_to`."""
        direction = 'ru-%s' % translate_to
        result = self.translator.translate(text, direction)
        return result['text'].pop()

    def languages(self):
        """Return the set of supported translation directions."""
        return self.translator.langs
def check_availability(translator, text_size, language):
    """Return `translator` if it can handle a text of `text_size`
    characters into `language`; otherwise return None.

    The size check is done first so translator.languages() (which may hit
    the network) is only called when the length is acceptable.
    """
    if text_size > translator.MAX_TEXT:
        return None
    return translator if language in translator.languages() else None
# by priority
TRANSLATORS = [
YandexTranslator(),
MicrosoftTranslator(),
TransltrTranslator(),
]
@hug.get('/translate/')
def translate(lang: hug.types.text, text: hug.types.text):
    """Translate Russian `text` into `lang` using the first backend that
    can handle this text length and target language.

    Returns {"translator": <backend class name>, "text": <translation>}.
    Raises 400 when no backend is available, 500 when translation fails.
    """
    try:
        # Lazily probe each backend in priority order; next() stops at the
        # first one whose availability check passes.
        available = (check_availability(translator, len(text), lang)
                     for translator in TRANSLATORS)
        translator = next(t for t in available if t is not None)
    except StopIteration:
        # every backend rejected the request (text too long / unsupported lang)
        raise falcon.HTTPBadRequest('Bad Request', 'There is no available translators')
    except Exception as e:
        # an availability check itself blew up (e.g. network error)
        raise falcon.HTTPBadRequest('Bad Request', e)
    try:
        response = {
            "translator": translator.__class__.__name__,
            "text": translator.translate(text, lang),
        }
        return response
    except Exception as e:
        raise falcon.HTTPInternalServerError('Translate Error', e)
| unlicense |
onfire73/pypeskg | ui/pypesvds/lib/extras/pdflib/lzw.py | 4 | 2356 | #!/usr/bin/env python
import sys
stderr = sys.stderr
## LZWDecoder
##
class LZWDecoder(object):
    """Decode an LZW-compressed byte stream (as used by the PDF
    LZWDecode filter).  Python 2 code: operates on byte strings.
    """

    # when nonzero, run() prints per-code trace information to stderr
    debug = 0

    def __init__(self, fp):
        self.fp = fp           # input file-like object, read one byte at a time
        self.buff = 0          # current input byte
        self.bpos = 8          # bit position inside self.buff (8 = exhausted)
        self.nbits = 9         # current code width in bits (9..12)
        self.table = None      # code -> string table, (re)built by feed()
        self.prevbuf = None    # previously emitted string
        return

    def readbits(self, bits):
        """Read the next `bits` bits from the stream, MSB first.

        Raises EOFError when the underlying stream is exhausted.
        """
        v = 0
        while 1:
            # the number of remaining bits we can get from the current buffer.
            r = 8-self.bpos
            if bits <= r:
                # |-----8-bits-----|
                # |-bpos-|-bits-|  |
                # |      |----r----|
                v = (v<<bits) | ((self.buff>>(r-bits)) & ((1<<bits)-1))
                self.bpos += bits
                break
            else:
                # |-----8-bits-----|
                # |-bpos-|---bits----...
                # |      |----r----|
                v = (v<<r) | (self.buff & ((1<<r)-1))
                bits -= r
                x = self.fp.read(1)
                if not x: raise EOFError
                self.buff = ord(x)
                self.bpos = 0
        return v

    def feed(self, code):
        """Consume one LZW code; return the decoded string (may be '')."""
        x = ''
        if code == 256:
            # clear-table code: reset the dictionary and the code width
            self.table = [ chr(c) for c in xrange(256) ] # 0-255
            self.table.append(None) # 256
            self.table.append(None) # 257
            self.prevbuf = ''
            self.nbits = 9
        elif code == 257:
            # end-of-data code: nothing to emit
            pass
        elif not self.prevbuf:
            # first code after a clear: emit the table entry verbatim
            x = self.prevbuf = self.table[code]
        else:
            if code < len(self.table):
                x = self.table[code]
                self.table.append(self.prevbuf+x[0])
            else:
                # code not yet in the table ("KwKwK" special case)
                self.table.append(self.prevbuf+self.prevbuf[0])
                x = self.table[code]
            # widen the code size just before the table outgrows it
            l = len(self.table)
            if l == 511:
                self.nbits = 10
            elif l == 1023:
                self.nbits = 11
            elif l == 2047:
                self.nbits = 12
            self.prevbuf = x
        return x

    def run(self):
        """Generator: yield decoded string fragments until end of input."""
        while 1:
            try:
                code = self.readbits(self.nbits)
            except EOFError:
                break
            x = self.feed(code)
            yield x
            if self.debug:
                print >>stderr, ('nbits=%d, code=%d, output=%r, table=%r' %
                                 (self.nbits, code, x, self.table[258:]))
        return
def main(argv):
    """Smoke test: decode a known LZW stream (Python 2 only) and print
    whether the output matches the expected plaintext."""
    import StringIO
    input = '\x80\x0b\x60\x50\x22\x0c\x0c\x85\x01'
    fp = StringIO.StringIO(input)
    expected = '\x2d\x2d\x2d\x2d\x2d\x41\x2d\x2d\x2d\x42'
    LZWDecoder.debug = 1
    output = ''.join(LZWDecoder(fp).run())
    # Python 2 print statements: the first prints a tuple.
    print (input, expected, output)
    print output == expected
    return 0
| apache-2.0 |
Soya93/Extract-Refactoring | python/lib/Lib/mimetypes.py | 92 | 18690 | """Guess the MIME type of a file.
This module defines two useful functions:
guess_type(url, strict=1) -- guess the MIME type and encoding of a URL.
guess_extension(type, strict=1) -- guess the extension for a given MIME type.
It also contains the following, for tuning the behavior:
Data:
knownfiles -- list of files to parse
inited -- flag set when init() has been called
suffix_map -- dictionary mapping suffixes to suffixes
encodings_map -- dictionary mapping suffixes to encodings
types_map -- dictionary mapping suffixes to types
Functions:
init([files]) -- parse a list of files, default knownfiles
read_mime_types(file) -- parse one file, return a dictionary or None
"""
import os
import posixpath
import urllib
__all__ = [
"guess_type","guess_extension","guess_all_extensions",
"add_type","read_mime_types","init"
]
knownfiles = [
"/etc/mime.types",
"/etc/httpd/mime.types", # Mac OS X
"/etc/httpd/conf/mime.types", # Apache
"/etc/apache/mime.types", # Apache 1
"/etc/apache2/mime.types", # Apache 2
"/usr/local/etc/httpd/conf/mime.types",
"/usr/local/lib/netscape/mime.types",
"/usr/local/etc/httpd/conf/mime.types", # Apache 1.2
"/usr/local/etc/mime.types", # Apache 1.3
]
inited = False
class MimeTypes:
    """MIME-types datastore.

    This datastore can handle information from mime.types-style files
    and supports basic determination of MIME type from a filename or
    URL, and can guess a reasonable extension given a MIME type.
    """

    def __init__(self, filenames=(), strict=True):
        # lazily build the module-level default tables on first use
        if not inited:
            init()
        self.encodings_map = encodings_map.copy()
        self.suffix_map = suffix_map.copy()
        self.types_map = ({}, {}) # dict for (non-strict, strict)
        self.types_map_inv = ({}, {})
        # seed from the module defaults: index 1 (strict) gets the
        # registered types, index 0 (non-strict) additionally covers
        # the common but unofficial ones
        for (ext, type) in types_map.items():
            self.add_type(type, ext, True)
        for (ext, type) in common_types.items():
            self.add_type(type, ext, False)
        for name in filenames:
            self.read(name, strict)

    def add_type(self, type, ext, strict=True):
        """Add a mapping between a type and an extension.

        When the extension is already known, the new
        type will replace the old one. When the type
        is already known the extension will be added
        to the list of known extensions.

        If strict is true, information will be added to
        list of standard types, else to the list of non-standard
        types.
        """
        self.types_map[strict][ext] = type
        # maintain the inverse map (type -> extensions) in parallel
        exts = self.types_map_inv[strict].setdefault(type, [])
        if ext not in exts:
            exts.append(ext)

    def guess_type(self, url, strict=True):
        """Guess the type of a file based on its URL.

        Return value is a tuple (type, encoding) where type is None if
        the type can't be guessed (no or unknown suffix) or a string
        of the form type/subtype, usable for a MIME Content-type
        header; and encoding is None for no encoding or the name of
        the program used to encode (e.g. compress or gzip). The
        mappings are table driven. Encoding suffixes are case
        sensitive; type suffixes are first tried case sensitive, then
        case insensitive.

        The suffixes .tgz, .taz and .tz (case sensitive!) are all
        mapped to '.tar.gz'. (This is table-driven too, using the
        dictionary suffix_map.)

        Optional `strict' argument when False adds a bunch of commonly found,
        but non-standard types.
        """
        scheme, url = urllib.splittype(url)
        if scheme == 'data':
            # syntax of data URLs:
            # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
            # mediatype := [ type "/" subtype ] *( ";" parameter )
            # data := *urlchar
            # parameter := attribute "=" value
            # type/subtype defaults to "text/plain"
            comma = url.find(',')
            if comma < 0:
                # bad data URL
                return None, None
            semi = url.find(';', 0, comma)
            if semi >= 0:
                type = url[:semi]
            else:
                type = url[:comma]
            if '=' in type or '/' not in type:
                type = 'text/plain'
            return type, None # never compressed, so encoding is None
        base, ext = posixpath.splitext(url)
        # expand compound suffixes (e.g. .tgz -> .tar.gz) before anything else
        while ext in self.suffix_map:
            base, ext = posixpath.splitext(base + self.suffix_map[ext])
        # peel off a compression-encoding suffix (.gz, .Z) if present
        if ext in self.encodings_map:
            encoding = self.encodings_map[ext]
            base, ext = posixpath.splitext(base)
        else:
            encoding = None
        # lookup order: strict map (exact case, then lowercase), then —
        # only when strict is false — the non-strict map the same way
        types_map = self.types_map[True]
        if ext in types_map:
            return types_map[ext], encoding
        elif ext.lower() in types_map:
            return types_map[ext.lower()], encoding
        elif strict:
            return None, encoding
        types_map = self.types_map[False]
        if ext in types_map:
            return types_map[ext], encoding
        elif ext.lower() in types_map:
            return types_map[ext.lower()], encoding
        else:
            return None, encoding

    def guess_all_extensions(self, type, strict=True):
        """Guess the extensions for a file based on its MIME type.

        Return value is a list of strings giving the possible filename
        extensions, including the leading dot ('.'). The extension is not
        guaranteed to have been associated with any particular data stream,
        but would be mapped to the MIME type `type' by guess_type().

        Optional `strict' argument when false adds a bunch of commonly found,
        but non-standard types.
        """
        type = type.lower()
        extensions = self.types_map_inv[True].get(type, [])
        if not strict:
            # append non-standard extensions, preserving order and
            # skipping duplicates
            for ext in self.types_map_inv[False].get(type, []):
                if ext not in extensions:
                    extensions.append(ext)
        return extensions

    def guess_extension(self, type, strict=True):
        """Guess the extension for a file based on its MIME type.

        Return value is a string giving a filename extension,
        including the leading dot ('.'). The extension is not
        guaranteed to have been associated with any particular data
        stream, but would be mapped to the MIME type `type' by
        guess_type(). If no extension can be guessed for `type', None
        is returned.

        Optional `strict' argument when false adds a bunch of commonly found,
        but non-standard types.
        """
        extensions = self.guess_all_extensions(type, strict)
        if not extensions:
            return None
        return extensions[0]

    def read(self, filename, strict=True):
        """
        Read a single mime.types-format file, specified by pathname.

        If strict is true, information will be added to
        list of standard types, else to the list of non-standard
        types.
        """
        fp = open(filename)
        self.readfp(fp, strict)
        fp.close()

    def readfp(self, fp, strict=True):
        """
        Read a single mime.types-format file.

        If strict is true, information will be added to
        list of standard types, else to the list of non-standard
        types.
        """
        while 1:
            line = fp.readline()
            if not line:
                break
            words = line.split()
            # strip everything from the first '#' (comment) onward
            for i in range(len(words)):
                if words[i][0] == '#':
                    del words[i:]
                    break
            if not words:
                continue
            # format: "type/subtype suffix1 suffix2 ..."
            type, suffixes = words[0], words[1:]
            for suff in suffixes:
                self.add_type(type, '.' + suff, strict)
def guess_type(url, strict=True):
    """Guess the type of a file based on its URL.

    Return value is a tuple (type, encoding) where type is None if the
    type can't be guessed (no or unknown suffix) or a string of the
    form type/subtype, usable for a MIME Content-type header; and
    encoding is None for no encoding or the name of the program used
    to encode (e.g. compress or gzip). The mappings are table
    driven. Encoding suffixes are case sensitive; type suffixes are
    first tried case sensitive, then case insensitive.

    The suffixes .tgz, .taz and .tz (case sensitive!) are all mapped
    to ".tar.gz". (This is table-driven too, using the dictionary
    suffix_map).

    Optional `strict' argument when false adds a bunch of commonly found, but
    non-standard types.
    """
    init()
    # init() rebinds the module-level name 'guess_type' to the default
    # MimeTypes instance's bound method, so this call dispatches to the
    # database method rather than recursing into this function.
    return guess_type(url, strict)
def guess_all_extensions(type, strict=True):
    """Guess the extensions for a file based on its MIME type.

    Return value is a list of strings giving the possible filename
    extensions, including the leading dot ('.'). The extension is not
    guaranteed to have been associated with any particular data
    stream, but would be mapped to the MIME type `type' by
    guess_type(). If no extension can be guessed for `type', None
    is returned.

    Optional `strict' argument when false adds a bunch of commonly found,
    but non-standard types.
    """
    init()
    # init() rebinds this module-level name to the default database's
    # bound method, so this call is not recursive.
    return guess_all_extensions(type, strict)
def guess_extension(type, strict=True):
    """Guess the extension for a file based on its MIME type.

    Return value is a string giving a filename extension, including the
    leading dot ('.'). The extension is not guaranteed to have been
    associated with any particular data stream, but would be mapped to the
    MIME type `type' by guess_type(). If no extension can be guessed for
    `type', None is returned.

    Optional `strict' argument when false adds a bunch of commonly found,
    but non-standard types.
    """
    init()
    # init() rebinds this module-level name to the default database's
    # bound method, so this call is not recursive.
    return guess_extension(type, strict)
def add_type(type, ext, strict=True):
    """Add a mapping between a type and an extension.

    When the extension is already known, the new
    type will replace the old one. When the type
    is already known the extension will be added
    to the list of known extensions.

    If strict is true, information will be added to
    list of standard types, else to the list of non-standard
    types.
    """
    init()
    # init() rebinds this module-level name to the default database's
    # bound method, so this call is not recursive.
    return add_type(type, ext, strict)
def init(files=None):
    """(Re)build the module-level default MIME databases.

    Parses `files` (default: the paths in `knownfiles` that exist) and
    rebinds the module-level convenience functions and tables to the
    resulting MimeTypes instance.
    """
    global guess_all_extensions, guess_extension, guess_type
    global suffix_map, types_map, encodings_map, common_types
    global add_type, inited
    inited = True
    db = MimeTypes()
    if files is None:
        files = knownfiles
    for file in files:
        if os.path.isfile(file):
            # db.read() opens AND closes the file; the previous
            # db.readfp(open(file)) leaked the file handle.
            db.read(file)
    encodings_map = db.encodings_map
    suffix_map = db.suffix_map
    types_map = db.types_map[True]
    # Rebind the module-level helpers to the database's bound methods so
    # subsequent calls bypass the wrapper functions above.
    guess_all_extensions = db.guess_all_extensions
    guess_extension = db.guess_extension
    guess_type = db.guess_type
    add_type = db.add_type
    common_types = db.types_map[False]
def read_mime_types(file):
    """Parse one mime.types-style file.

    Return the resulting strict type map (extension -> type), or None if
    the file cannot be opened.
    """
    try:
        f = open(file)
    except IOError:
        return None
    try:
        db = MimeTypes()
        db.readfp(f, True)
    finally:
        # close the handle even if parsing fails (the original leaked it)
        f.close()
    return db.types_map[True]
def _default_mime_types():
    """Install the built-in default tables into the module globals.

    Called once at import time; init() may later overlay entries parsed
    from the system's mime.types files.
    """
    global suffix_map
    global encodings_map
    global types_map
    global common_types

    # compound suffixes expanded before type lookup
    suffix_map = {
        '.tgz': '.tar.gz',
        '.taz': '.tar.gz',
        '.tz': '.tar.gz',
        }

    # compression-encoding suffixes (case sensitive)
    encodings_map = {
        '.gz': 'gzip',
        '.Z': 'compress',
        }

    # Before adding new types, make sure they are either registered with IANA,
    # at http://www.isi.edu/in-notes/iana/assignments/media-types
    # or extensions, i.e. using the x- prefix

    # If you add to these, please keep them sorted!
    types_map = {
        '.a' : 'application/octet-stream',
        '.ai' : 'application/postscript',
        '.aif' : 'audio/x-aiff',
        '.aifc' : 'audio/x-aiff',
        '.aiff' : 'audio/x-aiff',
        '.au' : 'audio/basic',
        '.avi' : 'video/x-msvideo',
        '.bat' : 'text/plain',
        '.bcpio' : 'application/x-bcpio',
        '.bin' : 'application/octet-stream',
        '.bmp' : 'image/x-ms-bmp',
        '.c' : 'text/plain',
        # Duplicates :(  (for repeated keys, the later literal entry wins)
        '.cdf' : 'application/x-cdf',
        '.cdf' : 'application/x-netcdf',
        '.cpio' : 'application/x-cpio',
        '.csh' : 'application/x-csh',
        '.css' : 'text/css',
        '.dll' : 'application/octet-stream',
        '.doc' : 'application/msword',
        '.dot' : 'application/msword',
        '.dvi' : 'application/x-dvi',
        '.eml' : 'message/rfc822',
        '.eps' : 'application/postscript',
        '.etx' : 'text/x-setext',
        '.exe' : 'application/octet-stream',
        '.gif' : 'image/gif',
        '.gtar' : 'application/x-gtar',
        '.h' : 'text/plain',
        '.hdf' : 'application/x-hdf',
        '.htm' : 'text/html',
        '.html' : 'text/html',
        '.ief' : 'image/ief',
        '.jpe' : 'image/jpeg',
        '.jpeg' : 'image/jpeg',
        '.jpg' : 'image/jpeg',
        '.js' : 'application/x-javascript',
        '.ksh' : 'text/plain',
        '.latex' : 'application/x-latex',
        '.m1v' : 'video/mpeg',
        '.man' : 'application/x-troff-man',
        '.me' : 'application/x-troff-me',
        '.mht' : 'message/rfc822',
        '.mhtml' : 'message/rfc822',
        '.mif' : 'application/x-mif',
        '.mov' : 'video/quicktime',
        '.movie' : 'video/x-sgi-movie',
        '.mp2' : 'audio/mpeg',
        '.mp3' : 'audio/mpeg',
        '.mpa' : 'video/mpeg',
        '.mpe' : 'video/mpeg',
        '.mpeg' : 'video/mpeg',
        '.mpg' : 'video/mpeg',
        '.ms' : 'application/x-troff-ms',
        '.nc' : 'application/x-netcdf',
        '.nws' : 'message/rfc822',
        '.o' : 'application/octet-stream',
        '.obj' : 'application/octet-stream',
        '.oda' : 'application/oda',
        '.p12' : 'application/x-pkcs12',
        '.p7c' : 'application/pkcs7-mime',
        '.pbm' : 'image/x-portable-bitmap',
        '.pdf' : 'application/pdf',
        '.pfx' : 'application/x-pkcs12',
        '.pgm' : 'image/x-portable-graymap',
        '.pl' : 'text/plain',
        '.png' : 'image/png',
        '.pnm' : 'image/x-portable-anymap',
        '.pot' : 'application/vnd.ms-powerpoint',
        '.ppa' : 'application/vnd.ms-powerpoint',
        '.ppm' : 'image/x-portable-pixmap',
        '.pps' : 'application/vnd.ms-powerpoint',
        '.ppt' : 'application/vnd.ms-powerpoint',
        '.ps' : 'application/postscript',
        '.pwz' : 'application/vnd.ms-powerpoint',
        '.py' : 'text/x-python',
        '.pyc' : 'application/x-python-code',
        '.pyo' : 'application/x-python-code',
        '.qt' : 'video/quicktime',
        '.ra' : 'audio/x-pn-realaudio',
        '.ram' : 'application/x-pn-realaudio',
        '.ras' : 'image/x-cmu-raster',
        '.rdf' : 'application/xml',
        '.rgb' : 'image/x-rgb',
        '.roff' : 'application/x-troff',
        '.rtx' : 'text/richtext',
        '.sgm' : 'text/x-sgml',
        '.sgml' : 'text/x-sgml',
        '.sh' : 'application/x-sh',
        '.shar' : 'application/x-shar',
        '.snd' : 'audio/basic',
        '.so' : 'application/octet-stream',
        '.src' : 'application/x-wais-source',
        '.sv4cpio': 'application/x-sv4cpio',
        '.sv4crc' : 'application/x-sv4crc',
        '.swf' : 'application/x-shockwave-flash',
        '.t' : 'application/x-troff',
        '.tar' : 'application/x-tar',
        '.tcl' : 'application/x-tcl',
        '.tex' : 'application/x-tex',
        '.texi' : 'application/x-texinfo',
        '.texinfo': 'application/x-texinfo',
        '.tif' : 'image/tiff',
        '.tiff' : 'image/tiff',
        '.tr' : 'application/x-troff',
        '.tsv' : 'text/tab-separated-values',
        '.txt' : 'text/plain',
        '.ustar' : 'application/x-ustar',
        '.vcf' : 'text/x-vcard',
        '.wav' : 'audio/x-wav',
        '.wiz' : 'application/msword',
        '.wsdl' : 'application/xml',
        '.xbm' : 'image/x-xbitmap',
        '.xlb' : 'application/vnd.ms-excel',
        # Duplicates :(
        '.xls' : 'application/excel',
        '.xls' : 'application/vnd.ms-excel',
        '.xml' : 'text/xml',
        '.xpdl' : 'application/xml',
        '.xpm' : 'image/x-xpixmap',
        '.xsl' : 'application/xml',
        '.xwd' : 'image/x-xwindowdump',
        '.zip' : 'application/zip',
        }

    # These are non-standard types, commonly found in the wild. They will
    # only match if strict=0 flag is given to the API methods.

    # Please sort these too
    common_types = {
        '.jpg' : 'image/jpg',
        '.mid' : 'audio/midi',
        '.midi': 'audio/midi',
        '.pct' : 'image/pict',
        '.pic' : 'image/pict',
        '.pict': 'image/pict',
        '.rtf' : 'application/rtf',
        '.xul' : 'text/xul'
        }


_default_mime_types()
if __name__ == '__main__':
    # Python 2 command-line driver: guess types (or, with -e, extensions)
    # for each argument.
    import sys
    import getopt

    USAGE = """\
Usage: mimetypes.py [options] type

Options:
    --help / -h       -- print this message and exit
    --lenient / -l    -- additionally search of some common, but non-standard
                         types.
    --extension / -e  -- guess extension instead of type

More than one type argument may be given.
"""

    def usage(code, msg=''):
        # print help text and exit with the given status code
        print USAGE
        if msg: print msg
        sys.exit(code)

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'hle',
                                   ['help', 'lenient', 'extension'])
    except getopt.error, msg:
        usage(1, msg)

    strict = 1
    extension = 0
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            usage(0)
        elif opt in ('-l', '--lenient'):
            strict = 0
        elif opt in ('-e', '--extension'):
            extension = 1
    for gtype in args:
        if extension:
            guess = guess_extension(gtype, strict)
            if not guess: print "I don't know anything about type", gtype
            else: print guess
        else:
            guess, encoding = guess_type(gtype, strict)
            if not guess: print "I don't know anything about type", gtype
            else: print 'type:', guess, 'encoding:', encoding
| apache-2.0 |
jingjidejuren/hadoop | dev-support/determine-flaky-tests-hadoop.py | 118 | 8525 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Given a jenkins test job, this script examines all runs of the job done
# within specified period of time (number of days prior to the execution
# time of this script), and reports all failed tests.
#
# The output of this script includes a section for each run that has failed
# tests, with each failed test name listed.
#
# More importantly, at the end, it outputs a summary section to list all failed
# tests within all examined runs, and indicate how many runs a same test
# failed, and sorted all failed tests by how many runs each test failed.
#
# This way, when we see failed tests in PreCommit build, we can quickly tell
# whether a failed test is a new failure, or it failed before and how often it
# failed, so to have idea whether it may just be a flaky test.
#
# Of course, to be 100% sure about the reason of a test failure, closer look
# at the failed test for the specific run is necessary.
#
import sys
import platform
# sys.hexversion encodes the interpreter version as a single int
# (e.g. 0x020600F0 == 2.6.0 final), which makes range checks easy.
sysversion = sys.hexversion
onward30 = False  # True when running on Python 3.x; selects urllib vs urllib2 below
if sysversion < 0x020600F0:
  sys.exit("Minimum supported python version is 2.6, the current version is " +
      "Python" + platform.python_version())
if sysversion == 0x030000F0:
  # Exactly Python 3.0.0 final is excluded (known interpreter bug).
  sys.exit("There is a known bug with Python" + platform.python_version() +
      ", please try a different version");
if sysversion < 0x03000000:
  import urllib2
else:
  onward30 = True
  import urllib.request
import datetime
import json as simplejson
import logging
from optparse import OptionParser
import time
# Configuration
DEFAULT_JENKINS_URL = "https://builds.apache.org"
DEFAULT_JOB_NAME = "Hadoop-Common-trunk"
DEFAULT_NUM_PREVIOUS_DAYS = 14
DEFAULT_TOP_NUM_FAILED_TEST = -1  # -1 means "not in summary mode"
SECONDS_PER_DAY = 86400
# total number of runs to examine
numRunsToExamine = 0
#summary mode
summary_mode = False
#total number of errors
error_count = 0
""" Parse arguments """
def parse_args():
  """Parse command-line options and return the optparse options object.

  Exits with a usage error when positional arguments are supplied.
  """
  opt_parser = OptionParser()
  # (short, long, type, dest, help, default) for each supported option.
  option_specs = (
      ("-J", "--jenkins-url", "string", "jenkins_url", "Jenkins URL",
       DEFAULT_JENKINS_URL),
      ("-j", "--job-name", "string", "job_name", "Job name to look at",
       DEFAULT_JOB_NAME),
      ("-n", "--num-days", "int", "num_prev_days",
       "Number of days to examine", DEFAULT_NUM_PREVIOUS_DAYS),
      ("-t", "--top", "int", "num_failed_tests",
       "Summary Mode, only show top number of failed tests",
       DEFAULT_TOP_NUM_FAILED_TEST),
  )
  for short_flag, long_flag, opt_type, opt_dest, opt_help, opt_default in option_specs:
    opt_parser.add_option(short_flag, long_flag, type=opt_type, dest=opt_dest,
                          help=opt_help, default=opt_default)
  parsed_options, leftover_args = opt_parser.parse_args()
  if leftover_args:
    opt_parser.error("unexpected arguments: " + repr(leftover_args))
  return parsed_options
""" Load data from specified url """
def load_url_data(url):
  """Fetch *url* over HTTP and return its body parsed as JSON.

  Uses urllib.request on Python 3 (decoding with the charset the server
  declares) and urllib2 on Python 2, mirroring the module-level version check.
  """
  if onward30:
    response = urllib.request.urlopen(url)
    charset = response.info().get_param('charset')
    body = response.read().decode(charset)
    return simplejson.loads(body, strict=False)
  response = urllib2.urlopen(url)
  return simplejson.load(response, strict=False)
""" List all builds of the target project. """
def list_builds(jenkins_url, job_name):
  """Return the list of build dicts (url, result, timestamp) for *job_name*.

  Re-raises any fetch error after logging it (unless in summary mode) and
  bumping the module-level error_count.
  """
  global summary_mode
  # Bug fix: error_count is assigned below, so without this declaration
  # Python treated it as a local and the 'error_count += 1' in the except
  # handler raised UnboundLocalError, masking the real fetch failure.
  # (The sibling find_failing_tests() already declares it correctly.)
  global error_count
  url = "%(jenkins)s/job/%(job_name)s/api/json?tree=builds[url,result,timestamp]" % dict(
      jenkins=jenkins_url,
      job_name=job_name)
  try:
    data = load_url_data(url)
  except:
    if not summary_mode:
      logging.error("Could not fetch: %s" % url)
    error_count += 1
    raise
  return data['builds']
""" Find the names of any tests which failed in the given build output URL. """
def find_failing_tests(testReportApiJson, jobConsoleOutput):
  """Return the set of fully-qualified test names that failed in one build.

  A test counts as failed when its status is REGRESSION/FAILED or when it
  carries error details.  Returns an empty set (after counting the error)
  if the test report cannot be fetched.
  """
  global summary_mode
  global error_count
  failed_tests = set()
  try:
    report = load_url_data(testReportApiJson)
  except:
    if not summary_mode:
      logging.error(" Could not open testReport, check " +
          jobConsoleOutput + " for why it was reported failed")
    error_count += 1
    return failed_tests
  for suite in report['suites']:
    for case in suite['cases']:
      case_status = case['status']
      has_error_details = case['errorDetails'] is not None
      if case_status in ('REGRESSION', 'FAILED') or has_error_details:
        failed_tests.add(case['className'] + "." + case['name'])
  if not failed_tests and not summary_mode:
    logging.info(" No failed tests in testReport, check " +
        jobConsoleOutput + " for why it was reported failed.")
  return failed_tests
""" Iterate runs of specfied job within num_prev_days and collect results """
def find_flaky_tests(jenkins_url, job_name, num_prev_days):
  """Examine all runs of *job_name* in the last *num_prev_days* days and
  return a dict mapping failed test name -> number of runs it failed in.

  Side effects: sets the module-level numRunsToExamine to the total number
  of builds inspected, and logs per-build details unless in summary mode.
  """
  global numRunsToExamine
  global summary_mode
  all_failing = dict()
  # First list all builds
  builds = list_builds(jenkins_url, job_name)
  # Select only those in the last N days
  # (Jenkins timestamps are in milliseconds; /1000 converts to epoch seconds)
  min_time = int(time.time()) - SECONDS_PER_DAY * num_prev_days
  builds = [b for b in builds if (int(b['timestamp']) / 1000) > min_time]
  # Filter out only those that failed
  failing_build_urls = [(b['url'] , b['timestamp']) for b in builds
      if (b['result'] in ('UNSTABLE', 'FAILURE'))]
  tnum = len(builds)
  num = len(failing_build_urls)
  numRunsToExamine = tnum
  if not summary_mode:
    # (".", ", ...")[num > 0] is an old tuple-index conditional: the second
    # string is picked when at least one failing build exists.
    logging.info(" THERE ARE " + str(num) + " builds (out of " + str(tnum)
      + ") that have failed tests in the past " + str(num_prev_days) + " days"
      + ((".", ", as listed below:\n")[num > 0]))
  for failed_build_with_time in failing_build_urls:
    failed_build = failed_build_with_time[0];
    # Jenkins URL suffixes for the console log and the JSON test report.
    jobConsoleOutput = failed_build + "Console";
    testReport = failed_build + "testReport";
    testReportApiJson = testReport + "/api/json";
    ts = float(failed_build_with_time[1]) / 1000.
    st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
    if not summary_mode:
      logging.info("===>%s" % str(testReport) + " (" + st + ")")
    failing = find_failing_tests(testReportApiJson, jobConsoleOutput)
    if failing:
      for ftest in failing:
        if not summary_mode:
          logging.info(" Failed test: %s" % ftest)
        all_failing[ftest] = all_failing.get(ftest,0)+1
  return all_failing
def main():
  """Entry point: parse options, collect flaky-test counts and print a report."""
  global numRunsToExamine
  global summary_mode
  logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
  # set up logger to write to stdout
  soh = logging.StreamHandler(sys.stdout)
  soh.setLevel(logging.INFO)
  logger = logging.getLogger()
  # Replace the default (stderr) handler installed by basicConfig with the
  # stdout handler so the report can be piped/redirected cleanly.
  logger.removeHandler(logger.handlers[0])
  logger.addHandler(soh)
  opts = parse_args()
  logging.info("****Recently FAILED builds in url: " + opts.jenkins_url
      + "/job/" + opts.job_name + "")
  # -t/--top was given: suppress per-build output, show only the summary.
  if opts.num_failed_tests != -1:
    summary_mode = True
  all_failing = find_flaky_tests(opts.jenkins_url, opts.job_name,
      opts.num_prev_days)
  if len(all_failing) == 0:
    raise SystemExit(0)
  if summary_mode and opts.num_failed_tests < len(all_failing):
    logging.info("\nAmong " + str(numRunsToExamine) +
        " runs examined, top " + str(opts.num_failed_tests) +
        " failed tests <#failedRuns: testName>:")
  else:
    logging.info("\nAmong " + str(numRunsToExamine) +
        " runs examined, all failed tests <#failedRuns: testName>:")
  # print summary section: all failed tests sorted by how many times they failed
  line_count = 0
  for tn in sorted(all_failing, key=all_failing.get, reverse=True):
    logging.info(" " + str(all_failing[tn])+ ": " + tn)
    if summary_mode:
      line_count += 1
      if line_count == opts.num_failed_tests:
        break
  # error_count is only read here (no assignment), so no global declaration
  # is needed; it is incremented by the fetch helpers above.
  if summary_mode and error_count > 0:
    logging.info("\n" + str(error_count) + " errors found, you may "
        + "re-run in non summary mode to see error details.");
if __name__ == "__main__":
main()
| apache-2.0 |
Matt-Deacalion/django | django/db/migrations/executor.py | 103 | 14172 | from __future__ import unicode_literals
from django.apps.registry import apps as global_apps
from django.db import migrations
from .exceptions import InvalidMigrationPlan
from .loader import MigrationLoader
from .recorder import MigrationRecorder
from .state import ProjectState
class MigrationExecutor(object):
    """
    End-to-end migration execution - loads migrations, and runs them
    up or down to a specified set of targets.
    """
    def __init__(self, connection, progress_callback=None):
        # progress_callback, when given, is invoked with event names such as
        # "apply_start"/"apply_success"/"render_start" so callers (e.g. the
        # migrate management command) can report progress.
        self.connection = connection
        self.loader = MigrationLoader(self.connection)
        self.recorder = MigrationRecorder(self.connection)
        self.progress_callback = progress_callback
    def migration_plan(self, targets, clean_start=False):
        """
        Given a set of targets, returns a list of (Migration instance, backwards?).

        With clean_start=True the plan is computed as if no migration had been
        applied yet (used to build the full forwards plan over an empty DB).
        """
        plan = []
        if clean_start:
            applied = set()
        else:
            applied = set(self.loader.applied_migrations)
        for target in targets:
            # If the target is (app_label, None), that means unmigrate everything
            if target[1] is None:
                for root in self.loader.graph.root_nodes():
                    if root[0] == target[0]:
                        for migration in self.loader.graph.backwards_plan(root):
                            if migration in applied:
                                plan.append((self.loader.graph.nodes[migration], True))
                                applied.remove(migration)
            # If the migration is already applied, do backwards mode,
            # otherwise do forwards mode.
            elif target in applied:
                # Don't migrate backwards all the way to the target node (that
                # may roll back dependencies in other apps that don't need to
                # be rolled back); instead roll back through target's immediate
                # child(ren) in the same app, and no further.
                next_in_app = sorted(
                    n for n in
                    self.loader.graph.node_map[target].children
                    if n[0] == target[0]
                )
                for node in next_in_app:
                    for migration in self.loader.graph.backwards_plan(node):
                        if migration in applied:
                            plan.append((self.loader.graph.nodes[migration], True))
                            applied.remove(migration)
            else:
                for migration in self.loader.graph.forwards_plan(target):
                    if migration not in applied:
                        plan.append((self.loader.graph.nodes[migration], False))
                        applied.add(migration)
        return plan
    def migrate(self, targets, plan=None, fake=False, fake_initial=False):
        """
        Migrates the database up to the given targets.
        Django first needs to create all project states before a migration is
        (un)applied and in a second step run all the database operations.
        """
        if plan is None:
            plan = self.migration_plan(targets)
        # Create the forwards plan Django would follow on an empty database
        full_plan = self.migration_plan(self.loader.graph.leaf_nodes(), clean_start=True)
        all_forwards = all(not backwards for mig, backwards in plan)
        all_backwards = all(backwards for mig, backwards in plan)
        if not plan:
            pass # Nothing to do for an empty plan
        elif all_forwards == all_backwards:
            # This should only happen if there's a mixed plan
            raise InvalidMigrationPlan(
                "Migration plans with both forwards and backwards migrations "
                "are not supported. Please split your migration process into "
                "separate plans of only forwards OR backwards migrations.",
                plan
            )
        elif all_forwards:
            self._migrate_all_forwards(plan, full_plan, fake=fake, fake_initial=fake_initial)
        else:
            # No need to check for `elif all_backwards` here, as that condition
            # would always evaluate to true.
            self._migrate_all_backwards(plan, full_plan, fake=fake)
        self.check_replacements()
    def _migrate_all_forwards(self, plan, full_plan, fake, fake_initial):
        """
        Take a list of 2-tuples of the form (migration instance, False) and
        apply them in the order they occur in the full_plan.
        """
        migrations_to_run = {m[0] for m in plan}
        state = ProjectState(real_apps=list(self.loader.unmigrated_apps))
        for migration, _ in full_plan:
            if not migrations_to_run:
                # We remove every migration that we applied from this set so
                # that we can bail out once the last migration has been applied
                # and don't always run until the very end of the migration
                # process.
                break
            if migration in migrations_to_run:
                # 'apps' lands in state.__dict__ once rendered (cached_property),
                # so this check renders the models lazily, exactly once.
                if 'apps' not in state.__dict__:
                    if self.progress_callback:
                        self.progress_callback("render_start")
                    state.apps # Render all -- performance critical
                    if self.progress_callback:
                        self.progress_callback("render_success")
                state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
                migrations_to_run.remove(migration)
            else:
                # Not part of this run: just advance the in-memory state.
                migration.mutate_state(state, preserve=False)
    def _migrate_all_backwards(self, plan, full_plan, fake):
        """
        Take a list of 2-tuples of the form (migration instance, True) and
        unapply them in reverse order they occur in the full_plan.
        Since unapplying a migration requires the project state prior to that
        migration, Django will compute the migration states before each of them
        in a first run over the plan and then unapply them in a second run over
        the plan.
        """
        migrations_to_run = {m[0] for m in plan}
        # Holds all migration states prior to the migrations being unapplied
        states = {}
        state = ProjectState(real_apps=list(self.loader.unmigrated_apps))
        if self.progress_callback:
            self.progress_callback("render_start")
        for migration, _ in full_plan:
            if not migrations_to_run:
                # We remove every migration that we applied from this set so
                # that we can bail out once the last migration has been applied
                # and don't always run until the very end of the migration
                # process.
                break
            if migration in migrations_to_run:
                if 'apps' not in state.__dict__:
                    state.apps # Render all -- performance critical
                # The state before this migration
                states[migration] = state
                # The old state keeps as-is, we continue with the new state
                state = migration.mutate_state(state, preserve=True)
                migrations_to_run.remove(migration)
            else:
                migration.mutate_state(state, preserve=False)
        if self.progress_callback:
            self.progress_callback("render_success")
        # Second pass: unapply in the order given by the (backwards) plan,
        # feeding each migration the pre-state captured above.
        for migration, _ in plan:
            self.unapply_migration(states[migration], migration, fake=fake)
    def collect_sql(self, plan):
        """
        Takes a migration plan and returns a list of collected SQL
        statements that represent the best-efforts version of that plan.
        """
        statements = []
        state = None
        for migration, backwards in plan:
            with self.connection.schema_editor(collect_sql=True) as schema_editor:
                if state is None:
                    state = self.loader.project_state((migration.app_label, migration.name), at_end=False)
                if not backwards:
                    state = migration.apply(state, schema_editor, collect_sql=True)
                else:
                    state = migration.unapply(state, schema_editor, collect_sql=True)
            statements.extend(schema_editor.collected_sql)
        return statements
    def apply_migration(self, state, migration, fake=False, fake_initial=False):
        """
        Runs a migration forwards.

        With fake_initial=True, an initial migration whose tables/columns
        already exist is recorded as applied without touching the schema.
        Returns the project state after the migration.
        """
        if self.progress_callback:
            self.progress_callback("apply_start", migration, fake)
        if not fake:
            if fake_initial:
                # Test to see if this is an already-applied initial migration
                applied, state = self.detect_soft_applied(state, migration)
                if applied:
                    fake = True
            if not fake:
                # Alright, do it normally
                with self.connection.schema_editor() as schema_editor:
                    state = migration.apply(state, schema_editor)
        # For replacement migrations, record individual statuses
        if migration.replaces:
            for app_label, name in migration.replaces:
                self.recorder.record_applied(app_label, name)
        else:
            self.recorder.record_applied(migration.app_label, migration.name)
        # Report progress
        if self.progress_callback:
            self.progress_callback("apply_success", migration, fake)
        return state
    def unapply_migration(self, state, migration, fake=False):
        """
        Runs a migration backwards.

        Returns the project state after unapplying.
        """
        if self.progress_callback:
            self.progress_callback("unapply_start", migration, fake)
        if not fake:
            with self.connection.schema_editor() as schema_editor:
                state = migration.unapply(state, schema_editor)
        # For replacement migrations, record individual statuses
        if migration.replaces:
            for app_label, name in migration.replaces:
                self.recorder.record_unapplied(app_label, name)
        else:
            self.recorder.record_unapplied(migration.app_label, migration.name)
        # Report progress
        if self.progress_callback:
            self.progress_callback("unapply_success", migration, fake)
        return state
    def check_replacements(self):
        """
        Mark replacement migrations applied if their replaced set all are.
        We do this unconditionally on every migrate, rather than just when
        migrations are applied or unapplied, so as to correctly handle the case
        when a new squash migration is pushed to a deployment that already had
        all its replaced migrations applied. In this case no new migration will
        be applied, but we still want to correctly maintain the applied state
        of the squash migration.
        """
        applied = self.recorder.applied_migrations()
        for key, migration in self.loader.replacements.items():
            all_applied = all(m in applied for m in migration.replaces)
            if all_applied and key not in applied:
                self.recorder.record_applied(*key)
    def detect_soft_applied(self, project_state, migration):
        """
        Tests whether a migration has been implicitly applied - that the
        tables or columns it would create exist. This is intended only for use
        on initial migrations (as it only looks for CreateModel and AddField).

        Returns (applied?, state-after-migration).
        """
        if migration.initial is None:
            # Bail if the migration isn't the first one in its app
            if any(app == migration.app_label for app, name in migration.dependencies):
                return False, project_state
        elif migration.initial is False:
            # Bail if it's NOT an initial migration
            return False, project_state
        if project_state is None:
            after_state = self.loader.project_state((migration.app_label, migration.name), at_end=True)
        else:
            after_state = migration.mutate_state(project_state)
        apps = after_state.apps
        found_create_model_migration = False
        found_add_field_migration = False
        # Make sure all create model and add field operations are done
        for operation in migration.operations:
            if isinstance(operation, migrations.CreateModel):
                model = apps.get_model(migration.app_label, operation.name)
                if model._meta.swapped:
                    # We have to fetch the model to test with from the
                    # main app cache, as it's not a direct dependency.
                    model = global_apps.get_model(model._meta.swapped)
                if model._meta.proxy or not model._meta.managed:
                    continue
                if model._meta.db_table not in self.connection.introspection.table_names(self.connection.cursor()):
                    return False, project_state
                found_create_model_migration = True
            elif isinstance(operation, migrations.AddField):
                model = apps.get_model(migration.app_label, operation.model_name)
                if model._meta.swapped:
                    # We have to fetch the model to test with from the
                    # main app cache, as it's not a direct dependency.
                    model = global_apps.get_model(model._meta.swapped)
                if model._meta.proxy or not model._meta.managed:
                    continue
                table = model._meta.db_table
                db_field = model._meta.get_field(operation.name).column
                fields = self.connection.introspection.get_table_description(self.connection.cursor(), table)
                if db_field not in (f.name for f in fields):
                    return False, project_state
                found_add_field_migration = True
        # If we get this far and we found at least one CreateModel or AddField migration,
        # the migration is considered implicitly applied.
        return (found_create_model_migration or found_add_field_migration), after_state
| bsd-3-clause |
castroflavio/onos | tools/test/scenarios/bin/find-flow.py | 32 | 1147 | #! /usr/bin/env python
import requests
import sys
from requests.auth import HTTPBasicAuth
if len(sys.argv) != 4:
print "usage: find-flow onos-node name device-id"
sys.exit(1)
node = sys.argv[1]
name = sys.argv[2]
deviceId = sys.argv[3]
flowsRequest = requests.get('http://' + node + ':8181/onos/v1/flows/' + deviceId,
auth=HTTPBasicAuth('onos', 'rocks'))
if flowsRequest.status_code != 200:
print flowsRequest.text
sys.exit(1)
flowsJson = flowsRequest.json()
for flow in flowsJson["flows"]:
if deviceId == flow["deviceId"]:
for criterion in flow["selector"]["criteria"]:
if criterion["type"] == 'IN_PORT' and criterion["port"] > 0:
for instruction in flow["treatment"]["instructions"]:
if instruction["port"] > 0 and instruction["type"] == 'OUTPUT':
print "@stc " + name + "FlowState=" + flow["state"]
print "@stc " + name + "FlowId=" + flow["id"]
print "@stc " + name + "FlowPort=" + str(instruction["port"])
sys.exit(0)
sys.exit(1)
| apache-2.0 |
LighthouseHPC/lighthouse | sandbox/lily/django_orthg/django_orthg/settings.py | 1 | 6722 | """
Django settings for django_orthg project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
'''BASE_DIR = os.path.dirname(os.path.dirname(__file__))'''
SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'dar9nvrnq40u_=liq65gaprm-4**vre#lx1)h_zztifr@bmti8'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'orthg',
'dojango',
###----- for haystack -----###
'haystack',
###----- for dajaxice and dajax -----###
'dajaxice',
'dajax',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'django_orthg.urls'
WSGI_APPLICATION = 'django_orthg.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE':'django.db.backends.sqlite3',
'NAME': 'orthg.db',
#'HOST':'/Users/lily/lighthouse-taxonomy/sandbox/lily/django_orthg/lighthouse_orthg.db'
# 'USER': 'lighthouse',
# 'PASSWORD': 'yellow1234',
# 'HOST': '127.0.0.1'
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
TEMPLATE_DIRS = ( os.path.join(SITE_ROOT, 'templates'),
)
#in my own comouter:os.path.join('/Users/lily/lighthouse-taxonomy/sandbox/lily/django_orthg/django_orthg', 'templates')]
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
#in my own computer:
#"/Users/lily/lighthouse-taxonomy/sandbox/lily/django_orthg/orthg/static",
os.path.join(SITE_ROOT, 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
# 'dajaxice.finders.DajaxiceFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '!v-5+5whgdi1zfttr%jr7#k3@cya#$17ab^&z0%0qb@a+k^f^5'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.csrf",
# Must define a function "templates(request)" in django.core.context_processors
# in order to enable the template tag {{ templates }} for TEMPLATE_URL
#"django.core.context_processors.templates",
# For dajaxice.
"django.core.context_processors.request",
'django.contrib.messages.context_processors.messages'
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
'''LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}'''
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.request',
'django.contrib.messages.context_processors.messages'
)
# for Whoosh
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.whoosh_backend.WhooshEngine',
'PATH': os.path.join(os.path.dirname(__file__), 'whoosh_index'),
},
}
HAYSTACK_SIGNAL_PROCESSOR = 'haystack.signals.RealtimeSignalProcessor'
#HAYSTACK_SITECONF = 'orthg'
#Configuration of dojango
DOJANGO_DOJO_VERSION = "1.8.3"
DOJANGO_DOJO_THEME = "claro" #default is "claro" -- "tundra", "soria", "nihilo"
| mit |
oliverhr/odoo | openerp/addons/base/ir/ir_default.py | 342 | 1883 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class ir_default(osv.osv):
    """Per-user / per-company default values for object fields (legacy OSV model)."""
    _name = 'ir.default'
    _columns = {
        'field_tbl': fields.char('Object'),          # model (table) the default applies to
        'field_name': fields.char('Object Field'),   # field on that model
        'value': fields.char('Default Value'),
        'uid': fields.many2one('res.users', 'Users'),
        'page': fields.char('View'),
        'ref_table': fields.char('Table Ref.'),
        'ref_id': fields.integer('ID Ref.',size=64),
        'company_id': fields.many2one('res.company','Company')
    }
    def _get_company_id(self, cr, uid, context=None):
        # Default company_id: the company of the user creating the record,
        # or False when the user has no company set.
        res = self.pool.get('res.users').read(cr, uid, [uid], ['company_id'], context=context)
        if res and res[0]['company_id']:
            return res[0]['company_id'][0]
        return False
    _defaults = {
        'company_id': _get_company_id,
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
kuri-kustar/haptic_interface-project | manipulation/mitsubishi_arm_hardware_interface/scripts/writePos.py | 2 | 2942 | #!/usr/bin/env python
###############################################################################################
# #
# Mitsubishi RV-6SDL pose writing and command example #
# #
# by Rui P. de Figueiredo and Tarek Taha, Dongming Gan, Khalifa University Robotics Institute #
# #
###############################################################################################
import rospy
import serial
import time
# NOTE(review): rospy and time are imported but not used in this script.
# Open serial port connection
baudrate=19200
COM =serial.Serial('/dev/ttyUSB0',baudrate)
COM.parity = 'E'   # even parity
COM.stopbits = 2
# NOTE(review): timeout 0.0 makes reads non-blocking, so COM.read(1) can
# return an empty string and the loops below busy-spin until data arrives —
# confirm this is intended vs. a small positive timeout.
COM.timeout= 0.0
#########################################
# Read end effector POSE values routine #
#########################################
buffer =[]
# Handshake: Send "1\r\n" to enter reading routine (read mode) and wait for response
COM.write("1\r\n")
while 1: # Get response
  read_char = COM.read(1)
  buffer.append(read_char)   #
  if read_char == '\n': # finish reading
     break
if "R" not in buffer: # READ ("R") MODE
  exit(1) # something went wrong while reading
# End handshake
# Get end-effector pose values (comma-separated, terminated by '\n')
pose_values_buffer = ""
while '\n' not in pose_values_buffer:
  read_char=COM.read(1)
  pose_values_buffer=pose_values_buffer+str(read_char)
print pose_values_buffer   #
# End get end-effector values
# Finalize reading routine by reading a "E" character
buffer = []
while '\n' not in buffer: # Get response
  read_char = COM.read(1)
  buffer.append(read_char)   #
if "E" not in buffer: # READ ("R") MODE
  exit(1) # something went wrong while reading
# End reading routine
####################################
# Send desired pose values routine #
####################################
buffer =[]
# Handshake: Send "2\r\n" to enter commanding routine (move mode) and wait for response
COM.write("2\r\n")
while 1: # Get response
  read_char = COM.read(1)
  buffer.append(read_char)   #
  if read_char == '\n': # finish reading
     break
print buffer
if "M" not in buffer: # READ END ("E") CHARACTER
  exit(1) # something went wrong while reading
# End handshake
# Target pose: x, y, z, roll, pitch, yaw — sent as a comma-separated line.
command = [0.0,0.0,0.0,0.0,0.0,0.0] # IN MILIMETERS
S = str(command[0]) + ',' + str(command[1]) + ',' + str(command[2]) + ',' + str(command[3]) + ',' + str(command[4]) + ',' + str(command[5]) + "\r\n"
COM.write(S) # Send command
# Finalize reading routine by reading a "E" character
buffer = []
while '\n' not in buffer: # Get response
  read_char = COM.read(1)
  buffer.append(read_char)
if "E" not in buffer: # READ END ("E") CHARACTER
  exit(1) # something went wrong while reading
# End reading routine
| gpl-3.0 |
lordmuffin/aws-cfn-plex | functions/credstash/pip/vcs/subversion.py | 343 | 9350 | from __future__ import absolute_import
import logging
import os
import re
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip.index import Link
from pip.utils import rmtree, display_path
from pip.utils.logging import indent_log
from pip.vcs import vcs, VersionControl
_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile('committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
logger = logging.getLogger(__name__)
class Subversion(VersionControl):
name = 'svn'
dirname = '.svn'
repo_name = 'checkout'
schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
    def get_info(self, location):
        """Returns (url, revision), where both are strings.

        Either element may be None when `svn info` output cannot be parsed.
        """
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        # LANG=C forces untranslated svn output so the English-based
        # regexes below can match.
        output = self.run_command(
            ['info', location],
            show_stdout=False,
            extra_environ={'LANG': 'C'},
        )
        match = _svn_url_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine URL of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine revision of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return url, None
        return url, match.group(1)
    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        # Strip user:password from the URL so they don't leak into logs;
        # get_rev_options has already captured them above.
        url = self.remove_auth_from_url(url)
        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory --force fixes this, but was only added in svn 1.5
                rmtree(location)
            self.run_command(
                ['export'] + rev_options + [url, location],
                show_stdout=False)
    def switch(self, dest, url, rev_options):
        # Re-point the existing checkout at *dest* to a different repo URL.
        self.run_command(['switch'] + rev_options + [url, dest])
    def update(self, dest, rev_options):
        # Bring the checkout at *dest* up to the requested revision.
        self.run_command(['update'] + rev_options + [dest])
    def obtain(self, dest):
        """Check out the repository into *dest* (or switch/update an existing
        checkout there, as decided by check_destination)."""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        # Credentials were folded into rev_options; keep them out of the
        # logged/stored URL.
        url = self.remove_auth_from_url(url)
        if rev:
            rev_display = ' (to revision %s)' % rev
        else:
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['checkout', '-q'] + rev_options + [url, dest])
def get_location(self, dist, dependency_links):
for url in dependency_links:
egg_fragment = Link(url).egg_fragment
if not egg_fragment:
continue
if '-' in egg_fragment:
# FIXME: will this work when a package has - in the name?
key = '-'.join(egg_fragment.split('-')[:-1]).lower()
else:
key = egg_fragment
if key == dist.key:
return url.split('#', 1)[0]
return None
def get_revision(self, location):
    """
    Return the maximum revision for all files under a given location
    """
    # Note: taken from setuptools.command.egg_info
    revision = 0
    for base, dirs, files in os.walk(location):
        if self.dirname not in dirs:
            # Directory has no .svn subdir -> it is not under version
            # control; prune the walk below it.
            dirs[:] = []
            continue  # no sense walking uncontrolled subdirs
        dirs.remove(self.dirname)
        entries_fn = os.path.join(base, self.dirname, 'entries')
        if not os.path.exists(entries_fn):
            # FIXME: should we warn?
            continue
        dirurl, localrev = self._get_svn_url_rev(base)
        if base == location:
            # os.walk yields the top directory first, so base_url is
            # always set before the elif below can read it.
            base_url = dirurl + '/'  # save the root url
        elif not dirurl or not dirurl.startswith(base_url):
            dirs[:] = []
            continue  # not part of the same svn tree, skip it
        revision = max(revision, localrev)
    return revision
def get_url_rev(self):
    """Return (url, rev), restoring the svn+ prefix on ssh URLs."""
    # The base class strips the leading 'svn+' from 'svn+ssh://' URLs;
    # put it back so the svn client understands the scheme.
    base_url, base_rev = super(Subversion, self).get_url_rev()
    if base_url.startswith('ssh://'):
        base_url = 'svn+' + base_url
    return base_url, base_rev
def get_url(self, location):
    """Return the svn URL for *location*, searching parents for setup.py."""
    # In cases where the source is in a subdirectory, not alongside
    # setup.py we have to look up in the location until we find a real
    # setup.py
    orig_location = location
    while not os.path.exists(os.path.join(location, 'setup.py')):
        parent = os.path.dirname(location)
        if parent == location:
            # We've traversed up to the root of the filesystem without
            # finding setup.py
            logger.warning(
                "Could not find setup.py for directory %s (tried all "
                "parent directories)",
                orig_location,
            )
            return None
        location = parent
    return self._get_svn_url_rev(location)[0]
def _get_svn_url_rev(self, location):
    """Return (url, rev) for the svn checkout at *location*.

    Tries, in order: the pre-1.7 ``.svn/entries`` file (both the
    line-oriented format of svn 1.4-1.6 and the older XML format),
    then ``svn info --xml`` for subversion >= 1.7 working copies.
    """
    from pip.exceptions import InstallationError
    entries_path = os.path.join(location, self.dirname, 'entries')
    if os.path.exists(entries_path):
        with open(entries_path) as f:
            data = f.read()
    else:  # subversion >= 1.7 does not have the 'entries' file
        data = ''
    if (data.startswith('8') or
            data.startswith('9') or
            data.startswith('10')):
        # Line-oriented entries format: records are separated by
        # '\n\x0c\n' (form feed), fields within a record by newlines.
        data = list(map(str.splitlines, data.split('\n\x0c\n')))
        del data[0][0]  # get rid of the '8'
        url = data[0][3]
        # Field 9 of each record holds its revision; '+ [0]' guarantees
        # max() below has something to work with.
        revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
    elif data.startswith('<?xml'):
        # Old XML entries format.
        match = _svn_xml_url_re.search(data)
        if not match:
            raise ValueError('Badly formatted data: %r' % data)
        url = match.group(1)  # get repository URL
        revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
    else:
        try:
            # subversion >= 1.7
            xml = self.run_command(
                ['info', '--xml', location],
                show_stdout=False,
            )
            url = _svn_info_xml_url_re.search(xml).group(1)
            revs = [
                int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
            ]
        except InstallationError:
            # 'svn info' failed; caller gets (None, 0).
            url, revs = None, []
    if revs:
        rev = max(revs)
    else:
        rev = 0
    return url, rev
def get_src_requirement(self, dist, location):
    """Build an 'svn+URL@rev#egg=name' requirement string for *dist*."""
    repo = self.get_url(location)
    if repo is None:
        return None
    # FIXME: why not project name?
    egg_project_name = dist.egg_name().split('-', 1)[0]
    revision = self.get_revision(location)
    return 'svn+%s@%s#egg=%s' % (repo, revision, egg_project_name)
def check_version(self, dest, rev_options):
    """Always assume the versions don't match"""
    # Deliberately pessimistic: reporting False forces the caller down
    # the switch/update path instead of trusting a local version probe.
    return False
@staticmethod
def remove_auth_from_url(url):
# Return a copy of url with 'username:password@' removed.
# username/pass params are passed to subversion through flags
# and are not recognized in the url.
# parsed url
purl = urllib_parse.urlsplit(url)
stripped_netloc = \
purl.netloc.split('@')[-1]
# stripped url
url_pieces = (
purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment
)
surl = urllib_parse.urlunsplit(url_pieces)
return surl
def get_rev_options(url, rev):
    """Build the extra svn CLI arguments for revision *rev* and any
    credentials embedded in *url*."""
    options = ['-r', rev] if rev else []
    split = urllib_parse.urlsplit(url)
    if hasattr(split, 'username'):
        # >= Python-2.5: urlsplit exposes the credentials directly.
        user, secret = split.username, split.password
    else:
        # Ancient urlparse: dig the 'user:pass@' prefix out of netloc.
        netloc = split[1]
        if '@' in netloc:
            auth_part = netloc.split('@')[0]
            if ':' in auth_part:
                user, secret = auth_part.split(':', 1)
            else:
                user, secret = auth_part, None
        else:
            user, secret = None, None
    if user:
        options += ['--username', user]
    if secret:
        options += ['--password', secret]
    return options
# Register this backend so pip can dispatch 'svn+...' URLs to it.
vcs.register(Subversion)
| mit |
jabl/offlineimap | offlineimap/folder/IMAP.py | 6 | 20816 | # IMAP folder support
# Copyright (C) 2002-2007 John Goerzen
# <jgoerzen@complete.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from Base import BaseFolder
import imaplib
from offlineimap import imaputil, imaplibutil
from offlineimap.ui import UIBase
from offlineimap.version import versionstr
import rfc822, time, string, random, binascii, re
from StringIO import StringIO
from copy import copy
import time
class IMAPFolder(BaseFolder):
    """One folder (mailbox) on an IMAP server, accessed through the
    connection pool of an imapserver object."""

    def __init__(self, imapserver, name, visiblename, accountname, repository):
        self.config = imapserver.config
        self.expunge = repository.getexpunge()   # EXPUNGE after deletions?
        self.name = imaputil.dequote(name)
        self.root = None # imapserver.root
        self.sep = imapserver.delim              # folder hierarchy separator
        self.imapserver = imapserver
        # uid -> {'uid', 'flags', 'time'}; populated by cachemessagelist().
        self.messagelist = None
        self.visiblename = visiblename
        self.accountname = accountname
        self.repository = repository
        # Used by savemessage_getnewheader() to build unique header values.
        self.randomgenerator = random.Random()
        BaseFolder.__init__(self)
def selectro(self, imapobj):
    """Select this folder when we do not need write access.

    Prefer SELECT to EXAMINE if we can, since some servers
    (Courier) do not stabilize UID validity until the folder is
    selected."""
    fullname = self.getfullname()
    try:
        imapobj.select(fullname)
    except imapobj.readonly:
        # Writable SELECT refused; fall back to a read-only select.
        imapobj.select(fullname, readonly=1)
def getaccountname(self):
    # Name of the offlineimap account this folder belongs to.
    return self.accountname

def suggeststhreads(self):
    # IMAP folders can be synced by multiple worker threads.
    return 1

def waitforthread(self):
    # Block until the server allows another connection/thread.
    self.imapserver.connectionwait()

def getcopyinstancelimit(self):
    # Key used to throttle concurrent message copies per repository.
    return 'MSGCOPY_' + self.repository.getname()

def getvisiblename(self):
    # Folder name as given at construction time (user-visible form).
    return self.visiblename
def getuidvalidity(self):
    # Return the server's UIDVALIDITY value for this folder as a long.
    imapobj = self.imapserver.acquireconnection()
    try:
        # Primes untagged_responses
        self.selectro(imapobj)
        return long(imapobj.untagged_responses['UIDVALIDITY'][0])
    finally:
        self.imapserver.releaseconnection(imapobj)
def quickchanged(self, statusfolder):
    """Cheaply decide whether this folder may differ from *statusfolder*.

    Returns True when a change is detected (or cannot be ruled out),
    False when the count and newest UID both match.
    """
    # An IMAP folder has definitely changed if the number of
    # messages or the UID of the last message have changed. Otherwise
    # only flag changes could have occurred.
    imapobj = self.imapserver.acquireconnection()
    try:
        # Primes untagged_responses
        imapobj.select(self.getfullname(), readonly = 1, force = 1)
        try:
            # Some mail servers do not return an EXISTS response if
            # the folder is empty.
            maxmsgid = long(imapobj.untagged_responses['EXISTS'][0])
        except KeyError:
            return True
        # Different number of messages than last time?
        if maxmsgid != len(statusfolder.getmessagelist()):
            return True
        if maxmsgid < 1:
            # No messages; return
            return False
        # Now, get the UID for the last message.
        response = imapobj.fetch('%d' % maxmsgid, '(UID)')[1]
    finally:
        self.imapserver.releaseconnection(imapobj)
    # Discard the message number.
    messagestr = string.split(response[0], maxsplit = 1)[1]
    options = imaputil.flags2hash(messagestr)
    if not options.has_key('UID'):
        # Server returned no UID -- treat as changed to force a full scan.
        return True
    uid = long(options['UID'])
    # Compare the newest server UID with the newest UID we have on file.
    saveduids = statusfolder.getmessagelist().keys()
    saveduids.sort()
    if uid != saveduids[-1]:
        return True
    return False
# TODO: Make this so that it can define a date that would be the oldest messages etc.
def cachemessagelist(self):
    """Fetch UIDs, flags and internal dates for this folder's messages
    (honouring the account's maxage/maxsize limits) and store them in
    self.messagelist, keyed by UID."""
    imapobj = self.imapserver.acquireconnection()
    self.messagelist = {}
    try:
        # Primes untagged_responses
        imapobj.select(self.getfullname(), readonly = 1, force = 1)
        maxage = self.config.getdefaultint("Account " + self.accountname, "maxage", -1)
        maxsize = self.config.getdefaultint("Account " + self.accountname, "maxsize", -1)
        if maxage != -1 or maxsize != -1:
            try:
                search_condition = "("
                if maxage != -1:
                    # Find out what the oldest message is that we should look at.
                    oldest_time_struct = time.gmtime(time.time() - (60*60*24*maxage))
                    # Format this manually - otherwise locales could cause problems.
                    # Fix: RFC 3501 date-month is always three letters, so
                    # "June"/"July" (as previously used) would be rejected.
                    monthnames_standard = ["Jan", "Feb", "Mar", "Apr", "May",
                                           "Jun", "Jul", "Aug", "Sep", "Oct",
                                           "Nov", "Dec"]
                    our_monthname = monthnames_standard[oldest_time_struct[1]-1]
                    daystr = "%(day)02d" % {'day' : oldest_time_struct[2]}
                    date_search_str = "SINCE " + daystr + "-" + our_monthname \
                                      + "-" + str(oldest_time_struct[0])
                    search_condition += date_search_str
                if maxsize != -1:
                    if maxage != -1:
                        # Both conditions present - separate them with a space.
                        # Fix: this previously tested 'maxage != 1'.
                        search_condition += " "
                    # Fix: use the already-parsed integer instead of the raw
                    # getdefault() return value.
                    search_condition += "SMALLER " + str(maxsize)
                search_condition += ")"
                searchresult = imapobj.search(None, search_condition)
                # The result comes back space-separated; convert it to the
                # comma-separated message-set syntax FETCH expects.
                messagesToFetch = searchresult[1][0].replace(" ", ",")
            except KeyError:
                return
            if len(messagesToFetch) < 1:
                # No messages; return
                return
        else:
            try:
                # Some mail servers do not return an EXISTS response if
                # the folder is empty.
                maxmsgid = long(imapobj.untagged_responses['EXISTS'][0])
                messagesToFetch = '1:%d' % maxmsgid
            except KeyError:
                return
            if maxmsgid < 1:
                # No messages; return
                return
        # Now, get the flags and UIDs for these.
        # We could conceivably get rid of maxmsgid and just say
        # '1:*' here.
        response = imapobj.fetch(messagesToFetch, '(FLAGS UID)')[1]
    finally:
        self.imapserver.releaseconnection(imapobj)
    for messagestr in response:
        # Discard the message number.
        messagestr = string.split(messagestr, maxsplit = 1)[1]
        options = imaputil.flags2hash(messagestr)
        if 'UID' not in options:
            UIBase.getglobalui().warn('No UID in message with options %s' %\
                                      str(options),
                                      minor = 1)
        else:
            uid = long(options['UID'])
            flags = imaputil.flagsimap2maildir(options['FLAGS'])
            rtime = imaplibutil.Internaldate2epoch(messagestr)
            self.messagelist[uid] = {'uid': uid, 'flags': flags, 'time': rtime}
def getmessagelist(self):
    # Mapping uid -> {'uid','flags','time'}; filled by cachemessagelist().
    return self.messagelist

def getmessage(self, uid):
    """Fetch the full text of message *uid*, converting CRLF line
    endings to bare LF."""
    ui = UIBase.getglobalui()
    imapobj = self.imapserver.acquireconnection()
    try:
        imapobj.select(self.getfullname(), readonly = 1)
        # BODY.PEEK avoids setting the \Seen flag on the server.
        initialresult = imapobj.uid('fetch', '%d' % uid, '(BODY.PEEK[])')
        ui.debug('imap', 'Returned object from fetching %d: %s' % \
                 (uid, str(initialresult)))
        return initialresult[1][0][1].replace("\r\n", "\n")
    finally:
        self.imapserver.releaseconnection(imapobj)

def getmessagetime(self, uid):
    # Internal date (epoch seconds) cached by cachemessagelist().
    return self.messagelist[uid]['time']

def getmessageflags(self, uid):
    # Maildir-format flag list cached by cachemessagelist().
    return self.messagelist[uid]['flags']
def savemessage_getnewheader(self, content):
    """Generate a (headername, headervalue) pair that is effectively
    unique, for locating a just-appended message on the server."""
    crc = str(binascii.crc32(content)).replace('-', 'x')
    headername = 'X-OfflineIMAP-%s-%s-%s' % (
        crc,
        binascii.hexlify(self.repository.getname()),
        binascii.hexlify(self.getname()))
    randompart = str(self.randomgenerator.random()).replace('.', '')
    headervalue = '%d-%s-v%s' % (long(time.time()), randompart, versionstr)
    return (headername, headervalue)
def savemessage_addheader(self, content, headername, headervalue):
    """Insert 'headername: headervalue' as the first header of *content*
    (a message using CRLF line endings) and return the new text."""
    ui = UIBase.getglobalui()
    ui.debug('imap',
             'savemessage_addheader: called to add %s: %s' % (headername,
                                                              headervalue))
    insertionpoint = content.find("\r\n")
    ui.debug('imap', 'savemessage_addheader: insertionpoint = %d' % insertionpoint)
    if insertionpoint == 0 or insertionpoint == -1:
        newline = ''
        insertionpoint = 0
    else:
        newline = "\r\n"
    # Fix: compute the leader *after* normalising insertionpoint.
    # Previously, when find() returned -1, content[0:-1] silently chopped
    # the last character and the whole body was then duplicated by the
    # trailer below.
    leader = content[0:insertionpoint]
    ui.debug('imap', 'savemessage_addheader: leader = %s' % repr(leader))
    newline += "%s: %s" % (headername, headervalue)
    ui.debug('imap', 'savemessage_addheader: newline = ' + repr(newline))
    trailer = content[insertionpoint:]
    ui.debug('imap', 'savemessage_addheader: trailer = ' + repr(trailer))
    return leader + newline + trailer
def savemessage_searchforheader(self, imapobj, headername, headervalue):
    """Return the UID of the message carrying the given unique header.

    Prefers the APPENDUID untagged response (RFC 4315 UIDPLUS) when the
    server sent one; otherwise falls back to UID SEARCH.  Returns 0 when
    the UID cannot be determined.
    """
    if imapobj.untagged_responses.has_key('APPENDUID'):
        # Server supports UIDPLUS and told us the new UID directly.
        return long(imapobj.untagged_responses['APPENDUID'][-1].split(' ')[1])
    ui = UIBase.getglobalui()
    ui.debug('imap', 'savemessage_searchforheader called for %s: %s' % \
             (headername, headervalue))
    # Now find the UID it got.
    headervalue = imapobj._quote(headervalue)
    try:
        matchinguids = imapobj.uid('search', 'HEADER', headername, headervalue)[1][0]
    except imapobj.error, err:
        # IMAP server doesn't implement search or had a problem.
        ui.debug('imap', "savemessage_searchforheader: got IMAP error '%s' while attempting to UID SEARCH for message with header %s" % (err, headername))
        return 0
    ui.debug('imap', 'savemessage_searchforheader got initial matchinguids: ' + repr(matchinguids))
    if matchinguids == '':
        ui.debug('imap', "savemessage_searchforheader: UID SEARCH for message with header %s yielded no results" % headername)
        return 0
    matchinguids = matchinguids.split(' ')
    ui.debug('imap', 'savemessage_searchforheader: matchinguids now ' + \
             repr(matchinguids))
    # The header value is supposed to be unique, so anything other than
    # exactly one match is an error.
    if len(matchinguids) != 1 or matchinguids[0] == None:
        raise ValueError, "While attempting to find UID for message with header %s, got wrong-sized matchinguids of %s" % (headername, str(matchinguids))
    matchinguids.sort()
    return long(matchinguids[0])
def savemessage(self, uid, content, flags, rtime):
    """APPEND *content* to this folder and return the UID the server
    assigned, or 0 when it could not be determined.

    The *uid* argument is ignored: this backend always lets the server
    pick a new UID, which is recovered via savemessage_searchforheader().
    """
    imapobj = self.imapserver.acquireconnection()
    ui = UIBase.getglobalui()
    ui.debug('imap', 'savemessage: called')
    try:
        try:
            imapobj.select(self.getfullname()) # Needed for search
        except imapobj.readonly:
            ui.msgtoreadonly(self, uid, content, flags)
            # Return indicating message taken, but no UID assigned.
            # Fudge it.
            return 0
        # This backend always assigns a new uid, so the uid arg is ignored.
        # In order to get the new uid, we need to save off the message ID.
        message = rfc822.Message(StringIO(content))
        datetuple_msg = rfc822.parsedate(message.getheader('Date'))
        # Will be None if missing or not in a valid format.
        # If time isn't known
        if rtime == None and datetuple_msg == None:
            datetuple = time.localtime()
        elif rtime == None:
            datetuple = datetuple_msg
        else:
            datetuple = time.localtime(rtime)
        try:
            if datetuple[0] < 1981:
                raise ValueError
            # Check for invalid date
            datetuple_check = time.localtime(time.mktime(datetuple))
            if datetuple[:2] != datetuple_check[:2]:
                raise ValueError
            # This could raise a value error if it's not a valid format.
            date = imaplib.Time2Internaldate(datetuple)
        except (ValueError, OverflowError):
            # Argh, sometimes it's a valid format but year is 0102
            # or something. Argh. It seems that Time2Internaldate
            # will raise a ValueError if the year is 0102 but not 1902,
            # but some IMAP servers nonetheless choke on 1902.
            date = imaplib.Time2Internaldate(time.localtime())
        ui.debug('imap', 'savemessage: using date ' + str(date))
        # Normalise bare LFs to CRLF, as APPEND requires.
        content = re.sub("(?<!\r)\n", "\r\n", content)
        ui.debug('imap', 'savemessage: initial content is: ' + repr(content))
        # Inject a unique header so the new UID can be found afterwards
        # even on servers without APPENDUID support.
        (headername, headervalue) = self.savemessage_getnewheader(content)
        ui.debug('imap', 'savemessage: new headers are: %s: %s' % \
                 (headername, headervalue))
        content = self.savemessage_addheader(content, headername,
                                             headervalue)
        ui.debug('imap', 'savemessage: new content is: ' + repr(content))
        ui.debug('imap', 'savemessage: new content length is ' + \
                 str(len(content)))
        assert(imapobj.append(self.getfullname(),
                              imaputil.flagsmaildir2imap(flags),
                              date, content)[0] == 'OK')
        # Checkpoint. Let it write out the messages, etc.
        assert(imapobj.check()[0] == 'OK')
        # Keep trying until we get the UID.
        ui.debug('imap', 'savemessage: first attempt to get new UID')
        uid = self.savemessage_searchforheader(imapobj, headername,
                                               headervalue)
        # See docs for savemessage in Base.py for explanation of this and other return values
        if uid <= 0:
            ui.debug('imap', 'savemessage: first attempt to get new UID failed. Going to run a NOOP and try again.')
            assert(imapobj.noop()[0] == 'OK')
            uid = self.savemessage_searchforheader(imapobj, headername,
                                                   headervalue)
    finally:
        self.imapserver.releaseconnection(imapobj)
    if uid: # avoid UID FETCH 0 crash happening later on
        self.messagelist[uid] = {'uid': uid, 'flags': flags}
    ui.debug('imap', 'savemessage: returning %d' % uid)
    return uid
def savemessageflags(self, uid, flags):
    """Store *flags* (maildir format) for message *uid* on the server,
    then mirror whatever the server reports back into messagelist."""
    imapobj = self.imapserver.acquireconnection()
    try:
        try:
            imapobj.select(self.getfullname())
        except imapobj.readonly:
            UIBase.getglobalui().flagstoreadonly(self, [uid], flags)
            return
        result = imapobj.uid('store', '%d' % uid, 'FLAGS',
                             imaputil.flagsmaildir2imap(flags))
        # Fix: the failure message previously referenced the undefined
        # name 'r', which raised NameError instead of the intended
        # AssertionError when the STORE failed.
        assert result[0] == 'OK', 'Error with store: ' + '. '.join(result[1])
    finally:
        self.imapserver.releaseconnection(imapobj)
    result = result[1][0]
    if not result:
        # Server returned no untagged FETCH; trust the flags we set.
        self.messagelist[uid]['flags'] = flags
    else:
        # Otherwise record the server's authoritative flag list.
        flags = imaputil.flags2hash(imaputil.imapsplit(result)[1])['FLAGS']
        self.messagelist[uid]['flags'] = imaputil.flagsimap2maildir(flags)
def addmessageflags(self, uid, flags):
    # Single-UID convenience wrapper.
    self.addmessagesflags([uid], flags)

def addmessagesflags_noconvert(self, uidlist, flags):
    # '+' adds the given flags on the server and in our cache.
    self.processmessagesflags('+', uidlist, flags)

def addmessagesflags(self, uidlist, flags):
    """This is here for the sake of UIDMaps.py -- deletemessages must
    add flags and get a converted UID, and if we don't have noconvert,
    then UIDMaps will try to convert it twice."""
    self.addmessagesflags_noconvert(uidlist, flags)

def deletemessageflags(self, uid, flags):
    # Single-UID convenience wrapper.
    self.deletemessagesflags([uid], flags)

def deletemessagesflags(self, uidlist, flags):
    # '-' removes the given flags on the server and in our cache.
    self.processmessagesflags('-', uidlist, flags)
def processmessagesflags(self, operation, uidlist, flags):
    """Apply a flag change ('+' adds, '-' removes) to every UID in
    *uidlist* on the server, and mirror the result in messagelist."""
    if len(uidlist) > 101:
        # Hack for those IMAP servers with a limited line length:
        # recurse in batches of 100 UIDs.
        self.processmessagesflags(operation, uidlist[:100], flags)
        self.processmessagesflags(operation, uidlist[100:], flags)
        return
    imapobj = self.imapserver.acquireconnection()
    try:
        try:
            imapobj.select(self.getfullname())
        except imapobj.readonly:
            UIBase.getglobalui().flagstoreadonly(self, uidlist, flags)
            return
        r = imapobj.uid('store',
                        imaputil.listjoin(uidlist),
                        operation + 'FLAGS',
                        imaputil.flagsmaildir2imap(flags))
        assert r[0] == 'OK', 'Error with store: ' + '. '.join(r[1])
        r = r[1]
    finally:
        self.imapserver.releaseconnection(imapobj)
    # Some IMAP servers do not always return a result. Therefore,
    # only update the ones that it talks about, and manually fix
    # the others.
    needupdate = copy(uidlist)
    for result in r:
        if result == None:
            # Compensate for servers that don't return anything from
            # STORE.
            continue
        attributehash = imaputil.flags2hash(imaputil.imapsplit(result)[1])
        if not ('UID' in attributehash and 'FLAGS' in attributehash):
            # Compensate for servers that don't return a UID attribute.
            continue
        lflags = attributehash['FLAGS']
        uid = long(attributehash['UID'])
        self.messagelist[uid]['flags'] = imaputil.flagsimap2maildir(lflags)
        try:
            needupdate.remove(uid)
        except ValueError: # Let it slide if it's not in the list
            pass
    # Anything the server stayed silent about: patch our cache by hand.
    for uid in needupdate:
        if operation == '+':
            for flag in flags:
                if not flag in self.messagelist[uid]['flags']:
                    self.messagelist[uid]['flags'].append(flag)
                self.messagelist[uid]['flags'].sort()
        elif operation == '-':
            for flag in flags:
                if flag in self.messagelist[uid]['flags']:
                    self.messagelist[uid]['flags'].remove(flag)
def deletemessage(self, uid):
    # Single-UID convenience wrapper.
    self.deletemessages_noconvert([uid])

def deletemessages(self, uidlist):
    self.deletemessages_noconvert(uidlist)

def deletemessages_noconvert(self, uidlist):
    """Mark *uidlist* deleted (maildir 'T' flag), optionally EXPUNGE,
    and drop the messages from messagelist."""
    # Weed out ones not in self.messagelist
    uidlist = [uid for uid in uidlist if uid in self.messagelist]
    if not len(uidlist):
        return
    self.addmessagesflags_noconvert(uidlist, ['T'])
    imapobj = self.imapserver.acquireconnection()
    try:
        try:
            imapobj.select(self.getfullname())
        except imapobj.readonly:
            UIBase.getglobalui().deletereadonly(self, uidlist)
            return
        if self.expunge:
            # Only physically remove messages when the repository asked
            # for expunges.
            assert(imapobj.expunge()[0] == 'OK')
    finally:
        self.imapserver.releaseconnection(imapobj)
    for uid in uidlist:
        del self.messagelist[uid]
| gpl-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.