repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
dewadg/mako-kernel | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# Require exactly one argument: the object file whose unwind info we check.
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)
# Allow overriding the readelf binary (e.g. a cross-toolchain one) via env.
readelf = os.getenv("READELF", "readelf")
# Matches a "readelf -u" function header line: "<name>: [0xstart-0xend]".
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
# Matches an unwind region record carrying a region length ("rlen=N").
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
if slots != rlen_sum:
global num_errors
num_errors += 1
if not func: func = "[%#x-%#x]" % (start, end)
print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
return
# Running totals for the summary printed at the end.
num_funcs = 0
num_errors = 0
func = False
slots = 0
rlen_sum = 0
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        # New function header: first validate the previous function's totals.
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # ia64: 3 instruction slots per 16-byte bundle.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        # Accumulate region lengths until the next function header.
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# Validate the last function seen (no trailing header triggers it).
check_func(func, slots, rlen_sum)
if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err = "errors"
    else:
        err = "error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
| gpl-2.0 |
radecker/Etch-A-Pic | TestProject/MotorTest.py | 1 | 4427 |
import sys
import time
import RPi.GPIO as GPIO
delay = 0.001  # pause (seconds) between step pulses; effectively sets motor speed
class Motor:
    # NOTE(review): the function definitions below live in the class body but
    # take no `self` parameter, and several use `global` for names that are
    # class attributes (StepPin1 etc.), which are NOT module globals -- as
    # written those lookups would raise NameError at call time. Verify how
    # this class is actually invoked before relying on it.

    # Use BCM GPIO references
    # instead of physical pin numbers
    GPIO.setmode(GPIO.BCM)
    # Define GPIO signals to use
    # Physical pins 35, 36, 37, 38, 29, 31, 32, 33, 13, 15, 16, 18
    # GPIO19, GPIO16, GPIO26, GPIO20, GPIO5, GPIO6, GPIO12, GPIO13, GPIO27, GPIO22, GPIO23, GPIO24
    StepPins = [19,16,26,20,5,6,12,13,27,22,23,24]
    # Set all pins as output (runs once, at class-definition time)
    for pin in StepPins:
        print "Setup pins"
        GPIO.setup(pin,GPIO.OUT)
        GPIO.output(pin, False)
    # Pin assignments for the two stepper drivers.
    StepPin1 = 19
    DirPin1 = 16
    StepPin2 = 26
    DirPin2 = 20
    Step = 1.0
    #Dir = 1 # Set to 1 or 2 for clockwise
    # Set to -1 or -2 for anti-clockwise

    def rotate(steps, speed, DirPin, StepPin):
        """Pulse StepPin abs(steps) times; the sign of steps sets DirPin."""
        if (steps > 0):
            dir = True
        else:
            dir = False
        steps = int(abs(steps))
        GPIO.output(DirPin,dir)
        # `speed` is currently unused: the pulse delay is the module-level
        # `delay` constant, not derived from `speed`.
        #delay = float(1/(speed*1000))
        for i in range(0, steps):
            GPIO.output(StepPin, True)
            #time.sleep(delay)
            GPIO.output(StepPin, False)
            time.sleep(delay)

    def rotate2(steps1, steps2, speed, DirPin1, StepPin1, DirPin2, StepPin2):
        """Drive both motors together; signs of steps1/steps2 set direction.

        NOTE(review): only `steps1` controls the pulse count -- the magnitude
        of `steps2` is ignored (its abs() is commented out below), so motor 2
        always receives steps1 pulses. Confirm this is intended.
        """
        if (steps1 > 0):
            dir = True
        else:
            dir = False
        if (steps2 > 0):
            dir2 = True
        else:
            dir2 = False
        steps1 = abs(steps1)
        #steps2 = abs(steps2)
        GPIO.output(DirPin1,dir)
        GPIO.output(DirPin2,dir2)
        #delay = (1/(speed*1000)) * 70
        for i in range(0, steps1):
            GPIO.output(StepPin1, True)
            GPIO.output(StepPin2, True)
            GPIO.output(StepPin1, False)
            GPIO.output(StepPin2, False)
            time.sleep(delay)

    def rotateDeg(deg, speed, DirPin, StepPin):
        """Rotate by `deg` degrees (0.225 degrees per step for this motor)."""
        if (deg > 0):
            dir = True
        else:
            dir = False
        deg = abs(deg)
        steps = int(deg/0.225)
        GPIO.output(DirPin,dir)
        for i in range(0, steps):
            GPIO.output(StepPin, True)
            time.sleep(delay)
            GPIO.output(StepPin, False)
            time.sleep(delay)

    # NOTE(review): rotate() is declared as (steps, speed, DirPin, StepPin),
    # but every wrapper below passes (steps, speed, StepPin*, DirPin*) --
    # the pin arguments appear swapped. Confirm against the actual wiring
    # before changing; the motors may have been wired to compensate.
    def left(steps, speed):
        global StepPin1, DirPin1
        rotate(steps, speed, StepPin1, DirPin1) #TODO Change sign of steps, so that it reflects actual movement of motor

    def right(steps, speed):
        global StepPin1, DirPin1
        rotate(steps, speed, StepPin1, DirPin1) #TODO Change sign of steps, so that it reflects actual movement of motor

    def down(steps, speed):
        global StepPin2, DirPin2
        rotate(steps, speed, StepPin2, DirPin2) #TODO Change sign of steps, so that it reflects actual movement of motor

    def up(steps, speed):
        global StepPin2, DirPin2
        rotate(steps, speed, StepPin2, DirPin2) #TODO Change sign of steps, so that it reflects actual movement of motor

    # NOTE(review): same apparent pin-order swap for rotate2(), whose
    # signature is (steps1, steps2, speed, DirPin1, StepPin1, DirPin2, StepPin2).
    def upLeft(steps1, steps2, speed):
        global StepPin1, DirPin1, StepPin2, DirPin2
        rotate2(steps1,steps2, speed, StepPin1, DirPin1, StepPin2, DirPin2) #TODO Change sign of steps, so that it reflects actual movement of motor

    def upRight(steps1, steps2, speed):
        global StepPin1, DirPin1, StepPin2, DirPin2
        rotate2(steps1,steps2, speed, StepPin1, DirPin1, StepPin2, DirPin2) #TODO Change sign of steps, so that it reflects actual movement of motor

    def downLeft(steps1, steps2, speed):
        global StepPin1, DirPin1, StepPin2, DirPin2
        rotate2(steps1,steps2, speed, StepPin1, DirPin1, StepPin2, DirPin2) #TODO Change sign of steps, so that it reflects actual movement of motor

    def downRight(steps1, steps2, speed):
        global StepPin1, DirPin1, StepPin2, DirPin2
        rotate2(steps1,steps2, speed, StepPin1, DirPin1, StepPin2, DirPin2) #TODO Change sign of steps, so that it reflects actual movement of motor
#rotate(100000,1000,16,19)

# NOTE(review): the block below is disabled legacy demo code, kept alive only
# as a bare module-level string literal. It references Seq, StepCount and
# StepDir, which are not defined anywhere in this file, so it would fail if
# re-enabled as-is.
'''
# Read wait time from command line
if len(sys.argv)>1:
    WaitTime = int(sys.argv[1])/float(1000)
else:
    WaitTime = 10/float(1000)
# Initialise variables
StepCounter = 0
# Start main loop
while True:
    print StepCounter,
    print Seq[StepCounter]
    for pin in range(0,4):
        xpin=StepPins[pin]# Get GPIO
        if Seq[StepCounter][pin]!=0:
            print " Enable GPIO %i" %(xpin)
            GPIO.output(xpin, True)
        else:
            GPIO.output(xpin, False)
    StepCounter += StepDir
    # If we reach the end of the sequence
    # start again
    if (StepCounter>=StepCount):
        StepCounter = 0
    if (StepCounter<0):
        StepCounter = StepCount+StepDir
    # Wait before moving on
    time.sleep(WaitTime)
'''
mmbtba/odoo | openerp/addons/base/res/res_lang.py | 196 | 12383 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import locale
from locale import localeconv
import logging
import re
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class lang(osv.osv):
    """Language definitions: locale codes plus the date/time and number
    formatting data used to render values per-language."""
    _name = "res.lang"
    _description = "Languages"

    # strftime directives we refuse in date/time formats; %y is removed from
    # the disallowed set because it is in fact allowed, just not good practice.
    _disallowed_datetime_patterns = tools.DATETIME_FORMATS_MAP.keys()
    _disallowed_datetime_patterns.remove('%y')  # this one is in fact allowed, just not good practice

    def install_lang(self, cr, uid, **args):
        """
        This method is called from openerp/addons/base/base_data.xml to load
        some language and set it as the default for every partners. The
        language is set via tools.config by the RPC 'create' method on the
        'db' object. This is a fragile solution and something else should be
        found.
        """
        lang = tools.config.get('lang')
        if not lang:
            return False
        lang_ids = self.search(cr, uid, [('code','=', lang)])
        if not lang_ids:
            self.load_lang(cr, uid, lang)
        ir_values_obj = self.pool.get('ir.values')
        default_value = ir_values_obj.get(cr, uid, 'default', False, ['res.partner'])
        if not default_value:
            ir_values_obj.set(cr, uid, 'default', False, 'lang', ['res.partner'], lang)
        return True

    def load_lang(self, cr, uid, lang, lang_name=None):
        """Create a res.lang record for `lang`, taking date/time formats and
        number separators from the system locale when one is available.

        Returns the id of the created record, or False if creation failed.
        """
        # create the language with locale information
        fail = True
        iso_lang = tools.get_iso_codes(lang)
        for ln in tools.get_locales(lang):
            try:
                locale.setlocale(locale.LC_ALL, str(ln))
                fail = False
                break
            except locale.Error:
                continue
        if fail:
            lc = locale.getdefaultlocale()[0]
            msg = 'Unable to get information for locale %s. Information from the default locale (%s) have been used.'
            _logger.warning(msg, lang, lc)
        if not lang_name:
            lang_name = tools.ALL_LANGUAGES.get(lang, lang)

        def fix_xa0(s):
            """Fix badly-encoded non-breaking space Unicode character from locale.localeconv(),
            coercing to utf-8, as some platform seem to output localeconv() in their system
            encoding, e.g. Windows-1252"""
            if s == '\xa0':
                return '\xc2\xa0'
            return s

        def fix_datetime_format(format):
            """Python's strftime supports only the format directives
            that are available on the platform's libc, so in order to
            be 100% cross-platform we map to the directives required by
            the C standard (1989 version), always available on platforms
            with a C standard implementation."""
            # For some locales, nl_langinfo returns a D_FMT/T_FMT that contains
            # unsupported '%-' patterns, e.g. for cs_CZ
            format = format.replace('%-', '%')
            for pattern, replacement in tools.DATETIME_FORMATS_MAP.iteritems():
                format = format.replace(pattern, replacement)
            return str(format)

        lang_info = {
            'code': lang,
            'iso_code': iso_lang,
            'name': lang_name,
            'translatable': 1,
            'date_format' : fix_datetime_format(locale.nl_langinfo(locale.D_FMT)),
            'time_format' : fix_datetime_format(locale.nl_langinfo(locale.T_FMT)),
            'decimal_point' : fix_xa0(str(locale.localeconv()['decimal_point'])),
            'thousands_sep' : fix_xa0(str(locale.localeconv()['thousands_sep'])),
        }
        lang_id = False
        try:
            lang_id = self.create(cr, uid, lang_info)
        finally:
            # setlocale() above changed the process-wide locale: always restore it.
            tools.resetlocale()
        return lang_id

    def _check_format(self, cr, uid, ids, context=None):
        """Constraint helper: reject date/time formats that use any
        non-portable strftime directive."""
        for lang in self.browse(cr, uid, ids, context=context):
            for pattern in self._disallowed_datetime_patterns:
                if (lang.time_format and pattern in lang.time_format)\
                        or (lang.date_format and pattern in lang.date_format):
                    return False
        return True

    def _check_grouping(self, cr, uid, ids, context=None):
        """Constraint helper: `grouping` must evaluate to a sequence of ints."""
        for lang in self.browse(cr, uid, ids, context=context):
            try:
                if not all(isinstance(x, int) for x in eval(lang.grouping)):
                    return False
            except Exception:
                return False
        return True

    def _get_default_date_format(self, cursor, user, context=None):
        return '%m/%d/%Y'

    def _get_default_time_format(self, cursor, user, context=None):
        return '%H:%M:%S'

    _columns = {
        'name': fields.char('Name', required=True),
        'code': fields.char('Locale Code', size=16, required=True, help='This field is used to set/get locales for user'),
        'iso_code': fields.char('ISO code', size=16, required=False, help='This ISO code is the name of po files to use for translations'),
        'translatable': fields.boolean('Translatable'),
        'active': fields.boolean('Active'),
        'direction': fields.selection([('ltr', 'Left-to-Right'), ('rtl', 'Right-to-Left')], 'Direction', required=True),
        'date_format':fields.char('Date Format', required=True),
        'time_format':fields.char('Time Format', required=True),
        'grouping':fields.char('Separator Format', required=True,help="The Separator Format should be like [,n] where 0 < n :starting from Unit digit.-1 will end the separation. e.g. [3,2,-1] will represent 106500 to be 1,06,500;[1,2,-1] will represent it to be 106,50,0;[3] will represent it as 106,500. Provided ',' as the thousand separator in each case."),
        'decimal_point':fields.char('Decimal Separator', required=True),
        'thousands_sep':fields.char('Thousands Separator'),
    }
    _defaults = {
        'active': 1,
        'translatable': 0,
        'direction': 'ltr',
        'date_format':_get_default_date_format,
        'time_format':_get_default_time_format,
        'grouping': '[]',
        'decimal_point': '.',
        'thousands_sep': ',',
    }
    _sql_constraints = [
        ('name_uniq', 'unique (name)', 'The name of the language must be unique !'),
        ('code_uniq', 'unique (code)', 'The code of the language must be unique !'),
    ]
    _constraints = [
        (_check_format, 'Invalid date/time format directive specified. Please refer to the list of allowed directives, displayed when you edit a language.', ['time_format', 'date_format']),
        (_check_grouping, "The Separator Format should be like [,n] where 0 < n :starting from Unit digit.-1 will end the separation. e.g. [3,2,-1] will represent 106500 to be 1,06,500;[1,2,-1] will represent it to be 106,50,0;[3] will represent it as 106,500. Provided ',' as the thousand separator in each case.", ['grouping'])
    ]

    @tools.ormcache(skiparg=3)
    def _lang_data_get(self, cr, uid, lang, monetary=False):
        """Return (grouping, thousands_sep, decimal_point) for `lang`
        (a code or an id), falling back to en_US for unknown codes.
        Results are cached via ormcache; see write() for invalidation."""
        if type(lang) in (str, unicode):
            lang = self.search(cr, uid, [('code', '=', lang)]) or \
                self.search(cr, uid, [('code', '=', 'en_US')])
            lang = lang[0]
        conv = localeconv()
        lang_obj = self.browse(cr, uid, lang)
        thousands_sep = lang_obj.thousands_sep or conv[monetary and 'mon_thousands_sep' or 'thousands_sep']
        decimal_point = lang_obj.decimal_point
        grouping = lang_obj.grouping
        return grouping, thousands_sep, decimal_point

    def write(self, cr, uid, ids, vals, context=None):
        # Formatting data may change: invalidate the _lang_data_get cache.
        # The cache is model-wide, so clearing it once is enough -- the
        # previous implementation cleared it once per id, which was redundant.
        if ids:
            self._lang_data_get.clear_cache(self)
        return super(lang, self).write(cr, uid, ids, vals, context)

    def unlink(self, cr, uid, ids, context=None):
        """Delete languages, refusing en_US, the user's current language and
        any still-active language; also removes their translations."""
        if context is None:
            context = {}
        languages = self.read(cr, uid, ids, ['code','active'], context=context)
        for language in languages:
            ctx_lang = context.get('lang')
            if language['code']=='en_US':
                raise osv.except_osv(_('User Error'), _("Base Language 'en_US' can not be deleted!"))
            if ctx_lang and (language['code']==ctx_lang):
                raise osv.except_osv(_('User Error'), _("You cannot delete the language which is User's Preferred Language!"))
            if language['active']:
                raise osv.except_osv(_('User Error'), _("You cannot delete the language which is Active!\nPlease de-activate the language first."))
            # Garbage-collect the translations attached to this language.
            trans_obj = self.pool.get('ir.translation')
            trans_ids = trans_obj.search(cr, uid, [('lang','=',language['code'])], context=context)
            trans_obj.unlink(cr, uid, trans_ids, context=context)
        return super(lang, self).unlink(cr, uid, ids, context=context)

    #
    # IDS: can be a list of IDS or a list of XML_IDS
    #
    def format(self, cr, uid, ids, percent, value, grouping=False, monetary=False, context=None):
        """ Format() will return the language-specific output for float values"""
        if percent[0] != '%':
            raise ValueError("format() must be given exactly one %char format specifier")
        formatted = percent % value
        # floats and decimal ints need special action!
        if grouping:
            lang_grouping, thousands_sep, decimal_point = \
                self._lang_data_get(cr, uid, ids[0], monetary)
            eval_lang_grouping = eval(lang_grouping)
            if percent[-1] in 'eEfFgG':
                # Group only the integer part; keep the decimal part intact.
                parts = formatted.split('.')
                parts[0], _ = intersperse(parts[0], eval_lang_grouping, thousands_sep)
                formatted = decimal_point.join(parts)
            elif percent[-1] in 'diu':
                formatted = intersperse(formatted, eval_lang_grouping, thousands_sep)[0]
        return formatted
# import re, operator
# _percent_re = re.compile(r'%(?:\((?P<key>.*?)\))?'
#                          r'(?P<modifiers>[-#0-9 +*.hlL]*?)[eEfFgGdiouxXcrs%]')
# Instantiate to register the model with the ORM (old-style OpenERP API).
lang()
def split(l, counts):
    """Cut the sequence `l` into pieces whose sizes are taken from `counts`.

    A count of -1 stops the splitting; a count of 0 repeats the previous
    count until `l` is exhausted; any leftover becomes a final piece.

    >>> split("hello world", [])
    ['hello world']
    >>> split("hello world", [1])
    ['h', 'ello world']
    >>> split("hello world", [2])
    ['he', 'llo world']
    >>> split("hello world", [2,3])
    ['he', 'llo', ' world']
    >>> split("hello world", [2,3,0])
    ['he', 'llo', ' wo', 'rld']
    >>> split("hello world", [2,-1,3])
    ['he', 'llo world']
    """
    pieces = []
    repeat_size = len(l)  # size to re-use when a zero count is encountered
    for size in counts:
        if not l or size == -1:
            break
        if size == 0:
            # Repeat the previous size until nothing is left.
            while l:
                pieces.append(l[:repeat_size])
                l = l[repeat_size:]
            break
        pieces.append(l[:size])
        l = l[size:]
        repeat_size = size
    if l:
        pieces.append(l)
    return pieces
# Split a formatted number into (non-digit prefix, digit run, remainder).
intersperse_pat = re.compile('([^0-9]*)([^ ]*)(.*)')

def intersperse(string, counts, separator=''):
    """Insert `separator` into the digit part of `string`, grouping digits
    from the right according to `counts` (see split()).

    Returns a pair: (the interspersed string, number of separators inserted).
    """
    prefix, digits, suffix = intersperse_pat.match(string).groups()
    # Grouping is right-to-left: reverse, cut into groups, then flip both
    # the group order and each group's characters back.
    chunks = split(digits[::-1], counts)
    grouped = separator.join(piece[::-1] for piece in chunks[::-1])
    return prefix + grouped + suffix, (len(chunks) - 1 if chunks else 0)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jelugbo/hebs_repo | common/lib/xmodule/xmodule/tests/test_poll.py | 227 | 1133 | # -*- coding: utf-8 -*-
"""Test for Poll Xmodule functional logic."""
from xmodule.poll_module import PollDescriptor
from . import LogicTest
class PollModuleTest(LogicTest):
    """Logic tests for Poll Xmodule."""
    descriptor_class = PollDescriptor
    # Initial state: one existing "Yes" vote; the current user has not voted.
    raw_field_data = {
        'poll_answers': {'Yes': 1, 'Dont_know': 0, 'No': 0},
        'voted': False,
        'poll_answer': ''
    }

    def test_bad_ajax_request(self):
        # Make sure that answer for incorrect request is error json.
        response = self.ajax_request('bad_answer', {})
        self.assertDictEqual(response, {'error': 'Unknown Command!'})

    def test_good_ajax_request(self):
        # Make sure that ajax request works correctly: voting "No" bumps its
        # tally, the total, and records the user's answer.
        response = self.ajax_request('No', {})
        poll_answers = response['poll_answers']
        total = response['total']
        callback = response['callback']
        self.assertDictEqual(poll_answers, {'Yes': 1, 'Dont_know': 0, 'No': 1})
        self.assertEqual(total, 2)
        self.assertDictEqual(callback, {'objectName': 'Conditional'})
        self.assertEqual(self.xmodule.poll_answer, 'No')
| agpl-3.0 |
BehavioralInsightsTeam/edx-platform | openedx/core/djangoapps/site_configuration/models.py | 8 | 6027 | """
Django models for site configurations.
"""
import collections
from logging import getLogger
from django.contrib.sites.models import Site
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from jsonfield.fields import JSONField
from model_utils.models import TimeStampedModel
logger = getLogger(__name__) # pylint: disable=invalid-name
class SiteConfiguration(models.Model):
    """
    Per-site configuration store. These configuration override OpenEdx
    configurations and settings, e.g. site name, logo image, favicon etc.

    Fields:
        site (OneToOneField): one to one field relating each configuration to a single site
        enabled (BooleanField): whether this configuration is taken into account
        values (JSONField): json field to store configurations for a site
    """
    site = models.OneToOneField(Site, related_name='configuration', on_delete=models.CASCADE)
    enabled = models.BooleanField(default=False, verbose_name="Enabled")
    values = JSONField(
        null=False,
        blank=True,
        load_kwargs={'object_pairs_hook': collections.OrderedDict}
    )

    def __unicode__(self):
        return u"<SiteConfiguration: {site} >".format(site=self.site)

    def __repr__(self):
        return self.__unicode__()

    def get_value(self, name, default=None):
        """
        Look up a single configuration value by key, logging when the
        configuration is disabled or the stored payload is malformed.

        Args:
            name (str): Name of the key for which to return configuration value.
            default: value to return if the key is missing or the
                configuration is not enabled.

        Returns:
            The configured value for `name`, or `default`.
        """
        if not self.enabled:
            logger.info("Site Configuration is not enabled for site (%s).", self.site)
            return default
        try:
            return self.values.get(name, default)  # pylint: disable=no-member
        except AttributeError as error:
            # `values` is not dict-like: the stored JSON payload is invalid.
            logger.exception('Invalid JSON data. \n [%s]', error)
        return default

    @classmethod
    def get_value_for_org(cls, org, name, default=None):
        """
        Return the configuration value for `name` from the enabled site
        configuration whose 'course_org_filter' contains the given org.

        Args:
            org (str): course org used to select the matching configuration.
            name (str): Name of the key for which to return configuration value.
            default: value to return when no matching configuration defines the key.

        Returns:
            Configuration value for the given key, or `default`.
        """
        for configuration in cls.objects.filter(values__contains=org, enabled=True).all():
            org_filter = configuration.get_value('course_org_filter', [])
            # 'course_org_filter' may be a single org string or a list of
            # orgs; normalize before membership testing.
            if not isinstance(org_filter, list):
                org_filter = [org_filter]
            if org in org_filter:
                return configuration.get_value(name, default)
        return default

    @classmethod
    def get_all_orgs(cls):
        """
        Collect every org referenced by any enabled site configuration's
        'course_org_filter'. Useful, for example, to do filtering.

        Returns:
            A set of all organizations present in site configuration.
        """
        orgs = set()
        for configuration in cls.objects.filter(values__contains='course_org_filter', enabled=True).all():
            org_filter = configuration.get_value('course_org_filter', [])
            if isinstance(org_filter, list):
                orgs.update(org_filter)
            else:
                orgs.add(org_filter)
        return orgs

    @classmethod
    def has_org(cls, org):
        """
        Check if the given organization is present in any of the site configuration.

        Returns:
            True if given organization is present in site configurations otherwise False.
        """
        return org in cls.get_all_orgs()
class SiteConfigurationHistory(TimeStampedModel):
    """
    This is an archive table for SiteConfiguration, so that we can maintain a history of
    changes. Note that the site field is not unique in this model, compared to SiteConfiguration.

    Fields:
        site (ForeignKey): foreign-key to django Site
        enabled (BooleanField): whether the archived configuration was enabled
        values (JSONField): json field to store configurations for a site
    """
    site = models.ForeignKey(Site, related_name='configuration_histories', on_delete=models.CASCADE)
    enabled = models.BooleanField(default=False, verbose_name="Enabled")
    values = JSONField(
        null=False,
        blank=True,
        load_kwargs={'object_pairs_hook': collections.OrderedDict}
    )

    class Meta:
        # `modified`/`created` are provided by TimeStampedModel; newest first.
        get_latest_by = 'modified'
        ordering = ('-modified', '-created',)

    def __unicode__(self):
        return u"<SiteConfigurationHistory: {site}, Last Modified: {modified} >".format(
            modified=self.modified,
            site=self.site,
        )

    def __repr__(self):
        return self.__unicode__()
@receiver(post_save, sender=SiteConfiguration)
def update_site_configuration_history(sender, instance, **kwargs):  # pylint: disable=unused-argument
    """
    Add site configuration changes to site configuration history.

    Runs on every save of a SiteConfiguration row, snapshotting the saved
    state into SiteConfigurationHistory.

    Args:
        sender: sender of the signal i.e. SiteConfiguration model
        instance: SiteConfiguration instance associated with the current signal
        **kwargs: extra key word arguments
    """
    SiteConfigurationHistory.objects.create(
        site=instance.site,
        values=instance.values,
        enabled=instance.enabled,
    )
| agpl-3.0 |
UQ-UQx/edx-platform_lti | lms/djangoapps/verify_student/views.py | 8 | 43784 | """
Views for the verification flow
"""
import json
import logging
import decimal
import datetime
from collections import namedtuple
from pytz import UTC
from edxmako.shortcuts import render_to_response, render_to_string
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import (
HttpResponse, HttpResponseBadRequest,
HttpResponseRedirect, Http404
)
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.views.generic.base import View
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _, ugettext_lazy
from django.contrib.auth.decorators import login_required
from django.core.mail import send_mail
from openedx.core.djangoapps.user_api.api import profile as profile_api
from course_modes.models import CourseMode
from student.models import CourseEnrollment
from student.views import reverification_info
from shoppingcart.models import Order, CertificateItem
from shoppingcart.processors import (
get_signed_purchase_params, get_purchase_endpoint
)
from verify_student.models import (
SoftwareSecurePhotoVerification,
)
from reverification.models import MidcourseReverificationWindow
import ssencrypt
from xmodule.modulestore.exceptions import ItemNotFoundError
from opaque_keys.edx.keys import CourseKey
from .exceptions import WindowExpiredException
from xmodule.modulestore.django import modulestore
from microsite_configuration import microsite
from util.json_request import JsonResponse
from util.date_utils import get_default_time_display
log = logging.getLogger(__name__)

# Analytics event names emitted by the mid-course reverification flow.
EVENT_NAME_USER_ENTERED_MIDCOURSE_REVERIFY_VIEW = 'edx.course.enrollment.reverify.started'
EVENT_NAME_USER_SUBMITTED_MIDCOURSE_REVERIFY = 'edx.course.enrollment.reverify.submitted'
EVENT_NAME_USER_REVERIFICATION_REVIEWED_BY_SOFTWARESECURE = 'edx.course.enrollment.reverify.reviewed'
class VerifyView(View):
    """Entry point of the photo-verification flow for a verified enrollment."""

    @method_decorator(login_required)
    def get(self, request, course_id):
        """
        Displays the main verification view, which contains three separate steps:
            - Taking the standard face photo
            - Taking the id photo
            - Confirming that the photos and payment price are correct
              before proceeding to payment
        """
        upgrade = request.GET.get('upgrade', False)
        course_id = CourseKey.from_string(course_id)
        # If the user has already been verified within the given time period,
        # redirect straight to the payment -- no need to verify again.
        if SoftwareSecurePhotoVerification.user_has_valid_or_pending(request.user):
            return redirect(
                reverse('verify_student_verified',
                        kwargs={'course_id': course_id.to_deprecated_string()}) + "?upgrade={}".format(upgrade)
            )
        elif CourseEnrollment.enrollment_mode_for_user(request.user, course_id) == ('verified', True):
            # Already actively enrolled in verified mode: nothing to do here.
            return redirect(reverse('dashboard'))
        else:
            # If they haven't completed a verification attempt, we have to
            # restart with a new one. We can't reuse an older one because we
            # won't be able to show them their encrypted photo_id -- it's easier
            # bookkeeping-wise just to start over.
            progress_state = "start"

        # we prefer professional over verify
        current_mode = CourseMode.verified_mode_for_course(course_id)

        # if the course doesn't have a verified mode, we want to kick them
        # from the flow
        if not current_mode:
            return redirect(reverse('dashboard'))
        # Use the donation amount the user already chose this session, if any;
        # otherwise fall back to the mode's minimum price.
        if course_id.to_deprecated_string() in request.session.get("donation_for_course", {}):
            chosen_price = request.session["donation_for_course"][unicode(course_id)]
        else:
            chosen_price = current_mode.min_price

        course = modulestore().get_course(course_id)
        if current_mode.suggested_prices != '':
            # Comma-separated list of suggested donation amounts.
            suggested_prices = [
                decimal.Decimal(price)
                for price in current_mode.suggested_prices.split(",")
            ]
        else:
            suggested_prices = []

        context = {
            "progress_state": progress_state,
            "user_full_name": request.user.profile.name,
            "course_id": course_id.to_deprecated_string(),
            "course_modes_choose_url": reverse('course_modes_choose', kwargs={'course_id': course_id.to_deprecated_string()}),
            "course_name": course.display_name_with_default,
            "course_org": course.display_org_with_default,
            "course_num": course.display_number_with_default,
            "purchase_endpoint": get_purchase_endpoint(),
            "suggested_prices": suggested_prices,
            "currency": current_mode.currency.upper(),
            "chosen_price": chosen_price,
            "min_price": current_mode.min_price,
            "upgrade": upgrade == u'True',
            "can_audit": CourseMode.mode_for_course(course_id, 'audit') is not None,
            "modes_dict": CourseMode.modes_for_course_dict(course_id),
            "retake": request.GET.get('retake', False),
        }
        return render_to_response('verify_student/photo_verification.html', context)
class VerifiedView(View):
    """
    View that gets shown once the user has already gone through the
    verification flow
    """

    @method_decorator(login_required)
    def get(self, request, course_id):
        """
        Handle the case where we have a get request
        """
        upgrade = request.GET.get('upgrade', False)
        course_id = CourseKey.from_string(course_id)
        # Already actively enrolled in verified mode: nothing left to do.
        if CourseEnrollment.enrollment_mode_for_user(request.user, course_id) == ('verified', True):
            return redirect(reverse('dashboard'))
        modes_dict = CourseMode.modes_for_course_dict(course_id)

        # we prefer professional over verify
        current_mode = CourseMode.verified_mode_for_course(course_id)

        # if the course doesn't have a verified mode, we want to kick them
        # from the flow
        if not current_mode:
            return redirect(reverse('dashboard'))
        # Use the donation amount the user already chose this session, if any;
        # otherwise fall back to the mode's minimum price.
        if course_id.to_deprecated_string() in request.session.get("donation_for_course", {}):
            chosen_price = request.session["donation_for_course"][unicode(course_id)]
        else:
            chosen_price = current_mode.min_price

        course = modulestore().get_course(course_id)
        context = {
            "course_id": course_id.to_deprecated_string(),
            "course_modes_choose_url": reverse('course_modes_choose', kwargs={'course_id': course_id.to_deprecated_string()}),
            "course_name": course.display_name_with_default,
            "course_org": course.display_org_with_default,
            "course_num": course.display_number_with_default,
            "purchase_endpoint": get_purchase_endpoint(),
            "currency": current_mode.currency.upper(),
            "chosen_price": chosen_price,
            "create_order_url": reverse("verify_student_create_order"),
            "upgrade": upgrade == u'True',
            "can_audit": "audit" in modes_dict,
            "modes_dict": modes_dict,
        }
        return render_to_response('verify_student/verified.html', context)
class PayAndVerifyView(View):
"""View for the "verify and pay" flow.
This view is somewhat complicated, because the user
can enter it from a number of different places:
* From the "choose your track" page.
* After completing payment.
* From the dashboard in order to complete verification.
* From the dashboard in order to upgrade to a verified track.
The page will display different steps and requirements
depending on:
* Whether the user has submitted a photo verification recently.
* Whether the user has paid for the course.
* How the user reached the page (mostly affects messaging)
We are also super-paranoid about how users reach this page.
If they somehow aren't enrolled, or the course doesn't exist,
or they've unenrolled, or they've already paid/verified,
... then we try to redirect them to the page with the
most appropriate messaging (including the dashboard).
Note that this page does NOT handle re-verification
(photo verification that was denied or had an error);
that is handled by the "reverify" view.
"""
# Step definitions
#
# These represent the numbered steps a user sees in
# the verify / payment flow.
#
# Steps can either be:
# - displayed or hidden
# - complete or incomplete
#
# For example, when a user enters the verification/payment
# flow for the first time, the user will see steps
# for both payment and verification. As the user
# completes these steps (for example, submitting a photo)
# the steps will be marked "complete".
#
# If a user has already verified for another course,
# then the verification steps will be hidden,
# since the user has already completed them.
#
# If a user re-enters the flow from another application
# (for example, after completing payment through
# a third-party payment processor), then the user
# will resume the flow at an intermediate step.
#
# Identifiers for the individual steps of the pay-and-verify wizard,
# listed here in the order the user encounters them.
INTRO_STEP = 'intro-step'
MAKE_PAYMENT_STEP = 'make-payment-step'
PAYMENT_CONFIRMATION_STEP = 'payment-confirmation-step'
FACE_PHOTO_STEP = 'face-photo-step'
ID_PHOTO_STEP = 'id-photo-step'
REVIEW_PHOTOS_STEP = 'review-photos-step'
ENROLLMENT_CONFIRMATION_STEP = 'enrollment-confirmation-step'

# Every step, in display order; _display_steps() filters this list
# down based on what the user has already completed.
ALL_STEPS = [
    INTRO_STEP,
    MAKE_PAYMENT_STEP,
    PAYMENT_CONFIRMATION_STEP,
    FACE_PHOTO_STEP,
    ID_PHOTO_STEP,
    REVIEW_PHOTOS_STEP,
    ENROLLMENT_CONFIRMATION_STEP
]

# Steps hidden once the user has paid (unless payment steps are forced
# visible via always_show_payment).
PAYMENT_STEPS = [
    MAKE_PAYMENT_STEP,
    PAYMENT_CONFIRMATION_STEP
]

# Steps hidden once the user already has a valid or pending
# photo verification.
VERIFICATION_STEPS = [
    FACE_PHOTO_STEP,
    ID_PHOTO_STEP,
    REVIEW_PHOTOS_STEP,
    ENROLLMENT_CONFIRMATION_STEP
]

# These steps can be skipped using the ?skip-first-step GET param
SKIP_STEPS = [
    INTRO_STEP,
]

# Per-step display metadata: a human-readable title (lazily translated)
# and the name of the client-side template used to render the step.
Step = namedtuple(
    'Step',
    [
        'title',
        'template_name'
    ]
)

STEP_INFO = {
    INTRO_STEP: Step(
        title=ugettext_lazy("Intro"),
        template_name="intro_step"
    ),
    MAKE_PAYMENT_STEP: Step(
        title=ugettext_lazy("Make payment"),
        template_name="make_payment_step"
    ),
    PAYMENT_CONFIRMATION_STEP: Step(
        title=ugettext_lazy("Payment confirmation"),
        template_name="payment_confirmation_step"
    ),
    FACE_PHOTO_STEP: Step(
        title=ugettext_lazy("Take photo"),
        template_name="face_photo_step"
    ),
    ID_PHOTO_STEP: Step(
        title=ugettext_lazy("Take a photo of your ID"),
        template_name="id_photo_step"
    ),
    REVIEW_PHOTOS_STEP: Step(
        title=ugettext_lazy("Review your info"),
        template_name="review_photos_step"
    ),
    ENROLLMENT_CONFIRMATION_STEP: Step(
        title=ugettext_lazy("Enrollment confirmation"),
        template_name="enrollment_confirmation_step"
    ),
}

# Messages
#
# Depending on how the user reached the page,
# we will display different text messaging.
# For example, we show users who are upgrading
# slightly different copy than users who are verifying
# for the first time.
#
FIRST_TIME_VERIFY_MSG = 'first-time-verify'
VERIFY_NOW_MSG = 'verify-now'
VERIFY_LATER_MSG = 'verify-later'
UPGRADE_MSG = 'upgrade'
PAYMENT_CONFIRMATION_MSG = 'payment-confirmation'

# Requirements
#
# These explain to the user what he or she
# will need to successfully pay and/or verify.
#
# These are determined by the steps displayed
# to the user; for example, if the user does not
# need to complete the verification steps,
# then the photo ID and webcam requirements are hidden.
#
ACCOUNT_ACTIVATION_REQ = "account-activation-required"
PHOTO_ID_REQ = "photo-id-required"
WEBCAM_REQ = "webcam-required"

# Maps a step to the requirements the user must satisfy to complete it.
STEP_REQUIREMENTS = {
    ID_PHOTO_STEP: [PHOTO_ID_REQ, WEBCAM_REQ],
    FACE_PHOTO_STEP: [WEBCAM_REQ],
}
@method_decorator(login_required)
def get(
    self, request, course_id,
    always_show_payment=False,
    current_step=None,
    message=FIRST_TIME_VERIFY_MSG
):
    """Render the pay/verify requirements page.

    Arguments:
        request (HttpRequest): The request object.
        course_id (unicode): The ID of the course the user is trying
            to enroll in.

    Keyword Arguments:
        always_show_payment (bool): If True, show the payment steps
            even if the user has already paid.  This is useful
            for users returning to the flow after paying.
        current_step (string): The step of the flow to resume at,
            or None to start from the first visible step.
        message (string): The messaging to display (one of the
            *_MSG constants on this class).

    Returns:
        HttpResponse

    Raises:
        Http404: The course does not exist or does not
            have a verified mode.
    """
    # Parse the course key
    # The URL regex should guarantee that the key format is valid.
    course_key = CourseKey.from_string(course_id)
    course = modulestore().get_course(course_key)

    # Verify that the course exists and has a verified mode
    if course is None:
        log.warn(u"No course specified for verification flow request.")
        raise Http404

    # Verify that the course has a verified mode
    course_mode = CourseMode.verified_mode_for_course(course_key)
    if course_mode is None:
        log.warn(
            u"No verified course mode found for course '{course_id}' for verification flow request"
            .format(course_id=course_id)
        )
        raise Http404

    log.info(
        u"Entering verified workflow for user '{user}', course '{course_id}', with current step '{current_step}'."
        .format(user=request.user, course_id=course_id, current_step=current_step)
    )

    # Check whether the user has verified, paid, and enrolled.
    # A user is considered "paid" if he or she has an enrollment
    # with a paid course mode (such as "verified").
    # For this reason, every paid user is enrolled, but not
    # every enrolled user is paid.
    already_verified = self._check_already_verified(request.user)
    already_paid, is_enrolled = self._check_enrollment(request.user, course_key)

    # Redirect the user to a more appropriate page if the
    # messaging won't make sense based on the user's
    # enrollment / payment / verification status.
    redirect_response = self._redirect_if_necessary(
        message,
        already_verified,
        already_paid,
        is_enrolled,
        course_key
    )
    if redirect_response is not None:
        return redirect_response

    # Hide steps the user has already completed.
    display_steps = self._display_steps(
        always_show_payment,
        already_verified,
        already_paid
    )
    requirements = self._requirements(display_steps, request.user.is_active)

    # Default to the first visible step if the caller did not resume
    # the flow at a specific one.
    if current_step is None:
        current_step = display_steps[0]['name']

    # Allow the caller to skip the first page
    # This is useful if we want the user to be able to
    # use the "back" button to return to the previous step.
    # This parameter should only work for known skip-able steps
    if request.GET.get('skip-first-step') and current_step in self.SKIP_STEPS:
        display_step_names = [step['name'] for step in display_steps]
        current_step_idx = display_step_names.index(current_step)
        if (current_step_idx + 1) < len(display_steps):
            current_step = display_steps[current_step_idx + 1]['name']

    # Only link back into courseware once the course has started.
    courseware_url = ""
    if not course.start or course.start < datetime.datetime.today().replace(tzinfo=UTC):
        courseware_url = reverse(
            'course_root',
            kwargs={'course_id': unicode(course_key)}
        )

    full_name = (
        request.user.profile.name
        if request.user.profile.name
        else ""
    )

    # If the user set a contribution amount on another page,
    # use that amount to pre-fill the price selection form.
    contribution_amount = request.session.get(
        'donation_for_course', {}
    ).get(unicode(course_key), '')

    # Remember whether the user is upgrading
    # so we can fire an analytics event upon payment.
    request.session['attempting_upgrade'] = (message == self.UPGRADE_MSG)

    # Render the top-level page
    context = {
        'contribution_amount': contribution_amount,
        'course': course,
        'course_key': unicode(course_key),
        'course_mode': course_mode,
        'courseware_url': courseware_url,
        'current_step': current_step,
        'disable_courseware_js': True,
        'display_steps': display_steps,
        'is_active': json.dumps(request.user.is_active),
        'message_key': message,
        'platform_name': settings.PLATFORM_NAME,
        'purchase_endpoint': get_purchase_endpoint(),
        'requirements': requirements,
        'user_full_name': full_name,
        'verification_deadline': (
            get_default_time_display(course_mode.expiration_datetime)
            if course_mode.expiration_datetime else ""
        ),
    }
    return render_to_response("verify_student/pay_and_verify.html", context)
def _redirect_if_necessary(
    self,
    message,
    already_verified,
    already_paid,
    is_enrolled,
    course_key
):
    """Send the user to a page matching his/her actual state, if needed.

    The requested messaging may not make sense given the user's
    enrollment / payment / verification status.  For example, a user
    may unenroll from the course after paying for it, then visit the
    "verify now" page to complete verification.  When that happens,
    return a redirect to the most appropriate page; otherwise None.

    Arguments:
        message (string): The messaging of the page.  Should be a key
            in `MESSAGES`.
        already_verified (bool): Whether the user has submitted
            a verification request recently.
        already_paid (bool): Whether the user is enrolled in a paid
            course mode.
        is_enrolled (bool): Whether the user has an active enrollment
            in the course.
        course_key (CourseKey): The key for the course.

    Returns:
        HttpResponse or None
    """
    course_kwargs = {'course_id': unicode(course_key)}
    target = None

    if already_verified and already_paid:
        # Fully done: nothing left but the dashboard, unless we're
        # showing the payment confirmation.
        if message != self.PAYMENT_CONFIRMATION_MSG:
            target = reverse('dashboard')
    elif message in (self.VERIFY_NOW_MSG, self.VERIFY_LATER_MSG, self.PAYMENT_CONFIRMATION_MSG):
        if is_enrolled:
            # Enrolled but unpaid: "upgrade" messaging fits better.
            if not already_paid:
                target = reverse('verify_student_upgrade_and_verify', kwargs=course_kwargs)
        else:
            # Not enrolled at all: restart with first-time verification.
            target = reverse('verify_student_start_flow', kwargs=course_kwargs)
    elif message == self.UPGRADE_MSG:
        if is_enrolled:
            # Upgrading but already paid: "verify later" fits better.
            if already_paid:
                target = reverse('verify_student_verify_later', kwargs=course_kwargs)
        else:
            target = reverse('verify_student_start_flow', kwargs=course_kwargs)

    if target is not None:
        return redirect(target)
    return None
def _display_steps(self, always_show_payment, already_verified, already_paid):
    """Determine which steps to display to the user.

    Starts from the full step list and removes the steps the user has
    already completed.

    Arguments:
        always_show_payment (bool): If True, display the payment steps
            even if the user has already paid.
        already_verified (bool): Whether the user has submitted
            a verification request recently.
        already_paid (bool): Whether the user is enrolled in a paid
            course mode.

    Returns:
        list of dicts with 'name', 'title' and 'templateName' keys.
    """
    hidden = set()
    if already_verified:
        hidden.update(self.VERIFICATION_STEPS)

    if already_paid and not always_show_payment:
        hidden.update(self.PAYMENT_STEPS)
    else:
        # The "make payment" step doubles as an intro step,
        # so if we're showing the payment step, hide the intro step.
        hidden.add(self.INTRO_STEP)

    steps = []
    for step in self.ALL_STEPS:
        if step in hidden:
            continue
        info = self.STEP_INFO[step]
        steps.append({
            'name': step,
            'title': unicode(info.title),
            'templateName': info.template_name
        })
    return steps
def _requirements(self, display_steps, is_active):
    """Determine which requirements to show the user.

    For example, if the user needs to submit a photo verification,
    tell the user that she will need a photo ID and a webcam.

    Arguments:
        display_steps (list): The steps to display to the user.
        is_active (bool): If False, adds a requirement to activate the
            user account.

    Returns:
        dict: Keys are requirement names, values are booleans
            indicating whether to show the requirement.
    """
    requirements = {
        self.ACCOUNT_ACTIVATION_REQ: not is_active,
        self.PHOTO_ID_REQ: False,
        self.WEBCAM_REQ: False,
    }

    # Enable each requirement imposed by a step that will be shown.
    shown = set(step['name'] for step in display_steps)
    for step_name, step_requirements in self.STEP_REQUIREMENTS.iteritems():
        if step_name not in shown:
            continue
        for requirement in step_requirements:
            requirements[requirement] = True

    return requirements
def _check_already_verified(self, user):
    """Check whether the user has a valid or pending verification.

    Note that this includes cases in which the user's verification
    has not been accepted (either because it hasn't been processed,
    or there was an error).

    This should return True if the user has done their part:
    submitted photos within the expiration period.
    """
    # Delegates entirely to the verification model; "pending" counts the
    # same as "valid" so the user is not asked to verify twice.
    return SoftwareSecurePhotoVerification.user_has_valid_or_pending(user)
def _check_enrollment(self, user, course_key):
    """Check whether the user has paid for / is enrolled in the course.

    Enrollment in a paid course mode is treated as proof of payment.

    Arguments:
        user (User): The user to check.
        course_key (CourseKey): The key of the course to check.

    Returns:
        Tuple `(has_paid, is_active)` indicating whether the user
        has paid and whether the enrollment is active.
    """
    enrollment_mode, is_active = CourseEnrollment.enrollment_mode_for_user(user, course_key)

    # Infer payment from the price of the enrollment's course mode.
    has_paid = False
    if is_active and enrollment_mode is not None:
        mode_info = CourseMode.modes_for_course_dict(course_key).get(enrollment_mode)
        has_paid = (mode_info and mode_info.min_price > 0)

    return (has_paid, bool(is_active))
@require_POST
@login_required
def create_order(request):
    """
    Submit PhotoVerification and create a new Order for this verified cert

    Expects POST parameters 'course_id', optionally 'contribution', and
    (optionally) 'face_image' / 'photo_id_image' data-URL payloads.
    Returns signed purchase parameters as JSON on success, or a 400
    response describing the problem.
    """
    # Only submit photos if photo data is provided by the client.
    # TODO (ECOM-188): Once the A/B test of decoupling verified / payment
    # completes, we may be able to remove photo submission from this step
    # entirely.
    submit_photo = (
        'face_image' in request.POST and
        'photo_id_image' in request.POST
    )
    if (
        submit_photo and not
        SoftwareSecurePhotoVerification.user_has_valid_or_pending(request.user)
    ):
        attempt = SoftwareSecurePhotoVerification(user=request.user)
        try:
            # Image values look like "<prefix>,<base64 payload>"; the
            # payload is everything after the first comma.
            b64_face_image = request.POST['face_image'].split(",")[1]
            b64_photo_id_image = request.POST['photo_id_image'].split(",")[1]
        except IndexError:
            log.error(u"Invalid image data during photo verification.")
            context = {
                'success': False,
            }
            return JsonResponse(context)
        attempt.upload_face_image(b64_face_image.decode('base64'))
        attempt.upload_photo_id_image(b64_photo_id_image.decode('base64'))
        attempt.mark_ready()
        attempt.save()

    course_id = request.POST['course_id']
    course_id = CourseKey.from_string(course_id)
    donation_for_course = request.session.get('donation_for_course', {})
    current_donation = donation_for_course.get(unicode(course_id), decimal.Decimal(0))
    contribution = request.POST.get("contribution", donation_for_course.get(unicode(course_id), 0))
    try:
        # Normalize to two decimal places, rounding down.
        amount = decimal.Decimal(contribution).quantize(decimal.Decimal('.01'), rounding=decimal.ROUND_DOWN)
    except decimal.InvalidOperation:
        return HttpResponseBadRequest(_("Selected price is not valid number."))

    # Persist a changed contribution amount back to the session.
    if amount != current_donation:
        donation_for_course[unicode(course_id)] = amount
        request.session['donation_for_course'] = donation_for_course

    # prefer professional mode over verified_mode
    current_mode = CourseMode.verified_mode_for_course(course_id)

    # make sure this course has a verified mode
    if not current_mode:
        log.warn(u"Verification requested for course {course_id} without a verified mode.".format(course_id=course_id))
        return HttpResponseBadRequest(_("This course doesn't support verified certificates"))

    # Professional mode has a fixed price; ignore the user's selection.
    if current_mode.slug == 'professional':
        amount = current_mode.min_price

    if amount < current_mode.min_price:
        return HttpResponseBadRequest(_("No selected price or selected price is below minimum."))

    # I know, we should check this is valid. All kinds of stuff missing here
    cart = Order.get_cart_for_user(request.user)
    cart.clear()
    enrollment_mode = current_mode.slug
    CertificateItem.add_to_order(cart, course_id, amount, enrollment_mode)

    # Change the order's status so that we don't accidentally modify it later.
    # We need to do this to ensure that the parameters we send to the payment system
    # match what we store in the database.
    # (Ordinarily we would do this client-side when the user submits the form, but since
    # the JavaScript on this page does that immediately, we make the change here instead.
    # This avoids a second AJAX call and some additional complication of the JavaScript.)
    # If a user later re-enters the verification / payment flow, she will create a new order.
    cart.start_purchase()

    callback_url = request.build_absolute_uri(
        reverse("shoppingcart.views.postpay_callback")
    )

    params = get_signed_purchase_params(
        cart,
        callback_url=callback_url,
        extra_data=[unicode(course_id), current_mode.slug]
    )

    params['success'] = True
    return HttpResponse(json.dumps(params), content_type="text/json")
@require_POST
@login_required
def submit_photos_for_verification(request):
    """Submit a photo verification attempt.

    Expects 'face_image' and 'photo_id_image' POST parameters carrying
    base64 payloads (everything after the first comma is decoded), and
    optionally a 'full_name' to update on the user's profile first.

    Arguments:
        request (HttpRequest): The request to submit photos.

    Returns:
        HttpResponse: 200 on success, 400 if there are errors.
    """
    # Check the required parameters
    missing_params = set(['face_image', 'photo_id_image']) - set(request.POST.keys())
    if len(missing_params) > 0:
        msg = _("Missing required parameters: {missing}").format(missing=", ".join(missing_params))
        return HttpResponseBadRequest(msg)

    # If the user already has valid or pending request, the UI will hide
    # the verification steps. For this reason, we reject any requests
    # for users that already have a valid or pending verification.
    if SoftwareSecurePhotoVerification.user_has_valid_or_pending(request.user):
        return HttpResponseBadRequest(_("You already have a valid or pending verification."))

    username = request.user.username

    # If the user wants to change his/her full name,
    # then try to do that before creating the attempt.
    if request.POST.get('full_name'):
        try:
            profile_api.update_profile(
                username,
                full_name=request.POST.get('full_name')
            )
        except profile_api.ProfileUserNotFound:
            return HttpResponseBadRequest(_("No profile found for user"))
        except profile_api.ProfileInvalidField:
            msg = _(
                "Name must be at least {min_length} characters long."
            ).format(min_length=profile_api.FULL_NAME_MIN_LENGTH)
            return HttpResponseBadRequest(msg)

    # Create the attempt
    attempt = SoftwareSecurePhotoVerification(user=request.user)
    try:
        # The base64 payload is everything after the first comma.
        b64_face_image = request.POST['face_image'].split(",")[1]
        b64_photo_id_image = request.POST['photo_id_image'].split(",")[1]
    except IndexError:
        msg = _("Image data is not valid.")
        return HttpResponseBadRequest(msg)

    attempt.upload_face_image(b64_face_image.decode('base64'))
    attempt.upload_photo_id_image(b64_photo_id_image.decode('base64'))
    attempt.mark_ready()
    attempt.submit()

    profile_dict = profile_api.profile_info(username)
    if profile_dict:
        # Send a confirmation email to the user
        context = {
            'full_name': profile_dict.get('full_name'),
            'platform_name': settings.PLATFORM_NAME
        }

        subject = _("Verification photos received")
        message = render_to_string('emails/photo_submission_confirmation.txt', context)
        from_address = microsite.get_value('default_from_email', settings.DEFAULT_FROM_EMAIL)
        to_address = profile_dict.get('email')

        send_mail(subject, message, from_address, [to_address], fail_silently=False)

    # NOTE(review): HttpResponse(200) puts the literal string "200" in the
    # response *body* (the status code stays the default 200).
    # HttpResponse(status=200) was presumably intended -- confirm that no
    # client parses the body before changing this.
    return HttpResponse(200)
@require_POST
@csrf_exempt  # SS does its own message signing, and their API won't have a cookie value
def results_callback(request):
    """
    Software Secure will call this callback to tell us whether a user is
    verified to be who they said they are.

    Expects a JSON object containing "EdX-ID" (the attempt's receipt id),
    "Result" ("PASS", "FAIL" or "SYSTEM FAIL"), and optional "Reason" and
    "MessageType" fields.
    """
    body = request.body

    # The body must be a JSON object.
    try:
        body_dict = json.loads(body)
    except ValueError:
        log.exception("Invalid JSON received from Software Secure:\n\n{}\n".format(body))
        return HttpResponseBadRequest("Invalid JSON. Received:\n\n{}".format(body))

    if not isinstance(body_dict, dict):
        log.error("Reply from Software Secure is not a dict:\n\n{}\n".format(body))
        return HttpResponseBadRequest("JSON should be dict. Received:\n\n{}".format(body))

    headers = {
        "Authorization": request.META.get("HTTP_AUTHORIZATION", ""),
        "Date": request.META.get("HTTP_DATE", "")
    }
    # sig_valid is computed but deliberately unused while signature
    # verification is disabled (see the commented-out check below).
    sig_valid = ssencrypt.has_valid_signature(
        "POST",
        headers,
        body_dict,
        settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"],
        settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_SECRET_KEY"]
    )

    # Authorization header is split as "<scheme> <access_key>:<signature>".
    _response, access_key_and_sig = headers["Authorization"].split(" ")
    access_key = access_key_and_sig.split(":")[0]

    # This is what we should be doing...
    #if not sig_valid:
    #    return HttpResponseBadRequest("Signature is invalid")

    # This is what we're doing until we can figure out why we disagree on sigs
    if access_key != settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"]:
        return HttpResponseBadRequest("Access key invalid")

    receipt_id = body_dict.get("EdX-ID")
    result = body_dict.get("Result")
    reason = body_dict.get("Reason", "")
    error_code = body_dict.get("MessageType", "")

    try:
        attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
    except SoftwareSecurePhotoVerification.DoesNotExist:
        log.error("Software Secure posted back for receipt_id {}, but not found".format(receipt_id))
        return HttpResponseBadRequest("edX ID {} not found".format(receipt_id))

    # Apply the reviewer's verdict to the verification attempt.
    if result == "PASS":
        log.debug("Approving verification for {}".format(receipt_id))
        attempt.approve()
    elif result == "FAIL":
        log.debug("Denying verification for {}".format(receipt_id))
        attempt.deny(json.dumps(reason), error_code=error_code)
    elif result == "SYSTEM FAIL":
        log.debug("System failure for {} -- resetting to must_retry".format(receipt_id))
        attempt.system_error(json.dumps(reason), error_code=error_code)
        log.error("Software Secure callback attempt for %s failed: %s", receipt_id, reason)
    else:
        log.error("Software Secure returned unknown result {}".format(result))
        return HttpResponseBadRequest(
            "Result {} not understood. Known results: PASS, FAIL, SYSTEM FAIL".format(result)
        )

    # If this is a reverification, log an event
    if attempt.window:
        course_id = attempt.window.course_id
        course_enrollment = CourseEnrollment.get_or_create_enrollment(attempt.user, course_id)
        course_enrollment.emit_event(EVENT_NAME_USER_REVERIFICATION_REVIEWED_BY_SOFTWARESECURE)

    return HttpResponse("OK!")
@login_required
def show_requirements(request, course_id):
    """
    Show the requirements necessary for the verification flow.

    Redirects to the dashboard when the user already holds an active
    verified enrollment, and to the "verified" page when a valid or
    pending photo verification already exists.

    Fix: removed a duplicate `upgrade = request.GET.get('upgrade', False)`
    assignment that re-read the same GET parameter with no effect.
    """
    # TODO: seems borked for professional; we're told we need to take photos even if there's a pending verification
    course_id = CourseKey.from_string(course_id)
    upgrade = request.GET.get('upgrade', False)

    # Nothing to do if the user already has an active verified enrollment.
    if CourseEnrollment.enrollment_mode_for_user(request.user, course_id) == ('verified', True):
        return redirect(reverse('dashboard'))

    # A valid or pending verification means the photo steps are done.
    if SoftwareSecurePhotoVerification.user_has_valid_or_pending(request.user):
        return redirect(
            reverse(
                'verify_student_verified',
                kwargs={'course_id': course_id.to_deprecated_string()}
            ) + "?upgrade={}".format(upgrade)
        )

    course = modulestore().get_course(course_id)
    modes_dict = CourseMode.modes_for_course_dict(course_id)
    context = {
        "course_id": course_id.to_deprecated_string(),
        "course_modes_choose_url": reverse("course_modes_choose", kwargs={'course_id': course_id.to_deprecated_string()}),
        "verify_student_url": reverse('verify_student_verify', kwargs={'course_id': course_id.to_deprecated_string()}),
        "course_name": course.display_name_with_default,
        "course_org": course.display_org_with_default,
        "course_num": course.display_number_with_default,
        "is_not_active": not request.user.is_active,
        # The GET param arrives as a string; only the literal u'True'
        # enables the upgrade messaging.
        "upgrade": upgrade == u'True',
        "modes_dict": modes_dict,
    }
    return render_to_response("verify_student/show_requirements.html", context)
class ReverifyView(View):
    """
    The main reverification view.  Under similar constraints as the main
    verification view: takes new face and ID photos and submits them to
    the photo verification service.

    Not tied to a particular course and does not deal with pricing.
    """
    @method_decorator(login_required)
    def get(self, request):
        """
        Render the reverification photo-capture page.
        """
        return render_to_response("verify_student/photo_reverification.html", {
            "user_full_name": request.user.profile.name,
            "error": False,
        })

    @method_decorator(login_required)
    def post(self, request):
        """
        Submit the reverification attempt to SoftwareSecure.
        """
        try:
            attempt = SoftwareSecurePhotoVerification(user=request.user)
            # Base64 payload is everything after the first comma.
            face_data = request.POST['face_image'].split(",")[1]
            photo_id_data = request.POST['photo_id_image'].split(",")[1]

            attempt.upload_face_image(face_data.decode('base64'))
            attempt.upload_photo_id_image(photo_id_data.decode('base64'))
            attempt.mark_ready()

            # Persist the attempt, then hand it to the external service.
            attempt.save()
            attempt.submit()

            return HttpResponseRedirect(reverse('verify_student_reverification_confirmation'))
        except Exception:
            log.exception(
                "Could not submit verification attempt for user {}".format(request.user.id)
            )
            return render_to_response("verify_student/photo_reverification.html", {
                "user_full_name": request.user.profile.name,
                "error": True,
            })
class MidCourseReverifyView(View):
    """
    The mid-course reverification view.

    Needs to perform these functions:
        - take new face photo
        - retrieve the old id photo
        - submit these photos to photo verification service

    Does not need to worry about pricing
    """
    @method_decorator(login_required)
    def get(self, request, course_id):
        """
        display this view

        Raises Http404 if the course does not exist.
        """
        course_id = CourseKey.from_string(course_id)
        course = modulestore().get_course(course_id)
        if course is None:
            raise Http404

        # NOTE(review): this GET handler writes state -- it creates the
        # enrollment if missing, forces its mode to "verified", and emits
        # an analytics event.  Confirm these side effects on a GET request
        # are intentional.
        course_enrollment = CourseEnrollment.get_or_create_enrollment(request.user, course_id)
        course_enrollment.update_enrollment(mode="verified")
        course_enrollment.emit_event(EVENT_NAME_USER_ENTERED_MIDCOURSE_REVERIFY_VIEW)
        context = {
            "user_full_name": request.user.profile.name,
            "error": False,
            "course_id": course_id.to_deprecated_string(),
            "course_name": course.display_name_with_default,
            "course_org": course.display_org_with_default,
            "course_num": course.display_number_with_default,
            "reverify": True,
        }
        return render_to_response("verify_student/midcourse_photo_reverification.html", context)

    @method_decorator(login_required)
    def post(self, request, course_id):
        """
        submits the reverification to SoftwareSecure
        """
        try:
            now = datetime.datetime.now(UTC)
            course_id = CourseKey.from_string(course_id)
            # Reverification is only accepted inside an open window.
            window = MidcourseReverificationWindow.get_window(course_id, now)
            if window is None:
                raise WindowExpiredException
            attempt = SoftwareSecurePhotoVerification(user=request.user, window=window)
            # Base64 payload is everything after the first comma.
            b64_face_image = request.POST['face_image'].split(",")[1]

            attempt.upload_face_image(b64_face_image.decode('base64'))
            # Reuses the ID photo from the user's original verification
            # instead of asking for a new one.
            attempt.fetch_photo_id_image()
            attempt.mark_ready()

            attempt.save()
            attempt.submit()
            course_enrollment = CourseEnrollment.get_or_create_enrollment(request.user, course_id)
            course_enrollment.update_enrollment(mode="verified")
            course_enrollment.emit_event(EVENT_NAME_USER_SUBMITTED_MIDCOURSE_REVERIFY)
            return HttpResponseRedirect(reverse('verify_student_midcourse_reverification_confirmation'))
        except WindowExpiredException:
            log.exception(
                "User {} attempted to re-verify, but the window expired before the attempt".format(request.user.id)
            )
            return HttpResponseRedirect(reverse('verify_student_reverification_window_expired'))
        except Exception:
            log.exception(
                "Could not submit verification attempt for user {}".format(request.user.id)
            )
            context = {
                "user_full_name": request.user.profile.name,
                "error": True,
            }
            return render_to_response("verify_student/midcourse_photo_reverification.html", context)
@login_required
def midcourse_reverify_dash(request):
    """
    Render the "course reverification dashboard".

    Displays the reverification status (must reverify, pending, approved,
    failed, etc.) of all courses in which the student has a verified
    enrollment.
    """
    user = request.user

    # Pair each enrollment with its course, dropping enrollments whose
    # course can no longer be found in the modulestore.
    course_enrollment_pairs = []
    for enrollment in CourseEnrollment.enrollments_for_user(user):
        try:
            course = modulestore().get_course(enrollment.course_id)
        except ItemNotFoundError:
            log.error("User {0} enrolled in non-existent course {1}".format(user.username, enrollment.course_id))
        else:
            course_enrollment_pairs.append((course, enrollment))

    statuses = ["approved", "pending", "must_reverify", "denied"]
    reverifications = reverification_info(course_enrollment_pairs, user, statuses)

    context = {
        "user_full_name": user.profile.name,
        'reverifications': reverifications,
        'referer': request.META.get('HTTP_REFERER'),
        'billing_email': settings.PAYMENT_SUPPORT_EMAIL,
    }
    return render_to_response("verify_student/midcourse_reverify_dash.html", context)
@login_required
@require_POST
def toggle_failed_banner_off(request):
    """
    Permanently dismiss the "Reverification Failed" banner for every
    denied midcourse reverification belonging to the requesting user.
    """
    SoftwareSecurePhotoVerification.display_off(request.user.id)
    return HttpResponse('Success')
@login_required
def reverification_submission_confirmation(_request):
    """
    Shows the user a confirmation page if the submission to SoftwareSecure was successful
    """
    # Static template; the leading underscore marks the request as unused.
    return render_to_response("verify_student/reverification_confirmation.html")
@login_required
def midcourse_reverification_confirmation(_request):  # pylint: disable=invalid-name
    """
    Shows the user a confirmation page if the submission to SoftwareSecure was successful
    """
    # Static template; the leading underscore marks the request as unused.
    return render_to_response("verify_student/midcourse_reverification_confirmation.html")
@login_required
def reverification_window_expired(_request):
    """
    Displays an error page if a student tries to submit a reverification, but the window
    for that reverification has already expired.
    """
    # TODO need someone to review the copy for this template
    return render_to_response("verify_student/reverification_window_expired.html")
| agpl-3.0 |
shahrzadmn/skia | tools/skp/webpages_playback.py | 38 | 22705 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Archives or replays webpages and creates SKPs in a Google Storage location.
To archive webpages and store SKP files (archives should be rarely updated):
cd skia
python tools/skp/webpages_playback.py --data_store=gs://rmistry --record \
--page_sets=all --skia_tools=/home/default/trunk/out/Debug/ \
--browser_executable=/tmp/chromium/out/Release/chrome
The above command uses Google Storage bucket 'rmistry' to download needed files.
To replay archived webpages and re-generate SKP files (should be run whenever
SkPicture.PICTURE_VERSION changes):
cd skia
python tools/skp/webpages_playback.py --data_store=gs://rmistry \
--page_sets=all --skia_tools=/home/default/trunk/out/Debug/ \
--browser_executable=/tmp/chromium/out/Release/chrome
Specify the --page_sets flag (default value is 'all') to pick a list of which
webpages should be archived and/or replayed. Eg:
--page_sets=tools/skp/page_sets/skia_yahooanswers_desktop.py,\
tools/skp/page_sets/skia_googlecalendar_nexus10.py
The --browser_executable flag should point to the browser binary you want to use
to capture archives and/or capture SKP files. Majority of the time it should be
a newly built chrome binary.
The --data_store flag controls where the needed artifacts, such as
credential files, are downloaded from. It also controls where the
generated artifacts, such as recorded webpages and resulting skp renderings,
are uploaded to. URLs with scheme 'gs://' use Google Storage. Otherwise
use local filesystem.
The --upload=True flag means generated artifacts will be
uploaded or copied to the location specified by --data_store. (default value is
False if not specified).
The --non-interactive flag controls whether the script will prompt the user
(default value is False if not specified).
The --skia_tools flag if specified will allow this script to run
debugger, render_pictures, and render_pdfs on the captured
SKP(s). The tools are run after all SKPs are succesfully captured to make sure
they can be added to the buildbots with no breakages.
"""
import glob
import optparse
import os
import posixpath
import shutil
import subprocess
import sys
import tempfile
import time
import traceback
sys.path.insert(0, os.getcwd())
from common.py.utils import gs_utils
from common.py.utils import shell_utils
ROOT_PLAYBACK_DIR_NAME = 'playback'
SKPICTURES_DIR_NAME = 'skps'

# Local archive and SKP directories.
LOCAL_PLAYBACK_ROOT_DIR = os.path.join(
    tempfile.gettempdir(), ROOT_PLAYBACK_DIR_NAME)
LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR = os.path.join(
    os.path.abspath(os.path.dirname(__file__)), 'page_sets', 'data')
# NOTE: the temp dir is created eagerly at import time (module-level
# side effect of mkdtemp()).
TMP_SKP_DIR = tempfile.mkdtemp()

# Location of the credentials.json file and the string that represents missing
# passwords.
CREDENTIALS_FILE_PATH = os.path.join(
    os.path.abspath(os.path.dirname(__file__)), 'page_sets', 'data',
    'credentials.json'
)

# Name of the SKP benchmark
SKP_BENCHMARK = 'skpicture_printer'

# The max base name length of Skp files.
MAX_SKP_BASE_NAME_LEN = 31

# Dictionary of device to platform prefixes for SKP files.
DEVICE_TO_PLATFORM_PREFIX = {
    'desktop': 'desk',
    'galaxynexus': 'mobi',
    'nexus10': 'tabl'
}

# How many times the record_wpr binary should be retried.
RETRY_RECORD_WPR_COUNT = 5
# How many times the run_benchmark binary should be retried.
RETRY_RUN_MEASUREMENT_COUNT = 5

# Location of the credentials.json file in Google Storage.
CREDENTIALS_GS_PATH = '/playback/credentials/credentials.json'

# X display used when driving the browser; falls back to ":0".
X11_DISPLAY = os.getenv('DISPLAY', ':0')

# Google Storage ACLs applied to uploaded artifacts.
GS_PREDEFINED_ACL = gs_utils.GSUtils.PredefinedACL.PRIVATE
GS_FINE_GRAINED_ACL_LIST = [
    (gs_utils.GSUtils.IdType.GROUP_BY_DOMAIN, 'google.com',
     gs_utils.GSUtils.Permission.READ),
]

# Path to Chromium's page sets.
CHROMIUM_PAGE_SETS_PATH = os.path.join('tools', 'perf', 'page_sets')

# Dictionary of supported Chromium page sets to their file prefixes.
CHROMIUM_PAGE_SETS_TO_PREFIX = {
    'key_mobile_sites_smooth.py': 'keymobi',
    'top_25_smooth.py': 'top25desk',
}
def remove_prefix(s, prefix):
    """Return s with a leading prefix stripped; s unchanged if absent."""
    return s[len(prefix):] if s.startswith(prefix) else s
class SkPicturePlayback(object):
"""Class that archives or replays webpages and creates SKPs."""
def __init__(self, parse_options):
    """Constructs a SkPicturePlayback BuildStep instance.

    Args:
        parse_options: parsed command-line options (browser_executable,
            page_sets, data_store, record, skia_tools, etc.).
    """
    assert parse_options.browser_executable, 'Must specify --browser_executable'
    self._browser_executable = parse_options.browser_executable
    # The setuid sandbox is always disabled; any --browser_extra_args
    # are appended verbatim.
    self._browser_args = '--disable-setuid-sandbox'
    if parse_options.browser_extra_args:
        self._browser_args = '%s %s' % (
            self._browser_args, parse_options.browser_extra_args)

    self._chrome_page_sets_path = os.path.join(parse_options.chrome_src_path,
                                               CHROMIUM_PAGE_SETS_PATH)
    # 'all' selects every page set under tools/skp/page_sets plus the
    # supported Chromium page sets (see _ParsePageSets).
    self._all_page_sets_specified = parse_options.page_sets == 'all'
    self._page_sets = self._ParsePageSets(parse_options.page_sets)

    self._record = parse_options.record
    self._skia_tools = parse_options.skia_tools
    self._non_interactive = parse_options.non_interactive
    self._upload = parse_options.upload
    self._skp_prefix = parse_options.skp_prefix

    # Pick Google Storage vs. local filesystem based on the
    # --data_store URL scheme.
    data_store_location = parse_options.data_store
    if data_store_location.startswith(gs_utils.GS_PREFIX):
        self.gs = GoogleStorageDataStore(data_store_location)
    else:
        self.gs = LocalFileSystemDataStore(data_store_location)
    self._alternate_upload_dir = parse_options.alternate_upload_dir
    self._telemetry_binaries_dir = os.path.join(parse_options.chrome_src_path,
                                                'tools', 'perf')

    self._local_skp_dir = os.path.join(
        parse_options.output_dir, ROOT_PLAYBACK_DIR_NAME, SKPICTURES_DIR_NAME)
    self._local_record_webpages_archive_dir = os.path.join(
        parse_options.output_dir, ROOT_PLAYBACK_DIR_NAME, 'webpages_archive')

    # List of SKP files generated by this script.
    self._skp_files = []
def _ParsePageSets(self, page_sets):
if not page_sets:
raise ValueError('Must specify at least one page_set!')
elif self._all_page_sets_specified:
# Get everything from the page_sets directory.
page_sets_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'page_sets')
ps = [os.path.join(page_sets_dir, page_set)
for page_set in os.listdir(page_sets_dir)
if not os.path.isdir(os.path.join(page_sets_dir, page_set)) and
page_set.endswith('.py')]
chromium_ps = [
os.path.join(self._chrome_page_sets_path, cr_page_set)
for cr_page_set in CHROMIUM_PAGE_SETS_TO_PREFIX]
ps.extend(chromium_ps)
elif '*' in page_sets:
# Explode and return the glob.
ps = glob.glob(page_sets)
else:
ps = page_sets.split(',')
ps.sort()
return ps
def _IsChromiumPageSet(self, page_set):
"""Returns true if the specified page set is a Chromium page set."""
return page_set.startswith(self._chrome_page_sets_path)
def Run(self):
"""Run the SkPicturePlayback BuildStep."""
# Download the credentials file if it was not previously downloaded.
if not os.path.isfile(CREDENTIALS_FILE_PATH):
# Download the credentials.json file from Google Storage.
self.gs.download_file(CREDENTIALS_GS_PATH, CREDENTIALS_FILE_PATH)
if not os.path.isfile(CREDENTIALS_FILE_PATH):
print """\n\nCould not locate credentials file in the storage.
Please create a %s file that contains:
{
"google": {
"username": "google_testing_account_username",
"password": "google_testing_account_password"
},
"facebook": {
"username": "facebook_testing_account_username",
"password": "facebook_testing_account_password"
}
}\n\n""" % CREDENTIALS_FILE_PATH
raw_input("Please press a key when you are ready to proceed...")
# Delete any left over data files in the data directory.
for archive_file in glob.glob(
os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, 'skia_*')):
os.remove(archive_file)
# Delete the local root directory if it already exists.
if os.path.exists(LOCAL_PLAYBACK_ROOT_DIR):
shutil.rmtree(LOCAL_PLAYBACK_ROOT_DIR)
# Create the required local storage directories.
self._CreateLocalStorageDirs()
# Start the timer.
start_time = time.time()
# Loop through all page_sets.
for page_set in self._page_sets:
page_set_basename = os.path.basename(page_set).split('.')[0]
page_set_json_name = page_set_basename + '.json'
wpr_data_file = page_set.split(os.path.sep)[-1].split('.')[0] + '_000.wpr'
page_set_dir = os.path.dirname(page_set)
if self._IsChromiumPageSet(page_set):
print 'Using Chromium\'s captured archives for Chromium\'s page sets.'
elif self._record:
# Create an archive of the specified webpages if '--record=True' is
# specified.
record_wpr_cmd = (
'PYTHONPATH=%s:$PYTHONPATH' % page_set_dir,
'DISPLAY=%s' % X11_DISPLAY,
os.path.join(self._telemetry_binaries_dir, 'record_wpr'),
'--extra-browser-args="%s"' % self._browser_args,
'--browser=exact',
'--browser-executable=%s' % self._browser_executable,
'%s_page_set' % page_set_basename,
'--page-set-base-dir=%s' % page_set_dir
)
for _ in range(RETRY_RECORD_WPR_COUNT):
try:
shell_utils.run(' '.join(record_wpr_cmd), shell=True)
# Move over the created archive into the local webpages archive
# directory.
shutil.move(
os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, wpr_data_file),
self._local_record_webpages_archive_dir)
shutil.move(
os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR,
page_set_json_name),
self._local_record_webpages_archive_dir)
# Break out of the retry loop since there were no errors.
break
except Exception:
# There was a failure continue with the loop.
traceback.print_exc()
else:
# If we get here then record_wpr did not succeed and thus did not
# break out of the loop.
raise Exception('record_wpr failed for page_set: %s' % page_set)
else:
# Get the webpages archive so that it can be replayed.
self._DownloadWebpagesArchive(wpr_data_file, page_set_json_name)
run_benchmark_cmd = (
'PYTHONPATH=%s:$PYTHONPATH' % page_set_dir,
'DISPLAY=%s' % X11_DISPLAY,
'timeout', '300',
os.path.join(self._telemetry_binaries_dir, 'run_benchmark'),
'--extra-browser-args="%s"' % self._browser_args,
'--browser=exact',
'--browser-executable=%s' % self._browser_executable,
SKP_BENCHMARK,
'--page-set-name=%s' % page_set_basename,
'--page-set-base-dir=%s' % page_set_dir,
'--skp-outdir=%s' % TMP_SKP_DIR,
'--also-run-disabled-tests'
)
for _ in range(RETRY_RUN_MEASUREMENT_COUNT):
try:
print '\n\n=======Capturing SKP of %s=======\n\n' % page_set
shell_utils.run(' '.join(run_benchmark_cmd), shell=True)
except shell_utils.CommandFailedException:
# skpicture_printer sometimes fails with AssertionError but the
# captured SKP is still valid. This is a known issue.
pass
# Rename generated SKP files into more descriptive names.
try:
self._RenameSkpFiles(page_set)
# Break out of the retry loop since there were no errors.
break
except Exception:
# There was a failure continue with the loop.
traceback.print_exc()
print '\n\n=======Retrying %s=======\n\n' % page_set
time.sleep(10)
else:
# If we get here then run_benchmark did not succeed and thus did not
# break out of the loop.
raise Exception('run_benchmark failed for page_set: %s' % page_set)
print '\n\n=======Capturing SKP files took %s seconds=======\n\n' % (
time.time() - start_time)
if self._skia_tools:
render_pictures_cmd = [
os.path.join(self._skia_tools, 'render_pictures'),
'-r', self._local_skp_dir
]
render_pdfs_cmd = [
os.path.join(self._skia_tools, 'render_pdfs'),
'-r', self._local_skp_dir
]
for tools_cmd in (render_pictures_cmd, render_pdfs_cmd):
print '\n\n=======Running %s=======' % ' '.join(tools_cmd)
proc = subprocess.Popen(tools_cmd)
(code, _) = shell_utils.log_process_after_completion(proc, echo=False)
if code != 0:
raise Exception('%s failed!' % ' '.join(tools_cmd))
if not self._non_interactive:
print '\n\n=======Running debugger======='
os.system('%s %s' % (os.path.join(self._skia_tools, 'debugger'),
self._local_skp_dir))
print '\n\n'
if self._upload:
print '\n\n=======Uploading to %s=======\n\n' % self.gs.target_type()
# Copy the directory structure in the root directory into Google Storage.
dest_dir_name = ROOT_PLAYBACK_DIR_NAME
if self._alternate_upload_dir:
dest_dir_name = self._alternate_upload_dir
self.gs.upload_dir_contents(
LOCAL_PLAYBACK_ROOT_DIR, dest_dir=dest_dir_name,
upload_if=gs_utils.GSUtils.UploadIf.IF_MODIFIED,
predefined_acl=GS_PREDEFINED_ACL,
fine_grained_acl_list=GS_FINE_GRAINED_ACL_LIST)
print '\n\n=======New SKPs have been uploaded to %s =======\n\n' % (
posixpath.join(self.gs.target_name(), dest_dir_name,
SKPICTURES_DIR_NAME))
else:
print '\n\n=======Not Uploading to %s=======\n\n' % self.gs.target_type()
print 'Generated resources are available in %s\n\n' % (
LOCAL_PLAYBACK_ROOT_DIR)
return 0
def _GetSkiaSkpFileName(self, page_set):
"""Returns the SKP file name for Skia page sets."""
# /path/to/skia_yahooanswers_desktop.py -> skia_yahooanswers_desktop.py
ps_filename = os.path.basename(page_set)
# skia_yahooanswers_desktop.py -> skia_yahooanswers_desktop
ps_basename, _ = os.path.splitext(ps_filename)
# skia_yahooanswers_desktop -> skia, yahooanswers, desktop
_, page_name, device = ps_basename.split('_')
basename = '%s_%s' % (DEVICE_TO_PLATFORM_PREFIX[device], page_name)
return basename[:MAX_SKP_BASE_NAME_LEN] + '.skp'
def _GetChromiumSkpFileName(self, page_set, site):
"""Returns the SKP file name for Chromium page sets."""
# /path/to/http___mobile_news_sandbox_pt0 -> http___mobile_news_sandbox_pt0
_, webpage = os.path.split(site)
# http___mobile_news_sandbox_pt0 -> mobile_news_sandbox_pt0
for prefix in ('http___', 'https___', 'www_'):
if webpage.startswith(prefix):
webpage = webpage[len(prefix):]
# /path/to/skia_yahooanswers_desktop.py -> skia_yahooanswers_desktop.py
ps_filename = os.path.basename(page_set)
# http___mobile_news_sandbox -> pagesetprefix_http___mobile_news_sandbox
basename = '%s_%s' % (CHROMIUM_PAGE_SETS_TO_PREFIX[ps_filename], webpage)
return basename[:MAX_SKP_BASE_NAME_LEN] + '.skp'
def _RenameSkpFiles(self, page_set):
"""Rename generated SKP files into more descriptive names.
Look into the subdirectory of TMP_SKP_DIR and find the most interesting
.skp in there to be this page_set's representative .skp.
"""
subdirs = glob.glob(os.path.join(TMP_SKP_DIR, '*'))
for site in subdirs:
if self._IsChromiumPageSet(page_set):
filename = self._GetChromiumSkpFileName(page_set, site)
else:
filename = self._GetSkiaSkpFileName(page_set)
filename = filename.lower()
if self._skp_prefix:
filename = '%s%s' % (self._skp_prefix, filename)
# We choose the largest .skp as the most likely to be interesting.
largest_skp = max(glob.glob(os.path.join(site, '*.skp')),
key=lambda path: os.stat(path).st_size)
dest = os.path.join(self._local_skp_dir, filename)
print 'Moving', largest_skp, 'to', dest
shutil.move(largest_skp, dest)
self._skp_files.append(filename)
shutil.rmtree(site)
def _CreateLocalStorageDirs(self):
"""Creates required local storage directories for this script."""
for d in (self._local_record_webpages_archive_dir,
self._local_skp_dir):
if os.path.exists(d):
shutil.rmtree(d)
os.makedirs(d)
def _DownloadWebpagesArchive(self, wpr_data_file, page_set_json_name):
"""Downloads the webpages archive and its required page set from GS."""
wpr_source = posixpath.join(ROOT_PLAYBACK_DIR_NAME, 'webpages_archive',
wpr_data_file)
page_set_source = posixpath.join(ROOT_PLAYBACK_DIR_NAME,
'webpages_archive',
page_set_json_name)
gs = self.gs
if (gs.does_storage_object_exist(wpr_source) and
gs.does_storage_object_exist(page_set_source)):
gs.download_file(wpr_source,
os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR,
wpr_data_file))
gs.download_file(page_set_source,
os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR,
page_set_json_name))
else:
raise Exception('%s and %s do not exist in %s!' % (gs.target_type(),
wpr_source, page_set_source))
class DataStore:
  """Abstract interface to a storage backend for recordings.

  The method names deliberately mirror the google storage API so that a
  Google Storage bucket and a local directory are interchangeable."""
  def target_name(self):
    """Human-readable identifier of the storage target."""
    raise NotImplementedError()
  def target_type(self):
    """Short description of the kind of storage backing this store."""
    raise NotImplementedError()
  def does_storage_object_exist(self, *args):
    """Whether the named object exists in the store."""
    raise NotImplementedError()
  def download_file(self, *args):
    """Copy an object out of the store to a local path."""
    raise NotImplementedError()
  def upload_dir_contents(self, source_dir, **kwargs):
    """Recursively copy a local directory into the store."""
    raise NotImplementedError()
class GoogleStorageDataStore(DataStore):
  """DataStore backed by a Google Storage bucket (a gs://... URL)."""
  def __init__(self, data_store_url):
    self._data_store_url = data_store_url
    # Bucket name = URL with leading whitespace and the gs:// prefix
    # stripped.
    self._bucket = remove_prefix(self._data_store_url.lstrip(),
                                 gs_utils.GS_PREFIX)
    self.gs = gs_utils.GSUtils()
  def target_name(self):
    # The full gs:// URL as given on the command line.
    return self._data_store_url
  def target_type(self):
    return 'Google Storage'
  def does_storage_object_exist(self, *args):
    # Delegates to GSUtils with the bucket prepended.
    return self.gs.does_storage_object_exist(self._bucket, *args)
  def download_file(self, *args):
    self.gs.download_file(self._bucket, *args)
  def upload_dir_contents(self, source_dir, **kwargs):
    self.gs.upload_dir_contents(source_dir, self._bucket, **kwargs)
class LocalFileSystemDataStore(DataStore):
  """DataStore backed by a plain directory on the local filesystem."""
  def __init__(self, data_store_location):
    self._base_dir = data_store_location
  def target_name(self):
    return self._base_dir
  def target_type(self):
    return self._base_dir
  def does_storage_object_exist(self, name, *args):
    return os.path.isfile(os.path.join(self._base_dir, name))
  def download_file(self, name, local_path, *args):
    shutil.copyfile(os.path.join(self._base_dir, name), local_path)
  def upload_dir_contents(self, source_dir, dest_dir, **kwargs):
    # Recursively merge source_dir into the destination, creating
    # directories as needed and preserving file metadata (copy2).
    def merge(src, dst):
      if not os.path.exists(dst):
        os.makedirs(dst)
      for entry in os.listdir(src):
        src_path = os.path.join(src, entry)
        dst_path = os.path.join(dst, entry)
        if os.path.isdir(src_path):
          merge(src_path, dst_path)
        else:
          shutil.copy2(src_path, dst_path)
    merge(source_dir, os.path.join(self._base_dir, dest_dir))
if '__main__' == __name__:
  # Command-line entry point: build the option parser, then hand the
  # parsed options to SkPicturePlayback and exit with its return code.
  option_parser = optparse.OptionParser()
  option_parser.add_option(
      '', '--page_sets',
      help='Specifies the page sets to use to archive. Supports globs.',
      default='all')
  option_parser.add_option(
      '', '--record', action='store_true',
      help='Specifies whether a new website archive should be created.',
      default=False)
  option_parser.add_option(
      '', '--skia_tools',
      help=('Path to compiled Skia executable tools. '
            'render_pictures/render_pdfs is run on the set '
            'after all SKPs are captured. If the script is run without '
            '--non-interactive then the debugger is also run at the end. Debug '
            'builds are recommended because they seem to catch more failures '
            'than Release builds.'),
      default=None)
  option_parser.add_option(
      '', '--upload', action='store_true',
      help=('Uploads to Google Storage or copies to local filesystem storage '
            ' if this is True.'),
      default=False)
  option_parser.add_option(
      '', '--data_store',
      help=('The location of the file storage to use to download and upload '
            'files. Can be \'gs://<bucket>\' for Google Storage, or '
            'a directory for local filesystem storage'),
      default='gs://chromium-skia-gm')
  option_parser.add_option(
      '', '--alternate_upload_dir',
      help= ('Uploads to a different directory in Google Storage or local '
             'storage if this flag is specified'),
      default=None)
  option_parser.add_option(
      '', '--output_dir',
      help=('Temporary directory where SKPs and webpage archives will be '
            'outputted to.'),
      default=tempfile.gettempdir())
  option_parser.add_option(
      '', '--browser_executable',
      help='The exact browser executable to run.',
      default=None)
  option_parser.add_option(
      '', '--browser_extra_args',
      help='Additional arguments to pass to the browser.',
      default=None)
  option_parser.add_option(
      '', '--chrome_src_path',
      help='Path to the chromium src directory.',
      default=None)
  option_parser.add_option(
      '', '--non-interactive', action='store_true',
      help='Runs the script without any prompts. If this flag is specified and '
           '--skia_tools is specified then the debugger is not run.',
      default=False)
  option_parser.add_option(
      '', '--skp_prefix',
      help='Prefix to add to the names of generated SKPs.',
      default=None)
  options, unused_args = option_parser.parse_args()
  playback = SkPicturePlayback(options)
  sys.exit(playback.Run())
| apache-2.0 |
rajarahulray/iDetector | tests_and_ References/iDetector_GUI_exp.py | 1 | 11602 | from tkinter import Tk, Text, Button, Label, filedialog, messagebox, Menu, StringVar, Scrollbar,\
LEFT, RIGHT, Frame, PhotoImage,Canvas, HORIZONTAL, VERTICAL, Toplevel
from clarifai.rest import ClarifaiApp, Image as ClImage, Video as ClVid
from timeit import default_timer
from terminaltables import DoubleTable
from PIL import Image, ImageTk
#common settings...
# Path of the currently selected image file; " " means nothing selected yet.
fil_img_nam = " ";
# Path of the currently selected video file; " " means nothing selected yet.
fil_vid_nam = " ";
# Directory the file-chooser dialogs open in (remembers the last browse).
dir_int = "/home";
# Background colour used throughout the GUI.
bgcolor = "blue";
# Clarifai model handle; replaced with a real model by mak_con() after a
# successful connection (until then it is just a placeholder string).
model = ' ';
'''__________________________________________All_Functions_______________________________________...'''
#Settings Menu function....
def stg():
    """Callback for the Settings menu entry (placeholder)."""
    print("In Settings Menu...")
#Help Menu Function.....
def hlp():
    """Callback for the Help menu entry (placeholder)."""
    print("In help Menu..")
#About Menu Function.....
def abt():
    """Callback for the About menu entry (placeholder)."""
    print("In About menu")
#Making conection to Clarifai...
def mak_con():
    """Connect to the Clarifai API and load the 'general-v1.3' model.

    On success the global ``model`` is replaced with the real model,
    the connect button turns green and a timing dialog is shown; on
    failure an error dialog with the exception text is shown instead.
    """
    global model;
    #creating instance of ClarifaiApp() here because it is taking time to load thereby making the GUI to load very late..
    srt_tim = default_timer();
    try:
        app = ClarifaiApp()
        model = app.models.get('general-v1.3');
        clf_con_btn.config(bg = 'green');
        clf_con_txt.set('Connected');
        # NOTE(review): '%2f' sets a minimum field width of 2; '%.2f' was
        # probably intended for two decimal places — confirm.
        messagebox.showinfo('Connection Status', 'Connection Established.\nTime Taken : %2f sec.'%(default_timer() - srt_tim));
    except Exception as e:
        messagebox.showerror('Connection Status', str(e));
#Image Information Function...
def img_inf():
    """Open the currently selected image in a scrollable Toplevel window.

    Requires a prior selection via img_bwr(); shows an error dialog when
    nothing has been selected or the image cannot be loaded.
    """
    global fil_img_nam;
    # " " is the sentinel meaning "no image selected yet".
    if fil_img_nam != ' ':
        root=Toplevel();
        try:
            print("Image Name is:: {}".format(fil_img_nam));
            frame=Frame(master = root,width=500,height=100)
            frame.grid(row=0,column=0)
            #img = PhotoImage(file = '/home/raja/Pictures/Python_vs._others.png');
            # PIL is used (instead of PhotoImage above) so that JPEGs work.
            image = Image.open(fil_img_nam);
            photo = ImageTk.PhotoImage(image)
            canvas=Canvas(frame,bg='violet',width=800,height=500,scrollregion=(0,0,900,800));
            canvas.create_image(300, 300, image = photo)
            # Horizontal and vertical scrollbars wired to the canvas view.
            hbar=Scrollbar(frame,orient=HORIZONTAL)
            hbar.grid(row = 1, column = 0, sticky = 'ew');
            hbar.config(command=canvas.xview)
            vbar=Scrollbar(frame,orient=VERTICAL)
            vbar.grid(row = 0, column = 1, sticky = 'ns');
            vbar.config(command=canvas.yview)
            ##canvas.config(width=300,height=300)
            canvas.config(xscrollcommand = hbar.set, yscrollcommand = vbar.set)
            canvas.grid(row = 0, column = 0, sticky = 'nsew');
        except Exception as e:
            print(str(e));
            messagebox.showerror('I/O Error',str(e));
        # Blocks here until the Toplevel window is closed.
        root.mainloop();
    else:
        messagebox.showerror('I/O Error', 'No file selected..');
#Video Analysis.
def vid_anl():
    """Send the selected video to Clarifai and list per-frame concepts.

    Writes "Name/Value" lines for every concept of every frame into the
    pre_vid_inf text box.  Requires mak_con() to have replaced the global
    ``model`` placeholder; otherwise an error dialog asks to connect.
    """
    global fil_vid_nam;
    try:
        global model;
        vid_fil = ClVid(file_obj=open(fil_vid_nam, 'rb'))
        #clarifai returns dictionary by default....
        pre = model.predict([vid_fil]);
        #inserting data into the textbox from dictionary returned from clarifai...
        # The widget is normally disabled; enable it just long enough to
        # clear and refill it.
        pre_vid_inf.config(state = 'normal');
        pre_vid_inf.delete("1.0", "end-1c");
        for i in range(len(pre['outputs'][0]['data']['frames'])):
            for j in range(len(pre['outputs'][0]['data']['frames'][i]['data']['concepts'])):
                text = "{}: Name: {} \n Value: {} \n".format(j+1, pre['outputs'][0]['data']['frames'][i]['data']['concepts'][j]['name'],\
                    pre['outputs'][0]['data']['frames'][i]['data']['concepts'][j]['value']);
                pre_vid_inf.insert("insert", text);
                print(i, j);
        ##pre_vid_inf.insert('insert', pre['outputs'][0]['data']['frames'][0]['data']['concepts'][0]);
        pre_vid_inf.config(state = 'disabled');
        print("Video Analysis Complete");
    except Exception as e:
        print(str(e));
        # If ``model`` is still the placeholder string, predict() fails
        # with exactly this message — translate it for the user.
        if str(e) == "'str' object has no attribute 'predict'":
            messagebox.showerror('I/O Error', 'Please Connect to Clarifai');
        else:
            messagebox.showerror('I/O Error', str(e));
#Image Analysis....
def img_anl():
    """Send the selected image to Clarifai and show concepts as a table.

    Renders the predicted concepts into a DoubleTable and inserts it into
    the pre_img_inf text box.  Requires mak_con() to have replaced the
    global ``model`` placeholder; otherwise asks the user to connect.
    """
    global fil_img_nam;
    try:
        global model;
        img_fil = ClImage(file_obj=open(fil_img_nam, 'rb'))
        #clarifai returns dictionary by default....
        pre = model.predict([img_fil]);
        #inserting data into the textbox...
        # Temporarily enable the (normally read-only) widget to refill it.
        pre_img_inf.config(state = 'normal');
        pre_img_inf.delete("1.0", "end-1c");
        tab = [['Sr.No.', 'Category', 'Prediction Value']];
        for i in range(len(pre['outputs'][0]['data']['concepts'])):
            tab.append([i+1, pre['outputs'][0]['data']['concepts'][i]['name'] ,pre['outputs'][0]['data']['concepts'][i]['value']]);
        tbl = DoubleTable(tab);
        print(tbl.table);
        pre_img_inf.insert("insert", tbl.table);
        pre_img_inf.config(state = 'disabled');
        print("Image Analysis Complete");
    except Exception as e:
        print(str(e));
        # Placeholder ``model`` (a string) produces exactly this message.
        if str(e) == "'str' object has no attribute 'predict'":
            messagebox.showerror('I/O Error', 'Please Connect to Clarifai');
        else:
            messagebox.showerror('I/O Error', str(e));
#function to open an image through file_dialog Box..
def img_bwr(file_type):
    """Open a file-chooser and remember the selected image/video path.

    file_type: 'img' selects an image file (stored in fil_img_nam);
    any other value selects a video file (stored in fil_vid_nam).
    The directory of the chosen file is remembered in dir_int so the
    next dialog opens in the same place.  Cancelling the dialog leaves
    all globals untouched.

    Fixes over the original implementation:
    * uses askopenfilename() (a path) instead of askopenfile(), which
      opened the file, leaked the handle, and recovered the path by
      parsing the object's repr();
    * compares file_type with == instead of the fragile identity test
      ``is 'img'``;
    * a cancelled dialog no longer corrupts the globals with garbage
      parsed out of repr(None).
    """
    global fil_img_nam;
    global fil_vid_nam;
    global dir_int;
    if file_type == 'img':
        path = filedialog.askopenfilename(
            initialdir = dir_int, title = 'Select Files...',
            filetypes = (("jpeg files","*.jpg"),("PNG files","*.png"), ("all files","*.*")));
        if not path:
            # Dialog cancelled - keep the previous selection.
            return;
        fil_img_nam = path;
        print('fil_img_nam = ',fil_img_nam);
    else:
        path = filedialog.askopenfilename(
            initialdir = dir_int, title = 'Select Files...',
            filetypes = (("Mp4 files","*.mp4"), ("all files","*.*")));
        if not path:
            return;
        fil_vid_nam = path;
        print('fil_vid_nam = ',fil_vid_nam);
    #preserving browsed directory...
    # Same semantics as the original reverse scan: only update dir_int
    # when a '/' exists at index > 0.
    idx = path.rfind('/');
    if idx > 0:
        dir_int = path[:idx];
'''__________________________Root_window...________________________________________'''
#root..and its initial settings....
root = Tk();
root.title('iDetector');
root.config(background = bgcolor);
#root.geometry("897x650");
##root.resizable(0,0);
#frames.........
# Three stacked frames: title area, analysis area, connection area.
frm_int = Frame(root, background = bgcolor);
frm_int.pack()
frm_anl_box = Frame(root, background = bgcolor);
frm_anl_box.pack();
frm_con = Frame(root, background = bgcolor);
frm_con.pack();
'''_________________________________________________MenuBar_______________________'''
#Menubar instance...
mnu_bar = Menu(root);
#Settings_Menu:
set_mnu = Menu(mnu_bar, tearoff = 1);
set_mnu.add_command(label = "Command_1.1", command = stg);
mnu_bar.add_cascade(label = "Settings", menu = set_mnu);
#Help_Menu:
hlp_mnu = Menu(mnu_bar, tearoff = 1);
hlp_mnu.add_command(label = 'command_2.1', command = hlp);
mnu_bar.add_cascade(label = 'Help', menu = hlp_mnu);
#About_Menu:
abt_mnu = Menu(mnu_bar, tearoff = 1);
abt_mnu.add_command(label = 'command_3.1', command = abt);
mnu_bar.add_cascade(label = 'About', menu = abt_mnu);
#Packing menubar on root..
root.config(menu = mnu_bar);
'''_________________________________________________Other_Widgets_used..._________________'''
'''______________________________for_image_analysis______________'''
#Universal Label...
# NOTE(review): uni_lbl (and the *_btn variables below) hold None, because
# .grid() returns None; the widgets themselves stay alive as children of
# their parent frames, so the GUI still works.
uni_lbl = Label(frm_int, text = "iDetector", background = bgcolor, font = 'Lucinda').grid(row = 1, column = 2, sticky = 'ew');
uni_lbl = Label(frm_int, text = "Detect Information from Images and Videos", background = bgcolor, font = 'Lucinda').grid(row = 2, column = 2, sticky = 'ew');
frm_int.grid_rowconfigure(0, weight = 1);
frm_int.grid_columnconfigure(0, weight = 1);
frm_int.grid_rowconfigure(4, weight = 2);
frm_int.grid_columnconfigure(4, weight = 1);
#uni_lbl = Label(root, text = 'Upload Image', background = bgcolor).place(x = x_cor + 50, y = y_cor + 65);
uni_lbl = Label(frm_int, background = bgcolor, font = 'Lucinda').grid(row = 3, column = 3, sticky = 'ew');
#Button to fetch image....
img_btn = Button(frm_anl_box, text = "Browse Image", command = lambda: img_bwr('img'), bg = 'violet', ).grid(row = 0, column = 0, sticky = 'ew');
#Button to fetch video....
vid_btn = Button(frm_anl_box, text = "Browse Video", command = lambda: img_bwr('vid'), bg = 'violet').grid(row = 0, column = 3, sticky = 'ew');
#Text box to show image prediction info..
pre_img_inf = Text(frm_anl_box,)
pre_img_inf.grid(row = 1, column = 0, sticky = 'nsew');
#Scrollbar for image details text box....
img_srl_bar = Scrollbar(frm_anl_box, command = pre_img_inf.yview);
img_srl_bar.grid(row = 1, column = 1, sticky = 'nsew');
pre_img_inf.config(yscrollcommand = img_srl_bar.set);
pre_img_inf.config(state = 'disabled');
#Empty Lable to seprate two Text boxes..
uni_lbl = Label(frm_anl_box, width = 10, background = bgcolor, font = 'Lucinda').grid(row = 0, column = 2, sticky = 'ns');
#Text box to show image prediction info..
pre_vid_inf = Text(frm_anl_box,)
pre_vid_inf.config(state = 'disabled');
pre_vid_inf.grid(row = 1, column = 3, sticky = 'ew');
#Scrollbar for video details text box....
vid_srl_bar = Scrollbar(frm_anl_box, command = pre_vid_inf.yview);
vid_srl_bar.grid(row = 1, column = 4, sticky = 'nsew');
pre_vid_inf.config(yscrollcommand = vid_srl_bar.set);
pre_vid_inf.config(state = 'disabled');
'''______________________________Buttons_for_Image_and_Video_Analysis___________________________________________________'''
#Button for send request and analyze an image...
# NOTE(review): "Ananlyze" / "Aanalysis" below are typos in the visible
# UI labels; left unchanged here because they are runtime strings.
alz_img_btn = Button(frm_anl_box, text = 'Ananlyze Image', command = img_anl, bg = 'light green').grid(row = 2, column = 0, sticky = 'ew');
#Button for send request and analyze an image...
alz_vid_btn = Button(frm_anl_box, text = 'Ananlyze Video', command = vid_anl, bg = 'light green').grid(row = 2, column = 3, sticky = 'ew');
'''_____________________________Info._Buttons_image_and_video____________________________________'''
#Button for showing image information.......
shw_img_inf = Button(frm_anl_box, text = 'Show Image with Analysis', bg = 'yellow', command = img_inf).grid(row = 3, column = 0, sticky = 'ew');
#Button for showing video information.......
# NOTE(review): this button has no command= callback wired up yet.
shw_vid_inf = Button(frm_anl_box, text = 'Show Frames with Aanalysis', bg = 'yellow').grid(row = 3, column = 3, sticky = 'ew');
##print(root.children);
##print(root._windowingsystem);
#Empty Labels to create gaps between connection button and text_box...
for i in range(2):
    uni_lbl = Label(frm_con, background = bgcolor, ).grid(row = i, column = 0, sticky = 'ew');
#Button to make connection through Clarifai API client....;
clf_con_txt = StringVar()
clf_con_txt.set('Connect To Clarifai');
clf_con_btn = Button(frm_con, textvariable = clf_con_txt, command = mak_con, bg = 'red')
clf_con_btn.grid(row = 4, column = 1, sticky = 'nsew');
| mit |
buzzfeed/facepy | tests/test_signed_request.py | 6 | 5969 | """Tests for the ``signed_request`` module."""
from datetime import datetime, timedelta
from nose.tools import *
from facepy import SignedRequest
# OAuth access token embedded in the fixtures below.
TEST_ACCESS_TOKEN = '181259711925270|1570a553ad6605705d1b7a5f.1-499729129|8XqMRhCWDKtpG-i_zRkHBDSsqqk'
# A valid signed request: base64url(HMAC-SHA256 signature) '.' base64url(payload).
TEST_SIGNED_REQUEST = u'' \
    'mnrG8Wc9CH_rh-GCqq97GFAPOh6AY7cMO8IYVKb6Pa4.eyJhbGdvcml0aG0iOi' \
    'JITUFDLVNIQTI1NiIsImV4cGlyZXMiOjAsImlzc3VlZF9hdCI6MTMwNjE3OTkw' \
    'NCwib2F1dGhfdG9rZW4iOiIxODEyNTk3MTE5MjUyNzB8MTU3MGE1NTNhZDY2MD' \
    'U3MDVkMWI3YTVmLjEtNDk5NzI5MTI5fDhYcU1SaENXREt0cEctaV96UmtIQkRT' \
    'c3FxayIsInVzZXIiOnsiY291bnRyeSI6Im5vIiwibG9jYWxlIjoiZW5fVVMiLC' \
    'JhZ2UiOnsibWluIjoyMX19LCJ1c2VyX2lkIjoiNDk5NzI5MTI5In0'
# Same payload shape but with "algorithm": "UNKNOWN_ALGORITHM" so that
# parsing must be rejected.
TEST_SIGNED_REQUEST_UNKNOWN_ALGORITHM = u'' \
    'HjPZBDNttKrX_DBxH-fD78wmqP5O7eDcvjE9ToayKb0=.eyJ1c2VyX2lkIjoiN' \
    'Dk5NzI5MTI5IiwiYWxnb3JpdGhtIjoiVU5LTk9XTl9BTEdPUklUSE0iLCJleHB' \
    'pcmVzIjowLCJvYXV0aF90b2tlbiI6IjE4MTI1OTcxMTkyNTI3MHwxNTcwYTU1M' \
    '2FkNjYwNTcwNWQxYjdhNWYuMS00OTk3MjkxMjl8OFhxTVJoQ1dES3RwRy1pX3p' \
    'Sa0hCRFNzcXFrIiwidXNlciI6eyJsb2NhbGUiOiJlbl9VUyIsImNvdW50cnkiO' \
    'iJubyIsImFnZSI6eyJtYXgiOjk5LCJtaW4iOjIxfX0sImlzc3VlZF9hdCI6MTM' \
    'wNjE3OTkwNH0='
# Valid signed request whose payload contains an empty "page" object.
TEST_SIGNED_REQUEST_MISSING_PAGE_DATA = u'' \
    '9B19RL7tj3nvf_SA8_PSFxTZxc7xA3LEjl2ww-OGRlk=.eyJ1c2VyX2lkIjoiN' \
    'Dk5NzI5MTI5IiwiYWxnb3JpdGhtIjoiSE1BQy1TSEEyNTYiLCJleHBpcmVzIjo' \
    'wLCJvYXV0aF90b2tlbiI6IjE4MTI1OTcxMTkyNTI3MHwxNTcwYTU1M2FkNjYwN' \
    'TcwNWQxYjdhNWYuMS00OTk3MjkxMjl8OFhxTVJoQ1dES3RwRy1pX3pSa0hCRFN' \
    'zcXFrIiwidXNlciI6eyJsb2NhbGUiOiJlbl9VUyIsImNvdW50cnkiOiJubyIsI' \
    'mFnZSI6eyJtYXgiOjk5LCJtaW4iOjIxfX0sImlzc3VlZF9hdCI6MTMwNjE3OTk' \
    'wNCwicGFnZSI6e319'
# App secret used to sign the fixtures above.
TEST_FACEBOOK_APPLICATION_SECRET_KEY = '214e4cb484c28c35f18a70a3d735999b'
def test_parse_signed_request():
    """SignedRequest.parse returns the decoded payload as a plain dict."""
    signed_request = SignedRequest.parse(
        signed_request=TEST_SIGNED_REQUEST,
        application_secret_key=TEST_FACEBOOK_APPLICATION_SECRET_KEY
    )
    assert signed_request == {
        'user_id': '499729129',
        'algorithm': 'HMAC-SHA256',
        'expires': 0,
        'oauth_token': '181259711925270|1570a553ad6605705d1b7a5f.1-499729129|8XqMRhCWDKtpG-i_zRkHBDSsqqk',
        'user': {
            'locale': 'en_US',
            'country': 'no',
            'age': {'min': 21}
        },
        'issued_at': 1306179904
    }
def test_parse_invalid_signed_request():
    """Constructing a SignedRequest from malformed input raises Error."""
    assert_raises(
        SignedRequest.Error,
        SignedRequest,
        signed_request="<invalid signed request>",
        application_secret_key=TEST_FACEBOOK_APPLICATION_SECRET_KEY
    )
def test_initialize_signed_request():
    """The constructor exposes the user, token and raw payload."""
    signed_request = SignedRequest(
        signed_request=TEST_SIGNED_REQUEST,
        application_secret_key=TEST_FACEBOOK_APPLICATION_SECRET_KEY
    )
    assert signed_request.user.id == '499729129'
    assert signed_request.user.oauth_token.token == TEST_ACCESS_TOKEN
    # 'expires': 0 in the payload is surfaced as "no expiry".
    assert signed_request.user.oauth_token.expires_at is None
    assert signed_request.raw == {
        'user_id': '499729129',
        'algorithm': 'HMAC-SHA256',
        'expires': 0,
        'oauth_token': '181259711925270|1570a553ad6605705d1b7a5f.1-499729129|8XqMRhCWDKtpG-i_zRkHBDSsqqk',
        'user': {
            'locale': 'en_US',
            'country': 'no',
            'age': {'min': 21}
        },
        'issued_at': 1306179904
    }
def test_signed_request_missing_page_data():
    """An empty "page" object in the payload must not raise KeyError."""
    try:
        SignedRequest(TEST_SIGNED_REQUEST_MISSING_PAGE_DATA, TEST_FACEBOOK_APPLICATION_SECRET_KEY)
    except KeyError:
        raise AssertionError('Missing page data in signed request')
def test_signed_request_page_url():
    """A Page's url is derived from its id."""
    assert SignedRequest.Page(id=1).url == 'http://facebook.com/1'
def test_signed_request_user_profile_url():
    """A User's profile_url is derived from its id."""
    assert SignedRequest.User(id=1).profile_url == 'http://facebook.com/1'
def test_signed_request_user_has_authorized_application():
    """has_authorized_application mirrors the presence of an OAuth token."""
    oauth_token = SignedRequest.User.OAuthToken(
        token='<token>',
        issued_at=datetime.now(),
        expires_at=None
    )
    user = SignedRequest.User(id=1, oauth_token=oauth_token)
    assert user.has_authorized_application is True
    user = SignedRequest.User(id=1, oauth_token=None)
    assert user.has_authorized_application is False
def test_signed_request_user_oauth_token_has_expired():
    """has_expired: False for no expiry or a future expiry, True for past."""
    today = datetime.now()
    yesterday = today - timedelta(days=1)
    tomorrow = today + timedelta(days=1)
    # expires_at=None means the token never expires.
    oauth_token = SignedRequest.User.OAuthToken(
        token='<token>',
        issued_at=yesterday,
        expires_at=None,
    )
    assert oauth_token.has_expired is False
    # Expiry in the future: not expired.
    oauth_token = SignedRequest.User.OAuthToken(
        token='<token>',
        issued_at=yesterday,
        expires_at=tomorrow
    )
    assert oauth_token.has_expired is False
    # Expiry in the past: expired.
    oauth_token = SignedRequest.User.OAuthToken(
        token='<token>',
        issued_at=yesterday,
        expires_at=yesterday
    )
    assert oauth_token.has_expired is True
def test_generate_signed_request():
    """Generating a signed request from a parsed one yields a value.

    The original test discarded the result of generate(), so a broken
    implementation returning None would still have passed; at minimum
    assert that a non-empty value comes back.
    """
    signed_request = SignedRequest(
        signed_request=TEST_SIGNED_REQUEST,
        application_secret_key=TEST_FACEBOOK_APPLICATION_SECRET_KEY
    )
    assert signed_request.generate()
def test_parse_signed_request_unknown_algorithm():
    """Payloads signed with an unsupported algorithm are rejected."""
    assert_raises(
        SignedRequest.Error,
        SignedRequest.parse,
        signed_request=TEST_SIGNED_REQUEST_UNKNOWN_ALGORITHM,
        application_secret_key=TEST_FACEBOOK_APPLICATION_SECRET_KEY
    )
def test_parse_signed_request_incorrect_signature():
encoded_signature, _ = (str(string) for string in TEST_SIGNED_REQUEST_UNKNOWN_ALGORITHM.split('.', 2))
_, encoded_payload = (str(string) for string in TEST_SIGNED_REQUEST.split('.', 2))
assert_raises(
SignedRequest.Error,
SignedRequest.parse,
signed_request=u"%s.%s" % (encoded_signature, encoded_payload),
application_secret_key=TEST_FACEBOOK_APPLICATION_SECRET_KEY
)
| mit |
daniel-leschkowski/generateDSv2 | django/gends_generate_django.py | 10 | 3748 | #!/usr/bin/env python
"""
Synopsis:
Generate Django model and form definitions.
Write to forms.py and models.py.
Usage:
    python gends_generate_django.py [options] <module_name>
Options:
-f, --force
Overwrite models.py and forms.py without asking.
-h, --help
Show this help message.
"""
import sys
import os
import getopt
import importlib
#import nexmllib as supermod
#
# Globals
#
# The generateDS-produced module to introspect; set by generate_model()
# via importlib from the command-line module name.
supermod = None
#
# Classes
#
class ProgramOptions(object):
    """Mutable holder for parsed command-line options."""
    def get_force_(self):
        """Getter backing the ``force`` property."""
        return self.force_
    def set_force_(self, force):
        """Setter backing the ``force`` property."""
        self.force_ = force
    force = property(get_force_, set_force_,
                     doc="Overwrite generated files without asking.")
class Writer(object):
    """Write text to a file, optionally echoing to stdout, counting newlines.

    The running newline count (get_count) is used by the caller to report
    how many lines were generated.
    """
    def __init__(self, outfilename, stdout_also=False):
        self.outfilename = outfilename
        self.outfile = open(outfilename, 'w')
        self.stdout_also = stdout_also
        self.line_count = 0
    def get_count(self):
        """Return the number of newline characters written so far."""
        return self.line_count
    def write(self, content):
        """Append *content* to the file (and to stdout when enabled)."""
        self.outfile.write(content)
        if self.stdout_also:
            sys.stdout.write(content)
        self.line_count += content.count('\n')
    def close(self):
        """Close the underlying output file."""
        self.outfile.close()
#
# Functions
#
def generate_model(options, module_name):
    """Generate models.py, forms.py and admin.py from a generateDS module.

    module_name: import path of the module produced by generateDS; every
    class listed in its __all__ must provide generate_model_(wrtmodels,
    wrtforms).  Refuses to overwrite existing output files unless
    options.force is set.
    """
    global supermod
    supermod = importlib.import_module(module_name)
    models_file_name = 'models.py'
    forms_file_name = 'forms.py'
    admin_file_name = 'admin.py'
    if ( (os.path.exists(models_file_name) or
        os.path.exists(forms_file_name) or
        os.path.exists(admin_file_name)
        )
        and not options.force):
        sys.stderr.write('\nmodels.py or forms.py or admin.py exists.  Use -f/--force to overwrite.\n\n')
        sys.exit(1)
    # NOTE(review): globals_dict is never used below.
    globals_dict = globals()
    models_writer = Writer(models_file_name)
    forms_writer = Writer(forms_file_name)
    admin_writer = Writer(admin_file_name)
    wrtmodels = models_writer.write
    wrtforms = forms_writer.write
    wrtadmin = admin_writer.write
    wrtmodels('from django.db import models\n\n')
    wrtforms('from django import forms\n\n')
    # Each generated class emits its own model and form definitions.
    for class_name in supermod.__all__:
        if hasattr(supermod, class_name):
            cls = getattr(supermod, class_name)
            cls.generate_model_(wrtmodels, wrtforms)
        else:
            sys.stderr.write('class %s not defined\n' % (class_name, ))
    # admin.py: import every generated model, then register each one.
    wrtadmin('from django.contrib import admin\n')
    wrtadmin('from models import \\\n')
    first_time = True
    for class_name in supermod.__all__:
        if first_time:
            wrtadmin('    %s_model' % (class_name, ))
            first_time = False
        else:
            wrtadmin(', \\\n    %s_model' % (class_name, ))
    wrtadmin('\n\n')
    for class_name in supermod.__all__:
        wrtadmin('admin.site.register(%s_model)\n' % (class_name, ))
    wrtadmin('\n')
    models_writer.close()
    forms_writer.close()
    admin_writer.close()
    print 'Wrote %d lines to models.py' % (models_writer.get_count(), )
    print 'Wrote %d lines to forms.py' % (forms_writer.get_count(), )
    print 'Wrote %d lines to admin.py' % (admin_writer.get_count(), )
# Usage help is the module docstring (defined above this chunk).
USAGE_TEXT = __doc__
def usage():
    # Print the usage text and exit with an error status.
    print USAGE_TEXT
    sys.exit(1)
def main():
    """Command-line entry point: parse options, then generate the
    models/forms/admin files for the given superclass module.

    Exits via usage() on bad options or a missing module-name argument.
    """
    args = sys.argv[1:]
    try:
        opts, args = getopt.getopt(args, 'hfs:', ['help', 'force',
            'suffix=', ])
    except getopt.GetoptError:
        # Catch only getopt failures; the original bare "except:" also
        # swallowed SystemExit/KeyboardInterrupt.
        usage()
    options = ProgramOptions()
    options.force = False
    for opt, val in opts:
        if opt in ('-h', '--help'):
            usage()
        elif opt in ('-f', '--force'):
            options.force = True
    if len(args) != 1:
        usage()
    module_name = args[0]
    generate_model(options, module_name)
if __name__ == '__main__':
    # Run the CLI only when executed as a script, not on import.
    #import pdb; pdb.set_trace()
    main()
| mit |
cs2c-zhangchao/nkwin1.0-anaconda | pyanaconda/ui/gui/spokes/network.py | 1 | 63351 | # Network configuration spoke classes
#
# Copyright (C) 2011 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Radek Vykydal <rvykydal@redhat.com>
#
# TODO:
# - move callback connection to initialize?
# - Automatically reconnecting wifi after failure
# https://bugzilla.redhat.com/show_bug.cgi?id=712778#c1
# - callback on NM_CLIENT_ACTIVE_CONNECTIONS
# - support connection to hidden network (ap-other)
# - NMClient.CLIENT_WIRELESS_ENABLED callback (hw switch?) - test
# - nm-c-e run: blocking? logging?
from gi.repository import Gtk, AnacondaWidgets
from pyanaconda import flags
from pyanaconda.i18n import _, N_, P_
from pyanaconda import constants
from pyanaconda.ui.communication import hubQ
from pyanaconda.ui.gui import GUIObject
from pyanaconda.ui.gui.spokes import NormalSpoke, StandaloneSpoke
from pyanaconda.ui.gui.categories.software import SoftwareCategory
from pyanaconda.ui.gui.hubs.summary import SummaryHub
from pyanaconda.ui.gui.utils import gtk_call_once, enlightbox
from pyanaconda import network
from pyanaconda.nm import nm_activated_devices, nm_device_setting_value
from gi.repository import GLib, GObject, Pango, Gio, NetworkManager, NMClient
import dbus
import dbus.service
import socket
import subprocess
import struct
import time
import string
from dbus.mainloop.glib import DBusGMainLoop
dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
import ctypes
ctypes.cdll.LoadLibrary("libnm-util.so.2")
nm_utils = ctypes.CDLL("libnm-util.so.2")
import logging
log = logging.getLogger("anaconda")
# These are required for dbus API use we need because of
# NM_GI_BUGS: 767998, 773678
NM_SERVICE = "org.freedesktop.NetworkManager"
NM_802_11_AP_FLAGS_PRIVACY = 0x1
NM_802_11_AP_SEC_NONE = 0x0
NM_802_11_AP_SEC_KEY_MGMT_802_1X = 0x200
DBUS_PROPS_IFACE = "org.freedesktop.DBus.Properties"
SECRET_AGENT_IFACE = 'org.freedesktop.NetworkManager.SecretAgent'
AGENT_MANAGER_IFACE = 'org.freedesktop.NetworkManager.AgentManager'
AGENT_MANAGER_PATH = "/org/freedesktop/NetworkManager/AgentManager"
def getNMObjProperty(object, nm_iface_suffix, property):
    """Read a D-Bus property of a NetworkManager object.

    *nm_iface_suffix* is appended to the NM interface prefix, e.g.
    ".AccessPoint" -> "org.freedesktop.NetworkManager.AccessPoint".
    """
    iface_name = "org.freedesktop.NetworkManager" + nm_iface_suffix
    properties = dbus.Interface(object, DBUS_PROPS_IFACE)
    return properties.Get(iface_name, property)
# Column indices in liststore_devices (see _add_device_columns and
# add_device_to_list): 2 = markup title, 3 = the NMDevice object itself.
DEVICES_COLUMN_TITLE = 2
DEVICES_COLUMN_OBJECT = 3
def localized_string_of_device_state(device, state):
    """Return a translated, human-readable description of *state*.

    *device* is consulted only for the UNAVAILABLE state, to distinguish
    missing firmware and an unplugged ethernet cable.
    """
    msg = _("Status unknown (missing)")
    if state == NetworkManager.DeviceState.UNKNOWN:
        msg = _("Status unknown")
    elif state == NetworkManager.DeviceState.UNMANAGED:
        msg = _("Unmanaged")
    elif state == NetworkManager.DeviceState.UNAVAILABLE:
        if device.get_firmware_missing():
            msg = _("Firmware missing")
        elif (device.get_device_type() == NetworkManager.DeviceType.ETHERNET
              and not device.get_carrier()):
            msg = _("Cable unplugged")
        else:
            msg = _("Unavailable")
    elif state == NetworkManager.DeviceState.DISCONNECTED:
        msg = _("Disconnected")
    elif state in (NetworkManager.DeviceState.PREPARE,
                   NetworkManager.DeviceState.CONFIG,
                   NetworkManager.DeviceState.IP_CONFIG,
                   NetworkManager.DeviceState.IP_CHECK):
        msg = _("Connecting")
    elif state == NetworkManager.DeviceState.NEED_AUTH:
        msg = _("Authentication required")
    elif state == NetworkManager.DeviceState.ACTIVATED:
        msg = _("Connected")
    elif state == NetworkManager.DeviceState.DEACTIVATING:
        msg = _("Disconnecting")
    elif state == NetworkManager.DeviceState.FAILED:
        msg = _("Connection failed")
    return msg
# Allow opening nm-connection-editor for devices that are not activated.
configuration_of_disconnected_devices_allowed = True
# it is not in gnome-control-center but it makes sense
# for installer
# https://bugzilla.redhat.com/show_bug.cgi?id=704119

__all__ = ["NetworkSpoke", "NetworkStandaloneSpoke"]
class CellRendererSignal(Gtk.CellRendererPixbuf):
    """Cell renderer mapping a 0-100 "signal" value to a wifi strength icon."""

    __gtype_name__ = "CellRendererSignal"
    __gproperties__ = {
        "signal": (GObject.TYPE_UINT,
                   "Signal", "Signal",
                   0, GObject.G_MAXUINT, 0,
                   GObject.PARAM_READWRITE),
    }

    def __init__(self):
        Gtk.CellRendererPixbuf.__init__(self)
        self.signal = 0

    def do_get_property(self, property):
        if property.name == 'signal':
            return self.signal
        else:
            # instance-style raise works on both Python 2 and 3
            raise AttributeError('unknown property %s' % property.name)

    def do_set_property(self, property, value):
        if property.name == 'signal':
            self.signal = value
            self._set_icon_name(value)
        else:
            raise AttributeError('unknown property %s' % property.name)

    def _set_icon_name(self, value):
        # A strength of 0 means "no icon": clear the gicon and stop.
        # BUG FIX: without the return, the cleared gicon was immediately
        # overwritten by the "signal-none" icon chosen below.
        if value == 0:
            self.set_property("gicon", None)
            return

        if value < 20:
            icon_name = "network-wireless-signal-none-symbolic"
        elif value < 40:
            icon_name = "network-wireless-signal-weak-symbolic"
        elif value < 50:
            icon_name = "network-wireless-signal-ok-symbolic"
        elif value < 80:
            icon_name = "network-wireless-signal-good-symbolic"
        else:
            icon_name = "network-wireless-signal-excellent-symbolic"

        icon = Gio.ThemedIcon.new_with_default_fallbacks(icon_name)
        self.set_property("gicon", icon)
# Internal access-point security classification values
# (see CellRendererSecurity and _ap_security_dbus).
NM_AP_SEC_UNKNOWN = 0
NM_AP_SEC_NONE = 1
NM_AP_SEC_WEP = 2
NM_AP_SEC_WPA = 3
NM_AP_SEC_WPA2 = 4
class CellRendererSecurity(Gtk.CellRendererPixbuf):
    """Cell renderer mapping an NM_AP_SEC_* value to a padlock icon."""

    __gtype_name__ = "CellRendererSecurity"
    __gproperties__ = {
        "security": (GObject.TYPE_UINT,
                     "Security", "Security",
                     0, GObject.G_MAXUINT, 0,
                     GObject.PARAM_READWRITE),
    }

    def __init__(self):
        Gtk.CellRendererPixbuf.__init__(self)
        self.security = NM_AP_SEC_UNKNOWN
        self.icon_name = ""

    def do_get_property(self, property):
        if property.name == 'security':
            return self.security
        else:
            # instance-style raise works on both Python 2 and 3
            raise AttributeError('unknown property %s' % property.name)

    def do_set_property(self, property, value):
        if property.name == 'security':
            self.security = value
            self._set_icon_name(value)
        else:
            raise AttributeError('unknown property %s' % property.name)

    def _set_icon_name(self, security):
        # Show the encrypted icon for any known, non-open security type.
        self.icon_name = ""
        if security not in (NM_AP_SEC_NONE, NM_AP_SEC_UNKNOWN):
            self.icon_name = "network-wireless-encrypted-symbolic"

        self.set_property("icon-name", self.icon_name)
class NetworkControlBox(object):
    """Builds and drives the network-configuration UI (device list,
    per-device pages, wireless AP chooser) on top of NMClient."""

    # Device types this UI knows how to display and configure.
    supported_device_types = [
        NetworkManager.DeviceType.ETHERNET,
        NetworkManager.DeviceType.WIFI,
        NetworkManager.DeviceType.BOND,
        NetworkManager.DeviceType.VLAN,
    ]
    def __init__(self, builder, spoke=None):
        """Wire widgets and NMClient signal handlers together.

        :param builder: Gtk.Builder holding the network UI definition
        :param spoke: owning spoke (used as transient parent for dialogs)
        """
        self.builder = builder
        # Popen handle of a running nm-connection-editor child, if any.
        self._running_nmce = None
        self.spoke = spoke

        # button for creating of virtual bond and vlan devices
        self.builder.get_object("add_toolbutton").set_sensitive(True)
        self.builder.get_object("add_toolbutton").connect("clicked",
                                                          self.on_add_device_clicked)
        self.builder.get_object("remove_toolbutton").set_sensitive(False)

        # Hotspot widgets are not supported in the installer; hide them.
        not_supported = ["start_hotspot_button",
                         "stop_hotspot_button",
                         "heading_hotspot_network_name",
                         "heading_hotspot_security_key",
                         "label_hotspot_network_name",
                         "label_hotspot_security_key",
                         "hbox54",
                        ]

        # Widgets shown/hidden dynamically by the refresh methods.
        do_not_show_in_refresh = ["heading_wireless_network_name",
                                  "combobox_wireless_network_name"]
        do_not_show_in_refresh += ["%s_%s_%s" % (widget, type, value)
                                   for widget in ["heading", "label"]
                                   for type in ["wired", "wireless"]
                                   for value in ["ipv4", "ipv6", "dns", "route"]]
        do_not_show_in_refresh += ["%s_wired_%s" % (widget, value)
                                   for widget in ["heading", "label"]
                                   for value in ["slaves", "vlanid", "parent"]]

        for id in not_supported + do_not_show_in_refresh:
            self.builder.get_object(id).set_no_show_all(True)
            self.builder.get_object(id).hide()

        self.builder.get_object("notebook_types").set_show_tabs(False)

        # to prevent UI update signals races
        self._updating_device = False

        self.client = NMClient.Client.new()
        self.remote_settings = NMClient.RemoteSettings()

        # devices list
        # limited to wired and wireless
        treeview = self.builder.get_object("treeview_devices")
        self._add_device_columns(treeview)
        devices_store = self.builder.get_object("liststore_devices")
        devices_store.set_sort_column_id(2, Gtk.SortType.ASCENDING)
        selection = treeview.get_selection()
        selection.set_mode(Gtk.SelectionMode.BROWSE)
        selection.connect("changed", self.on_device_selection_changed)

        # wireless APs list
        combobox = self.builder.get_object("combobox_wireless_network_name")
        self._add_ap_icons(combobox)
        model = combobox.get_model()
        model.set_sort_column_id(2, Gtk.SortType.ASCENDING)
        combobox.connect("changed", self.on_wireless_ap_changed_cb)
        # ssid of the AP the user last chose in the combobox
        self.selected_ssid = None

        # NM Client
        self.client.connect("device-added", self.on_device_added)
        self.client.connect("device-removed", self.on_device_removed)

        self.builder.get_object("device_wired_off_switch").connect("notify::active",
                                                             self.on_device_off_toggled)
        self.builder.get_object("device_wireless_off_switch").connect("notify::active",
                                                             self.on_device_off_toggled)

        self.client.connect("notify::%s" % NMClient.CLIENT_WIRELESS_ENABLED,
                            self.on_wireless_enabled)

        self.builder.get_object("button_wired_options").connect("clicked",
                                                           self.on_edit_connection)
        self.builder.get_object("button_wireless_options").connect("clicked",
                                                               self.on_edit_connection)
        self.entry_hostname = self.builder.get_object("entry_hostname")
@property
def vbox(self):
return self.builder.get_object("networkControlBox_vbox")
def _add_ap_icons(self, combobox):
cell = CellRendererSecurity()
cell.set_padding(4, 0)
combobox.pack_start(cell, False)
combobox.add_attribute(cell, "security", 5)
cell = CellRendererSignal()
cell.set_padding(4, 0)
#cell.set_property("xalign", 1.0)
combobox.pack_start(cell, False)
combobox.add_attribute(cell, "signal", 3)
def _add_device_columns(self, treeview):
rnd = Gtk.CellRendererPixbuf()
rnd.set_property("stock-size", Gtk.IconSize.DND)
# TODO Gtk3 icon-name? (also at other places)
col = Gtk.TreeViewColumn("Icon", rnd, **{"icon-name":0})
treeview.append_column(col)
rnd = Gtk.CellRendererText()
rnd.set_property("wrap-mode", Pango.WrapMode.WORD)
col = Gtk.TreeViewColumn("Text", rnd, markup=2)
col.set_sort_column_id(2)
col.set_expand(True)
treeview.append_column(col)
def initialize(self):
for device in self.client.get_devices():
self.add_device_to_list(device)
treeview = self.builder.get_object("treeview_devices")
devices_store = self.builder.get_object("liststore_devices")
selection = treeview.get_selection()
itr = devices_store.get_iter_first()
if itr:
selection.select_iter(itr)
def refresh(self):
device = self.selected_device()
self.refresh_ui(device)
    def activated_connections(self):
        """Return a list of (dev_name, dev_type, dev_info) triples for all
        activated connections.

        dev_info is the ssid for wifi, the slave iface list for bonds,
        [parent, vlan_id] for vlans, and None for plain ethernet.
        """
        active_devs = []
        for con in self.client.get_active_connections():
            if con.get_state() != NetworkManager.ActiveConnectionState.ACTIVATED:
                continue
            device = con.get_devices()[0]
            dev_type, dev_name, dev_info = device.get_device_type(), None, None
            if dev_type == NetworkManager.DeviceType.ETHERNET:
                dev_name = device.get_iface()
            elif dev_type == NetworkManager.DeviceType.WIFI:
                # A wifi device without an active AP is skipped (dev_name
                # stays None).
                ap = device.get_active_access_point()
                if ap:
                    dev_name = device.get_iface()
                    dev_info = ap.get_ssid()
            elif dev_type == NetworkManager.DeviceType.BOND:
                dev_name = device.get_iface()
                dev_info = [d.get_iface() for d in device.get_slaves()]
            elif dev_type == NetworkManager.DeviceType.VLAN:
                dev_name = device.get_iface()
                parent = nm_device_setting_value(dev_name, "vlan", "parent")
                dev_info = [parent, str(device.get_vlan_id())]
            if dev_name:
                active_devs.append((dev_name, dev_type, dev_info))
        return active_devs
# Signal handlers.
def on_device_selection_changed(self, *args):
device = self.selected_device()
if not device:
return
log.debug("network: selected device %s" % device.get_iface())
self.refresh_ui(device)
def on_device_state_changed(self, *args):
device = args[0]
new_state = args[1]
if new_state == NetworkManager.DeviceState.SECONDARIES:
return
self._refresh_carrier_info()
read_config_values = (new_state == NetworkManager.DeviceState.ACTIVATED)
if device == self.selected_device():
self.refresh_ui(device, read_config_values, new_state)
def on_wireless_ap_changed_cb(self, combobox, *args):
if self._updating_device:
return
iter = combobox.get_active_iter()
if not iter:
return
device = self.selected_device()
ap_obj_path, ssid_target = combobox.get_model().get(iter, 0, 1)
self.selected_ssid = ssid_target
if ap_obj_path == "ap-other...":
return
log.info("network: access point changed: %s" % ssid_target)
con = self.find_connection_for_device(device, ssid_target)
if con:
self.client.activate_connection(con, device,
None, None, None)
else:
self.client.add_and_activate_connection(None, device, ap_obj_path,
None, None)
    def on_device_added(self, client, device, *args):
        # NMClient "device-added" handler: show the new device in the list.
        self.add_device_to_list(device)

    def on_device_removed(self, client, device, *args):
        # NMClient "device-removed" handler: drop the device from the list.
        self.remove_device(device)
    def on_edit_connection(self, *args):
        """Launch nm-connection-editor for the selected device's connection.

        For a wifi connection that is not yet activated, schedule its
        activation once the editor exits (the user presumably entered
        secrets there).
        """
        device = self.selected_device()
        if not device:
            return

        con = self.find_active_connection_for_device(device)
        ssid = None
        if not con and configuration_of_disconnected_devices_allowed:
            # No active connection: fall back to a stored one matching the
            # device (matched by selected ssid for wifi).
            if device.get_device_type() == NetworkManager.DeviceType.WIFI:
                ssid = self.selected_ssid
            con = self.find_connection_for_device(device, ssid)
        if con:
            uuid = con.get_uuid()
        else:
            return

        # 871132 auto activate wireless connection after editing if it is not
        # already activated (assume entering secrets)
        activate = None
        if (device.get_device_type() == NetworkManager.DeviceType.WIFI and ssid
            and (device.get_iface(), NetworkManager.DeviceType.WIFI, ssid) not in self.activated_connections()):
            activate = (con, device)

        log.info("network: configuring connection %s device %s ssid %s" % (uuid, device.get_iface(), ssid))
        self.kill_nmce(msg="Configure button clicked")
        proc = subprocess.Popen(["nm-connection-editor", "--edit", "%s" % uuid])
        self._running_nmce = proc

        GLib.child_watch_add(proc.pid, self.on_nmce_exited, activate)
def kill_nmce(self, msg=""):
if not self._running_nmce:
return False
log.debug("network: killing running nm-c-e %s: %s"
% (self._running_nmce.pid, msg))
self._running_nmce.kill()
self._running_nmce = None
return True
    def on_nmce_exited(self, pid, condition, activate):
        """Child-watch callback for an "edit connection" nm-c-e run.

        On a normal exit, forget the child handle and, if *activate* is a
        (connection, device) pair, activate it from the Gtk main loop.
        """
        # nm-c-e was closed normally, not killed by anaconda
        if condition == 0:
            if self._running_nmce and self._running_nmce.pid == pid:
                self._running_nmce = None

            if activate:
                con, device = activate
                gtk_call_once(self._activate_connection_cb, con, device)
            network.logIfcfgFiles("nm-c-e run")
    def _activate_connection_cb(self, con, device):
        # Deferred (main-loop) connection activation; see on_nmce_exited.
        self.client.activate_connection(con, device,
                                        None, None, None)
def on_wireless_enabled(self, *args):
switch = self.builder.get_object("device_wireless_off_switch")
self._updating_device = True
switch.set_active(self.client.wireless_get_enabled())
self._updating_device = False
    def on_device_off_toggled(self, switch, *args):
        """Handle the per-device ON/OFF switch.

        Wired-like devices (ethernet/bond/vlan) are (de)activated
        directly; for wifi the global NM wireless-enabled flag is flipped
        instead.
        """
        if self._updating_device:
            # Change came from refresh code, not from the user.
            return

        active = switch.get_active()
        device = self.selected_device()

        log.info("network: device %s switched %s" %
                 (device.get_iface(), "on" if active else "off"))

        dev_type = device.get_device_type()
        if dev_type in (NetworkManager.DeviceType.ETHERNET,
                        NetworkManager.DeviceType.BOND,
                        NetworkManager.DeviceType.VLAN):
            if active:
                # Prefer an existing connection for this device; otherwise
                # let NM create a default one.
                cons = self.remote_settings.list_connections()
                dev_cons = device.filter_connections(cons)
                if dev_cons:
                    self.client.activate_connection(dev_cons[0], device,
                                                    None, None, None)
                else:
                    self.client.add_and_activate_connection(None, device, None,
                                                            None, None)
            else:
                device.disconnect(None, None)
        elif dev_type == NetworkManager.DeviceType.WIFI:
            self.client.wireless_set_enabled(active)
def on_add_device_clicked(self, *args):
dialog = self.builder.get_object("add_device_dialog")
if self.spoke:
dialog.set_transient_for(self.spoke.window)
rc = dialog.run()
dialog.hide()
if rc == 1:
ai = self.builder.get_object("combobox_add_device").get_active_iter()
model = self.builder.get_object("liststore_add_device")
dev_type = model[ai][1]
self.add_device(dev_type)
def add_device(self, type):
log.info("network: adding device of type %s" % type)
self.kill_nmce(msg="Add device button clicked")
proc = subprocess.Popen(["nm-connection-editor", "--create", "--type=%s" % type])
self._running_nmce = proc
GLib.child_watch_add(proc.pid, self.on_nmce_adding_exited)
def on_nmce_adding_exited(self, pid, condition):
if condition == 0:
if self._running_nmce and self._running_nmce.pid == pid:
self._running_nmce = None
network.logIfcfgFiles("nm-c-e run")
def selected_device(self):
selection = self.builder.get_object("treeview_devices").get_selection()
(model, iter) = selection.get_selected()
if not iter:
return None
return model.get(iter, DEVICES_COLUMN_OBJECT)[0]
    def find_connection_for_device(self, device, ssid=None):
        """Find a stored (remote settings) connection matching *device*.

        The matching key depends on the device type: MAC address for
        ethernet, *ssid* for wifi, virtual iface name for bond, interface
        name for vlan.  Returns the connection or None.
        """
        dev_type = device.get_device_type()
        cons = self.remote_settings.list_connections()
        for con in cons:
            con_type = con.get_setting_connection().get_connection_type()
            if dev_type == NetworkManager.DeviceType.ETHERNET:
                if con_type != NetworkManager.SETTING_WIRED_SETTING_NAME:
                    continue
                settings = con.get_setting_wired()
                # NOTE(review): get_mac_address() presumably can be None for
                # wired connections not bound to a MAC -- confirm; that
                # would raise TypeError here.
                con_hwaddr = ":".join("%02X" % ord(bytechar)
                                      for bytechar in settings.get_mac_address())
                if con_hwaddr == device.get_hw_address():
                    return con
            elif dev_type == NetworkManager.DeviceType.WIFI:
                if con_type != NetworkManager.SETTING_WIRELESS_SETTING_NAME:
                    continue
                settings = con.get_setting_wireless()
                if ssid == settings.get_ssid():
                    return con
            elif dev_type == NetworkManager.DeviceType.BOND:
                if con_type != NetworkManager.SETTING_BOND_SETTING_NAME:
                    continue
                settings = con.get_setting_bond()
                if device.get_iface() == settings.get_virtual_iface_name():
                    return con
            elif dev_type == NetworkManager.DeviceType.VLAN:
                if con_type != NetworkManager.SETTING_VLAN_SETTING_NAME:
                    continue
                settings = con.get_setting_vlan()
                if device.get_iface() == settings.get_interface_name():
                    return con
            else:
                # NOTE(review): for any other device type this returns None
                # on the very first iteration -- the else belongs to the
                # if/elif chain inside the loop, not to the for loop.
                return None
def find_active_connection_for_device(self, device):
cons = self.client.get_active_connections()
for con in cons:
if con.get_devices()[0] is device:
return self.remote_settings.get_connection_by_path(con.get_connection())
return None
def _device_is_stored(self, nm_device):
"""Check that device with Udi of nm_device is already in liststore"""
udi = nm_device.get_udi()
model = self.builder.get_object("liststore_devices")
for row in model:
if udi == row[DEVICES_COLUMN_OBJECT].get_udi():
return True
return False
def add_device_to_list(self, device):
if self._device_is_stored(device):
return
if device.get_device_type() not in self.supported_device_types:
return
device.connect("state-changed", self.on_device_state_changed)
self.builder.get_object("liststore_devices").append([
self._dev_icon_name(device),
self._dev_type_sort_value(device),
self._dev_title(device),
device,
])
def _dev_icon_name(self, device):
icon_name = ""
dev_type = device.get_device_type()
if dev_type == NetworkManager.DeviceType.ETHERNET:
if device.get_state() == NetworkManager.DeviceState.UNAVAILABLE:
icon_name = "network-wired-disconnected"
else:
icon_name = "network-wired"
elif dev_type == NetworkManager.DeviceType.BOND:
if device.get_state() == NetworkManager.DeviceState.UNAVAILABLE:
icon_name = "network-wired-disconnected"
else:
icon_name = "network-wired"
elif dev_type == NetworkManager.DeviceType.VLAN:
if device.get_state() == NetworkManager.DeviceState.UNAVAILABLE:
icon_name = "network-wired-disconnected"
else:
icon_name = "network-wired"
elif dev_type == NetworkManager.DeviceType.WIFI:
icon_name = "network-wireless"
return icon_name
def _dev_type_sort_value(self, device):
dev_type = device.get_device_type()
if dev_type == NetworkManager.DeviceType.ETHERNET:
str = "1"
elif dev_type == NetworkManager.DeviceType.WIFI:
str = "2"
else:
str = "3"
return str
def _dev_title(self, device):
unplugged = ''
if (device.get_state() == NetworkManager.DeviceState.UNAVAILABLE
and device.get_device_type() == NetworkManager.DeviceType.ETHERNET
and not device.get_carrier()):
# Translators: ethernet cable is unplugged
unplugged = ', <i>%s</i>' % _("unplugged")
title = '<span size="large">%s (%s%s)</span>' % (self._dev_type_str(device),
device.get_iface(),
unplugged)
title += '\n<span size="small">%s %s</span>' % (device.get_vendor() or "",
device.get_product() or "")
return title
def _dev_type_str(self, device):
dev_type = device.get_device_type()
if dev_type == NetworkManager.DeviceType.UNKNOWN:
title = _("Unknown")
elif dev_type == NetworkManager.DeviceType.ETHERNET:
title = _("Ethernet")
elif dev_type == NetworkManager.DeviceType.WIFI:
title = _("Wireless")
elif dev_type == NetworkManager.DeviceType.BOND:
title = _("Bond")
elif dev_type == NetworkManager.DeviceType.VLAN:
title = _("Vlan")
else:
title = ""
return title
def remove_device(self, device):
# This should not concern wifi and ethernet devices,
# just virtual devices e.g. vpn probably
# TODO test!, remove perhaps
model = self.builder.get_object("liststore_devices")
rows_to_remove = []
for row in model:
if (device.get_udi() == row[DEVICES_COLUMN_OBJECT].get_udi()):
rows_to_remove.append(row)
for row in rows_to_remove:
del(row)
def refresh_ui(self, device, read_config_values=True, state=None):
if not device:
notebook = self.builder.get_object("notebook_types")
notebook.set_current_page(5)
return
self._refresh_device_type_page(device)
self._refresh_header_ui(device, state)
self._refresh_slaves(device)
self._refresh_parent_vlanid(device)
self._refresh_speed_hwaddr(device, state)
self._refresh_ap(device, state)
if read_config_values:
num_of_tries = 3
else:
num_of_tries = 0
self._refresh_device_cfg((device, num_of_tries), state)
    def _refresh_device_cfg(self, dev_tries, state):
        """Fill in the IPv4/IPv6/DNS/route/netmask rows for a device.

        *dev_tries* is a (device, num_of_tries) pair: the IP config objects
        may not exist yet right after activation, so we re-schedule
        ourselves from a GLib timeout up to num_of_tries times.  Always
        returns False so a timeout invocation does not repeat on its own.
        """
        device, num_of_tries = dev_tries
        ipv4cfg = None
        ipv6cfg = None

        # We might need to wait for config objects to become available
        if num_of_tries > 0:
            ipv4cfg = device.get_ip4_config()
            ipv6cfg = device.get_ip6_config()
            if not ipv4cfg and not ipv6cfg:
                GLib.timeout_add(300,
                                 self._refresh_device_cfg,
                                 (device, num_of_tries-1),
                                 state)
                return False

        dev_type = device.get_device_type()
        if dev_type == NetworkManager.DeviceType.ETHERNET:
            dt = "wired"
        elif dev_type == NetworkManager.DeviceType.WIFI:
            dt = "wireless"
        elif dev_type == NetworkManager.DeviceType.BOND:
            dt = "wired"
        elif dev_type == NetworkManager.DeviceType.VLAN:
            dt = "wired"

        if state is None:
            state = device.get_state()

        if (ipv4cfg
            and state == NetworkManager.DeviceState.ACTIVATED):
            # Addresses come back as packed 32-bit ints; '=L' packs them in
            # native byte order for inet_ntoa.  NOTE(review): assumes NM
            # hands out addresses in native order -- confirm.
            addr = socket.inet_ntoa(struct.pack('=L',
                                                ipv4cfg.get_addresses()[0].get_address()))
            self._set_device_info_value(dt, "ipv4", addr)
            dnss = " ".join(socket.inet_ntoa(struct.pack('=L', addr))
                            for addr in ipv4cfg.get_nameservers())
            self._set_device_info_value(dt, "dns", dnss)
            gateway = socket.inet_ntoa(struct.pack('=L',
                                                   ipv4cfg.get_addresses()[0].get_gateway()))
            self._set_device_info_value(dt, "route", gateway)
            if dt == "wired":
                # Convert the prefix length to a dotted netmask via
                # libnm-util (loaded through ctypes at module level).
                prefix = ipv4cfg.get_addresses()[0].get_prefix()
                nm_utils.nm_utils_ip4_prefix_to_netmask.argtypes = [ctypes.c_uint32]
                nm_utils.nm_utils_ip4_prefix_to_netmask.restype = ctypes.c_uint32
                netmask = nm_utils.nm_utils_ip4_prefix_to_netmask(prefix)
                netmask = socket.inet_ntoa(struct.pack('=L', netmask))
                self._set_device_info_value(dt, "subnet", netmask)
        else:
            self._set_device_info_value(dt, "ipv4", None)
            self._set_device_info_value(dt, "dns", None)
            self._set_device_info_value(dt, "route", None)
            if dt == "wired":
                self._set_device_info_value(dt, "subnet", None)

        # TODO NM_GI_BUGS - segfaults on get_addres(), get_prefix()
        ipv6_addr = None
        if (ipv6cfg
            and state == NetworkManager.DeviceState.ACTIVATED):
            # Read the IPv6 address over raw D-Bus (see TODO above).
            config = dbus.SystemBus().get_object(NM_SERVICE, ipv6cfg.get_path())
            addr, prefix, gw = getNMObjProperty(config, ".IP6Config",
                                                "Addresses")[0]
            ipv6_addr = socket.inet_ntop(socket.AF_INET6, "".join(chr(byte) for byte in addr))
        self._set_device_info_value(dt, "ipv6", ipv6_addr)

        # Label the address rows depending on which families are present.
        if ipv4cfg and ipv6_addr:
            self.builder.get_object("heading_%s_ipv4" % dt).set_label(_("IPv4 Address"))
            self.builder.get_object("heading_%s_ipv6" % dt).set_label(_("IPv6 Address"))
        elif ipv4cfg:
            self.builder.get_object("heading_%s_ipv4" % dt).set_label(_("IP Address"))
        elif ipv6_addr:
            self.builder.get_object("heading_%s_ipv6" % dt).set_label(_("IP Address"))

        return False
    def _refresh_ap(self, device, state=None):
        """Refresh the security row and the list of visible access points
        for a wifi device, selecting the active AP's combobox row."""
        if device.get_device_type() != NetworkManager.DeviceType.WIFI:
            return

        if state is None:
            state = device.get_state()
        if state == NetworkManager.DeviceState.UNAVAILABLE:
            ap_str = None
        else:
            active_ap = device.get_active_access_point()
            if active_ap:
                active_ap_dbus = dbus.SystemBus().get_object(NM_SERVICE,
                                                             active_ap.get_path())
                ap_str = self._ap_security_string_dbus(active_ap_dbus)
                # TODO NM_GI_BUGS move to gi after fixed in NM
                # - NetworkManager.80211ApFlags
                # - active_ap.get_flags, get_wpa_flags, get_rsn_flags
                #ap_str = self._ap_security_string(active_ap)
            else:
                ap_str = ""

        self._set_device_info_value("wireless", "security", ap_str)

        if state == NetworkManager.DeviceState.UNAVAILABLE:
            self.builder.get_object("heading_wireless_network_name").hide()
            self.builder.get_object("combobox_wireless_network_name").hide()
        else:
            self.builder.get_object("heading_wireless_network_name").show()
            self.builder.get_object("combobox_wireless_network_name").show()

            # Rebuild the AP list; _updating_device suppresses the
            # combobox "changed" handler while we do.
            store = self.builder.get_object("liststore_wireless_network")
            self._updating_device = True
            store.clear()
            aps = self._get_strongest_unique_aps(device.get_access_points())
            for ap in aps:
                active = active_ap and active_ap.get_path() == ap.get_path()
                self._add_ap(ap, active)
            # TODO: add access point other...
            if active_ap:
                combobox = self.builder.get_object("combobox_wireless_network_name")
                for i in combobox.get_model():
                    if i[1] == active_ap.get_ssid():
                        combobox.set_active_iter(i.iter)
                        self.selected_ssid = active_ap.get_ssid()
                        break
            self._updating_device = False
def _refresh_slaves(self, device):
dev_type = device.get_device_type()
if dev_type == NetworkManager.DeviceType.BOND:
slaves = ",".join(s.get_iface()
for s in device.get_slaves())
self._set_device_info_value("wired", "slaves", slaves)
def _refresh_parent_vlanid(self, device):
dev_type = device.get_device_type()
if dev_type == NetworkManager.DeviceType.VLAN:
self._set_device_info_value("wired", "vlanid", str(device.get_vlan_id()))
parent = nm_device_setting_value(device.get_iface(), "vlan", "parent")
self._set_device_info_value("wired", "parent", parent)
def _refresh_speed_hwaddr(self, device, state=None):
dev_type = device.get_device_type()
if dev_type == NetworkManager.DeviceType.ETHERNET:
dt = "wired"
speed = device.get_speed()
elif dev_type == NetworkManager.DeviceType.WIFI:
dt = "wireless"
speed = device.get_bitrate() / 1000
elif dev_type == NetworkManager.DeviceType.BOND:
dt = "wired"
speed = None
elif dev_type == NetworkManager.DeviceType.VLAN:
dt = "wired"
speed = None
if state is None:
state = device.get_state()
if state == NetworkManager.DeviceState.UNAVAILABLE:
speed_str = None
elif speed:
speed_str = _("%d Mb/s") % speed
else:
speed_str = ""
self._set_device_info_value(dt, "speed", speed_str)
self._set_device_info_value(dt, "mac", device.get_hw_address())
def _refresh_device_type_page(self, device):
notebook = self.builder.get_object("notebook_types")
dev_type = device.get_device_type()
if dev_type == NetworkManager.DeviceType.ETHERNET:
notebook.set_current_page(0)
self.builder.get_object("heading_wired_slaves").hide()
self.builder.get_object("label_wired_slaves").hide()
self.builder.get_object("heading_wired_vlanid").hide()
self.builder.get_object("label_wired_vlanid").hide()
self.builder.get_object("heading_wired_parent").hide()
self.builder.get_object("label_wired_parent").hide()
elif dev_type == NetworkManager.DeviceType.BOND:
notebook.set_current_page(0)
self.builder.get_object("heading_wired_slaves").show()
self.builder.get_object("label_wired_slaves").show()
self.builder.get_object("heading_wired_vlanid").hide()
self.builder.get_object("label_wired_vlanid").hide()
self.builder.get_object("heading_wired_parent").hide()
self.builder.get_object("label_wired_parent").hide()
elif dev_type == NetworkManager.DeviceType.VLAN:
notebook.set_current_page(0)
self.builder.get_object("heading_wired_slaves").hide()
self.builder.get_object("label_wired_slaves").hide()
self.builder.get_object("heading_wired_vlanid").hide()
self.builder.get_object("label_wired_vlanid").hide()
self.builder.get_object("heading_wired_parent").hide()
self.builder.get_object("label_wired_parent").hide()
elif dev_type == NetworkManager.DeviceType.WIFI:
notebook.set_current_page(1)
def _refresh_carrier_info(self):
for i in self.builder.get_object("liststore_devices"):
i[DEVICES_COLUMN_TITLE] = self._dev_title(i[DEVICES_COLUMN_OBJECT])
    def _refresh_header_ui(self, device, state=None):
        """Refresh the device page header: icon, title, status string and
        the ON/OFF switch, according to the device's current state."""
        dev_type = device.get_device_type()
        if dev_type == NetworkManager.DeviceType.ETHERNET:
            dev_type_str = "wired"
        elif dev_type == NetworkManager.DeviceType.WIFI:
            dev_type_str = "wireless"
        elif dev_type == NetworkManager.DeviceType.BOND:
            dev_type_str = "wired"
        elif dev_type == NetworkManager.DeviceType.VLAN:
            dev_type_str = "wired"

        if dev_type_str == "wired":
            # update icon according to device status
            img = self.builder.get_object("image_wired_device")
            img.set_from_icon_name(self._dev_icon_name(device), Gtk.IconSize.DIALOG)

        # TODO: is this necessary? Isn't it static from glade?
        self.builder.get_object("label_%s_device" % dev_type_str).set_label(
            "%s (%s)" % (self._dev_type_str(device), device.get_iface()))

        if state is None:
            state = device.get_state()
        self.builder.get_object("label_%s_status" % dev_type_str).set_label(
            localized_string_of_device_state(device, state))

        switch = self.builder.get_object("device_%s_off_switch" % dev_type_str)
        if dev_type_str == "wired":
            switch.set_visible(state not in (NetworkManager.DeviceState.UNAVAILABLE,
                                             NetworkManager.DeviceState.UNMANAGED))
            self._updating_device = True
            # Switch shows ON for any state meaning the device is in use or
            # coming up; guard suppresses the switch's own handler.
            switch.set_active(state not in (NetworkManager.DeviceState.UNMANAGED,
                                            NetworkManager.DeviceState.UNAVAILABLE,
                                            NetworkManager.DeviceState.DISCONNECTED,
                                            NetworkManager.DeviceState.DEACTIVATING,
                                            NetworkManager.DeviceState.FAILED))
            self._updating_device = False
            if not configuration_of_disconnected_devices_allowed:
                self.builder.get_object("button_%s_options" % dev_type_str).set_sensitive(state == NetworkManager.DeviceState.ACTIVATED)
        elif dev_type_str == "wireless":
            self.on_wireless_enabled()
def _set_device_info_value(self, dev_type_str, info, value_str):
heading = self.builder.get_object("heading_%s_%s" % (dev_type_str, info))
value_label = self.builder.get_object("label_%s_%s" % (dev_type_str, info))
if value_str is None:
heading.hide()
value_label.hide()
else:
heading.show()
value_label.show()
value_label.set_label(value_str)
# TODO NM_GI_BUGS use glib methods for mode and security (dbus obj or nm obj?)
def _add_ap(self, ap, active=False):
ssid = ap.get_ssid()
if not ssid:
return
# TODO NM_GI_BUGS
ap_dbus = dbus.SystemBus().get_object(NM_SERVICE, ap.get_path())
mode = getNMObjProperty(ap_dbus, ".AccessPoint", "Mode")
security = self._ap_security_dbus(ap)
store = self.builder.get_object("liststore_wireless_network")
# the third column is for sorting
iter = store.append([ap.get_path(),
ssid,
ssid,
ap.get_strength(),
mode,
security])
if active:
self.builder.get_object("combobox_wireless_network_name").set_active_iter(iter)
def _get_strongest_unique_aps(self, access_points):
strongest_aps = {}
for ap in access_points:
ssid = ap.get_ssid()
if ssid in strongest_aps:
if ap.get_strength() > strongest_aps[ssid].get_strength():
strongest_aps[ssid] = ap
else:
strongest_aps[ssid] = ap
return strongest_aps.values()
    # TODO NM_GI_BUGS fix as _ap_security_string
    def _ap_security_dbus(self, ap):
        """Classify *ap*'s security as one of the NM_AP_SEC_* constants.

        Reads Flags/WpaFlags/RsnFlags over raw D-Bus (GI binding workaround).
        """
        if ap.get_path() == "/":
            return NM_AP_SEC_UNKNOWN
        ap_dbus = dbus.SystemBus().get_object(NM_SERVICE, ap.get_path())
        flags = getNMObjProperty(ap_dbus, ".AccessPoint", "Flags")
        wpa_flags = getNMObjProperty(ap_dbus, ".AccessPoint", "WpaFlags")
        rsn_flags = getNMObjProperty(ap_dbus, ".AccessPoint", "RsnFlags")
        # No privacy bit and no WPA/RSN support -> open network.
        if (not (flags & NM_802_11_AP_FLAGS_PRIVACY) and
            wpa_flags == NM_802_11_AP_SEC_NONE and
            rsn_flags == NM_802_11_AP_SEC_NONE):
            type = NM_AP_SEC_NONE
        # Privacy bit alone (no WPA/RSN) -> static WEP.
        elif (flags & NM_802_11_AP_FLAGS_PRIVACY and
              wpa_flags == NM_802_11_AP_SEC_NONE and
              rsn_flags == NM_802_11_AP_SEC_NONE):
            type = NM_AP_SEC_WEP
        # Both WPA and RSN advertised without the privacy bit -> WPA.
        elif (not (flags & NM_802_11_AP_FLAGS_PRIVACY) and
              wpa_flags != NM_802_11_AP_SEC_NONE and
              rsn_flags != NM_802_11_AP_SEC_NONE):
            type = NM_AP_SEC_WPA
        else:
            type = NM_AP_SEC_WPA2
        return type
## TODO NM_GI_BUGS - attribute starts with number
# def _ap_security_string(self, ap):
# if ap.object_path == "/":
# return ""
#
# flags = ap.get_flags()
# wpa_flags = ap.get_wpa_flags()
# rsn_flags = ap.get_rsn_flags()
#
# sec_str = ""
#
# if ((flags & NetworkManager.80211ApFlags.PRIVACY) and
# wpa_flags == NetworkManager.80211ApSecurityFlags.NONE and
# rsn_flags == NetworkManager.80211ApSecurityFlags.NONE):
# sec_str += "%s, " % _("WEP")
#
# if wpa_flags != NetworkManager.80211ApSecurityFlags.NONE:
# sec_str += "%s, " % _("WPA")
#
# if rsn_flags != NetworkManager.80211ApSecurityFlags.NONE:
# sec_str += "%s, " % _("WPA2")
#
# if ((wpa_flags & NetworkManager.80211ApSecurityFlags.KEY_MGMT_802_1X) or
# (rsn_flags & NetworkManager.80211ApSecurityFlags.KEY_MGMT_802_1X)):
# sec_str += "%s, " % _("Enterprise")
#
# if sec_str:
# sec_str = sec_str[:-2]
# else:
# sec_str = _("None")
#
# return sec_str
def _ap_security_string_dbus(self, ap):
if ap.object_path == "/":
return ""
flags = getNMObjProperty(ap, ".AccessPoint", "Flags")
wpa_flags = getNMObjProperty(ap, ".AccessPoint", "WpaFlags")
rsn_flags = getNMObjProperty(ap, ".AccessPoint", "RsnFlags")
sec_str = ""
if ((flags & NM_802_11_AP_FLAGS_PRIVACY) and
wpa_flags == NM_802_11_AP_SEC_NONE and
rsn_flags == NM_802_11_AP_SEC_NONE):
sec_str += "%s, " % _("WEP")
if wpa_flags != NM_802_11_AP_SEC_NONE:
sec_str += "%s, " % _("WPA")
if rsn_flags != NM_802_11_AP_SEC_NONE:
sec_str += "%s, " % _("WPA2")
if ((wpa_flags & NM_802_11_AP_SEC_KEY_MGMT_802_1X) or
(rsn_flags & NM_802_11_AP_SEC_KEY_MGMT_802_1X)):
sec_str += "%s, " % _("Enterprise")
if sec_str:
sec_str = sec_str[:-2]
else:
sec_str = _("None")
return sec_str
    @property
    def listed_devices(self):
        """Device objects of all rows currently shown in the device list."""
        return [row[DEVICES_COLUMN_OBJECT] for
                row in self.builder.get_object("liststore_devices")]
    @property
    def hostname(self):
        """Current text of the hostname entry widget."""
        return self.entry_hostname.get_text()
    @hostname.setter
    def hostname(self, value):
        # Writes straight into the Gtk entry; no validation happens here.
        self.entry_hostname.set_text(value)
class SecretAgentDialog(GUIObject):
    """Modal dialog prompting the user for connection secrets (passwords).

    The dialog is driven by a *content* dict: 'title', 'message' and a list
    of 'secrets', each with 'label', 'key', 'value' and optional 'validate'
    callable.  run() returns the Gtk response code (1 == connect).
    """
    builderObjects = ["secret_agent_dialog"]
    mainWidgetName = "secret_agent_dialog"
    uiFile = "spokes/network.glade"
    def __init__(self, *args, **kwargs):
        self._content = kwargs.pop('content', {})
        GUIObject.__init__(self, *args, **kwargs)
        img = self.builder.get_object("image_password_dialog")
        img.set_from_icon_name("dialog-password-symbolic", Gtk.IconSize.DIALOG)
        self.builder.get_object("label_message").set_text(self._content['message'])
        self.builder.get_object("label_title").set_use_markup(True)
        self.builder.get_object("label_title").set_markup("<b>%s</b>" % self._content['title'])
        self._connect_button = self.builder.get_object("connect_button")
    def initialize(self):
        """Build one label+entry row per requested secret."""
        self._entries = {}
        grid = Gtk.Grid()
        grid.set_row_spacing(6)
        grid.set_column_spacing(6)
        for row, secret in enumerate(self._content['secrets']):
            label = Gtk.Label(secret['label'])
            label.set_halign(Gtk.Align.START)
            entry = Gtk.Entry()
            entry.set_visibility(False)
            entry.set_hexpand(True)
            # Validate the initial (possibly pre-filled) value immediately.
            self._validate(entry, secret)
            entry.connect("changed", self._validate, secret)
            entry.connect("activate", self._password_entered_cb)
            self._entries[secret['key']] = entry
            label.set_use_underline(True)
            label.set_mnemonic_widget(entry)
            grid.attach(label, 0, row, 1, 1)
            grid.attach(entry, 1, row, 1, 1)
        self.builder.get_object("password_box").add(grid)
    def run(self):
        """Show the dialog, copy entry texts back into the secrets, return rc."""
        self.initialize()
        self.window.show_all()
        rc = self.window.run()
        for secret in self._content['secrets']:
            secret['value'] = self._entries[secret['key']].get_text()
        self.window.destroy()
        return rc
    @property
    def valid(self):
        """True when every secret passed its validator."""
        return all(secret['valid'] for secret in self._content['secrets'])
    def _validate(self, entry, secret):
        # Falls back to "non-empty" when the secret has no validator.
        secret['value'] = entry.get_text()
        if secret['validate']:
            secret['valid'] = secret['validate'](secret)
        else:
            secret['valid'] = len(secret['value']) > 0
        self._update_connect_button()
    def _password_entered_cb(self, entry):
        # Enter key acts like the Connect button when input is valid.
        if self._connect_button.get_sensitive() and self.valid:
            self.window.response(1)
    def _update_connect_button(self):
        self._connect_button.set_sensitive(self.valid)
# Module-level singleton; created lazily by register_secret_agent().
secret_agent = None
class NotAuthorizedException(dbus.DBusException):
    """D-Bus error raised when a non-root caller requests secrets."""
    _dbus_error_name = SECRET_AGENT_IFACE + '.NotAuthorized'
class SecretAgent(dbus.service.Object):
    """NetworkManager secret agent: asks the user for connection secrets.

    Exported on the system bus at the NM secret-agent object path; NM calls
    GetSecrets() when a connection needs e.g. a Wi-Fi password.
    """
    def __init__(self, spoke):
        self._bus = dbus.SystemBus()
        self.spoke = spoke
        dbus.service.Object.__init__(self, self._bus, "/org/freedesktop/NetworkManager/SecretAgent")
    @dbus.service.method(SECRET_AGENT_IFACE,
                         in_signature='a{sa{sv}}osasb',
                         out_signature='a{sa{sv}}',
                         sender_keyword='sender')
    def GetSecrets(self, connection_hash, connection_path, setting_name, hints, request_new, sender=None):
        """Prompt the user and return {setting_name: {key: secret}}.

        Only root (uid 0) callers are served; anything else raises
        NotAuthorizedException.  A cancelled dialog returns empty secrets.
        """
        if not sender:
            raise NotAuthorizedException("Internal error: couldn't get sender")
        uid = self._bus.get_unix_user(sender)
        if uid != 0:
            raise NotAuthorizedException("UID %d not authorized" % uid)
        log.debug("Secrets requested path '%s' setting '%s' hints '%s' new %d"
                  % (connection_path, setting_name, str(hints), request_new))
        content = self._get_content(setting_name, connection_hash)
        dialog = SecretAgentDialog(self.spoke.data, content=content)
        with enlightbox(self.spoke.window, dialog.window):
            rc = dialog.run()
        secrets = dbus.Dictionary()
        if rc == 1:
            # rc 1 == user confirmed; harvest the entered values.
            for secret in content['secrets']:
                secrets[secret['key']] = secret['value']
        settings = dbus.Dictionary({setting_name: secrets})
        return settings
    def _get_content(self, setting_name, connection_hash):
        """Build the dialog content dict for the given connection type."""
        content = {}
        connection_type = connection_hash['connection']['type']
        if connection_type == "802-11-wireless":
            content['title'] = _("Authentication required by wireless network")
            content['message'] = _("Passwords or encryption keys are required to access\n"
                                   "the wireless network '%(network_id)s'.") \
                                 % {'network_id':str(connection_hash['connection']['id'])}
            content['secrets'] = self._get_wireless_secrets(connection_hash[setting_name])
        else:
            # Unsupported types yield an empty content dict.
            log.info("Connection type %s not supported by secret agent" % connection_type)
        return content
    def _get_wireless_secrets(self, original_secrets):
        """Return the secret descriptors needed for a wireless connection."""
        secrets = []
        key_mgmt = original_secrets['key-mgmt']
        if key_mgmt in ['wpa-none', 'wpa-psk']:
            secrets.append({'label' : _('_Password:'),
                            'key' : 'psk',
                            'value' : original_secrets.get('psk', ''),
                            'validate' : self._validate_wpapsk,
                            'password' : True})
        # static WEP
        elif key_mgmt == 'none':
            key_idx = str(original_secrets.get('wep_tx_keyidx', '0'))
            secrets.append({'label' : _('_Key:'),
                            'key' : 'wep-key%s' % key_idx,
                            'value' : original_secrets.get('wep-key%s' % key_idx, ''),
                            'wep_key_type': original_secrets.get('wep-key-type', ''),
                            'validate' : self._validate_staticwep,
                            'password' : True})
        else:
            log.info("Unsupported wireless key management: %s" % key_mgmt)
        return secrets
    def _validate_wpapsk(self, secret):
        """WPA-PSK: 64 hex digits (raw key) or an 8-63 char passphrase."""
        value = secret['value']
        if len(value) == 64:
            # must be composed of hexadecimal digits only
            return all(c in string.hexdigits for c in value)
        else:
            return 8 <= len(value) <= 63
    def _validate_staticwep(self, secret):
        """Static WEP: 10/26 hex digit key, 5/13 letter key, or passphrase."""
        value = secret['value']
        if secret['wep_key_type'] == NetworkManager.WepKeyType.KEY:
            if len(value) in (10, 26):
                return all(c in string.hexdigits for c in value)
            elif len(value) in (5, 13):
                # NOTE(review): string.letters is Python 2 only.
                return all(c in string.letters for c in value)
            else:
                return False
        elif secret['wep_key_type'] == NetworkManager.WepKeyType.PASSPHRASE:
            return 0 <= len(value) <= 64
        else:
            return True
def register_secret_agent(spoke):
    """Register the module-wide SecretAgent with NetworkManager.

    Returns False (no-op) when the installer may not touch the runtime
    system.  On later calls the existing agent is just rebound to *spoke*.
    """
    if not flags.can_touch_runtime_system("register anaconda secret agent"):
        return False
    global secret_agent
    if not secret_agent:
        secret_agent = SecretAgent(spoke)
        bus = dbus.SystemBus()
        proxy = bus.get_object(NM_SERVICE, AGENT_MANAGER_PATH)
        proxy.Register("anaconda", dbus_interface=AGENT_MANAGER_IFACE)
    else:
        secret_agent.spoke = spoke
    return True
class NetworkSpoke(NormalSpoke):
    """Hub spoke for network configuration (wraps NetworkControlBox)."""
    builderObjects = ["networkWindow", "liststore_wireless_network", "liststore_devices", "add_device_dialog", "liststore_add_device"]
    mainWidgetName = "networkWindow"
    uiFile = "spokes/network.glade"
    title = N_("_NETWORK CONFIGURATION")
    icon = "network-transmit-receive-symbolic"
    category = SoftwareCategory
    def __init__(self, *args, **kwargs):
        NormalSpoke.__init__(self, *args, **kwargs)
        self.network_control_box = NetworkControlBox(self.builder, spoke=self)
        self.network_control_box.hostname = self.data.network.hostname
        self.network_control_box.client.connect("notify::%s" %
                                                NMClient.CLIENT_STATE,
                                                self.on_nm_state_changed)
        for device in self.network_control_box.client.get_devices():
            device.connect("state-changed", self.on_device_state_changed)
    def apply(self):
        """Push the UI state into kickstart data when leaving the spoke."""
        _update_network_data(self.data, self.network_control_box)
        log.debug("network: apply ksdata %s" % self.data.network)
        self.network_control_box.kill_nmce(msg="leaving network spoke")
    @property
    def completed(self):
        # TODO: check also if source requires updates when implemented
        return (not flags.can_touch_runtime_system("require network connection")
                or len(self.network_control_box.activated_connections()) > 0)
    @property
    def mandatory(self):
        # Network is mandatory only for network-based install sources.
        return self.data.method.method in ("url", "nfs")
    @property
    def status(self):
        """ A short string describing which devices are connected. """
        msg = _("Unknown")
        state = self.network_control_box.client.get_state()
        if state == NetworkManager.State.CONNECTING:
            msg = _("Connecting...")
        elif state == NetworkManager.State.DISCONNECTING:
            msg = _("Disconnecting...")
        else:
            ac = self.network_control_box.activated_connections()
            if ac:
                # Don't show bond slaves
                slaves = []
                for name, type, info in ac:
                    if type == NetworkManager.DeviceType.BOND:
                        slaves.extend(info)
                if slaves:
                    ac = [(name, type, info)
                          for name, type, info in ac
                          if name not in slaves]
                if len(ac) == 1:
                    name, type, info = ac[0]
                    if type == NetworkManager.DeviceType.ETHERNET:
                        msg = _("Wired (%(interface_name)s) connected") \
                              % {"interface_name": name}
                    elif type == NetworkManager.DeviceType.WIFI:
                        msg = _("Wireless connected to %(access_point)s") \
                              % {"access_point" : info}
                    elif type == NetworkManager.DeviceType.BOND:
                        msg = _("Bond %(interface_name)s (%(list_of_slaves)s) connected") \
                              % {"interface_name": name, "list_of_slaves": ",".join(info)}
                    # NOTE(review): plain "if" (not elif) -- equivalent here
                    # because the preceding branches are false for VLAN.
                    if type == NetworkManager.DeviceType.VLAN:
                        msg = _("Vlan %(interface_name)s (%(parent_device)s, ID %(vlanid)s) connected") \
                              % {"interface_name": name, "parent_device": info[0], "vlanid": info[1]}
                else:
                    devlist = []
                    for name, type, info in ac:
                        if type == NetworkManager.DeviceType.ETHERNET:
                            devlist.append("%s" % name)
                        elif type == NetworkManager.DeviceType.WIFI:
                            devlist.append("%s" % info)
                        elif type == NetworkManager.DeviceType.BOND:
                            devlist.append("%s (%s)" % (name, ",".join(info)))
                        if type == NetworkManager.DeviceType.VLAN:
                            devlist.append("%s" % name)
                    msg = _("Connected: %(list_of_interface_names)s") \
                          % {"list_of_interface_names": ", ".join(devlist)}
            else:
                msg = _("Not connected")
        if not self.network_control_box.listed_devices:
            msg = _("No network devices available")
        return msg
    def initialize(self):
        register_secret_agent(self)
        NormalSpoke.initialize(self)
        self.network_control_box.initialize()
        if not flags.can_touch_runtime_system("hide hint to use network configuration in DE"):
            self.builder.get_object("network_config_vbox").set_no_show_all(True)
            self.builder.get_object("network_config_vbox").hide()
        else:
            self.builder.get_object("live_hint_label").set_no_show_all(True)
            self.builder.get_object("live_hint_label").hide()
        if not self.data.network.seen:
            _update_network_data(self.data, self.network_control_box)
    def refresh(self):
        NormalSpoke.refresh(self)
        self.network_control_box.refresh()
    def on_nm_state_changed(self, *args):
        # Marshal updates back onto the Gtk main loop.
        gtk_call_once(self._update_status)
        gtk_call_once(self._update_hostname)
    def on_device_state_changed(self, *args):
        new_state = args[1]
        if new_state in (NetworkManager.DeviceState.ACTIVATED,
                         NetworkManager.DeviceState.DISCONNECTED,
                         NetworkManager.DeviceState.UNAVAILABLE):
            gtk_call_once(self._update_status)
    def _update_status(self):
        hubQ.send_message(self.__class__.__name__, self.status)
    def _update_hostname(self):
        # Replace the placeholder default hostname with the detected one.
        if self.network_control_box.hostname == network.DEFAULT_HOSTNAME:
            hostname = network.getHostname()
            network.update_hostname_data(self.data, hostname)
            self.network_control_box.hostname = self.data.network.hostname
    def on_back_clicked(self, button):
        """Refuse to leave the spoke while the hostname is invalid."""
        hostname = self.network_control_box.hostname
        (valid, error) = network.sanityCheckHostname(hostname)
        if not valid:
            self.clear_info()
            msg = _("Hostname is not valid: %s") % error
            self.set_warning(msg)
            self.network_control_box.entry_hostname.grab_focus()
            self.window.show_all()
        else:
            self.clear_info()
            NormalSpoke.on_back_clicked(self, button)
class NetworkStandaloneSpoke(StandaloneSpoke):
    """Standalone (pre-hub) variant of the network configuration screen."""
    builderObjects = ["networkStandaloneWindow", "networkControlBox_vbox", "liststore_wireless_network", "liststore_devices", "add_device_dialog", "liststore_add_device"]
    mainWidgetName = "networkStandaloneWindow"
    uiFile = "spokes/network.glade"
    #nkwin7 add by yuwan
    #preForHub = SummaryHub
    preForHub = None
    #nkwin7 done
    priority = 10
    def __init__(self, *args, **kwargs):
        StandaloneSpoke.__init__(self, *args, **kwargs)
        self.network_control_box = NetworkControlBox(self.builder, spoke=self)
        self.network_control_box.hostname = self.data.network.hostname
        parent = self.builder.get_object("AnacondaStandaloneWindow-action_area5")
        parent.add(self.network_control_box.vbox)
        self.network_control_box.client.connect("notify::%s" %
                                                NMClient.CLIENT_STATE,
                                                self.on_nm_state_changed)
        # Remember connectivity at entry so apply() can detect "just came up".
        self._initially_available = self.completed
        log.debug("network standalone spoke (init): completed: %s" % self._initially_available)
        self._now_available = False
    def apply(self):
        """Store UI state; kick off payload setup if network just came up."""
        _update_network_data(self.data, self.network_control_box)
        log.debug("network: apply ksdata %s" % self.data.network)
        self._now_available = self.completed
        log.debug("network standalone spoke (apply) payload: %s completed: %s" % (self.payload.baseRepo, self._now_available))
        if not self.payload.baseRepo and not self._initially_available and self._now_available:
            from pyanaconda.packaging import payloadInitialize
            from pyanaconda.threads import threadMgr, AnacondaThread
            threadMgr.wait(constants.THREAD_PAYLOAD)
            threadMgr.add(AnacondaThread(name=constants.THREAD_PAYLOAD, target=payloadInitialize, args=(self.storage, self.data, self.payload)))
        self.network_control_box.kill_nmce(msg="leaving standalone network spoke")
    @property
    def completed(self):
        return (not flags.can_touch_runtime_system("require network connection")
                or len(self.network_control_box.activated_connections()) > 0)
    def initialize(self):
        register_secret_agent(self)
        StandaloneSpoke.initialize(self)
        self.network_control_box.initialize()
    def refresh(self):
        StandaloneSpoke.refresh(self)
        self.network_control_box.refresh()
    def _on_continue_clicked(self, cb):
        """Block Continue while the hostname entry is invalid."""
        hostname = self.network_control_box.hostname
        (valid, error) = network.sanityCheckHostname(hostname)
        if not valid:
            self.clear_info()
            msg = _("Hostname is not valid: %s") % error
            self.set_warning(msg)
            self.network_control_box.entry_hostname.grab_focus()
            self.window.show_all()
        else:
            self.clear_info()
            StandaloneSpoke._on_continue_clicked(self, cb)
    # Use case: slow dhcp has connected when on spoke
    def on_nm_state_changed(self, *args):
        gtk_call_once(self._update_hostname)
    def _update_hostname(self):
        if self.network_control_box.hostname == network.DEFAULT_HOSTNAME:
            hostname = network.getHostname()
            network.update_hostname_data(self.data, hostname)
            self.network_control_box.hostname = self.data.network.hostname
def _update_network_data(data, ncb):
    """Rebuild kickstart network data from the devices listed in *ncb*."""
    kickstart_entries = (getKSNetworkData(dev) for dev in ncb.listed_devices)
    # Devices whose ifcfg could not be read yield None and are dropped.
    data.network.network = [entry for entry in kickstart_entries
                            if entry is not None]
    network.update_hostname_data(data, ncb.hostname)
def getKSNetworkData(device):
    """Return kickstart network data for *device*, or None on failure.

    Derives the ifcfg file suffix from the device type (iface name, SSID,
    or bond/vlan ifcfg name with the "ifcfg-" prefix stripped), loads the
    ifcfg file, and marks the result active if the device is activated.
    """
    retval = None
    ifcfg_suffix = None
    if device.get_device_type() == NetworkManager.DeviceType.ETHERNET:
        ifcfg_suffix = device.get_iface()
    elif device.get_device_type() == NetworkManager.DeviceType.WIFI:
        ap = device.get_active_access_point()
        if ap:
            ifcfg_suffix = ap.get_ssid()
    elif device.get_device_type() == NetworkManager.DeviceType.BOND:
        # [6:] strips the "ifcfg-" prefix from the generated file name.
        ifcfg_suffix = network.get_bond_master_ifcfg_name(device.get_iface())[6:]
    elif device.get_device_type() == NetworkManager.DeviceType.VLAN:
        ifcfg_suffix = network.get_vlan_ifcfg_name(device.get_iface())[6:]
    if ifcfg_suffix:
        ifcfg_suffix = ifcfg_suffix.replace(' ', '_')
        device_cfg = network.NetworkDevice(network.netscriptsDir, ifcfg_suffix)
        try:
            device_cfg.loadIfcfgFile()
        except IOError as e:
            log.debug("getKSNetworkData %s: %s" % (ifcfg_suffix, e))
            return None
        retval = network.kickstartNetworkData(ifcfg=device_cfg)
        if retval and device.get_iface() in nm_activated_devices():
            retval.activate = True
    return retval
# Manual test entry point: show the NetworkControlBox in a bare Gtk window.
if __name__ == "__main__":
    win = Gtk.Window()
    win.connect("delete-event", Gtk.main_quit)
    builder = Gtk.Builder()
    import os
    # NOTE(review): assumes UIPATH is set (and ends with a path separator);
    # os.environ.get() returning None would raise TypeError here -- confirm.
    ui_file_path = os.environ.get('UIPATH')+'spokes/network.glade'
    builder.add_from_file(ui_file_path)
    n = NetworkControlBox(builder)
    n.initialize()
    n.refresh()
    n.vbox.reparent(win)
    win.show_all()
    Gtk.main()
| gpl-2.0 |
cooniur/ansible-modules-core | system/service.py | 7 | 59487 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: service
author:
- "Ansible Core Team"
- "Michael DeHaan"
version_added: "0.1"
short_description: Manage services.
description:
- Controls services on remote hosts. Supported init systems include BSD init,
OpenRC, SysV, Solaris SMF, systemd, upstart.
options:
name:
required: true
description:
- Name of the service.
state:
required: false
choices: [ started, stopped, restarted, reloaded ]
description:
- C(started)/C(stopped) are idempotent actions that will not run
commands unless necessary. C(restarted) will always bounce the
service. C(reloaded) will always reload. B(At least one of state
and enabled are required.)
sleep:
required: false
version_added: "1.3"
description:
- If the service is being C(restarted) then sleep this many seconds
between the stop and start command. This helps to workaround badly
behaving init scripts that exit immediately after signaling a process
to stop.
pattern:
required: false
version_added: "0.7"
description:
- If the service does not respond to the status command, name a
substring to look for as would be found in the output of the I(ps)
command as a stand-in for a status result. If the string is found,
the service will be assumed to be running.
enabled:
required: false
choices: [ "yes", "no" ]
description:
- Whether the service should start on boot. B(At least one of state and
enabled are required.)
runlevel:
required: false
default: 'default'
description:
- "For OpenRC init scripts (ex: Gentoo) only. The runlevel that this service belongs to."
arguments:
description:
- Additional arguments provided on the command line
aliases: [ 'args' ]
use:
description:
- The service module actually uses system specific modules, normally through auto detection, this setting can force a specific module.
- Normally it uses the value of the 'ansible_service_mgr' fact and falls back to the old 'service' module when none matching is found.
default: 'auto'
version_added: 2.2
'''
EXAMPLES = '''
# Example action to start service httpd, if not running
- service:
name: httpd
state: started
# Example action to stop service httpd, if running
- service:
name: httpd
state: stopped
# Example action to restart service httpd, in all cases
- service:
name: httpd
state: restarted
# Example action to reload service httpd, in all cases
- service:
name: httpd
state: reloaded
# Example action to enable service httpd, and not touch the running state
- service:
name: httpd
enabled: yes
# Example action to start service foo, based on running process /usr/bin/foo
- service:
name: foo
pattern: /usr/bin/foo
state: started
# Example action to restart network service for interface eth0
- service:
name: network
state: restarted
args: eth0
'''
import platform
import os
import re
import tempfile
import shlex
import select
import time
import string
import glob
from ansible.module_utils.service import fail_if_missing
# The distutils module is not shipped with SUNWPython on Solaris.
# It's in the SUNWPython-devel package which also contains development files
# that don't belong on production boxes. Since our Solaris code doesn't
# depend on LooseVersion, do not import it on Solaris.
if platform.system() != 'SunOS':
from distutils.version import LooseVersion
class Service(object):
    """
    This is the generic Service manipulation class that is subclassed
    based on platform.

    A subclass should override the following action methods:-
      - get_service_tools
      - service_enable
      - get_service_status
      - service_control

    All subclasses MUST define platform and distribution (which may be None).
    """
    platform = 'Generic'
    distribution = None
    def __new__(cls, *args, **kwargs):
        # Dispatch to the platform-specific subclass chosen by
        # load_platform_subclass() (from Ansible module_utils boilerplate).
        return load_platform_subclass(Service, args, kwargs)
    def __init__(self, module):
        self.module = module
        self.name = module.params['name']
        self.state = module.params['state']
        self.sleep = module.params['sleep']
        self.pattern = module.params['pattern']
        self.enable = module.params['enabled']
        self.runlevel = module.params['runlevel']
        self.changed = False
        self.running = None
        self.crashed = None
        self.action = None
        self.svc_cmd = None
        self.svc_initscript = None
        self.svc_initctl = None
        self.enable_cmd = None
        self.arguments = module.params.get('arguments', '')
        self.rcconf_file = None
        self.rcconf_key = None
        self.rcconf_value = None
        self.svc_change = False
    # ===========================================
    # Platform specific methods (must be replaced by subclass).
    def get_service_tools(self):
        self.module.fail_json(msg="get_service_tools not implemented on target platform")
    def service_enable(self):
        self.module.fail_json(msg="service_enable not implemented on target platform")
    def get_service_status(self):
        self.module.fail_json(msg="get_service_status not implemented on target platform")
    def service_control(self):
        self.module.fail_json(msg="service_control not implemented on target platform")
    # ===========================================
    # Generic methods that should be used on all platforms.
    def execute_command(self, cmd, daemonize=False):
        """Run *cmd*, optionally double-forking so it survives this process.

        Returns (rc, stdout, stderr).  The daemonized path forwards the
        child's result back through a pipe as a JSON triple.
        """
        # Most things don't need to be daemonized
        if not daemonize:
            return self.module.run_command(cmd)
        # This is complex because daemonization is hard for people.
        # What we do is daemonize a part of this module, the daemon runs the
        # command, picks up the return code and output, and returns it to the
        # main process.
        pipe = os.pipe()
        pid = os.fork()
        if pid == 0:
            os.close(pipe[0])
            # Set stdin/stdout/stderr to /dev/null
            fd = os.open(os.devnull, os.O_RDWR)
            if fd != 0:
                os.dup2(fd, 0)
            if fd != 1:
                os.dup2(fd, 1)
            if fd != 2:
                os.dup2(fd, 2)
            if fd not in (0, 1, 2):
                os.close(fd)
            # Make us a daemon. Yes, that's all it takes.
            pid = os.fork()
            if pid > 0:
                os._exit(0)
            os.setsid()
            os.chdir("/")
            pid = os.fork()
            if pid > 0:
                os._exit(0)
            # Start the command
            if isinstance(cmd, basestring):
                cmd = shlex.split(cmd)
            p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, preexec_fn=lambda: os.close(pipe[1]))
            stdout = ""
            stderr = ""
            fds = [p.stdout, p.stderr]
            # Wait for all output, or until the main process is dead and its output is done.
            while fds:
                rfd, wfd, efd = select.select(fds, [], fds, 1)
                if not (rfd + wfd + efd) and p.poll() is not None:
                    break
                if p.stdout in rfd:
                    dat = os.read(p.stdout.fileno(), 4096)
                    if not dat:
                        fds.remove(p.stdout)
                    stdout += dat
                if p.stderr in rfd:
                    dat = os.read(p.stderr.fileno(), 4096)
                    if not dat:
                        fds.remove(p.stderr)
                    stderr += dat
            p.wait()
            # Return a JSON blob to parent
            os.write(pipe[1], json.dumps([p.returncode, stdout, stderr]))
            os.close(pipe[1])
            os._exit(0)
        elif pid == -1:
            self.module.fail_json(msg="unable to fork")
        else:
            os.close(pipe[1])
            os.waitpid(pid, 0)
            # Wait for data from daemon process and process it.
            data = ""
            while True:
                rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
                if pipe[0] in rfd:
                    dat = os.read(pipe[0], 4096)
                    if not dat:
                        break
                    data += dat
            return json.loads(data)
    def check_ps(self):
        """Fallback status check: grep ps output for self.pattern."""
        # Set ps flags
        if platform.system() == 'SunOS':
            psflags = '-ef'
        else:
            psflags = 'auxww'
        # Find ps binary
        psbin = self.module.get_bin_path('ps', True)
        (rc, psout, pserr) = self.execute_command('%s %s' % (psbin, psflags))
        # If rc is 0, set running as appropriate
        if rc == 0:
            self.running = False
            lines = psout.split("\n")
            for line in lines:
                if self.pattern in line and not "pattern=" in line:
                    # so as to not confuse ./hacking/test-module
                    self.running = True
                    break
    def check_service_changed(self):
        """Decide whether the requested state differs from the current one."""
        if self.state and self.running is None:
            self.module.fail_json(msg="failed determining service state, possible typo of service name?")
        # Find out if state has changed
        if not self.running and self.state in ["started", "running", "reloaded"]:
            self.svc_change = True
        elif self.running and self.state in ["stopped","reloaded"]:
            self.svc_change = True
        elif self.state == "restarted":
            self.svc_change = True
        if self.module.check_mode and self.svc_change:
            self.module.exit_json(changed=True, msg='service state changed')
    def modify_service_state(self):
        """Map the requested state to an action and run service_control()."""
        # Only do something if state will change
        if self.svc_change:
            # Control service
            if self.state in ['started', 'running']:
                self.action = "start"
            elif not self.running and self.state == 'reloaded':
                # Reloading a stopped service means starting it first.
                self.action = "start"
            elif self.state == 'stopped':
                self.action = "stop"
            elif self.state == 'reloaded':
                self.action = "reload"
            elif self.state == 'restarted':
                self.action = "restart"
            if self.module.check_mode:
                self.module.exit_json(changed=True, msg='changing service state')
            return self.service_control()
        else:
            # If nothing needs to change just say all is well
            rc = 0
            err = ''
            out = ''
            return rc, out, err
    def service_enable_rcconf(self):
        """Idempotently set rcconf_key=rcconf_value in the BSD rc.conf file."""
        if self.rcconf_file is None or self.rcconf_key is None or self.rcconf_value is None:
            self.module.fail_json(msg="service_enable_rcconf() requires rcconf_file, rcconf_key and rcconf_value")
        self.changed = None
        entry = '%s="%s"\n' % (self.rcconf_key, self.rcconf_value)
        RCFILE = open(self.rcconf_file, "r")
        new_rc_conf = []
        # Build a list containing the possibly modified file.
        for rcline in RCFILE:
            # Parse line removing whitespaces, quotes, etc.
            rcarray = shlex.split(rcline, comments=True)
            if len(rcarray) >= 1 and '=' in rcarray[0]:
                (key, value) = rcarray[0].split("=", 1)
                if key == self.rcconf_key:
                    if value.upper() == self.rcconf_value:
                        # Since the proper entry already exists we can stop iterating.
                        self.changed = False
                        break
                    else:
                        # We found the key but the value is wrong, replace with new entry.
                        rcline = entry
                        self.changed = True
            # Add line to the list.
            new_rc_conf.append(rcline.strip() + '\n')
        # We are done with reading the current rc.conf, close it.
        RCFILE.close()
        # If we did not see any trace of our entry we need to add it.
        if self.changed is None:
            new_rc_conf.append(entry)
            self.changed = True
        if self.changed is True:
            if self.module.check_mode:
                self.module.exit_json(changed=True, msg="changing service enablement")
            # Create a temporary file next to the current rc.conf (so we stay on the same filesystem).
            # This way the replacement operation is atomic.
            rcconf_dir = os.path.dirname(self.rcconf_file)
            rcconf_base = os.path.basename(self.rcconf_file)
            (TMP_RCCONF, tmp_rcconf_file) = tempfile.mkstemp(dir=rcconf_dir, prefix="%s-" % rcconf_base)
            # Write out the contents of the list into our temporary file.
            for rcline in new_rc_conf:
                os.write(TMP_RCCONF, rcline)
            # Close temporary file.
            os.close(TMP_RCCONF)
            # Replace previous rc.conf.
            self.module.atomic_move(tmp_rcconf_file, self.rcconf_file)
# ===========================================
# Subclass: Linux
class LinuxService(Service):
"""
This is the Linux Service manipulation class - it is currently supporting
a mixture of binaries and init scripts for controlling services started at
boot, as well as for controlling the current state.
"""
platform = 'Linux'
distribution = None
    def get_service_tools(self):
        """Detect the init system managing self.name and pick its tools.

        Sets self.svc_cmd / self.enable_cmd / self.svc_initscript /
        self.svc_initctl depending on whether systemd, upstart, OpenRC or
        SysV init scripts manage the service; fails when nothing is found.
        """
        paths = [ '/sbin', '/usr/sbin', '/bin', '/usr/bin' ]
        binaries = [ 'service', 'chkconfig', 'update-rc.d', 'rc-service', 'rc-update', 'initctl', 'systemctl', 'start', 'stop', 'restart', 'insserv' ]
        initpaths = [ '/etc/init.d' ]
        location = dict()
        for binary in binaries:
            location[binary] = self.module.get_bin_path(binary, opt_dirs=paths)
        for initdir in initpaths:
            initscript = "%s/%s" % (initdir,self.name)
            if os.path.isfile(initscript):
                self.svc_initscript = initscript
        def check_systemd():
            # tools must be installed
            if location.get('systemctl',False):
                # this should show if systemd is the boot init system
                # these mirror systemd's own sd_boot test http://www.freedesktop.org/software/systemd/man/sd_booted.html
                for canary in ["/run/systemd/system/", "/dev/.run/systemd/", "/dev/.systemd/"]:
                    if os.path.exists(canary):
                        return True
                # If all else fails, check if init is the systemd command, using comm as cmdline could be symlink
                try:
                    f = open('/proc/1/comm', 'r')
                except IOError:
                    # If comm doesn't exist, old kernel, no systemd
                    return False
                for line in f:
                    if 'systemd' in line:
                        return True
            return False
        # Locate a tool to enable/disable a service
        if check_systemd():
            # service is managed by systemd
            self.__systemd_unit = self.name
            self.svc_cmd = location['systemctl']
            self.enable_cmd = location['systemctl']
        elif location.get('initctl', False) and os.path.exists("/etc/init/%s.conf" % self.name):
            # service is managed by upstart
            self.enable_cmd = location['initctl']
            # set the upstart version based on the output of 'initctl version'
            self.upstart_version = LooseVersion('0.0.0')
            try:
                version_re = re.compile(r'\(upstart (.*)\)')
                rc,stdout,stderr = self.module.run_command('initctl version')
                if rc == 0:
                    res = version_re.search(stdout)
                    if res:
                        self.upstart_version = LooseVersion(res.groups()[0])
            except:
                pass # we'll use the default of 0.0.0
            if location.get('start', False):
                # upstart -- rather than being managed by one command, start/stop/restart are actual commands
                self.svc_cmd = ''
        elif location.get('rc-service', False):
            # service is managed by OpenRC
            self.svc_cmd = location['rc-service']
            self.enable_cmd = location['rc-update']
            return # already have service start/stop tool too!
        elif self.svc_initscript:
            # service is managed by with SysV init scripts
            if location.get('update-rc.d', False):
                # and uses update-rc.d
                self.enable_cmd = location['update-rc.d']
            elif location.get('insserv', None):
                # and uses insserv
                self.enable_cmd = location['insserv']
            elif location.get('chkconfig', False):
                # and uses chkconfig
                self.enable_cmd = location['chkconfig']
        if self.enable_cmd is None:
            fail_if_missing(self.module, False, self.name, msg='host')
        # If no service control tool selected yet, try to see if 'service' is available
        if self.svc_cmd is None and location.get('service', False):
            self.svc_cmd = location['service']
        # couldn't find anything yet
        if self.svc_cmd is None and not self.svc_initscript:
            self.module.fail_json(msg='cannot find \'service\' binary or init script for service, possible typo in service name?, aborting')
        if location.get('initctl', False):
            self.svc_initctl = location['initctl']
def get_systemd_service_enabled(self):
    """
    Return True if systemd considers the unit enabled at boot.

    Falls back to SysV detection (an executable /etc/init.d script plus
    S?? rc links) when `systemctl is-enabled` reports neither enabled
    nor 'disabled' -- e.g. for generated units wrapping init scripts.
    """
    unit = self.__systemd_unit
    (rc, out, err) = self.execute_command("%s is-enabled %s" % (self.enable_cmd, unit,))
    if rc == 0:
        return True
    if out.startswith('disabled'):
        return False
    # Not a native unit systemd knows how to answer for; check whether a
    # SysV init script exists and whether any runlevel start link enables it.
    initscript = '/etc/init.d/' + unit
    if os.access(initscript, os.X_OK):
        return bool(glob.glob('/etc/rc?.d/S??' + unit))
    return False
def get_systemd_status_dict(self):
    """
    Run ``systemctl show`` for the unit and parse its KEY=value output
    into a dict.

    Values that are shell commands (e.g. ExecStart) can span multiple
    lines; systemd wraps them in ``{ ... }``.  We treat a value that
    begins with '{' as the start of such a block and a line ending with
    '}' as its end, joining the intermediate lines with newlines.

    BUGFIX vs. original: continuation lines were appended as ``value``
    (the first fragment, repeated) instead of the actual line, a line
    without '=' before any key raised NameError, and the buffer was
    never reset between multi-line values.
    """
    # Check status first as show will not fail if service does not exist
    (rc, out, err) = self.execute_command("%s show '%s'" % (self.enable_cmd, self.__systemd_unit,))
    if rc != 0:
        self.module.fail_json(msg='failure %d running systemctl show for %r: %s' % (rc, self.__systemd_unit, err))
    elif 'LoadState=not-found' in out:
        self.module.fail_json(msg='systemd could not find the requested service "%r": %s' % (self.__systemd_unit, err))

    key = None            # key of the multi-line value currently being collected
    value_buffer = []     # accumulated lines of that value
    status_dict = {}
    for line in out.splitlines():
        if '=' in line and not key:
            key, value = line.split('=', 1)
            # A value starting with '{' opens a (possibly multi-line)
            # shell-command block; a same-line '}' closes it immediately.
            if value.lstrip().startswith('{') and not value.rstrip().endswith('}'):
                value_buffer.append(value)
            else:
                status_dict[key] = value
                key = None
        elif key:
            # Continuation of a multi-line value (the line itself may
            # contain '=' characters or none at all).
            value_buffer.append(line)
            if line.rstrip().endswith('}'):
                status_dict[key] = '\n'.join(value_buffer)
                key = None
                value_buffer = []
        # else: stray line outside any value -- ignore it rather than crash

    return status_dict
def get_systemd_service_status(self):
    """
    Set ``self.running``/``self.crashed`` from the unit's ActiveState
    and return ``self.running``.

    Only 'active' counts as running -- run-once services (for which a
    single successful exit indicates they ran as designed) should not be
    restarted, so SubState is deliberately not consulted.  'failed'
    marks the service as crashed; any other state is simply not running.
    """
    active_state = self.get_systemd_status_dict().get('ActiveState')
    if active_state is None:
        self.module.fail_json(msg='No ActiveState value in systemctl show output for %r' % (self.__systemd_unit,))
    self.running = (active_state == 'active')
    self.crashed = (active_state == 'failed')
    return self.running
def get_service_status(self):
    """
    Determine whether the service is running, store the answer in
    ``self.running`` and return it.

    Dispatches to the systemd path when managed by systemctl; otherwise
    runs the init system's ``status`` action and applies a cascade of
    heuristics (upstart output, OpenRC output, LSB return codes, output
    keywords) until ``self.running`` is decided.
    """
    if self.svc_cmd and self.svc_cmd.endswith('systemctl'):
        return self.get_systemd_service_status()

    self.action = "status"
    rc, status_stdout, status_stderr = self.service_control()

    # if we have decided the service is managed by upstart, we check for some additional output...
    if self.svc_initctl and self.running is None:
        # check the job status by upstart response
        initctl_rc, initctl_status_stdout, initctl_status_stderr = self.execute_command("%s status %s" % (self.svc_initctl, self.name))
        if "stop/waiting" in initctl_status_stdout:
            self.running = False
        elif "start/running" in initctl_status_stdout:
            self.running = True

    if self.svc_cmd and self.svc_cmd.endswith("rc-service") and self.running is None:
        # OpenRC: 'started' appears on stdout; 'crashed' only ever on stderr.
        openrc_rc, openrc_status_stdout, openrc_status_stderr = self.execute_command("%s %s status" % (self.svc_cmd, self.name))
        self.running = "started" in openrc_status_stdout
        self.crashed = "crashed" in openrc_status_stderr

    # Prefer a non-zero return code. For reference, see:
    # http://refspecs.linuxbase.org/LSB_4.1.0/LSB-Core-generic/LSB-Core-generic/iniscrptact.html
    if self.running is None and rc in [1, 2, 3, 4, 69]:
        self.running = False

    # if the job status is still not known check it by status output keywords
    # Only check keywords if there's only one line of output (some init
    # scripts will output verbosely in case of error and those can emit
    # keywords that are picked up as false positives
    if self.running is None and status_stdout.count('\n') <= 1:
        # first transform the status output that could irritate keyword matching
        cleanout = status_stdout.lower().replace(self.name.lower(), '')
        # NOTE: the order of these keyword checks matters -- e.g. "run"
        # together with "not" must be tested before "run" alone.
        if "stop" in cleanout:
            self.running = False
        elif "run" in cleanout and "not" in cleanout:
            self.running = False
        elif "run" in cleanout and "not" not in cleanout:
            self.running = True
        elif "start" in cleanout and "not" not in cleanout:
            self.running = True
        elif 'could not access pid file' in cleanout:
            self.running = False
        elif 'is dead and pid file exists' in cleanout:
            self.running = False
        elif 'dead but subsys locked' in cleanout:
            self.running = False
        elif 'dead but pid file exists' in cleanout:
            self.running = False

    # if the job status is still not known and we got a zero for the
    # return code, assume here that the service is running
    if self.running is None and rc == 0:
        self.running = True

    # if the job status is still not known check it by special conditions
    if self.running is None:
        if self.name == 'iptables' and "ACCEPT" in status_stdout:
            # iptables status command output is lame
            # TODO: lookup if we can use a return code for this instead?
            self.running = True

    return self.running
def service_enable(self):
    """
    Enable or disable the service at boot, per ``self.enable``, using
    whichever tool ``get_service_tools`` discovered (initctl, chkconfig,
    systemctl, rc-update, update-rc.d or insserv).  Sets ``self.changed``.

    Most branches only decide the ``action`` and whether a change is
    needed; the shared invocation at the bottom performs it.  The
    initctl, update-rc.d and insserv branches are self-contained and
    return early.

    BUGFIX vs. original: the update-rc.d error paths called
    ``self.module.fail_json(msg=out) % (self.enable_cmd, self.name, action)``,
    applying ``%`` to fail_json's return value -- a leftover from an old
    format string.  The spurious ``% (...)`` has been removed.
    """
    if self.enable_cmd is None:
        self.module.fail_json(msg='cannot detect command to enable service %s, typo or init system potentially unknown' % self.name)

    self.changed = True
    action = None

    #
    # Upstart's initctl
    #
    if self.enable_cmd.endswith("initctl"):
        def write_to_override_file(file_name, file_contents):
            # Small helper so the commit step below stays readable.
            with open(file_name, 'w') as override_file:
                override_file.write(file_contents)

        initpath = '/etc/init'
        # Upstart >= 0.6.7 understands a bare 'manual' stanza; older
        # versions need 'start on manual' instead.
        if self.upstart_version >= LooseVersion('0.6.7'):
            manreg = re.compile(r'^manual\s*$', re.M | re.I)
            config_line = 'manual\n'
        else:
            manreg = re.compile(r'^start on manual\s*$', re.M | re.I)
            config_line = 'start on manual\n'
        conf_file_name = "%s/%s.conf" % (initpath, self.name)
        override_file_name = "%s/%s.override" % (initpath, self.name)

        # Check to see if files contain the manual line in .conf and fail if True
        with open(conf_file_name) as conf_file:
            if manreg.search(conf_file.read()):
                self.module.fail_json(msg="manual stanza not supported in a .conf file")

        self.changed = False
        if os.path.exists(override_file_name):
            with open(override_file_name) as override_file:
                override_file_contents = override_file.read()
            # Remove manual stanza if present and service enabled
            if self.enable and manreg.search(override_file_contents):
                self.changed = True
                override_state = manreg.sub('', override_file_contents)
            # Add manual stanza if not present and service disabled
            elif not (self.enable) and not (manreg.search(override_file_contents)):
                self.changed = True
                override_state = '\n'.join((override_file_contents, config_line))
            # service already in desired state
            else:
                pass
        # Add file with manual stanza if service disabled
        elif not (self.enable):
            self.changed = True
            override_state = config_line
        else:
            # service already in desired state
            pass

        if self.module.check_mode:
            self.module.exit_json(changed=self.changed)

        # The initctl method of enabling and disabling services is much
        # different than for the other service methods. So actually
        # committing the change is done in this conditional and then we
        # skip the boilerplate at the bottom of the method
        if self.changed:
            try:
                write_to_override_file(override_file_name, override_state)
            except Exception:
                self.module.fail_json(msg='Could not modify override file')

        return

    #
    # SysV's chkconfig
    #
    if self.enable_cmd.endswith("chkconfig"):
        if self.enable:
            action = 'on'
        else:
            action = 'off'

        (rc, out, err) = self.execute_command("%s --list %s" % (self.enable_cmd, self.name))
        # chkconfig suggests '--add' on stderr when it doesn't know the
        # service yet; register it first, then re-list.
        if 'chkconfig --add %s' % self.name in err:
            self.execute_command("%s --add %s" % (self.enable_cmd, self.name))
            (rc, out, err) = self.execute_command("%s --list %s" % (self.enable_cmd, self.name))
        if not self.name in out:
            self.module.fail_json(msg="service %s does not support chkconfig" % self.name)
        #TODO: look back on why this is here
        #state = out.split()[-1]

        # Check if we're already in the correct state (runlevels 3 and 5
        # are the ones users normally boot into)
        if "3:%s" % action in out and "5:%s" % action in out:
            self.changed = False
            return

    #
    # Systemd's systemctl
    #
    if self.enable_cmd.endswith("systemctl"):
        if self.enable:
            action = 'enable'
        else:
            action = 'disable'

        # Check if we're already in the correct state
        service_enabled = self.get_systemd_service_enabled()

        # self.changed should already be true
        if self.enable == service_enabled:
            self.changed = False
            return

    #
    # OpenRC's rc-update
    #
    if self.enable_cmd.endswith("rc-update"):
        if self.enable:
            action = 'add'
        else:
            action = 'delete'

        (rc, out, err) = self.execute_command("%s show" % self.enable_cmd)
        for line in out.splitlines():
            service_name, runlevels = line.split('|')
            service_name = service_name.strip()
            if service_name != self.name:
                continue
            runlevels = re.split(r'\s+', runlevels)
            # service already enabled for the runlevel
            if self.enable and self.runlevel in runlevels:
                self.changed = False
            # service already disabled for the runlevel
            elif not self.enable and self.runlevel not in runlevels:
                self.changed = False
            break
        else:
            # service already disabled altogether
            if not self.enable:
                self.changed = False

        if not self.changed:
            return

    #
    # update-rc.d style
    #
    if self.enable_cmd.endswith("update-rc.d"):

        enabled = False
        slinks = glob.glob('/etc/rc?.d/S??' + self.name)
        if slinks:
            enabled = True

        if self.enable != enabled:
            self.changed = True

            if self.enable:
                action = 'enable'
                # no K?? links at all means the service was never
                # registered with update-rc.d; install the defaults first
                klinks = glob.glob('/etc/rc?.d/K??' + self.name)
                if not klinks:
                    if not self.module.check_mode:
                        (rc, out, err) = self.execute_command("%s %s defaults" % (self.enable_cmd, self.name))
                        if rc != 0:
                            if err:
                                self.module.fail_json(msg=err)
                            else:
                                self.module.fail_json(msg=out)
            else:
                action = 'disable'

            if not self.module.check_mode:
                (rc, out, err) = self.execute_command("%s %s %s" % (self.enable_cmd, self.name, action))
                if rc != 0:
                    if err:
                        self.module.fail_json(msg=err)
                    else:
                        self.module.fail_json(msg=out)
        else:
            self.changed = False

        return

    #
    # insserv (Debian 7)
    #
    if self.enable_cmd.endswith("insserv"):
        # Dry-run first ('-n') to learn whether a change would happen.
        if self.enable:
            (rc, out, err) = self.execute_command("%s -n %s" % (self.enable_cmd, self.name))
        else:
            (rc, out, err) = self.execute_command("%s -nr %s" % (self.enable_cmd, self.name))

        self.changed = False
        for line in err.splitlines():
            if self.enable and line.find('enable service') != -1:
                self.changed = True
                break
            if not self.enable and line.find('remove service') != -1:
                self.changed = True
                break

        if self.module.check_mode:
            self.module.exit_json(changed=self.changed)

        if not self.changed:
            return

        if self.enable:
            (rc, out, err) = self.execute_command("%s %s" % (self.enable_cmd, self.name))
            if (rc != 0) or (err != ''):
                self.module.fail_json(msg=("Failed to install service. rc: %s, out: %s, err: %s" % (rc, out, err)))
            return (rc, out, err)
        else:
            (rc, out, err) = self.execute_command("%s -r %s" % (self.enable_cmd, self.name))
            if (rc != 0) or (err != ''):
                self.module.fail_json(msg=("Failed to remove service. rc: %s, out: %s, err: %s" % (rc, out, err)))
            return (rc, out, err)

    #
    # If we've gotten to the end, the service needs to be updated
    #
    self.changed = True

    # we change argument order depending on real binary used:
    # rc-update and systemctl need the argument order reversed
    if self.enable_cmd.endswith("rc-update"):
        args = (self.enable_cmd, action, self.name + " " + self.runlevel)
    elif self.enable_cmd.endswith("systemctl"):
        args = (self.enable_cmd, action, self.__systemd_unit)
    else:
        args = (self.enable_cmd, self.name, action)

    if self.module.check_mode:
        self.module.exit_json(changed=self.changed)

    (rc, out, err) = self.execute_command("%s %s %s" % args)
    if rc != 0:
        if err:
            self.module.fail_json(msg="Error when trying to %s %s: rc=%s %s" % (action, self.name, rc, err))
        else:
            self.module.fail_json(msg="Failure for %s %s: rc=%s %s" % (action, self.name, rc, out))

    return (rc, out, err)
def service_control(self):
    """
    Execute ``self.action`` (start/stop/restart/status/...) through the
    detected init system and return the ``(rc, stdout, stderr)`` triple.

    Command shape differs by tool: SysV/OpenRC use ``<cmd> <name>
    <action>``, systemd uses ``<cmd> <action> <unit>``, and upstart runs
    the init script directly.  ``restart`` is emulated as stop + start
    everywhere except OpenRC, which supports it natively.
    """
    # Decide what command to run
    svc_cmd = ''
    arguments = self.arguments
    if self.svc_cmd:
        if not self.svc_cmd.endswith("systemctl"):
            # SysV and OpenRC take the form <cmd> <name> <action>
            svc_cmd = "%s %s" % (self.svc_cmd, self.name)
        else:
            # systemd commands take the form <cmd> <action> <name>
            svc_cmd = self.svc_cmd
            arguments = "%s %s" % (self.__systemd_unit, arguments)
    elif self.svc_cmd is None and self.svc_initscript:
        # upstart
        svc_cmd = "%s" % self.svc_initscript

    # In OpenRC, if a service crashed, we need to reset its status to
    # stopped with the zap command, before we can start it back.
    if self.svc_cmd and self.svc_cmd.endswith('rc-service') and self.action == 'start' and self.crashed:
        self.execute_command("%s zap" % svc_cmd, daemonize=True)

    if self.action != "restart":
        if svc_cmd != '':
            # upstart or systemd or OpenRC
            rc_state, stdout, stderr = self.execute_command("%s %s %s" % (svc_cmd, self.action, arguments), daemonize=True)
        else:
            # SysV
            rc_state, stdout, stderr = self.execute_command("%s %s %s" % (self.action, self.name, arguments), daemonize=True)
    elif self.svc_cmd and self.svc_cmd.endswith('rc-service'):
        # All services in OpenRC support restart.
        rc_state, stdout, stderr = self.execute_command("%s %s %s" % (svc_cmd, self.action, arguments), daemonize=True)
    else:
        # In other systems, not all services support restart. Do it the hard way.
        if svc_cmd != '':
            # upstart or systemd
            rc1, stdout1, stderr1 = self.execute_command("%s %s %s" % (svc_cmd, 'stop', arguments), daemonize=True)
        else:
            # SysV
            rc1, stdout1, stderr1 = self.execute_command("%s %s %s" % ('stop', self.name, arguments), daemonize=True)

        # optional user-requested pause between the stop and the start
        if self.sleep:
            time.sleep(self.sleep)

        if svc_cmd != '':
            # upstart or systemd
            rc2, stdout2, stderr2 = self.execute_command("%s %s %s" % (svc_cmd, 'start', arguments), daemonize=True)
        else:
            # SysV
            rc2, stdout2, stderr2 = self.execute_command("%s %s %s" % ('start', self.name, arguments), daemonize=True)

        # merge return information
        # A failed stop followed by a successful start (e.g. the service
        # was not running) is reported as success; otherwise combine both.
        if rc1 != 0 and rc2 == 0:
            rc_state = rc2
            stdout = stdout2
            stderr = stderr2
        else:
            rc_state = rc1 + rc2
            stdout = stdout1 + stdout2
            stderr = stderr1 + stderr2

    return(rc_state, stdout, stderr)
# ===========================================
# Subclass: FreeBSD
class FreeBsdService(Service):
    """
    This is the FreeBSD Service manipulation class - it uses the /etc/rc.conf
    file for controlling services started at boot and the 'service' binary to
    check status and perform direct service manipulation.
    """

    platform = 'FreeBSD'
    distribution = None

    def get_service_tools(self):
        """Locate service(8) (required) and sysrc(8) (optional, FreeBSD >= 9.2)."""
        self.svc_cmd = self.module.get_bin_path('service', True)
        if not self.svc_cmd:
            self.module.fail_json(msg='unable to find service binary')
        self.sysrc_cmd = self.module.get_bin_path('sysrc')

    def get_service_status(self):
        """Set ``self.running`` from ``service <name> onestatus``."""
        rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.svc_cmd, self.name, 'onestatus', self.arguments))
        if self.name == "pf":
            # pf's status script prints 'Enabled'/'Disabled' rather than
            # using the return code
            self.running = "Enabled" in stdout
        else:
            if rc == 1:
                self.running = False
            elif rc == 0:
                self.running = True

    def service_enable(self):
        """
        Enable/disable the service at boot by setting its rcvar, via
        sysrc when available (FreeBSD >= 9.2), otherwise by editing
        rc.conf directly.

        BUGFIX vs. original: ``rcvars`` was only assigned inside the
        ``try`` block, so a shlex parse failure left it undefined and
        the subsequent ``if not rcvars`` raised NameError; it is now
        initialized to an empty list first.
        """
        if self.enable:
            self.rcconf_value = "YES"
        else:
            self.rcconf_value = "NO"

        rcfiles = ['/etc/rc.conf', '/etc/rc.conf.local', '/usr/local/etc/rc.conf']
        for rcfile in rcfiles:
            if os.path.isfile(rcfile):
                self.rcconf_file = rcfile

        rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.svc_cmd, self.name, 'rcvar', self.arguments))
        rcvars = []
        try:
            rcvars = shlex.split(stdout, comments=True)
        except ValueError:
            # unbalanced quoting in the rcvar output
            # TODO: add a warning to the output with the failure
            pass

        if not rcvars:
            self.module.fail_json(msg="unable to determine rcvar", stdout=stdout, stderr=stderr)

        # In rare cases, i.e. sendmail, rcvar can return several key=value pairs
        # Usually there is just one, however. In other rare cases, i.e. uwsgi,
        # rcvar can return extra uncommented data that is not at all related to
        # the rcvar. We will just take the first key=value pair we come across
        # and hope for the best.
        for rcvar in rcvars:
            if '=' in rcvar:
                self.rcconf_key, default_rcconf_value = rcvar.split('=', 1)
                break

        if self.rcconf_key is None:
            self.module.fail_json(msg="unable to determine rcvar", stdout=stdout, stderr=stderr)

        if self.sysrc_cmd:  # FreeBSD >= 9.2
            rc, current_rcconf_value, stderr = self.execute_command("%s -n %s" % (self.sysrc_cmd, self.rcconf_key))
            # it can happen that rcvar is not set (case of a system coming from the ports collection)
            # so we will fallback on the default
            if rc != 0:
                current_rcconf_value = default_rcconf_value

            if current_rcconf_value.strip().upper() != self.rcconf_value:
                self.changed = True

                if self.module.check_mode:
                    self.module.exit_json(changed=True, msg="changing service enablement")

                rc, change_stdout, change_stderr = self.execute_command("%s %s=\"%s\"" % (self.sysrc_cmd, self.rcconf_key, self.rcconf_value))
                if rc != 0:
                    self.module.fail_json(msg="unable to set rcvar using sysrc", stdout=change_stdout, stderr=change_stderr)

                # sysrc does not exit with code 1 on permission error => validate successful change using service(8)
                rc, check_stdout, check_stderr = self.execute_command("%s %s %s" % (self.svc_cmd, self.name, "enabled"))
                if self.enable != (rc == 0):  # rc = 0 indicates enabled service, rc = 1 indicates disabled service
                    self.module.fail_json(msg="unable to set rcvar: sysrc did not change value", stdout=change_stdout, stderr=change_stderr)
            else:
                self.changed = False
        else:  # Legacy (FreeBSD < 9.2): edit rc.conf ourselves
            try:
                return self.service_enable_rcconf()
            except Exception:
                self.module.fail_json(msg='unable to set rcvar')

    def service_control(self):
        """Run the action via service(8), mapped to its 'one*' variant."""
        if self.action == "start":
            self.action = "onestart"
        if self.action == "stop":
            self.action = "onestop"
        if self.action == "reload":
            self.action = "onereload"

        ret = self.execute_command("%s %s %s %s" % (self.svc_cmd, self.name, self.action, self.arguments))

        if self.sleep:
            time.sleep(self.sleep)

        return ret
# ===========================================
# Subclass: OpenBSD
class OpenBsdService(Service):
    """
    This is the OpenBSD Service manipulation class - it uses rcctl(8) or
    /etc/rc.d scripts for service control. Enabling a service is
    only supported if rcctl is present.
    """

    platform = 'OpenBSD'
    distribution = None

    def get_service_tools(self):
        # Prefer rcctl; fall back to the service's own /etc/rc.d script.
        self.enable_cmd = self.module.get_bin_path('rcctl')

        if self.enable_cmd:
            self.svc_cmd = self.enable_cmd
        else:
            rcdir = '/etc/rc.d'

            rc_script = "%s/%s" % (rcdir, self.name)
            if os.path.isfile(rc_script):
                self.svc_cmd = rc_script

        if not self.svc_cmd:
            self.module.fail_json(msg='unable to find svc_cmd')

    def get_service_status(self):
        # 'check' exits 0 when the daemon is running, 1 when it is not.
        if self.enable_cmd:
            rc, stdout, stderr = self.execute_command("%s %s %s" % (self.svc_cmd, 'check', self.name))
        else:
            rc, stdout, stderr = self.execute_command("%s %s" % (self.svc_cmd, 'check'))

        if stderr:
            self.module.fail_json(msg=stderr)

        if rc == 1:
            self.running = False
        elif rc == 0:
            self.running = True

    def service_control(self):
        # '-f' forces the action even if rc.conf.local would skip it.
        if self.enable_cmd:
            return self.execute_command("%s -f %s %s" % (self.svc_cmd, self.action, self.name))
        else:
            return self.execute_command("%s -f %s" % (self.svc_cmd, self.action))

    def service_enable(self):
        """
        Enable/disable the service via rcctl, reconciling both its
        'status' (on/off) and its 'flags' against the defaults and any
        user-supplied arguments.  Without rcctl this defers to the
        generic implementation, which will fail with a useful message.
        """
        if not self.enable_cmd:
            return super(OpenBsdService, self).service_enable()

        rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.enable_cmd, 'getdef', self.name, 'flags'))

        if stderr:
            self.module.fail_json(msg=stderr)

        getdef_string = stdout.rstrip()

        # Depending on the service the string returned from 'getdef' may be
        # either a set of flags or the boolean YES/NO
        if getdef_string == "YES" or getdef_string == "NO":
            default_flags = ''
        else:
            default_flags = getdef_string

        rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.enable_cmd, 'get', self.name, 'flags'))

        if stderr:
            self.module.fail_json(msg=stderr)

        get_string = stdout.rstrip()

        # Depending on the service the string returned from 'get' may be
        # either a set of flags or the boolean YES/NO
        if get_string == "YES" or get_string == "NO":
            current_flags = ''
        else:
            current_flags = get_string

        # If there are arguments from the user we use these as flags unless
        # they are already set.
        if self.arguments and self.arguments != current_flags:
            changed_flags = self.arguments
        # If the user has not supplied any arguments and the current flags
        # differ from the default we reset them.
        elif not self.arguments and current_flags != default_flags:
            # a single space tells rcctl to reset the flags to the default
            changed_flags = ' '
        # Otherwise there is no need to modify flags.
        else:
            changed_flags = ''

        # 'get ... status' exits 0 when the service is enabled, 1 when not.
        rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.enable_cmd, 'get', self.name, 'status'))

        if self.enable:
            if rc == 0 and not changed_flags:
                return

            if rc != 0:
                status_action = "set %s status on" % (self.name)
            else:
                status_action = ''
            if changed_flags:
                flags_action = "set %s flags %s" % (self.name, changed_flags)
            else:
                flags_action = ''
        else:
            if rc == 1:
                return

            status_action = "set %s status off" % self.name
            flags_action = ''

        # Verify state assumption
        if not status_action and not flags_action:
            self.module.fail_json(msg="neither status_action or status_flags is set, this should never happen")

        if self.module.check_mode:
            self.module.exit_json(changed=True, msg="changing service enablement")

        # Track whether the status change already succeeded so flag
        # failures below can report a partial modification.
        status_modified = 0
        if status_action:
            rc, stdout, stderr = self.execute_command("%s %s" % (self.enable_cmd, status_action))
            if rc != 0:
                if stderr:
                    self.module.fail_json(msg=stderr)
                else:
                    self.module.fail_json(msg="rcctl failed to modify service status")

            status_modified = 1

        if flags_action:
            rc, stdout, stderr = self.execute_command("%s %s" % (self.enable_cmd, flags_action))
            if rc != 0:
                if stderr:
                    if status_modified:
                        error_message = "rcctl modified service status but failed to set flags: " + stderr
                    else:
                        error_message = stderr
                else:
                    if status_modified:
                        error_message = "rcctl modified service status but failed to set flags"
                    else:
                        error_message = "rcctl failed to modify service flags"

                self.module.fail_json(msg=error_message)

        self.changed = True
# ===========================================
# Subclass: NetBSD
class NetBsdService(Service):
    """
    This is the NetBSD Service manipulation class - it uses the /etc/rc.conf
    file for controlling services started at boot, check status and perform
    direct service manipulation. Init scripts in /etc/rcd are used for
    controlling services (start/stop) as well as for controlling the current
    state.
    """

    platform = 'NetBSD'
    distribution = None

    def get_service_tools(self):
        """Locate the service's rc.d init script and store it in ``self.svc_initscript``."""
        initpaths = ['/etc/rc.d']  # better: $rc_directories - how to get in here? Run: sh -c '. /etc/rc.conf ; echo $rc_directories'

        for initdir in initpaths:
            initscript = "%s/%s" % (initdir, self.name)
            if os.path.isfile(initscript):
                self.svc_initscript = initscript

        if not self.svc_initscript:
            self.module.fail_json(msg='unable to find rc.d script')

    def service_enable(self):
        """Toggle the ``<name>_enable=YES/NO`` knob in /etc/rc.conf."""
        if self.enable:
            self.rcconf_value = "YES"
        else:
            self.rcconf_value = "NO"

        rcfiles = ['/etc/rc.conf']  # Overkill?
        for rcfile in rcfiles:
            if os.path.isfile(rcfile):
                self.rcconf_file = rcfile

        # rc.conf variable names use underscores where the service name has
        # dashes.  BUGFIX vs. original: string.replace() from the Python 2
        # 'string' module does not exist on Python 3; str.replace() behaves
        # identically on both.
        self.rcconf_key = "%s" % self.name.replace("-", "_")

        return self.service_enable_rcconf()

    def get_service_status(self):
        """Set ``self.running`` from the init script's 'onestatus' exit code."""
        self.svc_cmd = "%s" % self.svc_initscript
        rc, stdout, stderr = self.execute_command("%s %s" % (self.svc_cmd, 'onestatus'))
        if rc == 1:
            self.running = False
        elif rc == 0:
            self.running = True

    def service_control(self):
        """Run the action via the init script, mapped to its 'one*' variant."""
        if self.action == "start":
            self.action = "onestart"
        if self.action == "stop":
            self.action = "onestop"

        self.svc_cmd = "%s" % self.svc_initscript
        return self.execute_command("%s %s" % (self.svc_cmd, self.action), daemonize=True)
# ===========================================
# Subclass: SunOS
class SunOSService(Service):
    """
    SunOS/Solaris SMF service handling: svcadm(1M) drives state changes,
    svcs(1) reports status.  Services stuck in 'maintenance' or 'degraded'
    are cleared automatically before being (re)started.
    """

    platform = 'SunOS'
    distribution = None

    def get_service_tools(self):
        self.svcs_cmd = self.module.get_bin_path('svcs', True)
        if not self.svcs_cmd:
            self.module.fail_json(msg='unable to find svcs binary')

        self.svcadm_cmd = self.module.get_bin_path('svcadm', True)
        if not self.svcadm_cmd:
            self.module.fail_json(msg='unable to find svcadm binary')

    def get_service_status(self):
        # Only 'online' counts as properly running; every other SMF state
        # is either off or in trouble.
        self.running = (self.get_sunos_svcs_status() == 'online')

    def get_sunos_svcs_status(self):
        """Return the SMF state reported by svcs(1) for this service."""
        rc, stdout, stderr = self.execute_command("%s %s" % (self.svcs_cmd, self.name))
        if rc == 1:
            self.module.fail_json(msg=stderr or stdout)

        # First column of the last output line holds the state, one of:
        # online, offline, degraded, disabled, maintenance, uninitialized
        # (see man svcs(1)).
        last_line = stdout.rstrip("\n").split("\n")[-1]
        return last_line.split(" ")[0]

    def service_enable(self):
        """Persistently enable or disable the service via svcadm."""
        # Query the current (possibly temporary) enablement state.
        rc, stdout, stderr = self.execute_command("%s -l %s" % (self.svcs_cmd, self.name))
        if rc != 0:
            self.module.fail_json(msg=stderr or stdout)

        enabled = False
        temporary = False

        # The 'enabled' property line is one of:
        #   enabled true (temporary) / enabled false (temporary)
        #   enabled true             / enabled false
        for line in stdout.split("\n"):
            if line.startswith("enabled"):
                enabled = enabled or ("true" in line)
                temporary = temporary or ("temporary" in line)

        # A temporary override means the persistent setting is the
        # opposite of the current one -- i.e. XOR.
        startup_enabled = (enabled != temporary)

        if self.enable == startup_enabled:
            # Already in the desired persistent state.
            return

        # Mark service as started or stopped (this will have the side effect of
        # actually stopping or starting the service)
        subcmd = "enable -rs" if self.enable else "disable -s"
        rc, stdout, stderr = self.execute_command("%s %s %s" % (self.svcadm_cmd, subcmd, self.name))
        if rc != 0:
            self.module.fail_json(msg=stderr or stdout)

        self.changed = True

    def service_control(self):
        status = self.get_sunos_svcs_status()

        # Clear maintenance/degraded before any action that should leave
        # the service running.
        if self.action in ['start', 'reload', 'restart'] and status in ['maintenance', 'degraded']:
            rc, stdout, stderr = self.execute_command("%s clear %s" % (self.svcadm_cmd, self.name))
            if rc != 0:
                return rc, stdout, stderr
            status = self.get_sunos_svcs_status()

        if status in ['maintenance', 'degraded']:
            self.module.fail_json(msg="Failed to bring service out of %s status." % status)

        if self.action == 'start':
            subcmd = "enable -rst"
        elif self.action == 'stop':
            subcmd = "disable -st"
        elif self.action == 'reload':
            subcmd = "refresh -s"
        elif self.action == 'restart':
            # A service that is not online cannot be restarted; enable it.
            if status == 'online':
                subcmd = "restart -s"
            else:
                subcmd = "enable -rst"

        return self.execute_command("%s %s %s" % (self.svcadm_cmd, subcmd, self.name))
# ===========================================
# Subclass: AIX
class AIX(Service):
    """
    AIX System Resource Controller (SRC) handling: lssrc reports status,
    startsrc/stopsrc/refresh perform control actions.  Boot-time
    enablement is not supported here; it would require editing
    /etc/inittab (mkitab, chitab and rmitab commands).
    """

    platform = 'AIX'
    distribution = None

    def get_service_tools(self):
        # All four SRC tools are required; bail out with a clear message
        # if any one of them is missing.
        for attr, tool in (('lssrc_cmd', 'lssrc'),
                           ('startsrc_cmd', 'startsrc'),
                           ('stopsrc_cmd', 'stopsrc'),
                           ('refresh_cmd', 'refresh')):
            path = self.module.get_bin_path(tool, True)
            if not path:
                self.module.fail_json(msg='unable to find %s binary' % tool)
            setattr(self, attr, path)

    def get_service_status(self):
        # Only 'active' counts as running; 'inoperative' (or anything
        # else) does not.
        self.running = (self.get_aix_src_status() == 'active')

    def get_aix_src_status(self):
        """Return the SRC state ('active' or 'inoperative') from lssrc."""
        rc, stdout, stderr = self.execute_command("%s -s %s" % (self.lssrc_cmd, self.name))
        if rc == 1:
            self.module.fail_json(msg=stderr or stdout)

        # The state is the last field of the last output line.
        last_line = stdout.rstrip("\n").split("\n")[-1]
        return last_line.split(" ")[-1]

    def service_control(self):
        if self.action == 'start':
            srccmd = self.startsrc_cmd
        elif self.action == 'stop':
            srccmd = self.stopsrc_cmd
        elif self.action == 'reload':
            srccmd = self.refresh_cmd
        elif self.action == 'restart':
            # Stop first, then fall through to a fresh start.
            self.execute_command("%s -s %s" % (self.stopsrc_cmd, self.name))
            srccmd = self.startsrc_cmd

        # Extra arguments are only meaningful when starting a subsystem.
        if self.arguments and self.action == 'start':
            return self.execute_command("%s -a \"%s\" -s %s" % (srccmd, self.arguments, self.name))
        return self.execute_command("%s -s %s" % (srccmd, self.name))
# ===========================================
# Main control flow
def main():
    """
    Module entry point: parse arguments, pick the platform-specific
    Service subclass, reconcile boot-time enablement and running state,
    and report the result via exit_json/fail_json.
    """
    module = AnsibleModule(
        argument_spec = dict(
            name = dict(required=True),
            state = dict(choices=['running', 'started', 'stopped', 'restarted', 'reloaded']),
            sleep = dict(required=False, type='int', default=None),
            pattern = dict(required=False, default=None),
            enabled = dict(type='bool'),
            runlevel = dict(required=False, default='default'),
            arguments = dict(aliases=['args'], default=''),
        ),
        supports_check_mode=True,
        required_one_of=[['state', 'enabled']],
    )

    service = Service(module)

    module.debug('Service instantiated - platform %s' % service.platform)
    if service.distribution:
        module.debug('Service instantiated - distribution %s' % service.distribution)

    rc = 0
    out = ''
    err = ''
    result = {}
    result['name'] = service.name

    # Find service management tools
    service.get_service_tools()

    # Enable/disable service startup at boot if requested
    if service.module.params['enabled'] is not None:
        # FIXME: ideally this should detect if we need to toggle the enablement state, though
        # it's unlikely the changed handler would need to fire in this case so it's a minor thing.
        service.service_enable()
        result['enabled'] = service.enable

    if module.params['state'] is None:
        # Not changing the running state, so bail out now.
        result['changed'] = service.changed
        module.exit_json(**result)

    result['state'] = service.state

    # Collect service status
    if service.pattern:
        service.check_ps()
    else:
        service.get_service_status()

    # Calculate if request will change service state
    service.check_service_changed()

    # Modify service state if necessary
    (rc, out, err) = service.modify_service_state()

    if rc != 0:
        if err and "Job is already running" in err:
            # upstart got confused, one such possibility is MySQL on Ubuntu 12.04
            # where status may report it has no start/stop links and we could
            # not get accurate status
            pass
        else:
            if err:
                module.fail_json(msg=err)
            else:
                module.fail_json(msg=out)

    result['changed'] = service.changed | service.svc_change
    if service.module.params['enabled'] is not None:
        result['enabled'] = service.module.params['enabled']

    if not service.module.params['state']:
        # NOTE: normally unreachable -- a None state already exited above --
        # but kept for safety.  BUGFIX vs. original: a stopped service
        # (status False) was reported as 'started' and vice versa.
        status = service.get_service_status()
        if status is None:
            result['state'] = 'absent'
        elif status is False:
            result['state'] = 'stopped'
        else:
            result['state'] = 'started'
    else:
        # as we may have just bounced the service the service command may not
        # report accurate state at this moment so just show what we ran
        if service.module.params['state'] in ['started', 'restarted', 'running', 'reloaded']:
            result['state'] = 'started'
        else:
            result['state'] = 'stopped'

    module.exit_json(**result)
# Pull in the AnsibleModule boilerplate (old-style module convention
# requires the wildcard import at the bottom of the file).
from ansible.module_utils.basic import *

# Guard the entry point so importing this module (e.g. for tests or
# documentation tooling) does not execute it.
if __name__ == '__main__':
    main()
| gpl-3.0 |
stvstnfrd/edx-platform | openedx/core/djangoapps/xblock/runtime/runtime.py | 1 | 19276 | """
Common base classes for all new XBlock runtimes.
"""
import logging
import crum
from completion.waffle import ENABLE_COMPLETION_TRACKING_SWITCH
from completion.models import BlockCompletion
from completion.services import CompletionService
from django.contrib.auth import get_user_model
from django.core.exceptions import PermissionDenied
from django.utils.lru_cache import lru_cache
from eventtracking import tracker
from six.moves.urllib.parse import urljoin # pylint: disable=import-error
from web_fragments.fragment import Fragment
from xblock.exceptions import NoSuchServiceError
from xblock.field_data import SplitFieldData
from xblock.fields import Scope
from xblock.runtime import KvsFieldData, MemoryIdManager, Runtime
from common.djangoapps.track import contexts as track_contexts
from common.djangoapps.track import views as track_views
from lms.djangoapps.courseware.model_data import DjangoKeyValueStore, FieldDataCache
from lms.djangoapps.grades.api import signals as grades_signals
from openedx.core.djangoapps.xblock.apps import get_xblock_app_config
from openedx.core.djangoapps.xblock.runtime.blockstore_field_data import BlockstoreChildrenData, BlockstoreFieldData
from openedx.core.djangoapps.xblock.runtime.ephemeral_field_data import EphemeralKeyValueStore
from openedx.core.djangoapps.xblock.runtime.mixin import LmsBlockMixin
from openedx.core.djangoapps.xblock.utils import get_xblock_id_for_anonymous_user
from openedx.core.lib.xblock_utils import wrap_fragment, xblock_local_resource_url
from common.djangoapps.static_replace import process_static_urls
from xmodule.errortracker import make_error_tracker
from xmodule.modulestore.django import ModuleI18nService
from .id_managers import OpaqueKeyReader
from .shims import RuntimeShim, XBlockShim
# Module-level logger for this runtime module.
log = logging.getLogger(__name__)
# Resolve the configured Django user model once at import time.
User = get_user_model()
def make_track_function():
    """
    Build a tracking callback for XBlock events.

    The callback captures the current HTTP request (via crum) so that
    events logged later are attributed to the request that triggered them.
    """
    request = crum.get_current_request()

    def track(event_type, event):
        # Record the event in the tracking log on behalf of the captured request.
        return track_views.server_track(request, event_type, event, page='x_module')

    return track
class XBlockRuntime(RuntimeShim, Runtime):
    """
    This class manages one or more instantiated XBlocks for a particular user,
    providing those XBlocks with the standard XBlock runtime API (and some
    Open edX-specific additions) so that it can interact with the platform,
    and the platform can interact with it.
    The main reason we cannot make the runtime a long-lived singleton is that
    the XBlock runtime API requires 'user_id' to be a property of the runtime,
    not an argument passed in when loading particular blocks.
    """
    # ** Do not add any XModule compatibility code to this class **
    # Add it to RuntimeShim instead, to help keep legacy code isolated.
    # Feature flags:
    # This runtime can save state for users who aren't logged in:
    # NOTE(review): attribute name carries a typo ("suppports"); callers must
    # reference it as spelled here.
    suppports_state_for_anonymous_users = True
    def __init__(self, system, user):
        # `system` is the long-lived XBlockRuntimeSystem factory; `user` may be
        # a Django user, an AnonymousUser, or None.
        super(XBlockRuntime, self).__init__(  # lint-amnesty, pylint: disable=super-with-arguments
            id_reader=system.id_reader,
            mixins=(
                LmsBlockMixin,  # Adds Non-deprecated LMS/Studio functionality
                XBlockShim,  # Adds deprecated LMS/Studio functionality / backwards compatibility
            ),
            services={
                "i18n": ModuleI18nService(),
            },
            default_class=None,
            select=None,
            id_generator=system.id_generator,
        )
        self.system = system
        self.user = user
        # self.user_id must be set as a separate attribute since base class sets it:
        if self.user is None:
            self.user_id = None
        elif self.user.is_anonymous:
            # Anonymous users get a deterministic pseudo-ID string ("anon...").
            self.user_id = get_xblock_id_for_anonymous_user(user)
        else:
            self.user_id = self.user.id
        self.block_field_datas = {}  # dict of FieldData stores for our loaded XBlocks. Key is the block's scope_ids.
        self.django_field_data_caches = {}  # dict of FieldDataCache objects for XBlock with database-based user state
    def handler_url(self, block, handler_name, suffix='', query='', thirdparty=False):
        """
        Get the URL to a specific handler.
        """
        if thirdparty:
            log.warning("thirdparty handlers are not supported by this runtime for XBlock %s.", type(block))
        url = self.system.handler_url(usage_key=block.scope_ids.usage_id, handler_name=handler_name, user=self.user)
        if suffix:
            if not url.endswith('/'):
                url += '/'
            url += suffix
        if query:
            # Append as query string, preserving any existing one.
            url += '&' if '?' in url else '?'
            url += query
        return url
    def resource_url(self, resource):
        """Not supported; XBlocks should use local_resource_url() instead."""
        raise NotImplementedError("resource_url is not supported by Open edX.")
    def local_resource_url(self, block, uri):
        """
        Get the absolute URL to a resource file (like a CSS/JS file or an image)
        that is part of an XBlock's python module.
        """
        relative_url = xblock_local_resource_url(block, uri)
        site_root_url = get_xblock_app_config().get_site_root_url()
        absolute_url = urljoin(site_root_url, relative_url)
        return absolute_url
    def publish(self, block, event_type, event_data):
        """ Handle XBlock events like grades and completion """
        special_handler = self.get_event_handler(event_type)
        if special_handler:
            special_handler(block, event_data)
        else:
            # Unrecognized events are still recorded in the tracking log.
            self.log_event_to_tracking_log(block, event_type, event_data)
    def get_event_handler(self, event_type):
        """
        Return an appropriate function to handle the event.
        Returns None if no special processing is required.
        """
        if self.user_id is None:
            # We don't/cannot currently record grades or completion for anonymous users.
            return None
        # In the future when/if we support masquerading, need to be careful here not to affect the user's grades
        if event_type == 'grade':
            return self.handle_grade_event
        elif event_type == 'completion':
            return self.handle_completion_event
        return None
    def log_event_to_tracking_log(self, block, event_type, event_data):
        """
        Log this XBlock event to the tracking log
        """
        log_context = track_contexts.context_dict_for_learning_context(block.scope_ids.usage_id.context_key)
        if self.user_id:
            log_context['user_id'] = self.user_id
        log_context['asides'] = {}
        track_function = make_track_function()
        with tracker.get_tracker().context(event_type, log_context):
            track_function(event_type, event_data)
    def handle_grade_event(self, block, event):
        """
        Submit a grade for the block.
        """
        if not self.user.is_anonymous:
            grades_signals.SCORE_PUBLISHED.send(
                sender=None,
                block=block,
                user=self.user,
                raw_earned=event['value'],
                raw_possible=event['max_value'],
                only_if_higher=event.get('only_if_higher'),
                score_deleted=event.get('score_deleted'),
                grader_response=event.get('grader_response')
            )
    def handle_completion_event(self, block, event):
        """
        Submit a completion object for the block.
        """
        if not ENABLE_COMPLETION_TRACKING_SWITCH.is_enabled():
            # Completion tracking is globally disabled; drop the event.
            return
        BlockCompletion.objects.submit_completion(
            user=self.user,
            block_key=block.scope_ids.usage_id,
            completion=event['completion'],
        )
    def applicable_aside_types(self, block):
        """ Disable XBlock asides in this runtime """
        return []
    def parse_xml_file(self, fileobj, id_generator=None):
        """XML deserialization is not available on this runtime class."""
        # Deny access to the inherited method
        raise NotImplementedError("XML Serialization is only supported with BlockstoreXBlockRuntime")
    def add_node_as_child(self, block, node, id_generator=None):
        """
        Called by XBlock.parse_xml to treat a child node as a child block.
        """
        # Deny access to the inherited method
        raise NotImplementedError("XML Serialization is only supported with BlockstoreXBlockRuntime")
    def service(self, block, service_name):
        """
        Return a service, or None.
        Services are objects implementing arbitrary other interfaces.
        """
        # TODO: Do these declarations actually help with anything? Maybe this check should
        # be removed from here and from XBlock.runtime
        declaration = block.service_declaration(service_name)
        if declaration is None:
            raise NoSuchServiceError("Service {!r} was not requested.".format(service_name))
        # Most common service is field-data so check that first:
        if service_name == "field-data":
            if block.scope_ids not in self.block_field_datas:
                try:
                    self.block_field_datas[block.scope_ids] = self._init_field_data_for_block(block)
                except:
                    # Don't try again pointlessly every time another field is accessed
                    self.block_field_datas[block.scope_ids] = None
                    raise
            return self.block_field_datas[block.scope_ids]
        elif service_name == "completion":
            context_key = block.scope_ids.usage_id.context_key
            return CompletionService(user=self.user, context_key=context_key)
        # Check if the XBlockRuntimeSystem wants to handle this:
        service = self.system.get_service(block, service_name)
        # Otherwise, fall back to the base implementation which loads services
        # defined in the constructor:
        if service is None:
            service = super(XBlockRuntime, self).service(block, service_name)  # lint-amnesty, pylint: disable=super-with-arguments
        return service
    def _init_field_data_for_block(self, block):
        """
        Initialize the FieldData implementation for the specified XBlock
        """
        if self.user is None:
            # No user is specified, so we want to throw an error if anything attempts to read/write user-specific fields
            student_data_store = None
        elif self.user.is_anonymous:
            # This is an anonymous (non-registered) user:
            assert self.user_id.startswith("anon")
            kvs = EphemeralKeyValueStore()
            student_data_store = KvsFieldData(kvs)
        elif self.system.student_data_mode == XBlockRuntimeSystem.STUDENT_DATA_EPHEMERAL:
            # We're in an environment like Studio where we want to let the
            # author test blocks out but not permanently save their state.
            kvs = EphemeralKeyValueStore()
            student_data_store = KvsFieldData(kvs)
        else:
            # Use database-backed field data (i.e. store user_state in StudentModule)
            context_key = block.scope_ids.usage_id.context_key
            if context_key not in self.django_field_data_caches:
                field_data_cache = FieldDataCache(
                    [block], course_id=context_key, user=self.user, asides=None, read_only=False,
                )
                self.django_field_data_caches[context_key] = field_data_cache
            else:
                field_data_cache = self.django_field_data_caches[context_key]
                field_data_cache.add_descriptors_to_cache([block])
            student_data_store = KvsFieldData(kvs=DjangoKeyValueStore(field_data_cache))
        # Route each field scope to the appropriate backing store: authored
        # content/settings come from blockstore, user state from the store
        # chosen above.
        return SplitFieldData({
            Scope.content: self.system.authored_data_store,
            Scope.settings: self.system.authored_data_store,
            Scope.parent: self.system.authored_data_store,
            Scope.children: self.system.children_data_store,
            Scope.user_state_summary: student_data_store,
            Scope.user_state: student_data_store,
            Scope.user_info: student_data_store,
            Scope.preferences: student_data_store,
        })
    def render(self, block, view_name, context=None):
        """
        Render a specific view of an XBlock.
        """
        # Users who aren't logged in are not allowed to view any views other
        # than public_view. They may call any handlers though.
        if (self.user is None or self.user.is_anonymous) and view_name != 'public_view':
            raise PermissionDenied
        # We also need to override this method because some XBlocks in the
        # edx-platform codebase use methods like add_webpack_to_fragment()
        # which create relative URLs (/static/studio/bundles/webpack-foo.js).
        # We want all resource URLs to be absolute, such as is done when
        # local_resource_url() is used.
        fragment = super(XBlockRuntime, self).render(block, view_name, context)  # lint-amnesty, pylint: disable=super-with-arguments
        needs_fix = False
        for resource in fragment.resources:
            if resource.kind == 'url' and resource.data.startswith('/'):
                needs_fix = True
                break
        if needs_fix:
            log.warning("XBlock %s returned relative resource URLs, which are deprecated", block.scope_ids.usage_id)
            # The Fragment API is mostly immutable, so changing a resource requires this:
            frag_data = fragment.to_dict()
            for resource in frag_data['resources']:
                if resource['kind'] == 'url' and resource['data'].startswith('/'):
                    log.debug("-> Relative resource URL: %s", resource['data'])
                    resource['data'] = get_xblock_app_config().get_site_root_url() + resource['data']
            fragment = Fragment.from_dict(frag_data)
        # Apply any required transforms to the fragment.
        # We could move to doing this in wrap_xblock() and/or use an array of
        # wrapper methods like the ConfigurableFragmentWrapper mixin does.
        fragment = wrap_fragment(fragment, self.transform_static_paths_to_urls(block, fragment.content))
        return fragment
    def transform_static_paths_to_urls(self, block, html_str):
        """
        Given an HTML string, replace any static file paths like
        /static/foo.png
        (which are really pointing to block-specific assets stored in blockstore)
        with working absolute URLs like
        https://s3.example.com/blockstore/bundle17/this-block/assets/324.png
        See common/djangoapps/static_replace/__init__.py
        This is generally done automatically for the HTML rendered by XBlocks,
        but if an XBlock wants to have correct URLs in data returned by its
        handlers, the XBlock must call this API directly.
        Note that the paths are only replaced if they are in "quotes" such as if
        they are an HTML attribute or JSON data value. Thus, to transform only a
        single path string on its own, you must pass html_str=f'"{path}"'
        """
        def replace_static_url(original, prefix, quote, rest):  # pylint: disable=unused-argument
            """
            Replace a single matched url.
            """
            original_url = prefix + rest
            # Don't mess with things that end in '?raw'
            if rest.endswith('?raw'):
                new_url = original_url
            else:
                new_url = self._lookup_asset_url(block, rest) or original_url
            return "".join([quote, new_url, quote])
        return process_static_urls(html_str, replace_static_url)
    def _lookup_asset_url(self, block, asset_path):  # pylint: disable=unused-argument
        """
        Return an absolute URL for the specified static asset file that may
        belong to this XBlock.
        e.g. if the XBlock settings have a field value like "/static/foo.png"
        then this method will be called with asset_path="foo.png" and should
        return a URL like https://cdn.none/xblock/f843u89789/static/foo.png
        If the asset file is not recognized, return None
        """
        # Subclasses should override this
        return None
class XBlockRuntimeSystem(object):
    """
    This class is essentially a factory for XBlockRuntimes. This is a
    long-lived object which provides the behavior specific to the application
    that wants to use XBlocks. Unlike XBlockRuntime, a single instance of this
    class can be used with many different XBlocks, whereas each XBlock gets its
    own instance of XBlockRuntime.
    """
    # How student (user) state is persisted: kept only in memory, or written
    # through to the database.
    STUDENT_DATA_EPHEMERAL = 'ephemeral'
    STUDENT_DATA_PERSISTED = 'persisted'
    def __init__(
            self,
            handler_url,  # type: (Callable[[UsageKey, str, Union[int, ANONYMOUS_USER]], str]
            student_data_mode,  # type: Union[STUDENT_DATA_EPHEMERAL, STUDENT_DATA_PERSISTED]
            runtime_class,  # type: XBlockRuntime
    ):
        """
        args:
            handler_url: A method to get URLs to call XBlock handlers. It must
                implement this signature:
                handler_url(
                    usage_key: UsageKey,
                    handler_name: str,
                    user_id: Union[int, str],
                )
            student_data_mode: Specifies whether student data should be kept
                in a temporary in-memory store (e.g. Studio) or persisted
                forever in the database.
            runtime_class: What runtime to use, e.g. BlockstoreXBlockRuntime
        """
        self.handler_url = handler_url
        self.id_reader = OpaqueKeyReader()
        self.id_generator = MemoryIdManager()  # We don't really use id_generator until we need to support asides
        self.runtime_class = runtime_class
        # Authored (course content) data always comes from blockstore.
        self.authored_data_store = BlockstoreFieldData()
        self.children_data_store = BlockstoreChildrenData(self.authored_data_store)
        assert student_data_mode in (self.STUDENT_DATA_EPHEMERAL, self.STUDENT_DATA_PERSISTED)
        self.student_data_mode = student_data_mode
        self._error_trackers = {}
    def get_runtime(self, user):
        """
        Get the XBlock runtime for the specified Django user. The user can be
        a regular user, an AnonymousUser, or None.
        """
        return self.runtime_class(self, user)
    def get_service(self, block, service_name):
        """
        Get a runtime service
        Runtime services may come from this XBlockRuntimeSystem,
        or if this method returns None, they may come from the
        XBlockRuntime.
        """
        if service_name == 'error_tracker':
            return self.get_error_tracker_for_context(block.scope_ids.usage_id.context_key)
        return None  # None means see if XBlockRuntime offers this service
    @lru_cache(maxsize=32)
    def get_error_tracker_for_context(self, context_key):  # pylint: disable=unused-argument
        """
        Get an error tracker for the specified context.
        lru_cache makes this error tracker long-lived, for
        up to 32 contexts that have most recently been used.
        """
        return make_error_tracker()
| agpl-3.0 |
wagnerand/olympia | scripts/crontab/gen-cron.py | 6 | 1448 | #!/usr/bin/env python
import os
from optparse import OptionParser
# Load the crontab template that sits next to this script. A context manager
# closes the file handle promptly instead of leaking it until GC (the original
# one-liner never closed the handle it opened).
with open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')) as _template_file:
    TEMPLATE = _template_file.read()
def main():
    """Render the crontab template with site-specific command prefixes.

    Parses command-line options (zamboni checkout location, optional cron
    user prefix, python interpreter, deprecation-warning toggle) and prints
    the filled-in crontab to stdout. Exits with a parser error if -z is
    missing.
    """
    parser = OptionParser()
    parser.add_option("-z", "--zamboni",
                      help="Location of zamboni (required)")
    parser.add_option("-u", "--user",
                      help=("Prefix cron with this user. "
                            "Only define for cron.d style crontabs"))
    parser.add_option("-p", "--python", default="/usr/bin/python2.7",
                      help="Python interpreter to use")
    parser.add_option("-d", "--deprecations", default=False,
                      help="Show deprecation warnings")
    (opts, args) = parser.parse_args()

    if not opts.zamboni:
        parser.error("-z must be defined")

    if not opts.deprecations:
        opts.python += ' -W ignore::DeprecationWarning'

    dogwrap_path = '/usr/local/bin/amo_cron_dogwrap'
    ctx = {
        "django": "cd %s; %s %s manage.py" % (opts.zamboni,
                                              dogwrap_path,
                                              opts.python)
    }
    ctx['z_cron'] = '%s cron' % ctx['django']

    if opts.user:
        # dict.iteritems() is Python-2-only, and reassigning values while
        # iterating a live view is fragile; snapshot the items first and use
        # the 2/3-compatible items() API.
        for k, v in list(ctx.items()):
            ctx[k] = '%s %s' % (opts.user, v)

    # Needs to stay below the opts.user injection.
    ctx['python'] = opts.python

    print(TEMPLATE % ctx)


if __name__ == "__main__":
    main()
| bsd-3-clause |
isra17/DIE | DIE/UI/ParserView.py | 8 | 2400 | from DIE.Lib import DataParser
from idaapi import PluginForm
from PySide import QtGui, QtCore
class ParserView(PluginForm):
    """
    DIE data-parser view: a tree widget listing every registered parser
    plugin along with its metadata columns.
    """

    def __init__(self):
        super(ParserView, self).__init__()
        self.data_parser = None   # DataParser singleton, resolved in OnCreate
        self.ptable_widget = None  # QTreeWidget holding the parser table

    def Show(self):
        """Display the form, persisting it between IDA sessions."""
        return PluginForm.Show(self,
                               "Parser View",
                               options=PluginForm.FORM_PERSIST)

    def OnCreate(self, form):
        """
        Called when the view is created
        """
        self.data_parser = DataParser.getParser()
        self.ptable_widget = QtGui.QTreeWidget()

        # Get parent widget
        self.parent = self.FormToPySideWidget(form)
        self._add_parser_data()

        layout = QtGui.QGridLayout()
        layout.addWidget(self.ptable_widget)
        self.parent.setLayout(layout)

    def _add_parser_data(self):
        """
        Add parser data to the parser widget model.
        Populates one row per parser plugin; the "headers" entry of the
        parser list supplies the column titles and is removed before rows
        are inserted.
        """
        row = 0
        parser_list = self.data_parser.get_parser_list()

        if "headers" not in parser_list:
            return

        header_list = parser_list["headers"]
        header_list.insert(0, "Plugin Name")
        del parser_list["headers"]  # Remove headers item

        self.ptable_widget.setHeaderLabels(header_list)
        self.ptable_widget.setColumnWidth(0, 200)
        self.ptable_widget.setColumnWidth(1, 500)
        self.ptable_widget.setColumnWidth(2, 80)
        self.ptable_widget.setColumnWidth(3, 80)
        self.ptable_widget.setColumnWidth(4, 200)

        root_item = self.ptable_widget.invisibleRootItem()
        for parser in parser_list:
            current_row_item = QtGui.QTreeWidgetItem()
            current_row_item.setFlags(QtCore.Qt.ItemIsEnabled)
            current_row_item.setText(0, parser)

            num_columns = len(parser_list[parser])
            # range() instead of the Python-2-only xrange keeps this working
            # on both Python 2 and Python 3 IDA installs.
            for column in range(0, num_columns):
                column_text = str(parser_list[parser][column])
                current_row_item.setText(column + 1, column_text)

            root_item.insertChild(row, current_row_item)
            row += 1
# Module-level singleton holding the one ParserView instance (None until
# initialize() is called).
_parser_view = None


def initialize():
    """Create the module-wide ParserView singleton."""
    global _parser_view
    _parser_view = ParserView()


def get_view():
    """Return the module-wide ParserView singleton (None before initialize())."""
    return _parser_view
| mit |
pmuellr/cf-buildpack-nodejs | bin/semver.py | 2 | 2559 | # from: https://github.com/k-bx/python-semver/blob/master/semver.py
# license: BSD, it appears
# -*- coding: utf-8 -*-
import re
# Compiled SemVer pattern: MAJOR.MINOR.PATCH with optional -prerelease and
# +build suffixes (dot-separated alphanumeric identifiers). Raw strings avoid
# invalid-escape warnings for the \., \- and \+ literals.
_REGEX = re.compile(r'^(?P<major>[0-9]+)'
                    r'\.(?P<minor>[0-9]+)'
                    r'\.(?P<patch>[0-9]+)'
                    r'(\-(?P<prerelease>[0-9A-Za-z]+(\.[0-9A-Za-z]+)*))?'
                    r'(\+(?P<build>[0-9A-Za-z]+(\.[0-9A-Za-z]+)*))?$')

# Provide cmp() on interpreters that lack it (Python 3 removed it).
# The original check `'cmp' not in __builtins__` is broken: when this file
# runs as __main__, __builtins__ is a module, and `in` on a module raises
# TypeError. Probing the name directly works in every context.
try:
    cmp
except NameError:
    def cmp(a, b):
        """Three-way comparison: -1, 0 or 1."""
        return (a > b) - (a < b)
def parse(version):
    """
    Parse version to major, minor, patch, pre-release, build parts.
    Raises ValueError when the string is not valid SemVer.
    """
    # Inline SemVer pattern: MAJOR.MINOR.PATCH[-prerelease][+build].
    match = re.match(
        r'^(?P<major>[0-9]+)'
        r'\.(?P<minor>[0-9]+)'
        r'\.(?P<patch>[0-9]+)'
        r'(\-(?P<prerelease>[0-9A-Za-z]+(\.[0-9A-Za-z]+)*))?'
        r'(\+(?P<build>[0-9A-Za-z]+(\.[0-9A-Za-z]+)*))?$',
        version)
    if match is None:
        raise ValueError('%s is not valid SemVer string' % version)
    parts = match.groupdict()
    # The three numeric components come back as strings; convert them.
    for key in ('major', 'minor', 'patch'):
        parts[key] = int(parts[key])
    return parts
def compare(ver1, ver2):
    """Compare two SemVer strings.

    Returns -1 if ver1 < ver2, 0 if equal, 1 if ver1 > ver2. Ordering is by
    major/minor/patch first, then pre-release, then build metadata.

    Raises:
        ValueError: if either string is not valid SemVer (via parse()).
    """
    def _cmp(a, b):
        # Local three-way comparison. The module-level cmp() shim relies on
        # fragile Python-2 detection, so this function does not depend on it.
        return (a > b) - (a < b)

    def nat_cmp(a, b):
        # Natural-order comparison: split digit runs out of the string,
        # compare them numerically and everything else case-insensitively.
        a, b = a or '', b or ''
        # Bug fix: the original `text.isdigit() and int(text) or text.lower()`
        # mishandled "0" — int("0") is falsy, so "0" fell through to the
        # string branch and compared inconsistently with other numeric chunks.
        convert = lambda text: int(text) if text.isdigit() else text.lower()
        alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
        return _cmp(alphanum_key(a), alphanum_key(b))

    def compare_by_keys(d1, d2):
        # The numeric components dominate; only on a tie do pre-release and
        # build metadata break it.
        for key in ['major', 'minor', 'patch']:
            v = _cmp(d1.get(key), d2.get(key))
            if v:
                return v
        rc1, rc2 = d1.get('prerelease'), d2.get('prerelease')
        build1, build2 = d1.get('build'), d2.get('build')
        rccmp = nat_cmp(rc1, rc2)
        buildcmp = nat_cmp(build1, build2)
        if not (rc1 or rc2):
            return buildcmp
        elif not rc1:
            # Only ver2 has a pre-release tag: a pre-release sorts before the
            # corresponding release, so ver1 wins.
            return 1
        elif not rc2:
            return -1
        return rccmp or buildcmp or 0

    v1, v2 = parse(ver1), parse(ver2)
    return compare_by_keys(v1, v2)
def match(version, match_expr):
    """Return whether `version` satisfies `match_expr` (e.g. '>=1.2.0')."""
    # Map each comparison operator to the compare() results it accepts.
    accepted = {
        '>': (1,),
        '<': (-1,),
        '==': (0,),
        '>=': (0, 1),
        '<=': (-1, 0)
    }
    head = match_expr[:2]
    if head in ('>=', '<=', '=='):
        op = head
        target = match_expr[2:]
    elif head and head[0] in ('>', '<', '='):
        op = head[0]
        target = match_expr[1:]
    else:
        raise ValueError("match_expr parameter should be in format <op><ver>, "
                         "where <op> is one of ['<', '>', '==', '<=', '>=']. "
                         "You provided: %r" % match_expr)
    return compare(version, target) in accepted[op]
zsiciarz/django | django/contrib/gis/geos/prototypes/prepared.py | 178 | 1184 | from ctypes import c_char
from django.contrib.gis.geos.libgeos import (
GEOM_PTR, PREPGEOM_PTR, GEOSFuncFactory,
)
from django.contrib.gis.geos.prototypes.errcheck import check_predicate
# Prepared geometry constructor and destructors.
# GEOSPrepare builds an optimized "prepared" representation of a geometry;
# GEOSPreparedGeom_destroy releases it. Both are thin ctypes wrappers.
geos_prepare = GEOSFuncFactory('GEOSPrepare', argtypes=[GEOM_PTR], restype=PREPGEOM_PTR)
prepared_destroy = GEOSFuncFactory('GEOSPreparedGeom_destroy', argtypes=[PREPGEOM_PTR])
# Prepared geometry binary predicate support.
class PreparedPredicate(GEOSFuncFactory):
    # Every GEOS prepared-geometry predicate shares this C signature:
    # (prepared geometry, ordinary geometry) -> char.
    # NOTE(review): check_predicate presumably converts the raw char result
    # to a bool and raises on a GEOS error — confirm in prototypes/errcheck.py.
    argtypes = [PREPGEOM_PTR, GEOM_PTR]
    restype = c_char
    errcheck = staticmethod(check_predicate)
# One callable per GEOS prepared-geometry binary predicate; each takes
# (prepared geometry, other geometry) as declared on PreparedPredicate.
prepared_contains = PreparedPredicate('GEOSPreparedContains')
prepared_contains_properly = PreparedPredicate('GEOSPreparedContainsProperly')
prepared_covers = PreparedPredicate('GEOSPreparedCovers')
prepared_crosses = PreparedPredicate('GEOSPreparedCrosses')
prepared_disjoint = PreparedPredicate('GEOSPreparedDisjoint')
prepared_intersects = PreparedPredicate('GEOSPreparedIntersects')
prepared_overlaps = PreparedPredicate('GEOSPreparedOverlaps')
prepared_touches = PreparedPredicate('GEOSPreparedTouches')
prepared_within = PreparedPredicate('GEOSPreparedWithin')
| bsd-3-clause |
AOSPU/external_chromium_org | sync/tools/testserver/xmppserver.py | 150 | 19562 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A bare-bones and non-compliant XMPP server.
Just enough of the protocol is implemented to get it to work with
Chrome's sync notification system.
"""
import asynchat
import asyncore
import base64
import re
import socket
from xml.dom import minidom
# pychecker complains about the use of fileno(), which is implemented
# by asyncore by forwarding to an internal object via __getattr__.
__pychecker__ = 'no-classattr'
class Error(Exception):
  """Base exception type for this module; all module errors derive from it."""
class UnexpectedXml(Error):
  """Raised when an unexpected XML element has been encountered."""

  def __init__(self, xml_element):
    # Include the offending element's serialized form in the error args.
    Error.__init__(self, 'Unexpected XML element', xml_element.toxml())
def ParseXml(xml_string):
  """Parses the given string as XML and returns a minidom element
  object.
  """
  document = minidom.parseString(xml_string)

  def _fix_subtree(element):
    # minidom handles xmlns specially, but there's a bug where it sets
    # the attribute value to None, which causes toxml() or toprettyxml()
    # to break. Normalize it to the empty string, recursively, for every
    # element in the subtree.
    if element.getAttribute('xmlns') is None:
      element.setAttribute('xmlns', '')
    for child in element.childNodes:
      if child.nodeType == child.ELEMENT_NODE:
        _fix_subtree(child)

  root = document.documentElement
  _fix_subtree(root)
  return root
def CloneXml(xml):
  """Returns a deep copy of the given XML element.
  Args:
    xml: The XML element, which should be something returned from
         ParseXml() (i.e., a root element).
  """
  # Clone the whole owning document so the copy has its own ownerDocument,
  # then hand back that clone's root element.
  return xml.ownerDocument.cloneNode(True).documentElement
class StanzaParser(object):
  """A hacky incremental XML parser.
  StanzaParser consumes data incrementally via FeedString() and feeds
  its delegate complete parsed stanzas (i.e., XML documents) via
  FeedStanza(). Any stanzas passed to FeedStanza() are unlinked after
  the callback is done.
  Use like so:
  class MyClass(object):
    ...
    def __init__(self, ...):
      ...
      self._parser = StanzaParser(self)
      ...
    def SomeFunction(self, ...):
      ...
      self._parser.FeedString(some_data)
      ...
    def FeedStanza(self, stanza):
      ...
      print stanza.toprettyxml()
      ...
  """
  # NOTE(akalin): The following regexps are naive, but necessary since
  # none of the existing Python 2.4/2.5 XML libraries support
  # incremental parsing. This works well enough for our purposes.
  #
  # The regexps below assume that any present XML element starts at
  # the beginning of the string, but there may be trailing whitespace.
  # Matches an opening stream tag (e.g., '<stream:stream foo="bar">')
  # (assumes that the stream XML namespace is defined in the tag).
  _stream_re = re.compile(r'^(<stream:stream [^>]*>)\s*')
  # Matches an empty element tag (e.g., '<foo bar="baz"/>').
  _empty_element_re = re.compile(r'^(<[^>]*/>)\s*')
  # Matches a non-empty element (e.g., '<foo bar="baz">quux</foo>').
  # Does *not* handle nested elements.
  _non_empty_element_re = re.compile(r'^(<([^ >]*)[^>]*>.*?</\2>)\s*')
  # The closing tag for a stream tag. We have to insert this
  # ourselves since all XML stanzas are children of the stream tag,
  # which is never closed until the connection is closed.
  _stream_suffix = '</stream:stream>'
  def __init__(self, delegate):
    # _buffer accumulates raw data until at least one complete stanza
    # can be matched at its head.
    self._buffer = ''
    self._delegate = delegate
  def FeedString(self, data):
    """Consumes the given string data, possibly feeding one or more
    stanzas to the delegate.
    """
    self._buffer += data
    # Keep extracting stanzas from the head of the buffer until none of
    # the patterns match. Order matters: a stream open tag must be tried
    # before the generic element patterns.
    while (self._ProcessBuffer(self._stream_re, self._stream_suffix) or
           self._ProcessBuffer(self._empty_element_re) or
           self._ProcessBuffer(self._non_empty_element_re)):
      pass
  def _ProcessBuffer(self, regexp, xml_suffix=''):
    """If the buffer matches the given regexp, removes the match from
    the buffer, appends the given suffix, parses it, and feeds it to
    the delegate.
    Returns:
      Whether or not the buffer matched the given regexp.
    """
    results = regexp.match(self._buffer)
    if not results:
      return False
    # Take the matched text (plus any synthetic closing suffix, needed to
    # make a lone stream open tag into a parseable document) and consume
    # it from the buffer.
    xml_text = self._buffer[:results.end()] + xml_suffix
    self._buffer = self._buffer[results.end():]
    stanza = ParseXml(xml_text)
    self._delegate.FeedStanza(stanza)
    # Needed because stanza may have cycles.
    stanza.unlink()
    return True
class Jid(object):
  """Simple struct for an XMPP jid (essentially an e-mail address with
  an optional resource string).
  """

  def __init__(self, username, domain, resource=''):
    self.username = username
    self.domain = domain
    self.resource = resource

  def __str__(self):
    # Render as user@domain, with "/resource" appended when present.
    base = '%s@%s' % (self.username, self.domain)
    if not self.resource:
      return base
    return '%s/%s' % (base, self.resource)

  def GetBareJid(self):
    """Return a copy of this jid with the resource stripped."""
    return Jid(self.username, self.domain)
class IdGenerator(object):
  """Hands out sequential unique IDs of the form '<prefix>.<counter>'."""

  def __init__(self, prefix):
    self._prefix = prefix
    self._id = 0

  def GetNextId(self):
    # Format the current counter value, then advance it for the next call.
    result = '%s.%d' % (self._prefix, self._id)
    self._id += 1
    return result
class HandshakeTask(object):
  """Class to handle the initial handshake with a connected XMPP
  client.

  Implements a small linear state machine: stream open -> SASL auth ->
  second stream open -> resource bind -> session -> finished. Each state
  consumes exactly one expected stanza and replies via the connection.
  """
  # The handshake states in order.
  (_INITIAL_STREAM_NEEDED,
   _AUTH_NEEDED,
   _AUTH_STREAM_NEEDED,
   _BIND_NEEDED,
   _SESSION_NEEDED,
   _FINISHED) = range(6)
  # Used when in the _INITIAL_STREAM_NEEDED and _AUTH_STREAM_NEEDED
  # states. Not an XML object as it's only the opening tag.
  #
  # The from and id attributes are filled in later.
  _STREAM_DATA = (
    '<stream:stream from="%s" id="%s" '
    'version="1.0" xmlns:stream="http://etherx.jabber.org/streams" '
    'xmlns="jabber:client">')
  # Used when in the _INITIAL_STREAM_NEEDED state.
  _AUTH_STANZA = ParseXml(
    '<stream:features xmlns:stream="http://etherx.jabber.org/streams">'
    ' <mechanisms xmlns="urn:ietf:params:xml:ns:xmpp-sasl">'
    ' <mechanism>PLAIN</mechanism>'
    ' <mechanism>X-GOOGLE-TOKEN</mechanism>'
    ' <mechanism>X-OAUTH2</mechanism>'
    ' </mechanisms>'
    '</stream:features>')
  # Used when in the _AUTH_NEEDED state.
  _AUTH_SUCCESS_STANZA = ParseXml(
    '<success xmlns="urn:ietf:params:xml:ns:xmpp-sasl"/>')
  # Used when in the _AUTH_NEEDED state.
  _AUTH_FAILURE_STANZA = ParseXml(
    '<failure xmlns="urn:ietf:params:xml:ns:xmpp-sasl"/>')
  # Used when in the _AUTH_STREAM_NEEDED state.
  _BIND_STANZA = ParseXml(
    '<stream:features xmlns:stream="http://etherx.jabber.org/streams">'
    ' <bind xmlns="urn:ietf:params:xml:ns:xmpp-bind"/>'
    ' <session xmlns="urn:ietf:params:xml:ns:xmpp-session"/>'
    '</stream:features>')
  # Used when in the _BIND_NEEDED state.
  #
  # The id and jid attributes are filled in later.
  _BIND_RESULT_STANZA = ParseXml(
    '<iq id="" type="result">'
    ' <bind xmlns="urn:ietf:params:xml:ns:xmpp-bind">'
    ' <jid/>'
    ' </bind>'
    '</iq>')
  # Used when in the _SESSION_NEEDED state.
  #
  # The id attribute is filled in later.
  _IQ_RESPONSE_STANZA = ParseXml('<iq id="" type="result"/>')
  def __init__(self, connection, resource_prefix, authenticated):
    # connection: the owning XmppConnection; used to send replies and to
    # signal completion via HandshakeDone().
    # authenticated: when False, the SASL step replies with failure and
    # the handshake finishes early without a bound jid.
    self._connection = connection
    self._id_generator = IdGenerator(resource_prefix)
    self._username = ''
    self._domain = ''
    self._jid = None
    self._authenticated = authenticated
    self._resource_prefix = resource_prefix
    self._state = self._INITIAL_STREAM_NEEDED
  def FeedStanza(self, stanza):
    """Inspects the given stanza and changes the handshake state if needed.
    Called when a stanza is received from the client. Inspects the
    stanza to make sure it has the expected attributes given the
    current state, advances the state if needed, and sends a reply to
    the client if needed.

    Raises:
      UnexpectedXml: if the stanza does not match what the current
      state expects.
    """
    def ExpectStanza(stanza, name):
      if stanza.tagName != name:
        raise UnexpectedXml(stanza)
    # NOTE(review): the `type` parameter shadows the builtin of the same
    # name; harmless here since the builtin is not used in this scope.
    def ExpectIq(stanza, type, name):
      ExpectStanza(stanza, 'iq')
      if (stanza.getAttribute('type') != type or
          stanza.firstChild.tagName != name):
        raise UnexpectedXml(stanza)
    def GetStanzaId(stanza):
      return stanza.getAttribute('id')
    def HandleStream(stanza):
      ExpectStanza(stanza, 'stream:stream')
      domain = stanza.getAttribute('to')
      if domain:
        self._domain = domain
      SendStreamData()
    def SendStreamData():
      next_id = self._id_generator.GetNextId()
      stream_data = self._STREAM_DATA % (self._domain, next_id)
      self._connection.SendData(stream_data)
    def GetUserDomain(stanza):
      # Decode the SASL PLAIN payload: base64("\0user[@domain]\0password").
      # (Python 2 semantics assumed: b64decode returns a str here.)
      encoded_username_password = stanza.firstChild.data
      username_password = base64.b64decode(encoded_username_password)
      (_, username_domain, _) = username_password.split('\0')
      # The domain may be omitted.
      #
      # If we were using python 2.5, we'd be able to do:
      #
      # username, _, domain = username_domain.partition('@')
      # if not domain:
      # domain = self._domain
      at_pos = username_domain.find('@')
      if at_pos != -1:
        username = username_domain[:at_pos]
        domain = username_domain[at_pos+1:]
      else:
        username = username_domain
        domain = self._domain
      return (username, domain)
    def Finish():
      self._state = self._FINISHED
      self._connection.HandshakeDone(self._jid)
    if self._state == self._INITIAL_STREAM_NEEDED:
      HandleStream(stanza)
      self._connection.SendStanza(self._AUTH_STANZA, False)
      self._state = self._AUTH_NEEDED
    elif self._state == self._AUTH_NEEDED:
      ExpectStanza(stanza, 'auth')
      (self._username, self._domain) = GetUserDomain(stanza)
      if self._authenticated:
        self._connection.SendStanza(self._AUTH_SUCCESS_STANZA, False)
        self._state = self._AUTH_STREAM_NEEDED
      else:
        # Failed auth ends the handshake with no jid bound.
        self._connection.SendStanza(self._AUTH_FAILURE_STANZA, False)
        Finish()
    elif self._state == self._AUTH_STREAM_NEEDED:
      HandleStream(stanza)
      self._connection.SendStanza(self._BIND_STANZA, False)
      self._state = self._BIND_NEEDED
    elif self._state == self._BIND_NEEDED:
      ExpectIq(stanza, 'set', 'bind')
      stanza_id = GetStanzaId(stanza)
      resource_element = stanza.getElementsByTagName('resource')[0]
      resource = resource_element.firstChild.data
      # Prefix the client-requested resource so it is unique per connection.
      full_resource = '%s.%s' % (self._resource_prefix, resource)
      response = CloneXml(self._BIND_RESULT_STANZA)
      response.setAttribute('id', stanza_id)
      self._jid = Jid(self._username, self._domain, full_resource)
      jid_text = response.parentNode.createTextNode(str(self._jid))
      response.getElementsByTagName('jid')[0].appendChild(jid_text)
      self._connection.SendStanza(response)
      self._state = self._SESSION_NEEDED
    elif self._state == self._SESSION_NEEDED:
      ExpectIq(stanza, 'set', 'session')
      stanza_id = GetStanzaId(stanza)
      xml = CloneXml(self._IQ_RESPONSE_STANZA)
      xml.setAttribute('id', stanza_id)
      self._connection.SendStanza(xml)
      Finish()
def AddrString(addr):
  """Format a (host, port) tuple as 'host:port'."""
  host, port = addr
  return '%s:%d' % (host, port)
class XmppConnection(asynchat.async_chat):
"""A single XMPP client connection.
This class handles the connection to a single XMPP client (via a
socket). It does the XMPP handshake and also implements the (old)
Google notification protocol.
"""
# Used for acknowledgements to the client.
#
# The from and id attributes are filled in later.
_IQ_RESPONSE_STANZA = ParseXml('<iq from="" id="" type="result"/>')
def __init__(self, sock, socket_map, delegate, addr, authenticated):
"""Starts up the xmpp connection.
Args:
sock: The socket to the client.
socket_map: A map from sockets to their owning objects.
delegate: The delegate, which is notified when the XMPP
handshake is successful, when the connection is closed, and
when a notification has to be broadcast.
addr: The host/port of the client.
"""
# We do this because in versions of python < 2.6,
# async_chat.__init__ doesn't take a map argument nor pass it to
# dispatcher.__init__. We rely on the fact that
# async_chat.__init__ calls dispatcher.__init__ as the last thing
# it does, and that calling dispatcher.__init__ with socket=None
# and map=None is essentially a no-op.
asynchat.async_chat.__init__(self)
asyncore.dispatcher.__init__(self, sock, socket_map)
self.set_terminator(None)
self._delegate = delegate
self._parser = StanzaParser(self)
self._jid = None
self._addr = addr
addr_str = AddrString(self._addr)
self._handshake_task = HandshakeTask(self, addr_str, authenticated)
print 'Starting connection to %s' % self
def __str__(self):
if self._jid:
return str(self._jid)
else:
return AddrString(self._addr)
# async_chat implementation.
def collect_incoming_data(self, data):
self._parser.FeedString(data)
# This is only here to make pychecker happy.
def found_terminator(self):
asynchat.async_chat.found_terminator(self)
def close(self):
print "Closing connection to %s" % self
self._delegate.OnXmppConnectionClosed(self)
asynchat.async_chat.close(self)
# Called by self._parser.FeedString().
def FeedStanza(self, stanza):
if self._handshake_task:
self._handshake_task.FeedStanza(stanza)
elif stanza.tagName == 'iq' and stanza.getAttribute('type') == 'result':
# Ignore all client acks.
pass
elif (stanza.firstChild and
stanza.firstChild.namespaceURI == 'google:push'):
self._HandlePushCommand(stanza)
else:
raise UnexpectedXml(stanza)
# Called by self._handshake_task.
def HandshakeDone(self, jid):
if jid:
self._jid = jid
self._handshake_task = None
self._delegate.OnXmppHandshakeDone(self)
print "Handshake done for %s" % self
else:
print "Handshake failed for %s" % self
self.close()
def _HandlePushCommand(self, stanza):
if stanza.tagName == 'iq' and stanza.firstChild.tagName == 'subscribe':
# Subscription request.
self._SendIqResponseStanza(stanza)
elif stanza.tagName == 'message' and stanza.firstChild.tagName == 'push':
# Send notification request.
self._delegate.ForwardNotification(self, stanza)
else:
raise UnexpectedXml(command_xml)
def _SendIqResponseStanza(self, iq):
stanza = CloneXml(self._IQ_RESPONSE_STANZA)
stanza.setAttribute('from', str(self._jid.GetBareJid()))
stanza.setAttribute('id', iq.getAttribute('id'))
self.SendStanza(stanza)
def SendStanza(self, stanza, unlink=True):
"""Sends a stanza to the client.
Args:
stanza: The stanza to send.
unlink: Whether to unlink stanza after sending it. (Pass in
False if stanza is a constant.)
"""
self.SendData(stanza.toxml())
if unlink:
stanza.unlink()
def SendData(self, data):
"""Sends raw data to the client.
"""
# We explicitly encode to ascii as that is what the client expects
# (some minidom library functions return unicode strings).
self.push(data.encode('ascii'))
def ForwardNotification(self, notification_stanza):
"""Forwards a notification to the client."""
notification_stanza.setAttribute('from', str(self._jid.GetBareJid()))
notification_stanza.setAttribute('to', str(self._jid))
self.SendStanza(notification_stanza, False)
class XmppServer(asyncore.dispatcher):
"""The main XMPP server class.
The XMPP server starts accepting connections on the given address
and spawns off XmppConnection objects for each one.
Use like so:
socket_map = {}
xmpp_server = xmppserver.XmppServer(socket_map, ('127.0.0.1', 5222))
asyncore.loop(30.0, False, socket_map)
"""
# Used when sending a notification.
_NOTIFICATION_STANZA = ParseXml(
'<message>'
' <push xmlns="google:push">'
' <data/>'
' </push>'
'</message>')
def __init__(self, socket_map, addr):
asyncore.dispatcher.__init__(self, None, socket_map)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.set_reuse_addr()
self.bind(addr)
self.listen(5)
self._socket_map = socket_map
self._connections = set()
self._handshake_done_connections = set()
self._notifications_enabled = True
self._authenticated = True
def handle_accept(self):
(sock, addr) = self.accept()
xmpp_connection = XmppConnection(
sock, self._socket_map, self, addr, self._authenticated)
self._connections.add(xmpp_connection)
# Return the new XmppConnection for testing.
return xmpp_connection
def close(self):
# A copy is necessary since calling close on each connection
# removes it from self._connections.
for connection in self._connections.copy():
connection.close()
asyncore.dispatcher.close(self)
def EnableNotifications(self):
self._notifications_enabled = True
def DisableNotifications(self):
self._notifications_enabled = False
def MakeNotification(self, channel, data):
"""Makes a notification from the given channel and encoded data.
Args:
channel: The channel on which to send the notification.
data: The notification payload.
"""
notification_stanza = CloneXml(self._NOTIFICATION_STANZA)
push_element = notification_stanza.getElementsByTagName('push')[0]
push_element.setAttribute('channel', channel)
data_element = push_element.getElementsByTagName('data')[0]
encoded_data = base64.b64encode(data)
data_text = notification_stanza.parentNode.createTextNode(encoded_data)
data_element.appendChild(data_text)
return notification_stanza
def SendNotification(self, channel, data):
"""Sends a notification to all connections.
Args:
channel: The channel on which to send the notification.
data: The notification payload.
"""
notification_stanza = self.MakeNotification(channel, data)
self.ForwardNotification(None, notification_stanza)
notification_stanza.unlink()
def SetAuthenticated(self, auth_valid):
self._authenticated = auth_valid
# We check authentication only when establishing new connections. We close
# all existing connections here to make sure previously connected clients
# pick up on the change. It's a hack, but it works well enough for our
# purposes.
if not self._authenticated:
for connection in self._handshake_done_connections:
connection.close()
def GetAuthenticated(self):
return self._authenticated
# XmppConnection delegate methods.
def OnXmppHandshakeDone(self, xmpp_connection):
self._handshake_done_connections.add(xmpp_connection)
def OnXmppConnectionClosed(self, xmpp_connection):
self._connections.discard(xmpp_connection)
self._handshake_done_connections.discard(xmpp_connection)
def ForwardNotification(self, unused_xmpp_connection, notification_stanza):
if self._notifications_enabled:
for connection in self._handshake_done_connections:
print 'Sending notification to %s' % connection
connection.ForwardNotification(notification_stanza)
else:
print 'Notifications disabled; dropping notification'
| bsd-3-clause |
wrouesnel/ansible | test/units/modules/network/netscaler/test_netscaler_server.py | 23 | 24833 |
# Copyright (c) 2017 Citrix Systems
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from ansible.compat.tests.mock import patch, Mock, MagicMock, call
from units.modules.utils import set_module_args
from .netscaler_module import TestModule, nitro_base_patcher
import sys
if sys.version_info[:2] != (2, 6):
import requests
class TestNetscalerServerModule(TestModule):
    """Unit tests for the netscaler_server Ansible module.

    The Citrix nitro SDK is not installed in the test environment, so the
    whole nssrc.* package tree is replaced in sys.modules with mocks (see
    setUpClass).  Each test then patches the module's helper functions
    (get_nitro_client, server_exists, server_identical, diff_list,
    ConfigProxy, do_state_change, ...) to drive a specific code path and
    asserts on the resulting calls or on the failure message.
    """

    @classmethod
    def setUpClass(cls):
        # Stand-in for the SDK's nitro_exception so tests can raise/catch it.
        class MockException(Exception):
            pass
        cls.MockException = MockException
        m = MagicMock()
        cls.server_mock = MagicMock()
        cls.server_mock.__class__ = MagicMock(add=Mock())
        # Fake the nitro SDK package hierarchy so the module imports cleanly.
        nssrc_modules_mock = {
            'nssrc.com.citrix.netscaler.nitro.resource.config.basic': m,
            'nssrc.com.citrix.netscaler.nitro.resource.config.basic.server': m,
            'nssrc.com.citrix.netscaler.nitro.resource.config.basic.server.server': cls.server_mock,
        }
        cls.nitro_specific_patcher = patch.dict(sys.modules, nssrc_modules_mock)
        cls.nitro_base_patcher = nitro_base_patcher

    @classmethod
    def tearDownClass(cls):
        cls.nitro_base_patcher.stop()
        cls.nitro_specific_patcher.stop()

    def setUp(self):
        super(TestNetscalerServerModule, self).setUp()
        # (Re)activate the sys.modules patches for each individual test.
        self.nitro_base_patcher.start()
        self.nitro_specific_patcher.start()

    def tearDown(self):
        super(TestNetscalerServerModule, self).tearDown()
        self.nitro_base_patcher.stop()
        self.nitro_specific_patcher.stop()

    def test_graceful_nitro_api_import_error(self):
        # Stop nitro api patching to cause ImportError
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        self.nitro_base_patcher.stop()
        self.nitro_specific_patcher.stop()
        from ansible.modules.network.netscaler import netscaler_server
        self.module = netscaler_server
        result = self.failed()
        self.assertEqual(result['msg'], 'Could not load nitro python sdk')

    def test_graceful_nitro_error_on_login(self):
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_server

        # Exception shaped like nitro_exception (errorcode/message attributes).
        class MockException(Exception):
            def __init__(self, *args, **kwargs):
                self.errorcode = 0
                self.message = ''

        client_mock = Mock()
        client_mock.login = Mock(side_effect=MockException)
        m = Mock(return_value=client_mock)
        with patch('ansible.modules.network.netscaler.netscaler_server.get_nitro_client', m):
            with patch('ansible.modules.network.netscaler.netscaler_server.nitro_exception', MockException):
                self.module = netscaler_server
                result = self.failed()
                self.assertTrue(result['msg'].startswith('nitro exception'), msg='nitro exception during login not handled properly')

    def test_graceful_no_connection_error(self):
        if sys.version_info[:2] == (2, 6):
            self.skipTest('requests library not available under python2.6')
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_server

        class MockException(Exception):
            pass

        client_mock = Mock()
        # Make the login call raise a requests ConnectionError.
        attrs = {'login.side_effect': requests.exceptions.ConnectionError}
        client_mock.configure_mock(**attrs)
        m = Mock(return_value=client_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            get_nitro_client=m,
            nitro_exception=MockException,
        ):
            self.module = netscaler_server
            result = self.failed()
            self.assertTrue(result['msg'].startswith('Connection error'), msg='Connection error was not handled gracefully')

    def test_graceful_login_error(self):
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        if sys.version_info[:2] == (2, 6):
            self.skipTest('requests library not available under python2.6')

        class MockException(Exception):
            pass

        client_mock = Mock()
        # Make the login call raise a requests SSLError.
        attrs = {'login.side_effect': requests.exceptions.SSLError}
        client_mock.configure_mock(**attrs)
        m = Mock(return_value=client_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            get_nitro_client=m,
            nitro_exception=MockException,
        ):
            self.module = netscaler_server
            result = self.failed()
            self.assertTrue(result['msg'].startswith('SSL Error'), msg='SSL Error was not handled gracefully')

    def test_save_config_called_on_state_present(self):
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_mock = Mock()
        # server_exists: absent before the run, present after it.
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            get_nitro_client=m,
            server_exists=Mock(side_effect=[False, True]),
            ConfigProxy=Mock(return_value=server_proxy_mock),
            diff_list=Mock(return_value={}),
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            self.exited()
            self.assertIn(call.save_config(), client_mock.mock_calls)

    def test_save_config_called_on_state_absent(self):
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='absent',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_mock = Mock()
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            get_nitro_client=m,
            server_exists=Mock(side_effect=[True, False]),
            ConfigProxy=Mock(return_value=server_proxy_mock),
            diff_list=Mock(return_value={}),
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            self.exited()
            self.assertIn(call.save_config(), client_mock.mock_calls)

    def test_save_config_not_called_on_state_present(self):
        # save_config=False must suppress the save_config() call.
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
            save_config=False,
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_mock = Mock()
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            get_nitro_client=m,
            server_exists=Mock(side_effect=[False, True]),
            ConfigProxy=Mock(return_value=server_proxy_mock),
            diff_list=Mock(return_value={}),
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            self.exited()
            self.assertNotIn(call.save_config(), client_mock.mock_calls)

    def test_save_config_not_called_on_state_absent(self):
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='absent',
            save_config=False,
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_mock = Mock()
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            get_nitro_client=m,
            server_exists=Mock(side_effect=[True, False]),
            ConfigProxy=Mock(return_value=server_proxy_mock),
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            self.exited()
            self.assertNotIn(call.save_config(), client_mock.mock_calls)

    def test_do_state_change_fail(self):
        # A non-zero errorcode from do_state_change must fail the module.
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_mock = Mock()
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            nitro_exception=self.MockException,
            get_nitro_client=m,
            server_exists=Mock(side_effect=[True, False]),
            ConfigProxy=Mock(return_value=server_proxy_mock),
            diff_list=Mock(return_value={}),
            do_state_change=Mock(return_value=Mock(errorcode=1, message='Failed on purpose'))
        ):
            self.module = netscaler_server
            result = self.failed()
            self.assertEqual(result['msg'], 'Error when setting disabled state. errorcode: 1 message: Failed on purpose')

    def test_disable_server_graceful(self):
        # graceful/delay entries must be discarded from the computed diff.
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
            disabled=True,
            graceful=True
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_mock = Mock()
        d = {
            'graceful': True,
            'delay': 20,
        }
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            nitro_exception=self.MockException,
            get_nitro_client=m,
            diff_list=Mock(return_value=d),
            get_immutables_intersection=Mock(return_value=[]),
            server_exists=Mock(side_effect=[True, True]),
            ConfigProxy=Mock(return_value=server_proxy_mock),
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            result = self.exited()
            self.assertEqual(d, {}, 'Graceful disable options were not discarded from the diff_list with the actual object')

    def test_new_server_execution_flow(self):
        # Server absent -> module must create it via proxy.add().
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        server_proxy_mock = Mock()
        server_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=server_proxy_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            get_nitro_client=m,
            server_exists=Mock(side_effect=[False, True]),
            server_identical=Mock(side_effect=[True]),
            ConfigProxy=config_proxy_mock,
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            self.exited()
            server_proxy_mock.assert_has_calls([call.add()])

    def test_modified_server_execution_flow(self):
        # Server exists but differs -> module must call proxy.update().
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        server_proxy_mock = Mock()
        server_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=server_proxy_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            server_exists=Mock(side_effect=[True, True]),
            server_identical=Mock(side_effect=[False, True]),
            ConfigProxy=config_proxy_mock,
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            self.exited()
            server_proxy_mock.assert_has_calls([call.update()])

    def test_absent_server_execution_flow(self):
        # Server exists and state=absent -> module must call proxy.delete().
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='absent',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        server_proxy_mock = Mock()
        server_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=server_proxy_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            server_exists=Mock(side_effect=[True, False]),
            server_identical=Mock(side_effect=[False, True]),
            ConfigProxy=config_proxy_mock,
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            self.exited()
            server_proxy_mock.assert_has_calls([call.delete()])

    def test_present_server_identical_flow(self):
        # Already configured as requested -> no proxy calls at all.
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        server_proxy_mock = Mock()
        server_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=server_proxy_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            server_exists=Mock(side_effect=[True, True]),
            server_identical=Mock(side_effect=[True, True]),
            ConfigProxy=config_proxy_mock,
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            self.exited()
            server_proxy_mock.assert_not_called()

    def test_absent_server_noop_flow(self):
        # Already absent and state=absent -> no proxy calls at all.
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='absent',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        server_proxy_mock = Mock()
        server_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=server_proxy_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            server_exists=Mock(side_effect=[False, False]),
            server_identical=Mock(side_effect=[False, False]),
            ConfigProxy=config_proxy_mock,
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            self.exited()
            server_proxy_mock.assert_not_called()

    def test_present_server_failed_update(self):
        # Sanity check after update still reports "not identical" -> fail.
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        server_proxy_mock = Mock()
        server_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=server_proxy_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            nitro_exception=self.MockException,
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            server_exists=Mock(side_effect=[True, True]),
            server_identical=Mock(side_effect=[False, False]),
            ConfigProxy=config_proxy_mock,
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            result = self.failed()
            self.assertEqual(result['msg'], 'Server is not configured according to parameters given')
            self.assertTrue(result['failed'])

    def test_present_server_failed_create(self):
        # Sanity check after create still reports "absent" -> fail.
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        server_proxy_mock = Mock()
        server_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=server_proxy_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            nitro_exception=self.MockException,
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            server_exists=Mock(side_effect=[False, False]),
            server_identical=Mock(side_effect=[False, False]),
            ConfigProxy=config_proxy_mock,
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            result = self.failed()
            self.assertEqual(result['msg'], 'Server does not seem to exist')
            self.assertTrue(result['failed'])

    def test_present_server_update_immutable_attribute(self):
        # Attempting to change an immutable attribute must fail the module.
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        server_proxy_mock = Mock()
        server_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=server_proxy_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            nitro_exception=self.MockException,
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=['domain']),
            server_exists=Mock(side_effect=[True, True]),
            server_identical=Mock(side_effect=[False, False]),
            ConfigProxy=config_proxy_mock,
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            result = self.failed()
            self.assertEqual(result['msg'], 'Cannot update immutable attributes [\'domain\']')
            self.assertTrue(result['failed'])

    def test_absent_server_failed_delete(self):
        # Sanity check after delete still reports "present" -> fail.
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='absent',
        ))
        from ansible.modules.network.netscaler import netscaler_server
        client_mock = Mock()
        m = Mock(return_value=client_mock)
        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        server_proxy_mock = Mock()
        server_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=server_proxy_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            nitro_exception=self.MockException,
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            server_exists=Mock(side_effect=[True, True]),
            server_identical=Mock(side_effect=[False, False]),
            ConfigProxy=config_proxy_mock,
            do_state_change=Mock(return_value=Mock(errorcode=0))
        ):
            self.module = netscaler_server
            result = self.failed()
            self.assertEqual(result['msg'], 'Server seems to be present')
            self.assertTrue(result['failed'])

    def test_graceful_nitro_exception_state_present(self):
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_server

        class MockException(Exception):
            def __init__(self, *args, **kwargs):
                self.errorcode = 0
                self.message = ''

        m = Mock(side_effect=MockException)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            server_exists=m,
            nitro_exception=MockException
        ):
            self.module = netscaler_server
            result = self.failed()
            self.assertTrue(
                result['msg'].startswith('nitro exception'),
                msg='Nitro exception not caught on operation absent'
            )

    def test_graceful_nitro_exception_state_absent(self):
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='absent',
        ))
        from ansible.modules.network.netscaler import netscaler_server

        class MockException(Exception):
            def __init__(self, *args, **kwargs):
                self.errorcode = 0
                self.message = ''

        m = Mock(side_effect=MockException)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_server',
            server_exists=m,
            nitro_exception=MockException
        ):
            self.module = netscaler_server
            result = self.failed()
            self.assertTrue(
                result['msg'].startswith('nitro exception'),
                msg='Nitro exception not caught on operation absent'
            )
| gpl-3.0 |
DevChun/htc-runnymede-ics-kernel | tools/perf/scripts/python/syscall-counts.py | 11181 | 1522 | # system call counts
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
# Script-level setup: usage text, optional [comm] filter, and the counter map.
usage = "perf script -s syscall-counts.py [comm]\n";

# When set, only syscalls issued by this command name are counted.
for_comm = None

if len(sys.argv) > 2:
    sys.exit(usage)
if len(sys.argv) > 1:
    for_comm = sys.argv[1]

# Maps syscall id -> occurrence count (autodict auto-creates entries).
syscalls = autodict()
def trace_begin():
    # Called by perf once before any events are processed.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called by perf after the last event; emit the per-syscall totals.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    id, args):
    # Handler for the raw_syscalls:sys_enter tracepoint: bump the counter
    # for this syscall id, optionally filtered by command name.
    if for_comm is not None:
        if common_comm != for_comm:
            return
    try:
        syscalls[id] += 1
    except TypeError:
        # First occurrence: autodict returns a fresh non-int node, so the
        # += fails with TypeError and we initialize the count instead.
        syscalls[id] = 1
def print_syscall_totals():
    # Print a table of syscall counts sorted by count, highest first.
    # NOTE: iteritems() and the lambda's (k, v) tuple parameter are
    # Python 2-only constructs, as are the print statements.
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events:\n\n",
    print "%-40s %10s\n" % ("event", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
        "-----------"),
    for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
        reverse = True):
        print "%-40s %10d\n" % (syscall_name(id), val),
rosarior/mayan | apps/common/templatetags/subtemplates_tags.py | 2 | 1983 | import re
from django.template import Node, TemplateSyntaxError, Library, \
Variable, Context
from django.template.loader import get_template
# Tag library instance picked up by Django's templatetag loader.
register = Library()
class RenderSubtemplateNode(Node):
    """Template node that renders a sub-template with an extra context and
    stores the resulting string in a context variable.

    The template name, the extra context dict and the target variable name
    are all resolved at render time, so the first two may themselves be
    template variables.
    """

    def __init__(self, template_name, template_context, var_name):
        # Unresolved variable expressions captured at parse time.
        self.template_name = template_name
        self.template_context = template_context
        self.var_name = var_name

    def render(self, context):
        template_name = Variable(self.template_name).resolve(context)
        template_context = Variable(self.template_context).resolve(context)
        # Layer the subtemplate's context on top of a copy of the current
        # context, preserving the autoescape setting.
        new_context = Context(context)
        new_context.update(Context(template_context, autoescape=context.autoescape))
        # Propagate the CSRF token, if present, so {% csrf_token %} still
        # works inside the subtemplate.
        csrf_token = context.get('csrf_token', None)
        if csrf_token is not None:
            new_context['csrf_token'] = csrf_token
        context[self.var_name] = get_template(template_name).render(new_context)
        # The rendered output is exposed via the context variable, so the
        # node itself contributes nothing to the surrounding template.
        return ''
@register.tag
def render_subtemplate(parser, token):
    """Compile {% render_subtemplate <name_var> <context_var> as <var> %}.

    Both <name_var> and <context_var> must be variable names (not quoted
    literals); they are resolved against the context at render time and
    the rendered output is bound to <var>.
    """
    # This version uses a regular expression to parse tag contents.
    try:
        # Splitting by None == splitting by spaces.
        tag_name, arg = token.contents.split(None, 1)
    except ValueError:
        raise TemplateSyntaxError('%r tag requires arguments' % token.contents.split()[0])
    m = re.search(r'(.*?) (.*?) as (\w+)', arg)
    if not m:
        raise TemplateSyntaxError('%r tag had invalid arguments' % tag_name)
    template_name, template_context, var_name = m.groups()
    # Quoted arguments are rejected: this tag expects variable names that
    # resolve at render time, not string literals.
    if (template_name[0] == template_name[-1] and template_name[0] in ('"', "'")):
        raise TemplateSyntaxError('%r tag\'s template name argument should not be in quotes' % tag_name)
    if (template_context[0] == template_context[-1] and template_context[0] in ('"', "'")):
        raise TemplateSyntaxError('%r tag\'s template context argument should not be in quotes' % tag_name)
    return RenderSubtemplateNode(template_name, template_context, var_name)
pyfisch/servo | tests/wpt/web-platform-tests/cookies/resources/helpers.py | 26 | 2020 | import urlparse
def setNoCacheAndCORSHeaders(request, response):
    """Set Cache-Control, CORS and Content-Type headers appropriate for the cookie tests."""
    # Echo the requesting origin when present so credentialed CORS works;
    # fall back to the wildcard otherwise.
    if "origin" in request.headers:
        origin = request.headers["origin"]
    else:
        origin = "*"
    return [
        ("Content-Type", "application/json"),
        ("Access-Control-Allow-Credentials", "true"),
        ("Access-Control-Allow-Origin", origin),
        ("Cache-Control", "no-cache"),
        ("Expires", "Fri, 01 Jan 1990 00:00:00 GMT"),
    ]
def makeCookieHeader(name, value, otherAttrs):
    """Make a Set-Cookie header for a cookie with the name, value and
    attributes provided.

    otherAttrs maps attribute name -> value; an attribute whose value is
    None or "" is rendered as a bare flag (e.g. "secure").
    """
    def makeAV(a, v):
        # Bare attribute when there is no value to attach.
        if v is None or v == "":
            return a
        return "%s=%s" % (a, v)
    # ensure cookie name is always first
    attrs = ["%s=%s" % (name, value)]
    # BUG FIX: was otherAttrs.iteritems(), which is Python 2-only and
    # raises AttributeError under the Python 3 wptserve runtime;
    # .items() behaves identically on both.
    attrs.extend(makeAV(a, v) for (a, v) in otherAttrs.items())
    return ("Set-Cookie", "; ".join(attrs))
def makeDropCookie(name, secure):
    """Build a Set-Cookie header that immediately expires cookie *name*."""
    expiry_attrs = dict(MaxAge=0, path="/")
    if secure:
        expiry_attrs["secure"] = ""
    return makeCookieHeader(name, "", expiry_attrs)
def readParameter(request, paramName, requireValue):
    """Read a parameter from the request. Raise if requireValue is set and the
    parameter has an empty value or is not present.

    Returns the stripped parameter value, or None when the parameter is
    absent and requireValue is falsy.
    """
    params = urlparse.parse_qs(request.url_parts.query)
    # BUG FIX: the original indexed params[paramName] unconditionally
    # (KeyError when absent) and raised on an empty value even when
    # requireValue was falsy, so the requireValue argument was ignored.
    if paramName not in params:
        if requireValue:
            raise Exception("Empty or missing name parameter.")
        return None
    param = params[paramName][0].strip()
    if requireValue and len(param) == 0:
        raise Exception("Empty or missing name parameter.")
    return param
def readCookies(request):
    """Return a name -> value dict of the cookies the client sent.

    When a cookie name occurs more than once, the last value in the
    list wins (names are clobbered, matching the original behaviour).
    """
    jar = {}
    for name in request.cookies:
        values = request.cookies.get_list(name)
        # do we care we'll clobber cookies here? If so, do we
        # need to modify the test to take cookie names and value lists?
        for morsel in values:
            jar[name] = morsel.value
    return jar
| mpl-2.0 |
haudren/scipy | scipy/signal/tests/test_savitzky_golay.py | 105 | 9924 | from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import (run_module_suite, assert_allclose, assert_equal,
assert_almost_equal, assert_array_equal,
assert_array_almost_equal)
from scipy.ndimage import convolve1d
from scipy.signal import savgol_coeffs, savgol_filter
from scipy.signal._savitzky_golay import _polyder
def check_polyder(p, m, expected):
    # Helper: the m-th derivative of the coefficient array p must match
    # `expected` exactly.
    actual = _polyder(p, m)
    assert_array_equal(actual, expected)
def test_polyder():
    # Nose-style generator test: each (p, m, expected) tuple is yielded to
    # check_polyder.  Cases cover constants, repeated differentiation down to
    # the zero polynomial, and 2-D (multi-column) coefficient arrays; the
    # transposes put coefficients along axis 0 as _polyder expects.
    cases = [
        ([5], 0, [5]),
        ([5], 1, [0]),
        ([3, 2, 1], 0, [3, 2, 1]),
        ([3, 2, 1], 1, [6, 2]),
        ([3, 2, 1], 2, [6]),
        ([3, 2, 1], 3, [0]),
        ([[3, 2, 1], [5, 6, 7]], 0, [[3, 2, 1], [5, 6, 7]]),
        ([[3, 2, 1], [5, 6, 7]], 1, [[6, 2], [10, 6]]),
        ([[3, 2, 1], [5, 6, 7]], 2, [[6], [10]]),
        ([[3, 2, 1], [5, 6, 7]], 3, [[0], [0]]),
    ]
    for p, m, expected in cases:
        yield check_polyder, np.array(p).T, m, np.array(expected).T
#--------------------------------------------------------------------
# savgol_coeffs tests
#--------------------------------------------------------------------
def alt_sg_coeffs(window_length, polyorder, pos):
    """Slow reference implementation of the Savitzky-Golay coefficients.

    Fits a degree-`polyorder` polynomial to a unit impulse at `pos` with
    numpy.polyfit and evaluates it across the window.  The result should be
    equivalent to savgol_coeffs(), just slower.  window_length should be odd.
    """
    if pos is None:
        pos = window_length // 2
    sample_points = np.arange(window_length)
    impulse = (sample_points == pos).astype(int)
    fit = np.polyfit(sample_points, impulse, polyorder)
    return np.polyval(fit, sample_points)
def test_sg_coeffs_trivial():
    # Test a trivial case of savgol_coeffs: polyorder = window_length - 1
    # With polyorder == window_length - 1 the fit interpolates the data, so
    # the coefficient vector is a unit impulse at `pos` (and at the mirrored
    # position when use='dot', per the expected arrays below).
    h = savgol_coeffs(1, 0)
    assert_allclose(h, [1])
    h = savgol_coeffs(3, 2)
    assert_allclose(h, [0, 1, 0], atol=1e-10)
    h = savgol_coeffs(5, 4)
    assert_allclose(h, [0, 0, 1, 0, 0], atol=1e-10)
    h = savgol_coeffs(5, 4, pos=1)
    assert_allclose(h, [0, 0, 0, 1, 0], atol=1e-10)
    h = savgol_coeffs(5, 4, pos=1, use='dot')
    assert_allclose(h, [0, 1, 0, 0, 0], atol=1e-10)
def compare_coeffs_to_alt(window_length, order):
    # For the given window_length and order, compare the results
    # of savgol_coeffs and alt_sg_coeffs for pos from 0 to window_length - 1.
    # Also include pos=None.
    # use='dot' puts savgol_coeffs in the same orientation alt_sg_coeffs
    # produces.
    for pos in [None] + list(range(window_length)):
        h1 = savgol_coeffs(window_length, order, pos=pos, use='dot')
        h2 = alt_sg_coeffs(window_length, order, pos=pos)
        assert_allclose(h1, h2, atol=1e-10,
                        err_msg=("window_length = %d, order = %d, pos = %s" %
                                 (window_length, order, pos)))
def test_sg_coeffs_compare():
    # Compare savgol_coeffs() to alt_sg_coeffs().
    # Nose-style generator: yields one comparison per odd window length and
    # each valid polynomial order below it.
    for window_length in range(1, 8, 2):
        for order in range(window_length):
            yield compare_coeffs_to_alt, window_length, order
def test_sg_coeffs_exact():
    # An order-4 filter reproduces cubic data exactly (away from the edges),
    # and its deriv=1/deriv=2 variants recover the exact derivatives.
    polyorder = 4
    window_length = 9
    halflen = window_length // 2
    x = np.linspace(0, 21, 43)
    delta = x[1] - x[0]
    # The data is a cubic polynomial. We'll use an order 4
    # SG filter, so the filtered values should equal the input data
    # (except within half window_length of the edges).
    y = 0.5 * x ** 3 - x
    h = savgol_coeffs(window_length, polyorder)
    y0 = convolve1d(y, h)
    assert_allclose(y0[halflen:-halflen], y[halflen:-halflen])
    # Check the same input, but use deriv=1. dy is the exact result.
    dy = 1.5 * x ** 2 - 1
    h = savgol_coeffs(window_length, polyorder, deriv=1, delta=delta)
    y1 = convolve1d(y, h)
    assert_allclose(y1[halflen:-halflen], dy[halflen:-halflen])
    # Check the same input, but use deriv=2. d2y is the exact result.
    d2y = 3.0 * x
    h = savgol_coeffs(window_length, polyorder, deriv=2, delta=delta)
    y2 = convolve1d(y, h)
    assert_allclose(y2[halflen:-halflen], d2y[halflen:-halflen])
def test_sg_coeffs_deriv():
    # The data in `x` is a sampled parabola, so using savgol_coeffs with an
    # order 2 or higher polynomial should give exact results.
    # delta=2.0 matches the spacing of the sample grid `i`.
    i = np.array([-2.0, 0.0, 2.0, 4.0, 6.0])
    x = i ** 2 / 4
    dx = i / 2
    d2x = 0.5 * np.ones_like(i)
    for pos in range(x.size):
        coeffs0 = savgol_coeffs(5, 3, pos=pos, delta=2.0, use='dot')
        assert_allclose(coeffs0.dot(x), x[pos], atol=1e-10)
        coeffs1 = savgol_coeffs(5, 3, pos=pos, delta=2.0, use='dot', deriv=1)
        assert_allclose(coeffs1.dot(x), dx[pos], atol=1e-10)
        coeffs2 = savgol_coeffs(5, 3, pos=pos, delta=2.0, use='dot', deriv=2)
        assert_allclose(coeffs2.dot(x), d2x[pos], atol=1e-10)
def test_sg_coeffs_large():
    # Test that for large values of window_length and polyorder the array of
    # coefficients returned is symmetric. The aim is to ensure that
    # no potential numeric overflow occurs.
    # (deriv=1 coefficients must be antisymmetric instead.)
    coeffs0 = savgol_coeffs(31, 9)
    assert_array_almost_equal(coeffs0, coeffs0[::-1])
    coeffs1 = savgol_coeffs(31, 9, deriv=1)
    assert_array_almost_equal(coeffs1, -coeffs1[::-1])
#--------------------------------------------------------------------
# savgol_filter tests
#--------------------------------------------------------------------
def test_sg_filter_trivial():
    """ Test some trivial edge cases for savgol_filter()."""
    # Window of 1 with polyorder 0 is the identity filter.
    x = np.array([1.0])
    y = savgol_filter(x, 1, 0)
    assert_equal(y, [1.0])
    # Input is a single value. With a window length of 3 and polyorder 1,
    # the value in y is from the straight-line fit of (-1,0), (0,3) and
    # (1, 0) at 0. This is just the average of the three values, hence 1.0.
    x = np.array([3.0])
    y = savgol_filter(x, 3, 1, mode='constant')
    assert_almost_equal(y, [1.0], decimal=15)
    # mode='nearest' pads with the edge value, so the fit is flat at 3.0.
    x = np.array([3.0])
    y = savgol_filter(x, 3, 1, mode='nearest')
    assert_almost_equal(y, [3.0], decimal=15)
    # Constant input wrapped around itself stays constant.
    x = np.array([1.0] * 3)
    y = savgol_filter(x, 3, 1, mode='wrap')
    assert_almost_equal(y, [1.0, 1.0, 1.0], decimal=15)
def test_sg_filter_basic():
    # Filter a tiny 3-sample signal under each boundary mode and compare
    # against hand-computed expected values.
    signal = np.array([1.0, 2.0, 1.0])
    filtered = savgol_filter(signal, 3, 1, mode='constant')
    assert_allclose(filtered, [1.0, 4.0 / 3, 1.0])
    filtered = savgol_filter(signal, 3, 1, mode='mirror')
    assert_allclose(filtered, [5.0 / 3, 4.0 / 3, 5.0 / 3])
    filtered = savgol_filter(signal, 3, 1, mode='wrap')
    assert_allclose(filtered, [4.0 / 3, 4.0 / 3, 4.0 / 3])
def test_sg_filter_2d():
    # Filtering a 2-D array applies the filter along the last axis by
    # default; the transposed call with axis=0 must give the transposed
    # result.
    x = np.array([[1.0, 2.0, 1.0],
                  [2.0, 4.0, 2.0]])
    expected = np.array([[1.0, 4.0 / 3, 1.0],
                         [2.0, 8.0 / 3, 2.0]])
    y = savgol_filter(x, 3, 1, mode='constant')
    assert_allclose(y, expected)
    y = savgol_filter(x.T, 3, 1, mode='constant', axis=0)
    assert_allclose(y, expected.T)
def test_sg_filter_interp_edges():
    # Another test with low degree polynomial data, for which we can easily
    # give the exact results. In this test, we use mode='interp', so
    # savgol_filter should match the exact solution for the entire data set,
    # including the edges.
    t = np.linspace(-5, 5, 21)
    delta = t[1] - t[0]
    # Polynomial test data.
    x = np.array([t,
                  3 * t ** 2,
                  t ** 3 - t])
    dx = np.array([np.ones_like(t),
                   6 * t,
                   3 * t ** 2 - 1.0])
    d2x = np.array([np.zeros_like(t),
                    6 * np.ones_like(t),
                    6 * t])
    window_length = 7
    # deriv=0, 1 and 2 must all be exact for degree <= 3 data.
    y = savgol_filter(x, window_length, 3, axis=-1, mode='interp')
    assert_allclose(y, x, atol=1e-12)
    y1 = savgol_filter(x, window_length, 3, axis=-1, mode='interp',
                       deriv=1, delta=delta)
    assert_allclose(y1, dx, atol=1e-12)
    y2 = savgol_filter(x, window_length, 3, axis=-1, mode='interp',
                       deriv=2, delta=delta)
    assert_allclose(y2, d2x, atol=1e-12)
    # Transpose everything, and test again with axis=0.
    x = x.T
    dx = dx.T
    d2x = d2x.T
    y = savgol_filter(x, window_length, 3, axis=0, mode='interp')
    assert_allclose(y, x, atol=1e-12)
    y1 = savgol_filter(x, window_length, 3, axis=0, mode='interp',
                       deriv=1, delta=delta)
    assert_allclose(y1, dx, atol=1e-12)
    y2 = savgol_filter(x, window_length, 3, axis=0, mode='interp',
                       deriv=2, delta=delta)
    assert_allclose(y2, d2x, atol=1e-12)
def test_sg_filter_interp_edges_3d():
    # Test mode='interp' with a 3-D array.
    # Same exactness idea as the 2-D test: polynomial data of degree <= 3
    # must pass through an order-3 filter unchanged, for each choice of axis.
    t = np.linspace(-5, 5, 21)
    delta = t[1] - t[0]
    x1 = np.array([t, -t])
    x2 = np.array([t ** 2, 3 * t ** 2 + 5])
    x3 = np.array([t ** 3, 2 * t ** 3 + t ** 2 - 0.5 * t])
    dx1 = np.array([np.ones_like(t), -np.ones_like(t)])
    dx2 = np.array([2 * t, 6 * t])
    dx3 = np.array([3 * t ** 2, 6 * t ** 2 + 2 * t - 0.5])
    # z has shape (3, 2, 21)
    z = np.array([x1, x2, x3])
    dz = np.array([dx1, dx2, dx3])
    y = savgol_filter(z, 7, 3, axis=-1, mode='interp', delta=delta)
    assert_allclose(y, z, atol=1e-10)
    dy = savgol_filter(z, 7, 3, axis=-1, mode='interp', deriv=1, delta=delta)
    assert_allclose(dy, dz, atol=1e-10)
    # z has shape (3, 21, 2)
    z = np.array([x1.T, x2.T, x3.T])
    dz = np.array([dx1.T, dx2.T, dx3.T])
    y = savgol_filter(z, 7, 3, axis=1, mode='interp', delta=delta)
    assert_allclose(y, z, atol=1e-10)
    dy = savgol_filter(z, 7, 3, axis=1, mode='interp', deriv=1, delta=delta)
    assert_allclose(dy, dz, atol=1e-10)
    # z has shape (21, 3, 2)
    z = z.swapaxes(0, 1).copy()
    dz = dz.swapaxes(0, 1).copy()
    y = savgol_filter(z, 7, 3, axis=0, mode='interp', delta=delta)
    assert_allclose(y, z, atol=1e-10)
    dy = savgol_filter(z, 7, 3, axis=0, mode='interp', deriv=1, delta=delta)
    assert_allclose(dy, dz, atol=1e-10)
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause |
williammc/gtest | test/gtest_filter_unittest.py | 2826 | 21261 | #!/usr/bin/env python
#
# Copyright 2005 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test test filters.
A user can specify which test(s) in a Google Test program to run via either
the GTEST_FILTER environment variable or the --gtest_filter flag.
This script tests such functionality by invoking
gtest_filter_unittest_ (a program written with Google Test) with different
environments and command line flags.
Note that test sharding may also influence which tests are filtered. Therefore,
we test that here also.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import sets
import sys
import gtest_test_utils
# Constants.
# Checks if this platform can pass empty environment variables to child
# processes. We set an env variable to an empty string and invoke a python
# script in a subprocess to print whether the variable is STILL in
# os.environ. We then use 'eval' to parse the child's output so that an
# exception is thrown if the input is anything other than 'True' nor 'False'.
os.environ['EMPTY_VAR'] = ''
child = gtest_test_utils.Subprocess(
[sys.executable, '-c', 'import os; print \'EMPTY_VAR\' in os.environ'])
CAN_PASS_EMPTY_ENV = eval(child.output)
# Check if this platform can unset environment variables in child processes.
# We set an env variable to a non-empty string, unset it, and invoke
# a python script in a subprocess to print whether the variable
# is NO LONGER in os.environ.
# We use 'eval' to parse the child's output so that an exception
# is thrown if the input is neither 'True' nor 'False'.
os.environ['UNSET_VAR'] = 'X'
del os.environ['UNSET_VAR']
child = gtest_test_utils.Subprocess(
[sys.executable, '-c', 'import os; print \'UNSET_VAR\' not in os.environ'])
CAN_UNSET_ENV = eval(child.output)
# Checks if we should test with an empty filter. This doesn't
# make sense on platforms that cannot pass empty env variables (Win32)
# and on platforms that cannot unset variables (since we cannot tell
# the difference between "" and NULL -- Borland and Solaris < 5.10)
CAN_TEST_EMPTY_FILTER = (CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV)
# The environment variable for specifying the test filters.
FILTER_ENV_VAR = 'GTEST_FILTER'
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'
# The command line flag for specifying the test filters.
FILTER_FLAG = 'gtest_filter'
# The command line flag for including disabled tests.
ALSO_RUN_DISABED_TESTS_FLAG = 'gtest_also_run_disabled_tests'
# Command to run the gtest_filter_unittest_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_filter_unittest_')
# Regex for determining whether parameterized tests are enabled in the binary.
PARAM_TEST_REGEX = re.compile(r'/ParamTest')
# Regex for parsing test case names from Google Test's output.
TEST_CASE_REGEX = re.compile(r'^\[\-+\] \d+ tests? from (\w+(/\w+)?)')
# Regex for parsing test names from Google Test's output.
TEST_REGEX = re.compile(r'^\[\s*RUN\s*\].*\.(\w+(/\w+)?)')
# The command line flag to tell Google Test to output the list of tests it
# will run.
LIST_TESTS_FLAG = '--gtest_list_tests'
# Indicates whether Google Test supports death tests.
SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(
[COMMAND, LIST_TESTS_FLAG]).output
# Full names of all tests in gtest_filter_unittests_.
PARAM_TESTS = [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
'SeqQ/ParamTest.TestX/0',
'SeqQ/ParamTest.TestX/1',
'SeqQ/ParamTest.TestY/0',
'SeqQ/ParamTest.TestY/1',
]
DISABLED_TESTS = [
'BarTest.DISABLED_TestFour',
'BarTest.DISABLED_TestFive',
'BazTest.DISABLED_TestC',
'DISABLED_FoobarTest.Test1',
'DISABLED_FoobarTest.DISABLED_Test2',
'DISABLED_FoobarbazTest.TestA',
]
if SUPPORTS_DEATH_TESTS:
DEATH_TESTS = [
'HasDeathTest.Test1',
'HasDeathTest.Test2',
]
else:
DEATH_TESTS = []
# All the non-disabled tests.
ACTIVE_TESTS = [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB',
] + DEATH_TESTS + PARAM_TESTS
param_tests_present = None
# Utilities.
environ = os.environ.copy()
def SetEnvVar(env_var, value):
  """Assigns value to env_var in the test environment; None removes it."""
  if value is None:
    environ.pop(env_var, None)
  else:
    environ[env_var] = value
def RunAndReturnOutput(args = None):
  """Runs the test program and returns its output."""
  # COMMAND is the gtest_filter_unittest_ binary; `environ` carries the
  # filter/sharding variables set up by the callers.
  return gtest_test_utils.Subprocess([COMMAND] + (args or []),
                                     env=environ).output
def RunAndExtractTestList(args = None):
  """Runs the test program and returns its exit code and a list of tests run."""
  p = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ)
  tests_run = []
  test_case = ''
  test = ''
  # TEST_CASE_REGEX lines update the current "TestCase" prefix;
  # TEST_REGEX ("[ RUN ]") lines record an executed test under that prefix.
  for line in p.output.split('\n'):
    match = TEST_CASE_REGEX.match(line)
    if match is not None:
      test_case = match.group(1)
    else:
      match = TEST_REGEX.match(line)
      if match is not None:
        test = match.group(1)
        tests_run.append(test_case + '.' + test)
  return (tests_run, p.exit_code)
def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
  """Runs the given function and arguments in a modified environment."""
  # Mutates the module-level `environ` for the duration of the call and
  # restores the saved copy in `finally`, even if `function` raises.
  try:
    original_env = environ.copy()
    environ.update(extra_env)
    return function(*args, **kwargs)
  finally:
    environ.clear()
    environ.update(original_env)
def RunWithSharding(total_shards, shard_index, command):
  """Runs a test program shard and returns exit code and a list of tests run."""
  # Sharding is communicated to the binary purely via environment variables.
  extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index),
               TOTAL_SHARDS_ENV_VAR: str(total_shards)}
  return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command)
# The unit test.
class GTestFilterUnitTest(gtest_test_utils.TestCase):
  """Tests the env variable or the command line flag to filter tests."""
  # Utilities.
  # The Assert* helpers below compare unordered collections of test names
  # gathered from the child binary's output.
  def AssertSetEqual(self, lhs, rhs):
    """Asserts that two sets are equal."""
    for elem in lhs:
      self.assert_(elem in rhs, '%s in %s' % (elem, rhs))
    for elem in rhs:
      self.assert_(elem in lhs, '%s in %s' % (elem, lhs))
  def AssertPartitionIsValid(self, set_var, list_of_sets):
    """Asserts that list_of_sets is a valid partition of set_var."""
    full_partition = []
    for slice_var in list_of_sets:
      full_partition.extend(slice_var)
    self.assertEqual(len(set_var), len(full_partition))
    self.assertEqual(sets.Set(set_var), sets.Set(full_partition))
  def AdjustForParameterizedTests(self, tests_to_run):
    """Adjust tests_to_run in case value parameterized tests are disabled."""
    global param_tests_present
    if not param_tests_present:
      return list(sets.Set(tests_to_run) - sets.Set(PARAM_TESTS))
    else:
      return tests_to_run
  def RunAndVerify(self, gtest_filter, tests_to_run):
    """Checks that the binary runs correct set of tests for a given filter."""
    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
    # First, tests using the environment variable.
    # Windows removes empty variables from the environment when passing it
    # to a new process. This means it is impossible to pass an empty filter
    # into a process using the environment variable. However, we can still
    # test the case when the variable is not supplied (i.e., gtest_filter is
    # None).
    # pylint: disable-msg=C6403
    if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
      SetEnvVar(FILTER_ENV_VAR, gtest_filter)
      tests_run = RunAndExtractTestList()[0]
      SetEnvVar(FILTER_ENV_VAR, None)
      self.AssertSetEqual(tests_run, tests_to_run)
    # pylint: enable-msg=C6403
    # Next, tests using the command line flag.
    if gtest_filter is None:
      args = []
    else:
      args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)]
    tests_run = RunAndExtractTestList(args)[0]
    self.AssertSetEqual(tests_run, tests_to_run)
  def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,
                               args=None, check_exit_0=False):
    """Checks that binary runs correct tests for the given filter and shard.
    Runs all shards of gtest_filter_unittest_ with the given filter, and
    verifies that the right set of tests were run. The union of tests run
    on each shard should be identical to tests_to_run, without duplicates.
    Args:
      gtest_filter: A filter to apply to the tests.
      total_shards: A total number of shards to split test run into.
      tests_to_run: A set of tests expected to run.
      args : Arguments to pass to the to the test binary.
      check_exit_0: When set to a true value,  make sure that all shards
        return 0.
    """
    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
    # Windows removes empty variables from the environment when passing it
    # to a new process. This means it is impossible to pass an empty filter
    # into a process using the environment variable. However, we can still
    # test the case when the variable is not supplied (i.e., gtest_filter is
    # None).
    # pylint: disable-msg=C6403
    if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
      SetEnvVar(FILTER_ENV_VAR, gtest_filter)
      partition = []
      for i in range(0, total_shards):
        (tests_run, exit_code) = RunWithSharding(total_shards, i, args)
        if check_exit_0:
          self.assertEqual(0, exit_code)
        partition.append(tests_run)
      self.AssertPartitionIsValid(tests_to_run, partition)
      SetEnvVar(FILTER_ENV_VAR, None)
    # pylint: enable-msg=C6403
  def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run):
    """Checks that the binary runs correct set of tests for the given filter.
    Runs gtest_filter_unittest_ with the given filter, and enables
    disabled tests. Verifies that the right set of tests were run.
    Args:
      gtest_filter: A filter to apply to the tests.
      tests_to_run: A set of tests expected to run.
    """
    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
    # Construct the command line.
    args = ['--%s' % ALSO_RUN_DISABED_TESTS_FLAG]
    if gtest_filter is not None:
      args.append('--%s=%s' % (FILTER_FLAG, gtest_filter))
    tests_run = RunAndExtractTestList(args)[0]
    self.AssertSetEqual(tests_run, tests_to_run)
  def setUp(self):
    """Sets up test case.
    Determines whether value-parameterized tests are enabled in the binary and
    sets the flags accordingly.
    """
    # Probe the binary only once; the result is cached module-wide.
    global param_tests_present
    if param_tests_present is None:
      param_tests_present = PARAM_TEST_REGEX.search(
          RunAndReturnOutput()) is not None
  def testDefaultBehavior(self):
    """Tests the behavior of not specifying the filter."""
    self.RunAndVerify(None, ACTIVE_TESTS)
  def testDefaultBehaviorWithShards(self):
    """Tests the behavior without the filter, with sharding enabled."""
    self.RunAndVerifyWithSharding(None, 1, ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, 2, ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) - 1, ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS), ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) + 1, ACTIVE_TESTS)
  def testEmptyFilter(self):
    """Tests an empty filter."""
    self.RunAndVerify('', [])
    self.RunAndVerifyWithSharding('', 1, [])
    self.RunAndVerifyWithSharding('', 2, [])
  def testBadFilter(self):
    """Tests a filter that matches nothing."""
    self.RunAndVerify('BadFilter', [])
    self.RunAndVerifyAllowingDisabled('BadFilter', [])
  def testFullName(self):
    """Tests filtering by full name."""
    self.RunAndVerify('FooTest.Xyz', ['FooTest.Xyz'])
    self.RunAndVerifyAllowingDisabled('FooTest.Xyz', ['FooTest.Xyz'])
    self.RunAndVerifyWithSharding('FooTest.Xyz', 5, ['FooTest.Xyz'])
  def testUniversalFilters(self):
    """Tests filters that match everything."""
    self.RunAndVerify('*', ACTIVE_TESTS)
    self.RunAndVerify('*.*', ACTIVE_TESTS)
    self.RunAndVerifyWithSharding('*.*', len(ACTIVE_TESTS) - 3, ACTIVE_TESTS)
    self.RunAndVerifyAllowingDisabled('*', ACTIVE_TESTS + DISABLED_TESTS)
    self.RunAndVerifyAllowingDisabled('*.*', ACTIVE_TESTS + DISABLED_TESTS)
  def testFilterByTestCase(self):
    """Tests filtering by test case name."""
    self.RunAndVerify('FooTest.*', ['FooTest.Abc', 'FooTest.Xyz'])
    BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB']
    self.RunAndVerify('BazTest.*', BAZ_TESTS)
    self.RunAndVerifyAllowingDisabled('BazTest.*',
                                      BAZ_TESTS + ['BazTest.DISABLED_TestC'])
  def testFilterByTest(self):
    """Tests filtering by test name."""
    self.RunAndVerify('*.TestOne', ['BarTest.TestOne', 'BazTest.TestOne'])
  def testFilterDisabledTests(self):
    """Select only the disabled tests to run."""
    self.RunAndVerify('DISABLED_FoobarTest.Test1', [])
    self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1',
                                      ['DISABLED_FoobarTest.Test1'])
    self.RunAndVerify('*DISABLED_*', [])
    self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS)
    self.RunAndVerify('*.DISABLED_*', [])
    self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [
        'BarTest.DISABLED_TestFour',
        'BarTest.DISABLED_TestFive',
        'BazTest.DISABLED_TestC',
        'DISABLED_FoobarTest.DISABLED_Test2',
        ])
    self.RunAndVerify('DISABLED_*', [])
    self.RunAndVerifyAllowingDisabled('DISABLED_*', [
        'DISABLED_FoobarTest.Test1',
        'DISABLED_FoobarTest.DISABLED_Test2',
        'DISABLED_FoobarbazTest.TestA',
        ])
  def testWildcardInTestCaseName(self):
    """Tests using wildcard in the test case name."""
    self.RunAndVerify('*a*.*', [
        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',
        'BazTest.TestOne',
        'BazTest.TestA',
        'BazTest.TestB', ] + DEATH_TESTS + PARAM_TESTS)
  def testWildcardInTestName(self):
    """Tests using wildcard in the test name."""
    self.RunAndVerify('*.*A*', ['FooTest.Abc', 'BazTest.TestA'])
  def testFilterWithoutDot(self):
    """Tests a filter that has no '.' in it."""
    self.RunAndVerify('*z*', [
        'FooTest.Xyz',
        'BazTest.TestOne',
        'BazTest.TestA',
        'BazTest.TestB',
        ])
  def testTwoPatterns(self):
    """Tests filters that consist of two patterns."""
    self.RunAndVerify('Foo*.*:*A*', [
        'FooTest.Abc',
        'FooTest.Xyz',
        'BazTest.TestA',
        ])
    # An empty pattern + a non-empty one
    self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA'])
  def testThreePatterns(self):
    """Tests filters that consist of three patterns."""
    self.RunAndVerify('*oo*:*A*:*One', [
        'FooTest.Abc',
        'FooTest.Xyz',
        'BarTest.TestOne',
        'BazTest.TestOne',
        'BazTest.TestA',
        ])
    # The 2nd pattern is empty.
    self.RunAndVerify('*oo*::*One', [
        'FooTest.Abc',
        'FooTest.Xyz',
        'BarTest.TestOne',
        'BazTest.TestOne',
        ])
    # The last 2 patterns are empty.
    self.RunAndVerify('*oo*::', [
        'FooTest.Abc',
        'FooTest.Xyz',
        ])
  def testNegativeFilters(self):
    # A '-' introduces the negative half of the filter: matching tests are
    # excluded.
    self.RunAndVerify('*-BazTest.TestOne', [
        'FooTest.Abc',
        'FooTest.Xyz',
        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',
        'BazTest.TestA',
        'BazTest.TestB',
        ] + DEATH_TESTS + PARAM_TESTS)
    self.RunAndVerify('*-FooTest.Abc:BazTest.*', [
        'FooTest.Xyz',
        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',
        ] + DEATH_TESTS + PARAM_TESTS)
    self.RunAndVerify('BarTest.*-BarTest.TestOne', [
        'BarTest.TestTwo',
        'BarTest.TestThree',
        ])
    # Tests without leading '*'.
    self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:BazTest.*', [
        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',
        ] + DEATH_TESTS + PARAM_TESTS)
    # Value parameterized tests.
    self.RunAndVerify('*/*', PARAM_TESTS)
    # Value parameterized tests filtering by the sequence name.
    self.RunAndVerify('SeqP/*', [
        'SeqP/ParamTest.TestX/0',
        'SeqP/ParamTest.TestX/1',
        'SeqP/ParamTest.TestY/0',
        'SeqP/ParamTest.TestY/1',
        ])
    # Value parameterized tests filtering by the test name.
    self.RunAndVerify('*/0', [
        'SeqP/ParamTest.TestX/0',
        'SeqP/ParamTest.TestY/0',
        'SeqQ/ParamTest.TestX/0',
        'SeqQ/ParamTest.TestY/0',
        ])
  def testFlagOverridesEnvVar(self):
    """Tests that the filter flag overrides the filtering env. variable."""
    SetEnvVar(FILTER_ENV_VAR, 'Foo*')
    args = ['--%s=%s' % (FILTER_FLAG, '*One')]
    tests_run = RunAndExtractTestList(args)[0]
    SetEnvVar(FILTER_ENV_VAR, None)
    self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne'])
  def testShardStatusFileIsCreated(self):
    """Tests that the shard file is created if specified in the environment."""
    shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
                                     'shard_status_file')
    self.assert_(not os.path.exists(shard_status_file))
    extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
    try:
      InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)
    finally:
      self.assert_(os.path.exists(shard_status_file))
      os.remove(shard_status_file)
  def testShardStatusFileIsCreatedWithListTests(self):
    """Tests that the shard file is created with the "list_tests" flag."""
    shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
                                     'shard_status_file2')
    self.assert_(not os.path.exists(shard_status_file))
    extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
    try:
      output = InvokeWithModifiedEnv(extra_env,
                                     RunAndReturnOutput,
                                     [LIST_TESTS_FLAG])
    finally:
      # This assertion ensures that Google Test enumerated the tests as
      # opposed to running them.
      self.assert_('[==========]' not in output,
                   'Unexpected output during test enumeration.\n'
                   'Please ensure that LIST_TESTS_FLAG is assigned the\n'
                   'correct flag value for listing Google Test tests.')
      self.assert_(os.path.exists(shard_status_file))
      os.remove(shard_status_file)
  # This test method only exists when the binary advertises death-test
  # support (probed at import time).
  if SUPPORTS_DEATH_TESTS:
    def testShardingWorksWithDeathTests(self):
      """Tests integration with death tests and sharding."""
      gtest_filter = 'HasDeathTest.*:SeqP/*'
      expected_tests = [
          'HasDeathTest.Test1',
          'HasDeathTest.Test2',
          'SeqP/ParamTest.TestX/0',
          'SeqP/ParamTest.TestX/1',
          'SeqP/ParamTest.TestY/0',
          'SeqP/ParamTest.TestY/1',
          ]
      for flag in ['--gtest_death_test_style=threadsafe',
                   '--gtest_death_test_style=fast']:
        self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,
                                      check_exit_0=True, args=[flag])
        self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,
                                      check_exit_0=True, args=[flag])
if __name__ == '__main__':
gtest_test_utils.Main()
| bsd-3-clause |
flyingk/mavlink | pymavlink/generator/mavgen_javascript.py | 35 | 17546 | #!/usr/bin/env python
'''
parse a MAVLink protocol XML file and generate a Node.js javascript module implementation
Based on original work Copyright Andrew Tridgell 2011
Released under GNU GPL version 3 or later
'''
import sys, textwrap, os
from . import mavparse, mavtemplate
from shutil import copyfile
t = mavtemplate.MAVTemplate()
def generate_preamble(outf, msgs, args, xml):
    """Emit the fixed module header of the generated JS file: requires,
    the x25Crc helper, protocol constants, and the mavlink.header /
    mavlink.message base classes.  ${...} placeholders are filled from
    the substitution dict passed to t.write().
    """
    print("Generating preamble")
    t.write(outf, """
/*
MAVLink protocol implementation for node.js (auto-generated by mavgen_javascript.py)
Generated from: ${FILELIST}
Note: this file has been auto-generated. DO NOT EDIT
*/
jspack = require("jspack").jspack,
    _ = require("underscore"),
    events = require("events"),
    util = require("util");
// Add a convenience method to Buffer
Buffer.prototype.toByteArray = function () {
    return Array.prototype.slice.call(this, 0)
}
mavlink = function(){};
// Implement the X25CRC function (present in the Python version through the mavutil.py package)
mavlink.x25Crc = function(buffer, crc) {
    var bytes = buffer;
    var crc = crc || 0xffff;
    _.each(bytes, function(e) {
        var tmp = e ^ (crc & 0xff);
        tmp = (tmp ^ (tmp << 4)) & 0xff;
        crc = (crc >> 8) ^ (tmp << 8) ^ (tmp << 3) ^ (tmp >> 4);
        crc = crc & 0xffff;
    });
    return crc;
}
mavlink.WIRE_PROTOCOL_VERSION = "${WIRE_PROTOCOL_VERSION}";
mavlink.MAVLINK_TYPE_CHAR     = 0
mavlink.MAVLINK_TYPE_UINT8_T  = 1
mavlink.MAVLINK_TYPE_INT8_T   = 2
mavlink.MAVLINK_TYPE_UINT16_T = 3
mavlink.MAVLINK_TYPE_INT16_T  = 4
mavlink.MAVLINK_TYPE_UINT32_T = 5
mavlink.MAVLINK_TYPE_INT32_T  = 6
mavlink.MAVLINK_TYPE_UINT64_T = 7
mavlink.MAVLINK_TYPE_INT64_T  = 8
mavlink.MAVLINK_TYPE_FLOAT    = 9
mavlink.MAVLINK_TYPE_DOUBLE   = 10
// Mavlink headers incorporate sequence, source system (platform) and source component.
mavlink.header = function(msgId, mlen, seq, srcSystem, srcComponent) {
    this.mlen = ( typeof mlen === 'undefined' ) ? 0 : mlen;
    this.seq = ( typeof seq === 'undefined' ) ? 0 : seq;
    this.srcSystem = ( typeof srcSystem === 'undefined' ) ? 0 : srcSystem;
    this.srcComponent = ( typeof srcComponent === 'undefined' ) ? 0 : srcComponent;
    this.msgId = msgId
}
mavlink.header.prototype.pack = function() {
    return jspack.Pack('BBBBBB', [${PROTOCOL_MARKER}, this.mlen, this.seq, this.srcSystem, this.srcComponent, this.msgId]);
}
// Base class declaration: mavlink.message will be the parent class for each
// concrete implementation in mavlink.messages.
mavlink.message = function() {};
// Convenience setter to facilitate turning the unpacked array of data into member properties
mavlink.message.prototype.set = function(args) {
    _.each(this.fieldnames, function(e, i) {
        this[e] = args[i];
    }, this);
};
// This pack function builds the header and produces a complete MAVLink message,
// including header and message CRC.
mavlink.message.prototype.pack = function(mav, crc_extra, payload) {
    this.payload = payload;
    this.header = new mavlink.header(this.id, payload.length, mav.seq, mav.srcSystem, mav.srcComponent);
    this.msgbuf = this.header.pack().concat(payload);
    var crc = mavlink.x25Crc(this.msgbuf.slice(1));
    // For now, assume always using crc_extra = True. TODO: check/fix this.
    crc = mavlink.x25Crc([crc_extra], crc);
    this.msgbuf = this.msgbuf.concat(jspack.Pack('<H', [crc] ) );
    return this.msgbuf;
}
""", {'FILELIST' : ",".join(args),
      'PROTOCOL_MARKER' : xml.protocol_marker,
      'crc_extra' : xml.crc_extra,
      'WIRE_PROTOCOL_VERSION' : xml.wire_protocol_version })
def generate_enums(outf, enums):
    """Write each enum entry as a `mavlink.<NAME> = <value>` constant line."""
    print("Generating enums")
    outf.write("\n// enums\n")
    # Long descriptions wrap onto continuation lines that stay inside a
    # JS line comment.
    wrapper = textwrap.TextWrapper(initial_indent="", subsequent_indent=" // ")
    for enum in enums:
        outf.write("\n// %s\n" % enum.name)
        for entry in enum.entry:
            description = wrapper.fill(entry.description)
            outf.write("mavlink.%s = %u // %s\n" % (entry.name, entry.value, description))
def generate_message_ids(outf, msgs):
    """Write the numeric MAVLINK_MSG_ID_* constants, plus the BAD_DATA sentinel."""
    print("Generating message IDs")
    outf.write("\n// message IDs\n")
    # -1 is the parser's sentinel for an unparseable packet.
    outf.write("mavlink.MAVLINK_MSG_ID_BAD_DATA = -1\n")
    for msg in msgs:
        outf.write("mavlink.MAVLINK_MSG_ID_%s = %u\n" % (msg.name.upper(), msg.id))
def generate_classes(outf, msgs):
    """
    Generate the implementations of the classes representing MAVLink messages.
    """
    print("Generating class definitions")
    wrapper = textwrap.TextWrapper(initial_indent="", subsequent_indent="")
    outf.write("\nmavlink.messages = {};\n\n");
    # Render each field as "name : description (type)" for the class comment.
    def field_descriptions(fields):
        ret = ""
        for f in fields:
            ret += " %-18s : %s (%s)\n" % (f.name, f.description.strip(), f.type)
        return ret
    for m in msgs:
        comment = "%s\n\n%s" % (wrapper.fill(m.description.strip()), field_descriptions(m.fields))
        selffieldnames = 'self, '
        for f in m.fields:
            # if f.omit_arg:
            #    selffieldnames += '%s=%s, ' % (f.name, f.const_value)
            #else:
            # -- Omitting the code above because it is rarely used (only once?) and would need some special handling
            # in javascript. Specifically, inside the method definition, it needs to check for a value then assign
            # a default.
            selffieldnames += '%s, ' % f.name
        selffieldnames = selffieldnames[:-2]
        sub = {'NAMELOWER' : m.name.lower(),
               'SELFFIELDNAMES' : selffieldnames,
               'COMMENT' : comment,
               'FIELDNAMES' : ", ".join(m.fieldnames)}
        t.write(outf, """
/*
${COMMENT}
*/
""", sub)
        # function signature + declaration
        outf.write("mavlink.messages.%s = function(" % (m.name.lower()))
        if len(m.fields) != 0:
            outf.write(", ".join(m.fieldnames))
        outf.write(") {")
        # body: set message type properties
        outf.write("""
    this.format = '%s';
    this.id = mavlink.MAVLINK_MSG_ID_%s;
    this.order_map = %s;
    this.crc_extra = %u;
    this.name = '%s';
""" % (m.fmtstr, m.name.upper(), m.order_map, m.crc_extra, m.name.upper()))
        # body: set own properties
        if len(m.fieldnames) != 0:
            outf.write("    this.fieldnames = ['%s'];\n" % "', '".join(m.fieldnames))
        outf.write("""
    this.set(arguments);
}
""")
        # inherit methods from the base message class
        outf.write("""
mavlink.messages.%s.prototype = new mavlink.message;
""" % m.name.lower())
        # Implement the pack() function for this message
        outf.write("""
mavlink.messages.%s.prototype.pack = function(mav) {
    return mavlink.message.prototype.pack.call(this, mav, this.crc_extra, jspack.Pack(this.format""" % m.name.lower())
        if len(m.fields) != 0:
            outf.write(", [ this." + ", this.".join(m.ordered_fieldnames) + ']')
        outf.write("));\n}\n\n")
def mavfmt(field):
    """Return the jspack/struct format code for a message field.

    Scalar fields map straight to one format character; array fields are
    prefixed with the element count, with byte-sized types collapsing to
    a packed string code ('<n>s').
    """
    # note: 'map' was a poor local name in the original (shadowed the builtin)
    type_codes = {
        'float': 'f',
        'double': 'd',
        'char': 'c',
        'int8_t': 'b',
        'uint8_t': 'B',
        'uint8_t_mavlink_version': 'B',
        'int16_t': 'h',
        'uint16_t': 'H',
        'int32_t': 'i',
        'uint32_t': 'I',
        'int64_t': 'q',
        'uint64_t': 'Q',
    }
    if not field.array_length:
        return type_codes[field.type]
    if field.type in ('char', 'int8_t', 'uint8_t'):
        return '%ds' % field.array_length
    return '%d%s' % (field.array_length, type_codes[field.type])
def generate_mavlink_class(outf, msgs, xml):
    """Emit the ``mavlink.map`` decode table plus the static javascript
    ``MAVLink`` protocol-handling class (buffering, prefix/length parsing,
    CRC checking and message decoding).

    NOTE(review): the generated javascript defines ``MAVLink.prototype.log``
    twice; the second (level, message) definition silently overwrites the
    first, so ``this.log(m)`` at the end of ``decode`` passes the message as
    the level — confirm against upstream before relying on logging.
    """
    print("Generating MAVLink class")
    # Write mapper to enable decoding based on the integer message type
    outf.write("\n\nmavlink.map = {\n");
    for m in msgs:
        outf.write("        %s: { format: '%s', type: mavlink.messages.%s, order_map: %s, crc_extra: %u },\n" % (
            m.id, m.fmtstr, m.name.lower(), m.order_map, m.crc_extra))
    outf.write("}\n\n")
    # Everything below is a verbatim javascript template; it must not be
    # altered here without changing the generated output byte-for-byte.
    t.write(outf, """
// Special mavlink message to capture malformed data packets for debugging
mavlink.messages.bad_data = function(data, reason) {
    this.id = mavlink.MAVLINK_MSG_ID_BAD_DATA;
    this.data = data;
    this.reason = reason;
    this.msgbuf = data;
}
/* MAVLink protocol handling class */
MAVLink = function(logger, srcSystem, srcComponent) {
    this.logger = logger;
    this.seq = 0;
    this.buf = new Buffer(0);
    this.bufInError = new Buffer(0);
    this.srcSystem = (typeof srcSystem === 'undefined') ? 0 : srcSystem;
    this.srcComponent = (typeof srcComponent === 'undefined') ? 0 : srcComponent;
    // The first packet we expect is a valid header, 6 bytes.
    this.expected_length = 6;
    this.have_prefix_error = false;
    this.protocol_marker = 254;
    this.little_endian = true;
    this.crc_extra = true;
    this.sort_fields = true;
    this.total_packets_sent = 0;
    this.total_bytes_sent = 0;
    this.total_packets_received = 0;
    this.total_bytes_received = 0;
    this.total_receive_errors = 0;
    this.startup_time = Date.now();
}
// Implements EventEmitter
util.inherits(MAVLink, events.EventEmitter);
// If the logger exists, this function will add a message to it.
// Assumes the logger is a winston object.
MAVLink.prototype.log = function(message) {
    if(this.logger) {
        this.logger.info(message);
    }
}
MAVLink.prototype.log = function(level, message) {
    if(this.logger) {
        this.logger.log(level, message);
    }
}
MAVLink.prototype.send = function(mavmsg) {
    buf = mavmsg.pack(this);
    this.file.write(buf);
    this.seq = (this.seq + 1) % 256;
    this.total_packets_sent +=1;
    this.total_bytes_sent += buf.length;
}
// return number of bytes needed for next parsing stage
MAVLink.prototype.bytes_needed = function() {
    ret = this.expected_length - this.buf.length;
    return ( ret <= 0 ) ? 1 : ret;
}
// add data to the local buffer
MAVLink.prototype.pushBuffer = function(data) {
    if(data) {
        this.buf = Buffer.concat([this.buf, data]);
        this.total_bytes_received += data.length;
    }
}
// Decode prefix. Elides the prefix.
MAVLink.prototype.parsePrefix = function() {
    // Test for a message prefix.
    if( this.buf.length >= 1 && this.buf[0] != 254 ) {
        // Strip the offending initial byte and throw an error.
        var badPrefix = this.buf[0];
        this.bufInError = this.buf.slice(0,1);
        this.buf = this.buf.slice(1);
        this.expected_length = 6;
        // TODO: enable subsequent prefix error suppression if robust_parsing is implemented
        //if(!this.have_prefix_error) {
        //    this.have_prefix_error = true;
            throw new Error("Bad prefix ("+badPrefix+")");
        //}
    }
    //else if( this.buf.length >= 1 && this.buf[0] == 254 ) {
    //    this.have_prefix_error = false;
    //}
}
// Determine the length. Leaves buffer untouched.
MAVLink.prototype.parseLength = function() {
    if( this.buf.length >= 2 ) {
        var unpacked = jspack.Unpack('BB', this.buf.slice(0, 2));
        this.expected_length = unpacked[1] + 8; // length of message + header + CRC
    }
}
// input some data bytes, possibly returning a new message
MAVLink.prototype.parseChar = function(c) {
    var m = null;
    try {
        this.pushBuffer(c);
        this.parsePrefix();
        this.parseLength();
        m = this.parsePayload();
    } catch(e) {
        this.log('error', e.message);
        this.total_receive_errors += 1;
        m = new mavlink.messages.bad_data(this.bufInError, e.message);
        this.bufInError = new Buffer(0);
    }
    if(null != m) {
        this.emit(m.name, m);
        this.emit('message', m);
    }
    return m;
}
MAVLink.prototype.parsePayload = function() {
    var m = null;
    // If we have enough bytes to try and read it, read it.
    if( this.expected_length >= 8 && this.buf.length >= this.expected_length ) {
        // Slice off the expected packet length, reset expectation to be to find a header.
        var mbuf = this.buf.slice(0, this.expected_length);
        // TODO: slicing off the buffer should depend on the error produced by the decode() function
        // - if a message we find a well formed message, cut-off the expected_length
        // - if the message is not well formed (correct prefix by accident), cut-off 1 char only
        this.buf = this.buf.slice(this.expected_length);
        this.expected_length = 6;
        // w.info("Attempting to parse packet, message candidate buffer is ["+mbuf.toByteArray()+"]");
        try {
            m = this.decode(mbuf);
            this.total_packets_received += 1;
        }
        catch(e) {
            // Set buffer in question and re-throw to generic error handling
            this.bufInError = mbuf;
            throw e;
        }
    }
    return m;
}
// input some data bytes, possibly returning an array of new messages
MAVLink.prototype.parseBuffer = function(s) {
    // Get a message, if one is available in the stream.
    var m = this.parseChar(s);
    // No messages available, bail.
    if ( null === m ) {
        return null;
    }
    // While more valid messages can be read from the existing buffer, add
    // them to the array of new messages and return them.
    var ret = [m];
    while(true) {
        m = this.parseChar();
        if ( null === m ) {
            // No more messages left.
            return ret;
        }
        ret.push(m);
    }
    return ret;
}
/* decode a buffer as a MAVLink message */
MAVLink.prototype.decode = function(msgbuf) {
    var magic, mlen, seq, srcSystem, srcComponent, unpacked, msgId;
    // decode the header
    try {
        unpacked = jspack.Unpack('cBBBBB', msgbuf.slice(0, 6));
        magic = unpacked[0];
        mlen = unpacked[1];
        seq = unpacked[2];
        srcSystem = unpacked[3];
        srcComponent = unpacked[4];
        msgId = unpacked[5];
    }
    catch(e) {
        throw new Error('Unable to unpack MAVLink header: ' + e.message);
    }
    if (magic.charCodeAt(0) != 254) {
        throw new Error("Invalid MAVLink prefix ("+magic.charCodeAt(0)+")");
    }
    if( mlen != msgbuf.length - 8 ) {
        throw new Error("Invalid MAVLink message length. Got " + (msgbuf.length - 8) + " expected " + mlen + ", msgId=" + msgId);
    }
    if( false === _.has(mavlink.map, msgId) ) {
        throw new Error("Unknown MAVLink message ID (" + msgId + ")");
    }
    // decode the payload
    // refs: (fmt, type, order_map, crc_extra) = mavlink.map[msgId]
    var decoder = mavlink.map[msgId];
    // decode the checksum
    try {
        var receivedChecksum = jspack.Unpack('<H', msgbuf.slice(msgbuf.length - 2));
    } catch (e) {
        throw new Error("Unable to unpack MAVLink CRC: " + e.message);
    }
    var messageChecksum = mavlink.x25Crc(msgbuf.slice(1, msgbuf.length - 2));
    // Assuming using crc_extra = True. See the message.prototype.pack() function.
    messageChecksum = mavlink.x25Crc([decoder.crc_extra], messageChecksum);
    if ( receivedChecksum != messageChecksum ) {
        throw new Error('invalid MAVLink CRC in msgID ' +msgId+ ', got 0x' + receivedChecksum + ' checksum, calculated payload checkum as 0x'+messageChecksum );
    }
    // Decode the payload and reorder the fields to match the order map.
    try {
        var t = jspack.Unpack(decoder.format, msgbuf.slice(6, msgbuf.length));
    }
    catch (e) {
        throw new Error('Unable to unpack MAVLink payload type='+decoder.type+' format='+decoder.format+' payloadLength='+ msgbuf.slice(6, -2).length +': '+ e.message);
    }
    // Reorder the fields to match the order map
    var args = [];
    _.each(t, function(e, i, l) {
        args[i] = t[decoder.order_map[i]]
    });
    // construct the message object
    try {
        var m = new decoder.type(args);
        m.set.call(m, args);
    }
    catch (e) {
        throw new Error('Unable to instantiate MAVLink message of type '+decoder.type+' : ' + e.message);
    }
    m.msgbuf = msgbuf;
    m.payload = msgbuf.slice(6);
    m.crc = receivedChecksum;
    m.header = new mavlink.header(msgId, mlen, seq, srcSystem, srcComponent);
    this.log(m);
    return m;
}
""", xml)
def generate_footer(outf):
    """Write the trailing boilerplate that exposes ``mavlink`` as a
    CommonJS module."""
    t.write(outf, """
// Expose this code as a module
module.exports = mavlink;
""")
def generate(basename, xml):
    '''generate complete javascript implementation'''
    # Normalise the output filename to end in '.js'.
    filename = basename if basename.endswith('.js') else basename + '.js'
    msgs = []
    enums = []
    filelist = []
    for x in xml:
        msgs.extend(x.message)
        enums.extend(x.enum)
        filelist.append(os.path.basename(x.filename))
    # Precompute, per message, the jspack format string and the map from
    # declaration (field) order to wire (ordered-field) order.
    endian_marker = '<' if xml[0].little_endian else '>'
    for m in msgs:
        m.fmtstr = endian_marker
        for f in m.ordered_fields:
            m.fmtstr += mavfmt(f)
        m.order_map = [m.ordered_fieldnames.index(name) for name in m.fieldnames]
    print("Generating %s" % filename)
    outf = open(filename, "w")
    generate_preamble(outf, msgs, filelist, xml[0])
    generate_enums(outf, enums)
    generate_message_ids(outf, msgs)
    generate_classes(outf, msgs)
    generate_mavlink_class(outf, msgs, xml[0])
    generate_footer(outf)
    outf.close()
    print("Generated %s OK" % filename)
| lgpl-3.0 |
grhawk/ASE | ase/calculators/aims.py | 2 | 15283 | """This module defines an ASE interface to FHI-aims.
Felix Hanke hanke@liverpool.ac.uk
Jonas Bjork j.bjork@liverpool.ac.uk
"""
import os
import numpy as np
from ase.io.aims import write_aims, read_aims
from ase.data import atomic_numbers
from ase.calculators.calculator import FileIOCalculator, Parameters, kpts2mp, \
ReadError
# FHI-aims control.in keywords grouped by value type. Only bool_keys is
# visibly consumed in this module (Aims.write_control); the other groups
# presumably drive type-aware handling elsewhere — confirm against the
# FHI-aims manual before extending.
# Keywords taking a single floating-point value.
float_keys = [
    'charge',
    'charge_mix_param',
    'default_initial_moment',
    'fixed_spin_moment',
    'hartree_convergence_parameter',
    'harmonic_length_scale',
    'ini_linear_mix_param',
    'ini_spin_mix_parma',
    'initial_moment',
    'MD_MB_init',
    'MD_time_step',
    'prec_mix_param',
    'set_vacuum_level',
    'spin_mix_param',
]
# Keywords whose float value is conventionally given in exponential
# notation (convergence thresholds and accuracies).
exp_keys = [
    'basis_threshold',
    'occupation_thr',
    'sc_accuracy_eev',
    'sc_accuracy_etot',
    'sc_accuracy_forces',
    'sc_accuracy_rho',
    'sc_accuracy_stress',
]
# Keywords taking a single string value.
string_keys = [
    'communication_type',
    'density_update_method',
    'KS_method',
    'mixer',
    'output_level',
    'packed_matrix_format',
    'relax_unit_cell',
    'restart',
    'restart_read_only',
    'restart_write_only',
    'spin',
    'total_energy_method',
    'qpe_calc',
    'xc',
    'species_dir',
]
# Keywords taking a single integer value.
int_keys = [
    'empty_states',
    'ini_linear_mixing',
    'max_relaxation_steps',
    'max_zeroin',
    'multiplicity',
    'n_max_pulay',
    'sc_iter_limit',
    'walltime',
]
# Flag keywords, written as '.true.'/'.false.' by Aims.write_control.
bool_keys = [
    'collect_eigenvectors',
    'compute_forces',
    'compute_kinetic',
    'compute_numerical_stress',
    'compute_analytical_stress',
    'distributed_spline_storage',
    'evaluate_work_function',
    'final_forces_cleaned',
    'hessian_to_restart_geometry',
    'load_balancing',
    'MD_clean_rotations',
    'MD_restart',
    'override_illconditioning',
    'override_relativity',
    'restart_relaxations',
    'squeeze_memory',
    'symmetry_reduced_k_grid',
    'use_density_matrix',
    'use_dipole_correction',
    'use_local_index',
    'use_logsbt',
    'vdw_correction_hirshfeld',
]
# Keywords taking several whitespace-separated values on one line.
list_keys = [
    'init_hess',
    'k_grid',
    'k_offset',
    'MD_run',
    'MD_schedule',
    'MD_segment',
    'mixer_threshold',
    'occupation_type',
    'output',
    'cube',
    'preconditioner',
    'relativistic',
    'relax_geometry',
]
class Aims(FileIOCalculator):
    """ASE file-I/O calculator for the FHI-aims all-electron DFT code.

    Writes ``geometry.in`` and ``control.in`` (including species data),
    runs the aims executable given by ``command`` and parses energy,
    forces, stress and dipole moment back out of ``aims.out``.
    """
    command = 'aims.version.serial.x > aims.out'
    implemented_properties = ['energy', 'forces', 'stress', 'dipole']

    def __init__(self, restart=None, ignore_bad_restart_file=False,
                 label=os.curdir, atoms=None, cubes=None, **kwargs):
        """Construct FHI-aims calculator.

        The keyword arguments (kwargs) can be one of the ASE standard
        keywords: 'xc', 'kpts' and 'smearing' or any of FHI-aims'
        native keywords.

        Additional arguments:

        cubes: AimsCube object
            Cube file specification.
        """
        FileIOCalculator.__init__(self, restart, ignore_bad_restart_file,
                                  label, atoms, **kwargs)
        self.cubes = cubes

    def set_label(self, label):
        """Set the working directory; output is read from <label>/aims.out."""
        self.label = label
        self.directory = label
        self.prefix = ''
        self.out = os.path.join(label, 'aims.out')

    def check_state(self, atoms):
        """Return the list of changes since the last calculation."""
        system_changes = FileIOCalculator.check_state(self, atoms)
        # Ignore unit cell for molecules:
        if not atoms.pbc.any() and 'cell' in system_changes:
            system_changes.remove('cell')
        return system_changes

    def set(self, **kwargs):
        """Set parameters, mapping ASE 'xc' names to aims spellings."""
        xc = kwargs.get('xc')
        if xc:
            kwargs['xc'] = {'LDA': 'pw-lda', 'PBE': 'pbe'}.get(xc, xc)
        changed_parameters = FileIOCalculator.set(self, **kwargs)
        if changed_parameters:
            self.reset()
        return changed_parameters

    def write_input(self, atoms, properties=None, system_changes=None):
        """Write geometry.in, control.in (plus species) and parameters.ase."""
        FileIOCalculator.write_input(self, atoms, properties, system_changes)
        have_lattice_vectors = atoms.pbc.any()
        have_k_grid = ('k_grid' in self.parameters or
                       'kpts' in self.parameters)
        # A periodic calculation needs a k-grid, and a k-grid only makes
        # sense for a periodic calculation.
        if have_lattice_vectors and not have_k_grid:
            raise RuntimeError('Found lattice vectors but no k-grid!')
        if not have_lattice_vectors and have_k_grid:
            raise RuntimeError('Found k-grid but no lattice vectors!')
        write_aims(os.path.join(self.directory, 'geometry.in'), atoms)
        self.write_control(atoms, os.path.join(self.directory, 'control.in'))
        self.write_species(atoms, os.path.join(self.directory, 'control.in'))
        self.parameters.write(os.path.join(self.directory, 'parameters.ase'))

    def write_control(self, atoms, filename):
        """Write the control.in file from the current parameters."""
        output = open(filename, 'w')
        for line in ['=====================================================',
                     'FHI-aims file: ' + filename,
                     'Created using the Atomic Simulation Environment (ASE)',
                     '',
                     'List of parameters used to initialize the calculator:',
                     '=====================================================']:
            output.write('#' + line + '\n')
        # 'kpts'/'smearing' are ASE-level conveniences that expand to the
        # native 'k_grid'/'occupation_type' keywords; both forms at once
        # would be ambiguous.
        assert not ('kpts' in self.parameters and 'k_grid' in self.parameters)
        assert not ('smearing' in self.parameters and
                    'occupation_type' in self.parameters)
        for key, value in self.parameters.items():
            if key == 'kpts':
                mp = kpts2mp(atoms, self.parameters.kpts)
                output.write('%-35s%d %d %d\n' % (('k_grid',) + tuple(mp)))
                dk = 0.5 - 0.5 / np.array(mp)
                output.write('%-35s%f %f %f\n' % (('k_offset',) + tuple(dk)))
            elif key == 'species_dir':
                # Handled by write_species(), not a control.in keyword line.
                continue
            elif key == 'smearing':
                name = self.parameters.smearing[0].lower()
                if name == 'fermi-dirac':
                    name = 'fermi'
                width = self.parameters.smearing[1]
                output.write('%-35s%s %f' % ('occupation_type', name, width))
                if name == 'methfessel-paxton':
                    order = self.parameters.smearing[2]
                    output.write(' %d' % order)
                # BUG FIX: this used to be ``output.write('\n' % order)``,
                # which raised TypeError ('\n' contains no conversion
                # specifier) and, outside the Methfessel-Paxton branch,
                # NameError because 'order' was never assigned.
                output.write('\n')
            elif key == 'output':
                for output_type in value:
                    output.write('%-35s%s\n' % (key, output_type))
            elif key == 'vdw_correction_hirshfeld' and value:
                # Flag keyword written without a value.
                output.write('%-35s\n' % key)
            elif key in bool_keys:
                output.write('%-35s.%s.\n' % (key, repr(bool(value)).lower()))
            elif isinstance(value, (tuple, list)):
                output.write('%-35s%s\n' %
                             (key, ' '.join(str(x) for x in value)))
            elif isinstance(value, str):
                output.write('%-35s%s\n' % (key, value))
            else:
                output.write('%-35s%r\n' % (key, value))
        if self.cubes:
            self.cubes.write(output)
        output.write(
            '#=======================================================\n\n')
        output.close()

    def read(self, label):
        """Read atoms, parameters and results of a previous run in *label*.

        Raises ReadError if any required file is missing.
        """
        FileIOCalculator.read(self, label)
        geometry = os.path.join(self.directory, 'geometry.in')
        control = os.path.join(self.directory, 'control.in')
        for filename in [geometry, control, self.out]:
            if not os.path.isfile(filename):
                raise ReadError
        self.atoms = read_aims(geometry)
        self.parameters = Parameters.read(os.path.join(self.directory,
                                                       'parameters.ase'))
        self.read_results()

    def read_results(self):
        """Parse aims.out; raise RuntimeError if the SCF did not converge."""
        converged = self.read_convergence()
        if not converged:
            os.system('tail -20 ' + self.out)
            raise RuntimeError('FHI-aims did not converge!\n' +
                               'The last lines of output are printed above ' +
                               'and should give an indication why.')
        self.read_energy()
        if ('compute_forces' in self.parameters or
            'sc_accuracy_forces' in self.parameters):
            self.read_forces()
        if ('compute_numerical_stress' in self.parameters or
            'compute_analytical_stress' in self.parameters):
            self.read_stress()
        if ('dipole' in self.parameters.get('output', []) and
            not self.atoms.pbc.any()):
            self.read_dipole()

    def write_species(self, atoms, filename='control.in'):
        """Append default species definitions for every element in *atoms*.

        The species files are looked up in the 'species_dir' parameter or,
        failing that, in the $AIMS_SPECIES_DIR environment variable.
        """
        species_path = self.parameters.get('species_dir')
        if species_path is None:
            species_path = os.environ.get('AIMS_SPECIES_DIR')
        if species_path is None:
            raise RuntimeError(
                'Missing species directory! Use species_dir ' +
                'parameter or set $AIMS_SPECIES_DIR environment variable.')
        control = open(filename, 'a')
        symbols = atoms.get_chemical_symbols()
        # Unique chemical symbols, preserving first-appearance order.
        symbols2 = []
        for n, symbol in enumerate(symbols):
            if symbol not in symbols2:
                symbols2.append(symbol)
        for symbol in symbols2:
            fd = os.path.join(species_path, '%02i_%s_default' %
                              (atomic_numbers[symbol], symbol))
            for line in open(fd, 'r'):
                control.write(line)
        control.close()

    def get_dipole_moment(self, atoms):
        """Dipole moment; only available for non-periodic systems with
        'dipole' in the requested output."""
        if ('dipole' not in self.parameters.get('output', []) or
            atoms.pbc.any()):
            raise NotImplementedError
        return FileIOCalculator.get_dipole_moment(self, atoms)

    def get_stress(self, atoms):
        """Stress tensor; requires a compute_*_stress parameter."""
        if ('compute_numerical_stress' not in self.parameters and
            'compute_analytical_stress' not in self.parameters):
            raise NotImplementedError
        return FileIOCalculator.get_stress(self, atoms)

    def get_forces(self, atoms):
        """Forces; requires compute_forces or sc_accuracy_forces."""
        if ('compute_forces' not in self.parameters and
            'sc_accuracy_forces' not in self.parameters):
            raise NotImplementedError
        return FileIOCalculator.get_forces(self, atoms)

    def read_dipole(self):
        "Method that reads the electric dipole moment from the output file."
        for line in open(self.out, 'r'):
            if line.rfind('Total dipole moment [eAng]') > -1:
                dipolemoment = np.array([float(f)
                                         for f in line.split()[6:9]])
        self.results['dipole'] = dipolemoment

    def read_energy(self):
        """Read corrected/uncorrected total energies from aims.out.

        NOTE(review): if the expected lines are missing this raises
        NameError for E0/F; read_convergence() is assumed to have
        guaranteed a complete output beforehand.
        """
        for line in open(self.out, 'r'):
            if line.rfind('Total energy corrected') > -1:
                E0 = float(line.split()[5])
            elif line.rfind('Total energy uncorrected') > -1:
                F = float(line.split()[5])
        self.results['free_energy'] = F
        self.results['energy'] = E0

    def read_forces(self):
        """Read the atomic forces for the last ionic configuration found
        in the output file into results['forces']."""
        lines = open(self.out, 'r').readlines()
        forces = np.zeros([len(self.atoms), 3])
        for n, line in enumerate(lines):
            if line.rfind('Total atomic forces') > -1:
                for iatom in range(len(self.atoms)):
                    data = lines[n + iatom + 1].split()
                    for iforce in range(3):
                        forces[iatom, iforce] = float(data[2 + iforce])
        self.results['forces'] = forces

    def read_stress(self):
        """Read the (analytical or numerical) stress tensor and store it
        in 6-component Voigt form."""
        lines = open(self.out, 'r').readlines()
        stress = None
        for n, line in enumerate(lines):
            if (line.rfind('|              Analytical stress tensor') > -1 or
                line.rfind('Numerical stress tensor') > -1):
                stress = []
                for i in [n + 5, n + 6, n + 7]:
                    data = lines[i].split()
                    stress += [float(data[2]), float(data[3]), float(data[4])]
        # rearrange in 6-component form and return
        self.results['stress'] = np.array([stress[0], stress[4], stress[8],
                                           stress[5], stress[2], stress[1]])

    def read_convergence(self):
        """Return True if aims.out contains the normal-termination marker."""
        converged = False
        lines = open(self.out, 'r').readlines()
        for n, line in enumerate(lines):
            if line.rfind('Have a nice day') > -1:
                converged = True
        return converged
class AimsCube:
    "Object to ensure the output of cube files, can be attached to Aims object"
    def __init__(self, origin=(0, 0, 0),
                 edges=[(0.1, 0.0, 0.0), (0.0, 0.1, 0.0), (0.0, 0.0, 0.1)],
                 points=(50, 50, 50), plots=None):
        """parameters:
        origin, edges, points = same as in the FHI-aims output
        plots: what to print, same names as in FHI-aims """
        # NOTE(review): 'edges' uses a mutable default list, which is shared
        # between instances; harmless only while it is never mutated.
        self.name = 'AimsCube'
        self.origin = origin
        self.edges = edges
        self.points = points
        self.plots = plots
    def ncubes(self):
        """returns the number of cube files to output """
        if self.plots:
            number = len(self.plots)
        else:
            number = 0
        return number
    def set(self, **kwargs):
        """ set any of the parameters ... """
        # NOT IMPLEMENTED AT THE MOMENT!
    def move_to_base_name(self, basename):
        """ when output tracking is on or the base namem is not standard,
        this routine will rename add the base to the cube file output for
        easier tracking """
        for plot in self.plots:
            found = False
            cube = plot.split()
            if (cube[0] == 'total_density' or
                cube[0] == 'spin_density' or
                cube[0] == 'delta_density'):
                found = True
                old_name = cube[0] + '.cube'
                new_name = basename + '.' + old_name
            if cube[0] == 'eigenstate' or cube[0] == 'eigenstate_density':
                found = True
                state = int(cube[1])
                s_state = cube[1]
                # Zero-pad the state number to match the 5-digit filenames
                # FHI-aims writes (one leading '0' per power of ten below).
                for i in [10, 100, 1000, 10000]:
                    if state < i:
                        s_state = '0' + s_state
                old_name = cube[0] + '_' + s_state + '_spin_1.cube'
                new_name = basename + '.' + old_name
            if found:
                # Relies on a POSIX 'mv'; silently does nothing on failure.
                os.system('mv ' + old_name + ' ' + new_name)
    def add_plot(self, name):
        """ in case you forgot one ... """
        self.plots += [name]
    def write(self, file):
        """ write the necessary output to the already opened control.in """
        # First plot carries the full cube geometry specification ...
        file.write('output cube ' + self.plots[0] + '\n')
        file.write('   cube origin ')
        for ival in self.origin:
            file.write(str(ival) + ' ')
        file.write('\n')
        for i in range(3):
            file.write('   cube edge ' + str(self.points[i]) + ' ')
            for ival in self.edges[i]:
                file.write(str(ival) + ' ')
            file.write('\n')
        # ... remaining plots reuse the same grid and only name the quantity.
        if self.ncubes() > 1:
            for i in range(self.ncubes() - 1):
                file.write('output cube ' + self.plots[i + 1] + '\n')
| gpl-2.0 |
Nikoli/youtube-dl | youtube_dl/extractor/muenchentv.py | 160 | 2133 | # coding: utf-8
from __future__ import unicode_literals
import json
from .common import InfoExtractor
from ..utils import (
determine_ext,
int_or_none,
js_to_json,
)
class MuenchenTVIE(InfoExtractor):
    """Extractor for the muenchen.tv live stream page."""
    _VALID_URL = r'https?://(?:www\.)?muenchen\.tv/livestream'
    IE_DESC = 'münchen.tv'
    _TEST = {
        'url': 'http://www.muenchen.tv/livestream/',
        'info_dict': {
            'id': '5334',
            'display_id': 'live',
            'ext': 'mp4',
            'title': 're:^münchen.tv-Livestream [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
            'is_live': True,
            'thumbnail': 're:^https?://.*\.jpg$'
        },
        'params': {
            'skip_download': True,
        }
    }
    def _real_extract(self, url):
        # The page always serves the single live stream, so the display id
        # is a fixed label rather than something parsed from the URL.
        display_id = 'live'
        webpage = self._download_webpage(url, display_id)
        title = self._live_title(self._og_search_title(webpage))
        # Extract the inline jwplayer-style playlist literal and convert the
        # relaxed javascript object notation to strict JSON.
        data_js = self._search_regex(
            r'(?s)\nplaylist:\s*(\[.*?}\]),related:',
            webpage, 'playlist configuration')
        data_json = js_to_json(data_js)
        data = json.loads(data_json)[0]
        video_id = data['mediaid']
        thumbnail = data.get('image')
        formats = []
        for format_num, s in enumerate(data['sources']):
            ext = determine_ext(s['file'], None)
            # Sources without a label get a synthetic positional one.
            label_str = s.get('label')
            if label_str is None:
                label_str = '_%d' % format_num
            if ext is None:
                format_id = label_str
            else:
                format_id = '%s-%s' % (ext, label_str)
            formats.append({
                'url': s['file'],
                # The label doubles as the bitrate when numeric; presumably
                # kbit/s — int_or_none drops non-numeric labels.
                'tbr': int_or_none(s.get('label')),
                'ext': 'mp4',
                'format_id': format_id,
                # De-prioritize SMIL manifests in favour of direct streams.
                'preference': -100 if '.smil' in s['file'] else 0,
            })
        self._sort_formats(formats)
        return {
            'id': video_id,
            'display_id': display_id,
            'title': title,
            'formats': formats,
            'is_live': True,
            'thumbnail': thumbnail,
        }
| unlicense |
somic/paasta | paasta_tools/cli/cmds/docker_inspect.py | 1 | 1797 | #!/usr/bin/env python
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import sys
from paasta_tools.cli.utils import get_container_name
from paasta_tools.cli.utils import get_subparser
from paasta_tools.cli.utils import get_task_from_instance
from paasta_tools.cli.utils import PaastaTaskNotFound
def add_subparser(subparsers):
    """Register the 'docker_inspect' subcommand with the paasta CLI."""
    get_subparser(
        description="'paasta docker_inspect' works by picking a container running your service "
        "at random. It then runs docker docker_inspect <container_id> ",
        help_text="Docker inspect against a container running your service",
        command='docker_inspect',
        function=paasta_docker_inspect,
        subparsers=subparsers,
    )
def paasta_docker_inspect(args):
    """Locate a Mesos task for the requested service instance and run
    ``docker inspect`` on its container via ssh on the host it runs on.

    Exits with status 1 if no matching task can be found.
    """
    try:
        task = get_task_from_instance(
            cluster=args.cluster,
            service=args.service,
            instance=args.instance,
            slave_hostname=args.host,
            task_id=args.mesos_id,
        )
    except PaastaTaskNotFound:
        sys.exit(1)
    hostname = task.slave['hostname']
    remote_command = "sudo docker inspect {}".format(get_container_name(task))
    subprocess.call(["ssh", "-o", "LogLevel=QUIET", "-tA", hostname, remote_command])
| apache-2.0 |
hdinsight/hue | desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/Cipher/Blowfish.py | 116 | 4460 | # -*- coding: utf-8 -*-
#
# Cipher/Blowfish.py : Blowfish
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Blowfish symmetric cipher
Blowfish_ is a symmetric block cipher designed by Bruce Schneier.
It has a fixed data block size of 8 bytes and its keys can vary in length
from 32 to 448 bits (4 to 56 bytes).
Blowfish is deemed secure and it is fast. However, its keys should be chosen
to be big enough to withstand a brute force attack (e.g. at least 16 bytes).
As an example, encryption can be done as follows:
>>> from Crypto.Cipher import Blowfish
>>> from Crypto import Random
>>> from struct import pack
>>>
>>> bs = Blowfish.block_size
>>> key = b'An arbitrarily long key'
>>> iv = Random.new().read(bs)
>>> cipher = Blowfish.new(key, Blowfish.MODE_CBC, iv)
>>> plaintext = b'docendo discimus '
>>> plen = bs - divmod(len(plaintext),bs)[1]
>>> padding = [plen]*plen
>>> padding = pack('b'*plen, *padding)
>>> msg = iv + cipher.encrypt(plaintext + padding)
.. _Blowfish: http://www.schneier.com/blowfish.html
:undocumented: __revision__, __package__
"""
__revision__ = "$Id$"
from Crypto.Cipher import blockalgo
from Crypto.Cipher import _Blowfish
class BlowfishCipher (blockalgo.BlockAlgo):
    """Blowfish cipher object"""
    def __init__(self, key, *args, **kwargs):
        """Initialize a Blowfish cipher object.

        Delegates to `blockalgo.BlockAlgo` with the `_Blowfish` C
        implementation; see `new()` at the module level for the
        accepted key/mode/IV arguments."""
        blockalgo.BlockAlgo.__init__(self, _Blowfish, key, *args, **kwargs)
def new(key, *args, **kwargs):
    """Create a new Blowfish cipher

    Keys shorter than 16 bytes are accepted but weak against brute
    force; see the module docstring.

    :Parameters:
      key : byte string
        The secret key to use in the symmetric cipher.
        Its length can vary from 4 to 56 bytes.
    :Keywords:
      mode : a *MODE_** constant
        The chaining mode to use for encryption or decryption.
        Default is `MODE_ECB`.
      IV : byte string
        The initialization vector to use for encryption or decryption.
        It is ignored for `MODE_ECB` and `MODE_CTR`.
        For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption
        and `block_size` +2 bytes for decryption (in the latter case, it is
        actually the *encrypted* IV which was prefixed to the ciphertext).
        It is mandatory.
        For all other modes, it must be `block_size` bytes long. It is optional and
        when not present it will be given a default value of all zeroes.
      counter : callable
        (*Only* `MODE_CTR`). A stateful function that returns the next
        *counter block*, which is a byte string of `block_size` bytes.
        For better performance, use `Crypto.Util.Counter`.
      segment_size : integer
        (*Only* `MODE_CFB`). The number of bits the plaintext and ciphertext
        are segmented in.
        It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8.
    :Return: a `BlowfishCipher` object
    """
    return BlowfishCipher(key, *args, **kwargs)
#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`.
MODE_ECB = 1
#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`.
MODE_CBC = 2
#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`.
MODE_CFB = 3
#: This mode should not be used. Kept only for backward compatibility;
#: use `MODE_OPENPGP` instead.
MODE_PGP = 4
#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`.
MODE_OFB = 5
#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`.
MODE_CTR = 6
#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`.
MODE_OPENPGP = 7
#: Size of a data block (in bytes)
block_size = 8
#: Size of a key (in bytes): any length from 4 to 56 inclusive.
#: (`xrange` — this module targets Python 2.)
key_size = xrange(4,56+1)
| apache-2.0 |
vianuevm/Webparser | ENV/Lib/site-packages/openid/message.py | 146 | 21562 | """Extension argument processing code
"""
__all__ = ['Message', 'NamespaceMap', 'no_default', 'registerNamespaceAlias',
'OPENID_NS', 'BARE_NS', 'OPENID1_NS', 'OPENID2_NS', 'SREG_URI',
'IDENTIFIER_SELECT']
import copy
import warnings
import urllib
from openid import oidutil
from openid import kvform
try:
ElementTree = oidutil.importElementTree()
except ImportError:
# No elementtree found, so give up, but don't fail to import,
# since we have fallbacks.
ElementTree = None
# This doesn't REALLY belong here, but where is better?
IDENTIFIER_SELECT = 'http://specs.openid.net/auth/2.0/identifier_select'
# URI for Simple Registration extension, the only commonly deployed
# OpenID 1.x extension, and so a special case
SREG_URI = 'http://openid.net/sreg/1.0'
# The OpenID 1.X namespace URI
OPENID1_NS = 'http://openid.net/signon/1.0'
THE_OTHER_OPENID1_NS = 'http://openid.net/signon/1.1'
# Both accepted spellings of the OpenID 1.x namespace.
OPENID1_NAMESPACES = OPENID1_NS, THE_OTHER_OPENID1_NS
# The OpenID 2.0 namespace URI
OPENID2_NS = 'http://specs.openid.net/auth/2.0'
# The namespace consisting of pairs with keys that are prefixed with
# "openid." but not in another namespace.
NULL_NAMESPACE = oidutil.Symbol('Null namespace')
# The null namespace, when it is an allowed OpenID namespace
OPENID_NS = oidutil.Symbol('OpenID namespace')
# The top-level namespace, excluding all pairs with keys that start
# with "openid."
BARE_NS = oidutil.Symbol('Bare namespace')
# Limit, in bytes, of identity provider and return_to URLs, including
# response payload. See OpenID 1.1 specification, Appendix D.
OPENID1_URL_LIMIT = 2047
# All OpenID protocol fields. Used to check namespace aliases.
OPENID_PROTOCOL_FIELDS = [
    'ns', 'mode', 'error', 'return_to', 'contact', 'reference',
    'signed', 'assoc_type', 'session_type', 'dh_modulus', 'dh_gen',
    'dh_consumer_public', 'claimed_id', 'identity', 'realm',
    'invalidate_handle', 'op_endpoint', 'response_nonce', 'sig',
    'assoc_handle', 'trust_root', 'openid',
    ]
class UndefinedOpenIDNamespace(ValueError):
    """Raised if the generic OpenID namespace (OPENID_NS) is accessed
    when there is no OpenID namespace set for this message."""
class InvalidOpenIDNamespace(ValueError):
    """Raised when openid.ns carries an unrecognized value.
    See L{Message.allowed_openid_namespaces} for the recognized values.
    """
    def __str__(self):
        # Append the offending namespace value, if one was supplied.
        if self.args:
            return "Invalid OpenID Namespace %r" % (self.args[0],)
        return "Invalid OpenID Namespace"
# Sentinel used for Message implementation to indicate that getArg
# should raise an exception instead of returning a default.
no_default = object()
# Global namespace / alias registration map. See
# registerNamespaceAlias.
registered_aliases = {}
class NamespaceAliasRegistrationError(Exception):
    """Signals that an alias or namespace URI was already registered
    with a conflicting value in the global alias map."""
def registerNamespaceAlias(namespace_uri, alias):
    """
    Registers a (namespace URI, alias) mapping in a global namespace
    alias map.  Raises NamespaceAliasRegistrationError if either the
    namespace URI or alias has already been registered with a
    different value.  This function is required if you want to use a
    namespace with an OpenID 1 message.

    Re-registering an identical (namespace_uri, alias) pair is a no-op.
    """
    global registered_aliases
    if registered_aliases.get(alias) == namespace_uri:
        # Exact same mapping already present; nothing to do.
        return
    # Use the instance form of ``raise``; the old ``raise Class, arg``
    # statement is Python 2-only syntax.
    if namespace_uri in registered_aliases.values():
        raise NamespaceAliasRegistrationError(
            'Namespace uri %r already registered' % (namespace_uri,))
    if alias in registered_aliases:
        raise NamespaceAliasRegistrationError(
            'Alias %r already registered' % (alias,))
    registered_aliases[alias] = namespace_uri
class Message(object):
    """
    An OpenID protocol message: a bag of (namespace URI, key) -> value
    pairs with converters to/from the wire encodings (POST args, KVForm,
    URL query, HTML form).

    NOTE: this class uses Python 2 idioms throughout (``iteritems``,
    ``unicode``, old-style string checks) and is not Python 3 compatible
    as written.

    In the implementation of this object, None represents the global
    namespace as well as a namespace with no key.
    @cvar namespaces: A dictionary specifying specific
        namespace-URI to alias mappings that should be used when
        generating namespace aliases.
    @ivar ns_args: two-level dictionary of the values in this message,
        grouped by namespace URI. The first level is the namespace
        URI.
    """
    allowed_openid_namespaces = [OPENID1_NS, THE_OTHER_OPENID1_NS, OPENID2_NS]
    def __init__(self, openid_namespace=None):
        """Create an empty Message.
        @raises InvalidOpenIDNamespace: if openid_namespace is not in
            L{Message.allowed_openid_namespaces}
        """
        self.args = {}
        self.namespaces = NamespaceMap()
        if openid_namespace is None:
            self._openid_ns_uri = None
        else:
            # OpenID 1.x messages carry no explicit ns declaration, so the
            # namespace is recorded as "implicit" and omitted from output.
            implicit = openid_namespace in OPENID1_NAMESPACES
            self.setOpenIDNamespace(openid_namespace, implicit)
    def fromPostArgs(cls, args):
        """Construct a Message containing a set of POST arguments.
        Keys without an "openid." prefix are stored in BARE_NS.
        """
        self = cls()
        # Partition into "openid." args and bare args
        openid_args = {}
        for key, value in args.items():
            if isinstance(value, list):
                raise TypeError("query dict must have one value for each key, "
                                "not lists of values. Query is %r" % (args,))
            try:
                prefix, rest = key.split('.', 1)
            except ValueError:
                prefix = None
            if prefix != 'openid':
                self.args[(BARE_NS, key)] = value
            else:
                openid_args[rest] = value
        self._fromOpenIDArgs(openid_args)
        return self
    fromPostArgs = classmethod(fromPostArgs)
    def fromOpenIDArgs(cls, openid_args):
        """Construct a Message from a parsed KVForm message.
        @raises InvalidOpenIDNamespace: if openid.ns is not in
            L{Message.allowed_openid_namespaces}
        """
        self = cls()
        self._fromOpenIDArgs(openid_args)
        return self
    fromOpenIDArgs = classmethod(fromOpenIDArgs)
    def _fromOpenIDArgs(self, openid_args):
        # Populate self from args that already had "openid." stripped.
        ns_args = []
        # Resolve namespaces
        for rest, value in openid_args.iteritems():
            try:
                ns_alias, ns_key = rest.split('.', 1)
            except ValueError:
                # No dot: a top-level key in the null namespace.
                ns_alias = NULL_NAMESPACE
                ns_key = rest
            if ns_alias == 'ns':
                # "openid.ns.<alias>=<uri>" declares a namespace alias.
                self.namespaces.addAlias(value, ns_key)
            elif ns_alias == NULL_NAMESPACE and ns_key == 'ns':
                # null namespace
                self.setOpenIDNamespace(value, False)
            else:
                ns_args.append((ns_alias, ns_key, value))
        # Implicitly set an OpenID namespace definition (OpenID 1)
        if not self.getOpenIDNamespace():
            self.setOpenIDNamespace(OPENID1_NS, True)
        # Actually put the pairs into the appropriate namespaces
        for (ns_alias, ns_key, value) in ns_args:
            ns_uri = self.namespaces.getNamespaceURI(ns_alias)
            if ns_uri is None:
                # we found a namespaced arg without a namespace URI defined
                ns_uri = self._getDefaultNamespace(ns_alias)
                if ns_uri is None:
                    # Unknown alias: keep the alias in the key and file it
                    # under the message's own OpenID namespace.
                    ns_uri = self.getOpenIDNamespace()
                    ns_key = '%s.%s' % (ns_alias, ns_key)
                else:
                    self.namespaces.addAlias(ns_uri, ns_alias, implicit=True)
            self.setArg(ns_uri, ns_key, value)
    def _getDefaultNamespace(self, mystery_alias):
        """OpenID 1 compatibility: look for a default namespace URI to
        use for this alias."""
        global registered_aliases
        # Only try to map an alias to a default if it's an
        # OpenID 1.x message.
        if self.isOpenID1():
            return registered_aliases.get(mystery_alias)
        else:
            return None
    def setOpenIDNamespace(self, openid_ns_uri, implicit):
        """Set the OpenID namespace URI used in this message.
        @raises InvalidOpenIDNamespace: if the namespace is not in
            L{Message.allowed_openid_namespaces}
        """
        if openid_ns_uri not in self.allowed_openid_namespaces:
            raise InvalidOpenIDNamespace(openid_ns_uri)
        self.namespaces.addAlias(openid_ns_uri, NULL_NAMESPACE, implicit)
        self._openid_ns_uri = openid_ns_uri
    def getOpenIDNamespace(self):
        """Return the OpenID namespace URI of this message (or None)."""
        return self._openid_ns_uri
    def isOpenID1(self):
        """True if this message uses either OpenID 1.x namespace."""
        return self.getOpenIDNamespace() in OPENID1_NAMESPACES
    def isOpenID2(self):
        """True if this message uses the OpenID 2.0 namespace."""
        return self.getOpenIDNamespace() == OPENID2_NS
    def fromKVForm(cls, kvform_string):
        """Create a Message from a KVForm string"""
        return cls.fromOpenIDArgs(kvform.kvToDict(kvform_string))
    fromKVForm = classmethod(fromKVForm)
    def copy(self):
        """Return an independent deep copy of this message."""
        return copy.deepcopy(self)
    def toPostArgs(self):
        """Return all arguments with openid. in front of namespaced arguments.
        """
        args = {}
        # Add namespace definitions to the output
        for ns_uri, alias in self.namespaces.iteritems():
            # Implicit namespaces (OpenID 1) are never serialized.
            if self.namespaces.isImplicit(ns_uri):
                continue
            if alias == NULL_NAMESPACE:
                ns_key = 'openid.ns'
            else:
                ns_key = 'openid.ns.' + alias
            args[ns_key] = ns_uri
        for (ns_uri, ns_key), value in self.args.iteritems():
            key = self.getKey(ns_uri, ns_key)
            # Values are encoded to UTF-8 bytes for the wire.
            args[key] = value.encode('UTF-8')
        return args
    def toArgs(self):
        """Return all namespaced arguments, failing if any
        non-namespaced arguments exist."""
        # FIXME - undocumented exception
        post_args = self.toPostArgs()
        kvargs = {}
        for k, v in post_args.iteritems():
            if not k.startswith('openid.'):
                raise ValueError(
                    'This message can only be encoded as a POST, because it '
                    'contains arguments that are not prefixed with "openid."')
            else:
                kvargs[k[7:]] = v
        return kvargs
    def toFormMarkup(self, action_url, form_tag_attrs=None,
                     submit_text="Continue"):
        """Generate HTML form markup that contains the values in this
        message, to be HTTP POSTed as x-www-form-urlencoded UTF-8.
        @param action_url: The URL to which the form will be POSTed
        @type action_url: str
        @param form_tag_attrs: Dictionary of attributes to be added to
            the form tag. 'accept-charset' and 'enctype' have defaults
            that can be overridden. If a value is supplied for
            'action' or 'method', it will be replaced.
        @type form_tag_attrs: {unicode: unicode}
        @param submit_text: The text that will appear on the submit
            button for this form.
        @type submit_text: unicode
        @returns: A string containing (X)HTML markup for a form that
            encodes the values in this Message object.
        @rtype: str or unicode
        """
        if ElementTree is None:
            raise RuntimeError('This function requires ElementTree.')
        assert action_url is not None
        form = ElementTree.Element('form')
        if form_tag_attrs:
            for name, attr in form_tag_attrs.iteritems():
                form.attrib[name] = attr
        # action/method/charset/enctype always win over caller-supplied attrs.
        form.attrib['action'] = action_url
        form.attrib['method'] = 'post'
        form.attrib['accept-charset'] = 'UTF-8'
        form.attrib['enctype'] = 'application/x-www-form-urlencoded'
        for name, value in self.toPostArgs().iteritems():
            attrs = {'type': 'hidden',
                     'name': name,
                     'value': value}
            form.append(ElementTree.Element('input', attrs))
        submit = ElementTree.Element(
            'input', {'type':'submit', 'value':submit_text})
        form.append(submit)
        return ElementTree.tostring(form)
    def toURL(self, base_url):
        """Generate a GET URL with the parameters in this message
        attached as query parameters."""
        return oidutil.appendArgs(base_url, self.toPostArgs())
    def toKVForm(self):
        """Generate a KVForm string that contains the parameters in
        this message. This will fail if the message contains arguments
        outside of the 'openid.' prefix.
        """
        return kvform.dictToKV(self.toArgs())
    def toURLEncoded(self):
        """Generate an x-www-urlencoded string"""
        args = self.toPostArgs().items()
        # Sorted for a deterministic, canonical encoding.
        args.sort()
        return urllib.urlencode(args)
    def _fixNS(self, namespace):
        """Convert an input value into the internally used values of
        this object
        @param namespace: The string or constant to convert
        @type namespace: str or unicode or BARE_NS or OPENID_NS
        @raises UndefinedOpenIDNamespace: if OPENID_NS is requested but
            no OpenID namespace has been set on this message
        """
        if namespace == OPENID_NS:
            if self._openid_ns_uri is None:
                raise UndefinedOpenIDNamespace('OpenID namespace not set')
            else:
                namespace = self._openid_ns_uri
        if namespace != BARE_NS and type(namespace) not in [str, unicode]:
            raise TypeError(
                "Namespace must be BARE_NS, OPENID_NS or a string. got %r"
                % (namespace,))
        if namespace != BARE_NS and ':' not in namespace:
            fmt = 'OpenID 2.0 namespace identifiers SHOULD be URIs. Got %r'
            warnings.warn(fmt % (namespace,), DeprecationWarning)
            if namespace == 'sreg':
                fmt = 'Using %r instead of "sreg" as namespace'
                warnings.warn(fmt % (SREG_URI,), DeprecationWarning,)
                return SREG_URI
        return namespace
    def hasKey(self, namespace, ns_key):
        """Return True if (namespace, ns_key) is present in this message."""
        namespace = self._fixNS(namespace)
        return (namespace, ns_key) in self.args
    def getKey(self, namespace, ns_key):
        """Get the key for a particular namespaced argument"""
        namespace = self._fixNS(namespace)
        if namespace == BARE_NS:
            return ns_key
        ns_alias = self.namespaces.getAlias(namespace)
        # No alias is defined, so no key can exist
        if ns_alias is None:
            return None
        if ns_alias == NULL_NAMESPACE:
            tail = ns_key
        else:
            tail = '%s.%s' % (ns_alias, ns_key)
        return 'openid.' + tail
    def getArg(self, namespace, key, default=None):
        """Get a value for a namespaced key.
        @param namespace: The namespace in the message for this key
        @type namespace: str
        @param key: The key to get within this namespace
        @type key: str
        @param default: The value to use if this key is absent from
            this message. Using the special value
            openid.message.no_default will result in this method
            raising a KeyError instead of returning the default.
        @rtype: str or the type of default
        @raises KeyError: if default is no_default
        @raises UndefinedOpenIDNamespace: if the message has not yet
            had an OpenID namespace set
        """
        namespace = self._fixNS(namespace)
        args_key = (namespace, key)
        try:
            return self.args[args_key]
        except KeyError:
            if default is no_default:
                raise KeyError((namespace, key))
            else:
                return default
    def getArgs(self, namespace):
        """Get the arguments that are defined for this namespace URI
        @returns: mapping from namespaced keys to values
        @returntype: dict
        """
        namespace = self._fixNS(namespace)
        return dict([
            (ns_key, value)
            for ((pair_ns, ns_key), value)
            in self.args.iteritems()
            if pair_ns == namespace
            ])
    def updateArgs(self, namespace, updates):
        """Set multiple key/value pairs in one call
        @param updates: The values to set
        @type updates: {unicode:unicode}
        """
        namespace = self._fixNS(namespace)
        for k, v in updates.iteritems():
            self.setArg(namespace, k, v)
    def setArg(self, namespace, key, value):
        """Set a single argument in this namespace"""
        assert key is not None
        assert value is not None
        namespace = self._fixNS(namespace)
        self.args[(namespace, key)] = value
        if not (namespace is BARE_NS):
            # Make sure the namespace gets an alias for serialization.
            self.namespaces.add(namespace)
    def delArg(self, namespace, key):
        """Remove (namespace, key); raises KeyError if absent."""
        namespace = self._fixNS(namespace)
        del self.args[(namespace, key)]
    def __repr__(self):
        return "<%s.%s %r>" % (self.__class__.__module__,
                               self.__class__.__name__,
                               self.args)
    def __eq__(self, other):
        # NOTE(review): compares only self.args; assumes ``other`` is a
        # Message (no type check) and ignores namespace alias differences.
        return self.args == other.args
    def __ne__(self, other):
        return not (self == other)
    def getAliasedArg(self, aliased_key, default=None):
        """Look up a value by its dotted, aliased wire key (e.g.
        "sreg.email" or "ns.sreg"), honoring no_default like getArg."""
        if aliased_key == 'ns':
            return self.getOpenIDNamespace()
        if aliased_key.startswith('ns.'):
            uri = self.namespaces.getNamespaceURI(aliased_key[3:])
            if uri is None:
                if default == no_default:
                    raise KeyError
                else:
                    return default
            else:
                return uri
        try:
            alias, key = aliased_key.split('.', 1)
        except ValueError:
            # need more than x values to unpack
            ns = None
        else:
            ns = self.namespaces.getNamespaceURI(alias)
        if ns is None:
            # Unknown alias: treat the whole key as belonging to the
            # message's own OpenID namespace.
            key = aliased_key
            ns = self.getOpenIDNamespace()
        return self.getArg(ns, key, default)
class NamespaceMap(object):
    """Maintains a bijective map between namespace uris and aliases.

    Aliases may also be marked "implicit" (OpenID 1 compatibility),
    meaning they exist for lookup but are not serialized on the wire.
    """
    def __init__(self):
        # Forward and reverse maps kept in lock-step by addAlias().
        self.alias_to_namespace = {}
        self.namespace_to_alias = {}
        self.implicit_namespaces = []
    def getAlias(self, namespace_uri):
        """Return the alias for namespace_uri, or None."""
        return self.namespace_to_alias.get(namespace_uri)
    def getNamespaceURI(self, alias):
        """Return the namespace URI for alias, or None."""
        return self.alias_to_namespace.get(alias)
    def iterNamespaceURIs(self):
        """Return an iterator over the namespace URIs"""
        return iter(self.namespace_to_alias)
    def iterAliases(self):
        """Return an iterator over the aliases"""
        return iter(self.alias_to_namespace)
    def iteritems(self):
        """Iterate over the mapping
        @returns: iterator of (namespace_uri, alias)
        """
        return self.namespace_to_alias.iteritems()
    def addAlias(self, namespace_uri, desired_alias, implicit=False):
        """Add an alias from this namespace URI to the desired alias
        @raises KeyError: if either side of the pair is already mapped
            to a different partner
        """
        # Check that desired_alias is not an openid protocol field as
        # per the spec.
        assert desired_alias not in OPENID_PROTOCOL_FIELDS, \
            "%r is not an allowed namespace alias" % (desired_alias,)
        # Check that desired_alias does not contain a period as per
        # the spec.
        if type(desired_alias) in [str, unicode]:
            assert '.' not in desired_alias, \
                "%r must not contain a dot" % (desired_alias,)
        # Check that there is not a namespace already defined for
        # the desired alias
        current_namespace_uri = self.alias_to_namespace.get(desired_alias)
        if (current_namespace_uri is not None
            and current_namespace_uri != namespace_uri):
            fmt = ('Cannot map %r to alias %r. '
                   '%r is already mapped to alias %r')
            msg = fmt % (
                namespace_uri,
                desired_alias,
                current_namespace_uri,
                desired_alias)
            raise KeyError(msg)
        # Check that there is not already a (different) alias for
        # this namespace URI
        alias = self.namespace_to_alias.get(namespace_uri)
        if alias is not None and alias != desired_alias:
            fmt = ('Cannot map %r to alias %r. '
                   'It is already mapped to alias %r')
            raise KeyError(fmt % (namespace_uri, desired_alias, alias))
        assert (desired_alias == NULL_NAMESPACE or
                type(desired_alias) in [str, unicode]), repr(desired_alias)
        assert namespace_uri not in self.implicit_namespaces
        self.alias_to_namespace[desired_alias] = namespace_uri
        self.namespace_to_alias[namespace_uri] = desired_alias
        if implicit:
            self.implicit_namespaces.append(namespace_uri)
        return desired_alias
    def add(self, namespace_uri):
        """Add this namespace URI to the mapping, without caring what
        alias it ends up with"""
        # See if this namespace is already mapped to an alias
        alias = self.namespace_to_alias.get(namespace_uri)
        if alias is not None:
            return alias
        # Fall back to generating a numerical alias
        i = 0
        while True:
            alias = 'ext' + str(i)
            try:
                self.addAlias(namespace_uri, alias)
            except KeyError:
                # extN already taken; try the next number.
                i += 1
            else:
                return alias
        assert False, "Not reached"
    def isDefined(self, namespace_uri):
        """True if namespace_uri has an alias in this map."""
        return namespace_uri in self.namespace_to_alias
    def __contains__(self, namespace_uri):
        return self.isDefined(namespace_uri)
    def isImplicit(self, namespace_uri):
        """True if namespace_uri was registered as implicit (OpenID 1)."""
        return namespace_uri in self.implicit_namespaces
| gpl-2.0 |
NickelMedia/phantomjs | src/qt/qtwebkit/Tools/TestResultServer/handlers/menu.py | 129 | 2770 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
# [label, URL] pairs rendered by templates/menu.html (see Menu.get below).
dashboards = [
    ["Results", "/dashboards/flakiness_dashboard.html"],
    ["Timeline", "/dashboards/timeline_explorer.html"],
    ["Treemap", "/dashboards/treemap.html"],
    ["Stats", "/dashboards/aggregate_results.html"],
]
# [label, URL] pairs for the test-file administration links.
menu = [
    ["List of test files", "/testfile"],
    ["List of results.json files", "/testfile?name=results.json"],
    ["Upload test file", "/testfile/uploadform"],
]
class Menu(webapp.RequestHandler):
    """Renders the landing page listing dashboards and admin links."""

    def get(self):
        current_user = users.get_current_user()
        uri = self.request.uri
        if current_user:
            email = current_user.email()
            link_text = "Sign out"
            link_url = users.create_logout_url(uri)
        else:
            email = ""
            link_text = "Sign in"
            link_url = users.create_login_url(uri)
        context = {
            "user_email": email,
            "login_text": link_text,
            "login_url": link_url,
            "menu": menu,
            "dashboards": dashboards,
        }
        self.response.out.write(
            template.render("templates/menu.html", context))
| bsd-3-clause |
damiansoriano/odoo | openerp/tools/osutil.py | 103 | 5594 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""
Some functions related to the os and os.path module
"""
from contextlib import contextmanager
import os
from os.path import join as opj
import shutil
import tempfile
import zipfile
if os.name == 'nt':
import ctypes
import win32service as ws
import win32serviceutil as wsu
def listdir(dir, recursive=False):
    """Return the entries of *dir*; when recursive, return all file
    paths (relative to *dir*), following symlinked subdirectories."""
    dir = os.path.normpath(dir)
    if not recursive:
        return os.listdir(dir)
    entries = []
    for base, _dirs, files in walksymlinks(dir):
        rel = base[len(dir) + 1:]
        entries.extend([opj(rel, name) for name in files])
    return entries
def walksymlinks(top, topdown=True, onerror=None):
    """
    Same as os.walk() but additionally descends into directories that
    are symbolic links (os.walk() does not follow them by default).

    attention: within each directory, all symlinked subtrees are walked
    before the normal (non-link) subdirectories.
    """
    for dirpath, dirnames, filenames in os.walk(top, topdown, onerror):
        if topdown:
            yield dirpath, dirnames, filenames
        # Recurse explicitly into symlinked subdirectories, which
        # os.walk() reports in ``dirnames`` but never enters.
        symlinks = [name for name in dirnames
                    if os.path.islink(os.path.join(dirpath, name))]
        for link in symlinks:
            for entry in walksymlinks(os.path.join(dirpath, link), topdown, onerror):
                yield entry
        if not topdown:
            yield dirpath, dirnames, filenames
@contextmanager
def tempdir():
    """Context manager yielding a fresh temporary directory that is
    removed (with its contents) on exit, even on error."""
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
def zip_dir(path, stream, include_dir=True): # TODO add ignore list
    """Recursively zip the directory *path* into the writable *stream*.

    @param include_dir: when True, archived names keep the top-level
        directory name itself; otherwise names are relative to *path*.
    Files with extensions .pyc/.pyo/.swp/.DS_Store are skipped.
    """
    path = os.path.normpath(path)
    # Number of leading characters to strip from each archived name.
    len_prefix = len(os.path.dirname(path)) if include_dir else len(path)
    if len_prefix:
        len_prefix += 1  # also strip the trailing path separator
    with zipfile.ZipFile(stream, 'w', compression=zipfile.ZIP_DEFLATED, allowZip64=True) as zipf:
        for dirpath, dirnames, filenames in os.walk(path):
            for fname in filenames:
                bname, ext = os.path.splitext(fname)
                # Extension-less files are filtered on their full name.
                ext = ext or bname
                if ext not in ['.pyc', '.pyo', '.swp', '.DS_Store']:
                    # Use a distinct name; the original rebound ``path``
                    # here, shadowing the function parameter.
                    fpath = os.path.normpath(os.path.join(dirpath, fname))
                    if os.path.isfile(fpath):
                        zipf.write(fpath, fpath[len_prefix:])
# Portable parent-pid lookup and NT-service detection.  On POSIX we can
# use os.getppid() directly; Windows needs a toolhelp snapshot instead.
if os.name != 'nt':
    getppid = os.getppid
    is_running_as_nt_service = lambda: False
else:
    # based on http://mail.python.org/pipermail/python-win32/2007-June/006174.html
    _TH32CS_SNAPPROCESS = 0x00000002
    class _PROCESSENTRY32(ctypes.Structure):
        # Mirrors the Win32 PROCESSENTRY32 struct used by Process32First/Next.
        _fields_ = [("dwSize", ctypes.c_ulong),
                    ("cntUsage", ctypes.c_ulong),
                    ("th32ProcessID", ctypes.c_ulong),
                    ("th32DefaultHeapID", ctypes.c_ulong),
                    ("th32ModuleID", ctypes.c_ulong),
                    ("cntThreads", ctypes.c_ulong),
                    ("th32ParentProcessID", ctypes.c_ulong),
                    ("pcPriClassBase", ctypes.c_ulong),
                    ("dwFlags", ctypes.c_ulong),
                    ("szExeFile", ctypes.c_char * 260)]
    def getppid():
        """Return the parent process id by scanning a process snapshot
        for the entry whose pid matches our own."""
        CreateToolhelp32Snapshot = ctypes.windll.kernel32.CreateToolhelp32Snapshot
        Process32First = ctypes.windll.kernel32.Process32First
        Process32Next = ctypes.windll.kernel32.Process32Next
        CloseHandle = ctypes.windll.kernel32.CloseHandle
        hProcessSnap = CreateToolhelp32Snapshot(_TH32CS_SNAPPROCESS, 0)
        current_pid = os.getpid()
        try:
            pe32 = _PROCESSENTRY32()
            pe32.dwSize = ctypes.sizeof(_PROCESSENTRY32)
            if not Process32First(hProcessSnap, ctypes.byref(pe32)):
                raise OSError('Failed getting first process.')
            while True:
                if pe32.th32ProcessID == current_pid:
                    return pe32.th32ParentProcessID
                if not Process32Next(hProcessSnap, ctypes.byref(pe32)):
                    # Current pid not found in snapshot; no parent known.
                    return None
        finally:
            CloseHandle(hProcessSnap)
    from contextlib import contextmanager
    from openerp.release import nt_service_name
    def is_running_as_nt_service():
        """True when our parent process is the registered NT service."""
        @contextmanager
        def close_srv(srv):
            # Ensure service handles are always released.
            try:
                yield srv
            finally:
                ws.CloseServiceHandle(srv)
        with close_srv(ws.OpenSCManager(None, None, ws.SC_MANAGER_ALL_ACCESS)) as hscm:
            with close_srv(wsu.SmartOpenService(hscm, nt_service_name, ws.SERVICE_ALL_ACCESS)) as hs:
                info = ws.QueryServiceStatusEx(hs)
                return info['ProcessId'] == getppid()
# Ad-hoc manual check when this module is run directly.
if __name__ == '__main__':
    from pprint import pprint as pp
    pp(listdir('../report', True))
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
partofthething/home-assistant | tests/components/hue/test_device_trigger.py | 6 | 5675 | """The tests for Philips Hue device triggers."""
import pytest
from homeassistant.components import hue
import homeassistant.components.automation as automation
from homeassistant.components.hue import device_trigger
from homeassistant.setup import async_setup_component
from .conftest import setup_bridge_for_sensors as setup_bridge
from .test_sensor_base import HUE_DIMMER_REMOTE_1, HUE_TAP_REMOTE_1
from tests.common import (
assert_lists_same,
async_get_device_automations,
async_mock_service,
mock_device_registry,
)
from tests.components.blueprint.conftest import stub_blueprint_populate # noqa: F401
REMOTES_RESPONSE = {"7": HUE_TAP_REMOTE_1, "8": HUE_DIMMER_REMOTE_1}
@pytest.fixture
def device_reg(hass):
    """Provide an empty, loaded device registry for the tests."""
    registry = mock_device_registry(hass)
    return registry
@pytest.fixture
def calls(hass):
    """Collect calls made to a mocked test.automation service."""
    mocked = async_mock_service(hass, "test", "automation")
    return mocked
async def test_get_triggers(hass, mock_bridge, device_reg):
    """Test we get the expected triggers from a hue remote."""
    mock_bridge.mock_sensor_responses.append(REMOTES_RESPONSE)
    await setup_bridge(hass, mock_bridge)
    assert len(mock_bridge.mock_requests) == 1
    # 2 remotes, just 1 battery sensor
    assert len(hass.states.async_all()) == 1
    # Get triggers for specific tap switch
    hue_tap_device = device_reg.async_get_device(
        {(hue.DOMAIN, "00:00:00:00:00:44:23:08")}
    )
    triggers = await async_get_device_automations(hass, "trigger", hue_tap_device.id)
    # Tap remotes expose one trigger per (type, subtype) in HUE_TAP_REMOTE.
    expected_triggers = [
        {
            "platform": "device",
            "domain": hue.DOMAIN,
            "device_id": hue_tap_device.id,
            "type": t_type,
            "subtype": t_subtype,
        }
        for t_type, t_subtype in device_trigger.HUE_TAP_REMOTE.keys()
    ]
    assert_lists_same(triggers, expected_triggers)
    # Get triggers for specific dimmer switch
    hue_dimmer_device = device_reg.async_get_device(
        {(hue.DOMAIN, "00:17:88:01:10:3e:3a:dc")}
    )
    triggers = await async_get_device_automations(hass, "trigger", hue_dimmer_device.id)
    # Dimmer remotes additionally expose a battery_level sensor trigger.
    trigger_batt = {
        "platform": "device",
        "domain": "sensor",
        "device_id": hue_dimmer_device.id,
        "type": "battery_level",
        "entity_id": "sensor.hue_dimmer_switch_1_battery_level",
    }
    expected_triggers = [
        trigger_batt,
        *[
            {
                "platform": "device",
                "domain": hue.DOMAIN,
                "device_id": hue_dimmer_device.id,
                "type": t_type,
                "subtype": t_subtype,
            }
            for t_type, t_subtype in device_trigger.HUE_DIMMER_REMOTE.keys()
        ],
    ]
    assert_lists_same(triggers, expected_triggers)
async def test_if_fires_on_state_change(hass, mock_bridge, device_reg, calls):
    """Test for button press trigger firing."""
    mock_bridge.mock_sensor_responses.append(REMOTES_RESPONSE)
    await setup_bridge(hass, mock_bridge)
    assert len(mock_bridge.mock_requests) == 1
    assert len(hass.states.async_all()) == 1
    # Set an automation with a specific tap switch trigger
    hue_tap_device = device_reg.async_get_device(
        {(hue.DOMAIN, "00:00:00:00:00:44:23:08")}
    )
    # Two automations: one bound to the real tap device (button_4), one
    # bound to a non-existent device id that must never fire.
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {
                        "platform": "device",
                        "domain": hue.DOMAIN,
                        "device_id": hue_tap_device.id,
                        "type": "remote_button_short_press",
                        "subtype": "button_4",
                    },
                    "action": {
                        "service": "test.automation",
                        "data_template": {
                            "some": "B4 - {{ trigger.event.data.event }}"
                        },
                    },
                },
                {
                    "trigger": {
                        "platform": "device",
                        "domain": hue.DOMAIN,
                        "device_id": "mock-device-id",
                        "type": "remote_button_short_press",
                        "subtype": "button_1",
                    },
                    "action": {
                        "service": "test.automation",
                        "data_template": {
                            "some": "B1 - {{ trigger.event.data.event }}"
                        },
                    },
                },
            ]
        },
    )
    # Fake that the remote is being pressed.
    # buttonevent 18 corresponds to the button_4 short-press subscribed above.
    new_sensor_response = dict(REMOTES_RESPONSE)
    new_sensor_response["7"]["state"] = {
        "buttonevent": 18,
        "lastupdated": "2019-12-28T22:58:02",
    }
    mock_bridge.mock_sensor_responses.append(new_sensor_response)
    # Force updates to run again
    await mock_bridge.sensor_manager.coordinator.async_refresh()
    await hass.async_block_till_done()
    assert len(mock_bridge.mock_requests) == 2
    assert len(calls) == 1
    assert calls[0].data["some"] == "B4 - 18"
    # Fake another button press.
    # buttonevent 34 matches no subscribed trigger, so call count stays 1.
    new_sensor_response = dict(REMOTES_RESPONSE)
    new_sensor_response["7"]["state"] = {
        "buttonevent": 34,
        "lastupdated": "2019-12-28T22:58:05",
    }
    mock_bridge.mock_sensor_responses.append(new_sensor_response)
    # Force updates to run again
    await mock_bridge.sensor_manager.coordinator.async_refresh()
    await hass.async_block_till_done()
    assert len(mock_bridge.mock_requests) == 3
    assert len(calls) == 1
| mit |
111pontes/ydk-py | cisco-ios-xe/ydk/models/cisco_ios_xe/CISCO_UBE_MIB.py | 1 | 3416 | """ CISCO_UBE_MIB
This MIB describes objects used for managing Cisco
Unified Border Element (CUBE).
The Cisco Unified Border Element (CUBE) is a Cisco
IOS Session Border Controller (SBC) that interconnects
independent voice over IP (VoIP) and video over IP
networks for data, voice, and video transport
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
# NOTE(review): this class appears auto-generated (ydk-gen style, see the
# _meta_info hooks); avoid hand-editing logic here.
class CiscoUbeMib(object):
    """
    Root model object for the CISCO-UBE-MIB.

    .. attribute:: ciscoubemibobjects

    	**type**\:   :py:class:`Ciscoubemibobjects <ydk.models.cisco_ios_xe.CISCO_UBE_MIB.CiscoUbeMib.Ciscoubemibobjects>`
    """
    _prefix = 'CISCO-UBE-MIB'
    _revision = '2010-11-29'
    def __init__(self):
        self.ciscoubemibobjects = CiscoUbeMib.Ciscoubemibobjects()
        self.ciscoubemibobjects.parent = self
    class Ciscoubemibobjects(object):
        """
        Scalar objects of the MIB.

        .. attribute:: cubeenabled

        	Whether the Cisco Unified Border Element (CUBE) is enabled on
        	the device ('true' = enabled).

        	**type**\:  bool

        .. attribute:: cubetotalsessionallowed

        	Total number of CUBE sessions allowed on the device; zero
        	means no sessions are allowed.

        	**type**\:  int

        	**range:** 0..999999

        	**units**\: session

        .. attribute:: cubeversion

        	Version of Cisco Unified Border Element on the device.

        	**type**\:  str
        """
        _prefix = 'CISCO-UBE-MIB'
        _revision = '2010-11-29'
        def __init__(self):
            self.parent = None
            self.cubeenabled = None
            self.cubetotalsessionallowed = None
            self.cubeversion = None
        @property
        def _common_path(self):
            return '/CISCO-UBE-MIB:CISCO-UBE-MIB/CISCO-UBE-MIB:ciscoUbeMIBObjects'
        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False
        def _has_data(self):
            # True when any scalar leaf has been populated.
            if self.cubeenabled is not None:
                return True
            if self.cubetotalsessionallowed is not None:
                return True
            if self.cubeversion is not None:
                return True
            return False
        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xe._meta import _CISCO_UBE_MIB as meta
            return meta._meta_table['CiscoUbeMib.Ciscoubemibobjects']['meta_info']
    @property
    def _common_path(self):
        return '/CISCO-UBE-MIB:CISCO-UBE-MIB'
    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False
    def _has_data(self):
        # Delegates to the nested objects container.
        if self.ciscoubemibobjects is not None and self.ciscoubemibobjects._has_data():
            return True
        return False
    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xe._meta import _CISCO_UBE_MIB as meta
        return meta._meta_table['CiscoUbeMib']['meta_info']
| apache-2.0 |
kouaw/CouchPotatoServer | libs/pyutil/test/current/test_iputil.py | 106 | 1287 | #!/usr/bin/env python
# The whole module is wrapped in a try/else so it degrades to a skip
# message when Twisted is unavailable.  ``except E as name`` and the
# parenthesized print form work on both Python 2.6+ and Python 3,
# unlike the original ``except E, name`` / print-statement syntax.
try:
    from twisted.trial import unittest
    unittest # http://divmod.org/trac/ticket/1499
except ImportError as le:
    print("Skipping test_iputil since it requires Twisted and Twisted could not be imported: %s" % (le,))
else:
    from pyutil import iputil, testutil
    import re

    DOTTED_QUAD_RE=re.compile("^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$")

    class ListAddresses(testutil.SignalMixin):
        def test_get_local_ip_for(self):
            addr = iputil.get_local_ip_for('127.0.0.1')
            self.failUnless(DOTTED_QUAD_RE.match(addr))

        def test_list_async(self):
            try:
                from twisted.trial import unittest
                unittest # http://divmod.org/trac/ticket/1499
                from pyutil import iputil
            except ImportError as le:
                raise unittest.SkipTest("iputil could not be imported (probably because its dependency, Twisted, is not installed). %s" % (le,))
            d = iputil.get_local_addresses_async()
            def _check(addresses):
                self.failUnless(len(addresses) >= 1) # always have localhost
                self.failUnless("127.0.0.1" in addresses, addresses)
            d.addCallbacks(_check)
            return d
        test_list_async.timeout=2
| gpl-3.0 |
zhenzhai/edx-platform | common/djangoapps/microsite_configuration/tests/test_logic.py | 19 | 1559 | """
Some additional unit tests for Microsite logic. The LMS covers some of the Microsite testing, this adds
some additional coverage
"""
import ddt
from mock import patch
from microsite_configuration.microsite import (
get_value_for_org,
get_backend,
)
from microsite_configuration.backends.base import BaseMicrositeBackend
from microsite_configuration.tests.tests import (
DatabaseMicrositeTestCase,
MICROSITE_BACKENDS,
)
@ddt.ddt
class TestMicrosites(DatabaseMicrositeTestCase):
    """
    Run through some Microsite logic
    """
    def setUp(self):
        super(TestMicrosites, self).setUp()
    @ddt.data(*MICROSITE_BACKENDS)
    def test_get_value_for_org_when_microsite_has_no_org(self, site_backend):
        """
        Make sure default value is returned if there's no Microsite ORG match
        """
        # Patch the active backend so the same assertions run once per
        # backend listed in MICROSITE_BACKENDS (via ddt).
        with patch('microsite_configuration.microsite.BACKEND',
                   get_backend(site_backend, BaseMicrositeBackend)):
            value = get_value_for_org("BogusX", "university", "default_value")
            self.assertEquals(value, "default_value")
    @ddt.data(*MICROSITE_BACKENDS)
    def test_get_value_for_org(self, site_backend):
        """
        Make sure get_value_for_org return value of org if it present.
        """
        with patch('microsite_configuration.microsite.BACKEND',
                   get_backend(site_backend, BaseMicrositeBackend)):
            value = get_value_for_org("TestMicrositeX", "university", "default_value")
            self.assertEquals(value, "test_microsite")
| agpl-3.0 |
Zlash65/erpnext | erpnext/accounts/doctype/purchase_invoice/test_purchase_invoice.py | 2 | 32514 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import unittest
import frappe, erpnext
import frappe.model
from erpnext.accounts.doctype.payment_entry.payment_entry import get_payment_entry
from frappe.utils import cint, flt, today, nowdate, add_days
import frappe.defaults
from erpnext.stock.doctype.purchase_receipt.test_purchase_receipt import set_perpetual_inventory, \
test_records as pr_test_records
from erpnext.controllers.accounts_controller import get_payment_terms
from erpnext.exceptions import InvalidCurrency
from erpnext.stock.doctype.stock_entry.test_stock_entry import get_qty_after_transaction
from erpnext.accounts.doctype.account.test_account import get_inventory_account
test_dependencies = ["Item", "Cost Center", "Payment Term", "Payment Terms Template"]
test_ignore = ["Serial No"]
class TestPurchaseInvoice(unittest.TestCase):
@classmethod
def setUpClass(self):
unlink_payment_on_cancel_of_invoice()
frappe.db.set_value("Buying Settings", None, "allow_multiple_items", 1)
@classmethod
def tearDownClass(self):
unlink_payment_on_cancel_of_invoice(0)
def test_gl_entries_without_perpetual_inventory(self):
frappe.db.set_value("Company", "_Test Company", "round_off_account", "Round Off - _TC")
wrapper = frappe.copy_doc(test_records[0])
set_perpetual_inventory(0, wrapper.company)
self.assertTrue(not cint(erpnext.is_perpetual_inventory_enabled(wrapper.company)))
wrapper.insert()
wrapper.submit()
wrapper.load_from_db()
dl = wrapper
expected_gl_entries = {
"_Test Payable - _TC": [0, 1512.0],
"_Test Account Cost for Goods Sold - _TC": [1250, 0],
"_Test Account Shipping Charges - _TC": [100, 0],
"_Test Account Excise Duty - _TC": [140, 0],
"_Test Account Education Cess - _TC": [2.8, 0],
"_Test Account S&H Education Cess - _TC": [1.4, 0],
"_Test Account CST - _TC": [29.88, 0],
"_Test Account VAT - _TC": [156.25, 0],
"_Test Account Discount - _TC": [0, 168.03],
"Round Off - _TC": [0, 0.3]
}
gl_entries = frappe.db.sql("""select account, debit, credit from `tabGL Entry`
where voucher_type = 'Purchase Invoice' and voucher_no = %s""", dl.name, as_dict=1)
for d in gl_entries:
self.assertEqual([d.debit, d.credit], expected_gl_entries.get(d.account))
def test_gl_entries_with_perpetual_inventory(self):
pi = frappe.copy_doc(test_records[1])
set_perpetual_inventory(1, pi.company)
self.assertTrue(cint(erpnext.is_perpetual_inventory_enabled(pi.company)), 1)
pi.insert()
pi.submit()
self.check_gle_for_pi(pi.name)
set_perpetual_inventory(0, pi.company)
def test_terms_added_after_save(self):
pi = frappe.copy_doc(test_records[1])
pi.insert()
self.assertTrue(pi.payment_schedule)
self.assertEqual(pi.payment_schedule[0].due_date, pi.due_date)
def test_payment_entry_unlink_against_purchase_invoice(self):
from erpnext.accounts.doctype.payment_entry.test_payment_entry import get_payment_entry
unlink_payment_on_cancel_of_invoice(0)
pi_doc = make_purchase_invoice()
pe = get_payment_entry("Purchase Invoice", pi_doc.name, bank_account="_Test Bank - _TC")
pe.reference_no = "1"
pe.reference_date = nowdate()
pe.paid_from_account_currency = pi_doc.currency
pe.paid_to_account_currency = pi_doc.currency
pe.source_exchange_rate = 1
pe.target_exchange_rate = 1
pe.paid_amount = pi_doc.grand_total
pe.save(ignore_permissions=True)
pe.submit()
pi_doc = frappe.get_doc('Purchase Invoice', pi_doc.name)
self.assertRaises(frappe.LinkExistsError, pi_doc.cancel)
unlink_payment_on_cancel_of_invoice()
def test_purchase_invoice_for_blocked_supplier(self):
supplier = frappe.get_doc('Supplier', '_Test Supplier')
supplier.on_hold = 1
supplier.save()
self.assertRaises(frappe.ValidationError, make_purchase_invoice)
supplier.on_hold = 0
supplier.save()
def test_purchase_invoice_for_blocked_supplier_invoice(self):
supplier = frappe.get_doc('Supplier', '_Test Supplier')
supplier.on_hold = 1
supplier.hold_type = 'Invoices'
supplier.save()
self.assertRaises(frappe.ValidationError, make_purchase_invoice)
supplier.on_hold = 0
supplier.save()
def test_purchase_invoice_for_blocked_supplier_payment(self):
supplier = frappe.get_doc('Supplier', '_Test Supplier')
supplier.on_hold = 1
supplier.hold_type = 'Payments'
supplier.save()
pi = make_purchase_invoice()
self.assertRaises(
frappe.ValidationError, get_payment_entry, dt='Purchase Invoice', dn=pi.name, bank_account="_Test Bank - _TC")
supplier.on_hold = 0
supplier.save()
def test_purchase_invoice_for_blocked_supplier_payment_today_date(self):
supplier = frappe.get_doc('Supplier', '_Test Supplier')
supplier.on_hold = 1
supplier.hold_type = 'Payments'
supplier.release_date = nowdate()
supplier.save()
pi = make_purchase_invoice()
self.assertRaises(
frappe.ValidationError, get_payment_entry, dt='Purchase Invoice', dn=pi.name,
bank_account="_Test Bank - _TC")
supplier.on_hold = 0
supplier.save()
def test_purchase_invoice_for_blocked_supplier_payment_past_date(self):
# this test is meant to fail only if something fails in the try block
with self.assertRaises(Exception):
try:
supplier = frappe.get_doc('Supplier', '_Test Supplier')
supplier.on_hold = 1
supplier.hold_type = 'Payments'
supplier.release_date = '2018-03-01'
supplier.save()
pi = make_purchase_invoice()
get_payment_entry('Purchase Invoice', dn=pi.name, bank_account="_Test Bank - _TC")
supplier.on_hold = 0
supplier.save()
except:
pass
else:
raise Exception
def test_purchase_invoice_blocked_invoice_must_be_in_future(self):
pi = make_purchase_invoice(do_not_save=True)
pi.release_date = nowdate()
self.assertRaises(frappe.ValidationError, pi.save)
pi.release_date = ''
pi.save()
def test_purchase_invoice_temporary_blocked(self):
pi = make_purchase_invoice(do_not_save=True)
pi.release_date = add_days(nowdate(), 10)
pi.save()
pi.submit()
pe = get_payment_entry('Purchase Invoice', dn=pi.name, bank_account="_Test Bank - _TC")
self.assertRaises(frappe.ValidationError, pe.save)
def test_purchase_invoice_explicit_block(self):
pi = make_purchase_invoice()
pi.block_invoice()
self.assertEqual(pi.on_hold, 1)
pi.unblock_invoice()
self.assertEqual(pi.on_hold, 0)
def test_gl_entries_with_perpetual_inventory_against_pr(self):
pr = frappe.copy_doc(pr_test_records[0])
set_perpetual_inventory(1, pr.company)
self.assertTrue(cint(erpnext.is_perpetual_inventory_enabled(pr.company)), 1)
pr.submit()
pi = frappe.copy_doc(test_records[1])
for d in pi.get("items"):
d.purchase_receipt = pr.name
pi.insert()
pi.submit()
self.check_gle_for_pi(pi.name)
set_perpetual_inventory(0, pr.company)
def check_gle_for_pi(self, pi):
gl_entries = frappe.db.sql("""select account, debit, credit
from `tabGL Entry` where voucher_type='Purchase Invoice' and voucher_no=%s
order by account asc""", pi, as_dict=1)
self.assertTrue(gl_entries)
expected_values = dict((d[0], d) for d in [
["_Test Payable - _TC", 0, 720],
["Stock Received But Not Billed - _TC", 500.0, 0],
["_Test Account Shipping Charges - _TC", 100.0, 0],
["_Test Account VAT - _TC", 120.0, 0],
])
for i, gle in enumerate(gl_entries):
self.assertEqual(expected_values[gle.account][0], gle.account)
self.assertEqual(expected_values[gle.account][1], gle.debit)
self.assertEqual(expected_values[gle.account][2], gle.credit)
def test_purchase_invoice_change_naming_series(self):
pi = frappe.copy_doc(test_records[1])
pi.insert()
pi.naming_series = 'TEST-'
self.assertRaises(frappe.CannotChangeConstantError, pi.save)
pi = frappe.copy_doc(test_records[0])
pi.insert()
pi.naming_series = 'TEST-'
self.assertRaises(frappe.CannotChangeConstantError, pi.save)
def test_gl_entries_with_aia_for_non_stock_items(self):
pi = frappe.copy_doc(test_records[1])
set_perpetual_inventory(1, pi.company)
self.assertTrue(cint(erpnext.is_perpetual_inventory_enabled(pi.company)), 1)
pi.get("items")[0].item_code = "_Test Non Stock Item"
pi.get("items")[0].expense_account = "_Test Account Cost for Goods Sold - _TC"
pi.get("taxes").pop(0)
pi.get("taxes").pop(1)
pi.insert()
pi.submit()
gl_entries = frappe.db.sql("""select account, debit, credit
from `tabGL Entry` where voucher_type='Purchase Invoice' and voucher_no=%s
order by account asc""", pi.name, as_dict=1)
self.assertTrue(gl_entries)
expected_values = sorted([
["_Test Payable - _TC", 0, 620],
["_Test Account Cost for Goods Sold - _TC", 500.0, 0],
["_Test Account VAT - _TC", 120.0, 0],
])
for i, gle in enumerate(gl_entries):
self.assertEqual(expected_values[i][0], gle.account)
self.assertEqual(expected_values[i][1], gle.debit)
self.assertEqual(expected_values[i][2], gle.credit)
set_perpetual_inventory(0, pi.company)
def test_purchase_invoice_calculation(self):
pi = frappe.copy_doc(test_records[0])
pi.insert()
pi.load_from_db()
expected_values = [
["_Test Item Home Desktop 100", 90, 59],
["_Test Item Home Desktop 200", 135, 177]
]
for i, item in enumerate(pi.get("items")):
self.assertEqual(item.item_code, expected_values[i][0])
self.assertEqual(item.item_tax_amount, expected_values[i][1])
self.assertEqual(item.valuation_rate, expected_values[i][2])
self.assertEqual(pi.base_net_total, 1250)
# tax amounts
expected_values = [
["_Test Account Shipping Charges - _TC", 100, 1350],
["_Test Account Customs Duty - _TC", 125, 1350],
["_Test Account Excise Duty - _TC", 140, 1490],
["_Test Account Education Cess - _TC", 2.8, 1492.8],
["_Test Account S&H Education Cess - _TC", 1.4, 1494.2],
["_Test Account CST - _TC", 29.88, 1524.08],
["_Test Account VAT - _TC", 156.25, 1680.33],
["_Test Account Discount - _TC", 168.03, 1512.30],
]
for i, tax in enumerate(pi.get("taxes")):
self.assertEqual(tax.account_head, expected_values[i][0])
self.assertEqual(tax.tax_amount, expected_values[i][1])
self.assertEqual(tax.total, expected_values[i][2])
def test_purchase_invoice_with_subcontracted_item(self):
wrapper = frappe.copy_doc(test_records[0])
wrapper.get("items")[0].item_code = "_Test FG Item"
wrapper.insert()
wrapper.load_from_db()
expected_values = [
["_Test FG Item", 90, 59],
["_Test Item Home Desktop 200", 135, 177]
]
for i, item in enumerate(wrapper.get("items")):
self.assertEqual(item.item_code, expected_values[i][0])
self.assertEqual(item.item_tax_amount, expected_values[i][1])
self.assertEqual(item.valuation_rate, expected_values[i][2])
self.assertEqual(wrapper.base_net_total, 1250)
# tax amounts
expected_values = [
["_Test Account Shipping Charges - _TC", 100, 1350],
["_Test Account Customs Duty - _TC", 125, 1350],
["_Test Account Excise Duty - _TC", 140, 1490],
["_Test Account Education Cess - _TC", 2.8, 1492.8],
["_Test Account S&H Education Cess - _TC", 1.4, 1494.2],
["_Test Account CST - _TC", 29.88, 1524.08],
["_Test Account VAT - _TC", 156.25, 1680.33],
["_Test Account Discount - _TC", 168.03, 1512.30],
]
for i, tax in enumerate(wrapper.get("taxes")):
self.assertEqual(tax.account_head, expected_values[i][0])
self.assertEqual(tax.tax_amount, expected_values[i][1])
self.assertEqual(tax.total, expected_values[i][2])
def test_purchase_invoice_with_advance(self):
from erpnext.accounts.doctype.journal_entry.test_journal_entry \
import test_records as jv_test_records
jv = frappe.copy_doc(jv_test_records[1])
jv.insert()
jv.submit()
pi = frappe.copy_doc(test_records[0])
pi.disable_rounded_total = 1
pi.allocate_advances_automatically = 0
pi.append("advances", {
"reference_type": "Journal Entry",
"reference_name": jv.name,
"reference_row": jv.get("accounts")[0].name,
"advance_amount": 400,
"allocated_amount": 300,
"remarks": jv.remark
})
pi.insert()
self.assertEqual(pi.outstanding_amount, 1212.30)
pi.disable_rounded_total = 0
pi.get("payment_schedule")[0].payment_amount = 1512.0
pi.save()
self.assertEqual(pi.outstanding_amount, 1212.0)
pi.submit()
pi.load_from_db()
self.assertTrue(frappe.db.sql("""select name from `tabJournal Entry Account`
where reference_type='Purchase Invoice'
and reference_name=%s and debit_in_account_currency=300""", pi.name))
pi.cancel()
self.assertFalse(frappe.db.sql("""select name from `tabJournal Entry Account`
where reference_type='Purchase Invoice' and reference_name=%s""", pi.name))
def test_invoice_with_advance_and_multi_payment_terms(self):
from erpnext.accounts.doctype.journal_entry.test_journal_entry \
import test_records as jv_test_records
jv = frappe.copy_doc(jv_test_records[1])
jv.insert()
jv.submit()
pi = frappe.copy_doc(test_records[0])
pi.disable_rounded_total = 1
pi.allocate_advances_automatically = 0
pi.append("advances", {
"reference_type": "Journal Entry",
"reference_name": jv.name,
"reference_row": jv.get("accounts")[0].name,
"advance_amount": 400,
"allocated_amount": 300,
"remarks": jv.remark
})
pi.insert()
pi.update({
"payment_schedule": get_payment_terms("_Test Payment Term Template",
pi.posting_date, pi.grand_total)
})
pi.save()
pi.submit()
self.assertEqual(pi.payment_schedule[0].payment_amount, 606.15)
self.assertEqual(pi.payment_schedule[0].due_date, pi.posting_date)
self.assertEqual(pi.payment_schedule[1].payment_amount, 606.15)
self.assertEqual(pi.payment_schedule[1].due_date, add_days(pi.posting_date, 30))
pi.load_from_db()
self.assertTrue(
frappe.db.sql(
"select name from `tabJournal Entry Account` where reference_type='Purchase Invoice' and "
"reference_name=%s and debit_in_account_currency=300", pi.name)
)
self.assertEqual(pi.outstanding_amount, 1212.30)
pi.cancel()
self.assertFalse(
frappe.db.sql(
"select name from `tabJournal Entry Account` where reference_type='Purchase Invoice' and "
"reference_name=%s", pi.name)
)
def test_total_purchase_cost_for_project(self):
existing_purchase_cost = frappe.db.sql("""select sum(base_net_amount)
from `tabPurchase Invoice Item` where project = '_Test Project' and docstatus=1""")
existing_purchase_cost = existing_purchase_cost and existing_purchase_cost[0][0] or 0
pi = make_purchase_invoice(currency="USD", conversion_rate=60, project="_Test Project")
self.assertEqual(frappe.db.get_value("Project", "_Test Project", "total_purchase_cost"),
existing_purchase_cost + 15000)
pi1 = make_purchase_invoice(qty=10, project="_Test Project")
self.assertEqual(frappe.db.get_value("Project", "_Test Project", "total_purchase_cost"),
existing_purchase_cost + 15500)
pi1.cancel()
self.assertEqual(frappe.db.get_value("Project", "_Test Project", "total_purchase_cost"),
existing_purchase_cost + 15000)
pi.cancel()
self.assertEqual(frappe.db.get_value("Project", "_Test Project", "total_purchase_cost"), existing_purchase_cost)
def test_return_purchase_invoice(self):
set_perpetual_inventory()
pi = make_purchase_invoice()
return_pi = make_purchase_invoice(is_return=1, return_against=pi.name, qty=-2)
# check gl entries for return
gl_entries = frappe.db.sql("""select account, debit, credit
from `tabGL Entry` where voucher_type=%s and voucher_no=%s
order by account desc""", ("Purchase Invoice", return_pi.name), as_dict=1)
self.assertTrue(gl_entries)
expected_values = {
"Creditors - _TC": [100.0, 0.0],
"Stock Received But Not Billed - _TC": [0.0, 100.0],
}
for gle in gl_entries:
self.assertEqual(expected_values[gle.account][0], gle.debit)
self.assertEqual(expected_values[gle.account][1], gle.credit)
set_perpetual_inventory(0)
def test_multi_currency_gle(self):
set_perpetual_inventory(0)
pi = make_purchase_invoice(supplier="_Test Supplier USD", credit_to="_Test Payable USD - _TC",
currency="USD", conversion_rate=50)
gl_entries = frappe.db.sql("""select account, account_currency, debit, credit,
debit_in_account_currency, credit_in_account_currency
from `tabGL Entry` where voucher_type='Purchase Invoice' and voucher_no=%s
order by account asc""", pi.name, as_dict=1)
self.assertTrue(gl_entries)
expected_values = {
"_Test Payable USD - _TC": {
"account_currency": "USD",
"debit": 0,
"debit_in_account_currency": 0,
"credit": 12500,
"credit_in_account_currency": 250
},
"_Test Account Cost for Goods Sold - _TC": {
"account_currency": "INR",
"debit": 12500,
"debit_in_account_currency": 12500,
"credit": 0,
"credit_in_account_currency": 0
}
}
for field in ("account_currency", "debit", "debit_in_account_currency", "credit", "credit_in_account_currency"):
for i, gle in enumerate(gl_entries):
self.assertEqual(expected_values[gle.account][field], gle[field])
# Check for valid currency
pi1 = make_purchase_invoice(supplier="_Test Supplier USD", credit_to="_Test Payable USD - _TC",
do_not_save=True)
self.assertRaises(InvalidCurrency, pi1.save)
# cancel
pi.cancel()
gle = frappe.db.sql("""select name from `tabGL Entry`
where voucher_type='Sales Invoice' and voucher_no=%s""", pi.name)
self.assertFalse(gle)
def test_purchase_invoice_update_stock_gl_entry_with_perpetual_inventory(self):
set_perpetual_inventory()
pi = make_purchase_invoice(update_stock=1, posting_date=frappe.utils.nowdate(),
posting_time=frappe.utils.nowtime())
gl_entries = frappe.db.sql("""select account, account_currency, debit, credit,
debit_in_account_currency, credit_in_account_currency
from `tabGL Entry` where voucher_type='Purchase Invoice' and voucher_no=%s
order by account asc""", pi.name, as_dict=1)
self.assertTrue(gl_entries)
stock_in_hand_account = get_inventory_account(pi.company, pi.get("items")[0].warehouse)
expected_gl_entries = dict((d[0], d) for d in [
[pi.credit_to, 0.0, 250.0],
[stock_in_hand_account, 250.0, 0.0]
])
for i, gle in enumerate(gl_entries):
self.assertEqual(expected_gl_entries[gle.account][0], gle.account)
self.assertEqual(expected_gl_entries[gle.account][1], gle.debit)
self.assertEqual(expected_gl_entries[gle.account][2], gle.credit)
def test_purchase_invoice_for_is_paid_and_update_stock_gl_entry_with_perpetual_inventory(self):
set_perpetual_inventory()
pi = make_purchase_invoice(update_stock=1, posting_date=frappe.utils.nowdate(),
posting_time=frappe.utils.nowtime(), cash_bank_account="Cash - _TC", is_paid=1)
gl_entries = frappe.db.sql("""select account, account_currency, sum(debit) as debit,
sum(credit) as credit, debit_in_account_currency, credit_in_account_currency
from `tabGL Entry` where voucher_type='Purchase Invoice' and voucher_no=%s
group by account, voucher_no order by account asc;""", pi.name, as_dict=1)
stock_in_hand_account = get_inventory_account(pi.company, pi.get("items")[0].warehouse)
self.assertTrue(gl_entries)
expected_gl_entries = dict((d[0], d) for d in [
[pi.credit_to, 250.0, 250.0],
[stock_in_hand_account, 250.0, 0.0],
["Cash - _TC", 0.0, 250.0]
])
for i, gle in enumerate(gl_entries):
self.assertEqual(expected_gl_entries[gle.account][0], gle.account)
self.assertEqual(expected_gl_entries[gle.account][1], gle.debit)
self.assertEqual(expected_gl_entries[gle.account][2], gle.credit)
def test_auto_batch(self):
item_code = frappe.db.get_value('Item',
{'has_batch_no': 1, 'create_new_batch':1}, 'name')
if not item_code:
doc = frappe.get_doc({
'doctype': 'Item',
'is_stock_item': 1,
'item_code': 'test batch item',
'item_group': 'Products',
'has_batch_no': 1,
'create_new_batch': 1
}).insert(ignore_permissions=True)
item_code = doc.name
pi = make_purchase_invoice(update_stock=1, posting_date=frappe.utils.nowdate(),
posting_time=frappe.utils.nowtime(), item_code=item_code)
self.assertTrue(frappe.db.get_value('Batch',
{'item': item_code, 'reference_name': pi.name}))
def test_update_stock_and_purchase_return(self):
actual_qty_0 = get_qty_after_transaction()
pi = make_purchase_invoice(update_stock=1, posting_date=frappe.utils.nowdate(),
posting_time=frappe.utils.nowtime())
actual_qty_1 = get_qty_after_transaction()
self.assertEqual(actual_qty_0 + 5, actual_qty_1)
# return entry
pi1 = make_purchase_invoice(is_return=1, return_against=pi.name, qty=-2, rate=50, update_stock=1)
actual_qty_2 = get_qty_after_transaction()
self.assertEqual(actual_qty_1 - 2, actual_qty_2)
pi1.cancel()
self.assertEqual(actual_qty_1, get_qty_after_transaction())
pi.reload()
pi.cancel()
self.assertEqual(actual_qty_0, get_qty_after_transaction())
def test_subcontracting_via_purchase_invoice(self):
from erpnext.stock.doctype.stock_entry.test_stock_entry import make_stock_entry
make_stock_entry(item_code="_Test Item", target="_Test Warehouse 1 - _TC", qty=100, basic_rate=100)
make_stock_entry(item_code="_Test Item Home Desktop 100", target="_Test Warehouse 1 - _TC",
qty=100, basic_rate=100)
pi = make_purchase_invoice(item_code="_Test FG Item", qty=10, rate=500,
update_stock=1, is_subcontracted="Yes")
self.assertEqual(len(pi.get("supplied_items")), 2)
rm_supp_cost = sum([d.amount for d in pi.get("supplied_items")])
self.assertEqual(pi.get("items")[0].rm_supp_cost, flt(rm_supp_cost, 2))
def test_rejected_serial_no(self):
pi = make_purchase_invoice(item_code="_Test Serialized Item With Series", received_qty=2, qty=1,
rejected_qty=1, rate=500, update_stock=1,
rejected_warehouse = "_Test Rejected Warehouse - _TC")
self.assertEqual(frappe.db.get_value("Serial No", pi.get("items")[0].serial_no, "warehouse"),
pi.get("items")[0].warehouse)
self.assertEqual(frappe.db.get_value("Serial No", pi.get("items")[0].rejected_serial_no,
"warehouse"), pi.get("items")[0].rejected_warehouse)
def test_outstanding_amount_after_advance_jv_cancelation(self):
from erpnext.accounts.doctype.journal_entry.test_journal_entry \
import test_records as jv_test_records
jv = frappe.copy_doc(jv_test_records[1])
jv.accounts[0].is_advance = 'Yes'
jv.insert()
jv.submit()
pi = frappe.copy_doc(test_records[0])
pi.append("advances", {
"reference_type": "Journal Entry",
"reference_name": jv.name,
"reference_row": jv.get("accounts")[0].name,
"advance_amount": 400,
"allocated_amount": 300,
"remarks": jv.remark
})
pi.insert()
pi.submit()
pi.load_from_db()
#check outstanding after advance allocation
self.assertEqual(flt(pi.outstanding_amount), flt(pi.rounded_total - pi.total_advance))
#added to avoid Document has been modified exception
jv = frappe.get_doc("Journal Entry", jv.name)
jv.cancel()
pi.load_from_db()
#check outstanding after advance cancellation
self.assertEqual(flt(pi.outstanding_amount), flt(pi.rounded_total + pi.total_advance))
def test_outstanding_amount_after_advance_payment_entry_cancelation(self):
pe = frappe.get_doc({
"doctype": "Payment Entry",
"payment_type": "Pay",
"party_type": "Supplier",
"party": "_Test Supplier",
"company": "_Test Company",
"paid_from_account_currency": "INR",
"paid_to_account_currency": "INR",
"source_exchange_rate": 1,
"target_exchange_rate": 1,
"reference_no": "1",
"reference_date": nowdate(),
"received_amount": 300,
"paid_amount": 300,
"paid_from": "_Test Cash - _TC",
"paid_to": "_Test Payable - _TC"
})
pe.insert()
pe.submit()
pi = frappe.copy_doc(test_records[0])
pi.is_pos = 0
pi.append("advances", {
"doctype": "Purchase Invoice Advance",
"reference_type": "Payment Entry",
"reference_name": pe.name,
"advance_amount": 300,
"allocated_amount": 300,
"remarks": pe.remarks
})
pi.insert()
pi.submit()
pi.load_from_db()
#check outstanding after advance allocation
self.assertEqual(flt(pi.outstanding_amount), flt(pi.rounded_total - pi.total_advance))
#added to avoid Document has been modified exception
pe = frappe.get_doc("Payment Entry", pe.name)
pe.cancel()
pi.load_from_db()
#check outstanding after advance cancellation
self.assertEqual(flt(pi.outstanding_amount), flt(pi.rounded_total + pi.total_advance))
def test_purchase_invoice_with_shipping_rule(self):
from erpnext.accounts.doctype.shipping_rule.test_shipping_rule \
import create_shipping_rule
shipping_rule = create_shipping_rule(shipping_rule_type = "Buying", shipping_rule_name = "Shipping Rule - Purchase Invoice Test")
pi = frappe.copy_doc(test_records[0])
pi.shipping_rule = shipping_rule.name
pi.insert()
shipping_amount = 0.0
for condition in shipping_rule.get("conditions"):
if not condition.to_value or (flt(condition.from_value) <= pi.net_total <= flt(condition.to_value)):
shipping_amount = condition.shipping_amount
shipping_charge = {
"doctype": "Purchase Taxes and Charges",
"category": "Valuation and Total",
"charge_type": "Actual",
"account_head": shipping_rule.account,
"cost_center": shipping_rule.cost_center,
"tax_amount": shipping_amount,
"description": shipping_rule.name,
"add_deduct_tax": "Add"
}
pi.append("taxes", shipping_charge)
pi.save()
self.assertEqual(pi.net_total, 1250)
self.assertEqual(pi.total_taxes_and_charges, 462.3)
self.assertEqual(pi.grand_total, 1712.3)
def test_make_pi_without_terms(self):
pi = make_purchase_invoice(do_not_save=1)
self.assertFalse(pi.get('payment_schedule'))
pi.insert()
self.assertTrue(pi.get('payment_schedule'))
def test_duplicate_due_date_in_terms(self):
pi = make_purchase_invoice(do_not_save=1)
pi.append('payment_schedule', dict(due_date='2017-01-01', invoice_portion=50.00, payment_amount=50))
pi.append('payment_schedule', dict(due_date='2017-01-01', invoice_portion=50.00, payment_amount=50))
self.assertRaises(frappe.ValidationError, pi.insert)
def test_debit_note(self):
from erpnext.accounts.doctype.payment_entry.test_payment_entry import get_payment_entry
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import get_outstanding_amount
pi = make_purchase_invoice(item_code = "_Test Item", qty = (5 * -1), rate=500, is_return = 1)
outstanding_amount = get_outstanding_amount(pi.doctype,
pi.name, "Creditors - _TC", pi.supplier, "Supplier")
self.assertEqual(pi.outstanding_amount, outstanding_amount)
pe = get_payment_entry("Purchase Invoice", pi.name, bank_account="_Test Bank - _TC")
pe.reference_no = "1"
pe.reference_date = nowdate()
pe.paid_from_account_currency = pi.currency
pe.paid_to_account_currency = pi.currency
pe.source_exchange_rate = 1
pe.target_exchange_rate = 1
pe.paid_amount = pi.grand_total * -1
pe.insert()
pe.submit()
pi_doc = frappe.get_doc('Purchase Invoice', pi.name)
self.assertEqual(pi_doc.outstanding_amount, 0)
def test_purchase_invoice_for_enable_allow_cost_center_in_entry_of_bs_account(self):
from erpnext.accounts.doctype.cost_center.test_cost_center import create_cost_center
accounts_settings = frappe.get_doc('Accounts Settings', 'Accounts Settings')
accounts_settings.allow_cost_center_in_entry_of_bs_account = 1
accounts_settings.save()
cost_center = "_Test Cost Center for BS Account - _TC"
create_cost_center(cost_center_name="_Test Cost Center for BS Account", company="_Test Company")
pi = make_purchase_invoice_against_cost_center(cost_center=cost_center, credit_to="Creditors - _TC")
self.assertEqual(pi.cost_center, cost_center)
expected_values = {
"Creditors - _TC": {
"cost_center": cost_center
},
"_Test Account Cost for Goods Sold - _TC": {
"cost_center": cost_center
}
}
gl_entries = frappe.db.sql("""select account, cost_center, account_currency, debit, credit,
debit_in_account_currency, credit_in_account_currency
from `tabGL Entry` where voucher_type='Purchase Invoice' and voucher_no=%s
order by account asc""", pi.name, as_dict=1)
self.assertTrue(gl_entries)
for gle in gl_entries:
self.assertEqual(expected_values[gle.account]["cost_center"], gle.cost_center)
accounts_settings.allow_cost_center_in_entry_of_bs_account = 0
accounts_settings.save()
def test_purchase_invoice_for_disable_allow_cost_center_in_entry_of_bs_account(self):
accounts_settings = frappe.get_doc('Accounts Settings', 'Accounts Settings')
accounts_settings.allow_cost_center_in_entry_of_bs_account = 0
accounts_settings.save()
cost_center = "_Test Cost Center - _TC"
pi = make_purchase_invoice(credit_to="Creditors - _TC")
expected_values = {
"Creditors - _TC": {
"cost_center": None
},
"_Test Account Cost for Goods Sold - _TC": {
"cost_center": cost_center
}
}
gl_entries = frappe.db.sql("""select account, cost_center, account_currency, debit, credit,
debit_in_account_currency, credit_in_account_currency
from `tabGL Entry` where voucher_type='Purchase Invoice' and voucher_no=%s
order by account asc""", pi.name, as_dict=1)
self.assertTrue(gl_entries)
for gle in gl_entries:
self.assertEqual(expected_values[gle.account]["cost_center"], gle.cost_center)
def unlink_payment_on_cancel_of_invoice(enable=1):
	"""Toggle the Accounts Settings flag that unlinks payments when an invoice is cancelled."""
	settings = frappe.get_doc("Accounts Settings")
	settings.unlink_payment_on_cancellation_of_invoice = enable
	settings.save()
def make_purchase_invoice(**args):
	"""Build a Purchase Invoice test fixture with a single item row.

	Keyword arguments override the defaults below. By default the invoice
	is inserted and submitted; pass do_not_save / do_not_submit to stop
	earlier. Returns the Purchase Invoice document.
	"""
	opts = frappe._dict(args)
	pi = frappe.new_doc("Purchase Invoice")
	pi.posting_date = opts.posting_date or today()
	if opts.posting_time:
		pi.posting_time = opts.posting_time
	# Optional flags are only set when requested so doctype defaults apply.
	if opts.update_stock:
		pi.update_stock = 1
	if opts.is_paid:
		pi.is_paid = 1
	if opts.cash_bank_account:
		pi.cash_bank_account = opts.cash_bank_account
	pi.company = opts.company or "_Test Company"
	pi.supplier = opts.supplier or "_Test Supplier"
	pi.currency = opts.currency or "INR"
	pi.conversion_rate = opts.conversion_rate or 1
	pi.is_return = opts.is_return
	pi.return_against = opts.return_against
	pi.is_subcontracted = opts.is_subcontracted or "No"
	pi.supplier_warehouse = "_Test Warehouse 1 - _TC"
	item_row = {
		"item_code": opts.item or opts.item_code or "_Test Item",
		"warehouse": opts.warehouse or "_Test Warehouse - _TC",
		"qty": opts.qty or 5,
		"received_qty": opts.received_qty or 0,
		"rejected_qty": opts.rejected_qty or 0,
		"rate": opts.rate or 50,
		"conversion_factor": 1.0,
		"serial_no": opts.serial_no,
		"stock_uom": "_Test UOM",
		"cost_center": "_Test Cost Center - _TC",
		"project": opts.project,
		"rejected_warehouse": opts.rejected_warehouse or "",
		"rejected_serial_no": opts.rejected_serial_no or ""
	}
	pi.append("items", item_row)
	if not opts.do_not_save:
		pi.insert()
		if not opts.do_not_submit:
			pi.submit()
	return pi
def make_purchase_invoice_against_cost_center(**args):
	"""Build a Purchase Invoice fixture with an explicit header cost center.

	Same contract as make_purchase_invoice(), plus:
	- cost_center: header- and item-level cost center
	    (default "_Test Cost Center - _TC")
	- credit_to: payable account (default "Creditors - _TC")
	Returns the (optionally inserted/submitted) Purchase Invoice document.
	"""
	pi = frappe.new_doc("Purchase Invoice")
	args = frappe._dict(args)
	pi.posting_date = args.posting_date or today()
	if args.posting_time:
		pi.posting_time = args.posting_time
	if args.update_stock:
		pi.update_stock = 1
	if args.is_paid:
		pi.is_paid = 1
	if args.cash_bank_account:
		pi.cash_bank_account = args.cash_bank_account
	pi.company = args.company or "_Test Company"
	pi.cost_center = args.cost_center or "_Test Cost Center - _TC"
	pi.supplier = args.supplier or "_Test Supplier"
	pi.currency = args.currency or "INR"
	pi.conversion_rate = args.conversion_rate or 1
	# Fix: this assignment was duplicated on two consecutive lines.
	pi.is_return = args.is_return
	# Fix: previously only return_against was consulted here (copy-paste
	# slip), silently ignoring the credit_to kwarg that callers pass.
	# credit_to now wins; return_against is kept as a fallback for
	# backward compatibility.
	pi.credit_to = args.credit_to or args.return_against or "Creditors - _TC"
	pi.is_subcontracted = args.is_subcontracted or "No"
	pi.supplier_warehouse = "_Test Warehouse 1 - _TC"
	pi.append("items", {
		"item_code": args.item or args.item_code or "_Test Item",
		"warehouse": args.warehouse or "_Test Warehouse - _TC",
		"qty": args.qty or 5,
		"received_qty": args.received_qty or 0,
		"rejected_qty": args.rejected_qty or 0,
		"rate": args.rate or 50,
		"conversion_factor": 1.0,
		"serial_no": args.serial_no,
		"stock_uom": "_Test UOM",
		"cost_center": args.cost_center or "_Test Cost Center - _TC",
		"project": args.project,
		"rejected_warehouse": args.rejected_warehouse or "",
		"rejected_serial_no": args.rejected_serial_no or ""
	})
	if not args.do_not_save:
		pi.insert()
		if not args.do_not_submit:
			pi.submit()
	return pi
# Shared fixture invoices loaded from this doctype's test_records.json.
test_records = frappe.get_test_records('Purchase Invoice')
| gpl-3.0 |
mircealungu/Zeeguu-Core | zeeguu_core_test/test_language.py | 1 | 1511 | from unittest import TestCase
import zeeguu_core
from sqlalchemy.orm.exc import NoResultFound
from zeeguu_core_test.model_test_mixin import ModelTestMixIn
from zeeguu_core_test.rules.language_rule import LanguageRule
from zeeguu_core_test.rules.user_rule import UserRule
from zeeguu_core.model.language import Language
session = zeeguu_core.db.session
class LanguageTest(ModelTestMixIn, TestCase):
    """Model tests for Language lookup and per-user language settings."""

    def setUp(self):
        super().setUp()
        self.user = UserRule().user

    def test_languages_exists(self):
        expected = LanguageRule().random
        try:
            found = Language.find(expected.code)
        except NoResultFound:
            assert False, "No Language found in database"
        assert expected.code == found.code and expected.name == found.name

    def test_get_all_languages(self):
        for language_code in LanguageRule.languages:
            assert LanguageRule.get_or_create_language(language_code)

    def test_user_set_language(self):
        expected = LanguageRule().random
        self.user.set_learned_language(expected.code, session)
        assert self.user.learned_language.id == expected.id

    def test_native_language(self):
        expected = LanguageRule().random
        self.user.set_native_language(expected.code)
        assert self.user.native_language.id == expected.id
| mit |
Workday/OpenFrame | tools/memory_inspector/memory_inspector/core/backends_unittest.py | 107 | 1769 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from memory_inspector.core import backends
class MockDevice(backends.Device):  # pylint: disable=W0223
  """Minimal fake of backends.Device used by the registration tests."""

  def __init__(self, backend, device_id):
    super(MockDevice, self).__init__(backend)
    self.device_id = device_id

  @property
  def name(self):
    return "Mock Device {}".format(self.device_id)

  @property
  def id(self):
    return self.device_id
class MockBackend(backends.Backend):
  """Minimal fake of backends.Backend exposing exactly two mock devices."""

  _SETTINGS = {'key_1': 'key descritpion 1'}

  def __init__(self, backend_name):
    super(MockBackend, self).__init__(MockBackend._SETTINGS)
    self.backend_name = backend_name

  def EnumerateDevices(self):
    # Always report the same two devices, in a fixed order.
    for device_id in ('device-1', 'device-2'):
      yield MockDevice(self, device_id)

  def ExtractSymbols(self, native_heaps, sym_paths):
    raise NotImplementedError()

  @property
  def name(self):
    return self.backend_name
class BackendRegisterTest(unittest.TestCase):
  """Registers two mock backends and verifies enumeration and lookup."""

  def runTest(self):
    mock_backend_1 = MockBackend('mock-backend-1')
    mock_backend_2 = MockBackend('mock-backend-2')
    self.assertEqual(mock_backend_1.settings['key_1'], 'key descritpion 1')
    backends.Register(mock_backend_1)
    backends.Register(mock_backend_2)
    # 2 backends x 2 devices each.
    devices = list(backends.ListDevices())
    self.assertEqual(len(devices), 4)
    # Every (backend, device) pair must be retrievable.
    self.assertIsNotNone(backends.GetDevice('mock-backend-1', 'device-1'))
    self.assertIsNotNone(backends.GetDevice('mock-backend-1', 'device-2'))
    self.assertIsNotNone(backends.GetDevice('mock-backend-2', 'device-1'))
    # Bug fix: this line previously re-checked ('mock-backend-2', 'device-1')
    # a second time, leaving 'device-2' of backend 2 untested.
    self.assertIsNotNone(backends.GetDevice('mock-backend-2', 'device-2'))
    self.assertIn('key_1', mock_backend_1.settings)
| bsd-3-clause |
alexissmirnov/donomo | donomo_archive/lib/offlineimap/imapserver.py | 1 | 18628 | # IMAP server support
# Copyright (C) 2002 - 2007 John Goerzen
# <jgoerzen@complete.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import imaplib
from offlineimap import imaplibutil, imaputil, threadutil
from offlineimap.ui import UIBase
from threading import *
import thread, hmac, os, time
import base64
from StringIO import StringIO
from platform import system
try:
# do we have a recent pykerberos?
have_gss = False
import kerberos
if 'authGSSClientWrap' in dir(kerberos):
have_gss = True
except ImportError:
pass
class UsefulIMAPMixIn:
    """Mix-in shared by the concrete IMAP connection classes below.

    Adds folder-selection caching on top of imaplib: a SELECT is skipped
    when the requested mailbox (in the same read-only mode) is already
    the selected one, and routes imaplib debug output through offlineimap.
    """

    def getstate(self):
        # `state` is maintained by the underlying imaplib class.
        return self.state

    def getselectedfolder(self):
        """Return the currently selected mailbox name, or None if none is selected."""
        if self.getstate() == 'SELECTED':
            return self.selectedfolder
        return None

    def select(self, mailbox='INBOX', readonly=None, force = 0):
        """SELECT `mailbox`, skipping the server round-trip when it is
        already selected in the same mode (unless `force` is set)."""
        if (not force) and self.getselectedfolder() == mailbox \
           and self.is_readonly == readonly:
            # No change; return.
            return
        # Delegate to the concrete imaplib base class: bases[0] is this
        # mix-in, bases[1] is the imaplib transport class.
        result = self.__class__.__bases__[1].select(self, mailbox, readonly)
        if result[0] != 'OK':
            raise ValueError, "Error from select: %s" % str(result)
        # Cache the selection only if the SELECT actually succeeded.
        if self.getstate() == 'SELECTED':
            self.selectedfolder = mailbox
        else:
            self.selectedfolder = None

    def _mesg(self, s, secs=None):
        # Route imaplib's debug messages through offlineimap's helper.
        imaplibutil.new_mesg(self, s, secs)
class UsefulIMAP4(UsefulIMAPMixIn, imaplib.IMAP4):
    """Plain (non-SSL) IMAP connection with the selection-caching mix-in."""

    def open(self, host = '', port = imaplib.IMAP4_PORT):
        imaplibutil.new_open(self, host, port)

    # This is a hack around Darwin's implementation of realloc() (which
    # Python uses inside the socket code). On Darwin, we split the
    # message into 100k chunks, which should be small enough - smaller
    # might start seriously hurting performance ...
    # NOTE(review): the comment above says 100k, but the code actually
    # reads in 8192-byte chunks (min(size-read, 8192)).
    def read(self, size):
        if (system() == 'Darwin') and (size>0) :
            read = 0
            io = StringIO()
            while read < size:
                data = imaplib.IMAP4.read (self, min(size-read,8192))
                read += len(data)
                io.write(data)
            return io.getvalue()
        else:
            return imaplib.IMAP4.read (self, size)
class UsefulIMAP4_SSL(UsefulIMAPMixIn, imaplibutil.WrappedIMAP4_SSL):
    """SSL IMAP connection with the selection-caching mix-in."""

    def open(self, host = '', port = imaplib.IMAP4_SSL_PORT):
        imaplibutil.new_open_ssl(self, host, port)

    # This is the same hack as above, to be used in the case of an SSL
    # connexion.
    def read(self, size):
        if (system() == 'Darwin') and (size>0) :
            read = 0
            io = StringIO()
            while read < size:
                data = imaplibutil.WrappedIMAP4_SSL.read (self, min(size-read,8192))
                read += len(data)
                io.write(data)
            return io.getvalue()
        else:
            return imaplibutil.WrappedIMAP4_SSL.read (self,size)
# Preauthenticated-tunnel transport combined with the selection-caching mix-in.
class UsefulIMAP4_Tunnel(UsefulIMAPMixIn, imaplibutil.IMAP4_Tunnel): pass
class IMAPServer:
    """Manages a pool of authenticated IMAP connections to one server.

    Connections are created lazily up to `maxconnections`; a BoundedSemaphore
    limits concurrent use and `connectionlock` protects the pool lists.
    Authentication tries GSSAPI (if pykerberos is available), then CRAM-MD5,
    then a plain LOGIN.
    """

    # States of the two-phase GSSAPI exchange driven by gssauth().
    GSS_STATE_STEP = 0
    GSS_STATE_WRAP = 1

    def __init__(self, config, reposname,
                 username = None, password = None, hostname = None,
                 port = None, ssl = 1, maxconnections = 1, tunnel = None,
                 reference = '""', sslclientcert = None, sslclientkey = None):
        self.reposname = reposname
        self.config = config
        self.username = username
        self.password = password
        self.passworderror = None   # last auth error message, shown on re-prompt
        self.goodpassword = None    # password that last authenticated successfully
        self.hostname = hostname
        self.tunnel = tunnel
        self.port = port
        self.usessl = ssl
        self.sslclientcert = sslclientcert
        self.sslclientkey = sslclientkey
        self.delim = None           # server folder delimiter, filled on first LIST
        self.root = None            # server folder root, filled on first LIST
        if port == None:
            # Standard IMAPS / IMAP ports.
            if ssl:
                self.port = 993
            else:
                self.port = 143
        self.maxconnections = maxconnections
        self.availableconnections = []
        self.assignedconnections = []
        self.lastowner = {}         # connection -> thread id that last used it
        self.semaphore = BoundedSemaphore(self.maxconnections)
        self.connectionlock = Lock()
        self.reference = reference
        self.gss_step = self.GSS_STATE_STEP
        self.gss_vc = None
        self.gssapi = False

    def getpassword(self):
        """Return the password, prompting the UI only when no known-good or
        configured password is usable."""
        if self.goodpassword != None:
            return self.goodpassword

        if self.password != None and self.passworderror == None:
            return self.password

        self.password = UIBase.getglobalui().getpass(self.reposname,
                                                     self.config,
                                                     self.passworderror)
        self.passworderror = None

        return self.password

    def getdelim(self):
        """Returns this server's folder delimiter.  Can only be called
        after one or more calls to acquireconnection."""
        return self.delim

    def getroot(self):
        """Returns this server's folder root.  Can only be called after one
        or more calls to acquireconnection."""
        return self.root

    def releaseconnection(self, connection):
        """Releases a connection, returning it to the pool."""
        self.connectionlock.acquire()
        self.assignedconnections.remove(connection)
        self.availableconnections.append(connection)
        self.connectionlock.release()
        self.semaphore.release()

    def md5handler(self, response):
        """CRAM-MD5 callback for imaplib.authenticate: answer the server
        challenge with "<user> <hmac-md5 digest>".

        NOTE(review): uses self.repos, which only ConfigedIMAPServer sets —
        presumably this base class is never used directly; verify.
        """
        ui = UIBase.getglobalui()
        challenge = response.strip()
        ui.debug('imap', 'md5handler: got challenge %s' % challenge)

        passwd = self.repos.getpassword()
        retval = self.username + ' ' + hmac.new(passwd, challenge).hexdigest()
        ui.debug('imap', 'md5handler: returning %s' % retval)
        return retval

    def plainauth(self, imapobj):
        """Fall-back plain LOGIN authentication."""
        UIBase.getglobalui().debug('imap',
                                   'Attempting plain authentication for %s' % self.username)
        imapobj.login(self.username, self.repos.getpassword())

    def gssauth(self, response):
        """GSSAPI callback for imaplib.authenticate: drives the kerberos
        step/wrap state machine; returns None to cancel on GSS errors."""
        data = base64.b64encode(response)
        try:
            if self.gss_step == self.GSS_STATE_STEP:
                if not self.gss_vc:
                    rc, self.gss_vc = kerberos.authGSSClientInit('imap@' +
                                                                 self.hostname)
                    response = kerberos.authGSSClientResponse(self.gss_vc)
                rc = kerberos.authGSSClientStep(self.gss_vc, data)
                if rc != kerberos.AUTH_GSS_CONTINUE:
                    self.gss_step = self.GSS_STATE_WRAP
            elif self.gss_step == self.GSS_STATE_WRAP:
                rc = kerberos.authGSSClientUnwrap(self.gss_vc, data)
                response = kerberos.authGSSClientResponse(self.gss_vc)
                rc = kerberos.authGSSClientWrap(self.gss_vc, response,
                                                self.username)
                response = kerberos.authGSSClientResponse(self.gss_vc)
        except kerberos.GSSError, err:
            # Kerberos errored out on us, respond with None to cancel the
            # authentication
            UIBase.getglobalui().debug('imap',
                                       '%s: %s' % (err[0][0], err[1][0]))
            return None

        if not response:
            response = ''
        return base64.b64decode(response)

    def acquireconnection(self):
        """Fetches a connection from the pool, making sure to create a new one
        if needed, to obey the maximum connection limits, etc.

        Opens a connection to the server and returns an appropriate
        object."""

        self.semaphore.acquire()
        self.connectionlock.acquire()
        imapobj = None

        if len(self.availableconnections): # One is available.
            # Try to find one that previously belonged to this thread
            # as an optimization.  Start from the back since that's where
            # they're popped on.
            threadid = thread.get_ident()
            imapobj = None
            for i in range(len(self.availableconnections) - 1, -1, -1):
                tryobj = self.availableconnections[i]
                if self.lastowner[tryobj] == threadid:
                    imapobj = tryobj
                    del(self.availableconnections[i])
                    break
            if not imapobj:
                imapobj = self.availableconnections[0]
                del(self.availableconnections[0])
            self.assignedconnections.append(imapobj)
            self.lastowner[imapobj] = thread.get_ident()
            self.connectionlock.release()
            return imapobj

        self.connectionlock.release()   # Release until need to modify data

        """ Must be careful here that if we fail we should bail out gracefully
        and release locks / threads so that the next attempt can try...
        """
        success = 0
        try:
            while not success:
                # Generate a new connection.
                if self.tunnel:
                    UIBase.getglobalui().connecting('tunnel', self.tunnel)
                    imapobj = UsefulIMAP4_Tunnel(self.tunnel)
                    success = 1
                elif self.usessl:
                    UIBase.getglobalui().connecting(self.hostname, self.port)
                    imapobj = UsefulIMAP4_SSL(self.hostname, self.port,
                                              self.sslclientkey, self.sslclientcert)
                else:
                    UIBase.getglobalui().connecting(self.hostname, self.port)
                    imapobj = UsefulIMAP4(self.hostname, self.port)

                imapobj.mustquote = imaplibutil.mustquote

                if not self.tunnel:
                    try:
                        # Try GSSAPI and continue if it fails
                        if 'AUTH=GSSAPI' in imapobj.capabilities and have_gss:
                            UIBase.getglobalui().debug('imap',
                                'Attempting GSSAPI authentication')
                            try:
                                imapobj.authenticate('GSSAPI', self.gssauth)
                            except imapobj.error, val:
                                self.gssapi = False
                                UIBase.getglobalui().debug('imap',
                                    'GSSAPI Authentication failed')
                            else:
                                self.gssapi = True
                                #if we do self.password = None then the next attempt cannot try...
                                #self.password = None

                        if not self.gssapi:
                            if 'AUTH=CRAM-MD5' in imapobj.capabilities:
                                UIBase.getglobalui().debug('imap',
                                    'Attempting CRAM-MD5 authentication')
                                try:
                                    imapobj.authenticate('CRAM-MD5', self.md5handler)
                                except imapobj.error, val:
                                    # CRAM-MD5 failed; fall back to plain LOGIN.
                                    self.plainauth(imapobj)
                            else:
                                self.plainauth(imapobj)
                        # Would bail by here if there was a failure.
                        success = 1
                        self.goodpassword = self.password
                    except imapobj.error, val:
                        self.passworderror = str(val)
                        raise
                        #self.password = None

            # Discover the folder delimiter and root once per server.
            if self.delim == None:
                listres = imapobj.list(self.reference, '""')[1]
                if listres == [None] or listres == None:
                    # Some buggy IMAP servers do not respond well to LIST "" ""
                    # Work around them.
                    listres = imapobj.list(self.reference, '"*"')[1]
                self.delim, self.root = \
                            imaputil.imapsplit(listres[0])[1:]
                self.delim = imaputil.dequote(self.delim)
                self.root = imaputil.dequote(self.root)

            self.connectionlock.acquire()
            self.assignedconnections.append(imapobj)
            self.lastowner[imapobj] = thread.get_ident()
            self.connectionlock.release()
            return imapobj
        except:
            """If we are here then we did not succeed in getting a connection -
            we should clean up and then re-raise the error..."""
            self.semaphore.release()

            #Make sure that this can be retried the next time...
            self.passworderror = None
            if(self.connectionlock.locked()):
                self.connectionlock.release()
            raise

    def connectionwait(self):
        """Waits until there is a connection available.  Note that between
        the time that a connection becomes available and the time it is
        requested, another thread may have grabbed it.  This function is
        mainly present as a way to avoid spawning thousands of threads
        to copy messages, then have them all wait for 3 available connections.
        It's OK if we have maxconnections + 1 or 2 threads, which is what
        this will help us do."""
        threadutil.semaphorewait(self.semaphore)

    def close(self):
        """Log out every pooled connection and reset all pool/auth state."""
        # Make sure I own all the semaphores.  Let the threads finish
        # their stuff.  This is a blocking method.
        self.connectionlock.acquire()
        threadutil.semaphorereset(self.semaphore, self.maxconnections)
        for imapobj in self.assignedconnections + self.availableconnections:
            imapobj.logout()
        self.assignedconnections = []
        self.availableconnections = []
        self.lastowner = {}
        # reset kerberos state
        self.gss_step = self.GSS_STATE_STEP
        self.gss_vc = None
        self.gssapi = False
        self.connectionlock.release()

    def keepalive(self, timeout, event):
        """Sends a NOOP to each connection recorded.   It will wait a maximum
        of timeout seconds between doing this, and will continue to do so
        until the Event object as passed is true.  This method is expected
        to be invoked in a separate thread, which should be join()'d after
        the event is set."""
        ui = UIBase.getglobalui()
        ui.debug('imap', 'keepalive thread started')
        while 1:
            ui.debug('imap', 'keepalive: top of loop')
            time.sleep(timeout)
            ui.debug('imap', 'keepalive: after wait')
            if event.isSet():
                ui.debug('imap', 'keepalive: event is set; exiting')
                return
            ui.debug('imap', 'keepalive: acquiring connectionlock')
            self.connectionlock.acquire()
            numconnections = len(self.assignedconnections) + \
                             len(self.availableconnections)
            self.connectionlock.release()
            ui.debug('imap', 'keepalive: connectionlock released')
            threads = []
            imapobjs = []

            # NOOP every connection in parallel, one short-lived thread each.
            for i in range(numconnections):
                ui.debug('imap', 'keepalive: processing connection %d of %d' % (i, numconnections))
                imapobj = self.acquireconnection()
                ui.debug('imap', 'keepalive: connection %d acquired' % i)
                imapobjs.append(imapobj)
                thr = threadutil.ExitNotifyThread(target = imapobj.noop)
                thr.setDaemon(1)
                thr.start()
                threads.append(thr)
                ui.debug('imap', 'keepalive: thread started')

            ui.debug('imap', 'keepalive: joining threads')

            for thr in threads:
                # Make sure all the commands have completed.
                thr.join()

            ui.debug('imap', 'keepalive: releasing connections')

            for imapobj in imapobjs:
                self.releaseconnection(imapobj)

            ui.debug('imap', 'keepalive: bottom of loop')
class ConfigedIMAPServer(IMAPServer):
    """This class is designed for easier initialization given a ConfigParser
    object and an account name.  The passwordhash is used if
    passwords for certain accounts are known.  If the password for this
    account is listed, it will be obtained from there."""

    # NOTE(review): `passwordhash = {}` is a mutable default argument; it is
    # only read here, never mutated, so it is harmless — but worth confirming
    # no caller relies on mutating it.
    def __init__(self, repository, passwordhash = {}):
        """Initialize the object.  If the account is not a tunnel,
        the password is required."""
        self.repos = repository
        self.config = self.repos.getconfig()
        usetunnel = self.repos.getpreauthtunnel()
        # Host/credential settings are only needed for a direct connection.
        if not usetunnel:
            host = self.repos.gethost()
            user = self.repos.getuser()
            port = self.repos.getport()
            ssl = self.repos.getssl()
            sslclientcert = self.repos.getsslclientcert()
            sslclientkey = self.repos.getsslclientkey()
        reference = self.repos.getreference()
        server = None
        password = None

        # Prefer a password already known for this account name.
        if repository.getname() in passwordhash:
            password = passwordhash[repository.getname()]

        # Connect to the remote server.
        if usetunnel:
            IMAPServer.__init__(self, self.config, self.repos.getname(),
                                tunnel = usetunnel,
                                reference = reference,
                                maxconnections = self.repos.getmaxconnections())
        else:
            if not password:
                password = self.repos.getpassword()
            IMAPServer.__init__(self, self.config, self.repos.getname(),
                                user, password, host, port, ssl,
                                self.repos.getmaxconnections(),
                                reference = reference,
                                sslclientcert = sslclientcert,
                                sslclientkey = sslclientkey)
| bsd-3-clause |
shawger/s-kape | lib/requests/packages/chardet/langhebrewmodel.py | 2763 | 11318 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Simon Montagu
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Shoshannah Forbes - original C code (?)
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Windows-1255 language model
# Character Mapping Table:
# 256-entry table: maps each windows-1255 byte value to an "order" index
# (presumably a frequency rank consumed as 'charToOrderMap' by the model
# dict below; see the legend above: 255=control, 254=CR/LF, 253=symbol,
# 252=digit).
win1255_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85,  # 40
 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253,  # 50
253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49,  # 60
 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253,  # 70
124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
  9,  8, 20, 16,  3,  2, 24, 14, 22,  1, 25, 15,  4, 11,  6, 23,
 12, 19, 13, 26, 18, 27, 21, 17,  7, 10,  5,251,252,128, 96,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 98.4004%
# first 1024 sequences: 1.5981%
# rest sequences: 0.087%
# negative sequences: 0.0015%
HebrewLangModel = (
0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
)
# Bundles the windows-1255 tables into the language-model dict — presumably
# consumed by chardet's single-byte charset prober; the key names match the
# fields referenced throughout the chardet lang*model modules.
Win1255HebrewModel = {
    'charToOrderMap': win1255_CharToOrderMap,   # byte -> order index (table above)
    'precedenceMatrix': HebrewLangModel,        # 2-character sequence class table
    'mTypicalPositiveRatio': 0.984004,          # matches "first 512 sequences: 98.4004%" stat above
    'keepEnglishLetter': False,
    'charsetName': "windows-1255"
}
# flake8: noqa
| gpl-3.0 |
sarvex/tensorflow | tensorflow/python/kernel_tests/eig_op_test.py | 9 | 9610 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.linalg_ops.eig."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker_v2
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import sort_ops
from tensorflow.python.platform import test
def _AddTest(test_class, op_name, testcase_name, fn):
test_name = "_".join(["test", op_name, testcase_name])
if hasattr(test_class, test_name):
raise RuntimeError("Test %s defined more than once" % test_name)
setattr(test_class, test_name, fn)
class EigTest(test.TestCase):
  """Non-parameterized tests for tf.linalg.eig / eigvals."""

  @test_util.run_deprecated_v1
  def testWrongDimensions(self):
    # The input to self_adjoint_eig should be a tensor of
    # at least rank 2.
    scalar = constant_op.constant(1.)
    with self.assertRaises(ValueError):
      linalg_ops.eig(scalar)
    vector = constant_op.constant([1., 2.])
    with self.assertRaises(ValueError):
      linalg_ops.eig(vector)

  @test_util.run_deprecated_v1
  def testConcurrentExecutesWithoutError(self):
    # Decompose two identically-seeded random matrices, with and without
    # eigenvectors, in one evaluation; results must agree across copies.
    all_ops = []
    with self.session():
      for compute_v_ in True, False:
        matrix1 = random_ops.random_normal([5, 5], seed=42)
        matrix2 = random_ops.random_normal([5, 5], seed=42)
        if compute_v_:
          e1, v1 = linalg_ops.eig(matrix1)
          e2, v2 = linalg_ops.eig(matrix2)
          all_ops += [e1, v1, e2, v2]
        else:
          e1 = linalg_ops.eigvals(matrix1)
          e2 = linalg_ops.eigvals(matrix2)
          all_ops += [e1, e2]
      val = self.evaluate(all_ops)
      # Eigenvalues from the two eig() copies must match exactly.
      self.assertAllEqual(val[0], val[2])
      # The algorithm is slightly different for compute_v being True and False,
      # so require approximate equality only here.
      self.assertAllClose(val[2], val[4])
      self.assertAllEqual(val[4], val[5])
      self.assertAllEqual(val[1], val[3])

  def testMatrixThatFailsWhenFlushingDenormsToZero(self):
    # Test a 32x32 matrix which is known to fail if denorm floats are flushed to
    # zero.
    matrix = np.genfromtxt(
        test.test_src_dir_path(
            "python/kernel_tests/testdata/"
            "self_adjoint_eig_fail_if_denorms_flushed.txt")).astype(np.float32)
    self.assertEqual(matrix.shape, (32, 32))
    matrix_tensor = constant_op.constant(matrix)
    with self.session() as _:
      (e, v) = self.evaluate(linalg_ops.self_adjoint_eig(matrix_tensor))
      self.assertEqual(e.size, 32)
      # Eigenvectors must be orthonormal and reconstruct the input.
      self.assertAllClose(
          np.matmul(v, v.transpose()), np.eye(32, dtype=np.float32), atol=2e-3)
      self.assertAllClose(matrix,
                          np.matmul(np.matmul(v, np.diag(e)), v.transpose()))
def SortEigenValues(e):
  """Return eigenvalues reordered by argsort of (real + imag) on the last axis."""
  order = np.argsort(e.real + e.imag, axis=-1)
  return np.take(e, order, axis=-1)
def SortEigenDecomposition(e, v):
  """Sort eigenvalues by (real + imag) and permute eigenvector columns to match.

  If `v` has rank < 2 the pair is returned unchanged.
  """
  if v.ndim < 2:
    return e, v
  order = np.argsort(e.real + e.imag, axis=-1)
  return np.take(e, order, axis=-1), np.take(v, order, axis=-1)
def EquilibrateEigenVectorPhases(x, y):
  """Equilibrate the phase of the Eigenvectors in the columns of `x` and `y`.

  Eigenvectors are only unique up to an arbitrary phase. This function rotates x
  such that it matches y. Precondition: The columns of x and y differ by a
  multiplicative complex phase factor only.

  Args:
    x: `np.ndarray` with Eigenvectors
    y: `np.ndarray` with Eigenvectors

  Returns:
    `np.ndarray` containing an equilibrated version of x.
  """
  # Per-column inner product <x, y>; its unit-normalized value is the
  # relative phase between the two vectors.
  inner = np.sum(np.conj(x) * y, -2, keepdims=True)
  return (inner / np.abs(inner)) * x
def _GetEigTest(dtype_, shape_, compute_v_):
  """Build a test method checking eig/eigvals for one (dtype, shape, compute_v) combo.

  Returns:
    A function suitable for attaching to a TestCase (e.g. via _AddTest).
  """

  def CompareEigenVectors(self, x, y, tol):
    # Remove the arbitrary per-column phase before comparing.
    x = EquilibrateEigenVectorPhases(x, y)
    self.assertAllClose(x, y, atol=tol)

  def CompareEigenDecompositions(self, x_e, x_v, y_e, y_v, tol):
    # Flatten any batch dimensions, then compare each decomposition after
    # sorting eigenvalues (and matching eigenvector columns).
    num_batches = int(np.prod(x_e.shape[:-1]))
    n = x_e.shape[-1]
    x_e = np.reshape(x_e, [num_batches] + [n])
    x_v = np.reshape(x_v, [num_batches] + [n, n])
    y_e = np.reshape(y_e, [num_batches] + [n])
    y_v = np.reshape(y_v, [num_batches] + [n, n])
    for i in range(num_batches):
      x_ei, x_vi = SortEigenDecomposition(x_e[i, :], x_v[i, :, :])
      y_ei, y_vi = SortEigenDecomposition(y_e[i, :], y_v[i, :, :])
      self.assertAllClose(x_ei, y_ei, atol=tol, rtol=tol)
      CompareEigenVectors(self, x_vi, y_vi, tol)

  def Test(self):
    np.random.seed(1)
    n = shape_[-1]
    batch_shape = shape_[:-2]
    np_dtype = dtype_.as_numpy_dtype

    def RandomInput():
      # Most matrices are diagonalizable
      a = np.random.uniform(
          low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
      if dtype_.is_complex:
        a += 1j * np.random.uniform(
            low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
      a = np.tile(a, batch_shape + (1, 1))
      return a

    # Looser tolerance for single-precision dtypes.
    if dtype_ in (dtypes_lib.float32, dtypes_lib.complex64):
      atol = 1e-4
    else:
      atol = 1e-12
    a = RandomInput()
    np_e, np_v = np.linalg.eig(a)
    with self.session():
      if compute_v_:
        tf_e, tf_v = linalg_ops.eig(constant_op.constant(a))

        # Check that V*diag(E)*V^(-1) is close to A.
        a_ev = math_ops.matmul(
            math_ops.matmul(tf_v, array_ops.matrix_diag(tf_e)),
            linalg_ops.matrix_inverse(tf_v))
        self.assertAllClose(self.evaluate(a_ev), a, atol=atol)

        # Compare to numpy.linalg.eig.
        CompareEigenDecompositions(self, np_e, np_v, self.evaluate(tf_e),
                                   self.evaluate(tf_v), atol)
      else:
        tf_e = linalg_ops.eigvals(constant_op.constant(a))
        self.assertAllClose(
            SortEigenValues(np_e),
            SortEigenValues(self.evaluate(tf_e)),
            atol=atol)

  return Test
class EigGradTest(test.TestCase):
  """Empty container; gradient test methods are attached via `_AddTest`."""
  pass  # Filled in below
def _GetEigGradTest(dtype_, shape_, compute_v_):
  """Build a gradient-check test for `linalg_ops.eig`/`eigvals`.

  Compares the theoretical gradient against a central-difference numerical
  estimate via `gradient_checker_v2` for one (dtype, shape, compute_v) config.
  """

  def Test(self):
    np.random.seed(1)
    n = shape_[-1]
    batch_shape = shape_[:-2]
    np_dtype = dtype_.as_numpy_dtype

    def RandomInput():
      # Most matrices are diagonalizable
      a = np.random.uniform(
          low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
      if dtype_.is_complex:
        a += 1j * np.random.uniform(
            low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
      a = np.tile(a, batch_shape + (1, 1))
      return a

    # Optimal stepsize for central difference is O(epsilon^{1/3}).
    epsilon = np.finfo(np_dtype).eps
    delta = 0.1 * epsilon**(1.0 / 3.0)
    # tolerance obtained by looking at actual differences using
    # np.linalg.norm(theoretical-numerical, np.inf) on -mavx build
    # after discarding one random input sample
    _ = RandomInput()
    if dtype_ in (dtypes_lib.float32, dtypes_lib.complex64):
      tol = 1e-2
    else:
      tol = 1e-7
    with self.session():

      def Compute(x):
        e, v = linalg_ops.eig(x)

        # We sort eigenvalues by e.real+e.imag to have consistent
        # order between runs
        b_dims = len(e.shape) - 1
        idx = sort_ops.argsort(math_ops.real(e) + math_ops.imag(e), axis=-1)
        e = array_ops.gather(e, idx, batch_dims=b_dims)
        v = array_ops.gather(v, idx, batch_dims=b_dims)

        # (complex) Eigenvectors are only unique up to an arbitrary phase
        # We normalize the vectors such that the first component has phase 0.
        top_rows = v[..., 0:1, :]
        angle = -math_ops.angle(top_rows)
        phase = math_ops.complex(math_ops.cos(angle), math_ops.sin(angle))
        v *= phase
        return e, v

      if compute_v_:
        # Check the gradients of the eigenvalues and eigenvectors separately.
        funcs = [lambda x: Compute(x)[0], lambda x: Compute(x)[1]]
      else:
        funcs = [linalg_ops.eigvals]
      for f in funcs:
        theoretical, numerical = gradient_checker_v2.compute_gradient(
            f, [RandomInput()], delta=delta)
        self.assertAllClose(theoretical, numerical, atol=tol, rtol=tol)

  return Test
if __name__ == "__main__":
  # Generate one test method per (compute_v, dtype, size, batch_shape) combo
  # and attach it to the EigTest / EigGradTest containers.
  dtypes_to_test = [
      dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.complex64,
      dtypes_lib.complex128
  ]
  for compute_v in True, False:
    for dtype in dtypes_to_test:
      for size in 1, 2, 5, 10:
        # Batched shapes are only exercised for the smaller matrix sizes to
        # keep runtime bounded.  (The original guard `max(size, size) < 10`
        # was a copy-paste remnant from two-size tests; it reduces to
        # `size < 10`.)
        for batch_dims in [(), (3,)] + [(3, 2)] * (size < 10):
          shape = batch_dims + (size, size)
          name = "%s_%s_%s" % (dtype.name, "_".join(map(str, shape)), compute_v)
          _AddTest(EigTest, "Eig", name, _GetEigTest(dtype, shape, compute_v))
          # Gradient tests are registered only for the complex dtypes.
          if dtype not in [dtypes_lib.float32, dtypes_lib.float64]:
            _AddTest(EigGradTest, "EigGrad", name,
                     _GetEigGradTest(dtype, shape, compute_v))
  test.main()
| apache-2.0 |
Jorge-Rodriguez/ansible | test/units/modules/network/f5/test_bigip_gtm_monitor_firepass.py | 21 | 4709 | # -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_gtm_monitor_firepass import ApiParameters
from library.modules.bigip_gtm_monitor_firepass import ModuleParameters
from library.modules.bigip_gtm_monitor_firepass import ModuleManager
from library.modules.bigip_gtm_monitor_firepass import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_gtm_monitor_firepass import ApiParameters
from ansible.modules.network.f5.bigip_gtm_monitor_firepass import ModuleParameters
from ansible.modules.network.f5.bigip_gtm_monitor_firepass import ModuleManager
from ansible.modules.network.f5.bigip_gtm_monitor_firepass import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
    """Load a test fixture by file name, memoizing results in `fixture_data`.

    The raw file contents are returned as-is unless they parse as JSON, in
    which case the decoded object is returned (and cached) instead.
    """
    path = os.path.join(fixture_path, name)
    if path in fixture_data:
        return fixture_data[path]

    with open(path) as handle:
        contents = handle.read()

    try:
        contents = json.loads(contents)
    except Exception:
        # Not valid JSON; fall back to the raw string contents.
        pass

    fixture_data[path] = contents
    return contents
class TestParameters(unittest.TestCase):
    """Unit tests for the module's parameter-adapter classes."""

    def test_module_parameters(self):
        # Values supplied by a playbook task arrive as strings;
        # ModuleParameters should coerce them to their typed forms.
        args = dict(
            name='foo',
            parent='/Common/my-http',
            max_load_average='60',
            concurrency_limit='70',
            ip='1.1.1.1',
            port='80',
            interval='10',
            timeout='20',
            ignore_down_response=True,
            probe_timeout='30'
        )

        p = ModuleParameters(params=args)
        assert p.name == 'foo'
        assert p.parent == '/Common/my-http'
        assert p.max_load_average == 60
        assert p.concurrency_limit == 70
        # destination is derived from ip + port.
        assert p.destination == '1.1.1.1:80'
        assert p.ip == '1.1.1.1'
        assert p.port == 80
        assert p.interval == 10
        assert p.timeout == 20
        assert p.ignore_down_response is True
        assert p.probe_timeout == 30

    def test_api_parameters(self):
        # ApiParameters adapts the REST payload (JSON fixture) into the same
        # normalized attribute set as ModuleParameters.
        args = load_fixture('load_gtm_monitor_firepass_1.json')

        p = ApiParameters(params=args)
        assert p.name == 'foo'
        assert p.parent == '/Common/firepass_gtm'
        assert p.max_load_average == 12
        assert p.concurrency_limit == 95
        assert p.destination == '1.1.1.1:80'
        assert p.ip == '1.1.1.1'
        assert p.port == 80
        assert p.interval == 30
        assert p.timeout == 90
        assert p.ignore_down_response is True
        assert p.probe_timeout == 5
class TestManager(unittest.TestCase):
    """Tests for ModuleManager with all device/API interactions mocked out."""

    def setUp(self):
        self.spec = ArgumentSpec()

        # Patch module_provisioned so the tests don't require a live BIG-IP
        # with the GTM module enabled; the import path differs between the
        # local library layout and the Ansible source tree.
        try:
            self.p1 = patch('library.modules.bigip_gtm_monitor_firepass.module_provisioned')
            self.m1 = self.p1.start()
            self.m1.return_value = True
        except Exception:
            self.p1 = patch('ansible.modules.network.f5.bigip_gtm_monitor_firepass.module_provisioned')
            self.m1 = self.p1.start()
            self.m1.return_value = True

    def tearDown(self):
        self.p1.stop()

    def test_create_monitor(self, *args):
        set_module_args(dict(
            name='foo',
            ip='10.10.10.10',
            port=80,
            interval=20,
            timeout=30,
            server='localhost',
            password='password',
            user='admin'
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        # exists() is consulted before creation (absent) and after (present).
        mm.exists = Mock(side_effect=[False, True])
        mm.create_on_device = Mock(return_value=True)
        mm.module_provisioned = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True
| gpl-3.0 |
Ted1993/Flasky | venv/lib/python2.7/site-packages/flask/testsuite/appctx.py | 558 | 3116 | # -*- coding: utf-8 -*-
"""
flask.testsuite.appctx
~~~~~~~~~~~~~~~~~~~~~~
Tests the application context.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
from flask.testsuite import FlaskTestCase
class AppContextTestCase(FlaskTestCase):
    """Tests for the application context (``app.app_context()``) behaviour."""

    def test_basic_url_generation(self):
        # url_for() works without a request as long as SERVER_NAME (and the
        # preferred scheme) are configured on the app.
        app = flask.Flask(__name__)
        app.config['SERVER_NAME'] = 'localhost'
        app.config['PREFERRED_URL_SCHEME'] = 'https'

        @app.route('/')
        def index():
            pass

        with app.app_context():
            rv = flask.url_for('index')
            self.assert_equal(rv, 'https://localhost/')

    def test_url_generation_requires_server_name(self):
        # Without SERVER_NAME there is no way to build an external URL.
        app = flask.Flask(__name__)
        with app.app_context():
            with self.assert_raises(RuntimeError):
                flask.url_for('index')

    def test_url_generation_without_context_fails(self):
        # url_for() outside of any context raises.
        with self.assert_raises(RuntimeError):
            flask.url_for('index')

    def test_request_context_means_app_context(self):
        # Pushing a request context implicitly pushes an app context too,
        # and both are popped on exit.
        app = flask.Flask(__name__)
        with app.test_request_context():
            self.assert_equal(flask.current_app._get_current_object(), app)
        self.assert_equal(flask._app_ctx_stack.top, None)

    def test_app_context_provides_current_app(self):
        app = flask.Flask(__name__)
        with app.app_context():
            self.assert_equal(flask.current_app._get_current_object(), app)
        self.assert_equal(flask._app_ctx_stack.top, None)

    def test_app_tearing_down(self):
        # teardown_appcontext callbacks fire (with exc=None) when the context
        # exits cleanly.
        cleanup_stuff = []
        app = flask.Flask(__name__)
        @app.teardown_appcontext
        def cleanup(exception):
            cleanup_stuff.append(exception)

        with app.app_context():
            pass

        self.assert_equal(cleanup_stuff, [None])

    def test_custom_app_ctx_globals_class(self):
        # The `g` object can be swapped by assigning app_ctx_globals_class.
        class CustomRequestGlobals(object):
            def __init__(self):
                self.spam = 'eggs'
        app = flask.Flask(__name__)
        app.app_ctx_globals_class = CustomRequestGlobals
        with app.app_context():
            self.assert_equal(
                flask.render_template_string('{{ g.spam }}'), 'eggs')

    def test_context_refcounts(self):
        # Re-entering already-pushed contexts must not trigger teardown until
        # the outermost pop; teardown order is request first, then app.
        called = []
        app = flask.Flask(__name__)
        @app.teardown_request
        def teardown_req(error=None):
            called.append('request')
        @app.teardown_appcontext
        def teardown_app(error=None):
            called.append('app')
        @app.route('/')
        def index():
            with flask._app_ctx_stack.top:
                with flask._request_ctx_stack.top:
                    pass
            self.assert_true(flask._request_ctx_stack.top.request.environ
                ['werkzeug.request'] is not None)
            return u''
        c = app.test_client()
        c.get('/')
        self.assertEqual(called, ['request', 'app'])
def suite():
    """Assemble the unittest suite for the application-context tests."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(AppContextTestCase))
    return tests
| mit |
sachintaware/sublime-wakatime | packages/wakatime/packages/pytz/reference.py | 839 | 3649 | '''
Reference tzinfo implementations from the Python docs.
Used for testing against as they are only correct for the years
1987 to 2006. Do not use these for real code.
'''
from datetime import tzinfo, timedelta, datetime
from pytz import utc, UTC, HOUR, ZERO
# A class building tzinfo objects for fixed-offset time zones.
# Note that FixedOffset(0, "UTC") is a different way to build a
# UTC tzinfo object.
class FixedOffset(tzinfo):
    """tzinfo with a fixed offset (in minutes east of UTC) and a fixed name."""

    def __init__(self, offset, name):
        # The offset never varies with the datetime, so compute it once.
        self.__offset = timedelta(minutes=offset)
        self.__name = name

    def utcoffset(self, dt):
        return self.__offset

    def dst(self, dt):
        # A fixed-offset zone has no daylight-saving component.
        return ZERO

    def tzname(self, dt):
        return self.__name
# A class capturing the platform's idea of local time.
import time as _time

# Standard-time offset: time.timezone is seconds *west* of UTC, so negate it
# to get an east-of-UTC timedelta.
STDOFFSET = timedelta(seconds = -_time.timezone)
if _time.daylight:
    # The platform reports a DST zone; altzone is the DST offset (west of UTC).
    DSTOFFSET = timedelta(seconds = -_time.altzone)
else:
    DSTOFFSET = STDOFFSET

# How far the clock shifts when DST is in effect (usually one hour).
DSTDIFF = DSTOFFSET - STDOFFSET
class LocalTimezone(tzinfo):
    """tzinfo that mirrors the platform's local-time rules via the time module."""

    def utcoffset(self, dt):
        if self._isdst(dt):
            return DSTOFFSET
        else:
            return STDOFFSET

    def dst(self, dt):
        if self._isdst(dt):
            return DSTDIFF
        else:
            return ZERO

    def tzname(self, dt):
        # tzname is a (standard, dst) pair; index it with the DST flag.
        return _time.tzname[self._isdst(dt)]

    def _isdst(self, dt):
        # Round-trip the naive datetime through mktime/localtime so the
        # platform's own DST rules decide whether it falls in daylight time
        # (tm_isdst = -1 asks mktime to figure it out).
        tt = (dt.year, dt.month, dt.day,
              dt.hour, dt.minute, dt.second,
              dt.weekday(), 0, -1)
        stamp = _time.mktime(tt)
        tt = _time.localtime(stamp)
        return tt.tm_isdst > 0

# Singleton instance representing the platform's local zone.
Local = LocalTimezone()
# A complete implementation of current DST rules for major US time zones.
def first_sunday_on_or_after(dt):
    """Return the first Sunday falling on or after the given datetime."""
    # weekday(): Monday == 0 ... Sunday == 6.
    gap = 6 - dt.weekday()
    return dt + timedelta(gap) if gap else dt
# In the US, DST starts at 2am (standard time) on the first Sunday in April.
DSTSTART = datetime(1, 4, 1, 2)
# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct.
# which is the first Sunday on or after Oct 25.
DSTEND = datetime(1, 10, 25, 1)
class USTimeZone(tzinfo):
    """US time zone implementing the 1987-2006 DST rules.

    DST runs from 2am (standard time) on the first Sunday in April to
    2am (DST) on the last Sunday in October; not correct outside that era.
    """

    def __init__(self, hours, reprname, stdname, dstname):
        self.stdoffset = timedelta(hours=hours)
        self.reprname = reprname
        self.stdname = stdname
        self.dstname = dstname

    def __repr__(self):
        return self.reprname

    def tzname(self, dt):
        if self.dst(dt):
            return self.dstname
        else:
            return self.stdname

    def utcoffset(self, dt):
        return self.stdoffset + self.dst(dt)

    def dst(self, dt):
        if dt is None or dt.tzinfo is None:
            # An exception may be sensible here, in one or both cases.
            # It depends on how you want to treat them. The default
            # fromutc() implementation (called by the default astimezone()
            # implementation) passes a datetime with dt.tzinfo is self.
            return ZERO
        assert dt.tzinfo is self

        # Find first Sunday in April & the last in October.
        start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year))
        end = first_sunday_on_or_after(DSTEND.replace(year=dt.year))

        # Can't compare naive to aware objects, so strip the timezone from
        # dt first.
        if start <= dt.replace(tzinfo=None) < end:
            return HOUR
        else:
            return ZERO

# Concrete zone singletons for the four contiguous-US time zones.
Eastern = USTimeZone(-5, "Eastern", "EST", "EDT")
Central = USTimeZone(-6, "Central", "CST", "CDT")
Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
Pacific = USTimeZone(-8, "Pacific", "PST", "PDT")
| bsd-3-clause |
mattcaldwell/django-cms | cms/migrations/0030_limit_visibility_in_menu_step3of3.py | 385 | 19523 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Intentional no-op ("dummy") migration: presumably the schema work
        # happened in steps 1 and 2 of this three-part migration, and this
        # step exists only to keep the numbered sequence contiguous.
        pass
    def backwards(self, orm):
        # Intentional no-op: nothing to undo since forwards() changes nothing.
        pass
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [],
{'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Permission']", 'symmetrical': 'False',
'blank': 'True'})
},
'auth.permission': {
'Meta': {
'ordering': "('content_type__app_label', 'content_type__model', 'codename')",
'unique_together': "(('content_type', 'codename'),)",
'object_name': 'Permission'},
'codename': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['contenttypes.ContentType']"}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [],
{'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [],
{'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Group']", 'symmetrical': 'False',
'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [],
{'max_length': '30', 'blank': 'True'}),
'password': (
'django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': (
'django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Permission']", 'symmetrical': 'False',
'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [],
{'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [],
{'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.CMSPlugin']", 'null': 'True',
'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [],
{'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [],
{'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'})
},
'cms.globalpagepermission': {
'Meta': {'object_name': 'GlobalPagePermission'},
'can_add': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_moderate': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recover_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [],
{'symmetrical': 'False', 'to': "orm['sites.Site']",
'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('site', 'tree_id', 'lft')",
'object_name': 'Page'},
'changed_by': (
'django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'blank': 'True'}),
'created_by': (
'django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now_add': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [],
{'default': 'True', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'limit_visibility_in_menu': (
'django.db.models.fields.SmallIntegerField', [],
{'default': 'None', 'null': 'True', 'db_index': 'True',
'blank': 'True'}),
'login_required': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderator_state': ('django.db.models.fields.SmallIntegerField', [],
{'default': '1', 'blank': 'True'}),
'navigation_extenders': ('django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '80',
'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [],
{'blank': 'True', 'related_name': "'children'",
'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['cms.Placeholder']",
'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [],
{'db_index': 'True', 'null': 'True',
'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [],
{'db_index': 'True', 'null': 'True',
'blank': 'True'}),
'published': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [],
{'default': 'True', 'db_index': 'True'}),
'publisher_public': (
'django.db.models.fields.related.OneToOneField', [],
{'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True',
'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [],
{'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '40', 'null': 'True',
'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [],
{'default': 'False', 'db_index': 'True'}),
'template': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'})
},
'cms.pagemoderator': {
'Meta': {'object_name': 'PageModerator'},
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderate_children': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderate_descendants': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderate_page': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.User']"})
},
'cms.pagemoderatorstate': {
'Meta': {'ordering': "('page', 'action', '-created')",
'object_name': 'PageModeratorState'},
'action': ('django.db.models.fields.CharField', [],
{'max_length': '3', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [],
{'auto_now_add': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [],
{'default': "''", 'max_length': '1000', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.User']", 'null': 'True'})
},
'cms.pagepermission': {
'Meta': {'object_name': 'PagePermission'},
'can_add': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_moderate': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grant_on': (
'django.db.models.fields.IntegerField', [], {'default': '5'}),
'group': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.pageuser': {
'Meta': {'object_name': 'PageUser', '_ormbases': ['auth.User']},
'created_by': ('django.db.models.fields.related.ForeignKey', [],
{'related_name': "'created_users'",
'to': "orm['auth.User']"}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [],
{'to': "orm['auth.User']", 'unique': 'True',
'primary_key': 'True'})
},
'cms.pageusergroup': {
'Meta': {'object_name': 'PageUserGroup', '_ormbases': ['auth.Group']},
'created_by': ('django.db.models.fields.related.ForeignKey', [],
{'related_name': "'created_usergroups'",
'to': "orm['auth.User']"}),
'group_ptr': ('django.db.models.fields.related.OneToOneField', [],
{'to': "orm['auth.Group']", 'unique': 'True',
'primary_key': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': (
'django.db.models.fields.PositiveSmallIntegerField', [],
{'null': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [],
{'max_length': '50', 'db_index': 'True'})
},
'cms.title': {
'Meta': {'unique_together': "(('language', 'page'),)",
'object_name': 'Title'},
'application_urls': ('django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '200',
'null': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'has_url_overwrite': ('django.db.models.fields.BooleanField', [],
{'default': 'False', 'db_index': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [],
{'max_length': '15', 'db_index': 'True'}),
'menu_title': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True', 'blank': 'True'}),
'meta_description': ('django.db.models.fields.TextField', [],
{'max_length': '255', 'null': 'True',
'blank': 'True'}),
'meta_keywords': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True',
'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'related_name': "'title_set'", 'to': "orm['cms.Page']"}),
'page_title': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'db_index': 'True'}),
'redirect': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': (
'django.db.models.fields.SlugField', [], {'max_length': '255'}),
'title': (
'django.db.models.fields.CharField', [], {'max_length': '255'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)",
'unique_together': "(('app_label', 'model'),)",
'object_name': 'ContentType',
'db_table': "'django_content_type'"},
'app_label': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site',
'db_table': "'django_site'"},
'domain': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['cms']
| bsd-3-clause |
ocefpaf/conda-smithy | tests/test_variant_algebra.py | 2 | 11511 | import pytest
from textwrap import dedent
from conda_smithy.variant_algebra import parse_variant, variant_add
tv1 = parse_variant(
"""\
foo:
- 1.10
bar:
- 2
"""
)
tv2 = parse_variant(
"""\
foo:
- 1.2
bar:
- 3
"""
)
tv3 = parse_variant(
"""\
baz:
- 1
bar:
- 3
"""
)
tv4 = parse_variant(
"""\
baz:
- 1
bar:
- 0
- 6
"""
)
def test_add():
    # NOTE(review): smoke test only -- it verifies that variant_add() accepts
    # these fixture combinations without raising, but asserts nothing about
    # the merged results.  TODO: pin down the expected outputs.
    variant_add(tv1, tv2)

    variant_add(tv1, tv3)

    variant_add(tv2, tv3)

    # Nested application: later migrations layered on earlier merges.
    variant_add(tv1, variant_add(tv2, tv3))

    variant_add(tv1, tv4)

    variant_add(tv4, tv1)
def test_ordering():
    """An explicit ``__migrator.ordering`` overrides the default comparison,
    so 'gcc' is treated as newer than 'toolchain' and wins the merge.
    (Leftover debug scaffolding -- a stray print and a commented-out raise --
    was removed.)"""
    start = parse_variant(
        dedent(
            """\
            c_compiler:
                - toolchain
            """
        )
    )

    mig_compiler = parse_variant(
        dedent(
            """\
            __migrator:
                ordering:
                    c_compiler:
                        - toolchain
                        - gcc
            c_compiler:
                - gcc
            """
        )
    )

    res = variant_add(start, mig_compiler)

    assert res["c_compiler"] == ["gcc"]
def test_no_ordering():
    """Without an ``ordering`` section a versioned migration simply replaces
    the existing value.  (Stray debug print and commented-out raise removed.)"""
    start = parse_variant(
        dedent(
            """\
            xyz:
                - 1
            """
        )
    )

    mig_compiler = parse_variant(
        dedent(
            """\
            __migrator:
                kind:
                    version
                migration_no:
                    1
            xyz:
                - 2
            """
        )
    )

    res = variant_add(start, mig_compiler)

    assert res["xyz"] == ["2"]
def test_ordering_downgrade():
    """An ``ordering`` section can declare an older version as "later",
    letting a migration downgrade jpeg from 3.0 to 2.0."""
    base = parse_variant(
        dedent(
            """\
            jpeg:
                - 3.0
            """
        )
    )
    migration = parse_variant(
        dedent(
            """\
            __migrator:
                ordering:
                    jpeg:
                        - 3.0
                        - 2.0
            jpeg:
                - 2.0
            """
        )
    )
    merged = variant_add(base, migration)
    assert merged["jpeg"] == ["2.0"]
    print(merged)
def test_ordering_space():
    """A value with a build-string qualifier ('2.7 *_cpython') supersedes the
    bare version ('2.7') when merged."""
    base = parse_variant(
        dedent(
            """\
            python:
                - 2.7
            """
        )
    )
    migration = parse_variant(
        dedent(
            """\
            python:
                - 2.7 *_cpython
            """
        )
    )
    merged = variant_add(base, migration)
    assert merged["python"] == ["2.7 *_cpython"]
    print(merged)
def test_new_pinned_package():
    """A migration can introduce a brand-new pinned package: both its version
    list and its pin_run_as_build entry appear in the merge result."""
    base = parse_variant(
        dedent(
            """\
            pin_run_as_build:
                jpeg:
                    max_pin: x
            jpeg:
                - 3.0
            """
        )
    )
    migration = parse_variant(
        dedent(
            """\
            pin_run_as_build:
                gprc-cpp:
                    max_pin: x.x
            gprc-cpp:
                - 1.23
            """
        )
    )
    merged = variant_add(base, migration)
    assert merged["gprc-cpp"] == ["1.23"]
    assert merged["pin_run_as_build"]["gprc-cpp"]["max_pin"] == "x.x"
    print(merged)
def test_zip_keys():
    """zip_keys groups from both sides are merged: groups sharing a member
    are combined, distinct groups are kept side by side."""
    base = parse_variant(
        dedent(
            """\
            zip_keys:
                -
                    - vc
                    - python
                -
                    - qt
                    - pyqt
            """
        )
    )
    migration = parse_variant(
        dedent(
            """\
            zip_keys:
                -
                    - python
                    - vc
                    - vc_runtime
                -
                    - root
                    - c_compiler
            """
        )
    )
    merged = variant_add(base, migration)
    print(merged)
    assert len(merged["zip_keys"]) == 3
    assert ["python", "vc", "vc_runtime"] in merged["zip_keys"]
def test_migrate_windows_compilers():
    """Migrating vs2015 -> vs2017 replaces the compiler list position-wise
    while keeping the zipped vc values aligned."""
    base = parse_variant(
        dedent(
            """
            c_compiler:
                - vs2008
                - vs2015
            vc:
                - '9'
                - '14'
            zip_keys:
                - - vc
                  - c_compiler
            """
        )
    )
    migration = parse_variant(
        dedent(
            """
            c_compiler:
                - vs2008
                - vs2017
            vc:
                - '9'
                - '14.1'
            """
        )
    )
    merged = variant_add(base, migration)
    print(merged)
    assert len(merged["c_compiler"]) == 2
    assert merged["c_compiler"] == ["vs2008", "vs2017"]
    assert len(merged["zip_keys"][0]) == 2
def test_pin_run_as_build():
    """pin_run_as_build mappings from both sides are unioned, keyed by
    package name (python, boost-cpp, rust -> three entries)."""
    base = parse_variant(
        dedent(
            """\
            pin_run_as_build:
                python:
                    max_pin: x.x
                boost-cpp:
                    max_pin: x
            """
        )
    )
    migration = parse_variant(
        dedent(
            """\
            pin_run_as_build:
                boost-cpp:
                    max_pin: x.x
                rust:
                    max_pin: x
            """
        )
    )
    merged = variant_add(base, migration)
    print(merged)

    assert len(merged["pin_run_as_build"]) == 3
def test_py39_migration():
    """Test that running the python 3.9 keyadd migrator has the desired effect.

    Two scenarios are exercised:
      * applying the py39 ``key_add`` migrator on top of the pypy migration
        inserts 3.9 (and the matching zipped numpy/python_impl rows) at the
        position dictated by ``__migrator.ordering``;
      * applying it directly to the base also works, and does not invent a
        numpy key that the base never declared.
    """
    base = parse_variant(
        dedent(
            """
            python:
              - 3.6.* *_cpython   # [not (osx and arm64)]
              - 3.7.* *_cpython   # [not (osx and arm64)]
              - 3.8.* *_cpython
            python_impl:
              - cpython
            zip_keys:
              -
                - python
              -                             # ["linux-64"]
                - cuda_compiler_version     # ["linux-64"]
                - docker_image              # ["linux-64"]
            """
        )
    )
    migration_pypy = parse_variant(
        dedent(
            """
            python:
              - 3.6.* *_cpython   # [not (osx and arm64)]
              - 3.7.* *_cpython   # [not (osx and arm64)]
              - 3.8.* *_cpython
              - 3.6.* *_73_pypy   # [not (win64 or (osx and arm64))]
            numpy:
              - 1.16    # [not (osx and arm64)]
              - 1.16    # [not (osx and arm64)]
              - 1.16
              - 1.18    # [not (win64 or (osx and arm64))]
            python_impl:
              - cpython   # [not (osx and arm64)]
              - cpython   # [not (osx and arm64)]
              - cpython
              - pypy    # [not (win64 or (osx and arm64))]
            zip_keys:
              -
                - python
                - numpy
                - python_impl
            """
        )
    )
    migration_py39 = parse_variant(
        dedent(
            """
            __migrator:
                operation: key_add
                primary_key: python
                ordering:
                    python:
                        - 3.6.* *_cpython
                        - 3.9.* *_cpython  # new entry
                        - 3.7.* *_cpython
                        - 3.8.* *_cpython
                        - 3.6.* *_73_pypy
            python:
              - 3.9.* *_cpython
            # additional entries to add for zip_keys
            numpy:
              - 1.100
            python_impl:
              - cpython
            """
        )
    )
    res = variant_add(base, migration_pypy)
    res2 = variant_add(res, migration_py39)
    print(res)
    print(res2)
    # the ordering of the final python list equals the migrator's ordering
    assert res2["python"] == migration_py39["__migrator"]["ordering"]["python"]
    # assert that we've ordered the numpy bits properly
    assert res2["numpy"] == [
        "1.16",
        "1.100",
        "1.16",
        "1.16",
        "1.18",
    ]
    res3 = variant_add(base, migration_py39)
    print(res3)
    assert res3["python"] == [
        "3.6.* *_cpython",
        "3.9.* *_cpython",  # newly added
        "3.7.* *_cpython",
        "3.8.* *_cpython",
    ]
    # The base doesn't have an entry for numpy
    assert "numpy" not in res3
def test_variant_key_remove():
    """A ``key_remove`` migrator drops one python row; the zipped numpy and
    python_impl rows are dropped with it and the remaining rows are
    re-sorted per the migrator's ordering."""
    base = parse_variant(
        dedent(
            """
            python:
              - 3.6.* *_cpython
              - 3.8.* *_cpython
              - 3.6.* *_73_pypy
            numpy:
              - 1.16
              - 1.16
              - 1.18
            python_impl:
              - cpython
              - cpython
              - pypy
            zip_keys:
              -
                - python
                - numpy
                - python_impl
            """
        )
    )
    removal = parse_variant(
        dedent(
            """
            __migrator:
                operation: key_remove
                primary_key: python
                ordering:
                    python:
                        - 3.6.* *_73_pypy
                        - 3.6.* *_cpython
                        - 3.7.* *_cpython
                        - 3.8.* *_cpython
                        - 3.9.* *_cpython
            python:
              - 3.6.* *_cpython
            """
        )
    )
    res = variant_add(base, removal)
    assert res["python"] == ["3.6.* *_73_pypy", "3.8.* *_cpython"]
    assert res["numpy"] == ["1.18", "1.16"]
    assert res["python_impl"] == ["pypy", "cpython"]
@pytest.mark.parametrize(
    "platform,arch", [["osx", "64"], ["osx", "arm64"], ["linux", "64"]]
)
def test_variant_remove_add(platform, arch):
    """``key_remove`` + ``key_add`` must commute with the equivalent
    selector-guarded removal: both paths are asserted to yield identical
    variants on every (platform, arch) combination."""
    from conda_build.config import Config
    config = Config(platform=platform, arch=arch)
    base = parse_variant(
        dedent(
            """
            python:
              - 3.7.* *_cpython   # [not (osx and arm64)]
              - 3.8.* *_cpython
              - 3.6.* *_73_pypy   # [not (win64 or (osx and arm64))]
            numpy:
              - 1.16    # [not (osx and arm64)]
              - 1.16
              - 1.18    # [not (win64 or (osx and arm64))]
            python_impl:
              - cpython   # [not (osx and arm64)]
              - cpython
              - pypy    # [not (win64 or (osx and arm64))]
            zip_keys:
              -
                - python
                - numpy
                - python_impl
            """
        ),
        config=config,
    )
    remove = parse_variant(
        dedent(
            """
            __migrator:
                operation: key_remove
                primary_key: python
            python:
              - 3.8.* *_cpython
            """
        ),
        config=config,
    )
    remove2 = parse_variant(
        dedent(
            """
            __migrator:
                operation: key_remove
                primary_key: python
            python:
              - 3.8.* *_cpython    # [(osx and arm64)]
            """
        ),
        config=config,
    )
    add = parse_variant(
        dedent(
            """
            __migrator:
                operation: key_add
                primary_key: python
            python:
              - 3.8.* *_cpython    # [not (osx and arm64)]
            numpy:
              - 1.16    # [not (osx and arm64)]
            python_impl:
              - cpython   # [not (osx and arm64)]
            """
        ),
        config=config,
    )
    add_py39 = parse_variant(
        dedent(
            """
            __migrator:
                operation: key_add
                primary_key: python
            python:
              - 3.9.* *_cpython
            # additional entries to add for zip_keys
            numpy:
              - 1.100
            python_impl:
              - cpython
            """
        )
    )
    res = variant_add(base, remove)
    res = variant_add(res, add)
    res = variant_add(res, add_py39)
    print(res["python"])
    print(res["numpy"])
    # alternatively we could just remove py38_osx-arm64 and then add py39
    res2 = variant_add(base, remove2)
    res2 = variant_add(res2, add_py39)
    assert res2 == res
    if (platform, arch) == ("osx", "arm64"):
        assert res["python"] == ["3.9.* *_cpython"]
    elif (platform, arch) in {("osx", "64"), ("linux", "64")}:
        assert res["python"] == [
            "3.6.* *_73_pypy",
            "3.7.* *_cpython",
            "3.8.* *_cpython",
            "3.9.* *_cpython",
        ]
    else:
        raise RuntimeError("Should have a check")
| bsd-3-clause |
denys-duchier/Scolar | scolog.py | 1 | 2495 | # -*- mode: python -*-
# -*- coding: iso8859-15 -*-
##############################################################################
#
# Gestion scolarite IUT
#
# Copyright (c) 2001 - 2013 Emmanuel Viennet. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Emmanuel Viennet emmanuel.viennet@viennet.net
#
##############################################################################
import pdb,os,sys
from sco_exceptions import *
from notesdb import *
from notes_log import retreive_request
def logdb(REQUEST=None, cnx=None, method=None, etudid=None, msg=None, commit=True):
    """Record one event in the scolog audit table.

    When no REQUEST is given, one is recovered from the call stack so the
    acting user and remote host can still be logged.  The row is committed
    unless commit is false (caller manages the transaction).
    """
    if not cnx:
        raise ValueError('logdb: cnx is None')
    if not REQUEST:
        REQUEST = retreive_request(skip=1)
    # request-derived fields default to None when no request is available
    entry = {
        'authenticated_user': str(REQUEST.AUTHENTICATED_USER) if REQUEST else None,
        'remote_addr': REQUEST.REMOTE_ADDR if REQUEST else None,
        'remote_host': REQUEST.REMOTE_HOST if REQUEST else None,
        'method': method,
        'etudid': etudid,
        'msg': msg,
    }
    quote_dict(entry)
    cursor = cnx.cursor(cursor_factory=ScoDocCursor)
    cursor.execute('insert into scolog (authenticated_user,remote_addr,remote_host,method,etudid,msg) values (%(authenticated_user)s,%(remote_addr)s,%(remote_host)s,%(method)s,%(etudid)s,%(msg)s)', entry)
    if commit:
        cnx.commit()
def loglist(cnx, method=None, authenticated_user=None):
    """List of events logged for these method and user"""
    criteria = {'method': method, 'authenticated_user': authenticated_user}
    cursor = cnx.cursor(cursor_factory=ScoDocCursor)
    cursor.execute(
        'select * from scolog where method=%(method)s and authenticated_user=%(authenticated_user)s',
        criteria)
    return cursor.dictfetchall()
| gpl-2.0 |
gunan/tensorflow | tensorflow/python/ops/nccl_ops_test.py | 4 | 7068 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for nccl ops. See also the cc test for nccl_communicator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from functools import partial
import numpy as np
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import nccl_ops
from tensorflow.python.platform import test
def _DeviceTensors(tensors, devices):
  """Pin each tensor to the matching device via an identity op."""
  placed = []
  for tensor, device in zip(tensors, devices):
    with ops.device(device):
      placed.append(array_ops.identity(tensor))
  return placed
def _NcclAllReduce(nccl_fun, tensors, devices):
  """Apply an nccl all-reduce op to device-pinned copies of tensors."""
  placed = _DeviceTensors(tensors, devices)
  return nccl_fun(placed)
def _NcclReduce(nccl_fun, tensors, devices):
  """Reduce tensors onto one randomly chosen receiver device."""
  receiver_idx = np.random.randint(0, len(devices))
  with ops.device(devices[receiver_idx]):
    return [nccl_fun(_DeviceTensors(tensors, devices))]
def _NcclBroadcast(tensors, devices):
  """Broadcast tensors[0] from a randomly chosen sender to all devices."""
  sender_idx = np.random.randint(0, len(devices))
  with ops.device(devices[sender_idx]):
    source = array_ops.identity(tensors[0])
    broadcast = nccl_ops.broadcast(source)
  return _DeviceTensors([broadcast] * len(devices), devices)
class NcclTestCase(test.TestCase):
  """Shared harness that checks nccl collective ops against numpy results."""
  def _Test(self,
            nccl_reduce,
            numpy_fn,
            device_sets=(['/device:GPU:1', '/device:GPU:2', '/device:GPU:0'],
                         ['/device:GPU:1', '/device:GPU:0'])):
    """Tests that nccl_reduce does the same as reduction with numpy_fn.

    Args:
      nccl_reduce: A function taking a list of tensors and a list of devices,
        and returns a list of reduced tensors and a list of ops to perform the
        reduction.
      numpy_fn: A function taking two tensors and returning the reduction of the
        two.
      device_sets: Tuple of virtual devices to run test on.
    """
    for dtype in [np.float16, np.float32, np.int32, np.int64, np.float64]:
      # Create session inside outer loop to test use of
      # same communicator across multiple sessions.
      with self.test_session(use_gpu=True) as sess:
        for devices in device_sets:
          shape = (3, 4)
          random = (np.random.random_sample(shape) - .5) * 1024
          tensors = []
          for _ in devices:
            tensors.append(random.astype(dtype))
          # fold the identical inputs with numpy to get the expected answer
          np_ans = tensors[0]
          for t in tensors[1:]:
            np_ans = numpy_fn(np_ans, t)
          reduce_tensors = nccl_reduce(tensors, devices)
          self.assertNotEmpty(reduce_tensors)
          # Test shape inference.
          for r in reduce_tensors:
            self.assertEqual(shape, r.get_shape())
          result_tensors = [array_ops.identity(t) for t in reduce_tensors]
          # Check GPU availability *after* creating session, see b/68975239.
          if not test.is_gpu_available():
            # If no GPU is available, only test graph construction.
            continue
          # Test execution and results.
          for t in self.evaluate(result_tensors):
            self.assertAllClose(t, np_ans)
  def _TestGradient(self, nccl_reduce, numpy_fn):
    """Tests the gradient of nccl_reduce.

    Args:
      nccl_reduce: A function taking a list of tensors and a list of devices,
        and returns a list of reduced tensors and a list of ops to perform the
        reduction.
      numpy_fn: A function taking two tensors and returning the gradient of the
        reduction of the two.
    """
    def _Gradient(tensors, devices):
      # feed fresh placeholders so gradients flow back to the inputs
      inputs = [array_ops.placeholder(t.dtype, t.shape) for t in tensors]
      reduce_tensors = nccl_reduce(inputs, devices)
      losses = _DeviceTensors(tensors, [t.device for t in reduce_tensors])
      grads = gradients.gradients(
          reduce_tensors, inputs, losses, colocate_gradients_with_ops=True)
      return [g for g in grads if g is not None]
    self._Test(_Gradient, numpy_fn)
class AllReduceTest(NcclTestCase):
  """Covers all_sum/all_prod/all_min/all_max, the all_sum gradient and
  the argument-validation error paths."""
  def testAllReduce(self):
    self._Test(partial(_NcclAllReduce, nccl_ops.all_sum), lambda x, y: x + y)
    self._Test(partial(_NcclAllReduce, nccl_ops.all_prod), lambda x, y: x * y)
    self._Test(partial(_NcclAllReduce, nccl_ops.all_min), np.minimum)
    self._Test(partial(_NcclAllReduce, nccl_ops.all_max), np.maximum)
  def testAllSumGrad(self):
    self._TestGradient(
        partial(_NcclAllReduce, nccl_ops.all_sum), lambda x, y: x + y)
  def testErrors(self):
    # device placement is mandatory, and at least one tensor is required
    with self.assertRaisesRegexp(ValueError, 'Device assignment required'):
      nccl_ops.all_sum([array_ops.identity(np.random.random_sample((3, 4)))])
    with self.assertRaisesRegexp(ValueError, 'Must pass >0 tensors'):
      nccl_ops.all_sum([])
class SingleReduceTest(NcclTestCase):
  """Covers reduce_sum, where a single (randomly picked) device receives."""
  def testSum(self):
    self._Test(partial(_NcclReduce, nccl_ops.reduce_sum), lambda x, y: x + y)
  def testSumGrad(self):
    self._TestGradient(partial(_NcclReduce, nccl_ops.reduce_sum),
                       lambda x, y: x)
class BroadcastTest(NcclTestCase):
  """Covers broadcast, including single-device and unsupported-CPU cases."""
  def testBroadcast(self):
    self._Test(_NcclBroadcast, lambda x, y: x)
  def testBroadcastSingleDevice(self):
    # Broadcasts on a single device are removed completely during rewrite.
    self._Test(_NcclBroadcast, lambda x, y: x,
               (['/device:GPU:0', '/device:GPU:0'],))
  def testBroadcastToCpuError(self):
    try:
      # Broadcasts to CPU is not supported.
      self._Test(_NcclBroadcast, lambda x, y: x,
                 (['/device:GPU:0', '/device:CPU:0'],))
    except errors.NotFoundError as e:
      self.assertRegexpMatches(
          str(e), "No registered '_NcclBroadcastRecv' OpKernel for CPU devices")
    else:
      # Session isn't executed when no GPU is available.
      if test.is_gpu_available():
        self.fail("Didn't raise NotFoundError trying to broadcast to CPU")
class CombinedTest(NcclTestCase):
  """Test all-reduce vs. single-reduce plus broadcast in one session.run."""
  def _Combined(self, tensors, devices):
    # build both reduction strategies into the same graph so they share
    # one session.run
    all_reduce_tensors = _NcclAllReduce(nccl_ops.all_sum, tensors, devices)
    single_reduce_tensors = _NcclReduce(nccl_ops.reduce_sum, tensors, devices)
    broadcast_tensors = _NcclBroadcast(single_reduce_tensors, devices)
    return all_reduce_tensors + broadcast_tensors
  def testCombined(self):
    self._Test(self._Combined, lambda x, y: x + y)
if __name__ == '__main__':
  # standard TensorFlow test entry point
  test.main()
| apache-2.0 |
ifarup/tc1-82 | webapi/description.py | 2 | 2822 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
description: Generate html description strings for the tc1_97 package.
Copyright (C) 2019 Ivar Farup
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import tc1_97.description as tc
from webapi.utils import list_to_ndarray
def xyz(data, heading, options, include_head=False):
    """Build the XYZ description html; lists in data are converted first."""
    payload = data.copy()
    list_to_ndarray(payload)
    return tc.XYZ(payload, heading, options, include_head)
def xy(data, heading, options, include_head=False):
    """Build the xyz chromaticity description html from converted data."""
    payload = data.copy()
    list_to_ndarray(payload)
    return tc.xyz(payload, heading, options, include_head)
def lms(data, heading, options, include_head=False):
    """Build the LMS description html from converted data."""
    payload = data.copy()
    list_to_ndarray(payload)
    return tc.LMS(payload, heading, options, include_head)
def lms_base(data, heading, options, include_head=False):
    """Build the LMS-base description html from converted data."""
    payload = data.copy()
    list_to_ndarray(payload)
    return tc.LMS_base(payload, heading, options, include_head)
def bm(data, heading, options, include_head=False):
    """Build the lms_mb description html from converted data."""
    payload = data.copy()
    list_to_ndarray(payload)
    return tc.lms_mb(payload, heading, options, include_head)
def lm(data, heading, options, include_head=False):
    """Build the lms_mw description html from converted data."""
    payload = data.copy()
    list_to_ndarray(payload)
    return tc.lms_mw(payload, heading, options, include_head)
def xyz31(data, heading, options, include_head=False):
    """Build the XYZ (CIE 1931) description html from converted data."""
    payload = data.copy()
    list_to_ndarray(payload)
    return tc.XYZ31(payload, heading, options, include_head)
def xyz64(data, heading, options, include_head=False):
    """Build the XYZ (CIE 1964) description html from converted data."""
    payload = data.copy()
    list_to_ndarray(payload)
    return tc.XYZ64(payload, heading, options, include_head)
def xy31(data, heading, options, include_head=False):
    """Build the xyz (CIE 1931) chromaticity description html."""
    payload = data.copy()
    list_to_ndarray(payload)
    return tc.xyz31(payload, heading, options, include_head)
def xy64(data, heading, options, include_head=False):
    """Build the xyz (CIE 1964) chromaticity description html."""
    payload = data.copy()
    list_to_ndarray(payload)
    return tc.xyz64(payload, heading, options, include_head)
def xyz_purples(data, heading, options, include_head=False):
    """Build the XYZ purple-line description html from converted data."""
    payload = data.copy()
    list_to_ndarray(payload)
    return tc.XYZ_purples(payload, heading, options, include_head)
def xy_purples(data, heading, options, include_head=False):
    """Build the xyz purple-line chromaticity description html."""
    payload = data.copy()
    list_to_ndarray(payload)
    return tc.xyz_purples(payload, heading, options, include_head)
| gpl-3.0 |
alxgu/ansible | lib/ansible/module_utils/network/nso/nso.py | 37 | 30015 | # -*- coding: utf-8 -*-
# Copyright: (c) 2017, Cisco and/or its affiliates.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.urls import open_url
from ansible.module_utils._text import to_text
import json
import re
import socket
# Python 2/3 compatibility: on Python 3 the builtin name `unicode` does not
# exist, so alias it to `str` and remember which interpreter we are on.
try:
    unicode
    HAVE_UNICODE = True
except NameError:
    unicode = str
    HAVE_UNICODE = False
# Connection options shared by every NSO module; the credentials fall back
# to the standard Ansible network environment variables.
nso_argument_spec = dict(
    url=dict(type='str', required=True),
    username=dict(type='str', required=True, fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    password=dict(type='str', required=True, no_log=True, fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD'])),
    timeout=dict(type='int', default=300),
    validate_certs=dict(type='bool', default=False)
)
class State(object):
    """Symbolic names for the states a value or service can be driven to."""
    SET = 'set'
    PRESENT = 'present'
    ABSENT = 'absent'
    CHECK_SYNC = 'check-sync'
    DEEP_CHECK_SYNC = 'deep-check-sync'
    IN_SYNC = 'in-sync'
    DEEP_IN_SYNC = 'deep-in-sync'

    # Derived from the constants above (instead of repeating the literals)
    # so the tuple can never drift out of sync with them.
    SYNC_STATES = (CHECK_SYNC, DEEP_CHECK_SYNC, IN_SYNC, DEEP_IN_SYNC)
class ModuleFailException(Exception):
    """Raised when an NSO module must abort; carries the failure message."""
    def __init__(self, message):
        self.message = message
        super(ModuleFailException, self).__init__(message)
class NsoException(Exception):
    """Raised for NSO-side failures; keeps the raw JSON-RPC error object."""
    def __init__(self, message, error):
        self.message = message
        self.error = error
        super(NsoException, self).__init__(message)
class JsonRpc(object):
    """Minimal JSON-RPC 2.0 client for the Cisco NSO JSON-RPC API.

    Wraps login/logout, transaction handling and the data operations used
    by the NSO Ansible modules.  One transaction handle per mode ('read'
    and 'read_write') is cached and reused across calls.
    """
    def __init__(self, url, timeout, validate_certs):
        self._url = url
        self._timeout = timeout
        self._validate_certs = validate_certs
        self._id = 0  # JSON-RPC request id, incremented on every call
        self._trans = {}  # cached transaction handles, keyed by mode
        self._headers = {'Content-Type': 'application/json'}
        self._conn = None
        self._system_settings = {}  # cache for get_system_setting lookups
    def login(self, user, passwd):
        """Authenticate and keep the session cookie for subsequent calls."""
        payload = {
            'method': 'login',
            'params': {'user': user, 'passwd': passwd}
        }
        resp, resp_json = self._call(payload)
        self._headers['Cookie'] = resp.headers['set-cookie']
    def logout(self):
        """Terminate the NSO session."""
        payload = {'method': 'logout', 'params': {}}
        self._call(payload)
    def get_system_setting(self, setting):
        """Return a server system setting (e.g. 'version'), cached per client."""
        if setting not in self._system_settings:
            payload = {'method': 'get_system_setting', 'params': {'operation': setting}}
            resp, resp_json = self._call(payload)
            self._system_settings[setting] = resp_json['result']
        return self._system_settings[setting]
    def new_trans(self, **kwargs):
        """Create a new transaction and return its handle (th)."""
        payload = {'method': 'new_trans', 'params': kwargs}
        resp, resp_json = self._call(payload)
        return resp_json['result']['th']
    def get_trans(self, mode):
        """Return the cached transaction handle for mode, creating one if needed."""
        if mode not in self._trans:
            th = self.new_trans(mode=mode)
            self._trans[mode] = th
        return self._trans[mode]
    def delete_trans(self, th):
        """Delete transaction th on the server and drop it from the cache."""
        payload = {'method': 'delete_trans', 'params': {'th': th}}
        resp, resp_json = self._call(payload)
        self._maybe_delete_trans(th)
    def validate_trans(self, th):
        """Validate transaction th and return the server's result."""
        payload = {'method': 'validate_trans', 'params': {'th': th}}
        resp, resp_json = self._write_call(payload)
        return resp_json['result']
    def get_trans_changes(self, th):
        """Return the list of changes accumulated in transaction th."""
        payload = {'method': 'get_trans_changes', 'params': {'th': th}}
        resp, resp_json = self._write_call(payload)
        return resp_json['result']['changes']
    def validate_commit(self, th):
        """Run commit validation; returns any warnings (empty list if none)."""
        payload = {'method': 'validate_commit', 'params': {'th': th}}
        resp, resp_json = self._write_call(payload)
        return resp_json['result'].get('warnings', [])
    def commit(self, th):
        """Commit transaction th; on clean success the cached handle is dropped."""
        payload = {'method': 'commit', 'params': {'th': th}}
        resp, resp_json = self._write_call(payload)
        if len(resp_json['result']) == 0:
            self._maybe_delete_trans(th)
        return resp_json['result']
    def get_schema(self, **kwargs):
        """Fetch schema information (uses a write transaction only if one is open)."""
        payload = {'method': 'get_schema', 'params': kwargs}
        resp, resp_json = self._maybe_write_call(payload)
        return resp_json['result']
    def get_module_prefix_map(self, path=None):
        """Return the module-name to prefix map, optionally scoped to path."""
        if path is None:
            payload = {'method': 'get_module_prefix_map', 'params': {}}
            resp, resp_json = self._call(payload)
        else:
            payload = {'method': 'get_module_prefix_map', 'params': {'path': path}}
            resp, resp_json = self._maybe_write_call(payload)
        return resp_json['result']
    def get_value(self, path):
        """Read the value of the leaf at path."""
        payload = {
            'method': 'get_value',
            'params': {'path': path}
        }
        resp, resp_json = self._read_call(payload)
        return resp_json['result']
    def exists(self, path):
        """Return True if path exists in the data tree."""
        payload = {'method': 'exists', 'params': {'path': path}}
        try:
            resp, resp_json = self._read_call(payload)
            return resp_json['result']['exists']
        except NsoException as ex:
            # calling exists on a sub-list when the parent list does
            # not exists will cause data.not_found errors on recent
            # NSO
            if 'type' in ex.error and ex.error['type'] == 'data.not_found':
                return False
            raise
    def create(self, th, path):
        """Create the node at path within transaction th."""
        payload = {'method': 'create', 'params': {'th': th, 'path': path}}
        self._write_call(payload)
    def delete(self, th, path):
        """Delete the node at path within transaction th."""
        payload = {'method': 'delete', 'params': {'th': th, 'path': path}}
        self._write_call(payload)
    def set_value(self, th, path, value):
        """Set the leaf at path to value within transaction th."""
        payload = {
            'method': 'set_value',
            'params': {'th': th, 'path': path, 'value': value}
        }
        resp, resp_json = self._write_call(payload)
        return resp_json['result']
    def show_config(self, path, operational=False):
        """Return the configuration below path, optionally with operational data."""
        payload = {
            'method': 'show_config',
            'params': {
                'path': path,
                'result_as': 'json',
                'with_oper': operational}
        }
        resp, resp_json = self._read_call(payload)
        return resp_json['result']
    def query(self, xpath, fields):
        """Run an XPath query selecting fields; returns the result rows."""
        payload = {
            'method': 'query',
            'params': {
                'xpath_expr': xpath,
                'selection': fields
            }
        }
        resp, resp_json = self._read_call(payload)
        return resp_json['result']['results']
    def run_action(self, th, path, params=None):
        """Invoke the action at path and return its output as a dict.

        NSO >= 4.5 (or >= 4.4.3) can return JSON directly; older servers
        return a flat name/value list which is converted here (one level
        only).
        """
        if params is None:
            params = {}
        if is_version(self, [(4, 5), (4, 4, 3)]):
            result_format = 'json'
        else:
            result_format = 'normal'
        payload = {
            'method': 'run_action',
            'params': {
                'format': result_format,
                'path': path,
                'params': params
            }
        }
        if th is None:
            resp, resp_json = self._read_call(payload)
        else:
            payload['params']['th'] = th
            resp, resp_json = self._call(payload)
        if result_format == 'normal':
            # this only works for one-level results, list entries,
            # containers etc will have / in their name.
            result = {}
            for info in resp_json['result']:
                result[info['name']] = info['value']
        else:
            result = resp_json['result']
        return result
    def _call(self, payload):
        """POST one JSON-RPC payload; returns (response, parsed_json).

        Raises NsoException on timeout, on non-200 HTTP responses and on
        JSON-RPC level errors.
        """
        self._id += 1
        if 'id' not in payload:
            payload['id'] = self._id
        if 'jsonrpc' not in payload:
            payload['jsonrpc'] = '2.0'
        data = json.dumps(payload)
        try:
            resp = open_url(
                self._url, timeout=self._timeout,
                method='POST', data=data, headers=self._headers,
                validate_certs=self._validate_certs)
            if resp.code != 200:
                # Fix: report resp.code (the attribute actually checked
                # above).  resp.status is absent from the Python 2 urllib
                # response object, so formatting it here raised
                # AttributeError instead of reporting the HTTP failure.
                raise NsoException(
                    'NSO returned HTTP code {0}, expected 200'.format(resp.code), {})
        except socket.timeout:
            raise NsoException('request timed out against NSO at {0}'.format(self._url), {})
        resp_body = resp.read()
        resp_json = json.loads(resp_body)
        if 'error' in resp_json:
            self._handle_call_error(payload, resp_json)
        return resp, resp_json
    def _handle_call_error(self, payload, resp_json):
        """Translate a JSON-RPC error object into a readable NsoException."""
        method = payload['method']
        error = resp_json['error']
        # error types look like 'rpc.method.<kind>'; strip the common prefix
        error_type = error['type'][len('rpc.method.'):]
        if error_type in ('unexpected_params',
                          'unknown_params_value',
                          'invalid_params',
                          'invalid_params_type',
                          'data_not_found'):
            key = error['data']['param']
            error_type_s = error_type.replace('_', ' ')
            if key == 'path':
                msg = 'NSO {0} {1}. path = {2}'.format(
                    method, error_type_s, payload['params']['path'])
            else:
                path = payload['params'].get('path', 'unknown')
                msg = 'NSO {0} {1}. path = {2}. {3} = {4}'.format(
                    method, error_type_s, path, key, payload['params'][key])
        else:
            msg = 'NSO {0} returned JSON-RPC error: {1}'.format(method, error)
        raise NsoException(msg, error)
    def _read_call(self, payload):
        """Issue payload inside the cached read transaction."""
        if 'th' not in payload['params']:
            payload['params']['th'] = self.get_trans(mode='read')
        return self._call(payload)
    def _write_call(self, payload):
        """Issue payload inside the cached read_write transaction."""
        if 'th' not in payload['params']:
            payload['params']['th'] = self.get_trans(mode='read_write')
        return self._call(payload)
    def _maybe_write_call(self, payload):
        """Use the write transaction when one is already open, else read."""
        if 'read_write' in self._trans:
            return self._write_call(payload)
        else:
            return self._read_call(payload)
    def _maybe_delete_trans(self, th):
        """Drop th from the transaction cache if it is cached under any mode."""
        for mode in ('read', 'read_write'):
            if th == self._trans.get(mode, None):
                del self._trans[mode]
class ValueBuilder(object):
PATH_RE = re.compile('{[^}]*}')
PATH_RE_50 = re.compile('{[^}]*}$')
class Value(object):
__slots__ = ['path', 'tag_path', 'state', 'value', 'deps']
def __init__(self, path, state, value, deps):
self.path = path
self.tag_path = ValueBuilder.PATH_RE.sub('', path)
self.state = state
self.value = value
self.deps = deps
# nodes can depend on themselves
if self.tag_path in self.deps:
self.deps.remove(self.tag_path)
def __lt__(self, rhs):
l_len = len(self.path.split('/'))
r_len = len(rhs.path.split('/'))
if l_len == r_len:
return self.path.__lt__(rhs.path)
return l_len < r_len
def __str__(self):
return 'Value<path={0}, state={1}, value={2}>'.format(
self.path, self.state, self.value)
class ValueIterator(object):
def __init__(self, client, values, delayed_values):
self._client = client
self._values = values
self._delayed_values = delayed_values
self._pos = 0
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
if self._pos >= len(self._values):
if len(self._delayed_values) == 0:
raise StopIteration()
builder = ValueBuilder(self._client, delay=False)
for (parent, maybe_qname, value) in self._delayed_values:
builder.build(parent, maybe_qname, value)
del self._delayed_values[:]
self._values.extend(builder.values)
return self.next()
value = self._values[self._pos]
self._pos += 1
return value
def __init__(self, client, mode='config', delay=None):
self._client = client
self._mode = mode
self._schema_cache = {}
self._module_prefix_map_cache = {}
self._values = []
self._values_dirty = False
self._delay = delay is None and mode == 'config' and is_version(self._client, [(5, 0)])
self._delayed_values = []
def build(self, parent, maybe_qname, value, schema=None):
qname, name = self.get_prefix_name(parent, maybe_qname)
if name is None:
path = parent
else:
path = '{0}/{1}'.format(parent, qname)
if schema is None:
schema = self._get_schema(path)
if self._delay and schema.get('is_mount_point', False):
# delay conversion of mounted values, required to get
# shema information on 5.0 and later.
self._delayed_values.append((parent, maybe_qname, value))
elif self._is_leaf_list(schema) and is_version(self._client, [(4, 5)]):
self._build_leaf_list(path, schema, value)
elif self._is_leaf(schema):
deps = schema.get('deps', [])
if self._is_empty_leaf(schema):
exists = self._client.exists(path)
if exists and value != [None]:
self._add_value(path, State.ABSENT, None, deps)
elif not exists and value == [None]:
self._add_value(path, State.PRESENT, None, deps)
else:
if maybe_qname is None:
value_type = self.get_type(path)
else:
value_type = self._get_child_type(parent, qname)
if 'identityref' in value_type:
if isinstance(value, list):
value = [ll_v for ll_v, t_ll_v
in [self.get_prefix_name(parent, v) for v in value]]
else:
value, t_value = self.get_prefix_name(parent, value)
self._add_value(path, State.SET, value, deps)
elif isinstance(value, dict):
self._build_dict(path, schema, value)
elif isinstance(value, list):
self._build_list(path, schema, value)
else:
raise ModuleFailException(
'unsupported schema {0} at {1}'.format(
schema['kind'], path))
@property
def values(self):
if self._values_dirty:
self._values = ValueBuilder.sort_values(self._values)
self._values_dirty = False
return ValueBuilder.ValueIterator(self._client, self._values, self._delayed_values)
@staticmethod
def sort_values(values):
class N(object):
def __init__(self, v):
self.tmp_mark = False
self.mark = False
self.v = v
sorted_values = []
nodes = [N(v) for v in sorted(values)]
def get_node(tag_path):
return next((m for m in nodes
if m.v.tag_path == tag_path), None)
def is_cycle(n, dep, visited):
visited.add(n.v.tag_path)
if dep in visited:
return True
dep_n = get_node(dep)
if dep_n is not None:
for sub_dep in dep_n.v.deps:
if is_cycle(dep_n, sub_dep, visited):
return True
return False
# check for dependency cycles, remove if detected. sort will
# not be 100% but allows for a best-effort to work around
# issue in NSO.
for n in nodes:
for dep in n.v.deps:
if is_cycle(n, dep, set()):
n.v.deps.remove(dep)
def visit(n):
if n.tmp_mark:
return False
if not n.mark:
n.tmp_mark = True
for m in nodes:
if m.v.tag_path in n.v.deps:
if not visit(m):
return False
n.tmp_mark = False
n.mark = True
sorted_values.insert(0, n.v)
return True
n = next((n for n in nodes if not n.mark), None)
while n is not None:
visit(n)
n = next((n for n in nodes if not n.mark), None)
return sorted_values[::-1]
def _build_dict(self, path, schema, value):
keys = schema.get('key', [])
for dict_key, dict_value in value.items():
qname, name = self.get_prefix_name(path, dict_key)
if dict_key in ('__state', ) or name in keys:
continue
child_schema = self._find_child(path, schema, qname)
self.build(path, dict_key, dict_value, child_schema)
def _build_leaf_list(self, path, schema, value):
deps = schema.get('deps', [])
entry_type = self.get_type(path, schema)
if self._mode == 'verify':
for entry in value:
if 'identityref' in entry_type:
entry, t_entry = self.get_prefix_name(path, entry)
entry_path = '{0}{{{1}}}'.format(path, entry)
if not self._client.exists(entry_path):
self._add_value(entry_path, State.ABSENT, None, deps)
else:
# remove leaf list if treated as a list and then re-create the
# expected list entries.
self._add_value(path, State.ABSENT, None, deps)
for entry in value:
if 'identityref' in entry_type:
entry, t_entry = self.get_prefix_name(path, entry)
entry_path = '{0}{{{1}}}'.format(path, entry)
self._add_value(entry_path, State.PRESENT, None, deps)
def _build_list(self, path, schema, value):
deps = schema.get('deps', [])
for entry in value:
entry_key = self._build_key(path, entry, schema['key'])
entry_path = '{0}{{{1}}}'.format(path, entry_key)
entry_state = entry.get('__state', 'present')
entry_exists = self._client.exists(entry_path)
if entry_state == 'absent':
if entry_exists:
self._add_value(entry_path, State.ABSENT, None, deps)
else:
if not entry_exists:
self._add_value(entry_path, State.PRESENT, None, deps)
if entry_state in State.SYNC_STATES:
self._add_value(entry_path, entry_state, None, deps)
self.build(entry_path, None, entry)
def _build_key(self, path, entry, schema_keys):
key_parts = []
for key in schema_keys:
value = entry.get(key, None)
if value is None:
raise ModuleFailException(
'required leaf {0} in {1} not set in data'.format(
key, path))
value_type = self._get_child_type(path, key)
if 'identityref' in value_type:
value, t_value = self.get_prefix_name(path, value)
key_parts.append(self._quote_key(value))
return ' '.join(key_parts)
def _quote_key(self, key):
if isinstance(key, bool):
return key and 'true' or 'false'
q_key = []
for c in str(key):
if c in ('{', '}', "'", '\\'):
q_key.append('\\')
q_key.append(c)
q_key = ''.join(q_key)
if ' ' in q_key:
return '"{0}"'.format(q_key)
return q_key
def _find_child(self, path, schema, qname):
if 'children' not in schema:
schema = self._get_schema(path)
# look for the qualified name if : is in the name
child_schema = self._get_child(schema, qname)
if child_schema is not None:
return child_schema
# no child was found, look for a choice with a child matching
for child_schema in schema['children']:
if child_schema['kind'] != 'choice':
continue
choice_child_schema = self._get_choice_child(child_schema, qname)
if choice_child_schema is not None:
return choice_child_schema
raise ModuleFailException(
'no child in {0} with name {1}. children {2}'.format(
path, qname, ','.join((c.get('qname', c.get('name', None)) for c in schema['children']))))
def _add_value(self, path, state, value, deps):
self._values.append(ValueBuilder.Value(path, state, value, deps))
self._values_dirty = True
def get_prefix_name(self, path, qname):
if not isinstance(qname, (str, unicode)):
return qname, None
if ':' not in qname:
return qname, qname
module_prefix_map = self._get_module_prefix_map(path)
module, name = qname.split(':', 1)
if module not in module_prefix_map:
raise ModuleFailException(
'no module mapping for module {0}. loaded modules {1}'.format(
module, ','.join(sorted(module_prefix_map.keys()))))
return '{0}:{1}'.format(module_prefix_map[module], name), name
def _get_schema(self, path):
return self._ensure_schema_cached(path)['data']
    def _get_child_type(self, parent_path, key):
        """Resolve the type name list for the child *key* of the node at
        *parent_path*."""
        all_schema = self._ensure_schema_cached(parent_path)
        parent_schema = all_schema['data']
        meta = all_schema['meta']
        schema = self._find_child(parent_path, parent_schema, key)
        return self.get_type(parent_path, schema, meta)
    def get_type(self, path, schema=None, meta=None):
        """Return the list of resolved type names for the leaf at *path*
        (following typedef/leafref/union indirections via *meta*), or
        None when the node is not a leaf."""
        if schema is None or meta is None:
            all_schema = self._ensure_schema_cached(path)
            schema = all_schema['data']
            meta = all_schema['meta']
        if self._is_leaf(schema):
            # Inner helper recursively chases type references in the
            # meta['types'] table until primitive names are reached.
            def get_type(meta, curr_type):
                if curr_type.get('primitive', False):
                    return [curr_type['name']]
                if 'namespace' in curr_type:
                    # Namespaced typedef: look up its definition and recurse.
                    curr_type_key = '{0}:{1}'.format(
                        curr_type['namespace'], curr_type['name'])
                    type_info = meta['types'][curr_type_key][-1]
                    return get_type(meta, type_info)
                if 'leaf_type' in curr_type:
                    return get_type(meta, curr_type['leaf_type'][-1])
                if 'union' in curr_type:
                    # Unions flatten to the union of all member types.
                    union_types = []
                    for union_type in curr_type['union']:
                        union_types.extend(get_type(meta, union_type[-1]))
                    return union_types
                return [curr_type.get('name', 'unknown')]
            return get_type(meta, schema['type'])
        return None
    def _ensure_schema_cached(self, path):
        """Fetch (once) and return the schema for *path*, keyed by the
        path with list keys stripped."""
        if not self._delay and is_version(self._client, [(5, 0)]):
            # newer versions of NSO support multiple different schemas
            # for different devices, thus the device is required to
            # look up the schema. Remove the key entry to get schema
            # logic working ok.
            path = ValueBuilder.PATH_RE_50.sub('', path)
        else:
            path = ValueBuilder.PATH_RE.sub('', path)
        if path not in self._schema_cache:
            schema = self._client.get_schema(path=path, levels=1)
            self._schema_cache[path] = schema
        return self._schema_cache[path]
    def _get_module_prefix_map(self, path):
        """Return (and cache) the module-name to prefix mapping; NSO 5.0+
        can return different mappings depending on the device path."""
        # newer versions of NSO support multiple mappings from module
        # to prefix depending on which device is used.
        if path != '' and is_version(self._client, [(5, 0)]):
            if path not in self._module_prefix_map_cache:
                self._module_prefix_map_cache[path] = self._client.get_module_prefix_map(path)
            return self._module_prefix_map_cache[path]
        if '' not in self._module_prefix_map_cache:
            self._module_prefix_map_cache[''] = self._client.get_module_prefix_map()
        return self._module_prefix_map_cache['']
def _get_child(self, schema, qname):
# no child specified, return parent
if qname is None:
return schema
name_key = ':' in qname and 'qname' or 'name'
return next((c for c in schema['children']
if c.get(name_key, None) == qname), None)
def _get_choice_child(self, schema, qname):
name_key = ':' in qname and 'qname' or 'name'
for child_case in schema['cases']:
# look for direct child
choice_child_schema = next(
(c for c in child_case['children']
if c.get(name_key, None) == qname), None)
if choice_child_schema is not None:
return choice_child_schema
# look for nested choice
for child_schema in child_case['children']:
if child_schema['kind'] != 'choice':
continue
choice_child_schema = self._get_choice_child(child_schema, qname)
if choice_child_schema is not None:
return choice_child_schema
return None
    def _is_leaf_list(self, schema):
        """True when *schema* describes a leaf-list node."""
        return schema.get('kind', None) == 'leaf-list'
    def _is_leaf(self, schema):
        """True when *schema* is a value-carrying node (key/leaf)."""
        # still checking for leaf-list here to be compatible with pre
        # 4.5 versions of NSO.
        return schema.get('kind', None) in ('key', 'leaf', 'leaf-list')
    def _is_empty_leaf(self, schema):
        """True when *schema* is a leaf of the YANG 'empty' type."""
        return (schema.get('kind', None) == 'leaf' and
                schema['type'].get('primitive', False) and
                schema['type'].get('name', '') == 'empty')
def connect(params):
    """Open a JSON-RPC session to NSO using the url/timeout/validate_certs
    entries of *params*, log in with its username/password, and return
    the client."""
    client = JsonRpc(params['url'],
                     params['timeout'],
                     params['validate_certs'])
    client.login(params['username'], params['password'])
    return client
def verify_version(client, required_versions):
    """Raise ModuleFailException unless the connected NSO server version
    satisfies at least one tuple in *required_versions*."""
    version_str = client.get_system_setting('version')
    if not verify_version_str(version_str, required_versions):
        # Build a human readable '4.5, 5.0'-style list for the error.
        supported_versions = ', '.join(
            ['.'.join([str(p) for p in required_version])
             for required_version in required_versions])
        raise ModuleFailException(
            'unsupported NSO version {0}. {1} or later supported'.format(
                version_str, supported_versions))
def is_version(client, required_versions):
    """Return True when the connected NSO server version satisfies one of
    *required_versions* (no exception on mismatch)."""
    version_str = client.get_system_setting('version')
    return verify_version_str(version_str, required_versions)
def verify_version_str(version_str, required_versions):
    """Check *version_str* against a list of minimum-version tuples.

    Returns True when the parsed version is at least one of the entries
    in *required_versions*; raises ModuleFailException when the string
    lacks a major and minor component.
    """
    # Strip any build suffix such as '4.5_linux' before parsing.
    version_str = re.sub('_.*', '', version_str)
    version = [int(part) for part in version_str.split('.')]
    if len(version) < 2:
        raise ModuleFailException(
            'unsupported NSO version format {0}'.format(version_str))

    def meets(required):
        # Element-wise comparison; a missing component counts as too old.
        for pos, wanted in enumerate(required):
            if pos >= len(version):
                return False
            if version[pos] != wanted:
                return version[pos] > wanted
        return True

    return any(meets(required) for required in required_versions)
def normalize_value(expected_value, value, key):
    """Coerce *value* (as returned over JSON-RPC) to the type of
    *expected_value* so the two can be compared.

    *key* is used only in error messages.  Raises ModuleFailException
    when the value cannot be converted or its structure mismatches.
    """
    if value is None:
        return None
    # bool must be tested before int: bool is a subclass of int.
    if (isinstance(expected_value, bool) and
            isinstance(value, (str, unicode))):
        return value == 'true'
    if isinstance(expected_value, int):
        try:
            return int(value)
        # BUGFIX: int()/float() raise ValueError on malformed strings,
        # not TypeError, so conversion failures used to escape uncaught.
        except (TypeError, ValueError):
            # BUGFIX: value and key were swapped in the message.
            raise ModuleFailException(
                'returned value {0} for {1} is not a valid integer'.format(
                    value, key))
    if isinstance(expected_value, float):
        try:
            return float(value)
        except (TypeError, ValueError):
            raise ModuleFailException(
                'returned value {0} for {1} is not a valid float'.format(
                    value, key))
    if isinstance(expected_value, (list, tuple)):
        if not isinstance(value, (list, tuple)):
            raise ModuleFailException(
                'returned value {0} for {1} is not a list'.format(value, key))
        if len(expected_value) != len(value):
            raise ModuleFailException(
                'list length mismatch for {0}'.format(key))
        # Normalize element-wise, extending the key with the index for
        # precise error reporting.
        normalized_value = []
        for i in range(len(expected_value)):
            normalized_value.append(
                normalize_value(expected_value[i], value[i], '{0}[{1}]'.format(key, i)))
        return normalized_value
    if isinstance(expected_value, dict):
        if not isinstance(value, dict):
            raise ModuleFailException(
                'returned value {0} for {1} is not a dict'.format(value, key))
        if len(expected_value) != len(value):
            raise ModuleFailException(
                'dict length mismatch for {0}'.format(key))
        # Normalize both keys and values recursively.
        normalized_value = {}
        for k in expected_value.keys():
            n_k = normalize_value(k, k, '{0}[{1}]'.format(key, k))
            if n_k not in value:
                raise ModuleFailException('missing {0} in value'.format(n_k))
            normalized_value[n_k] = normalize_value(expected_value[k], value[k], '{0}[{1}]'.format(key, k))
        return normalized_value
    # String normalization: make the encoding of value match that of
    # expected_value (Python 2 str/unicode mix).
    if HAVE_UNICODE:
        if isinstance(expected_value, unicode) and isinstance(value, str):
            return value.decode('utf-8')
        if isinstance(expected_value, str) and isinstance(value, unicode):
            return value.encode('utf-8')
    else:
        if hasattr(expected_value, 'encode') and hasattr(value, 'decode'):
            return value.decode('utf-8')
        if hasattr(expected_value, 'decode') and hasattr(value, 'encode'):
            return value.encode('utf-8')
    return value
| gpl-3.0 |
pyjs/pyjs | examples/kitchensink/sink/Trees.py | 6 | 4694 | from pyjamas.ui.Sink import Sink, SinkInfo
from pyjamas.ui.Tree import Tree
from pyjamas.ui.TreeItem import TreeItem
class Trees(Sink):
    """Kitchen-sink demo page showing a Tree widget whose branches are
    populated lazily the first time they are expanded."""
    def __init__(self):
        Sink.__init__(self)
        # Static prototype data: composers -> categories -> works.
        self.fProto = [
            Proto("Beethoven", [
                Proto("Concertos", [
                    Proto("No. 1 - C"),
                    Proto("No. 2 - B-Flat Major"),
                    Proto("No. 3 - C Minor"),
                    Proto("No. 4 - G Major"),
                    Proto("No. 5 - E-Flat Major")
                    ]),
                Proto("Quartets", [
                    Proto("Six String Quartets"),
                    Proto("Three String Quartets"),
                    Proto("Grosse Fugue for String Quartets")
                    ]),
                Proto("Sonatas", [
                    Proto("Sonata in A Minor"),
                    Proto("Sonata in F Major")
                    ]),
                Proto("Symphonies", [
                    Proto("No. 1 - C Major"),
                    Proto("No. 2 - D Major"),
                    Proto("No. 3 - E-Flat Major"),
                    Proto("No. 4 - B-Flat Major"),
                    Proto("No. 5 - C Minor"),
                    Proto("No. 6 - F Major"),
                    Proto("No. 7 - A Major"),
                    Proto("No. 8 - F Major"),
                    Proto("No. 9 - D Minor")
                    ])
                ]),
            Proto("Brahms", [
                Proto("Concertos", [
                    Proto("Violin Concerto"),
                    Proto("Double Concerto - A Minor"),
                    Proto("Piano Concerto No. 1 - D Minor"),
                    Proto("Piano Concerto No. 2 - B-Flat Major")
                    ]),
                Proto("Quartets", [
                    Proto("Piano Quartet No. 1 - G Minor"),
                    Proto("Piano Quartet No. 2 - A Major"),
                    Proto("Piano Quartet No. 3 - C Minor"),
                    Proto("String Quartet No. 3 - B-Flat Minor")
                    ]),
                Proto("Sonatas", [
                    Proto("Two Sonatas for Clarinet - F Minor"),
                    Proto("Two Sonatas for Clarinet - E-Flat Major")
                    ]),
                Proto("Symphonies", [
                    Proto("No. 1 - C Minor"),
                    Proto("No. 2 - D Minor"),
                    Proto("No. 3 - F Major"),
                    Proto("No. 4 - E Minor")
                    ])
                ]),
            Proto("Mozart", [
                Proto("Concertos", [
                    Proto("Piano Concerto No. 12"),
                    Proto("Piano Concerto No. 17"),
                    Proto("Clarinet Concerto"),
                    Proto("Violin Concerto No. 5"),
                    Proto("Violin Concerto No. 4")
                    ]),
                ])
            ]
        # Create only the top-level items; children appear on expansion.
        self.fTree = Tree()
        for i in range(len(self.fProto)):
            self.createItem(self.fProto[i])
            self.fTree.addItem(self.fProto[i].item)
        self.fTree.addTreeListener(self)
        self.initWidget(self.fTree)
    def onTreeItemSelected(self, item):
        # Selection needs no action in this demo.
        pass
    def onTreeItemStateChanged(self, item):
        """On first expansion, replace the placeholder child with the
        item's real (sorted) children."""
        child = item.getChild(0)
        if hasattr(child, "isPendingItem"):
            item.removeItem(child)
            proto = item.getUserObject()
            for i in range(len(proto.children)):
                self.createItem(proto.children[i])
                index = self.getSortIndex(item, proto.children[i].text)
                # demonstrate insertItem. addItem is easy.
                item.insertItem(proto.children[i].item, index)
    def getSortIndex(self, parent, text):
        """Return the insertion index that keeps *parent*'s children in
        case-insensitive alphabetical order."""
        nodes = parent.getChildCount()
        node = 0
        text = text.lower()
        while node < nodes:
            item = parent.getChild(node)
            if cmp(text, item.getText().lower()) < 0:
                break;
            else:
                node += 1
        return node
    def createItem(self, proto):
        """Build the TreeItem for *proto*; children get a placeholder so
        the expansion arrow appears before they are materialized."""
        proto.item = TreeItem(proto.text)
        proto.item.setUserObject(proto)
        if len(proto.children) > 0:
            proto.item.addItem(PendingItem())
class Proto:
    """Plain data node used as a prototype for lazily built tree items.

    Attributes: text (label), children (list of Proto), item (the
    TreeItem once created, else None).
    """
    def __init__(self, text, children=None):
        self.text = text
        self.item = None
        self.children = [] if children is None else children
class PendingItem(TreeItem):
    """Placeholder child shown under unexpanded branches; detected via
    hasattr(child, 'isPendingItem') and swapped for real children."""
    def __init__(self):
        TreeItem.__init__(self, "Please wait...")
    def isPendingItem(self):
        # Marker method; only its presence is checked.
        return True
def init():
    """Return the SinkInfo that registers this demo page with the
    kitchen-sink application."""
    text="GWT has a built-in <code>Tree</code> widget. The tree is focusable and has keyboard support as well."
    return SinkInfo("Trees", text, Trees)
| apache-2.0 |
smarbos/adopteitor-client | node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py | 1509 | 17165 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Handle version information related to Visual Stuio."""
import errno
import os
import re
import subprocess
import sys
import gyp
import glob
class VisualStudioVersion(object):
  """Information regarding a version of Visual Studio."""
  def __init__(self, short_name, description,
               solution_version, project_version, flat_sln, uses_vcxproj,
               path, sdk_based, default_toolset=None):
    # short_name: e.g. '2010' or '2010e' ('e' = Express edition).
    self.short_name = short_name
    self.description = description
    self.solution_version = solution_version
    self.project_version = project_version
    # flat_sln: Express editions don't support solution folders.
    self.flat_sln = flat_sln
    self.uses_vcxproj = uses_vcxproj
    # path: root of the VS installation; sdk_based: running from the
    # Windows SDK command-line environment rather than a full install.
    self.path = path
    self.sdk_based = sdk_based
    self.default_toolset = default_toolset
  def ShortName(self):
    """Get the short name, e.g. '2010e'."""
    return self.short_name
  def Description(self):
    """Get the full description of the version."""
    return self.description
  def SolutionVersion(self):
    """Get the version number of the sln files."""
    return self.solution_version
  def ProjectVersion(self):
    """Get the version number of the vcproj or vcxproj files."""
    return self.project_version
  def FlatSolution(self):
    """Whether solution folders must be avoided (Express editions)."""
    return self.flat_sln
  def UsesVcxproj(self):
    """Returns true if this version uses a vcxproj file."""
    return self.uses_vcxproj
  def ProjectExtension(self):
    """Returns the file extension for the project."""
    return self.uses_vcxproj and '.vcxproj' or '.vcproj'
  def Path(self):
    """Returns the path to Visual Studio installation."""
    return self.path
  def ToolPath(self, tool):
    """Returns the path to a given compiler tool. """
    return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
  def DefaultToolset(self):
    """Returns the msbuild toolset version that will be used in the absence
    of a user override."""
    return self.default_toolset
  def SetupScript(self, target_arch):
    """Returns a command (with arguments) to be used to set up the
    environment."""
    # Check if we are running in the SDK command line environment and use
    # the setup script from the SDK if so. |target_arch| should be either
    # 'x86' or 'x64'.
    assert target_arch in ('x86', 'x64')
    sdk_dir = os.environ.get('WindowsSDKDir')
    if self.sdk_based and sdk_dir:
      return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
              '/' + target_arch]
    else:
      # We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
      # vcvars32, which it can only find if VS??COMNTOOLS is set, which it
      # isn't always.
      if target_arch == 'x86':
        if self.short_name >= '2013' and self.short_name[-1] != 'e' and (
            os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
            os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
          # VS2013 and later, non-Express have a x64-x86 cross that we want
          # to prefer.
          return [os.path.normpath(
             os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
        # Otherwise, the standard x86 compiler.
        return [os.path.normpath(
          os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
      else:
        assert target_arch == 'x64'
        arg = 'x86_amd64'
        # Use the 64-on-64 compiler if we're not using an express
        # edition and we're running on a 64bit OS.
        if self.short_name[-1] != 'e' and (
            os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
            os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
          arg = 'amd64'
        return [os.path.normpath(
            os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
def _RegistryQueryBase(sysdir, key, value):
  """Read a registry value by launching reg.exe from a system directory.

  While ideally we might use the win32 module, we would like gyp to be
  python neutral, so for instance cygwin python lacks this module.

  Arguments:
    sysdir: The system subdirectory to attempt to launch reg.exe from.
    key: The registry key to read from.
    value: The particular value to read.
  Return:
    stdout from reg.exe, or None for failure.
  """
  # reg.exe is only available on Windows (native or under cygwin).
  if sys.platform not in ('win32', 'cygwin'):
    return None
  reg_exe = os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe')
  cmd = [reg_exe, 'query', key]
  if value:
    cmd += ['/v', value]
  proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  # Read all output first so returncode is valid afterwards.
  stdout = proc.communicate()[0]
  # reg.exe returns 0 on success, 1 on error.
  if proc.returncode:
    return None
  return stdout
def _RegistryQuery(key, value=None):
  r"""Use reg.exe to read a particular key through _RegistryQueryBase.

  First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection
  (available on Vista+ and on XP/2003 with KB942589); when that directory
  does not exist, falls back to System32, which is always correct for
  64-bit python anyway.

  Arguments:
    key: The registry key.
    value: The particular registry value to read (optional).
  Return:
    stdout from reg.exe, or None for failure.
  """
  try:
    return _RegistryQueryBase('Sysnative', key, value)
  except OSError as e:
    if e.errno != errno.ENOENT:
      raise
    # Sysnative is missing on this system; retry from System32.
    return _RegistryQueryBase('System32', key, value)
def _RegistryGetValueUsingWinReg(key, value):
  """Use the _winreg module to obtain the value of a registry key.

  Args:
    key: The registry key.
    value: The particular registry value to read.
  Return:
    contents of the registry key's value, or None on failure.  Throws
    ImportError if _winreg is unavailable.
  """
  # NOTE: _winreg is the Python 2 name (renamed winreg in Python 3); on
  # non-Windows pythons the import raises ImportError, which the caller
  # uses to fall back to reg.exe.
  import _winreg
  try:
    root, subkey = key.split('\\', 1)
    assert root == 'HKLM'  # Only need HKLM for now.
    with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
      return _winreg.QueryValueEx(hkey, value)[0]
  except WindowsError:
    # Missing key or value reads as "not found".
    return None
def _RegistryGetValue(key, value):
  """Use _winreg or reg.exe to obtain the value of a registry key.

  Using _winreg is preferable because it solves an issue on some corporate
  environments where access to reg.exe is locked down. However, we still
  need to fall back to reg.exe for the case where the _winreg module is not
  available (for example in cygwin python).

  Args:
    key: The registry key.
    value: The particular registry value to read.
  Return:
    contents of the registry key's value, or None on failure.
  """
  try:
    return _RegistryGetValueUsingWinReg(key, value)
  except ImportError:
    pass
  # _winreg unavailable: parse reg.exe output instead.
  text = _RegistryQuery(key, value)
  if not text:
    return None
  match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
  return match.group(1) if match else None
def _CreateVersion(name, path, sdk_based=False):
  """Sets up MSVS project generation.

  Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
  autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
  passed in that doesn't match a value in versions python will throw a error.
  """
  if path:
    path = os.path.normpath(path)
  # Keys are GYP_MSVS_VERSION values; an 'e' suffix denotes an Express
  # edition (flat solution, no solution folders).
  versions = {
      '2015': VisualStudioVersion('2015',
                                  'Visual Studio 2015',
                                  solution_version='12.00',
                                  project_version='14.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v140'),
      '2013': VisualStudioVersion('2013',
                                  'Visual Studio 2013',
                                  solution_version='13.00',
                                  project_version='12.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v120'),
      '2013e': VisualStudioVersion('2013e',
                                   'Visual Studio 2013',
                                   solution_version='13.00',
                                   project_version='12.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v120'),
      '2012': VisualStudioVersion('2012',
                                  'Visual Studio 2012',
                                  solution_version='12.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v110'),
      '2012e': VisualStudioVersion('2012e',
                                   'Visual Studio 2012',
                                   solution_version='12.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v110'),
      '2010': VisualStudioVersion('2010',
                                  'Visual Studio 2010',
                                  solution_version='11.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based),
      '2010e': VisualStudioVersion('2010e',
                                   'Visual C++ Express 2010',
                                   solution_version='11.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based),
      '2008': VisualStudioVersion('2008',
                                  'Visual Studio 2008',
                                  solution_version='10.00',
                                  project_version='9.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2008e': VisualStudioVersion('2008e',
                                   'Visual Studio 2008',
                                   solution_version='10.00',
                                   project_version='9.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
      '2005': VisualStudioVersion('2005',
                                  'Visual Studio 2005',
                                  solution_version='9.00',
                                  project_version='8.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2005e': VisualStudioVersion('2005e',
                                   'Visual Studio 2005',
                                   solution_version='9.00',
                                   project_version='8.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
  }
  return versions[str(name)]
def _ConvertToCygpath(path):
"""Convert to cygwin path if we are using cygwin."""
if sys.platform == 'cygwin':
p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
path = p.communicate()[0].strip()
return path
def _DetectVisualStudioVersions(versions_to_check, force_express):
  """Collect the list of installed visual studio versions.

  Arguments:
    versions_to_check: iterable of dotted version strings ('10.0', ...).
    force_express: when true, only look for Express editions.
  Returns:
    A list of visual studio versions installed in descending order of
    usage preference.
  Base this on the registry and a quick check if devenv.exe exists.
  Only versions 8-10 are considered.
  Possibilities are:
    2005(e) - Visual Studio 2005 (8)
    2008(e) - Visual Studio 2008 (9)
    2010(e) - Visual Studio 2010 (10)
    2012(e) - Visual Studio 2012 (11)
    2013(e) - Visual Studio 2013 (12)
    2015    - Visual Studio 2015 (14)
  Where (e) is e for express editions of MSVS and blank otherwise.
  """
  version_to_year = {
      '8.0': '2005',
      '9.0': '2008',
      '10.0': '2010',
      '11.0': '2012',
      '12.0': '2013',
      '14.0': '2015',
  }
  versions = []
  for version in versions_to_check:
    # Old method of searching for which VS version is installed
    # We don't use the 2010-encouraged-way because we also want to get the
    # path to the binaries, which it doesn't offer.
    keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Microsoft\VCExpress\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
    for index in range(len(keys)):
      path = _RegistryGetValue(keys[index], 'InstallDir')
      if not path:
        continue
      path = _ConvertToCygpath(path)
      # Check for full.
      full_path = os.path.join(path, 'devenv.exe')
      express_path = os.path.join(path, '*express.exe')
      if not force_express and os.path.exists(full_path):
        # Add this one.
        versions.append(_CreateVersion(version_to_year[version],
            os.path.join(path, '..', '..')))
      # Check for express.
      elif glob.glob(express_path):
        # Add this one.
        versions.append(_CreateVersion(version_to_year[version] + 'e',
            os.path.join(path, '..', '..')))
    # The old method above does not work when only SDK is installed.
    keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
    for index in range(len(keys)):
      path = _RegistryGetValue(keys[index], version)
      if not path:
        continue
      path = _ConvertToCygpath(path)
      if version != '14.0':  # There is no Express edition for 2015.
        versions.append(_CreateVersion(version_to_year[version] + 'e',
            os.path.join(path, '..'), sdk_based=True))
  return versions
def SelectVisualStudioVersion(version='auto', allow_fallback=True):
  """Select which version of Visual Studio projects to generate.

  Arguments:
    version: Hook to allow caller to force a particular version (vs auto).
    allow_fallback: when False, raise instead of defaulting if nothing
      installed is found.
  Returns:
    An object representing a visual studio project format version.
  """
  # In auto mode, check environment variable for override.
  if version == 'auto':
    version = os.environ.get('GYP_MSVS_VERSION', 'auto')
  # Maps a requested version to the dotted registry versions to probe,
  # in order of preference.
  version_map = {
    'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
    '2005': ('8.0',),
    '2005e': ('8.0',),
    '2008': ('9.0',),
    '2008e': ('9.0',),
    '2010': ('10.0',),
    '2010e': ('10.0',),
    '2012': ('11.0',),
    '2012e': ('11.0',),
    '2013': ('12.0',),
    '2013e': ('12.0',),
    '2015': ('14.0',),
  }
  override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
  if override_path:
    # An explicit install path requires an explicit version to pair it with.
    msvs_version = os.environ.get('GYP_MSVS_VERSION')
    if not msvs_version:
      raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
                       'set to a particular version (e.g. 2010e).')
    return _CreateVersion(msvs_version, override_path, sdk_based=True)
  version = str(version)
  versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
  if not versions:
    if not allow_fallback:
      raise ValueError('Could not locate Visual Studio installation.')
    if version == 'auto':
      # Default to 2005 if we couldn't find anything
      return _CreateVersion('2005', None)
    else:
      return _CreateVersion(version, None)
  return versions[0]
| mit |
yongshengwang/hue | desktop/core/ext-py/tablib-0.10.0/tablib/packages/xlwt/Workbook.py | 57 | 20514 | # -*- coding: windows-1252 -*-
'''
Record Order in BIFF8
Workbook Globals Substream
BOF Type = workbook globals
Interface Header
MMS
Interface End
WRITEACCESS
CODEPAGE
DSF
TABID
FNGROUPCOUNT
Workbook Protection Block
WINDOWPROTECT
PROTECT
PASSWORD
PROT4REV
PROT4REVPASS
BACKUP
HIDEOBJ
WINDOW1
DATEMODE
PRECISION
REFRESHALL
BOOKBOOL
FONT +
FORMAT *
XF +
STYLE +
? PALETTE
USESELFS
BOUNDSHEET +
COUNTRY
? Link Table
SST
ExtSST
EOF
'''
import BIFFRecords
import Style
class Workbook(object):
#################################################################
## Constructor
#################################################################
    def __init__(self, encoding='ascii', style_compression=0):
        """Create an empty workbook.

        encoding: codec used when non-unicode strings are supplied.
        style_compression: passed to the StyleCollection (controls how
        aggressively duplicate styles are shared).
        """
        self.encoding = encoding
        self.__owner = 'None'
        self.__country_code = None # 0x07 is Russia :-)
        # Protection flags (WINDOWPROTECT/PROTECT/BACKUP records).
        self.__wnd_protect = 0
        self.__obj_protect = 0
        self.__protect = 0
        self.__backup_on_save = 0
        # for WINDOW1 record
        self.__hpos_twips = 0x01E0
        self.__vpos_twips = 0x005A
        self.__width_twips = 0x3FCF
        self.__height_twips = 0x2A4E
        self.__active_sheet = 0
        self.__first_tab_index = 0
        self.__selected_tabs = 0x01
        self.__tab_width_twips = 0x0258
        self.__wnd_hidden = 0
        self.__wnd_mini = 0
        self.__hscroll_visible = 1
        self.__vscroll_visible = 1
        self.__tabs_visible = 1
        self.__styles = Style.StyleCollection(style_compression)
        self.__dates_1904 = 0
        self.__use_cell_values = 1
        # Shared string table plus worksheet bookkeeping.
        self.__sst = BIFFRecords.SharedStringTable(self.encoding)
        self.__worksheets = []
        self.__worksheet_idx_from_name = {}
        # Cross-sheet/external reference tables for formulas (EXTERNSHEET
        # / SUPBOOK machinery); populated lazily by setup_ownbook and
        # setup_xcall.
        self.__sheet_refs = {}
        self._supbook_xref = {}
        self._xcall_xref = {}
        self._ownbook_supbookx = None
        self._ownbook_supbook_ref = None
        self._xcall_supbookx = None
        self._xcall_supbook_ref = None
    #################################################################
    ## Properties, "getters", "setters"
    #################################################################
    # Each set_x/get_x pair plus the trailing property() call exposes a
    # workbook-level attribute that maps onto a BIFF record field.
    # Values named *_twips are measured in twips (1/20 of a point) and
    # masked to 16 bits where the record field is 16 bits wide.
    def get_style_stats(self):
        # Returns a copy so callers can't mutate the internal stats list.
        return self.__styles.stats[:]
    def set_owner(self, value):
        self.__owner = value
    def get_owner(self):
        return self.__owner
    owner = property(get_owner, set_owner)
    #################################################################
    def set_country_code(self, value):
        self.__country_code = value
    def get_country_code(self):
        return self.__country_code
    country_code = property(get_country_code, set_country_code)
    #################################################################
    # Boolean-ish flags are stored as 0/1 ints and surfaced as bools.
    def set_wnd_protect(self, value):
        self.__wnd_protect = int(value)
    def get_wnd_protect(self):
        return bool(self.__wnd_protect)
    wnd_protect = property(get_wnd_protect, set_wnd_protect)
    #################################################################
    def set_obj_protect(self, value):
        self.__obj_protect = int(value)
    def get_obj_protect(self):
        return bool(self.__obj_protect)
    obj_protect = property(get_obj_protect, set_obj_protect)
    #################################################################
    def set_protect(self, value):
        self.__protect = int(value)
    def get_protect(self):
        return bool(self.__protect)
    protect = property(get_protect, set_protect)
    #################################################################
    def set_backup_on_save(self, value):
        self.__backup_on_save = int(value)
    def get_backup_on_save(self):
        return bool(self.__backup_on_save)
    backup_on_save = property(get_backup_on_save, set_backup_on_save)
    #################################################################
    def set_hpos(self, value):
        self.__hpos_twips = value & 0xFFFF
    def get_hpos(self):
        return self.__hpos_twips
    hpos = property(get_hpos, set_hpos)
    #################################################################
    def set_vpos(self, value):
        self.__vpos_twips = value & 0xFFFF
    def get_vpos(self):
        return self.__vpos_twips
    vpos = property(get_vpos, set_vpos)
    #################################################################
    def set_width(self, value):
        self.__width_twips = value & 0xFFFF
    def get_width(self):
        return self.__width_twips
    width = property(get_width, set_width)
    #################################################################
    def set_height(self, value):
        self.__height_twips = value & 0xFFFF
    def get_height(self):
        return self.__height_twips
    height = property(get_height, set_height)
    #################################################################
    def set_active_sheet(self, value):
        # The first visible tab follows the active sheet.
        self.__active_sheet = value & 0xFFFF
        self.__first_tab_index = self.__active_sheet
    def get_active_sheet(self):
        return self.__active_sheet
    active_sheet = property(get_active_sheet, set_active_sheet)
    #################################################################
    def set_tab_width(self, value):
        self.__tab_width_twips = value & 0xFFFF
    def get_tab_width(self):
        return self.__tab_width_twips
    tab_width = property(get_tab_width, set_tab_width)
    #################################################################
    # Visibility is stored inverted (hidden flag) but exposed as
    # wnd_visible for convenience.
    def set_wnd_visible(self, value):
        self.__wnd_hidden = int(not value)
    def get_wnd_visible(self):
        return not bool(self.__wnd_hidden)
    wnd_visible = property(get_wnd_visible, set_wnd_visible)
    #################################################################
    def set_wnd_mini(self, value):
        self.__wnd_mini = int(value)
    def get_wnd_mini(self):
        return bool(self.__wnd_mini)
    wnd_mini = property(get_wnd_mini, set_wnd_mini)
    #################################################################
    def set_hscroll_visible(self, value):
        self.__hscroll_visible = int(value)
    def get_hscroll_visible(self):
        return bool(self.__hscroll_visible)
    hscroll_visible = property(get_hscroll_visible, set_hscroll_visible)
    #################################################################
    def set_vscroll_visible(self, value):
        self.__vscroll_visible = int(value)
    def get_vscroll_visible(self):
        return bool(self.__vscroll_visible)
    vscroll_visible = property(get_vscroll_visible, set_vscroll_visible)
    #################################################################
    def set_tabs_visible(self, value):
        self.__tabs_visible = int(value)
    def get_tabs_visible(self):
        return bool(self.__tabs_visible)
    tabs_visible = property(get_tabs_visible, set_tabs_visible)
    #################################################################
    def set_dates_1904(self, value):
        # Selects the 1904 date system (DATEMODE record).
        self.__dates_1904 = int(value)
    def get_dates_1904(self):
        return bool(self.__dates_1904)
    dates_1904 = property(get_dates_1904, set_dates_1904)
    #################################################################
    def set_use_cell_values(self, value):
        self.__use_cell_values = int(value)
    def get_use_cell_values(self):
        return bool(self.__use_cell_values)
    use_cell_values = property(get_use_cell_values, set_use_cell_values)
    #################################################################
    def get_default_style(self):
        return self.__styles.default_style
    default_style = property(get_default_style)
    ##################################################################
    ## Methods
    ##################################################################
    def add_style(self, style):
        """Register *style* with the style collection; returns the result
        of StyleCollection.add (used as the cell's XF reference)."""
        return self.__styles.add(style)
    def add_str(self, s):
        """Add string *s* to the shared string table and return the value
        SharedStringTable.add_str yields for it."""
        return self.__sst.add_str(s)
    def del_str(self, sst_idx):
        """Remove the shared-string-table entry referenced by *sst_idx*."""
        self.__sst.del_str(sst_idx)
    def str_index(self, s):
        """Return the shared-string-table index for string *s*."""
        return self.__sst.str_index(s)
    def add_sheet(self, sheetname, cell_overwrite_ok=False):
        """Create, register and return a new Worksheet.

        The name is decoded with the workbook encoding if needed,
        validated via Utils.valid_sheet_name, and must be unique
        case-insensitively.  cell_overwrite_ok is forwarded to the
        Worksheet constructor.
        """
        import Worksheet, Utils
        if not isinstance(sheetname, unicode):
            sheetname = sheetname.decode(self.encoding)
        if not Utils.valid_sheet_name(sheetname):
            raise Exception("invalid worksheet name %r" % sheetname)
        lower_name = sheetname.lower()
        if lower_name in self.__worksheet_idx_from_name:
            raise Exception("duplicate worksheet name %r" % sheetname)
        self.__worksheet_idx_from_name[lower_name] = len(self.__worksheets)
        self.__worksheets.append(Worksheet.Worksheet(sheetname, self, cell_overwrite_ok))
        return self.__worksheets[-1]
    def get_sheet(self, sheetnum):
        """Return the worksheet at (0-based) position *sheetnum*."""
        return self.__worksheets[sheetnum]
    def raise_bad_sheetname(self, sheetname):
        """Raise a formula error for an unknown sheet name."""
        raise Exception("Formula: unknown sheet name %s" % sheetname)
def convert_sheetindex(self, strg_ref, n_sheets):
idx = int(strg_ref)
if 0 <= idx < n_sheets:
return idx
msg = "Formula: sheet index (%s) >= number of sheets (%d)" % (strg_ref, n_sheets)
raise Exception(msg)
def _get_supbook_index(self, tag):
if tag in self._supbook_xref:
return self._supbook_xref[tag]
self._supbook_xref[tag] = idx = len(self._supbook_xref)
return idx
    def setup_ownbook(self):
        """Lazily register the internal ('own workbook') SUPBOOK and its
        sheet-reference slot; called once before the first internal
        cross-sheet reference is recorded."""
        self._ownbook_supbookx = self._get_supbook_index(('ownbook', 0))
        self._ownbook_supbook_ref = None
        # (supbook, 0xFFFE, 0xFFFE) is the workbook-level reference entry.
        reference = (self._ownbook_supbookx, 0xFFFE, 0xFFFE)
        if reference in self.__sheet_refs:
            raise Exception("can't happen")
        self.__sheet_refs[reference] = self._ownbook_supbook_ref = len(self.__sheet_refs)
    def setup_xcall(self):
        """Lazily register the SUPBOOK used for external function calls
        (add-in functions) and its sheet-reference slot."""
        self._xcall_supbookx = self._get_supbook_index(('xcall', 0))
        self._xcall_supbook_ref = None
        reference = (self._xcall_supbookx, 0xFFFE, 0xFFFE)
        if reference in self.__sheet_refs:
            raise Exception("can't happen")
        self.__sheet_refs[reference] = self._xcall_supbook_ref = len(self.__sheet_refs)
    def add_sheet_reference(self, formula):
        """Resolve sheet-range and external-function references in *formula*.

        Each (ref0, ref1) sheet range -- given as sheet names or numeric
        strings -- is mapped to a slot in the workbook's sheet-reference
        table, and each external call name to a slot in the xcall name
        table; the resulting (offset, index) pairs are then patched into
        the formula's binary form via formula.patch_references().
        """
        patches = []
        n_sheets = len(self.__worksheets)
        sheet_refs, xcall_refs = formula.get_references()
        for ref0, ref1, offset in sheet_refs:
            # Either end of the range may be a sheet name or a digit string.
            if not ref0.isdigit():
                try:
                    ref0n = self.__worksheet_idx_from_name[ref0.lower()]
                except KeyError:
                    self.raise_bad_sheetname(ref0)
            else:
                ref0n = self.convert_sheetindex(ref0, n_sheets)
            if ref1 == ref0:
                ref1n = ref0n
            elif not ref1.isdigit():
                try:
                    ref1n = self.__worksheet_idx_from_name[ref1.lower()]
                except KeyError:
                    self.raise_bad_sheetname(ref1)
            else:
                ref1n = self.convert_sheetindex(ref1, n_sheets)
            if ref1n < ref0n:
                msg = "Formula: sheets out of order; %r:%r -> (%d, %d)" \
                    % (ref0, ref1, ref0n, ref1n)
                raise Exception(msg)
            if self._ownbook_supbookx is None:
                self.setup_ownbook()
            reference = (self._ownbook_supbookx, ref0n, ref1n)
            if reference in self.__sheet_refs:
                patches.append((offset, self.__sheet_refs[reference]))
            else:
                # First occurrence of this range: assign the next slot.
                # Indexes are capped at 65535 (16-bit field).
                nrefs = len(self.__sheet_refs)
                if nrefs > 65535:
                    raise Exception('More than 65536 inter-sheet references')
                self.__sheet_refs[reference] = nrefs
                patches.append((offset, nrefs))
        for funcname, offset in xcall_refs:
            if self._ownbook_supbookx is None:
                self.setup_ownbook()
            if self._xcall_supbookx is None:
                self.setup_xcall()
            # print funcname, self._supbook_xref
            patches.append((offset, self._xcall_supbook_ref))
            if not isinstance(funcname, unicode):
                funcname = funcname.decode(self.encoding)
            if funcname in self._xcall_xref:
                idx = self._xcall_xref[funcname]
            else:
                self._xcall_xref[funcname] = idx = len(self._xcall_xref)
            # +1: the patched name index appears to be 1-based -- confirm
            # against BIFFRecords.ExternnameRecord before changing.
            patches.append((offset + 2, idx + 1))
        formula.patch_references(patches)
##################################################################
## BIFF records generation
##################################################################
def __bof_rec(self):
return BIFFRecords.Biff8BOFRecord(BIFFRecords.Biff8BOFRecord.BOOK_GLOBAL).get()
def __eof_rec(self):
return BIFFRecords.EOFRecord().get()
def __intf_hdr_rec(self):
return BIFFRecords.InteraceHdrRecord().get()
def __intf_end_rec(self):
return BIFFRecords.InteraceEndRecord().get()
def __intf_mms_rec(self):
return BIFFRecords.MMSRecord().get()
def __write_access_rec(self):
return BIFFRecords.WriteAccessRecord(self.__owner).get()
def __wnd_protect_rec(self):
return BIFFRecords.WindowProtectRecord(self.__wnd_protect).get()
def __obj_protect_rec(self):
return BIFFRecords.ObjectProtectRecord(self.__obj_protect).get()
def __protect_rec(self):
return BIFFRecords.ProtectRecord(self.__protect).get()
def __password_rec(self):
return BIFFRecords.PasswordRecord().get()
def __prot4rev_rec(self):
return BIFFRecords.Prot4RevRecord().get()
def __prot4rev_pass_rec(self):
return BIFFRecords.Prot4RevPassRecord().get()
def __backup_rec(self):
return BIFFRecords.BackupRecord(self.__backup_on_save).get()
def __hide_obj_rec(self):
return BIFFRecords.HideObjRecord().get()
    def __window1_rec(self):
        """Build the WINDOW1 record describing the workbook window state."""
        # Pack the individual window-state booleans into the option-flags
        # word expected by Window1Record (bit positions per the BIFF spec).
        flags = 0
        flags |= (self.__wnd_hidden) << 0
        flags |= (self.__wnd_mini) << 1
        flags |= (self.__hscroll_visible) << 3
        flags |= (self.__vscroll_visible) << 4
        flags |= (self.__tabs_visible) << 5
        return BIFFRecords.Window1Record(self.__hpos_twips, self.__vpos_twips,
                                self.__width_twips, self.__height_twips,
                                flags,
                                self.__active_sheet, self.__first_tab_index,
                                self.__selected_tabs, self.__tab_width_twips).get()
def __codepage_rec(self):
return BIFFRecords.CodepageBiff8Record().get()
def __country_rec(self):
if not self.__country_code:
return ''
return BIFFRecords.CountryRecord(self.__country_code, self.__country_code).get()
def __dsf_rec(self):
return BIFFRecords.DSFRecord().get()
def __tabid_rec(self):
return BIFFRecords.TabIDRecord(len(self.__worksheets)).get()
def __fngroupcount_rec(self):
return BIFFRecords.FnGroupCountRecord().get()
def __datemode_rec(self):
return BIFFRecords.DateModeRecord(self.__dates_1904).get()
def __precision_rec(self):
return BIFFRecords.PrecisionRecord(self.__use_cell_values).get()
def __refresh_all_rec(self):
return BIFFRecords.RefreshAllRecord().get()
def __bookbool_rec(self):
return BIFFRecords.BookBoolRecord().get()
def __all_fonts_num_formats_xf_styles_rec(self):
return self.__styles.get_biff_data()
def __palette_rec(self):
result = ''
return result
def __useselfs_rec(self):
return BIFFRecords.UseSelfsRecord().get()
    def __boundsheets_rec(self, data_len_before, data_len_after, sheet_biff_lens):
        """Build the BOUNDSHEET records, one per worksheet.

        Each BOUNDSHEET stores the absolute stream offset of its sheet's
        substream.  Stream layout:

            <data_len_before bytes> BOUNDSHEET0..N <data_len_after bytes>
            WORKSHEET0 WORKSHEET1 ...

        A first pass with a dummy offset measures the length of the
        BOUNDSHEET block itself (this relies on the record's length being
        independent of the offset value); the second pass emits the
        records with the real offsets.
        """
        boundsheets_len = 0
        for sheet in self.__worksheets:
            boundsheets_len += len(BIFFRecords.BoundSheetRecord(
                0x00L, sheet.visibility, sheet.name, self.encoding
                ).get())
        # Offset of the first worksheet's data within the stream.
        start = data_len_before + boundsheets_len + data_len_after
        result = ''
        for sheet_biff_len, sheet in zip(sheet_biff_lens, self.__worksheets):
            result += BIFFRecords.BoundSheetRecord(
                start, sheet.visibility, sheet.name, self.encoding
                ).get()
            start += sheet_biff_len
        return result
    def __all_links_rec(self):
        """Build the SUPBOOK / EXTERNNAME / EXTERNSHEET records covering
        every sheet range and external function referenced by formulas."""
        pieces = []
        # Emit SUPBOOK records in the order their indexes were assigned.
        temp = [(idx, tag) for tag, idx in self._supbook_xref.items()]
        temp.sort()
        for idx, tag in temp:
            stype, snum = tag
            if stype == 'ownbook':
                rec = BIFFRecords.InternalReferenceSupBookRecord(len(self.__worksheets)).get()
                pieces.append(rec)
            elif stype == 'xcall':
                rec = BIFFRecords.XcallSupBookRecord().get()
                pieces.append(rec)
                # EXTERNNAME records follow their SUPBOOK, in index order.
                temp = [(idx, name) for name, idx in self._xcall_xref.items()]
                temp.sort()
                for idx, name in temp:
                    rec = BIFFRecords.ExternnameRecord(
                        options=0, index=0, name=name, fmla='\x02\x00\x1c\x17').get()
                    pieces.append(rec)
            else:
                raise Exception('unknown supbook stype %r' % stype)
        if len(self.__sheet_refs) > 0:
            # get references in index order
            temp = [(idx, ref) for ref, idx in self.__sheet_refs.items()]
            temp.sort()
            temp = [ref for idx, ref in temp]
            externsheet_record = BIFFRecords.ExternSheetRecord(temp).get()
            pieces.append(externsheet_record)
        return ''.join(pieces)
def __sst_rec(self):
return self.__sst.get_biff_record()
def __ext_sst_rec(self, abs_stream_pos):
return ''
#return BIFFRecords.ExtSSTRecord(abs_stream_pos, self.sst_record.str_placement,
#self.sst_record.portions_len).get()
    def get_biff_data(self):
        """Assemble and return the workbook's complete BIFF byte stream.

        Layout: workbook-global records, BOUNDSHEET directory, the
        country/link/shared-string records, EXTSST, EOF, then each
        worksheet's own substream.  The record order below is mandated by
        the file format -- do not reorder.
        """
        # Workbook-global records that precede the BOUNDSHEET directory.
        before = ''
        before += self.__bof_rec()
        before += self.__intf_hdr_rec()
        before += self.__intf_mms_rec()
        before += self.__intf_end_rec()
        before += self.__write_access_rec()
        before += self.__codepage_rec()
        before += self.__dsf_rec()
        before += self.__tabid_rec()
        before += self.__fngroupcount_rec()
        before += self.__wnd_protect_rec()
        before += self.__protect_rec()
        before += self.__obj_protect_rec()
        before += self.__password_rec()
        before += self.__prot4rev_rec()
        before += self.__prot4rev_pass_rec()
        before += self.__backup_rec()
        before += self.__hide_obj_rec()
        before += self.__window1_rec()
        before += self.__datemode_rec()
        before += self.__precision_rec()
        before += self.__refresh_all_rec()
        before += self.__bookbool_rec()
        before += self.__all_fonts_num_formats_xf_styles_rec()
        before += self.__palette_rec()
        before += self.__useselfs_rec()
        # Records that follow the BOUNDSHEET directory.
        country = self.__country_rec()
        all_links = self.__all_links_rec()
        shared_str_table = self.__sst_rec()
        after = country + all_links + shared_str_table
        ext_sst = self.__ext_sst_rec(0) # need fake cause we need calc stream pos
        eof = self.__eof_rec()
        self.__worksheets[self.__active_sheet].selected = True
        # Render every worksheet first so BOUNDSHEET offsets can be computed
        # from the rendered lengths.
        sheets = ''
        sheet_biff_lens = []
        for sheet in self.__worksheets:
            data = sheet.get_biff_data()
            sheets += data
            sheet_biff_lens.append(len(data))
        bundlesheets = self.__boundsheets_rec(len(before), len(after)+len(ext_sst)+len(eof), sheet_biff_lens)
        # Re-render EXTSST now that the SST's real stream position is known.
        sst_stream_pos = len(before) + len(bundlesheets) + len(country) + len(all_links)
        ext_sst = self.__ext_sst_rec(sst_stream_pos)
        return before + bundlesheets + after + ext_sst + eof + sheets
    def save(self, filename):
        """Serialize the workbook and write it to *filename* as an OLE2
        compound document (.xls)."""
        import CompoundDoc
        doc = CompoundDoc.XlsDoc()
        doc.save(filename, self.get_biff_data())
| apache-2.0 |
adambrenecki/django | django/db/backends/sqlite3/base.py | 1 | 23152 | """
SQLite3 backend for django.
Works with either the pysqlite2 module or the sqlite3 module in the
standard library.
"""
from __future__ import unicode_literals
import datetime
import decimal
import warnings
import re
from django.conf import settings
from django.db import utils
from django.db.backends import (util, BaseDatabaseFeatures,
BaseDatabaseOperations, BaseDatabaseWrapper, BaseDatabaseValidation)
from django.db.backends.sqlite3.client import DatabaseClient
from django.db.backends.sqlite3.creation import DatabaseCreation
from django.db.backends.sqlite3.introspection import DatabaseIntrospection
from django.db.backends.sqlite3.schema import DatabaseSchemaEditor
from django.db.models import fields
from django.db.models.sql import aggregates
from django.utils.dateparse import parse_date, parse_datetime, parse_time
from django.utils.encoding import force_text
from django.utils.functional import cached_property
from django.utils.safestring import SafeBytes
from django.utils import six
from django.utils import timezone
try:
try:
from pysqlite2 import dbapi2 as Database
except ImportError:
from sqlite3 import dbapi2 as Database
except ImportError as exc:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading either pysqlite2 or sqlite3 modules (tried in that order): %s" % exc)
try:
import pytz
except ImportError:
pytz = None
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
def parse_datetime_with_timezone_support(value):
dt = parse_datetime(value)
# Confirm that dt is naive before overwriting its tzinfo.
if dt is not None and settings.USE_TZ and timezone.is_naive(dt):
dt = dt.replace(tzinfo=timezone.utc)
return dt
def adapt_datetime_with_timezone_support(value):
# Equivalent to DateTimeField.get_db_prep_value. Used only by raw SQL.
if settings.USE_TZ:
if timezone.is_naive(value):
warnings.warn("SQLite received a naive datetime (%s)"
" while time zone support is active." % value,
RuntimeWarning)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
value = value.astimezone(timezone.utc).replace(tzinfo=None)
return value.isoformat(str(" "))
def decoder(conv_func):
    """Wrap *conv_func* so it receives text rather than raw bytes.

    The sqlite3 converter API always hands values to converters as byte
    strings; the returned callable decodes them as UTF-8 first and then
    delegates to ``conv_func``.
    """
    def convert(raw_bytes):
        return conv_func(raw_bytes.decode('utf-8'))
    return convert
Database.register_converter(str("bool"), decoder(lambda s: s == '1'))
Database.register_converter(str("time"), decoder(parse_time))
Database.register_converter(str("date"), decoder(parse_date))
Database.register_converter(str("datetime"), decoder(parse_datetime_with_timezone_support))
Database.register_converter(str("timestamp"), decoder(parse_datetime_with_timezone_support))
Database.register_converter(str("TIMESTAMP"), decoder(parse_datetime_with_timezone_support))
Database.register_converter(str("decimal"), decoder(util.typecast_decimal))
Database.register_adapter(datetime.datetime, adapt_datetime_with_timezone_support)
Database.register_adapter(decimal.Decimal, util.rev_typecast_decimal)
if six.PY2:
Database.register_adapter(str, lambda s: s.decode('utf-8'))
Database.register_adapter(SafeBytes, lambda s: s.decode('utf-8'))
class DatabaseFeatures(BaseDatabaseFeatures):
    """Feature flags describing what the SQLite backend can and cannot do."""
    # SQLite cannot handle us only partially reading from a cursor's result set
    # and then writing the same rows to the database in another cursor. This
    # setting ensures we always read result sets fully into memory all in one
    # go.
    can_use_chunked_reads = False
    test_db_allows_multiple_connections = False
    supports_unspecified_pk = True
    supports_timezones = False
    supports_1000_query_parameters = False
    supports_mixed_date_datetime_comparisons = False
    has_bulk_insert = True
    can_combine_inserts_with_and_without_auto_increment_pk = False
    supports_foreign_keys = False
    supports_check_constraints = False
    autocommits_when_autocommit_is_off = True
    supports_paramstyle_pyformat = False
    supports_sequence_reset = False
    @cached_property
    def uses_savepoints(self):
        # SAVEPOINT support requires a sufficiently recent SQLite library.
        return Database.sqlite_version_info >= (3, 6, 8)
    @cached_property
    def supports_stddev(self):
        """Confirm support for STDDEV and related stats functions

        SQLite supports STDDEV as an extension package; so
        connection.ops.check_aggregate_support() can't unilaterally
        rule out support for STDDEV. We need to manually check
        whether the call works.
        """
        # Probe with a throwaway table; cached_property ensures this runs
        # at most once per connection wrapper.
        cursor = self.connection.cursor()
        cursor.execute('CREATE TABLE STDDEV_TEST (X INT)')
        try:
            cursor.execute('SELECT STDDEV(*) FROM STDDEV_TEST')
            has_support = True
        except utils.DatabaseError:
            has_support = False
        cursor.execute('DROP TABLE STDDEV_TEST')
        return has_support
    @cached_property
    def has_zoneinfo_database(self):
        # Time-zone-aware datetime lookups need pytz's Olson database.
        return pytz is not None
class DatabaseOperations(BaseDatabaseOperations):
def bulk_batch_size(self, fields, objs):
"""
SQLite has a compile-time default (SQLITE_LIMIT_VARIABLE_NUMBER) of
999 variables per query.
If there is just single field to insert, then we can hit another
limit, SQLITE_MAX_COMPOUND_SELECT which defaults to 500.
"""
limit = 999 if len(fields) > 1 else 500
return (limit // len(fields)) if len(fields) > 0 else len(objs)
def check_aggregate_support(self, aggregate):
bad_fields = (fields.DateField, fields.DateTimeField, fields.TimeField)
bad_aggregates = (aggregates.Sum, aggregates.Avg,
aggregates.Variance, aggregates.StdDev)
if (isinstance(aggregate.source, bad_fields) and
isinstance(aggregate, bad_aggregates)):
raise NotImplementedError(
'You cannot use Sum, Avg, StdDev and Variance aggregations '
'on date/time fields in sqlite3 '
'since date/time is saved as text.')
def date_extract_sql(self, lookup_type, field_name):
# sqlite doesn't support extract, so we fake it with the user-defined
# function django_date_extract that's registered in connect(). Note that
# single quotes are used because this is a string (and could otherwise
# cause a collision with a field name).
return "django_date_extract('%s', %s)" % (lookup_type.lower(), field_name)
def date_interval_sql(self, sql, connector, timedelta):
# It would be more straightforward if we could use the sqlite strftime
# function, but it does not allow for keeping six digits of fractional
# second information, nor does it allow for formatting date and datetime
# values differently. So instead we register our own function that
# formats the datetime combined with the delta in a manner suitable
# for comparisons.
return 'django_format_dtdelta(%s, "%s", "%d", "%d", "%d")' % (sql,
connector, timedelta.days, timedelta.seconds, timedelta.microseconds)
def date_trunc_sql(self, lookup_type, field_name):
# sqlite doesn't support DATE_TRUNC, so we fake it with a user-defined
# function django_date_trunc that's registered in connect(). Note that
# single quotes are used because this is a string (and could otherwise
# cause a collision with a field name).
return "django_date_trunc('%s', %s)" % (lookup_type.lower(), field_name)
def datetime_extract_sql(self, lookup_type, field_name, tzname):
# Same comment as in date_extract_sql.
if settings.USE_TZ:
if pytz is None:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("This query requires pytz, "
"but it isn't installed.")
return "django_datetime_extract('%s', %s, %%s)" % (
lookup_type.lower(), field_name), [tzname]
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
# Same comment as in date_trunc_sql.
if settings.USE_TZ:
if pytz is None:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("This query requires pytz, "
"but it isn't installed.")
return "django_datetime_trunc('%s', %s, %%s)" % (
lookup_type.lower(), field_name), [tzname]
def drop_foreignkey_sql(self):
return ""
def pk_default_value(self):
return "NULL"
def quote_name(self, name):
if name.startswith('"') and name.endswith('"'):
return name # Quoting once is enough.
return '"%s"' % name
def quote_parameter(self, value):
# Inner import to allow nice failure for backend if not present
import _sqlite3
try:
value = _sqlite3.adapt(value)
except _sqlite3.ProgrammingError:
pass
# Manual emulation of SQLite parameter quoting
if isinstance(value, six.integer_types):
return str(value)
elif isinstance(value, six.string_types):
return six.text_type(value)
elif isinstance(value, type(True)):
return str(int(value))
elif value is None:
return "NULL"
else:
raise ValueError("Cannot quote parameter value %r" % value)
def no_limit_value(self):
return -1
def sql_flush(self, style, tables, sequences, allow_cascade=False):
# NB: The generated SQL below is specific to SQLite
# Note: The DELETE FROM... SQL generated below works for SQLite databases
# because constraints don't exist
sql = ['%s %s %s;' % (
style.SQL_KEYWORD('DELETE'),
style.SQL_KEYWORD('FROM'),
style.SQL_FIELD(self.quote_name(table))
) for table in tables]
# Note: No requirement for reset of auto-incremented indices (cf. other
# sql_flush() implementations). Just return SQL at this point
return sql
def value_to_db_datetime(self, value):
if value is None:
return None
# SQLite doesn't support tz-aware datetimes
if timezone.is_aware(value):
if settings.USE_TZ:
value = value.astimezone(timezone.utc).replace(tzinfo=None)
else:
raise ValueError("SQLite backend does not support timezone-aware datetimes when USE_TZ is False.")
return six.text_type(value)
def value_to_db_time(self, value):
if value is None:
return None
# SQLite doesn't support tz-aware datetimes
if timezone.is_aware(value):
raise ValueError("SQLite backend does not support timezone-aware times.")
return six.text_type(value)
    def convert_values(self, value, field):
        """SQLite returns floats when it should be returning decimals,
        and gets dates and datetimes wrong.

        For consistency with other backends, coerce when required.
        """
        if value is None:
            return None
        internal_type = field.get_internal_type()
        if internal_type == 'DecimalField':
            return util.typecast_decimal(field.format_number(value))
        # NB: `and` binds tighter than `or`, so this condition reads
        # (internal_type and internal_type.endswith('IntegerField'))
        # or (internal_type == 'AutoField').
        elif internal_type and internal_type.endswith('IntegerField') or internal_type == 'AutoField':
            return int(value)
        elif internal_type == 'DateField':
            return parse_date(value)
        elif internal_type == 'DateTimeField':
            return parse_datetime_with_timezone_support(value)
        elif internal_type == 'TimeField':
            return parse_time(value)
        # No field, or the field isn't known to be a decimal or integer
        return value
def bulk_insert_sql(self, fields, num_values):
res = []
res.append("SELECT %s" % ", ".join(
"%%s AS %s" % self.quote_name(f.column) for f in fields
))
res.extend(["UNION ALL SELECT %s" % ", ".join(["%s"] * len(fields))] * (num_values - 1))
return " ".join(res)
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'sqlite'
# SQLite requires LIKE statements to include an ESCAPE clause if the value
# being escaped has a percent or underscore in it.
# See http://www.sqlite.org/lang_expr.html for an explanation.
operators = {
'exact': '= %s',
'iexact': "LIKE %s ESCAPE '\\'",
'contains': "LIKE %s ESCAPE '\\'",
'icontains': "LIKE %s ESCAPE '\\'",
'regex': 'REGEXP %s',
'iregex': "REGEXP '(?i)' || %s",
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': "LIKE %s ESCAPE '\\'",
'endswith': "LIKE %s ESCAPE '\\'",
'istartswith': "LIKE %s ESCAPE '\\'",
'iendswith': "LIKE %s ESCAPE '\\'",
}
Database = Database
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.features = DatabaseFeatures(self)
self.ops = DatabaseOperations(self)
self.client = DatabaseClient(self)
self.creation = DatabaseCreation(self)
self.introspection = DatabaseIntrospection(self)
self.validation = BaseDatabaseValidation(self)
def get_connection_params(self):
settings_dict = self.settings_dict
if not settings_dict['NAME']:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(
"settings.DATABASES is improperly configured. "
"Please supply the NAME value.")
kwargs = {
'database': settings_dict['NAME'],
'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES,
}
kwargs.update(settings_dict['OPTIONS'])
# Always allow the underlying SQLite connection to be shareable
# between multiple threads. The safe-guarding will be handled at a
# higher level by the `BaseDatabaseWrapper.allow_thread_sharing`
# property. This is necessary as the shareability is disabled by
# default in pysqlite and it cannot be changed once a connection is
# opened.
if 'check_same_thread' in kwargs and kwargs['check_same_thread']:
warnings.warn(
'The `check_same_thread` option was provided and set to '
'True. It will be overridden with False. Use the '
'`DatabaseWrapper.allow_thread_sharing` property instead '
'for controlling thread shareability.',
RuntimeWarning
)
kwargs.update({'check_same_thread': False})
return kwargs
def get_new_connection(self, conn_params):
conn = Database.connect(**conn_params)
conn.create_function("django_date_extract", 2, _sqlite_date_extract)
conn.create_function("django_date_trunc", 2, _sqlite_date_trunc)
conn.create_function("django_datetime_extract", 3, _sqlite_datetime_extract)
conn.create_function("django_datetime_trunc", 3, _sqlite_datetime_trunc)
conn.create_function("regexp", 2, _sqlite_regexp)
conn.create_function("django_format_dtdelta", 5, _sqlite_format_dtdelta)
return conn
def init_connection_state(self):
pass
def create_cursor(self):
return self.connection.cursor(factory=SQLiteCursorWrapper)
def close(self):
self.validate_thread_sharing()
# If database is in memory, closing the connection destroys the
# database. To prevent accidental data loss, ignore close requests on
# an in-memory db.
if self.settings_dict['NAME'] != ":memory:":
BaseDatabaseWrapper.close(self)
def _savepoint_allowed(self):
# When 'isolation_level' is not None, sqlite3 commits before each
# savepoint; it's a bug. When it is None, savepoints don't make sense
# because autocommit is enabled. The only exception is inside atomic
# blocks. To work around that bug, on SQLite, atomic starts a
# transaction explicitly rather than simply disable autocommit.
return self.in_atomic_block
def _set_autocommit(self, autocommit):
if autocommit:
level = None
else:
# sqlite3's internal default is ''. It's different from None.
# See Modules/_sqlite/connection.c.
level = ''
# 'isolation_level' is a misleading API.
# SQLite always runs at the SERIALIZABLE isolation level.
self.connection.isolation_level = level
def check_constraints(self, table_names=None):
"""
Checks each table name in `table_names` for rows with invalid foreign key references. This method is
intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint checks were off.
Raises an IntegrityError on the first invalid foreign key reference encountered (if any) and provides
detailed information about the invalid reference in the error message.
Backends can override this method if they can more directly apply constraint checking (e.g. via "SET CONSTRAINTS
ALL IMMEDIATE")
"""
cursor = self.cursor()
if table_names is None:
table_names = self.introspection.table_names(cursor)
for table_name in table_names:
primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
if not primary_key_column_name:
continue
key_columns = self.introspection.get_key_columns(cursor, table_name)
for column_name, referenced_table_name, referenced_column_name in key_columns:
cursor.execute("""
SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
LEFT JOIN `%s` as REFERRED
ON (REFERRING.`%s` = REFERRED.`%s`)
WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL"""
% (primary_key_column_name, column_name, table_name, referenced_table_name,
column_name, referenced_column_name, column_name, referenced_column_name))
for bad_row in cursor.fetchall():
raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid "
"foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s."
% (table_name, bad_row[0], table_name, column_name, bad_row[1],
referenced_table_name, referenced_column_name))
def is_usable(self):
return True
def _start_transaction_under_autocommit(self):
"""
Start a transaction explicitly in autocommit mode.
Staying in autocommit mode works around a bug of sqlite3 that breaks
savepoints when autocommit is disabled.
"""
self.cursor().execute("BEGIN")
def schema_editor(self, *args, **kwargs):
"Returns a new instance of this backend's SchemaEditor"
return DatabaseSchemaEditor(self, *args, **kwargs)
FORMAT_QMARK_REGEX = re.compile(r'(?<!%)%s')
class SQLiteCursorWrapper(Database.Cursor):
    """
    Django uses "format" style placeholders, but pysqlite2 uses "qmark" style.
    This fixes it -- but note that if you want to use a literal "%s" in a query,
    you'll need to use "%%s".
    """
    def execute(self, query, params=None):
        # Only rewrite placeholders when params are supplied, so a query
        # containing a literal '%' with no params passes through untouched.
        if params is None:
            return Database.Cursor.execute(self, query)
        query = self.convert_query(query)
        return Database.Cursor.execute(self, query, params)
    def executemany(self, query, param_list):
        query = self.convert_query(query)
        return Database.Cursor.executemany(self, query, param_list)
    def convert_query(self, query):
        # '%s' (not preceded by another '%') becomes '?', then literal
        # '%%' collapses back to '%'.
        return FORMAT_QMARK_REGEX.sub('?', query).replace('%%', '%')
def _sqlite_date_extract(lookup_type, dt):
    """SQLite user function backing django_date_extract().

    Returns None for NULL or unparseable input; otherwise the requested
    component of the parsed date/datetime.
    """
    if dt is None:
        return None
    try:
        parsed = util.typecast_timestamp(dt)
    except (ValueError, TypeError):
        return None
    # Django's week_day runs 1 (Sunday) through 7 (Saturday).
    if lookup_type == 'week_day':
        return parsed.isoweekday() % 7 + 1
    return getattr(parsed, lookup_type)
def _sqlite_date_trunc(lookup_type, dt):
try:
dt = util.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'year':
return "%i-01-01" % dt.year
elif lookup_type == 'month':
return "%i-%02i-01" % (dt.year, dt.month)
elif lookup_type == 'day':
return "%i-%02i-%02i" % (dt.year, dt.month, dt.day)
def _sqlite_datetime_extract(lookup_type, dt, tzname):
if dt is None:
return None
try:
dt = util.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if tzname is not None:
dt = timezone.localtime(dt, pytz.timezone(tzname))
if lookup_type == 'week_day':
return (dt.isoweekday() % 7) + 1
else:
return getattr(dt, lookup_type)
def _sqlite_datetime_trunc(lookup_type, dt, tzname):
try:
dt = util.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if tzname is not None:
dt = timezone.localtime(dt, pytz.timezone(tzname))
if lookup_type == 'year':
return "%i-01-01 00:00:00" % dt.year
elif lookup_type == 'month':
return "%i-%02i-01 00:00:00" % (dt.year, dt.month)
elif lookup_type == 'day':
return "%i-%02i-%02i 00:00:00" % (dt.year, dt.month, dt.day)
elif lookup_type == 'hour':
return "%i-%02i-%02i %02i:00:00" % (dt.year, dt.month, dt.day, dt.hour)
elif lookup_type == 'minute':
return "%i-%02i-%02i %02i:%02i:00" % (dt.year, dt.month, dt.day, dt.hour, dt.minute)
elif lookup_type == 'second':
return "%i-%02i-%02i %02i:%02i:%02i" % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
def _sqlite_format_dtdelta(dt, conn, days, secs, usecs):
try:
dt = util.typecast_timestamp(dt)
delta = datetime.timedelta(int(days), int(secs), int(usecs))
if conn.strip() == '+':
dt = dt + delta
else:
dt = dt - delta
except (ValueError, TypeError):
return None
# typecast_timestamp returns a date or a datetime without timezone.
# It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]"
return str(dt)
def _sqlite_regexp(re_pattern, re_string):
    # SQLite REGEXP user function: True when the pattern matches anywhere
    # in the (stringified) value; a NULL value never matches.
    return bool(re.search(re_pattern, force_text(re_string))) if re_string is not None else False
| bsd-3-clause |
albertrdixon/CouchPotatoServer | libs/guessit/transfo/split_explicit_groups.py | 150 | 1738 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2012 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit.textutils import find_first_level_groups
from guessit.patterns import group_delimiters
import functools
import logging
log = logging.getLogger(__name__)
def process(mtree):
    """Split each child node's string into explicit groups, that is,
    substrings enclosed in parentheses, square brackets or curly braces."""
    for child in mtree.children:
        groups = find_first_level_groups(child.value, group_delimiters[0])
        # Re-split every fragment by each delimiter pair in turn, flattening
        # the results as we go.
        for delimiters in group_delimiters:
            refined = []
            for fragment in groups:
                refined.extend(find_first_level_groups(fragment, delimiters))
            groups = refined
        # do not do this at this moment, it is not strong enough and can break other
        # patterns, such as dates, etc...
        #groups = functools.reduce(lambda l, x: l + x.split('-'), groups, [])
        child.split_on_components(groups)
| gpl-3.0 |
itaipubinacional/geocab | solution/src/main/webapp/static/libs/bootstrap/test-infra/s3_cache.py | 1700 | 3523 | #!/usr/bin/env python2.7
from __future__ import absolute_import, unicode_literals, print_function, division
from sys import argv
from os import environ, stat, remove as _delete_file
from os.path import isfile, dirname, basename, abspath
from hashlib import sha256
from subprocess import check_call as run
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from boto.exception import S3ResponseError
NEED_TO_UPLOAD_MARKER = '.need-to-upload'
BYTES_PER_MB = 1024 * 1024
try:
BUCKET_NAME = environ['TWBS_S3_BUCKET']
except KeyError:
raise SystemExit("TWBS_S3_BUCKET environment variable not set!")
def _sha256_of_file(filename):
hasher = sha256()
with open(filename, 'rb') as input_file:
hasher.update(input_file.read())
file_hash = hasher.hexdigest()
print('sha256({}) = {}'.format(filename, file_hash))
return file_hash
def _delete_file_quietly(filename):
try:
_delete_file(filename)
except (OSError, IOError):
pass
def _tarball_size(directory):
    """Return the size of *directory*'s cache tarball, e.g. "12 MiB".

    Fix: the local was previously named ``kib`` although the value is in
    mebibytes (st_size is bytes; BYTES_PER_MB is 1024*1024) and is
    formatted as "MiB".
    """
    size_in_mib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
    return "{} MiB".format(size_in_mib)
def _tarball_filename_for(directory):
return abspath('./{}.tar.gz'.format(basename(directory)))
def _create_tarball(directory):
    """Pack *directory* into its cache tarball via the external tar tool."""
    print("Creating tarball of {}...".format(directory))
    # -C makes archive members relative to the parent directory, so the
    # archive contains a single top-level folder named basename(directory).
    run(['tar', '-czf', _tarball_filename_for(directory), '-C', dirname(directory), basename(directory)])
def _extract_tarball(directory):
    """Unpack *directory*'s cache tarball back into its parent directory."""
    print("Extracting tarball of {}...".format(directory))
    run(['tar', '-xzf', _tarball_filename_for(directory), '-C', dirname(directory)])
def download(directory):
    """Fetch the cached tarball from S3 and unpack it over *directory*.

    On failure, leaves a marker file so a later `upload` run knows the
    cache needs refreshing, then aborts the process with SystemExit.
    Relies on the module globals `key` and `friendly_name` assigned in
    the __main__ block.
    """
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
    try:
        print("Downloading {} tarball from S3...".format(friendly_name))
        key.get_contents_to_filename(_tarball_filename_for(directory))
    except S3ResponseError as err:
        # Touch the marker so the next 'upload' invocation pushes a
        # fresh cache for this dependencies-file hash.
        open(NEED_TO_UPLOAD_MARKER, 'a').close()
        print(err)
        raise SystemExit("Cached {} download failed!".format(friendly_name))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
    print("{} successfully installed from cache.".format(friendly_name))
def upload(directory):
    """Tar up *directory*, push it to S3, then clear the upload marker.

    Uses the module-level ``key`` and ``friendly_name`` globals set up in
    ``__main__``.
    """
    _create_tarball(directory)
    print("Uploading {} tarball to S3... ({})".format(friendly_name, _tarball_size(directory)))
    key.set_contents_from_filename(_tarball_filename_for(directory))
    print("{} cache successfully updated.".format(friendly_name))
    # The cache is fresh again; drop the marker left by a failed download.
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
if __name__ == '__main__':
    # Uses environment variables:
    # AWS_ACCESS_KEY_ID -- AWS Access Key ID
    # AWS_SECRET_ACCESS_KEY -- AWS Secret Access Key
    argv.pop(0)  # drop the script name; only the real arguments remain
    if len(argv) != 4:
        raise SystemExit("USAGE: s3_cache.py <download | upload> <friendly name> <dependencies file> <directory>")
    mode, friendly_name, dependencies_file, directory = argv

    conn = S3Connection()
    # validate=False avoids an extra round-trip; a missing bucket shows up
    # as None below instead.
    bucket = conn.lookup(BUCKET_NAME, validate=False)
    if bucket is None:
        raise SystemExit("Could not access bucket!")

    # The S3 key is the hash of the dependencies file, so any change to the
    # dependency list automatically invalidates the cache.
    dependencies_file_hash = _sha256_of_file(dependencies_file)

    key = Key(bucket, dependencies_file_hash)
    key.storage_class = 'REDUCED_REDUNDANCY'

    if mode == 'download':
        download(directory)
    elif mode == 'upload':
        if isfile(NEED_TO_UPLOAD_MARKER):  # FIXME
            upload(directory)
        else:
            print("No need to upload anything.")
    else:
        raise SystemExit("Unrecognized mode {!r}".format(mode))
| gpl-2.0 |
barnsnake351/nova | nova/virt/ironic/ironic_states.py | 6 | 4453 | # Copyright (c) 2012 NTT DOCOMO, INC.
# Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Mapping of bare metal node states.
Setting the node `power_state` is handled by the conductor's power
synchronization thread. Based on the power state retrieved from the driver
for the node, the state is set to POWER_ON or POWER_OFF, accordingly.
Should this fail, the `power_state` value is left unchanged, and the node
is placed into maintenance mode.
The `power_state` can also be set manually via the API. A failure to change
the state leaves the current state unchanged. The node is NOT placed into
maintenance mode in this case.
"""
#####################
# Provisioning states
#####################
NOSTATE = None
""" No state information.
This state is used with power_state to represent a lack of knowledge of
power state, and in target_*_state fields when there is no target.
Prior to the Kilo release, Ironic set node.provision_state to NOSTATE
when the node was available for provisioning. During Kilo cycle, this was
changed to the AVAILABLE state.
"""
MANAGEABLE = 'manageable'
""" Node is in a manageable state.
This state indicates that Ironic has verified, at least once, that it had
sufficient information to manage the hardware. While in this state, the node
is not available for provisioning (it must be in the AVAILABLE state for that).
"""
AVAILABLE = 'available'
""" Node is available for use and scheduling.
This state is replacing the NOSTATE state used prior to Kilo.
"""
ACTIVE = 'active'
""" Node is successfully deployed and associated with an instance. """
DEPLOYWAIT = 'wait call-back'
""" Node is waiting to be deployed.
This will be the node `provision_state` while the node is waiting for
the driver to finish deployment.
"""
DEPLOYING = 'deploying'
""" Node is ready to receive a deploy request, or is currently being deployed.
A node will have its `provision_state` set to DEPLOYING briefly before it
receives its initial deploy request. It will also move to this state from
DEPLOYWAIT after the callback is triggered and deployment is continued
(disk partitioning and image copying).
"""
DEPLOYFAIL = 'deploy failed'
""" Node deployment failed. """
DEPLOYDONE = 'deploy complete'
""" Node was successfully deployed.
This is mainly a target provision state used during deployment. A successfully
deployed node should go to ACTIVE status.
"""
DELETING = 'deleting'
""" Node is actively being torn down. """
DELETED = 'deleted'
""" Node tear down was successful.
In Juno, target_provision_state was set to this value during node tear down.
In Kilo, this will be a transitory value of provision_state, and never
represented in target_provision_state.
"""
CLEANING = 'cleaning'
""" Node is being automatically cleaned to prepare it for provisioning. """
CLEANWAIT = 'clean wait'
""" Node is waiting for a clean step to be finished.
This will be the node's `provision_state` while the node is waiting for
the driver to finish a cleaning step.
"""
CLEANFAIL = 'clean failed'
""" Node failed cleaning. This requires operator intervention to resolve. """
ERROR = 'error'
""" An error occurred during node processing.
The `last_error` attribute of the node details should contain an error message.
"""
REBUILD = 'rebuild'
""" Node is to be rebuilt.
This is not used as a state, but rather as a "verb" when changing the node's
provision_state via the REST API.
"""
INSPECTING = 'inspecting'
""" Node is under inspection.
This is the provision state used when inspection is started. A successfully
inspected node shall transition to MANAGEABLE status.
"""
INSPECTFAIL = 'inspect failed'
""" Node inspection failed. """
##############
# Power states
##############
POWER_ON = 'power on'
""" Node is powered on. """
POWER_OFF = 'power off'
""" Node is powered off. """
REBOOT = 'rebooting'
""" Node is rebooting. """
| apache-2.0 |
peterschriever/project_embed_systems | userpanel/ajax_api/json_views.py | 1 | 5896 | from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse, HttpRequest
import json
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from control_unit.submodels.CommandIdentifier import *
from control_unit.submodels.UnitCommunication import *
from control_unit.submodels.UnitScanner import *
cacheDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + "\\control_unit\\json_cache\\"
# ajax/json response generator
def buildResponse(dictIn, error=False):
    """Serialise *dictIn* as a JSON HttpResponse.

    When *error* is truthy, or *dictIn* carries an 'error' key, an error
    payload is assembled from dictIn's error fields (with generic
    defaults). Otherwise dictIn's own keys are returned plus
    'error': False. The input dict is copied before the flag is added, so
    callers' dictionaries are no longer mutated in place (the original
    implementation modified them).
    """
    resp = {}  # response dict
    if 'error' in dictIn or error:
        # 2nd parameter in .get is the default value
        resp['error'] = dictIn.get('error', True)
        resp['errorcode'] = dictIn.get('errorcode', '000000')
        resp['error_msg'] = dictIn.get('error_msg', 'Unknown error')
        resp['extra'] = dictIn.get('extra', None)
    else:
        resp = dict(dictIn)  # copy: do not mutate the caller's dict
        resp['error'] = False
    return HttpResponse(json.dumps(resp))
def buildErrorResponse(payload):
    """Shortcut for ``buildResponse(payload, error=True)``.

    The parameter was renamed from ``dict``: it shadowed the builtin, and
    every call site in this module passes it positionally.
    """
    return buildResponse(payload, True)
# Create your views here.
@csrf_exempt  # for debugging
def templateFunction(request):
    """Echo back the POSTed JSON body (an empty object for non-POST requests)."""
    payload = {}
    if request.method == "POST":
        payload = json.loads(request.body.decode('utf-8'))
    return buildResponse(payload)
@csrf_exempt  # for debugging
def testCommandCommunication(request):
    """Debug view: broadcast a getTemperature command to every known unit."""
    command = CommandIdentifier.getCommand('getTemperature')
    units = UnitScanner.getAllUnits()
    result = UnitCommunication.sendCommand(command, units)
    return HttpResponse([result])
def getConnectedDevices(request):
    """Return the count and details of the currently connected devices."""
    info = getConnectedDevicesInfo()
    return buildResponse({'count': len(info), 'info': info})
def getDeviceSettings(request):
    """Return cached settings for one device (POST 'deviceID') or for all.

    Fixes over the original: the local variable no longer shadows the
    builtin ``dict``, and the linear scan over the keys is replaced by a
    direct membership test.
    """
    # Get settings from cache; maps device serial -> {'port': ..., 'settings': ...}.
    settings = readFromCache(cacheDir + 'deviceSettings.json', 'dict')
    deviceID = request.POST.get('deviceID')
    if deviceID is None:
        return buildResponse(settings)
    if deviceID in settings:
        return buildResponse(settings[deviceID]['settings'])
    # Not found.
    return buildErrorResponse({'error_msg': 'Device "' + deviceID + '" not found.',
                               'extra': settings})
def setDeviceSettings(request):
    """Apply new settings to one device, all devices, or the stored defaults.

    POST parameters: 'deviceID' (a device serial, the string 'default', or
    absent to target all devices) plus the individual settings; any setting
    missing from the POST falls back to the stored default.

    Fixes over the original: ``newsettings`` is now initialised (it was
    used before assignment, a guaranteed NameError); posted values are
    converted to numbers before the min/max comparison (POST values arrive
    as strings, so the old comparison was meaningless); the 'default'
    branch returns a JSON response instead of None (a Django view must
    return an HttpResponse); and the result message is built with format()
    instead of concatenating an int into a string (TypeError).
    """
    scanPorts()  # this will force any uninitialized devices to initialize

    # Min/max and default settings come from file; any setting not included
    # in the POST uses the default setting.
    minmaxSet = readFromCache(cacheDir + 'settingsInfo.json', 'dict')
    MINSET = minmaxSet['min']
    MAXSET = minmaxSet['max']
    default = minmaxSet['default']

    deviceID = request.POST.get('deviceID')
    newsettings = {}  # was missing in the original: NameError on first use
    newsettings['temp'] = request.POST.get('temp', default['temp'])
    newsettings['distMax'] = request.POST.get('distMax', default['distMax'])
    newsettings['distMin'] = request.POST.get('distMin', default['distMin'])
    newsettings['light'] = request.POST.get('light', default['light'])

    # Validate every new setting against its allowed range. POST values are
    # strings, so coerce to float before comparing with the numeric bounds.
    # (Assumes the cached min/max/default values are numeric -- TODO confirm.)
    for setting in newsettings:
        value = float(newsettings[setting])
        newsettings[setting] = value
        if value > MAXSET[setting] or value < MINSET[setting]:
            # invalid setting, throw error
            return buildErrorResponse({
                'error_msg': 'Setting "' + setting + '" must be between "' +
                             str(MINSET[setting]) + '" and "' +
                             str(MAXSET[setting]) + '".',
                'extra': {'newsettings': newsettings, 'minmaxSet': minmaxSet}})

    # All settings are valid.
    # Only change the stored default values.
    if deviceID == 'default':
        minmaxSet['default'] = newsettings
        writeToCache(cacheDir + 'settingsInfo.json', minmaxSet)
        # The original fell off the end here and returned None.
        return buildResponse({'msg': 'default settings updated'})

    # Get the current per-device settings from cache.
    devices = readFromCache(cacheDir + 'deviceSettings.json', 'dict')

    # Change ALL devices' settings if deviceID is None; otherwise only the
    # matching device's settings.
    counter = 0
    for device in devices:
        if deviceID is None or deviceID == device:
            # Save settings to the device itself:
            if sendCommandToDevice(devices[device]['port'], 'setSettings', newsettings):
                counter += 1
                # Update the cached dictionary.
                devices[device] = {'port': devices[device]['port'], 'settings': newsettings}
            else:
                # Something went wrong while talking to the device.
                return buildErrorResponse({
                    'error_msg': 'something went wrong while trying to send new settings to the device..',
                    'extra': {'msg': 'applied settings to {0} devices'.format(counter),
                              'counter': counter}})
    writeToCache(cacheDir + 'deviceSettings.json', devices)
    # All done now.
    return buildResponse({'msg': 'applied settings to {0} devices'.format(counter),
                          'counter': counter})
def getGraphUpdate(request):
    """Return cached sensor data for one device, for graphing.

    This endpoint is incomplete: the all-devices mode and refreshing stale
    data are still TODO. Those paths now answer with an explicit API error
    instead of returning ``None`` (a bare ``return`` from a Django view
    raises a 500), and an unknown deviceID no longer raises KeyError.
    """
    scanPorts()  # to force update on connected device cache
    deviceID = request.POST.get('deviceID')
    sensordata = readFromCache(cacheDir + 'sensordata.json', 'dict')
    if deviceID is None:
        # TODO: get data for all devices
        return buildErrorResponse(
            {'error_msg': 'this function is not yet fully supported by the API'})
    if deviceID not in sensordata:
        # Unknown device: the original raised KeyError (HTTP 500) here.
        return buildErrorResponse({'error_msg': 'Device "' + deviceID + '" not found.'})
    # TODO: get the real current timestamp
    currentTime = 300
    timestamp = sensordata[deviceID].get('timestamp', None)
    if timestamp is None or (currentTime - timestamp) > 3600:
        # TODO: fetch fresh sensor data when the cached data is stale
        return buildErrorResponse(
            {'error_msg': 'this function is not yet fully supported by the API'})
    return buildResponse(sensordata[deviceID])
def getWindowblindState(request):
    """Placeholder endpoint: reading the blind state is not implemented yet."""
    message = 'this function is not yet fully supported by the API'
    return buildErrorResponse({'error_msg': message})
def setWindowblindState(request):
    """Placeholder endpoint: setting the blind state is not implemented yet."""
    message = 'this function is not yet fully supported by the API'
    return buildErrorResponse({'error_msg': message})
| gpl-3.0 |
angelapper/edx-platform | openedx/features/course_experience/tests/views/test_course_updates.py | 2 | 4871 | """
Tests for the course updates page.
"""
from courseware.courses import get_course_info_usage_key
from django.core.urlresolvers import reverse
from openedx.core.djangoapps.waffle_utils.testutils import WAFFLE_TABLES
from openedx.features.course_experience.views.course_updates import STATUS_VISIBLE
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls
# Password shared by every test user created in this module.
TEST_PASSWORD = 'test'
# Tables excluded from the assertNumQueries checks in test_queries below.
QUERY_COUNT_TABLE_BLACKLIST = WAFFLE_TABLES
def course_updates_url(course):
    """Return the URL of the course-updates page for *course*."""
    course_id = unicode(course.id)
    return reverse('openedx.course_experience.course_updates',
                   kwargs={'course_id': course_id})
def create_course_update(course, user, content, date='December 31, 1999'):
    """Append a visible course update with the given content and date."""
    updates_usage_key = get_course_info_usage_key(course, 'updates')
    try:
        updates_block = modulestore().get_item(updates_usage_key)
    except ItemNotFoundError:
        updates_block = create_course_updates_block(course, user)
    new_update = {
        "id": len(updates_block.items) + 1,
        "date": date,
        "content": content,
        "status": STATUS_VISIBLE,
    }
    updates_block.items.append(new_update)
    modulestore().update_item(updates_block, user.id)
def create_course_updates_block(course, user):
    """Create and return an empty 'updates' info block for *course*."""
    usage_key = get_course_info_usage_key(course, 'updates')
    updates_block = modulestore().create_item(
        user.id,
        usage_key.course_key,
        usage_key.block_type,
        block_id=usage_key.block_id,
    )
    updates_block.data = ''
    return updates_block
def remove_course_updates(user, course):
    """Delete the course-updates block of *course*, if one exists."""
    usage_key = get_course_info_usage_key(course, 'updates')
    try:
        updates_block = modulestore().get_item(usage_key)
        modulestore().delete_item(updates_block.location, user.id)
    except (ItemNotFoundError, ValueError):
        # No updates block (or an invalid key): nothing to clean up.
        pass
class TestCourseUpdatesPage(SharedModuleStoreTestCase):
    """
    Test the course updates page.
    """
    @classmethod
    def setUpClass(cls):
        """Set up the simplest course possible."""
        # setUpClassAndTestData() already calls setUpClass on SharedModuleStoreTestCase
        # pylint: disable=super-method-not-called
        with super(TestCourseUpdatesPage, cls).setUpClassAndTestData():
            with cls.store.default_store(ModuleStoreEnum.Type.split):
                cls.course = CourseFactory.create()
                with cls.store.bulk_operations(cls.course.id):
                    # Create a basic course structure: chapter -> sequential -> vertical.
                    chapter = ItemFactory.create(category='chapter', parent_location=cls.course.location)
                    section = ItemFactory.create(category='sequential', parent_location=chapter.location)
                    ItemFactory.create(category='vertical', parent_location=section.location)

    @classmethod
    def setUpTestData(cls):
        """Set up and enroll our fake user in the course."""
        cls.user = UserFactory(password=TEST_PASSWORD)
        CourseEnrollment.enroll(cls.user, cls.course.id)

    def setUp(self):
        """
        Set up for the tests.
        """
        super(TestCourseUpdatesPage, self).setUp()
        self.client.login(username=self.user.username, password=TEST_PASSWORD)

    def tearDown(self):
        # Remove the updates created by individual tests before tearing down.
        remove_course_updates(self.user, self.course)
        super(TestCourseUpdatesPage, self).tearDown()

    def test_view(self):
        # Both updates should be rendered on the page.
        create_course_update(self.course, self.user, 'First Message')
        create_course_update(self.course, self.user, 'Second Message')
        url = course_updates_url(self.course)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'First Message')
        self.assertContains(response, 'Second Message')

    def test_queries(self):
        create_course_update(self.course, self.user, 'First Message')

        # Pre-fetch the view to populate any caches
        # NOTE(review): this line only builds the URL string -- it never
        # issues a request, so no caches are actually warmed. It probably
        # was meant to be self.client.get(course_updates_url(self.course));
        # confirm before relying on the query counts below.
        course_updates_url(self.course)

        # Fetch the view and verify that the query counts haven't changed
        with self.assertNumQueries(33, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
            with check_mongo_calls(4):
                url = course_updates_url(self.course)
                self.client.get(url)
nmishkin/tosca-vcloud-plugin | tests/integration/__init__.py | 1 | 3021 | from testconfig import config
import mock
import time
import unittest
from cloudify import mocks as cfy_mocks
from cloudify.exceptions import OperationRetry
from vcloud_plugin_common import Config, VcloudAirClient
# Service-type keys used to select a (vcloud_config, test_config) pair in
# TestCase.__init__ below.
SUBSCRIPTION = 'subscription'
ONDEMAND = 'ondemand'


class IntegrationSubscriptionTestConfig(Config):
    # Integration-test settings for the vCloud Air 'subscription' service.
    VCLOUD_CONFIG_PATH_ENV_VAR = 'VCLOUD_INTEGRATION_TEST_CONFIG_PATH'
    VCLOUD_CONFIG_PATH_DEFAULT = \
        '~/vcloud_integration_subscription_test_config.yaml'


class IntegrationOndemandTestConfig(Config):
    # Integration-test settings for the vCloud Air 'ondemand' service.
    VCLOUD_CONFIG_PATH_ENV_VAR = 'VCLOUD_INTEGRATION_TEST_CONFIG_PATH'
    VCLOUD_CONFIG_PATH_DEFAULT = \
        '~/vcloud_integration_ondemand_test_config.yaml'


class VcloudSubscriptionTestConfig(Config):
    # Plugin connection settings for the 'subscription' service.
    VCLOUD_CONFIG_PATH_ENV_VAR = 'VCLOUD_CONFIG_PATH'
    VCLOUD_CONFIG_PATH_DEFAULT = '~/vcloud_config_subscription.yaml'


class VcloudOndemandTestConfig(Config):
    # Plugin connection settings for the 'ondemand' service.
    VCLOUD_CONFIG_PATH_ENV_VAR = 'VCLOUD_CONFIG_PATH'
    VCLOUD_CONFIG_PATH_DEFAULT = '~/vcloud_config_ondemand.yaml'
class TestCase(unittest.TestCase):
    # Base class for the vcloud integration tests. Python 2 only (uses the
    # py2 print statement and dict.keys() indexing below).

    def __init__(self, testname):
        """Pick the (vcloud_config, test_config) pair for the requested service type."""
        super(TestCase, self).__init__(testname)
        test_configs = {
            SUBSCRIPTION: (VcloudSubscriptionTestConfig().get(),
                           IntegrationSubscriptionTestConfig().get()),
            ONDEMAND: (VcloudOndemandTestConfig().get(),
                       IntegrationOndemandTestConfig().get())}
        # `config` comes from nose's testconfig plugin (--tc=... on the
        # command line); its single key names the service type under test.
        if not config:
            raise RuntimeError(
                "Vcloud Service type not defined."
                "To define service type for tests, add one of command line key"
                " to nosetest command: --tc=ondemand: --tc=subscription:")
        if len(config) != 1:
            raise RuntimeError("Config must contain 1 element")
        self.service_type = config.keys()[0]
        service_config = test_configs.get(self.service_type)
        if not service_config:
            raise RuntimeError(
                "Unknown service_type: {0}. Parameter must one of {1}".
                format(self.service_type, (SUBSCRIPTION, ONDEMAND)))
        self.vcloud_config = service_config[0]
        self.test_config = service_config[1]
        if not self.vcloud_config:
            raise RuntimeError("vcloud_config empty")
        if not self.test_config:
            raise RuntimeError("test_config empty")

    def setUp(self):
        """Create a vCA client under a mocked Cloudify context."""
        print "\nUsed config: {0}".format(self.service_type)
        fake_ctx = cfy_mocks.MockCloudifyContext(
            node_id='test',
            node_name='test',
            properties={})
        with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
            self.vca_client = VcloudAirClient().get(config=self.vcloud_config)

    def _run_with_retry(self, func, ctx):
        # Keep invoking `func` until it stops raising OperationRetry,
        # sleeping for the requested back-off between attempts.
        # NOTE: loops forever if the operation never succeeds.
        while True:
            try:
                return func(ctx=ctx)
            except OperationRetry as e:
                ctx.operation._operation_retry = None
                ctx.logger.info(format(str(e)))
                time.sleep(e.retry_after)
cloudera/Impala | tests/query_test/test_sort.py | 1 | 10722 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from copy import copy, deepcopy
from tests.common.impala_test_suite import ImpalaTestSuite
from tests.common.skip import SkipIfNotHdfsMinicluster
def transpose_results(result, map_fn=lambda x: x):
    """Given a query result (list of strings, each string represents a row), return a list
    of columns, where each column is a list of strings. Optionally, map_fn can be provided
    to be applied to every value, eg. to convert the strings to their underlying types."""
    split_result = [row.split('\t') for row in result]
    # Explicit list comprehension instead of map(): under Python 3 map()
    # returns a lazy iterator, which would silently break the callers that
    # compare these columns with == / sorted(). This form behaves
    # identically on Python 2 and correctly on Python 3.
    return [[map_fn(value) for value in column] for column in zip(*split_result)]
class TestQueryFullSort(ImpalaTestSuite):
    """Test class to do functional validation of sorting when data is spilled to disk."""

    @classmethod
    def get_workload(self):
        # Workload name selects the TPC-H datasets/test vectors.
        return 'tpch'

    @classmethod
    def add_test_dimensions(cls):
        super(TestQueryFullSort, cls).add_test_dimensions()

        if cls.exploration_strategy() == 'core':
            # Restrict 'core' runs to parquet to keep runtime reasonable.
            cls.ImpalaTestMatrix.add_constraint(lambda v:\
                v.get_value('table_format').file_format == 'parquet')

    def test_multiple_buffer_pool_limits(self, vector):
        """Using lineitem table forces the multi-phase sort with low buffer_pool_limit.
        This test takes about a minute."""
        query = """select l_comment, l_partkey, l_orderkey, l_suppkey, l_commitdate
                from lineitem order by l_comment limit 100000"""
        exec_option = copy(vector.get_value('exec_option'))
        exec_option['disable_outermost_topn'] = 1
        exec_option['num_nodes'] = 1
        table_format = vector.get_value('table_format')

        """The first run should fit in memory, the second run is a 2-phase disk sort,
        and the third run is a multi-phase sort (i.e. with an intermediate merge)."""
        for buffer_pool_limit in ['-1', '300m', '130m']:
            exec_option['buffer_pool_limit'] = buffer_pool_limit
            query_result = self.execute_query(
                query, exec_option, table_format=table_format)
            # Verify the first ORDER BY column came back sorted.
            result = transpose_results(query_result.data)
            assert(result[0] == sorted(result[0]))

    def test_multiple_mem_limits_full_output(self, vector):
        """ Exercise a range of memory limits, returning the full sorted input. """
        query = """select o_orderdate, o_custkey, o_comment
                from orders
                order by o_orderdate"""
        exec_option = copy(vector.get_value('exec_option'))
        table_format = vector.get_value('table_format')
        exec_option['default_spillable_buffer_size'] = '8M'

        # Minimum memory for different parts of the plan.
        sort_reservation_mb = 48
        if table_format.file_format == 'parquet':
            scan_reservation_mb = 24
        else:
            scan_reservation_mb = 8
        total_reservation_mb = sort_reservation_mb + scan_reservation_mb

        # The below memory value assume 8M pages.
        # Test with unlimited and minimum memory for all file formats.
        buffer_pool_limit_values = ['-1', '{0}M'.format(total_reservation_mb)]
        if self.exploration_strategy() == 'exhaustive' and \
            table_format.file_format == 'parquet':
            # Test some intermediate values for parquet on exhaustive.
            buffer_pool_limit_values += ['128M', '256M']
        for buffer_pool_limit in buffer_pool_limit_values:
            exec_option['buffer_pool_limit'] = buffer_pool_limit
            result = transpose_results(self.execute_query(
                query, exec_option, table_format=table_format).data)
            assert(result[0] == sorted(result[0]))

    def test_sort_join(self, vector):
        """With minimum memory limit this should be a 1-phase sort"""
        query = """select o1.o_orderdate, o2.o_custkey, o1.o_comment from orders o1 join
                orders o2 on (o1.o_orderkey = o2.o_orderkey) order by o1.o_orderdate limit 100000"""

        exec_option = copy(vector.get_value('exec_option'))
        exec_option['disable_outermost_topn'] = 1
        exec_option['mem_limit'] = "134m"
        exec_option['num_nodes'] = 1
        table_format = vector.get_value('table_format')

        query_result = self.execute_query(query, exec_option, table_format=table_format)
        # A single merge proves the sort completed in one phase.
        assert "TotalMergesPerformed: 1" in query_result.runtime_profile
        result = transpose_results(query_result.data)
        assert(result[0] == sorted(result[0]))

    def test_sort_union(self, vector):
        query = """select o_orderdate, o_custkey, o_comment from (select * from orders union
                select * from orders union all select * from orders) as i
                order by o_orderdate limit 100000"""

        exec_option = copy(vector.get_value('exec_option'))
        exec_option['disable_outermost_topn'] = 1
        exec_option['mem_limit'] = "3000m"
        table_format = vector.get_value('table_format')

        result = transpose_results(self.execute_query(
            query, exec_option, table_format=table_format).data)
        assert(result[0] == sorted(result[0]))

    def test_pathological_input(self, vector):
        """ Regression test for stack overflow and poor performance on certain inputs where
        always selecting the middle element as a quicksort pivot caused poor performance. The
        trick is to concatenate two equal-size sorted inputs. If the middle element is always
        selected as the pivot (the old method), the sorter tends to get stuck selecting the
        minimum element as the pivot, which results in almost all of the tuples ending up
        in the right partition.
        """
        query = """select l_orderkey from (
            select * from lineitem limit 300000
            union all
            select * from lineitem limit 300000) t
            order by l_orderkey"""

        exec_option = copy(vector.get_value('exec_option'))
        exec_option['disable_outermost_topn'] = 1
        # Run with a single scanner thread so that the input doesn't get reordered.
        exec_option['num_nodes'] = "1"
        exec_option['num_scanner_threads'] = "1"
        table_format = vector.get_value('table_format')

        result = transpose_results(self.execute_query(
            query, exec_option, table_format=table_format).data)
        numeric_results = [int(val) for val in result[0]]
        assert(numeric_results == sorted(numeric_results))

    def test_spill_empty_strings(self, vector):
        """Test corner case of spilling sort with only empty strings. Spilling with var len
        slots typically means the sort must reorder blocks and convert pointers, but this case
        has to be handled differently because there are no var len blocks to point into."""

        query = """
        select empty_str, l_orderkey, l_partkey, l_suppkey,
        l_linenumber, l_quantity, l_extendedprice, l_discount, l_tax
        from (select substr(l_comment, 1000, 0) empty_str, * from lineitem) t
        order by empty_str, l_orderkey, l_partkey, l_suppkey, l_linenumber
        limit 100000
        """

        exec_option = copy(vector.get_value('exec_option'))
        exec_option['disable_outermost_topn'] = 1
        exec_option['buffer_pool_limit'] = "256m"
        exec_option['num_nodes'] = "1"
        table_format = vector.get_value('table_format')

        result = transpose_results(self.execute_query(
            query, exec_option, table_format=table_format).data)
        assert(result[0] == sorted(result[0]))

    @SkipIfNotHdfsMinicluster.tuned_for_minicluster
    def test_sort_reservation_usage(self, vector):
        """Tests for sorter reservation usage."""
        new_vector = deepcopy(vector)
        # Run with num_nodes=1 to make execution more deterministic.
        new_vector.get_value('exec_option')['num_nodes'] = 1
        self.run_test_case('sort-reservation-usage-single-node', new_vector)
class TestRandomSort(ImpalaTestSuite):
    # Functional tests for ORDER BY random()/rand().

    @classmethod
    def get_workload(self):
        return 'functional'

    def test_order_by_random(self):
        """Tests that 'order by random()' works as expected."""
        # "order by random()" with different seeds should produce different orderings.
        seed_query = "select * from functional.alltypestiny order by random(%s)"
        results_seed0 = self.execute_query(seed_query % "0")
        results_seed1 = self.execute_query(seed_query % "1")
        assert results_seed0.data != results_seed1.data
        # Same rows, different order.
        assert sorted(results_seed0.data) == sorted(results_seed1.data)

        # Include "random()" in the select list to check that it's sorted correctly.
        results = transpose_results(self.execute_query(
            "select random() as r from functional.alltypessmall order by r").data,
            lambda x: float(x))
        assert(results[0] == sorted(results[0]))

        # Like above, but with a limit.
        results = transpose_results(self.execute_query(
            "select random() as r from functional.alltypes order by r limit 100").data,
            lambda x: float(x))
        assert(results == sorted(results))

        # "order by random()" inside an inline view.
        query = "select r from (select random() r from functional.alltypessmall) v order by r"
        results = transpose_results(self.execute_query(query).data, lambda x: float(x))
        assert (results == sorted(results))

    def test_analytic_order_by_random(self):
        """Tests that a window function over 'order by random()' works as expected."""
        # Since we use the same random seed, the results should be returned in order.
        query = """select last_value(rand(2)) over (order by rand(2)) from
            functional.alltypestiny"""
        results = transpose_results(self.execute_query(query).data, lambda x: float(x))
        assert (results == sorted(results))
class TestPartialSort(ImpalaTestSuite):
    """Test class to do functional validation of partial sorts."""

    def test_partial_sort_min_reservation(self, unique_database):
        """Test that the partial sort node can operate if it only gets its minimum
        memory reservation."""
        table_name = "%s.kudu_test" % unique_database
        # Force every node to be denied any reservation increase beyond its
        # minimum via the debug action.
        self.client.set_configuration_option(
            "debug_action", "-1:OPEN:SET_DENY_RESERVATION_PROBABILITY@1.0")
        self.execute_query("""create table %s (col0 string primary key)
            partition by hash(col0) partitions 8 stored as kudu""" % table_name)
        result = self.execute_query(
            "insert into %s select string_col from functional.alltypessmall" % table_name)
        # The profile must show that a partial sort node actually ran.
        assert "PARTIAL SORT" in result.runtime_profile, result.runtime_profile
caesar2164/edx-platform | lms/djangoapps/instructor/enrollment_report.py | 73 | 3294 | """
Defines abstract class for the Enrollment Reports.
"""
from django.contrib.auth.models import User
from student.models import UserProfile
import collections
import json
import abc
class AbstractEnrollmentReportProvider(object):
    """
    Abstract interface for Detailed Enrollment Report Provider
    """
    __metaclass__ = abc.ABCMeta  # Python 2 style ABC declaration

    @abc.abstractmethod
    def get_enrollment_info(self, user, course_id):
        """
        Returns the User Enrollment information.

        Arguments:
            user: the Django User whose enrollment is being reported.
            course_id: the course being reported on.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def get_user_profile(self, user_id):
        """
        Returns the UserProfile information.

        Arguments:
            user_id: primary key of the Django User.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def get_payment_info(self, user, course_id):
        """
        Returns the User Payment information.

        Arguments:
            user: the Django User whose payments are being reported.
            course_id: the course being reported on.
        """
        raise NotImplementedError()
class BaseAbstractEnrollmentReportProvider(AbstractEnrollmentReportProvider):
    """
    The base abstract class for all Enrollment Reports that can support multiple
    backend such as MySQL/Django-ORM.

    # don't allow instantiation of this class, it must be subclassed
    """
    def get_user_profile(self, user_id):
        """
        Return an OrderedDict of profile data for the user with *user_id*.

        Basic fields come from the User/UserProfile models; extended fields
        (first/last name, company, title) come from the JSON-encoded
        ``profile.meta`` column.
        """
        user_info = User.objects.select_related('profile').get(id=user_id)

        # extended user profile fields are stored in the user_profile meta column
        meta = {}
        if user_info.profile.meta:
            meta = json.loads(user_info.profile.meta)

        user_data = collections.OrderedDict()
        user_data['User ID'] = user_info.id
        user_data['Username'] = user_info.username
        user_data['Email'] = user_info.email
        user_data['Full Name'] = user_info.profile.name
        user_data['First Name'] = meta.get('first-name', '')
        user_data['Last Name'] = meta.get('last-name', '')
        user_data['Company Name'] = meta.get('company', '')
        user_data['Title'] = meta.get('title', '')
        user_data['Language'] = user_info.profile.language
        user_data['Country'] = user_info.profile.country
        user_data['Year of Birth'] = user_info.profile.year_of_birth

        # Translate the stored gender code into its display label.
        user_data['Gender'] = None
        gender = user_info.profile.gender
        for _gender in UserProfile.GENDER_CHOICES:
            if gender == _gender[0]:
                user_data['Gender'] = _gender[1]
                break

        # Translate the education-level code into its display label. Now
        # breaks out on a match, consistent with the gender loop above
        # (the original kept scanning the remaining choices needlessly).
        user_data['Level of Education'] = None
        level_of_education = user_info.profile.level_of_education
        for _loe in UserProfile.LEVEL_OF_EDUCATION_CHOICES:
            if level_of_education == _loe[0]:
                user_data['Level of Education'] = _loe[1]
                break

        user_data['Mailing Address'] = user_info.profile.mailing_address
        user_data['Goals'] = user_info.profile.goals
        user_data['City'] = user_info.profile.city
        # The redundant second assignment of 'Country' that used to sit here
        # was removed: OrderedDict keeps the first insertion position, so
        # the resulting dict is unchanged.
        return user_data

    def get_enrollment_info(self, user, course_id):
        """
        Returns the User Enrollment information.
        """
        raise NotImplementedError()

    def get_payment_info(self, user, course_id):
        """
        Returns the User Payment information.
        """
        raise NotImplementedError()
| agpl-3.0 |
bpgc-cte/python2017 | Week 7/django/lib/python3.6/site-packages/django/template/library.py | 115 | 12790 | import functools
import warnings
from importlib import import_module
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.html import conditional_escape
from django.utils.inspect import getargspec
from django.utils.itercompat import is_iterable
from .base import Node, Template, token_kwargs
from .exceptions import TemplateSyntaxError
class InvalidTemplateLibrary(Exception):
    """Raised when a template tag library cannot be loaded or registered."""
    pass
class Library(object):
"""
A class for registering template tags and filters. Compiled filter and
template tag functions are stored in the filters and tags attributes.
The filter, simple_tag, and inclusion_tag methods provide a convenient
way to register callables as tags.
"""
    def __init__(self):
        # Registries mapping a name to its compiled filter function / tag
        # compile function, populated by the filter()/tag() decorators.
        self.filters = {}
        self.tags = {}
def tag(self, name=None, compile_function=None):
if name is None and compile_function is None:
# @register.tag()
return self.tag_function
elif name is not None and compile_function is None:
if callable(name):
# @register.tag
return self.tag_function(name)
else:
# @register.tag('somename') or @register.tag(name='somename')
def dec(func):
return self.tag(name, func)
return dec
elif name is not None and compile_function is not None:
# register.tag('somename', somefunc)
self.tags[name] = compile_function
return compile_function
else:
raise ValueError(
"Unsupported arguments to Library.tag: (%r, %r)" %
(name, compile_function),
)
def tag_function(self, func):
self.tags[getattr(func, "_decorated_function", func).__name__] = func
return func
def filter(self, name=None, filter_func=None, **flags):
"""
Register a callable as a template filter. Example:
@register.filter
def lower(value):
return value.lower()
"""
if name is None and filter_func is None:
# @register.filter()
def dec(func):
return self.filter_function(func, **flags)
return dec
elif name is not None and filter_func is None:
if callable(name):
# @register.filter
return self.filter_function(name, **flags)
else:
# @register.filter('somename') or @register.filter(name='somename')
def dec(func):
return self.filter(name, func, **flags)
return dec
elif name is not None and filter_func is not None:
# register.filter('somename', somefunc)
self.filters[name] = filter_func
for attr in ('expects_localtime', 'is_safe', 'needs_autoescape'):
if attr in flags:
value = flags[attr]
# set the flag on the filter for FilterExpression.resolve
setattr(filter_func, attr, value)
# set the flag on the innermost decorated function
# for decorators that need it, e.g. stringfilter
if hasattr(filter_func, "_decorated_function"):
setattr(filter_func._decorated_function, attr, value)
filter_func._filter_name = name
return filter_func
else:
raise ValueError(
"Unsupported arguments to Library.filter: (%r, %r)" %
(name, filter_func),
)
def filter_function(self, func, **flags):
name = getattr(func, "_decorated_function", func).__name__
return self.filter(name, func, **flags)
def simple_tag(self, func=None, takes_context=None, name=None):
"""
Register a callable as a compiled template tag. Example:
@register.simple_tag
def hello(*args, **kwargs):
return 'world'
"""
def dec(func):
params, varargs, varkw, defaults = getargspec(func)
function_name = (name or getattr(func, '_decorated_function', func).__name__)
@functools.wraps(func)
def compile_func(parser, token):
bits = token.split_contents()[1:]
target_var = None
if len(bits) >= 2 and bits[-2] == 'as':
target_var = bits[-1]
bits = bits[:-2]
args, kwargs = parse_bits(
parser, bits, params, varargs, varkw, defaults,
takes_context, function_name
)
return SimpleNode(func, takes_context, args, kwargs, target_var)
self.tag(function_name, compile_func)
return func
if func is None:
# @register.simple_tag(...)
return dec
elif callable(func):
# @register.simple_tag
return dec(func)
else:
raise ValueError("Invalid arguments provided to simple_tag")
def assignment_tag(self, func=None, takes_context=None, name=None):
warnings.warn(
"assignment_tag() is deprecated. Use simple_tag() instead",
RemovedInDjango20Warning,
stacklevel=2,
)
return self.simple_tag(func, takes_context, name)
def inclusion_tag(self, filename, func=None, takes_context=None, name=None):
"""
Register a callable as an inclusion tag:
@register.inclusion_tag('results.html')
def show_results(poll):
choices = poll.choice_set.all()
return {'choices': choices}
"""
def dec(func):
params, varargs, varkw, defaults = getargspec(func)
function_name = (name or getattr(func, '_decorated_function', func).__name__)
@functools.wraps(func)
def compile_func(parser, token):
bits = token.split_contents()[1:]
args, kwargs = parse_bits(
parser, bits, params, varargs, varkw, defaults,
takes_context, function_name,
)
return InclusionNode(
func, takes_context, args, kwargs, filename,
)
self.tag(function_name, compile_func)
return func
return dec
class TagHelperNode(Node):
    """
    Common base for the nodes produced by the tag helpers (SimpleNode and
    InclusionNode).  Stores the decorated callable together with its raw
    positional/keyword argument expressions and resolves them against a
    template context on demand.
    """

    def __init__(self, func, takes_context, args, kwargs):
        self.func = func
        self.takes_context = takes_context
        self.args = args
        self.kwargs = kwargs

    def get_resolved_arguments(self, context):
        """Resolve the stored argument expressions against ``context``."""
        positional = [expression.resolve(context) for expression in self.args]
        if self.takes_context:
            # The decorated function expects the context as its first argument.
            positional.insert(0, context)
        keyword = {name: expression.resolve(context)
                   for name, expression in self.kwargs.items()}
        return positional, keyword
class SimpleNode(TagHelperNode):
    """Node for ``simple_tag``; optionally assigns its output to a variable."""

    def __init__(self, func, takes_context, args, kwargs, target_var):
        super(SimpleNode, self).__init__(func, takes_context, args, kwargs)
        self.target_var = target_var

    def render(self, context):
        args, kwargs = self.get_resolved_arguments(context)
        result = self.func(*args, **kwargs)
        if self.target_var is None:
            # Rendered inline: honour autoescaping before emitting the value.
            return conditional_escape(result) if context.autoescape else result
        # ``{% tag ... as var %}`` form: set the variable, render nothing.
        context[self.target_var] = result
        return ''
class InclusionNode(TagHelperNode):
    """Node for ``inclusion_tag``; renders a sub-template with the dict the
    decorated function returns."""

    def __init__(self, func, takes_context, args, kwargs, filename):
        super(InclusionNode, self).__init__(func, takes_context, args, kwargs)
        self.filename = filename

    def render(self, context):
        """
        Render the specified template and context. Cache the template object
        in render_context to avoid reparsing and loading when used in a for
        loop.
        """
        resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
        _dict = self.func(*resolved_args, **resolved_kwargs)

        t = context.render_context.get(self)
        if t is None:
            # ``filename`` may be a Template, an object wrapping one, a list
            # of template names, or a single template name string.
            if isinstance(self.filename, Template):
                t = self.filename
            elif isinstance(getattr(self.filename, 'template', None), Template):
                t = self.filename.template
            elif not isinstance(self.filename, six.string_types) and is_iterable(self.filename):
                t = context.template.engine.select_template(self.filename)
            else:
                t = context.template.engine.get_template(self.filename)
            context.render_context[self] = t
        new_context = context.new(_dict)
        # Copy across the CSRF token, if present, because inclusion tags are
        # often used for forms, and we need instructions for using CSRF
        # protection to be as simple as possible.
        csrf_token = context.get('csrf_token')
        if csrf_token is not None:
            new_context['csrf_token'] = csrf_token
        return t.render(new_context)
def parse_bits(parser, bits, params, varargs, varkw, defaults,
               takes_context, name):
    """
    Parse bits for template tag helpers simple_tag and inclusion_tag, in
    particular by detecting syntax errors and by extracting positional and
    keyword arguments.

    ``bits`` are the tag's space-split tokens (minus the tag name);
    ``params``, ``varargs``, ``varkw`` and ``defaults`` describe the
    decorated function's signature as returned by getargspec().  Returns an
    (args, kwargs) pair of compiled FilterExpressions; raises
    TemplateSyntaxError for unexpected, duplicate, misordered or missing
    arguments.
    """
    if takes_context:
        if params[0] == 'context':
            # The context is supplied by the node at render time, so it is
            # not an argument the template author can pass.
            params = params[1:]
        else:
            raise TemplateSyntaxError(
                "'%s' is decorated with takes_context=True so it must "
                "have a first argument of 'context'" % name)
    args = []
    kwargs = {}
    unhandled_params = list(params)
    for bit in bits:
        # First we try to extract a potential kwarg from the bit
        kwarg = token_kwargs([bit], parser)
        if kwarg:
            # The kwarg was successfully extracted
            param, value = kwarg.popitem()
            if param not in params and varkw is None:
                # An unexpected keyword argument was supplied
                raise TemplateSyntaxError(
                    "'%s' received unexpected keyword argument '%s'" %
                    (name, param))
            elif param in kwargs:
                # The keyword argument has already been supplied once
                raise TemplateSyntaxError(
                    "'%s' received multiple values for keyword argument '%s'" %
                    (name, param))
            else:
                # All good, record the keyword argument
                kwargs[str(param)] = value
                if param in unhandled_params:
                    # If using the keyword syntax for a positional arg, then
                    # consume it.
                    unhandled_params.remove(param)
        else:
            if kwargs:
                raise TemplateSyntaxError(
                    "'%s' received some positional argument(s) after some "
                    "keyword argument(s)" % name)
            else:
                # Record the positional argument
                args.append(parser.compile_filter(bit))
                try:
                    # Consume from the list of expected positional arguments
                    unhandled_params.pop(0)
                except IndexError:
                    if varargs is None:
                        raise TemplateSyntaxError(
                            "'%s' received too many positional arguments" %
                            name)
    if defaults is not None:
        # Consider the last n params handled, where n is the
        # number of defaults.
        unhandled_params = unhandled_params[:-len(defaults)]
    if unhandled_params:
        # Some positional arguments were not supplied
        raise TemplateSyntaxError(
            "'%s' did not receive value(s) for the argument(s): %s" %
            (name, ", ".join("'%s'" % p for p in unhandled_params)))
    return args, kwargs
def import_library(name):
    """
    Import the template tag module ``name`` and return its ``register``
    Library instance.

    Raises InvalidTemplateLibrary if the module cannot be imported or does
    not expose a ``register`` attribute.
    """
    try:
        module = import_module(name)
    except ImportError as e:
        raise InvalidTemplateLibrary(
            "Invalid template library specified. ImportError raised when "
            "trying to load '%s': %s" % (name, e)
        )
    if not hasattr(module, 'register'):
        raise InvalidTemplateLibrary(
            "Module %s does not have a variable named 'register'" % name,
        )
    return module.register
| mit |
dataxu/ansible | test/units/modules/remote_management/oneview/test_oneview_san_manager.py | 77 | 9119 | # Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible.compat.tests import unittest, mock
from oneview_module_loader import SanManagerModule
from hpe_test_utils import OneViewBaseTestCase
from copy import deepcopy
# Placeholder error text available to tests that need a failure message.
FAKE_MSG_ERROR = 'Fake message error'

# Canned SAN manager resource used as the fixture for all test scenarios
# below; mirrors the shape returned by the OneView REST API.
DEFAULT_SAN_MANAGER_TEMPLATE = dict(
    name='172.18.15.1',
    providerDisplayName='Brocade Network Advisor',
    uri='/rest/fc-sans/device-managers/UUU-AAA-BBB',
    refreshState='OK',
    connectionInfo=[
        {
            'valueFormat': 'IPAddressOrHostname',
            'displayName': 'Host',
            'name': 'Host',
            'valueType': 'String',
            'required': False,
            'value': '172.18.15.1'
        }]
)
class SanManagerModuleSpec(unittest.TestCase,
                           OneViewBaseTestCase):
    """
    Unit tests for SanManagerModule covering the 'present', 'absent' and
    'connection_information_set' states.  The OneView client is mocked via
    OneViewBaseTestCase.configure_mocks; each test drives the module with a
    params dict and asserts the resulting exit_json/fail_json call.
    """
    # Module params for creating/updating the fixture resource.
    PARAMS_FOR_PRESENT = dict(
        config='config.json',
        state='present',
        data=DEFAULT_SAN_MANAGER_TEMPLATE
    )

    # Module params for setting connection information.
    PARAMS_FOR_CONNECTION_INFORMATION_SET = dict(
        config='config.json',
        state='connection_information_set',
        data=DEFAULT_SAN_MANAGER_TEMPLATE.copy()
    )

    # Module params that differ from the fixture, forcing an update.
    PARAMS_WITH_CHANGES = dict(
        config='config.json',
        state='present',
        data=dict(name=DEFAULT_SAN_MANAGER_TEMPLATE['name'],
                  refreshState='RefreshPending')
    )

    # Module params for removing the fixture resource.
    PARAMS_FOR_ABSENT = dict(
        config='config.json',
        state='absent',
        data=dict(name=DEFAULT_SAN_MANAGER_TEMPLATE['name'])
    )

    def setUp(self):
        # Wires up mock_ov_client / mock_ansible_module on self.
        self.configure_mocks(self, SanManagerModule)
        self.resource = self.mock_ov_client.san_managers

    def test_should_add_new_san_manager(self):
        self.resource.get_by_name.return_value = []
        self.resource.get_provider_uri.return_value = '/rest/fc-sans/providers/123/device-managers'
        self.resource.add.return_value = DEFAULT_SAN_MANAGER_TEMPLATE

        self.mock_ansible_module.params = self.PARAMS_FOR_PRESENT

        SanManagerModule().run()

        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=SanManagerModule.MSG_CREATED,
            ansible_facts=dict(san_manager=DEFAULT_SAN_MANAGER_TEMPLATE)
        )

    def test_should_find_provider_uri_to_add(self):
        self.resource.get_by_name.return_value = []
        self.resource.get_provider_uri.return_value = '/rest/fc-sans/providers/123/device-managers'
        self.resource.add.return_value = DEFAULT_SAN_MANAGER_TEMPLATE

        self.mock_ansible_module.params = self.PARAMS_FOR_PRESENT

        SanManagerModule().run()

        provider_display_name = DEFAULT_SAN_MANAGER_TEMPLATE['providerDisplayName']
        self.resource.get_provider_uri.assert_called_once_with(provider_display_name)

    def test_should_not_update_when_data_is_equals(self):
        # The module strips connectionInfo from the facts it reports back.
        output_data = deepcopy(DEFAULT_SAN_MANAGER_TEMPLATE)
        output_data.pop('connectionInfo')
        self.resource.get_by_name.return_value = deepcopy(DEFAULT_SAN_MANAGER_TEMPLATE)
        self.resource.get_provider_uri.return_value = '/rest/fc-sans/providers/123/device-managers'
        self.mock_ansible_module.params = self.PARAMS_FOR_PRESENT

        SanManagerModule().run()

        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            msg=SanManagerModule.MSG_ALREADY_PRESENT,
            ansible_facts=dict(san_manager=output_data)
        )

    def test_update_when_data_has_modified_attributes(self):
        data_merged = deepcopy(DEFAULT_SAN_MANAGER_TEMPLATE)
        data_merged['fabricType'] = 'DirectAttach'

        self.resource.get_by_name.return_value = DEFAULT_SAN_MANAGER_TEMPLATE
        self.resource.get_provider_uri.return_value = '/rest/fc-sans/providers/123/device-managers'
        self.resource.update.return_value = data_merged
        self.mock_ansible_module.params = self.PARAMS_WITH_CHANGES

        SanManagerModule().run()

        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=SanManagerModule.MSG_UPDATED,
            ansible_facts=dict(san_manager=data_merged)
        )

    def test_update_should_not_send_connection_info_when_not_informed_on_data(self):
        merged_data = deepcopy(DEFAULT_SAN_MANAGER_TEMPLATE)
        merged_data['refreshState'] = 'RefreshPending'
        output_data = deepcopy(merged_data)
        output_data.pop('connectionInfo')

        self.resource.get_by_name.return_value = DEFAULT_SAN_MANAGER_TEMPLATE
        self.resource.get_provider_uri.return_value = '/rest/fc-sans/providers/123/device-managers'
        self.resource.update.return_value = merged_data
        self.mock_ansible_module.params = self.PARAMS_WITH_CHANGES

        SanManagerModule().run()

        self.resource.update.assert_called_once_with(resource=output_data, id_or_uri=output_data['uri'])

    def test_should_remove_san_manager(self):
        self.resource.get_by_name.return_value = deepcopy(DEFAULT_SAN_MANAGER_TEMPLATE)
        self.resource.get_provider_uri.return_value = '/rest/fc-sans/providers/123/device-managers'

        self.mock_ansible_module.params = self.PARAMS_FOR_ABSENT.copy()

        SanManagerModule().run()

        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=SanManagerModule.MSG_DELETED
        )

    def test_should_do_nothing_when_san_manager_not_exist(self):
        self.resource.get_by_name.return_value = []

        self.mock_ansible_module.params = self.PARAMS_FOR_ABSENT.copy()

        SanManagerModule().run()

        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            msg=SanManagerModule.MSG_ALREADY_ABSENT
        )

    def test_should_fail_when_name_not_found(self):
        self.resource.get_by_name.return_value = []
        self.resource.get_provider_uri.return_value = None

        self.mock_ansible_module.params = self.PARAMS_FOR_PRESENT

        SanManagerModule().run()

        self.mock_ansible_module.fail_json.assert_called_once_with(
            exception=mock.ANY,
            msg="The provider 'Brocade Network Advisor' was not found."
        )

    def test_should_fail_when_name_and_hosts_in_connectionInfo_missing(self):
        bad_params = deepcopy(self.PARAMS_FOR_PRESENT)
        bad_params['data'].pop('name')
        bad_params['data'].pop('connectionInfo')

        self.mock_ansible_module.params = bad_params

        SanManagerModule().run()

        msg = 'A "name" or "connectionInfo" must be provided inside the "data" field for this operation. '
        msg += 'If a "connectionInfo" is provided, the "Host" name is considered as the "name" for the resource.'

        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY, msg=msg)

    def test_connection_information_set_should_set_the_connection_information(self):
        data_merged = deepcopy(DEFAULT_SAN_MANAGER_TEMPLATE)
        data_merged['fabricType'] = 'DirectAttach'

        self.resource.get_by_name.return_value = DEFAULT_SAN_MANAGER_TEMPLATE
        self.resource.get_provider_uri.return_value = '/rest/fc-sans/providers/123/device-managers'
        self.resource.update.return_value = data_merged
        self.mock_ansible_module.params = self.PARAMS_FOR_CONNECTION_INFORMATION_SET

        SanManagerModule().run()

        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=SanManagerModule.MSG_UPDATED,
            ansible_facts=dict(san_manager=data_merged)
        )

    def test_should_add_new_san_manager_when_connection_information_set_called_without_resource(self):
        self.resource.get_by_name.return_value = []
        self.resource.get_provider_uri.return_value = '/rest/fc-sans/providers/123/device-managers'
        self.resource.add.return_value = DEFAULT_SAN_MANAGER_TEMPLATE

        self.mock_ansible_module.params = self.PARAMS_FOR_CONNECTION_INFORMATION_SET

        SanManagerModule().run()

        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=SanManagerModule.MSG_CREATED,
            ansible_facts=dict(san_manager=DEFAULT_SAN_MANAGER_TEMPLATE)
        )

    def test_should_fail_when_required_attribute_missing(self):
        bad_params = deepcopy(self.PARAMS_FOR_CONNECTION_INFORMATION_SET)
        bad_params['data'] = self.PARAMS_FOR_CONNECTION_INFORMATION_SET['data'].copy()
        bad_params['data'].pop('connectionInfo')

        self.resource.get_by_name.return_value = DEFAULT_SAN_MANAGER_TEMPLATE
        self.resource.get_provider_uri.return_value = '/rest/fc-sans/providers/123/device-managers'

        self.mock_ansible_module.params = bad_params

        SanManagerModule().run()

        msg = 'A connectionInfo field is required for this operation.'

        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY, msg=msg)
| gpl-3.0 |
Innovahn/cybex | addons/base_report_designer/plugin/openerp_report_designer/bin/script/NewReport.py | 384 | 3903 | #########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer d29583@groovegarden.com
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
import uno
import string
import unohelper
import xmlrpclib
from com.sun.star.task import XJobExecutor
# When run standalone (not packaged as an OpenOffice extension), pull in the
# plugin helper libraries and use fixed test credentials.
# NOTE: Python 2 source — '<>' is the legacy inequality operator.
if __name__<>"package":
    from lib.gui import *
    from lib.error import ErrorDialog
    from lib.functions import *
    from lib.logreport import *
    from LoginTest import *
    from lib.rpc import *
    database="test"  # default database name for standalone testing
    uid = 3          # default user id for standalone testing
#
#
#
# Start OpenOffice.org, listen for connections and open testing document
#
#
class NewReport(unohelper.Base, XJobExecutor):
    """
    Modal OpenOffice dialog that lists the server's ir.model records and
    stores the selected model name in the current document's user fields,
    so later report-designer steps know which model the report is for.
    """
    def __init__(self, ctx):
        self.ctx = ctx
        self.module = "openerp_report"
        self.version = "0.1"
        # Verify the user is logged in before showing the dialog.
        LoginTest()
        self.logobj=Logger()
        if not loginstatus and __name__=="package":
            exit(1)

        self.win=DBModalDialog(60, 50, 180, 115, "Open New Report")
        self.win.addFixedText("lblModuleSelection", 2, 2, 60, 15, "Module Selection")
        self.win.addComboListBox("lstModule", -2,13,176,80 , False)
        self.lstModule = self.win.getControl( "lstModule" )
        # Parallel list: aModuleName[i] is the technical model name for the
        # i-th display name shown in lstModule.
        self.aModuleName=[]
        desktop=getDesktop()
        doc = desktop.getCurrentComponent()
        docinfo=doc.getDocumentInfo()
        # Credentials/URL are module-level globals set by the login dialog.
        global passwd
        self.password = passwd
        global url
        self.sock=RPCSession(url)
        # Fetch all models from the server, sorted by display name.
        ids = self.sock.execute(database, uid, self.password, 'ir.model' , 'search',[])
        fields = [ 'model','name']
        res = self.sock.execute(database, uid, self.password, 'ir.model' , 'read', ids, fields)
        res.sort(lambda x, y: cmp(x['name'],y['name']))
        for i in range(len(res)):
            self.lstModule.addItem(res[i]['name'],self.lstModule.getItemCount())
            self.aModuleName.append(res[i]['model'])
        self.win.addButton('btnOK',-2 ,-5, 70,15,'Use Module in Report' ,actionListenerProc = self.btnOk_clicked )
        self.win.addButton('btnCancel',-2 - 70 - 5 ,-5, 35,15,'Cancel' ,actionListenerProc = self.btnCancel_clicked )
        self.win.doModalDialog("",None)

    def btnOk_clicked(self, oActionEvent):
        """Persist the chosen model name into document user field 3 and close."""
        desktop=getDesktop()
        doc = desktop.getCurrentComponent()
        docinfo=doc.getDocumentInfo()
        docinfo.setUserFieldValue(3,self.aModuleName[self.lstModule.getSelectedItemPos()])
        self.logobj.log_write('Module Name',LOG_INFO, ':Module use in creating a report %s using database %s' % (self.aModuleName[self.lstModule.getSelectedItemPos()], database))
        self.win.endExecute()

    def btnCancel_clicked(self, oActionEvent):
        """Close the dialog without saving anything."""
        self.win.endExecute()
# Standalone: run the dialog directly.  Packaged: register the class as a
# UNO Job component so OpenOffice can instantiate it.
if __name__<>"package" and __name__=="__main__":
    NewReport(None)
elif __name__=="package":
    g_ImplementationHelper.addImplementation( \
            NewReport,
            "org.openoffice.openerp.report.opennewreport",
            ("com.sun.star.task.Job",),)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Agent007/deepchem | examples/kaggle/KAGGLE_tf_robust.py | 6 | 3237 | """
Script that trains Tensorflow Robust Multitask models on KAGGLE datasets.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import os
import numpy as np
import tempfile
import shutil
import deepchem as dc
from deepchem.molnet import load_kaggle
###Load data###
shard_size = 2000
num_trials = 2
print("About to load KAGGLE data.")
KAGGLE_tasks, datasets, transformers = load_kaggle(shard_size=shard_size)
train_dataset, valid_dataset, test_dataset = datasets

print("Number of compounds in train set")
print(len(train_dataset))
print("Number of compounds in validation set")
print(len(valid_dataset))
print("Number of compounds in test set")
print(len(test_dataset))

n_layers = 3
n_bypass_layers = 3
nb_epoch = 100

#Use R2 classification metric
metric = dc.metrics.Metric(dc.metrics.pearson_r2_score, task_averager=np.mean)


def print_trial_scores(trial, scores):
  """Print the mean and per-task train/valid/test scores for one trial.

  ``scores`` is the 6-tuple (train_score, train_task_scores, valid_score,
  valid_task_scores, test_score, test_task_scores) stored in all_results.
  """
  (train_score, train_task_scores, valid_score, valid_task_scores, test_score,
   test_task_scores) = scores
  print("Scores for trial %d" % trial)
  print("----------------------------------------------------------------")
  print("train_task_scores")
  print(train_task_scores)
  print("Mean Train score")
  print(train_score)
  print("valid_task_scores")
  print(valid_task_scores)
  print("Mean Validation score")
  print(valid_score)
  print("test_task_scores")
  print(test_task_scores)
  print("Mean Test score")
  print(test_score)


all_results = []
for trial in range(num_trials):
  model = dc.models.RobustMultitaskRegressor(
      len(KAGGLE_tasks),
      train_dataset.get_data_shape()[0],
      layer_sizes=[2000, 1000, 1000],
      bypass_layer_sizes=[200] * n_bypass_layers,
      dropouts=[.25] * n_layers,
      bypass_dropouts=[.25] * n_bypass_layers,
      weight_init_stddevs=[.02] * n_layers,
      bias_init_consts=[1.] * n_layers,
      bypass_weight_init_stddevs=[.02] * n_bypass_layers,
      bypass_bias_init_consts=[1.] * n_bypass_layers,
      learning_rate=.00003,
      weight_decay_penalty=.0004,
      weight_decay_penalty_type="l2",
      batch_size=100)

  print("Fitting Model")
  model.fit(train_dataset, nb_epoch=nb_epoch)

  print("Evaluating models")
  train_score, train_task_scores = model.evaluate(
      train_dataset, [metric], transformers, per_task_metrics=True)
  valid_score, valid_task_scores = model.evaluate(
      valid_dataset, [metric], transformers, per_task_metrics=True)
  test_score, test_task_scores = model.evaluate(
      test_dataset, [metric], transformers, per_task_metrics=True)

  all_results.append((train_score, train_task_scores, valid_score,
                      valid_task_scores, test_score, test_task_scores))

  # Report this trial's scores immediately (same output as before).
  print_trial_scores(trial, all_results[-1])

print("####################################################################")

# Final summary: re-print every trial's scores from the accumulated results.
for trial in range(num_trials):
  print_trial_scores(trial, all_results[trial])
| mit |
tedder/ansible | test/units/modules/network/onyx/test_onyx_l2_interface.py | 68 | 5130 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.onyx import onyx_l2_interface
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxInterfaceModule(TestOnyxModule):
    """
    Unit tests for the onyx_l2_interface module.  The switch configuration
    is faked from a fixture file; each test sets module args and asserts
    whether (and which) CLI commands the module would issue.
    """
    module = onyx_l2_interface

    def setUp(self):
        super(TestOnyxInterfaceModule, self).setUp()
        # Replace config retrieval, config loading and version detection
        # with mocks so no device connection is needed.
        self.mock_get_config = patch.object(
            onyx_l2_interface.OnyxL2InterfaceModule, "_get_switchport_config")
        self.get_config = self.mock_get_config.start()

        self.mock_load_config = patch(
            'ansible.module_utils.network.onyx.onyx.load_config')
        self.load_config = self.mock_load_config.start()
        self.mock_get_version = patch.object(
            onyx_l2_interface.OnyxL2InterfaceModule, "_get_os_version")
        self.get_version = self.mock_get_version.start()

    def tearDown(self):
        super(TestOnyxInterfaceModule, self).tearDown()
        self.mock_get_config.stop()
        self.mock_load_config.stop()

    def load_fixtures(self, commands=None, transport='cli'):
        # Canned "show" output for the mocked switchport config.
        config_file = 'onyx_l2_interface_show.cfg'
        self.get_config.return_value = load_fixture(config_file)
        self.load_config.return_value = None
        self.get_version.return_value = "3.6.5000"

    def test_access_vlan_no_change(self):
        set_module_args(dict(name='Eth1/11', access_vlan=1))
        self.execute_module(changed=False)

    def test_trunk_vlans_no_change(self):
        set_module_args(dict(name='Eth1/10', mode='hybrid', access_vlan=1,
                             trunk_allowed_vlans=[10]))
        self.execute_module(changed=False)

    def test_access_vlan_change(self):
        set_module_args(dict(name='Eth1/11', access_vlan=10))
        commands = ['interface ethernet 1/11', 'switchport access vlan 10',
                    'exit']
        self.execute_module(changed=True, commands=commands)

    def test_trunk_vlan_change(self):
        set_module_args(dict(name='Eth1/10', mode='hybrid', access_vlan=1,
                             trunk_allowed_vlans=[11]))
        commands = ['interface ethernet 1/10',
                    'switchport hybrid allowed-vlan remove 10',
                    'switchport hybrid allowed-vlan add 11', 'exit']
        self.execute_module(changed=True, commands=commands)

    def test_trunk_vlan_add(self):
        set_module_args(dict(name='Eth1/10', mode='hybrid', access_vlan=1,
                             trunk_allowed_vlans=[10, 11]))
        commands = ['interface ethernet 1/10',
                    'switchport hybrid allowed-vlan add 11', 'exit']
        self.execute_module(changed=True, commands=commands)

    def test_switch_port_access(self):
        set_module_args(dict(name='Eth1/12', mode='access', access_vlan=11))
        commands = ['interface ethernet 1/12', 'switchport mode access',
                    'switchport access vlan 11', 'exit']
        self.execute_module(changed=True, commands=commands)

    def test_switch_port_trunk(self):
        set_module_args(dict(name='Eth1/12', mode='trunk',
                             trunk_allowed_vlans=[11]))
        commands = ['interface ethernet 1/12', 'switchport mode trunk',
                    'switchport trunk allowed-vlan add 11', 'exit']
        self.execute_module(changed=True, commands=commands)

    def test_switch_port_hybrid(self):
        set_module_args(dict(name='Eth1/12', mode='hybrid', access_vlan=10,
                             trunk_allowed_vlans=[11]))
        commands = ['interface ethernet 1/12', 'switchport mode hybrid',
                    'switchport access vlan 10',
                    'switchport hybrid allowed-vlan add 11', 'exit']
        self.execute_module(changed=True, commands=commands)

    def test_aggregate(self):
        aggregate = list()
        aggregate.append(dict(name='Eth1/10'))
        aggregate.append(dict(name='Eth1/12'))
        set_module_args(dict(aggregate=aggregate, access_vlan=10))
        commands = ['interface ethernet 1/10', 'switchport mode access',
                    'switchport access vlan 10', 'exit',
                    'interface ethernet 1/12', 'switchport mode access',
                    'switchport access vlan 10', 'exit']
        self.execute_module(changed=True, commands=commands, sort=False)
| gpl-3.0 |
r-praveen-jain/paparazzi | sw/ground_segment/python/real_time_plot/plotpanel.py | 50 | 14196 | from __future__ import absolute_import, print_function, division
import wx
from ivy.std_api import *
import logging
from textdroptarget import *
import math
import random
import sys
import os
import messagepicker
sys.path.append(os.getenv("PAPARAZZI_HOME") + "/sw/lib/python")
import messages_xml_map
class plot_data:
    """One telemetry curve: a fixed-size ring buffer of samples plus the
    state needed to draw it (color, scale/offset, running statistics).

    ``data`` holds ``size`` slots; ``index`` is the slot written next (and,
    being set to None after a write, also marks the gap between newest and
    oldest samples when drawing).
    """
    def __init__(self, ivy_msg_id, title, width, color = None):
        self.id = ivy_msg_id
        self.title = title
        self.SetPlotSize(width)
        # Sentinels so the first GetXMinMax() pass always replaces them.
        self.x_min = 1e32
        self.x_max = -1e32  # was 1e-32: sign typo, fixed to match GetXMinMax()
        self.avg = 0.0
        self.std_dev = 0.0
        self.real_time = False
        self.scale = 1.0
        self.offset = 0.0
        if (color != None):
            self.color = color
        else:
            # Pick a random color for this curve.
            # wx.Colour (not the removed wx.Color alias) works on both
            # classic wxPython and Phoenix.
            r,g,b = random.randint(0,255),random.randint(0,255),random.randint(0,255)
            self.color = wx.Colour(r,g,b)

    def SetRealTime(self, value):
        # When True, the buffer advances on every AddPoint instead of once
        # per redraw.
        self.real_time = value

    def SetOffset(self, value):
        self.offset = value

    def SetScale(self, value):
        self.scale = value

    def SetPlotSize(self, size):
        """Resize the ring buffer to ``size`` slots and clear all samples."""
        self.size = size
        self.index = size-1 # holds the index of the next point to add and the first point to draw
        self.data = [] # holds the list of points to plot
        for i in range(size):
            self.data.append(None)
        self.avg = 0.0
        self.std_dev = 0.0

    def AddPoint(self, point, x_axis):
        """Store ``point`` at the write cursor; advance the cursor only in
        real-time mode or when plotting against another curve (x_axis)."""
        self.data[self.index] = point
        if self.real_time or (x_axis != None):
            self.index = (self.index + 1) % self.size # increment index to next point
            self.data[self.index] = None

    def DrawTitle(self, dc, margin, width, height):
        """Draw the legend entry (color swatch + stats + title); return its height."""
        text ='avg:%.2f std:%.2f %s' % (self.avg, self.std_dev, self.title)
        (w,h) = dc.GetTextExtent(text)
        dc.SetBrush(wx.Brush(self.color))
        dc.DrawRectangle( width-h-margin, height, h, h)
        dc.DrawText(text, width-2*margin-w-h, height)
        return h

    def DrawCurve(self, dc, width, height, margin, _max_, _min_, x_axis):
        """Draw the curve scaled into [_min_, _max_] and refresh avg/std_dev.

        If ``x_axis`` is another plot_data, its samples supply the x
        coordinates (x-y plot); otherwise samples are spread evenly over the
        panel width (time series).
        """
        if width != self.size:
            # Panel was resized: rebuild the buffer and wait for new data.
            self.SetPlotSize(width)
            return
        if (not self.real_time) and (x_axis == None):
            self.index = (self.index + 1) % self.size # increment index to next point
            self.data[self.index] = None
        if x_axis != None:
            (x_min, x_max) = x_axis.GetXMinMax()
        dc.SetPen(wx.Pen(self.color,1))
        # Guard against inverted or degenerate y ranges before dividing.
        if _max_ < _min_:
            (_min_, _max_) = (-1,1) #prevent divide by zero or inversion
        if _max_ == _min_:
            (_min_, _max_) = (_max_-0.5, _max_+0.5)
        delta = _max_-_min_
        dy = (height - margin*2) / delta
        n = 0
        sums = 0.0
        sum_squares = 0.0
        lines = []
        point_1 = None  # previous (x, y) pixel, None until the first sample
        for i in range(self.size):
            # Walk the ring buffer from oldest to newest.
            ix = (i+self.index) % self.size
            point = self.data[ix]
            if point == None:
                continue
            n += 1
            sums = sums + point
            sum_squares = sum_squares + (point*point)
            if x_axis != None:
                x = x_axis.data[ix]
                if x == None:
                    continue
                dx = (width-1) / (x_max-x_min)
                x = int((x-x_min) * dx)
            else:
                x = i * width / self.size
            scaled_point = (point + self.offset) * self.scale
            y = height - margin - int((scaled_point - _min_)*dy)
            if point_1 != None:
                line = (point_1[0], point_1[1], x, y)
                lines.append( line)
            point_1 = (x,y)
        dc.DrawLineList(lines)
        if n > 0:
            self.avg = sums / n
            self.std_dev = math.sqrt(math.fabs((sum_squares / n) - (self.avg * self.avg)))

    def GetXMinMax(self):
        """Return (min, max) of the stored samples, widened to a non-empty,
        non-degenerate interval; also caches them on self.x_min/self.x_max."""
        x_min = 1e32
        x_max = -1e32
        for i in range(self.size):
            point = self.data[i]
            if point == None: continue
            x_min = min( x_min, point)
            x_max = max( x_max, point)
        if x_max < x_min:
            (x_min, x_max) = (-1,1) #prevent divide by zero or inversion
        if x_max == x_min:
            (x_min, x_max) = (x_max-0.5, x_max+0.5)
        self.x_max = x_max
        self.x_min = x_min
        return (x_min, x_max)
# Application name announced on the Ivy bus.
_IVY_APPNAME='JobyPlot'
# Regexp template for Ivy bindings: filled with (aircraft id, message name).
_IVY_STRING = '(%s %s .*$)'
#_IVY_STRING = '^([^ ]*) +(%s( .*|$))' ## <-- from original ocaml (doesn't work here, just returns Sender field...)

def create(parent, frame):
    """Factory used by the plotter shell to instantiate this panel."""
    return PlotPanel(parent, frame)
class PlotPanel():
    """Real-time telemetry plotter drawing on its parent wx window.

    Listens on the Ivy bus for telemetry messages, keeps one ``plot_data``
    curve per (aircraft id, message, field) and periodically redraws all
    curves via a timer. ``self.plots`` is a nested dict:
    ``plots[ac_id][message][field] -> plot_data``.
    """

    def __init__(self, parent, frame):
        self.parent = parent  # we are drawing on our parent, so dc comes from this
        self.frame = frame  # the frame owns any controls we might need to update
        parent.SetDropTarget( TextDropTarget(self))  # calls self.OnDropText when drag and drop complete
        self.InitIvy()
        self.width = 800
        self.height = 200
        self.margin = min(self.height / 10, 20)
        self.font = wx.Font(self.margin/2, wx.DEFAULT, wx.FONTFLAG_DEFAULT, wx.FONTWEIGHT_NORMAL)
        self.pixmap = wx.EmptyBitmap(self.width, self.height)
        self.plot_size = self.width
        self.max = -1e32
        self.min = 1e32
        self.plot_interval = 200
        self.plots = {}
        self.auto_scale = True
        self.offset = 0.0
        self.scale = 1.0
        self.x_axis = None
        messages_xml_map.ParseMessages()
        # start the timer
        self.timer = wx.FutureCall( self.plot_interval, self.OnTimer)

    def SetPlotInterval(self, value):
        """Change the redraw period (milliseconds)."""
        self.plot_interval = value
        # NOTE(review): Restart() plus creating a new FutureCall may leave the
        # old timer firing one extra time -- confirm intended behaviour.
        self.timer.Restart(self.plot_interval)
        self.timer = wx.FutureCall( self.plot_interval, self.OnTimer)

    def SetAutoScale(self, value):
        self.auto_scale = value

    def SetMin(self, value):
        self.min = value

    def SetMax(self, value):
        self.max = value

    def Pause(self, pause):
        """Stop (pause=True) or restart (pause=False) the redraw timer."""
        if pause:
            self.timer.Stop()
        else:
            self.timer = wx.FutureCall( self.plot_interval, self.OnTimer)

    def ResetScale(self):
        """Forget the accumulated y-range so it is re-learned from new data."""
        self.max = -1e32
        self.min = 1e32

    def OnClose(self):
        self.timer.Stop()
        IvyStop()

    def OnErase(self, event):
        # Intentionally empty: all painting is done via the buffered DC.
        pass

    def ShowMessagePicker(self, parent):
        frame = messagepicker.MessagePicker(parent, self.BindCurve, False)
        frame.Show()

    def InitIvy(self):
        """Connect to the Ivy bus; on failure shut the bus back down."""
        # initialising the bus
        IvyInit(_IVY_APPNAME,  # application name for Ivy
                "",  # "[%s is ready]" % IVYAPPNAME, # ready message
                0,  # main loop is local (ie. using IvyMainloop)
                lambda x, y: y,  # handler called on connection/deconnection
                lambda x, y: y  # handler called when a diemessage is received
                )
        # starting the bus
        # Note: env variable IVYBUS will be used if no parameter or empty string
        # is given ; this is performed by IvyStart (C)
        try:
            logging.getLogger('Ivy').setLevel(logging.WARN)
            IvyStart("")
            # binding to every message
            #IvyBindMsg(self.OnIvyMsg, "(.*)")
        except Exception:  # narrowed from bare except: keep KeyboardInterrupt alive
            IvyStop()

    def OnDropText(self, data):
        """Handle a drag-and-drop of 'ac_id:category:message:field' from the picker."""
        [ac_id, category, message, field] = data.encode('ASCII').split(':')
        self.BindCurve(int(ac_id), message, field)

    def OnIvyMsg(self, agent, *larg):
        """Ivy callback: parse one telemetry line and feed bound curves."""
        #print(larg[0])
        data = larg[0].split(' ')
        ac_id = int(data[0])
        message = data[1]
        if ac_id not in self.plots:
            return
        if message not in self.plots[ac_id]:
            return
        for field in self.plots[ac_id][message]:
            plot = self.plots[ac_id][message][field]
            ix = messages_xml_map.message_dictionary["telemetry"][message].index(field)
            point = float(data[ix+2])
            # The x-axis curve itself must not influence the y auto-scale.
            if self.x_axis == None or self.x_axis.id != plot.id:
                if self.auto_scale:
                    scaled_point = (point + plot.offset) * plot.scale
                    self.max = max( self.max, scaled_point)
                    self.min = min( self.min, scaled_point)
                if self.x_axis != None:
                    plot.index = self.x_axis.index
            plot.AddPoint(point, self.x_axis)

    def BindCurve(self, ac_id, message, field, color = None, use_as_x = False):
        """Subscribe to one telemetry field and create its curve (or recolor it)."""
        # -- add this telemetry to our list of things to plot ...
        message_string = _IVY_STRING % (ac_id, message)
        #print('Binding to %s' % message_string)
        if ac_id not in self.plots:
            self.plots[ac_id] = {}
        if message not in self.plots[ac_id]:
            self.plots[ac_id][message] = {}
        if field in self.plots[ac_id][message]:
            # Already bound: just assign a fresh random color.
            self.plots[ac_id][message][field].color = wx.Color(random.randint(0,255),random.randint(0,255),random.randint(0,255))
            return
        ivy_id = IvyBindMsg(self.OnIvyMsg, str(message_string))
        title = '%i:%s:%s' % (ac_id, message, field)
        self.plots[ac_id][message][field] = plot_data( ivy_id, title, self.plot_size, color)
        self.frame.AddCurve( ivy_id, title, use_as_x)
        if (use_as_x):
            self.x_axis = self.plots[ac_id][message][field]

    def CalcMinMax(self, plot):
        """Fold one curve's data into the running y-range and refresh the frame."""
        if not self.auto_scale: return
        # NOTE(review): plot.data may contain None placeholders; verify the
        # comparison semantics are acceptable for the targeted Python version.
        for x in plot.data:
            self.max = max(self.max, x)
            self.min = min(self.min, x)
        self.frame.SetMinMax(self.min, self.max)

    def FindPlotName(self, ivy_id):
        """Return (ac_id, message, field) for the curve with this ivy id, or Nones."""
        for ac_id in self.plots:
            for msg in self.plots[ac_id]:
                for field in self.plots[ac_id][msg]:
                    if self.plots[ac_id][msg][field].id == ivy_id:
                        return (ac_id, msg, field)
        return (None, None, None)

    def FindPlot(self, ivy_id):
        """Return the plot_data with this ivy id, or None."""
        (ac_id, msg, field) = self.FindPlotName( ivy_id)
        if (ac_id == None):
            return None
        return self.plots[ac_id][msg][field]

    def RemovePlot(self, ivy_id):
        """Unbind and drop the curve with this ivy id."""
        (ac_id, msg, field) = self.FindPlotName( ivy_id)
        if ac_id == None:
            return
        if (self.x_axis != None) and (self.x_axis.id == ivy_id):
            self.x_axis = None
        IvyUnBindMsg( ivy_id)
        del self.plots[ac_id][msg][field]
        if len(self.plots[ac_id][msg]) == 0:
            del self.plots[ac_id][msg]

    def OffsetPlot(self, ivy_id, offset):
        """Apply a display offset to the curve identified by ivy_id."""
        plot = self.FindPlot( ivy_id)
        if plot == None:
            return
        plot.SetOffset(offset)
        # BUG FIX: removed debug print of the undefined name `value`
        # (NameError on every call) and qualified CalcMinMax with self --
        # it is a method of this class, not a module-level function.
        self.CalcMinMax(plot)

    def ScalePlot(self, ivy_id, offset):
        """Apply a scale factor to the curve identified by ivy_id.

        NOTE(review): the parameter is named `offset` but is used as a scale
        factor; name kept for keyword-compatibility with existing callers.
        """
        plot = self.FindPlot( ivy_id)
        if plot == None:
            return
        plot.SetScale(offset)
        # BUG FIX: CalcMinMax is a method; the bare call raised NameError.
        self.CalcMinMax(plot)

    def SetRealTime(self, ivy_id, value):
        plot = self.FindPlot( ivy_id)
        if plot == None:
            return
        plot.SetRealTime(value)

    def SetXAxis(self, ivy_id):
        """Use the given curve's data as the shared x axis."""
        plot = self.FindPlot( ivy_id)
        if plot == None:
            return
        self.x_axis = plot

    def ClearXAxis(self):
        self.x_axis = None

    def OnSize(self, size):
        """React to parent resize: reallocate the backing bitmap and font."""
        (width, height) = size
        if( self.width == width and self.height == height):
            return
        self.pixmap = wx.EmptyBitmap(width, height)
        self.width = width
        self.height = height
        self.plot_size = width
        self.margin = min(self.height / 10, 20)
        self.font = wx.Font(self.margin/2, wx.DEFAULT, wx.FONTFLAG_DEFAULT, wx.FONTWEIGHT_NORMAL)

    def OnTimer(self):
        """Periodic redraw: reschedule, push min/max to the frame, repaint."""
        self.timer.Restart(self.plot_interval)
        self.frame.SetMinMax(self.min, self.max)
        self.DrawFrame()

    def DrawFrame(self):
        """Repaint the whole panel: background, then every visible curve."""
        dc = wx.ClientDC(self.parent)
        bdc = wx.BufferedDC( dc, self.pixmap)
        bdc.SetBackground( wx.Brush( "White"))
        bdc.Clear()
        self.DrawBackground(bdc, self.width, self.height)
        title_y = 2
        for ac_id in self.plots:
            for message in self.plots[ac_id]:
                for field in self.plots[ac_id][message]:
                    plot = self.plots[ac_id][message][field]
                    # The x-axis curve is not drawn as a y curve.
                    if (self.x_axis != None) and (self.x_axis.id == plot.id):
                        continue
                    title_height = plot.DrawTitle(bdc, 2, self.width, title_y)
                    plot.DrawCurve(bdc, self.width, self.height, self.margin, self.max, self.min, self.x_axis)
                    title_y += title_height + 2

    def DrawBackground(self, dc, width, height):
        """Draw the x-axis labels and y-axis graduations."""
        # Time Graduations
        dc.SetFont(self.font)
        if self.x_axis == None:
            # No explicit x axis: label with elapsed time based on sample rate.
            t = self.plot_interval * width
            t1 = "0.0s"
            t2 = "-%.1fs" % (t/2000.0)
            t3 = "-%.1fs" % (t/1000.0)
        else:
            x_max = self.x_axis.x_max
            x_min = self.x_axis.x_min
            t1 = "%.2f" % x_max
            t2 = "%.2f" % (x_min + (x_max-x_min)/2.0)
            t3 = "%.2f" % x_min
        (w,h) = dc.GetTextExtent(t1)
        dc.DrawText(t1, width-w, height-h)
        #(w,h) = dc.GetTextExtent(t2) #save time since h will be the same
        dc.DrawText(t2, width/2, height-h)
        #(w,h) = dc.GetTextExtent(t3) #save time since h will be the same
        dc.DrawText(t3, 0, height-h)
        # Y graduations
        if self.max == -1e32:
            # No data seen yet; nothing sensible to graduate.
            return
        (_min_, _max_) = (self.min, self.max)
        if _max_ < _min_: #prevent divide by zero or inversion
            (_min_, _max_) = (-1, 1)
        if _max_ == _min_:
            (_min_, _max_) = (_max_-0.5, _max_+0.5)
        delta = _max_-_min_
        dy = (height - self.margin*2) / delta
        # Pick a "nice" tick unit u close to a power of ten of the range.
        scale = math.log10( delta)
        d = math.pow(10.0, math.floor(scale))
        u = d
        if delta < 2*d:
            u=d/5
        elif delta < 5*d:
            u=d/2
        tick_min =_min_ - math.fmod(_min_, u)
        for i in range( int(delta/u) + 1):
            tick = tick_min + float(i)*u
            s = str(tick)
            (w,h) = dc.GetTextExtent(s)
            y = height-self.margin-int((tick-_min_)*dy)-h/2
            dc.DrawText(s, 0, y)
| gpl-2.0 |
pwaller/pgi | pgi/properties.py | 1 | 8841 | # Copyright 2012,2013 Christoph Reiter
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
from warnings import warn
import ctypes
from .clib import gobject
from .clib.gobject import GValue, GValuePtr, G_TYPE_FROM_INSTANCE
from .clib.gobject import GObjectClassPtr
from .clib.gir import GIInfoType, GITypeTag
from .util import escape_parameter, unescape_parameter, InfoIterWrapper
from .util import import_attribute, set_gvalue_from_py, decode_return
from .gtype import PGType
from ._compat import PY3
# Attribute name under which the property descriptor is exposed on wrapper classes.
PROPS_NAME = "props"
class GParamSpec(object):
    """Python wrapper around a native GParamSpec pointer plus its GI property info.

    Holds a strong reference (``spec.ref()``) to the underlying C struct for
    the lifetime of this object.
    """
    _spec = None  # ctypes pointer to the native GParamSpec
    _info = None  # GI property info the spec was resolved from

    def __init__(self, spec, name, info):
        assert spec # null ptr check
        spec.ref()
        self._spec = spec
        self._info = info
        self._name = name

    @property
    @decode_return()
    def name(self):
        """The property's canonical (unescaped) name."""
        return self._name

    @property
    def __gtype__(self):
        return G_TYPE_FROM_INSTANCE(self._spec.contents.g_type_instance)

    @property
    def flags(self):
        # Raw GParamFlags bitfield value.
        return self._spec.contents.flags.value

    @property
    def nick(self):
        return self._spec.nick

    @property
    @decode_return()
    def blurb(self):
        """Long description of the property."""
        return self._spec.blurb

    @property
    def owner_type(self):
        return PGType(self._spec.contents.owner_type)

    @property
    def value_type(self):
        return PGType(self._spec.contents.value_type)

    @property
    def default_value(self):
        """Build a fresh GValue, let the spec fill in its default, and unbox it."""
        from pgi.repository import GObject
        gvalue = GObject.Value()
        gvalue.init(self.value_type)
        self._spec.set_default(ctypes.cast(gvalue._obj, GValuePtr))
        return gvalue.get_value()

    def __repr__(self):
        return "<%s %r>" % (self.__gtype__.name, self.name)
class Property(object):
    """Data descriptor translating attribute access into GObject property get/set.

    One instance is created per GParamSpec and installed on the generated
    per-class GProps type; ``__get__``/``__set__`` marshal values through a
    temporary GValue.
    """

    def __init__(self, spec):
        self.__spec = spec
        type_ = spec._info.get_type()
        self.__tag = type_.tag.value
        self.__interface = False
        if self.__tag == GITypeTag.INTERFACE:
            # Interface types (enums, objects, ...) carry their own sub-tag.
            iface_info = type_.get_interface()
            self.__tag = iface_info.type.value
            name = iface_info.name
            namespace = iface_info.namespace
            self.__iclass = import_attribute(namespace, name)
            self.__interface = True
        self.__value_type = spec.value_type._type.value

    def __get__(self, instance, owner):
        # Prepare a GValue of the property's type, then pick an unboxing
        # function matching the type tag.
        ptr = GValuePtr(GValue())
        ptr.init(self.__value_type)
        tag = self.__tag
        func = None
        if not self.__interface:
            if tag == GITypeTag.UTF8:
                func = lambda: ptr.string
            elif tag == GITypeTag.INT32:
                func = lambda: ptr.int
            elif tag == GITypeTag.BOOLEAN:
                func = lambda: ptr.boolean
            elif tag == GITypeTag.FLOAT:
                func = lambda: ptr.float
        else:
            if tag == GIInfoType.ENUM:
                func = lambda: self.__iclass(ptr.enum)
            elif tag == GIInfoType.OBJECT:
                def func():
                    # Wrap the raw object pointer in the imported Python class
                    # without running its __init__.
                    adr = ptr.get_object()
                    if adr:
                        new = object.__new__(self.__iclass)
                        new._obj = ptr.get_object()
                        return new
                    else:
                        return None
        if func is None:
            # Unsupported property type: warn and bail out gracefully.
            ptr.unset()
            name = self.__spec.name
            warn("Property %r unhandled. Type not supported" % name, Warning)
            return None
        if not instance._object:
            ptr.unset()
            raise TypeError("Object not initialized")
        name = self.__spec.name
        if PY3:
            name = name.encode("ascii")
        gobject.get_property(instance._object, name, ptr)
        return func()

    def __set__(self, instance, value):
        ptr = GValuePtr(GValue())
        ptr.init(self.__value_type)
        tag = self.__tag
        if not set_gvalue_from_py(ptr, self.__interface, tag, value):
            # Could not marshal the Python value into the GValue.
            ptr.unset()
            name = self.__spec.name
            warn("Property %r unhandled. Type not supported" % name, Warning)
            return
        name = self.__spec.name
        if PY3:
            name = name.encode("ascii")
        gobject.set_property(instance._object, name, ptr)
class _GProps(object):
    """Instance-bound container exposed as the ``props`` attribute.

    Generated subclasses get one Property descriptor per GParamSpec; this
    base only carries the back-reference to the wrapped instance.
    """

    def __init__(self, name, instance):
        self.__name = name
        self.__instance = instance

    @property
    def _object(self):
        # The raw native object pointer of the owning instance.
        return self.__instance._obj

    def __repr__(self):
        text = (self.__instance and "instance ") or ""
        return "<GProps of %s%r>" % (text, self.__name)
class PropertyIterWrapper(InfoIterWrapper):
    """InfoIterWrapper specialization iterating a GI object's property infos."""

    def _get_count(self, source):
        return source.n_properties

    def _get_info(self, source, index):
        return source.get_property(index)

    def _get_name(self, info):
        return info.name
class _ObjectClassProp(object):
    """Class-level props object: resolves GParamSpecs lazily by attribute name.

    Attribute lookup first delegates to base classes' props objects, then
    resolves the spec from the native class/interface and caches it on self.
    """

    def __init__(self, info, owner):
        self._info = info
        self._wrapper = PropertyIterWrapper(info)
        self._owner = owner

    def __get_base_props(self):
        # Yield the props objects of all bases in MRO order, skipping self.
        for base in self._owner.__mro__:
            props = getattr(base, PROPS_NAME, None)
            if not props or props is self:
                continue
            yield props

    def __dir__(self):
        # Own property names plus inherited ones plus regular attributes.
        names = [escape_parameter(n) for n in self._wrapper.iternames()]
        for props in self.__get_base_props():
            names.extend(
                [escape_parameter(n) for n in props._wrapper.iternames()])
        base = dir(self.__class__)
        return list(set(base + names))

    def __getattr__(self, name):
        # Inherited properties win first.
        for props in self.__get_base_props():
            try:
                return getattr(props, name)
            except AttributeError:
                pass
        info = self._info
        gname = unescape_parameter(name)
        prop_info = self._wrapper.lookup_name(gname)
        if PY3:
            gname = gname.encode("ascii")
        if prop_info:
            gtype = info.g_type
            if info.type.value == GIInfoType.OBJECT:
                # Objects: look the spec up on a temporarily referenced class.
                klass = gtype.class_ref()
                klass = ctypes.cast(klass, GObjectClassPtr)
                spec = klass.find_property(gname)
                gtype.class_unref(klass)
            else:
                # Interfaces: use the default vtable instead.
                iface = gtype.default_interface_ref()
                spec = iface.find_property(gname)
                gtype.default_interface_unref(iface)
            if not spec: # FIXME: why can this be the case?
                raise AttributeError
            gspec = GParamSpec(spec, gname, prop_info)
            # Cache so subsequent lookups skip __getattr__ entirely.
            setattr(self, name, gspec)
            return gspec
        raise AttributeError
class _PropsDescriptor(object):
    """Descriptor backing the ``props`` attribute on generated wrapper classes.

    Class access returns the (cached) class-level spec container; instance
    access builds and caches a per-class _GProps subclass with one Property
    descriptor per spec, then instantiates it for the wrapped object.
    """
    __cache = None      # cached class-level _ObjectClassProp-like instance
    __cls_cache = None  # cached generated _GProps subclass

    def __init__(self, info):
        self._info = info

    def __get_gparam_spec(self, owner):
        if not self.__cache:
            cls_dict = dict(_ObjectClassProp.__dict__)
            bases = (object,)
            self.__cache = type("GProps", bases, cls_dict)(self._info, owner)
        return self.__cache

    def __get_gprops_class(self, specs):
        if not self.__cls_cache:
            cls = _GProps
            cls_dict = dict(cls.__dict__)
            # One data descriptor per public spec name.
            for key in (p for p in dir(specs) if not p.startswith("_")):
                spec = getattr(specs, key)
                cls_dict[key] = Property(spec)
            self.__cls_cache = type("GProps", cls.__bases__, cls_dict)
        return self.__cls_cache

    def __get__(self, instance, owner):
        specs = self.__get_gparam_spec(owner)
        if not instance:
            return specs
        gprops = self.__get_gprops_class(specs)
        attr = gprops(self._info.name, instance)
        # Shadow the descriptor on the instance so later access is direct.
        setattr(instance, PROPS_NAME, attr)
        return attr
def list_properties(cls):
    """list_properties(cls: GObject.Object or GObject.GInterface or GObject.GType) -> [GObject.ParamSpec]

    Takes a GObject/GInterface subclass or a GType and returns a list of
    GParamSpecs of all properties.

    :raises TypeError: if cls is not an Object or InterfaceBase subclass.
    """
    if isinstance(cls, PGType):
        cls = cls.pytype
    from pgi.obj import Object, InterfaceBase
    if not issubclass(cls, (Object, InterfaceBase)):
        raise TypeError("Must be a subclass of %s or %s" %
                        (Object.__name__, InterfaceBase.__name__))
    # Every public (non-underscored) attribute of the props object is a spec.
    return [getattr(cls.props, key) for key in dir(cls.props)
            if not key.startswith("_")]
def PropertyAttribute(obj_info):
    """Create a fresh per-class props descriptor type named '<Name>Props'.

    A new type is built per GI object info so each wrapper class gets its
    own descriptor caches.
    """
    cls = _PropsDescriptor
    cls_dict = dict(cls.__dict__)
    return type(obj_info.name + "Props", cls.__bases__, cls_dict)(obj_info)
| lgpl-2.1 |
Scan-o-Matic/scanomatic | setup.py | 1 | 2375 | #!/usr/bin/env python
from __future__ import absolute_import
import os
from setuptools import find_packages, setup
from scanomatic import get_version
# Package metadata and install configuration for Scan-o-Matic.
setup(
    name="Scan-o-Matic",
    version=get_version(),
    description="High Throughput Solid Media Image Phenotyping Platform",
    long_description="""Scan-o-Matic is a high precision phenotyping platform
    that uses scanners to obtain images of yeast colonies growing on solid
    substrate.
    The package contains a user interface as well as an extensive package
    for yeast colony analysis from scanned images.
    """,
    author="Martin Zackrisson",
    author_email="martin.zackrisson@molflow.com",
    url="https://github.com/Scan-o-Matic/scanomatic",
    packages=find_packages(include=['scanomatic*']),
    package_data={
        "scanomatic": [
            'ui_server_data/*.html',
            'ui_server_data/js/*.js',
            'ui_server_data/js/external/*.js',
            'ui_server_data/style/*.css',
            'ui_server_data/fonts/*',
            'ui_server/templates/*',
            'images/*',
            'util/birds.txt',
            'util/adjectives.txt',
        ],
        'scanomatic.data': [
            'migrations/env.py',
            'migrations/alembic.ini',
            'migrations/versions/*.py',
        ],
    },
    scripts=[
        os.path.join("scripts", p) for p in [
            "scan-o-matic_migrate",
            "scan-o-matic_server",
        ]
    ],
    # BUG FIX: two classifiers did not match the official trove list --
    # 'Environment :: X11 Application :: GTK' (missing plural) and
    # 'Intended Autdience' (typo). Invalid classifiers are rejected by PyPI.
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: X11 Applications :: GTK',
        'Environment :: Console',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Natural Language :: English',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 2.7',
        'Topic :: Scientific/Engineering :: Bio-Informatics'
    ],
    install_requires=[
        'alembic',
        'chardet',
        'enum34',
        'flask',
        'flask-restful',
        'future',
        'matplotlib',
        'numpy',
        'pandas',
        'pillow',
        'prometheus-client',
        'psutil',
        'psycopg2-binary',
        'pytz',
        'requests',
        'scikit-image',
        'scipy',
        'setproctitle',
        'sqlalchemy',
        'xlrd',
    ],
)
| gpl-3.0 |
shanzhenren/ClusType | candidate_generation/EntityExtraction/HeapDictionary2.py | 2 | 2844 | __author__ = 'xiang'
import collections
def doc(s):
    """Decorator factory copying a docstring onto the decorated function.

    *s* may be the docstring itself or a callable whose ``__doc__`` is used.
    """
    text = s.__doc__ if callable(s) else s

    def decorate(func):
        func.__doc__ = text
        return func

    return decorate
try:
    # BUG FIX: collections.MutableMapping moved to collections.abc in
    # Python 3.3 and the old alias was removed in Python 3.10.
    from collections.abc import MutableMapping as _MutableMapping
except ImportError:  # Python 2 fallback
    from collections import MutableMapping as _MutableMapping


class heapdict(_MutableMapping):
    """A dict-like priority queue.

    A mutable mapping whose popitem()/peekitem() return the (key, value)
    pair with the *lowest* value. Each entry is a [value, key, heap_index]
    wrapper shared by the lookup dict ``d`` and the binary min-heap ``heap``,
    so key deletion and priority updates stay O(log n).
    """
    __marker = object()

    @staticmethod
    def _parent(i):
        return ((i - 1) >> 1)

    @staticmethod
    def _left(i):
        return ((i << 1) + 1)

    @staticmethod
    def _right(i):
        return ((i+1) << 1)

    def __init__(self, *args, **kw):
        self.heap = []
        self.d = {}
        self.update(*args, **kw)

    def clear(self):
        """Remove all items.

        BUG FIX: was ``self.heap.clear()`` -- list.clear() does not exist on
        Python 2; slice deletion works on every version.
        """
        del self.heap[:]
        self.d.clear()

    def __setitem__(self, key, value):
        """Insert *key* with priority *value*, replacing any existing entry."""
        if key in self.d:
            self.pop(key)
        wrapper = [value, key, len(self)]
        self.d[key] = wrapper
        self.heap.append(wrapper)
        self._decrease_key(len(self.heap)-1)

    def _min_heapify(self, i):
        # Sift the wrapper at index i down until the heap property holds.
        l = self._left(i)
        r = self._right(i)
        n = len(self.heap)
        if l < n and self.heap[l][0] < self.heap[i][0]:
            low = l
        else:
            low = i
        if r < n and self.heap[r][0] < self.heap[low][0]:
            low = r
        if low != i:
            self._swap(i, low)
            self._min_heapify(low)

    def _decrease_key(self, i):
        # Sift the wrapper at index i up while it is smaller than its parent.
        while i:
            parent = self._parent(i)
            if self.heap[parent][0] < self.heap[i][0]: break
            self._swap(i, parent)
            i = parent

    def _swap(self, i, j):
        # Swap two heap slots and keep the back-pointing indices in sync.
        self.heap[i], self.heap[j] = self.heap[j], self.heap[i]
        self.heap[i][2] = i
        self.heap[j][2] = j

    def __delitem__(self, key):
        """Remove *key* (KeyError if absent)."""
        wrapper = self.d[key]
        # Force the wrapper up to the heap root, then pop it off.
        while wrapper[2]:
            parentpos = self._parent(wrapper[2])
            parent = self.heap[parentpos]
            self._swap(wrapper[2], parent[2])
        self.popitem()

    def __getitem__(self, key):
        """Return the priority value stored for *key*."""
        return self.d[key][0]

    def __iter__(self):
        return iter(self.d)

    def popitem(self):
        """D.popitem() -> (k, v), remove and return the (key, value) pair with lowest\nvalue; but raise KeyError if D is empty."""
        wrapper = self.heap[0]
        if len(self.heap) == 1:
            self.heap.pop()
        else:
            # Move the last leaf to the root and restore the heap property.
            self.heap[0] = self.heap.pop(-1)
            self.heap[0][2] = 0
            self._min_heapify(0)
        del self.d[wrapper[1]]
        return wrapper[1], wrapper[0]

    def __len__(self):
        return len(self.d)

    def peekitem(self):
        """D.peekitem() -> (k, v), return the (key, value) pair with lowest value;\n but raise KeyError if D is empty."""
        return (self.heap[0][1], self.heap[0][0])
del doc
__all__ = ['heapdict'] | gpl-3.0 |
titiushko/readthedocs.org | readthedocs/projects/version_handling.py | 8 | 5152 | from collections import defaultdict
from packaging.version import Version
from packaging.version import InvalidVersion
from readthedocs.builds.constants import LATEST_VERBOSE_NAME
from readthedocs.builds.constants import STABLE_VERBOSE_NAME
def get_major(version):
    """Return the major component of a ``packaging.version.Version``."""
    # NOTE(review): relies on packaging's private ``_version`` attribute;
    # confirm compatibility before upgrading the packaging dependency.
    return version._version.release[0]
def get_minor(version):
    """Return the minor component of a Version, defaulting to 0.

    Release tuples like ``(2,)`` (bare major) have no minor slot.
    """
    try:
        return version._version.release[1]
    except IndexError:
        return 0
class VersionManager(object):
    """Buckets Version objects by (major, minor) and prunes them to windows.

    State layout: ``_state[major][minor] -> [Version, ...]``.
    """

    def __init__(self):
        self._state = defaultdict(lambda: defaultdict(list))

    def add(self, version):
        """File *version* under its (major, minor) bucket."""
        self._state[get_major(version)][get_minor(version)].append(version)

    def prune_major(self, num_latest):
        """Drop all but the highest *num_latest* major versions."""
        all_keys = sorted(set(self._state.keys()))
        # Everything except the trailing num_latest keys is removed; the old
        # implementation built an unused `major_keep` list doing the same.
        for to_remove in all_keys[:max(0, len(all_keys) - num_latest)]:
            del self._state[to_remove]

    def prune_minor(self, num_latest):
        """Within each major, drop all but the highest *num_latest* minors."""
        for major, minors in list(self._state.items()):
            all_keys = sorted(set(minors.keys()))
            for to_remove in all_keys[:max(0, len(all_keys) - num_latest)]:
                del minors[to_remove]

    def prune_point(self, num_latest):
        """Within each (major, minor), keep only the highest *num_latest* points."""
        for major, minors in self._state.items():
            for minor in minors.keys():
                # BUG FIX: this was wrapped in a Python-2-only
                # ``except TypeError, e: raise`` clause -- a SyntaxError on
                # Python 3 -- whose handler only re-raised; removed.
                self._state[major][minor] = sorted(set(self._state[major][minor]))[-num_latest:]

    def get_version_list(self):
        """Return sorted public version strings, excluding prereleases."""
        versions = []
        for major_val in self._state.values():
            for version_list in major_val.values():
                versions.extend(version_list)
        versions = sorted(versions)
        return [
            version.public
            for version in versions
            if not version.is_prerelease]
def version_windows(versions, major=1, minor=1, point=1):
    """Reduce *versions* (strings) to sliding windows of recent releases.

    Keeps the last *major* majors, the last *minor* minors per major and the
    last *point* points per minor; returns the surviving public,
    non-prerelease version strings.
    """
    # TODO: This needs some documentation on how VersionManager etc works and
    # some examples what the expected outcome is.
    version_identifiers = []
    for version_string in versions:
        try:
            version_identifiers.append(Version(version_string))
        except InvalidVersion:
            # Non-PEP 440 strings (branch names etc.) are silently skipped.
            pass
    major_version_window = major
    minor_version_window = minor
    point_version_window = point
    manager = VersionManager()
    for v in version_identifiers:
        manager.add(v)
    manager.prune_major(major_version_window)
    manager.prune_minor(minor_version_window)
    manager.prune_point(point_version_window)
    return manager.get_version_list()
def parse_version_failsafe(version_string):
    """Parse a PEP 440 version string; return None instead of raising."""
    try:
        return Version(version_string)
    except InvalidVersion:
        return None
def comparable_version(version_string):
    """This can be used as ``key`` argument to ``sorted``.

    The ``LATEST`` version shall always beat other versions in comparison.
    ``STABLE`` should be listed second. If we cannot figure out the version
    number then we still assume it's bigger than all other versions since we
    cannot predict what it is."""
    comparable = parse_version_failsafe(version_string)
    if not comparable:
        # Sentinel magnitudes enforce the ordering LATEST > STABLE > unknown,
        # all of which outrank any real-world version number.
        if version_string == LATEST_VERBOSE_NAME:
            comparable = Version('99999.0')
        elif version_string == STABLE_VERBOSE_NAME:
            comparable = Version('9999.0')
        else:
            comparable = Version('999.0')
    return comparable
def sort_versions(version_list):
    """Takes a list of ``Version`` models and return a sorted list,

    The returned value is a list of two-tuples. The first is the actual
    ``Version`` model instance, the second is an instance of
    ``packaging.version.Version``. They are ordered in descending order
    (latest version first). Entries whose slug cannot be parsed are dropped.
    """
    versions = []
    for version_obj in version_list:
        version_slug = version_obj.verbose_name
        # BUG FIX: the local was named ``comparable_version``, shadowing the
        # module-level function of the same name.
        parsed = parse_version_failsafe(version_slug)
        if parsed:
            versions.append((version_obj, parsed))
    # sorted() already returns a list; the redundant list() wrapper is gone.
    return sorted(
        versions,
        key=lambda version_info: version_info[1],
        reverse=True)
def highest_version(version_list, version_test=None):
    """Return the (version_obj, parsed) pair of the highest version.

    Returns ``(None, None)`` when nothing in *version_list* is parseable.
    ``version_test`` is accepted for interface compatibility and unused.
    """
    ordered = sort_versions(version_list)
    return ordered[0] if ordered else (None, None)
def determine_stable_version(version_list):
    """
    Takes a list of ``Version`` model instances and returns the version
    instance which can be considered the most recent stable one. It will
    return ``None`` if there is no stable version in the list.
    """
    stable = [
        (version_obj, parsed)
        for version_obj, parsed in sort_versions(version_list)
        if not parsed.is_prerelease
    ]
    if stable:
        return stable[0][0]
    return None
| mit |
runt18/mojo | third_party/cython/src/Cython/Plex/Errors.py | 104 | 1120 | #=======================================================================
#
# Python Lexical Analyser
#
# Exception classes
#
#=======================================================================
class PlexError(Exception):
    """Base class for all errors raised by the Plex lexical analyser."""
    message = ""
class PlexTypeError(PlexError, TypeError):
    """Plex error that is also a TypeError (wrongly typed argument)."""
    pass
class PlexValueError(PlexError, ValueError):
    """Plex error that is also a ValueError (invalid argument value)."""
    pass
class InvalidRegex(PlexError):
    """Raised when a regular expression passed to the lexer is malformed."""
    pass
class InvalidToken(PlexError):
    """Raised when a token specification given to the lexer is invalid."""

    def __init__(self, token_number, message):
        detail = "Token number %d: %s" % (token_number, message)
        PlexError.__init__(self, detail)
class InvalidScanner(PlexError):
    """Raised when a scanner specification is invalid."""
    pass
class AmbiguousAction(PlexError):
    """Raised when two tokens with different actions match the same string."""
    message = "Two tokens with different actions can match the same string"

    def __init__(self):
        # Deliberately empty: the class-level message is the whole payload.
        pass
class UnrecognizedInput(PlexError):
    """Raised at scan time when no token pattern matches the input."""
    scanner = None     # the Scanner that hit the unrecognised input
    position = None    # (name, line, char) tuple from scanner.get_position()
    state_name = None  # name of the scanner state that was active

    def __init__(self, scanner, state_name):
        self.scanner = scanner
        self.position = scanner.get_position()
        self.state_name = state_name

    def __str__(self):
        # position already supplies (name, line, char); the state name is
        # appended as a fourth format argument.
        return ("'%s', line %d, char %d: Token not recognised in state %s"
                % (self.position + (repr(self.state_name),)))
| bsd-3-clause |
gazpachoking/Flexget | flexget/components/trakt/db.py | 4 | 42551 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, division, absolute_import, print_function
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import logging
import time
from datetime import datetime, timedelta
from dateutil.parser import parse as dateutil_parse
from sqlalchemy import Table, Column, Integer, String, Unicode, Date, DateTime, Time, or_, and_
from sqlalchemy.orm import relation
from sqlalchemy.schema import ForeignKey
from flexget import db_schema
from flexget import plugin
from flexget.terminal import console
from flexget.manager import Session
from flexget.utils import requests
from flexget.utils.database import json_synonym
from flexget.utils.tools import split_title_year
# Declarative bases versioned for FlexGet's schema-migration machinery.
Base = db_schema.versioned_base('api_trakt', 7)
AuthBase = db_schema.versioned_base('trakt_auth', 0)
log = logging.getLogger('api_trakt')
# Production-site OAuth application credentials (desktop-app keys, public by design).
CLIENT_ID = '57e188bcb9750c79ed452e1674925bc6848bd126e02bb15350211be74c6547af'
CLIENT_SECRET = 'db4af7531e8df678b134dbc22445a2c04ebdbdd7213be7f5b6d17dfdfabfcdc2'
API_URL = 'https://api.trakt.tv/'
PIN_URL = 'https://trakt.tv/pin/346'
# Oauth account authentication
class TraktUserAuth(AuthBase):
    """Stored OAuth tokens for one named trakt account."""
    __tablename__ = 'trakt_user_auth'

    account = Column(Unicode, primary_key=True)
    access_token = Column(Unicode)
    refresh_token = Column(Unicode)
    created = Column(DateTime)
    expires = Column(DateTime)

    def __init__(self, account, access_token, refresh_token, created, expires):
        """Store tokens; *created* is a POSIX timestamp, *expires* a lifetime in seconds."""
        self.account = account
        self.access_token = access_token
        self.refresh_token = refresh_token
        self.expires = token_expire_date(expires)
        self.created = token_created_date(created)
def token_expire_date(expires):
    """Translate a token lifetime in seconds into an absolute expiry datetime."""
    lifetime = timedelta(seconds=expires)
    return datetime.now() + lifetime
def token_created_date(created):
    """Interpret *created* (a POSIX timestamp) as a local-time datetime."""
    as_datetime = datetime.fromtimestamp(created)
    return as_datetime
def device_auth():
    """Run trakt's OAuth device-code flow interactively.

    Prints the verification URL and user code on the console, then polls the
    token endpoint until the user approves, denies, or the code expires.
    Returns the token payload dict on success; raises PluginError otherwise.
    """
    data = {'client_id': CLIENT_ID}
    try:
        r = requests.post(get_api_url('oauth/device/code'), data=data).json()
        device_code = r['device_code']
        user_code = r['user_code']
        expires_in = r['expires_in']
        interval = r['interval']
        console(
            'Please visit {0} and authorize Flexget. Your user code is {1}. Your code expires in '
            '{2} minutes.'.format(r['verification_url'], user_code, expires_in / 60.0)
        )
        log.debug('Polling for user authorization.')
        data['code'] = device_code
        data['client_secret'] = CLIENT_SECRET
        end_time = time.time() + expires_in
        console('Waiting...', end='')
        # stop polling after expires_in seconds
        while time.time() < end_time:
            time.sleep(interval)
            polling_request = requests.post(
                get_api_url('oauth/device/token'), data=data, raise_status=False
            )
            if polling_request.status_code == 200:  # success
                return polling_request.json()
            elif polling_request.status_code == 400:  # pending -- waiting for user
                console('...', end='')
            elif polling_request.status_code == 404:  # not found -- invalid device_code
                raise plugin.PluginError('Invalid device code. Open an issue on Github.')
            elif polling_request.status_code == 409:  # already used -- user already approved
                raise plugin.PluginError('User code has already been approved.')
            elif polling_request.status_code == 410:  # expired -- restart process
                break
            elif polling_request.status_code == 418:  # denied -- user denied code
                raise plugin.PluginError('User code has been denied.')
            elif polling_request.status_code == 429:  # polling too fast
                log.warning('Polling too quickly. Upping the interval. No action required.')
                interval += 1
        # Either the loop timed out or trakt reported the code expired (410).
        raise plugin.PluginError('User code has expired. Please try again.')
    except requests.RequestException as e:
        raise plugin.PluginError('Device authorization with Trakt.tv failed: {0}'.format(e))
def token_oauth(data):
    """POST *data* to trakt's oauth/token endpoint and return the JSON payload.

    Network failures are wrapped in PluginError.
    """
    try:
        return requests.post(get_api_url('oauth/token'), data=data).json()
    except requests.RequestException as e:
        raise plugin.PluginError('Token exchange with trakt failed: {0}'.format(e))
def delete_account(account):
    """Remove the stored authorization for *account*; raise PluginError if absent."""
    with Session() as session:
        acc = session.query(TraktUserAuth).filter(TraktUserAuth.account == account).first()
        if not acc:
            raise plugin.PluginError('Account %s not found.' % account)
        session.delete(acc)
def get_access_token(account, token=None, refresh=False, re_auth=False, called_from_cli=False):
    """
    Gets authorization info from a pin or refresh token.

    :param account: Arbitrary account name to attach authorization to.
    :param unicode token: The pin or refresh token, as supplied by the trakt website.
    :param bool refresh: If True, refresh the access token using refresh_token from db.
    :param bool re_auth: If True, account is re-authorized even if it already exists in db.
    :param bool called_from_cli: If True and the account is unknown, fall back to device auth.
    :raises RequestException: If there is a network error while authorizing.
    """
    data = {
        'client_id': CLIENT_ID,
        'client_secret': CLIENT_SECRET,
        'redirect_uri': 'urn:ietf:wg:oauth:2.0:oob',
    }
    with Session() as session:
        acc = session.query(TraktUserAuth).filter(TraktUserAuth.account == account).first()
        if acc and datetime.now() < acc.expires and not refresh and not re_auth:
            # Cached token is still valid: reuse it without touching the network.
            return acc.access_token
        else:
            if (
                acc
                and (refresh or datetime.now() >= acc.expires - timedelta(days=5))
                and not re_auth
            ):
                # Token expired or expiring within 5 days: use the refresh token.
                log.debug('Using refresh token to re-authorize account %s.', account)
                data['refresh_token'] = acc.refresh_token
                data['grant_type'] = 'refresh_token'
                token_dict = token_oauth(data)
            elif token:
                # We are only in here if a pin was specified, so it's safe to use console instead of logging
                console(
                    'Warning: PIN authorization has been deprecated. Use Device Authorization instead.'
                )
                data['code'] = token
                data['grant_type'] = 'authorization_code'
                token_dict = token_oauth(data)
            elif called_from_cli:
                log.debug(
                    'No pin specified for an unknown account %s. Attempting to authorize device.',
                    account,
                )
                token_dict = device_auth()
            else:
                raise plugin.PluginError(
                    'Account %s has not been authorized. See `flexget trakt auth -h` on how to.'
                    % account
                )
            try:
                # Persist (or overwrite) the tokens for this account.
                new_acc = TraktUserAuth(
                    account,
                    token_dict['access_token'],
                    token_dict['refresh_token'],
                    token_dict.get('created_at', time.time()),
                    token_dict['expires_in'],
                )
                session.merge(new_acc)
                return new_acc.access_token
            except requests.RequestException as e:
                raise plugin.PluginError('Token exchange with trakt failed: {0}'.format(e))
def make_list_slug(name):
    """Return the slug for use in url for given list name."""
    slug = name.lower()
    # These characters are simply dropped from the url.
    for dropped in '!@#$%^*()[]{}/=?+\\|':
        slug = slug.replace(dropped, '')
    # These characters get substituted instead.
    return slug.replace('&', 'and').replace(' ', '-')
def get_session(account=None, token=None):
    """
    Creates a requests session ready to talk to trakt API with FlexGet's api key.

    Can also add user level authentication if `account` parameter is given.

    :param account: An account authorized via `flexget trakt auth` CLI command. If given, returned session will be
        authenticated for that account.
    :param token: Optional pin/refresh token forwarded to get_access_token.
    """
    # default to username if account name is not specified
    session = requests.Session()
    session.headers = {
        'Content-Type': 'application/json',
        'trakt-api-version': '2',
        'trakt-api-key': CLIENT_ID,
    }
    if account:
        access_token = get_access_token(account, token) if account else None
        if access_token:
            session.headers.update({'Authorization': 'Bearer %s' % access_token})
    return session
def get_api_url(*endpoint):
    """
    Get the address of a trakt API endpoint.

    :param endpoint: Can be a string endpoint (e.g. 'sync/watchlist') or an iterable
        (e.g. ('sync', 'watchlist')). Multiple parameters can also be specified
        instead of a single iterable.
    :returns: The absolute url to the specified API endpoint.
    """
    if len(endpoint) == 1 and not isinstance(endpoint[0], str):
        endpoint = endpoint[0]
    # Integer path segments (ids, seasons, ...) are stringified before joining.
    parts = (str(part) for part in endpoint)
    return API_URL + '/'.join(parts)
@db_schema.upgrade('api_trakt')
def upgrade(ver, session):
    """Schema upgrade hook: versions 6 and older cannot be migrated in place."""
    if ver is None or ver <= 6:
        raise db_schema.UpgradeImpossible
    return ver
def get_entry_ids(entry):
    """Creates a trakt ids dict from id fields on an entry. Prefers already populated info over lazy lookups."""
    ids = {}
    # First pass reads only eagerly-populated fields; only if nothing was
    # found does the second pass allow lazy (potentially expensive) lookups.
    for lazy in [False, True]:
        if entry.get('trakt_movie_id', eval_lazy=lazy):
            ids['trakt'] = entry['trakt_movie_id']
        elif entry.get('trakt_show_id', eval_lazy=lazy):
            ids['trakt'] = entry['trakt_show_id']
        elif entry.get('trakt_episode_id', eval_lazy=lazy):
            ids['trakt'] = entry['trakt_episode_id']
        if entry.get('tmdb_id', eval_lazy=lazy):
            ids['tmdb'] = entry['tmdb_id']
        if entry.get('tvdb_id', eval_lazy=lazy):
            ids['tvdb'] = entry['tvdb_id']
        if entry.get('imdb_id', eval_lazy=lazy):
            ids['imdb'] = entry['imdb_id']
        if entry.get('tvrage_id', eval_lazy=lazy):
            ids['tvrage'] = entry['tvrage_id']
        if ids:
            break
    return ids
class TraktMovieTranslation(Base):
    """Localized title/overview/tagline for a movie; one row per language."""
    __tablename__ = 'trakt_movie_translations'

    id = Column(Integer, primary_key=True, autoincrement=True)
    language = Column(Unicode)
    overview = Column(Unicode)
    tagline = Column(Unicode)
    title = Column(Unicode)
    movie_id = Column(Integer, ForeignKey('trakt_movies.id'))

    def __init__(self, translation, session):
        super(TraktMovieTranslation, self).__init__()
        self.update(translation, session)

    def update(self, translation, session):
        # Copy every field of the API payload onto the matching column.
        for col in translation.keys():
            setattr(self, col, translation.get(col))
class TraktShowTranslation(Base):
    """Localized title/overview for a show; one row per language."""
    __tablename__ = 'trakt_show_translations'

    id = Column(Integer, primary_key=True, autoincrement=True)
    language = Column(Unicode)
    overview = Column(Unicode)
    title = Column(Unicode)
    show_id = Column(Integer, ForeignKey('trakt_shows.id'))

    def __init__(self, translation, session):
        super(TraktShowTranslation, self).__init__()
        self.update(translation, session)

    def update(self, translation, session):
        # Copy every field of the API payload onto the matching column.
        for col in translation.keys():
            setattr(self, col, translation.get(col))
def get_translations(ident, style):
    """Fetch all translations for a trakt show/movie, reusing cached DB rows.

    :param ident: trakt id of the show/movie
    :param style: 'show' or 'movie' — selects endpoint and translation model
    :return: list of Trakt*Translation objects, or None on a request error
    """
    url = get_api_url(style + 's', ident, 'translations')
    # Pick the matching ORM class and its FK column ('show_id'/'movie_id').
    trakt_translation = TraktShowTranslation if style == 'show' else TraktMovieTranslation
    trakt_translation_id = getattr(trakt_translation, style + '_id')
    translations = []
    req_session = get_session()
    try:
        results = req_session.get(url, params={'extended': 'full'}).json()
        with Session() as session:
            for result in results:
                # Reuse an existing row for this (language, media id) pair if present.
                translation = (
                    session.query(trakt_translation)
                    .filter(
                        and_(
                            trakt_translation.language == result.get('language'),
                            trakt_translation_id == ident,
                        )
                    )
                    .first()
                )
                if not translation:
                    translation = trakt_translation(result, session)
                translations.append(translation)
        return translations
    except requests.RequestException as e:
        # NOTE(review): on failure this logs and implicitly returns None,
        # not an empty list — callers must tolerate None.
        log.debug('Error adding translations to trakt id %s: %s', ident, e)
class TraktGenre(Base):
    """Genre name; shared by shows and movies through association tables."""
    __tablename__ = 'trakt_genres'
    name = Column(Unicode, primary_key=True)
# Many-to-many association between shows and genres.
show_genres_table = Table(
    'trakt_show_genres',
    Base.metadata,
    Column('show_id', Integer, ForeignKey('trakt_shows.id')),
    Column('genre_id', Unicode, ForeignKey('trakt_genres.name')),
)
Base.register_table(show_genres_table)
# Many-to-many association between movies and genres.
movie_genres_table = Table(
    'trakt_movie_genres',
    Base.metadata,
    Column('movie_id', Integer, ForeignKey('trakt_movies.id')),
    Column('genre_id', Unicode, ForeignKey('trakt_genres.name')),
)
Base.register_table(movie_genres_table)
class TraktActor(Base):
    """Locally cached trakt.tv person record (cast member)."""
    __tablename__ = 'trakt_actors'
    # Primary key is the trakt person id, assigned by the API (not autoincrement).
    id = Column(Integer, primary_key=True, nullable=False)
    name = Column(Unicode)
    slug = Column(Unicode)
    tmdb = Column(Integer)
    imdb = Column(Unicode)
    biography = Column(Unicode)
    birthday = Column(Date)
    death = Column(Date)
    homepage = Column(Unicode)
    def __init__(self, actor, session):
        super(TraktActor, self).__init__()
        self.update(actor, session)
    def update(self, actor, session):
        """Update this row from a trakt API person dict.

        :raises Exception: if called with data for a different person id.
        """
        if self.id and self.id != actor.get('ids').get('trakt'):
            raise Exception('Tried to update db actors with different actor data')
        elif not self.id:
            self.id = actor.get('ids').get('trakt')
        self.name = actor.get('name')
        ids = actor.get('ids')
        self.imdb = ids.get('imdb')
        self.slug = ids.get('slug')
        self.tmdb = ids.get('tmdb')
        self.biography = actor.get('biography')
        # Dates arrive as strings; only parse when present so missing values stay NULL.
        if actor.get('birthday'):
            self.birthday = dateutil_parse(actor.get('birthday'))
        if actor.get('death'):
            self.death = dateutil_parse(actor.get('death'))
        self.homepage = actor.get('homepage')
    def to_dict(self):
        """Return the subset of fields exposed to entries/API consumers."""
        return {'name': self.name, 'trakt_id': self.id, 'imdb_id': self.imdb, 'tmdb_id': self.tmdb}
# Many-to-many association between shows and actors.
show_actors_table = Table(
    'trakt_show_actors',
    Base.metadata,
    Column('show_id', Integer, ForeignKey('trakt_shows.id')),
    Column('actors_id', Integer, ForeignKey('trakt_actors.id')),
)
Base.register_table(show_actors_table)
# Many-to-many association between movies and actors.
movie_actors_table = Table(
    'trakt_movie_actors',
    Base.metadata,
    Column('movie_id', Integer, ForeignKey('trakt_movies.id')),
    Column('actors_id', Integer, ForeignKey('trakt_actors.id')),
)
Base.register_table(movie_actors_table)
def get_db_actors(ident, style):
    """Fetch the cast of a show/movie from trakt, reusing cached TraktActor rows.

    :param ident: trakt id of the show/movie
    :param style: 'show' or 'movie'
    :return: list of TraktActor objects, or None on a request error
    """
    actors = {}
    url = get_api_url(style + 's', ident, 'people')
    req_session = get_session()
    try:
        results = req_session.get(url, params={'extended': 'full'}).json()
        with Session() as session:
            for result in results.get('cast'):
                trakt_id = result.get('person').get('ids').get('trakt')
                # sometimes an actor can occur twice in the list by mistake. This check is to avoid this unlikely event
                if trakt_id in actors:
                    continue
                actor = session.query(TraktActor).filter(TraktActor.id == trakt_id).first()
                if not actor:
                    actor = TraktActor(result.get('person'), session)
                actors[trakt_id] = actor
        return list(actors.values())
    except requests.RequestException as e:
        # Fix: the original logged the exception in the id placeholder
        # ('...trakt id %s' % e); log both the id and the error instead.
        log.debug('Error searching for actors for trakt id %s: %s', ident, e)
        return
def get_translations_dict(translate, style):
    """Serialize translation rows into a dict keyed by language code.

    Each value holds 'overview' and 'title'; movies additionally carry
    their 'tagline'.
    """
    include_tagline = style == 'movie'
    translations = {}
    for item in translate:
        info = {'overview': item.overview, 'title': item.title}
        if include_tagline:
            info['tagline'] = item.tagline
        translations[item.language] = info
    return translations
def list_actors(actors):
    """Serialize actor records into a dict keyed by their trakt id (as a string).

    External ids are stringified; birth/death dates are rendered as
    'YYYY/MM/DD' or left as None when unknown.
    """
    def _fmt_date(value):
        return value.strftime("%Y/%m/%d") if value else None

    serialized = {}
    for actor in actors:
        serialized[str(actor.id)] = {
            'trakt_id': actor.id,
            'name': actor.name,
            'imdb_id': str(actor.imdb),
            'trakt_slug': actor.slug,
            'tmdb_id': str(actor.tmdb),
            'birthday': _fmt_date(actor.birthday),
            'biography': actor.biography,
            'homepage': actor.homepage,
            'death': _fmt_date(actor.death),
        }
    return serialized
class TraktEpisode(Base):
    """Locally cached trakt.tv episode, owned by a TraktShow."""
    __tablename__ = 'trakt_episodes'
    # Primary key is the trakt episode id assigned by the API.
    id = Column(Integer, primary_key=True, autoincrement=False)
    tvdb_id = Column(Integer)
    imdb_id = Column(Unicode)
    tmdb_id = Column(Integer)
    tvrage_id = Column(Unicode)
    title = Column(Unicode)
    season = Column(Integer)
    number = Column(Integer)
    number_abs = Column(Integer)
    overview = Column(Unicode)
    first_aired = Column(DateTime)
    updated_at = Column(DateTime)
    cached_at = Column(DateTime)
    series_id = Column(Integer, ForeignKey('trakt_shows.id'), nullable=False)
    def __init__(self, trakt_episode, session):
        super(TraktEpisode, self).__init__()
        self.update(trakt_episode, session)
    def update(self, trakt_episode, session):
        """Updates this record from the trakt media object `trakt_episode` returned by the trakt api."""
        if self.id and self.id != trakt_episode['ids']['trakt']:
            raise Exception('Tried to update db ep with different ep data')
        elif not self.id:
            self.id = trakt_episode['ids']['trakt']
        self.imdb_id = trakt_episode['ids']['imdb']
        self.tmdb_id = trakt_episode['ids']['tmdb']
        self.tvrage_id = trakt_episode['ids']['tvrage']
        self.tvdb_id = trakt_episode['ids']['tvdb']
        # Reset first, so an episode that loses its air date in trakt goes back to NULL.
        self.first_aired = None
        if trakt_episode.get('first_aired'):
            self.first_aired = dateutil_parse(trakt_episode['first_aired'], ignoretz=True)
        self.updated_at = dateutil_parse(trakt_episode.get('updated_at'), ignoretz=True)
        self.cached_at = datetime.now()
        for col in ['title', 'season', 'number', 'number_abs', 'overview']:
            setattr(self, col, trakt_episode.get(col))
    @property
    def expired(self):
        # TODO should episode have its own expiration function?
        # Episodes currently never expire on their own; freshness is driven by the show.
        return False
class TraktSeason(Base):
    """Locally cached trakt.tv season, owned by a TraktShow."""
    __tablename__ = 'trakt_seasons'
    # Primary key is the trakt season id assigned by the API.
    id = Column(Integer, primary_key=True, autoincrement=False)
    tvdb_id = Column(Integer)
    tmdb_id = Column(Integer)
    tvrage_id = Column(Unicode)
    title = Column(Unicode)
    number = Column(Integer)
    episode_count = Column(Integer)
    aired_episodes = Column(Integer)
    overview = Column(Unicode)
    first_aired = Column(DateTime)
    ratings = Column(Integer)
    votes = Column(Integer)
    cached_at = Column(DateTime)
    series_id = Column(Integer, ForeignKey('trakt_shows.id'), nullable=False)
    def __init__(self, trakt_season, session):
        super(TraktSeason, self).__init__()
        self.update(trakt_season, session)
    def update(self, trakt_season, session):
        """Updates this record from the trakt media object `trakt_season` returned by the trakt api."""
        if self.id and self.id != trakt_season['ids']['trakt']:
            raise Exception('Tried to update db season with different season data')
        elif not self.id:
            self.id = trakt_season['ids']['trakt']
        self.tmdb_id = trakt_season['ids']['tmdb']
        self.tvrage_id = trakt_season['ids']['tvrage']
        self.tvdb_id = trakt_season['ids']['tvdb']
        # Reset first, so a season that loses its air date in trakt goes back to NULL.
        self.first_aired = None
        if trakt_season.get('first_aired'):
            self.first_aired = dateutil_parse(trakt_season['first_aired'], ignoretz=True)
        self.cached_at = datetime.now()
        for col in [
            'title',
            'number',
            'episode_count',
            'aired_episodes',
            'ratings',
            'votes',
            'overview',
        ]:
            setattr(self, col, trakt_season.get(col))
    @property
    def expired(self):
        # TODO should season have its own expiration function?
        # Seasons currently never expire on their own; freshness is driven by the show.
        return False
class TraktShow(Base):
    """Locally cached trakt.tv show, including its seasons and episodes."""

    __tablename__ = 'trakt_shows'
    # Primary key is the trakt show id assigned by the API.
    id = Column(Integer, primary_key=True, autoincrement=False)
    title = Column(Unicode)
    year = Column(Integer)
    slug = Column(Unicode)
    tvdb_id = Column(Integer)
    imdb_id = Column(Unicode)
    tmdb_id = Column(Integer)
    tvrage_id = Column(Unicode)
    overview = Column(Unicode)
    first_aired = Column(DateTime)
    air_day = Column(Unicode)
    air_time = Column(Time)
    timezone = Column(Unicode)
    runtime = Column(Integer)
    certification = Column(Unicode)
    network = Column(Unicode)
    country = Column(Unicode)
    status = Column(String)
    rating = Column(Integer)
    votes = Column(Integer)
    language = Column(Unicode)
    homepage = Column(Unicode)
    trailer = Column(Unicode)
    aired_episodes = Column(Integer)
    _translations = relation(TraktShowTranslation)
    _translation_languages = Column('translation_languages', Unicode)
    translation_languages = json_synonym('_translation_languages')
    episodes = relation(
        TraktEpisode, backref='show', cascade='all, delete, delete-orphan', lazy='dynamic'
    )
    seasons = relation(
        TraktSeason, backref='show', cascade='all, delete, delete-orphan', lazy='dynamic'
    )
    genres = relation(TraktGenre, secondary=show_genres_table)
    _actors = relation(TraktActor, secondary=show_actors_table)
    updated_at = Column(DateTime)
    cached_at = Column(DateTime)

    def to_dict(self):
        """Serialize the cached show to a plain dict (as exposed by the API plugin)."""
        return {
            "id": self.id,
            "title": self.title,
            "year": self.year,
            "slug": self.slug,
            "tvdb_id": self.tvdb_id,
            "imdb_id": self.imdb_id,
            "tmdb_id": self.tmdb_id,
            "tvrage_id": self.tvrage_id,
            "overview": self.overview,
            "first_aired": self.first_aired,
            "air_day": self.air_day,
            "air_time": self.air_time.strftime("%H:%M") if self.air_time else None,
            "timezone": self.timezone,
            "runtime": self.runtime,
            "certification": self.certification,
            "network": self.network,
            "country": self.country,
            "status": self.status,
            "rating": self.rating,
            "votes": self.votes,
            "language": self.language,
            "homepage": self.homepage,
            "number_of_aired_episodes": self.aired_episodes,
            "genres": [g.name for g in self.genres],
            "updated_at": self.updated_at,
            "cached_at": self.cached_at,
        }

    def __init__(self, trakt_show, session):
        super(TraktShow, self).__init__()
        self.update(trakt_show, session)

    def update(self, trakt_show, session):
        """Updates this record from the trakt media object `trakt_show` returned by the trakt api."""
        if self.id and self.id != trakt_show['ids']['trakt']:
            raise Exception('Tried to update db show with different show data')
        elif not self.id:
            self.id = trakt_show['ids']['trakt']
        self.slug = trakt_show['ids']['slug']
        self.imdb_id = trakt_show['ids']['imdb']
        self.tmdb_id = trakt_show['ids']['tmdb']
        self.tvrage_id = trakt_show['ids']['tvrage']
        self.tvdb_id = trakt_show['ids']['tvdb']
        if trakt_show.get('airs'):
            airs = trakt_show.get('airs')
            self.air_day = airs.get('day')
            self.timezone = airs.get('timezone')
            # Air time arrives as 'HH:MM'; store it as a time object.
            if airs.get('time'):
                self.air_time = datetime.strptime(airs.get('time'), '%H:%M').time()
            else:
                self.air_time = None
        if trakt_show.get('first_aired'):
            self.first_aired = dateutil_parse(trakt_show.get('first_aired'), ignoretz=True)
        else:
            self.first_aired = None
        self.updated_at = dateutil_parse(trakt_show.get('updated_at'), ignoretz=True)
        # Fix: the original listed 'runtime' twice in this column list; the
        # duplicate was harmless but redundant.
        for col in [
            'overview',
            'runtime',
            'rating',
            'votes',
            'language',
            'title',
            'year',
            'certification',
            'network',
            'country',
            'status',
            'aired_episodes',
            'trailer',
            'homepage',
        ]:
            setattr(self, col, trakt_show.get(col))
        # Sometimes genres and translations are None but we really do want a list, hence the "or []"
        self.genres = [
            TraktGenre(name=g.replace(' ', '-')) for g in trakt_show.get('genres') or []
        ]
        self.cached_at = datetime.now()
        self.translation_languages = trakt_show.get('available_translations') or []

    def get_episode(self, season, number, session, only_cached=False):
        """Return the given episode, fetching it from trakt if not cached/fresh.

        :raises LookupError: when only_cached is set and the episode is missing,
            or when the trakt request fails or returns no data.
        """
        # TODO: Does series data being expired mean all episode data should be refreshed?
        episode = (
            self.episodes.filter(TraktEpisode.season == season)
            .filter(TraktEpisode.number == number)
            .first()
        )
        if not episode or self.expired:
            url = get_api_url(
                'shows', self.id, 'seasons', season, 'episodes', number, '?extended=full'
            )
            if only_cached:
                raise LookupError('Episode %s %s not found in cache' % (season, number))
            log.debug('Episode %s %s not found in cache, looking up from trakt.', season, number)
            try:
                data = get_session().get(url).json()
            except requests.RequestException:
                raise LookupError('Error Retrieving Trakt url: %s' % url)
            if not data:
                raise LookupError('No data in response from trakt %s' % url)
            # Match on trakt id: the episode may exist under a different
            # season/number after a trakt renumbering.
            episode = self.episodes.filter(TraktEpisode.id == data['ids']['trakt']).first()
            if episode:
                episode.update(data, session)
            else:
                episode = TraktEpisode(data, session)
                self.episodes.append(episode)
        session.commit()
        return episode

    def get_season(self, number, session, only_cached=False):
        """Return the given season, fetching all seasons from trakt when needed.

        :raises LookupError: when only_cached is set and the season is missing,
            or when the trakt request fails, returns no data, or lacks the season.
        """
        # TODO: Does series data being expired mean all season data should be refreshed?
        season = self.seasons.filter(TraktSeason.number == number).first()
        if not season or self.expired:
            url = get_api_url('shows', self.id, 'seasons', '?extended=full')
            if only_cached:
                raise LookupError('Season %s not found in cache' % number)
            log.debug('Season %s not found in cache, looking up from trakt.', number)
            try:
                ses = get_session()
                data = ses.get(url).json()
            except requests.RequestException:
                raise LookupError('Error Retrieving Trakt url: %s' % url)
            if not data:
                raise LookupError('No data in response from trakt %s' % url)
            # We fetch all seasons for the given show because we barely get any data otherwise
            for season_result in data:
                db_season = self.seasons.filter(
                    TraktSeason.id == season_result['ids']['trakt']
                ).first()
                if db_season:
                    db_season.update(season_result, session)
                else:
                    db_season = TraktSeason(season_result, session)
                    self.seasons.append(db_season)
                if number == season_result['number']:
                    season = db_season
            if not season:
                raise LookupError('Season %s not found for show %s' % (number, self.title))
        session.commit()
        return season

    @property
    def expired(self):
        """
        :return: True if show details are considered to be expired, ie. need of update
        """
        # TODO stolen from imdb plugin, maybe there's a better way?
        if self.cached_at is None:
            log.debug('cached_at is None: %s', self)
            return True
        refresh_interval = 2
        # if show has been cancelled or ended, then it is unlikely to be updated often
        if self.year and (self.status == 'ended' or self.status == 'canceled'):
            # Make sure age is not negative
            age = max((datetime.now().year - self.year), 0)
            refresh_interval += age * 5
            log.debug('show `%s` age %i expires in %i days', self.title, age, refresh_interval)
        return self.cached_at < datetime.now() - timedelta(days=refresh_interval)

    @property
    def translations(self):
        # Lazily populated from the API on first access.
        if not self._translations:
            self._translations = get_translations(self.id, 'show')
        return self._translations

    @property
    def actors(self):
        # Lazily populated from the API on first access.
        # NOTE(review): get_db_actors returns None on request errors, which
        # would make the slice assignment raise — confirm intended.
        if not self._actors:
            self._actors[:] = get_db_actors(self.id, 'show')
        return self._actors

    def __repr__(self):
        return '<name=%s, id=%s>' % (self.title, self.id)
class TraktMovie(Base):
    """Locally cached trakt.tv movie."""

    __tablename__ = 'trakt_movies'
    # Primary key is the trakt movie id assigned by the API.
    id = Column(Integer, primary_key=True, autoincrement=False)
    title = Column(Unicode)
    year = Column(Integer)
    slug = Column(Unicode)
    imdb_id = Column(Unicode)
    tmdb_id = Column(Integer)
    tagline = Column(Unicode)
    overview = Column(Unicode)
    released = Column(Date)
    runtime = Column(Integer)
    rating = Column(Integer)
    votes = Column(Integer)
    trailer = Column(Unicode)
    homepage = Column(Unicode)
    language = Column(Unicode)
    updated_at = Column(DateTime)
    cached_at = Column(DateTime)
    _translations = relation(TraktMovieTranslation, backref='movie')
    _translation_languages = Column('translation_languages', Unicode)
    translation_languages = json_synonym('_translation_languages')
    genres = relation(TraktGenre, secondary=movie_genres_table)
    _actors = relation(TraktActor, secondary=movie_actors_table)

    def __init__(self, trakt_movie, session):
        super(TraktMovie, self).__init__()
        self.update(trakt_movie, session)

    def to_dict(self):
        """Serialize the cached movie to a plain dict (as exposed by the API plugin)."""
        return {
            "id": self.id,
            "title": self.title,
            "year": self.year,
            "slug": self.slug,
            "imdb_id": self.imdb_id,
            "tmdb_id": self.tmdb_id,
            "tagline": self.tagline,
            "overview": self.overview,
            "released": self.released,
            "runtime": self.runtime,
            "rating": self.rating,
            "votes": self.votes,
            "language": self.language,
            "homepage": self.homepage,
            "trailer": self.trailer,
            "genres": [g.name for g in self.genres],
            "updated_at": self.updated_at,
            "cached_at": self.cached_at,
        }

    def update(self, trakt_movie, session):
        """Updates this record from the trakt media object `trakt_movie` returned by the trakt api."""
        if self.id and self.id != trakt_movie['ids']['trakt']:
            raise Exception('Tried to update db movie with different movie data')
        elif not self.id:
            self.id = trakt_movie['ids']['trakt']
        self.slug = trakt_movie['ids']['slug']
        self.imdb_id = trakt_movie['ids']['imdb']
        self.tmdb_id = trakt_movie['ids']['tmdb']
        for col in [
            'title',
            'overview',
            'runtime',
            'rating',
            'votes',
            'language',
            'tagline',
            'year',
            'trailer',
            'homepage',
        ]:
            setattr(self, col, trakt_movie.get(col))
        if trakt_movie.get('released'):
            self.released = dateutil_parse(trakt_movie.get('released'), ignoretz=True).date()
        self.updated_at = dateutil_parse(trakt_movie.get('updated_at'), ignoretz=True)
        # Fix: use `or []` like TraktShow.update does — the API can send an
        # explicit null for these keys, in which case .get(key, []) returns
        # None and iterating/storing it would fail.
        self.genres = [
            TraktGenre(name=g.replace(' ', '-')) for g in trakt_movie.get('genres') or []
        ]
        self.cached_at = datetime.now()
        self.translation_languages = trakt_movie.get('available_translations') or []

    @property
    def expired(self):
        """
        :return: True if movie details are considered to be expired, ie. need of update
        """
        # TODO stolen from imdb plugin, maybe there's a better way?
        if self.updated_at is None:
            log.debug('updated_at is None: %s', self)
            return True
        # Fix: the comparison below uses cached_at; guard against it being
        # unset (previously a TypeError when updated_at was set but cached_at
        # was not) by treating that state as expired.
        if self.cached_at is None:
            return True
        refresh_interval = 2
        if self.year:
            # Make sure age is not negative
            age = max((datetime.now().year - self.year), 0)
            refresh_interval += age * 5
            log.debug('movie `%s` age %i expires in %i days', self.title, age, refresh_interval)
        return self.cached_at < datetime.now() - timedelta(days=refresh_interval)

    @property
    def translations(self):
        # Lazily populated from the API on first access.
        if not self._translations:
            self._translations = get_translations(self.id, 'movie')
        return self._translations

    @property
    def actors(self):
        # Lazily populated from the API on first access.
        if not self._actors:
            self._actors[:] = get_db_actors(self.id, 'movie')
        return self._actors
class TraktShowSearchResult(Base):
    """Maps a lowercased search string to the TraktShow it resolved to."""
    __tablename__ = 'trakt_show_search_results'
    id = Column(Integer, primary_key=True)
    search = Column(Unicode, unique=True, nullable=False)
    series_id = Column(Integer, ForeignKey('trakt_shows.id'), nullable=True)
    series = relation(TraktShow, backref='search_strings')
    def __init__(self, search, series_id=None, series=None):
        # Searches are stored lowercased so lookups are case-insensitive.
        self.search = search.lower()
        if series_id:
            self.series_id = series_id
        if series:
            self.series = series
class TraktMovieSearchResult(Base):
    """Maps a lowercased search string to the TraktMovie it resolved to."""
    __tablename__ = 'trakt_movie_search_results'
    id = Column(Integer, primary_key=True)
    search = Column(Unicode, unique=True, nullable=False)
    movie_id = Column(Integer, ForeignKey('trakt_movies.id'), nullable=True)
    movie = relation(TraktMovie, backref='search_strings')
    def __init__(self, search, movie_id=None, movie=None):
        # Searches are stored lowercased so lookups are case-insensitive.
        self.search = search.lower()
        if movie_id:
            self.movie_id = movie_id
        if movie:
            self.movie = movie
class TraktMovieIds(object):
    """Container for the external ids Trakt accepts for a movie (imdb, tmdb, ...)."""

    def __init__(self, trakt_id=None, trakt_slug=None, tmdb_id=None, imdb_id=None, **kwargs):
        # Extra keyword arguments are tolerated so a full id mapping can be
        # splatted in without pre-filtering.
        self.trakt_id = trakt_id
        self.trakt_slug = trakt_slug
        self.tmdb_id = tmdb_id
        self.imdb_id = imdb_id

    def get_trakt_id(self):
        """Return the numeric trakt id when known, otherwise the slug."""
        if self.trakt_id:
            return self.trakt_id
        return self.trakt_slug

    def to_dict(self):
        """Returns a dict containing id fields that are relevant for a movie"""
        return {
            'id': self.trakt_id,
            'slug': self.trakt_slug,
            'tmdb_id': self.tmdb_id,
            'imdb_id': self.imdb_id,
        }

    def __bool__(self):
        # Truthy as soon as at least one identifier is present.
        return bool(self.trakt_id or self.trakt_slug or self.tmdb_id or self.imdb_id)
class TraktShowIds(object):
    """Container for the external ids Trakt accepts for a show (tvdb, imdb, ...)."""

    def __init__(
        self,
        trakt_id=None,
        trakt_slug=None,
        tmdb_id=None,
        imdb_id=None,
        tvdb_id=None,
        tvrage_id=None,
        **kwargs
    ):
        # Extra keyword arguments are tolerated so a full id mapping can be
        # splatted in without pre-filtering.
        self.trakt_id = trakt_id
        self.trakt_slug = trakt_slug
        self.tmdb_id = tmdb_id
        self.imdb_id = imdb_id
        self.tvdb_id = tvdb_id
        self.tvrage_id = tvrage_id

    def get_trakt_id(self):
        """Return the numeric trakt id when known, otherwise the slug."""
        if self.trakt_id:
            return self.trakt_id
        return self.trakt_slug

    def to_dict(self):
        """Returns a dict containing id fields that are relevant for a show/season/episode"""
        return {
            'id': self.trakt_id,
            'slug': self.trakt_slug,
            'tmdb_id': self.tmdb_id,
            'imdb_id': self.imdb_id,
            'tvdb_id': self.tvdb_id,
            'tvrage_id': self.tvrage_id,
        }

    def __bool__(self):
        # Truthy as soon as at least one identifier is present.
        return bool(
            self.trakt_id
            or self.trakt_slug
            or self.tmdb_id
            or self.imdb_id
            or self.tvdb_id
            or self.tvrage_id
        )
def get_item_from_cache(table, session, title=None, year=None, trakt_ids=None):
    """
    Get the cached info for a given show/movie from the database.
    :param table: Either TraktMovie or TraktShow
    :param title: Title of the show/movie (may include a trailing year)
    :param year: First release year
    :param trakt_ids: instance of TraktShowIds or TraktMovieIds
    :param session: database session object
    :return: query result, or None when nothing matches
    """
    result = None
    if trakt_ids:
        # Match on any of the provided (non-empty) external ids.
        result = (
            session.query(table)
            .filter(
                or_(getattr(table, col) == val for col, val in trakt_ids.to_dict().items() if val)
            )
            .first()
        )
    elif title:
        # A year embedded in the title ("Show (2010)") is split out and used
        # unless an explicit year was already given.
        title, y = split_title_year(title)
        year = year or y
        query = session.query(table).filter(table.title == title)
        if year:
            query = query.filter(table.year == year)
        result = query.first()
    return result
def get_trakt_id_from_id(trakt_ids, media_type):
    """Resolve a trakt id by querying trakt's /search endpoint with each known external id.

    :param trakt_ids: TraktShowIds or TraktMovieIds with at least one id set
    :param media_type: 'show', 'movie' or 'episode'
    :return: trakt id of the first result of the right type, or None
    :raises LookupError: when no ids are provided or a request fails
    """
    if not trakt_ids:
        raise LookupError('No lookup arguments provided.')
    requests_session = get_session()
    for id_type, identifier in trakt_ids.to_dict().items():
        if not identifier:
            continue
        # The API expects e.g. 'imdb' rather than 'imdb_id'. Strip the suffix
        # explicitly: the original used str.rstrip('_id'), which removes a
        # *character set* and would e.g. reduce the 'id' key to ''.
        # NOTE(review): the 'id'/'slug' keys are normally absent here (callers
        # only reach this when no trakt id is known), so this correction should
        # not change live behavior — confirm.
        stripped_id_type = id_type[:-3] if id_type.endswith('_id') else id_type
        try:
            log.debug('Searching with params: %s=%s', stripped_id_type, identifier)
            results = requests_session.get(
                get_api_url('search'), params={'id_type': stripped_id_type, 'id': identifier}
            ).json()
        except requests.RequestException as e:
            raise LookupError(
                'Searching trakt for %s=%s failed with error: %s'
                % (stripped_id_type, identifier, e)
            )
        for result in results:
            if result['type'] != media_type:
                continue
            return result[media_type]['ids']['trakt']
def get_trakt_id_from_title(title, media_type, year=None):
    """Resolve a trakt id via a text search on title (and optionally year).

    Prefers an exact case-insensitive title match; otherwise falls back to
    the first search result. Returns None when the search yields nothing.
    :raises LookupError: when no title is given or the request fails.
    """
    if not title:
        raise LookupError('No lookup arguments provided.')
    requests_session = get_session()
    # Try finding trakt id based on title and year
    parsed_title, y = split_title_year(title)
    y = year or y
    try:
        params = {'query': parsed_title, 'type': media_type, 'year': y}
        log.debug('Type of title: %s', type(parsed_title))
        log.debug(
            'Searching with params: %s',
            ', '.join('{}={}'.format(k, v) for (k, v) in params.items()),
        )
        results = requests_session.get(get_api_url('search'), params=params).json()
    except requests.RequestException as e:
        raise LookupError('Searching trakt for %s failed with error: %s' % (title, e))
    for result in results:
        # Only an explicitly passed-in year filters candidates; a year parsed
        # out of the title is used for the query but not enforced here.
        if year and result[media_type]['year'] != year:
            continue
        if parsed_title.lower() == result[media_type]['title'].lower():
            return result[media_type]['ids']['trakt']
    # grab the first result if there is no exact match
    if results:
        return results[0][media_type]['ids']['trakt']
def get_trakt_data(media_type, title=None, year=None, trakt_ids=None):
    """Fetch the full trakt record for a show/movie.

    The trakt id is resolved in order of preference: directly from
    `trakt_ids`, by external-id search, then by title search.

    :param media_type: 'show' or 'movie'
    :raises LookupError: when no id can be resolved or the final request fails
    :return: the parsed JSON object from the trakt API
    """
    trakt_id = None
    if trakt_ids:
        trakt_id = trakt_ids.get_trakt_id()
    if not trakt_id and trakt_ids:
        trakt_id = get_trakt_id_from_id(trakt_ids, media_type)
    if not trakt_id and title:
        trakt_id = get_trakt_id_from_title(title, media_type, year=year)
    if not trakt_id:
        # Fix: the original interpolated the bound method `trakt_ids.to_dict`
        # (without calling it), producing '<bound method ...>' in the message.
        raise LookupError(
            'No results on Trakt.tv, title=%s, ids=%s.'
            % (title, trakt_ids.to_dict() if trakt_ids else None)
        )
    # Get actual data from trakt
    try:
        return (
            get_session()
            .get(get_api_url(media_type + 's', trakt_id), params={'extended': 'full'})
            .json()
        )
    except requests.RequestException as e:
        raise LookupError('Error getting trakt data for id %s: %s' % (trakt_id, e))
def get_user_data(data_type, media_type, session, username):
    """
    Fetches user data from Trakt.tv on the /users/<username>/<data_type>/<media_type> end point. Eg. a user's
    movie collection is fetched from /users/<username>/collection/movies.
    :param data_type: Name of the data type eg. collection, watched etc.
    :param media_type: Type of media we want <data_type> for eg. shows, episodes, movies.
    :param session: A trakt requests session with a valid token
    :param username: Username of the user to fetch data
    :return: list of flattened records (empty when the endpoint returns nothing)
    """
    endpoint = '{}/{}'.format(data_type, media_type)
    try:
        data = session.get(get_api_url('users', username, data_type, media_type)).json()
        if not data:
            log.warning('No %s data returned from trakt endpoint %s.', data_type, endpoint)
            return []
        log.verbose(
            'Received %d records from trakt.tv for user %s from endpoint %s',
            len(data),
            username,
            endpoint,
        )
        # extract show, episode and movie information
        # Each record nests its media objects; flatten them into the record
        # itself so callers can read fields directly.
        for item in data:
            episode = item.pop('episode', {})
            season = item.pop('season', {})
            show = item.pop('show', {})
            movie = item.pop('movie', {})
            item.update(episode)
            item.update(season)
            item.update(movie)
            # show is irrelevant if either episode or season is present
            if not episode and not season:
                item.update(show)
        return data
    except requests.RequestException as e:
        raise plugin.PluginError(
            'Error fetching data from trakt.tv endpoint %s for user %s: %s'
            % (endpoint, username, e)
        )
def get_username(username=None, account=None):
    """Resolve the username for a user-data API call.

    When an authenticated account is given without an explicit username,
    trakt's special 'me' user is targeted; otherwise the username is
    returned unchanged (possibly None).
    """
    if username:
        return username
    return 'me' if account else username
| mit |
proxysh/Safejumper-for-Mac | buildlinux/env32/lib/python2.7/site-packages/Crypto/SelfTest/Protocol/test_AllOrNothing.py | 120 | 3024 | #
# Test script for Crypto.Protocol.AllOrNothing
#
# Part of the Python Cryptography Toolkit
#
# Written by Andrew Kuchling and others
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
__revision__ = "$Id$"
import unittest
from Crypto.Protocol import AllOrNothing
from Crypto.Util.py3compat import *
text = b("""\
When in the Course of human events, it becomes necessary for one people to
dissolve the political bands which have connected them with another, and to
assume among the powers of the earth, the separate and equal station to which
the Laws of Nature and of Nature's God entitle them, a decent respect to the
opinions of mankind requires that they should declare the causes which impel
them to the separation.
We hold these truths to be self-evident, that all men are created equal, that
they are endowed by their Creator with certain unalienable Rights, that among
these are Life, Liberty, and the pursuit of Happiness. That to secure these
rights, Governments are instituted among Men, deriving their just powers from
the consent of the governed. That whenever any Form of Government becomes
destructive of these ends, it is the Right of the People to alter or to
abolish it, and to institute new Government, laying its foundation on such
principles and organizing its powers in such form, as to them shall seem most
likely to effect their Safety and Happiness.
""")
class AllOrNothingTest (unittest.TestCase):
    def runTest(self):
        "Simple test of AllOrNothing"
        from Crypto.Cipher import AES
        # NOTE(review): base64 appears unused in this test body.
        import base64
        # The current AllOrNothing will fail
        # every so often. Repeat the test
        # several times to force this.
        for i in range(50):
            # Round-trip: digest splits `text` into message blocks such that
            # all blocks are required to recover the plaintext.
            x = AllOrNothing.AllOrNothing(AES)
            msgblocks = x.digest(text)
            # get a new undigest-only object so there's no leakage
            y = AllOrNothing.AllOrNothing(AES)
            text2 = y.undigest(msgblocks)
            self.assertEqual(text, text2)
def get_tests(config={}):
    """Return the test cases for the toolkit's test runner (config is unused).

    NOTE(review): the mutable default is the toolkit-wide convention and the
    dict is never mutated, so it is harmless here.
    """
    return [AllOrNothingTest()]
if __name__ == "__main__":
    unittest.main()
| gpl-2.0 |
havatv/QGIS | python/core/additions/projectdirtyblocker.py | 43 | 1534 | # -*- coding: utf-8 -*-
"""
***************************************************************************
projectdirtyblocker.py
---------------------
Date : May 2018
Copyright : (C) 2018 by Denis Rouzaud
Email : denis@opengis.ch
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from qgis._core import QgsProjectDirtyBlocker
class ProjectDirtyBlocker():
    """
    Context manager used to block project setDirty calls.

    .. code-block:: python

        project = QgsProject.instance()
        with QgsProject.blockDirtying(project):
            # do something

    .. versionadded:: 3.2
    """

    def __init__(self, project):
        self.project = project
        self.blocker = None

    def __enter__(self):
        # The underlying QgsProjectDirtyBlocker suppresses setDirty calls for
        # as long as it is alive.
        self.blocker = QgsProjectDirtyBlocker(self.project)
        return self.project

    def __exit__(self, ex_type, ex_value, traceback):
        del self.blocker
        # Fix: returning True unconditionally silently suppressed ANY
        # exception raised inside the `with` block. Only report success when
        # no exception occurred, so errors propagate to the caller.
        return ex_type is None
| gpl-2.0 |
kkdd/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/lib2to3/fixes/fix_intern.py | 49 | 1368 | # Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from .. import pytree
from .. import fixer_base
from ..fixer_util import Name, Attr, touch_import
class FixIntern(fixer_base.BaseFix):
    """2to3 fixer rewriting intern(s) calls into sys.intern(s)."""
    # Matches a call to the builtin `intern` with exactly one positional
    # argument (keyword arguments are excluded), capturing the parens,
    # the argument and any trailing trailers.
    PATTERN = """
    power< 'intern'
           trailer< lpar='('
                    ( not(arglist | argument<any '=' any>) obj=any
                      | obj=arglist<(not argument<any '=' any>) any ','> )
                    rpar=')' >
           after=any*
    >
    """
    def transform(self, node, results):
        syms = self.syms
        obj = results["obj"].clone()
        # Wrap a bare argument in an arglist node so the rebuilt call has a
        # uniform shape.
        if obj.type == syms.arglist:
            newarglist = obj.clone()
        else:
            newarglist = pytree.Node(syms.arglist, [obj.clone()])
        after = results["after"]
        if after:
            after = [n.clone() for n in after]
        # Rebuild as sys.intern(<args>)<trailers>, reusing the original parens.
        new = pytree.Node(syms.power,
                          Attr(Name("sys"), Name("intern")) +
                          [pytree.Node(syms.trailer,
                                       [results["lpar"].clone(),
                                        newarglist,
                                        results["rpar"].clone()])] + after)
        new.set_prefix(node.get_prefix())
        # Ensure `import sys` exists in the transformed module.
        touch_import(None, 'sys', node)
        return new
| apache-2.0 |
rajsadho/django | tests/prefetch_related/models.py | 255 | 7972 | import uuid
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# Basic tests
@python_2_unicode_compatible
class Author(models.Model):
    # Unique name: used as the to_field target by FavoriteAuthors/AuthorAddress.
    name = models.CharField(max_length=50, unique=True)
    first_book = models.ForeignKey('Book', models.CASCADE, related_name='first_time_authors')
    # Asymmetric self-referencing M2M through an explicit intermediary model.
    favorite_authors = models.ManyToManyField(
        'self', through='FavoriteAuthors', symmetrical=False, related_name='favors_me')
    def __str__(self):
        return self.name
    class Meta:
        ordering = ['id']
# Multi-table inheritance child of Author with an explicit parent link.
class AuthorWithAge(Author):
    author = models.OneToOneField(Author, models.CASCADE, parent_link=True)
    age = models.IntegerField()
# Intermediary model for Author.favorite_authors; both FKs target Author.name.
class FavoriteAuthors(models.Model):
    author = models.ForeignKey(Author, models.CASCADE, to_field='name', related_name='i_like')
    likes_author = models.ForeignKey(Author, models.CASCADE, to_field='name', related_name='likes_me')
    class Meta:
        ordering = ['id']
@python_2_unicode_compatible
class AuthorAddress(models.Model):
    # FK targets Author.name (a non-pk to_field).
    author = models.ForeignKey(Author, models.CASCADE, to_field='name', related_name='addresses')
    address = models.TextField()
    class Meta:
        ordering = ['id']
    def __str__(self):
        return self.address
@python_2_unicode_compatible
class Book(models.Model):
    title = models.CharField(max_length=255)
    authors = models.ManyToManyField(Author, related_name='books')
    def __str__(self):
        return self.title
    class Meta:
        ordering = ['id']
class BookWithYear(Book):
    """MTI child of Book; adds a year and an M2M to the AuthorWithAge child model."""
    book = models.OneToOneField(Book, models.CASCADE, parent_link=True)
    published_year = models.IntegerField()
    aged_authors = models.ManyToManyField(
        AuthorWithAge, related_name='books_with_year')
class Bio(models.Model):
    """One-to-one companion of Author with an optional M2M to Book."""
    author = models.OneToOneField(Author, models.CASCADE)
    books = models.ManyToManyField(Book, blank=True)
@python_2_unicode_compatible
class Reader(models.Model):
    """Reader with an M2M to the books they have read."""
    name = models.CharField(max_length=50)
    books_read = models.ManyToManyField(Book, related_name='read_by')

    def __str__(self):
        return self.name

    class Meta:
        ordering = ['id']
class BookReview(models.Model):
    """Review pointing at the BookWithYear child model (not the Book base)."""
    book = models.ForeignKey(BookWithYear, models.CASCADE)
    notes = models.TextField(null=True, blank=True)
# Models for default manager tests
class Qualification(models.Model):
    """Simple named qualification; used by the default-manager tests below."""
    name = models.CharField(max_length=10)

    class Meta:
        ordering = ['id']
class TeacherManager(models.Manager):
    """Default manager that always prefetches 'qualifications'."""
    def get_queryset(self):
        return super(TeacherManager, self).get_queryset().prefetch_related('qualifications')
@python_2_unicode_compatible
class Teacher(models.Model):
    """Teacher whose default manager prefetches qualifications (see TeacherManager)."""
    name = models.CharField(max_length=50)
    qualifications = models.ManyToManyField(Qualification)
    objects = TeacherManager()

    def __str__(self):
        # __str__ iterates the M2M, so it benefits from the manager's prefetch.
        return "%s (%s)" % (self.name, ", ".join(q.name for q in self.qualifications.all()))

    class Meta:
        ordering = ['id']
class Department(models.Model):
    """Department with an M2M to Teacher."""
    name = models.CharField(max_length=50)
    teachers = models.ManyToManyField(Teacher)

    class Meta:
        ordering = ['id']
# GenericRelation/GenericForeignKey tests
@python_2_unicode_compatible
class TaggedItem(models.Model):
    """Tag carrying three independent generic foreign keys (content/created_by/favorite)."""
    tag = models.SlugField()
    # Primary generic relation: integer object id.
    content_type = models.ForeignKey(
        ContentType,
        models.CASCADE,
        related_name="taggeditem_set2",
    )
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')
    # Secondary generic relation: nullable integer fk field.
    created_by_ct = models.ForeignKey(
        ContentType,
        models.SET_NULL,
        null=True,
        related_name='taggeditem_set3',
    )
    created_by_fkey = models.PositiveIntegerField(null=True)
    created_by = GenericForeignKey('created_by_ct', 'created_by_fkey',)
    # Tertiary generic relation: CharField fk field (non-integer object ids).
    favorite_ct = models.ForeignKey(
        ContentType,
        models.SET_NULL,
        null=True,
        related_name='taggeditem_set4',
    )
    favorite_fkey = models.CharField(max_length=64, null=True)
    favorite = GenericForeignKey('favorite_ct', 'favorite_fkey')

    def __str__(self):
        return self.tag

    class Meta:
        ordering = ['id']
class Bookmark(models.Model):
    """Bookmark with two GenericRelations into TaggedItem (default and 'favorite' fields)."""
    url = models.URLField()
    tags = GenericRelation(TaggedItem, related_query_name='bookmarks')
    favorite_tags = GenericRelation(TaggedItem,
                                    content_type_field='favorite_ct',
                                    object_id_field='favorite_fkey',
                                    related_query_name='favorite_bookmarks')

    class Meta:
        ordering = ['id']
class Comment(models.Model):
    """Comment whose generic FK uses a TextField object_pk instead of an integer id."""
    comment = models.TextField()
    # Content-object field
    content_type = models.ForeignKey(ContentType, models.CASCADE)
    object_pk = models.TextField()
    content_object = GenericForeignKey(ct_field="content_type", fk_field="object_pk")

    class Meta:
        ordering = ['id']
# Models for lookup ordering tests
class House(models.Model):
    """House with nullable owner and main_room links; lookup-ordering tests."""
    name = models.CharField(max_length=50)
    address = models.CharField(max_length=255)
    owner = models.ForeignKey('Person', models.SET_NULL, null=True)
    main_room = models.OneToOneField('Room', models.SET_NULL, related_name='main_room_of', null=True)

    class Meta:
        ordering = ['id']
class Room(models.Model):
    """Room belonging to a House."""
    name = models.CharField(max_length=50)
    house = models.ForeignKey(House, models.CASCADE, related_name='rooms')

    class Meta:
        ordering = ['id']
class Person(models.Model):
    """Person occupying houses; properties exercise prefetching of nested relations."""
    name = models.CharField(max_length=50)
    houses = models.ManyToManyField(House, related_name='occupants')

    @property
    def primary_house(self):
        # Assume business logic forces every person to have at least one house.
        # Sorts by descending room count; issues one count() query per house
        # unless rooms were prefetched.
        return sorted(self.houses.all(), key=lambda house: -house.rooms.count())[0]

    @property
    def all_houses(self):
        return list(self.houses.all())

    class Meta:
        ordering = ['id']
# Models for nullable FK tests
@python_2_unicode_compatible
class Employee(models.Model):
    """Employee with a nullable self-referential boss FK (nullable-FK tests)."""
    name = models.CharField(max_length=50)
    boss = models.ForeignKey('self', models.SET_NULL, null=True, related_name='serfs')

    def __str__(self):
        return self.name

    class Meta:
        ordering = ['id']
# Ticket #19607
@python_2_unicode_compatible
class LessonEntry(models.Model):
    """Regression model for ticket #19607."""
    name1 = models.CharField(max_length=200)
    name2 = models.CharField(max_length=200)

    def __str__(self):
        return "%s %s" % (self.name1, self.name2)
@python_2_unicode_compatible
class WordEntry(models.Model):
    """Word belonging to a LessonEntry (ticket #19607)."""
    lesson_entry = models.ForeignKey(LessonEntry, models.CASCADE)
    name = models.CharField(max_length=200)

    def __str__(self):
        return "%s (%s)" % (self.name, self.id)
# Ticket #21410: Regression when related_name="+"
@python_2_unicode_compatible
class Author2(models.Model):
    """Regression model for ticket #21410: relations with related_name='+'."""
    name = models.CharField(max_length=50, unique=True)
    first_book = models.ForeignKey('Book', models.CASCADE, related_name='first_time_authors+')
    favorite_books = models.ManyToManyField('Book', related_name='+')

    def __str__(self):
        return self.name

    class Meta:
        ordering = ['id']
# Models for many-to-many with UUID pk test:
class Pet(models.Model):
    """Model with a UUID primary key for the M2M-with-UUID-pk tests."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    name = models.CharField(max_length=20)
    people = models.ManyToManyField(Person, related_name='pets')
class Flea(models.Model):
    """UUID-pk model relating to both UUID-pk (Pet) and integer-pk (Person, Room) models."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    current_room = models.ForeignKey(Room, models.SET_NULL, related_name='fleas', null=True)
    pets_visited = models.ManyToManyField(Pet, related_name='fleas_hosted')
    people_visited = models.ManyToManyField(Person, related_name='fleas_hosted')
| bsd-3-clause |
EvanK/ansible | lib/ansible/modules/network/f5/bigip_data_group.py | 14 | 45084 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_data_group
short_description: Manage data groups on a BIG-IP
description:
- Allows for managing data groups on a BIG-IP. Data groups provide a way to store collections
of values on a BIG-IP for later use in things such as LTM rules, iRules, and ASM policies.
version_added: 2.6
options:
name:
description:
- Specifies the name of the data group.
required: True
description:
  description:
    description:
      - The description of the data group.
version_added: 2.8
type:
description:
- The type of records in this data group.
- This parameter is especially important because it causes BIG-IP to store your data
in different ways so-as to optimize access to it. For example, it would be wrong
to specify a list of records containing IP addresses, but label them as a C(string)
type.
- This value cannot be changed once the data group is created.
choices:
- address
- addr
- ip
- string
- str
- integer
- int
default: string
internal:
description:
- The type of this data group.
- You should only consider setting this value in cases where you know exactly what
you're doing, B(or), you are working with a pre-existing internal data group.
- Be aware that if you deliberately force this parameter to C(yes), and you have a
either a large number of records or a large total records size, this large amount
of data will be reflected in your BIG-IP configuration. This can lead to B(long)
system configuration load times due to needing to parse and verify the large
configuration.
- There is a limit of either 4 megabytes or 65,000 records (whichever is more restrictive)
for uploads when this parameter is C(yes).
- This value cannot be changed once the data group is created.
type: bool
default: no
external_file_name:
description:
- When creating a new data group, this specifies the file name that you want to give an
external data group file on the BIG-IP.
- This parameter is ignored when C(internal) is C(yes).
- This parameter can be used to select an existing data group file to use with an
existing external data group.
- If this value is not provided, it will be given the value specified in C(name) and,
therefore, match the name of the data group.
- This value may only contain letters, numbers, underscores, dashes, or a period.
records:
description:
- Specifies the records that you want to add to a data group.
      - If you have a large number of records, it is recommended that you use C(records_src)
        instead of typing all those records here.
- The technical limit of either 1. the number of records, or 2. the total size of all
records, varies with the size of the total resources on your system; in particular,
RAM.
      - When C(internal) is C(no), at least one record must be specified in either C(records)
        or C(records_src).
suboptions:
key:
description:
- The key describing the record in the data group.
- Your key will be used for validation of the C(type) parameter to this module.
required: True
value:
description:
- The value of the key describing the record in the data group.
records_src:
description:
- Path to a file with records in it.
- The file should be well-formed. This means that it includes records, one per line,
that resemble the following format "key separator value". For example, C(foo := bar).
- BIG-IP is strict about this format, but this module is a bit more lax. It will allow
you to include arbitrary amounts (including none) of empty space on either side of
the separator. For an illustration of this, see the Examples section.
- Record keys are limited in length to no more than 65520 characters.
- Values of record keys are limited in length to no more than 65520 characters.
- The total number of records you can have in your BIG-IP is limited by the memory
of the BIG-IP.
- The format of this content is slightly different depending on whether you specify
a C(type) of C(address), C(integer), or C(string). See the examples section for
examples of the different types of payload formats that are expected in your data
group file.
      - When C(internal) is C(no), at least one record must be specified in either C(records)
        or C(records_src).
separator:
description:
      - When specifying C(records_src), this is the string of characters that will
        be used to break apart entries in the C(records_src) file into key/value pairs.
- By default, this parameter's value is C(:=).
- This value cannot be changed once it is set.
- This parameter is only relevant when C(internal) is C(no). It will be ignored
otherwise.
default: ":="
delete_data_group_file:
description:
- When C(yes), will ensure that the remote data group file is deleted.
- This parameter is only relevant when C(state) is C(absent) and C(internal) is C(no).
default: no
type: bool
partition:
description:
- Device partition to manage resources on.
default: Common
state:
description:
- When C(state) is C(present), ensures the data group exists.
- When C(state) is C(absent), ensures that the data group is removed.
choices:
- present
- absent
default: present
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create a data group of addresses
bigip_data_group:
name: foo
records:
- key: 0.0.0.0/32
value: External_NAT
- key: 10.10.10.10
value: No_NAT
type: address
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Create a data group of strings
bigip_data_group:
name: foo
records:
- key: caddy
value: ""
- key: cafeteria
value: ""
- key: cactus
value: ""
type: str
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Create a data group of IP addresses from a file
bigip_data_group:
name: foo
records_src: /path/to/dg-file
type: address
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Update an existing internal data group of strings
bigip_data_group:
name: foo
internal: yes
records:
- key: caddy
value: ""
- key: cafeteria
value: ""
- key: cactus
value: ""
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Show the data format expected for records_content - address 1
copy:
dest: /path/to/addresses.txt
content: |
network 10.0.0.0 prefixlen 8 := "Network1",
network 172.16.0.0 prefixlen 12 := "Network2",
network 192.168.0.0 prefixlen 16 := "Network3",
network 2402:9400:1000:0:: prefixlen 64 := "Network4",
host 192.168.20.1 := "Host1",
host 172.16.1.1 := "Host2",
host 172.16.1.1/32 := "Host3",
host 2001:0db8:85a3:0000:0000:8a2e:0370:7334 := "Host4",
host 2001:0db8:85a3:0000:0000:8a2e:0370:7334/128 := "Host5"
- name: Show the data format expected for records_content - address 2
copy:
dest: /path/to/addresses.txt
content: |
10.0.0.0/8 := "Network1",
172.16.0.0/12 := "Network2",
192.168.0.0/16 := "Network3",
2402:9400:1000:0::/64 := "Network4",
192.168.20.1 := "Host1",
172.16.1.1 := "Host2",
172.16.1.1/32 := "Host3",
2001:0db8:85a3:0000:0000:8a2e:0370:7334 := "Host4",
2001:0db8:85a3:0000:0000:8a2e:0370:7334/128 := "Host5"
- name: Show the data format expected for records_content - string
copy:
dest: /path/to/strings.txt
content: |
a := alpha,
b := bravo,
c := charlie,
x := x-ray,
y := yankee,
z := zulu,
- name: Show the data format expected for records_content - integer
copy:
dest: /path/to/integers.txt
content: |
1 := bar,
2 := baz,
3,
4,
'''
RETURN = r'''
# only common fields returned
'''
import hashlib
import os
import re
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from io import StringIO
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.common import compare_complex_list
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.ipaddress import is_valid_ip_interface
from library.module_utils.compat.ipaddress import ip_network
from library.module_utils.compat.ipaddress import ip_interface
from library.module_utils.network.f5.icontrol import upload_file
from library.module_utils.network.f5.compare import cmp_str_with_none
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.common import compare_complex_list
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.ipaddress import is_valid_ip_interface
from ansible.module_utils.compat.ipaddress import ip_network
from ansible.module_utils.compat.ipaddress import ip_interface
from ansible.module_utils.network.f5.icontrol import upload_file
from ansible.module_utils.network.f5.compare import cmp_str_with_none
# Hard limits for *internal* data groups (see DOCUMENTATION: uploads are capped
# at 65,000 records or 4 MB, whichever is more restrictive).
LINE_LIMIT = 65000
SIZE_LIMIT_BYTES = 4000000
def zero_length(content):
    """Return True when the seekable file-like object *content* holds no data.

    The stream position is rewound to the start before returning, so the
    caller can read *content* from the beginning afterwards.
    """
    content.seek(0, os.SEEK_END)
    is_empty = content.tell() == 0
    content.seek(0)
    return is_empty
def size_exceeded(content, limit=None):
    """Return True when *content* holds more than *limit* characters/bytes.

    :param content: seekable file-like object; its position is rewound to the
        start before returning.
    :param limit: maximum allowed size. Defaults to the module-wide
        SIZE_LIMIT_BYTES (generalized from the original hard-coded constant;
        existing callers are unaffected).
    """
    if limit is None:
        limit = SIZE_LIMIT_BYTES
    content.seek(0, os.SEEK_END)
    size = content.tell()
    content.seek(0)
    return size > limit
def lines_exceeded(content, limit=None):
    """Return True when *content* contains a line whose 0-based index exceeds *limit*.

    This preserves the original semantics exactly (True once more than
    ``limit + 1`` lines exist), but stops scanning as soon as the limit is
    passed instead of reading every remaining line. The stream position is
    rewound to the start before returning.

    :param limit: maximum allowed 0-based line index. Defaults to the
        module-wide LINE_LIMIT (generalized from the original hard-coded
        constant; existing callers are unaffected).
    """
    if limit is None:
        limit = LINE_LIMIT
    result = False
    for i, line in enumerate(content):
        if i > limit:
            result = True
            break  # no need to consume the rest of the stream
    content.seek(0)
    return result
class RecordsEncoder(object):
    """Serialize records (dicts from the module args, or raw file lines) into
    the canonical "key separator value" line format BIG-IP expects in a data
    group file. Encoding depends on the configured record type ('ip',
    'integer', or string).
    """
    def __init__(self, record_type=None, separator=None):
        self._record_type = record_type
        self._separator = separator
        # Patterns matching lines that are already in BIG-IP's canonical
        # 'network .../prefixlen ...' and 'host ...' address forms.
        self._network_pattern = re.compile(r'^network\s+(?P<addr>[^ ]+)\s+prefixlen\s+(?P<prefix>\d+)\s+.*')
        self._host_pattern = re.compile(r'^host\s+(?P<addr>[^ ]+)\s+.*')
    def encode(self, record):
        # Dispatch on input shape: dicts come from the 'records' module option,
        # plain strings come from a 'records_src' file.
        if isinstance(record, dict):
            return self.encode_dict(record)
        else:
            return self.encode_string(record)
    def encode_dict(self, record):
        if self._record_type == 'ip':
            return self.encode_address_from_dict(record)
        elif self._record_type == 'integer':
            return self.encode_integer_from_dict(record)
        else:
            return self.encode_string_from_dict(record)
    def encode_address_from_dict(self, record):
        # Validate the key is an IP/interface before encoding.
        if is_valid_ip_interface(record['key']):
            key = ip_interface(u"{0}".format(str(record['key'])))
        else:
            raise F5ModuleError(
                "When specifying an 'address' type, the value to the left of the separator must be an IP."
            )
        if key and 'value' in record:
            # /32 and /128 are single hosts; everything else is a network.
            if key.network.prefixlen in [32, 128]:
                return self.encode_host(str(key.ip), record['value'])
            return self.encode_network(
                str(key.network.network_address), key.network.prefixlen, record['value']
            )
        elif key:
            # No value supplied: BIG-IP convention is to use the address itself.
            if key.network.prefixlen in [32, 128]:
                return self.encode_host(str(key.ip), str(key.ip))
            return self.encode_network(
                str(key.network.network_address), key.network.prefixlen, str(key.network.network_address)
            )
    def encode_integer_from_dict(self, record):
        try:
            int(record['key'])
        except ValueError:
            raise F5ModuleError(
                "When specifying an 'integer' type, the value to the left of the separator must be a number."
            )
        if 'key' in record and 'value' in record:
            return '{0} {1} {2}'.format(record['key'], self._separator, record['value'])
        elif 'key' in record:
            return str(record['key'])
    def encode_string_from_dict(self, record):
        if 'key' in record and 'value' in record:
            return '{0} {1} {2}'.format(record['key'], self._separator, record['value'])
        elif 'key' in record:
            # A key with no value is written with an explicit empty string.
            return '{0} {1} ""'.format(record['key'], self._separator)
    def encode_string(self, record):
        # File lines may carry trailing commas/whitespace from the BIG-IP format.
        record = record.strip().strip(',')
        if self._record_type == 'ip':
            return self.encode_address_from_string(record)
        elif self._record_type == 'integer':
            return self.encode_integer_from_string(record)
        else:
            return self.encode_string_from_string(record)
    def encode_address_from_string(self, record):
        if self._network_pattern.match(record):
            # network 192.168.0.0 prefixlen 16 := "Network3",
            # network 2402:9400:1000:0:: prefixlen 64 := "Network4",
            return record
        elif self._host_pattern.match(record):
            # host 172.16.1.1/32 := "Host3"
            # host 2001:0db8:85a3:0000:0000:8a2e:0370:7334 := "Host4"
            return record
        else:
            # 192.168.0.0/16 := "Network3",
            # 2402:9400:1000:0::/64 := "Network4",
            parts = record.split(self._separator)
            if parts[0] == '':
                # Blank line; nothing to encode.
                return
            if not is_valid_ip_interface(parts[0]):
                raise F5ModuleError(
                    "When specifying an 'address' type, the value to the left of the separator must be an IP."
                )
            key = ip_interface(u"{0}".format(str(parts[0])))
            if len(parts) == 2:
                if key.network.prefixlen in [32, 128]:
                    return self.encode_host(str(key.ip), parts[1])
                return self.encode_network(
                    str(key.network.network_address), key.network.prefixlen, parts[1]
                )
            elif len(parts) == 1 and parts[0] != '':
                if key.network.prefixlen in [32, 128]:
                    return self.encode_host(str(key.ip), str(key.ip))
                return self.encode_network(
                    str(key.network.network_address), key.network.prefixlen, str(key.network.network_address)
                )
    def encode_host(self, key, value):
        # Canonical single-host record line.
        return 'host {0} {1} {2}'.format(str(key), self._separator, str(value))
    def encode_network(self, key, prefixlen, value):
        # Canonical network record line.
        return 'network {0} prefixlen {1} {2} {3}'.format(
            str(key), str(prefixlen), self._separator, str(value)
        )
    def encode_integer_from_string(self, record):
        parts = record.split(self._separator)
        if len(parts) == 1 and parts[0] == '':
            # Blank line; nothing to encode.
            return None
        try:
            int(parts[0])
        except ValueError:
            raise F5ModuleError(
                "When specifying an 'integer' type, the value to the left of the separator must be a number."
            )
        if len(parts) == 2:
            return '{0} {1} {2}'.format(parts[0], self._separator, parts[1])
        elif len(parts) == 1:
            return str(parts[0])
    def encode_string_from_string(self, record):
        parts = record.split(self._separator)
        if len(parts) == 2:
            return '{0} {1} {2}'.format(parts[0], self._separator, parts[1])
        elif len(parts) == 1 and parts[0] != '':
            return '{0} {1} ""'.format(parts[0], self._separator)
class RecordsDecoder(object):
    """Parse data group file lines back into ``dict(name=..., data=...)``
    records as BIG-IP represents them in the REST API.
    """
    def __init__(self, record_type=None, separator=None):
        self._record_type = record_type
        self._separator = separator
        # Same canonical 'network'/'host' line patterns used by RecordsEncoder.
        self._network_pattern = re.compile(r'^network\s+(?P<addr>[^ ]+)\s+prefixlen\s+(?P<prefix>\d+)\s+.*')
        self._host_pattern = re.compile(r'^host\s+(?P<addr>[^ ]+)\s+.*')
    def decode(self, record):
        # Strip the trailing comma/newline that lines in a dg file carry.
        record = record.strip().strip(',')
        if self._record_type == 'ip':
            return self.decode_address_from_string(record)
        else:
            return self.decode_from_string(record)
    def decode_address_from_string(self, record):
        """Decode a canonical 'network'/'host' address line; raise on anything else."""
        matches = self._network_pattern.match(record)
        if matches:
            # network 192.168.0.0 prefixlen 16 := "Network3",
            # network 2402:9400:1000:0:: prefixlen 64 := "Network4",
            key = u"{0}/{1}".format(matches.group('addr'), matches.group('prefix'))
            addr = ip_network(key)
            value = record.split(self._separator)[1].strip().strip('"')
            result = dict(name=str(addr), data=value)
            return result
        matches = self._host_pattern.match(record)
        if matches:
            # host 172.16.1.1/32 := "Host3"
            # host 2001:0db8:85a3:0000:0000:8a2e:0370:7334 := "Host4"
            key = matches.group('addr')
            addr = ip_interface(u"{0}".format(str(key)))
            value = record.split(self._separator)[1].strip().strip('"')
            result = dict(name=str(addr), data=value)
            return result
        raise F5ModuleError(
            'The value "{0}" is not an address'.format(record)
        )
    def decode_from_string(self, record):
        # string/integer types: split on the separator; missing value -> "".
        parts = record.split(self._separator)
        if len(parts) == 2:
            return dict(name=parts[0].strip(), data=parts[1].strip('"').strip())
        else:
            return dict(name=parts[0].strip(), data="")
class Parameters(AnsibleF5Parameters):
    """Base parameter class shared by the module- and API-side parameter views.

    Defines the attribute maps used by AnsibleF5Parameters and the lazy
    ``records_src`` buffer that normalized records are encoded into.
    """
    api_map = {
        'externalFileName': 'external_file_name',
    }
    api_attributes = [
        'records',
        'type',
        'description',
    ]
    returnables = [
        'type',
        'records',
        'description',
    ]
    updatables = [
        'records',
        'checksum',
        'description',
    ]

    @property
    def type(self):
        # Normalize the user-facing aliases to the three canonical types.
        if self._values['type'] in ['address', 'addr', 'ip']:
            return 'ip'
        elif self._values['type'] in ['integer', 'int']:
            return 'integer'
        elif self._values['type'] in ['string']:
            return 'string'

    @property
    def records_src(self):
        """Return a rewound StringIO of encoded records, built lazily on first use.

        Returns None when neither ``records_src`` (a file path) nor ``records``
        (a list) was supplied.
        """
        try:
            # Already materialized on a previous access; rewind for the caller.
            self._values['records_src'].seek(0)
            return self._values['records_src']
        except AttributeError:
            pass
        if self._values['records_src']:
            source = open(self._values['records_src'])
        else:
            source = self._values['records']
        if source is None:
            return None
        # There is a 98% chance that the user will supply a data group that is < 1MB.
        # 99.917% chance it is less than 10 MB. This is well within the range of typical
        # memory available on a system.
        #
        # If this changes, this may need to be changed to use temporary files instead.
        try:
            self._values['records_src'] = StringIO()
            self._write_records_to_file(source)
        finally:
            # Bug fix: the original never closed the opened records file,
            # leaking the file handle. 'source' may also be a plain list
            # (from the 'records' option), which has no close().
            if hasattr(source, 'close'):
                source.close()
        return self._values['records_src']

    def _write_records_to_file(self, records):
        """Encode *records* into the records_src buffer, flushing in batches."""
        bucket_size = 1000000
        bucket = []
        encoder = RecordsEncoder(record_type=self.type, separator=self.separator)
        for record in records:
            result = encoder.encode(record)
            if result:
                bucket.append(to_text(result + ",\n"))
                if len(bucket) == bucket_size:
                    self._values['records_src'].writelines(bucket)
                    bucket = []
        self._values['records_src'].writelines(bucket)
        self._values['records_src'].seek(0)
class ApiParameters(Parameters):
    """Parameters as read back from the BIG-IP REST API."""
    @property
    def checksum(self):
        # The API reports a colon-delimited value; keep only the third field
        # (the digest itself) so it compares against ModuleParameters.checksum.
        if self._values['checksum'] is None:
            return None
        result = self._values['checksum'].split(':')[2]
        return result
    @property
    def records(self):
        if self._values['records'] is None:
            return None
        return self._values['records']
    @property
    def records_list(self):
        # API records are already in list form.
        return self.records
    @property
    def description(self):
        # The API uses the literal string 'none' for an unset description.
        if self._values['description'] in [None, 'none']:
            return None
        return self._values['description']
class ModuleParameters(Parameters):
    """Parameters as supplied by the user in the Ansible task."""
    @property
    def description(self):
        # 'none' or '' from the task means "clear the description" (empty string).
        if self._values['description'] is None:
            return None
        elif self._values['description'] in ['none', '']:
            return ''
        return self._values['description']
    @property
    def checksum(self):
        """SHA-1 hex digest of the encoded records, computed once and cached.

        SHA-1 is used here to match the checksum format reported by the
        device (see ApiParameters.checksum), not for security purposes.
        """
        if self._values['checksum']:
            return self._values['checksum']
        if self.records_src is None:
            return None
        result = hashlib.sha1()
        records = self.records_src
        # Hash in 4 KB chunks so large record files are not read at once.
        while True:
            data = records.read(4096)
            if not data:
                break
            result.update(data.encode('utf-8'))
        result = result.hexdigest()
        self._values['checksum'] = result
        return result
    @property
    def external_file_name(self):
        # Defaults to the data group name; validated against the allowed charset.
        if self._values['external_file_name'] is None:
            name = self.name
        else:
            name = self._values['external_file_name']
        if re.search(r'[^a-zA-Z0-9-_.]', name):
            raise F5ModuleError(
                "'external_file_name' may only contain letters, numbers, underscores, dashes, or a period."
            )
        return name
    @property
    def records(self):
        # Decode the normalized records_src buffer into API-shaped dicts.
        results = []
        if self.records_src is None:
            return None
        decoder = RecordsDecoder(record_type=self.type, separator=self.separator)
        for record in self.records_src:
            result = decoder.decode(record)
            if result:
                results.append(result)
        return results
    @property
    def records_list(self):
        # Only meaningful when the user supplied the 'records' option directly.
        if self._values['records'] is None:
            return None
        return self.records
class Changes(Parameters):
    """Holds the set of values that changed during module execution."""
    def to_return(self):
        """Return the returnable changed values as a filtered dict.

        The broad except is a deliberate best-effort: if any returnable
        property raises, a partially-built (possibly empty) dict is returned
        rather than failing the module's result reporting.
        """
        result = {}
        try:
            for returnable in self.returnables:
                result[returnable] = getattr(self, returnable)
            result = self._filter_params(result)
        except Exception:
            pass
        return result
class UsableChanges(Changes):
    # Changes in the shape sent to the API; no transformation needed here.
    pass
class ReportableChanges(Changes):
    # Changes in the shape reported back to the user; no transformation needed.
    pass
class Difference(object):
    """Compute per-parameter differences between desired (want) and current (have)."""
    def __init__(self, want, have=None):
        self.want = want
        self.have = have
    def compare(self, param):
        # Use a specialized property when one exists; otherwise fall back to
        # the generic inequality comparison.
        try:
            result = getattr(self, param)
            return result
        except AttributeError:
            return self.__default(param)
    def __default(self, param):
        attr1 = getattr(self.want, param)
        try:
            attr2 = getattr(self.have, param)
            if attr1 != attr2:
                return attr1
        except AttributeError:
            # 'have' lacks the attribute entirely; treat the wanted value as a change.
            return attr1
    @property
    def records(self):
        # External data groups are compared by their checksum, not their records. This
        # is because the BIG-IP does not store the actual records in the API. It instead
        # stores the checksum of the file. External DGs have the possibility of being huge
        # and we would never want to do a comparison of such huge files.
        #
        # Therefore, comparison is no-op if the DG being worked with is an external DG.
        if self.want.internal is False:
            return None
        if self.have.records is None and self.want.records == []:
            return None
        if self.have.records is None:
            return self.want.records
        result = compare_complex_list(self.want.records, self.have.records)
        return result
    @property
    def type(self):
        # The data group type can never be changed after creation.
        return None
    @property
    def checksum(self):
        # Checksums only matter for external data groups (see 'records' above).
        if self.want.internal:
            return None
        if self.want.checksum is None:
            return None
        if self.want.checksum != self.have.checksum:
            return True
    @property
    def description(self):
        return cmp_str_with_none(self.want.description, self.have.description)
class BaseManager(object):
    """Shared present/absent state machine for internal and external data groups.

    Subclasses implement the device-specific operations (exists, create_on_device,
    update_on_device, remove_from_device, read_current_from_device).
    """
    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = kwargs.get('client', None)
        self.want = ModuleParameters(params=self.module.params)
        self.have = ApiParameters()
        self.changes = UsableChanges()
    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False
    def exec_module(self):
        """Entry point: apply the desired state and return the result dict."""
        changed = False
        result = dict()
        state = self.want.state
        if state == "present":
            changed = self.present()
        elif state == "absent":
            changed = self.absent()
        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        return result
    def _announce_deprecations(self, result):
        # Surface any deprecation warnings queued under '__warnings'.
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.client.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )
    def _set_changed_options(self):
        # On create: every non-None wanted returnable counts as a change.
        changed = {}
        for key in ApiParameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = UsableChanges(params=changed)
    def _update_changed_options(self):
        # On update: diff want vs have per updatable parameter.
        diff = Difference(self.want, self.have)
        updatables = ApiParameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False
    def present(self):
        if self.exists():
            return self.update()
        else:
            return self.create()
    def absent(self):
        if self.exists():
            return self.remove()
        return False
    def remove(self):
        # In check mode, report the removal without touching the device.
        if self.module.check_mode:
            return True
        self.remove_from_device()
        if self.exists():
            raise F5ModuleError("Failed to delete the resource.")
        return True
class InternalManager(BaseManager):
    """Manage 'internal' data groups, whose records live in the BIG-IP config."""
    def create(self):
        self._set_changed_options()
        # Internal data groups are capped (see LINE_LIMIT / SIZE_LIMIT_BYTES).
        if size_exceeded(self.want.records_src) or lines_exceeded(self.want.records_src):
            raise F5ModuleError(
                "The size of the provided data (or file) is too large for an internal data group."
            )
        if self.module.check_mode:
            return True
        self.create_on_device()
        return True
    def update(self):
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            return True
        self.update_on_device()
        return True
    def exists(self):
        """Return True when the internal data group exists on the device."""
        uri = "https://{0}:{1}/mgmt/tm/ltm/data-group/internal/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError:
            # Non-JSON response; treat as "not found".
            return False
        if resp.status == 404 or 'code' in response and response['code'] == 404:
            return False
        return True
    def create_on_device(self):
        """POST the new data group to the device; raise F5ModuleError on API errors."""
        params = self.changes.api_params()
        params['name'] = self.want.name
        params['partition'] = self.want.partition
        uri = "https://{0}:{1}/mgmt/tm/ltm/data-group/internal/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] in [400, 403, 409]:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
    def update_on_device(self):
        """PATCH the changed attributes onto the existing data group."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/ltm/data-group/internal/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
    def remove_from_device(self):
        """DELETE the data group; returns True on HTTP 200, None otherwise."""
        uri = "https://{0}:{1}/mgmt/tm/ltm/data-group/internal/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.delete(uri)
        if resp.status == 200:
            return True
    def read_current_from_device(self):
        """GET the current data group state and wrap it in ApiParameters."""
        uri = "https://{0}:{1}/mgmt/tm/ltm/data-group/internal/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return ApiParameters(params=response)
class ExternalManager(BaseManager):
    def absent(self):
        """Remove the data group and, if requested, its backing file on the device."""
        result = False
        if self.exists():
            result = self.remove()
        if self.external_file_exists() and self.want.delete_data_group_file:
            result = self.remove_data_group_file_from_device()
        return result
    def create(self):
        """Create the external data group; empty record sets are rejected up front."""
        if zero_length(self.want.records_src):
            raise F5ModuleError(
                "An external data group cannot be empty."
            )
        self._set_changed_options()
        if self.module.check_mode:
            return True
        self.create_on_device()
        return True
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.changes.records_src and zero_length(self.want.records_src):
raise F5ModuleError(
"An external data group cannot be empty."
)
if self.module.check_mode:
return True
self.update_on_device()
return True
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/data-group/external/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def external_file_exists(self):
uri = "https://{0}:{1}/mgmt/tm/sys/file/data-group/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.external_file_name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def upload_file_to_device(self, content, name):
url = 'https://{0}:{1}/mgmt/shared/file-transfer/uploads'.format(
self.client.provider['server'],
self.client.provider['server_port']
)
try:
upload_file(self.client, url, content, name)
except F5ModuleError:
raise F5ModuleError(
"Failed to upload the file."
)
def _upload_to_file(self, name, type, remote_path, update=False):
self.upload_file_to_device(self.want.records_src, name)
if update:
uri = "https://{0}:{1}/mgmt/tm/sys/file/data-group/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, name)
)
params = {'sourcePath': 'file:{0}'.format(remote_path)}
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
else:
uri = "https://{0}:{1}/mgmt/tm/sys/file/data-group/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
params = dict(
name=name,
type=type,
sourcePath='file:{0}'.format(remote_path)
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return response['name']
def remove_file_on_device(self, remote_path):
uri = "https://{0}:{1}/mgmt/tm/util/unix-rm/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
args = dict(
command='run',
utilCmdArgs=remote_path
)
resp = self.client.api.post(uri, json=args)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def create_on_device(self):
name = self.want.external_file_name
remote_path = '/var/config/rest/downloads/{0}'.format(name)
external_file = self._upload_to_file(name, self.want.type, remote_path, update=False)
params = dict(
name=self.want.name,
partition=self.want.partition,
externalFileName=external_file,
)
if self.want.description:
params['description'] = self.want.description
uri = "https://{0}:{1}/mgmt/tm/ltm/data-group/external/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
self.remove_file_on_device(remote_path)
def update_on_device(self):
params = {}
if self.want.records_src is not None:
name = self.want.external_file_name
remote_path = '/var/config/rest/downloads/{0}'.format(name)
external_file = self._upload_to_file(name, self.have.type, remote_path, update=True)
params['externalFileName'] = external_file
if self.changes.description is not None:
params['description'] = self.changes.description
if not params:
return
uri = "https://{0}:{1}/mgmt/tm/ltm/data-group/external/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/data-group/external/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.delete(uri)
# Remove the remote data group file if asked to
if self.want.delete_data_group_file:
self.remove_data_group_file_from_device()
if resp.status == 200:
return True
def remove_data_group_file_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/sys/file/data-group/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.external_file_name)
)
resp = self.client.api.delete(uri)
if resp.status == 200:
return True
else:
return False
def read_current_from_device(self):
"""Reads the current configuration from the device
For an external data group, we are interested in two things from the
current configuration
* ``checksum``
* ``type``
The ``checksum`` will allow us to compare the data group value we have
with the data group value being provided.
The ``type`` will allow us to do validation on the data group value being
provided (if any).
Returns:
ExternalApiParameters: Attributes of the remote resource.
"""
uri = "https://{0}:{1}/mgmt/tm/ltm/data-group/external/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp_dg = self.client.api.get(uri)
try:
response_dg = resp_dg.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response_dg and response_dg['code'] == 400:
if 'message' in response_dg:
raise F5ModuleError(response_dg['message'])
else:
raise F5ModuleError(resp_dg.content)
external_file = os.path.basename(response_dg['externalFileName'])
external_file_partition = os.path.dirname(response_dg['externalFileName']).strip('/')
uri = "https://{0}:{1}/mgmt/tm/sys/file/data-group/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(external_file_partition, external_file)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
result = ApiParameters(params=response)
result.update({'description': response_dg.get('description', None)})
return result
class ModuleManager(object):
    """Facade that routes module execution to the internal or external manager."""
    def __init__(self, *args, **kwargs):
        self.kwargs = kwargs
        self.module = kwargs.get('module')
        self.client = kwargs.get('client', None)
    def exec_module(self):
        """Select the manager matching the ``internal`` option and run it."""
        kind = 'internal' if self.module.params['internal'] else 'external'
        return self.get_manager(kind).exec_module()
    def get_manager(self, type):
        """Instantiate the concrete manager for ``type`` ('internal'/'external')."""
        if type == 'internal':
            return InternalManager(**self.kwargs)
        elif type == 'external':
            return ExternalManager(**self.kwargs)
class ArgumentSpec(object):
    """Builds the AnsibleModule argument specification for this module."""
    def __init__(self):
        self.supports_check_mode = True
        argument_spec = dict(
            name=dict(required=True),
            type=dict(
                choices=['address', 'addr', 'ip', 'string', 'str', 'integer', 'int'],
                default='string'
            ),
            delete_data_group_file=dict(type='bool'),
            internal=dict(type='bool', default='no'),
            records=dict(
                type='list',
                suboptions=dict(
                    key=dict(required=True),
                    value=dict(type='raw')
                )
            ),
            records_src=dict(type='path'),
            external_file_name=dict(),
            separator=dict(default=':='),
            description=dict(),
            state=dict(choices=['absent', 'present'], default='present'),
            partition=dict(
                default='Common',
                fallback=(env_fallback, ['F5_PARTITION'])
            )
        )
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(argument_spec)
        # BUG FIX: the exclusion list previously named 'records_content', which
        # is not a declared option; the declared option is 'records_src'. With
        # the wrong name, 'records' and 'records_src' could be supplied together.
        self.mutually_exclusive = [
            ['records', 'records_src', 'external_file_name']
        ]
def main():
    """Module entry point: build the spec, run the manager, emit the result."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode
    )
    client = F5RestClient(**module.params)
    try:
        results = ModuleManager(module=module, client=client).exec_module()
        cleanup_tokens(client)
        exit_json(module, results, client)
    except F5ModuleError as ex:
        # Always release API tokens, even on failure.
        cleanup_tokens(client)
        fail_json(module, ex, client)
if __name__ == '__main__':
    main()
| gpl-3.0 |
oczkers/fut | docs/core.py | 2 | 8519 | # -*- coding: utf-8 -*-
"""
futmarket.core
~~~~~~~~~~~~~~~~~~~~~
This module implements the futmarket's basic methods.
"""
# Imports
## Relies heavily on fut package.
import fut
import pandas as pd
from time import sleep
# Login
## Login to EA Sports. May require two-factor authentication. You will be prompted for code, which is likely in email inbox.
def login():
    """Interactively collect EA credentials and open a fut session.

    Rebinds the module-level ``fut`` name to the logged-in ``fut.Core``
    instance, so later helpers can call its API directly.
    """
    global fut
    answers = []
    for prompt in ('Email: ', 'Password: ', 'Secret: ',
                   'platform: [pc/ps3/ps4/xbox/xbox360] '):
        print(prompt)
        answers.append(raw_input())
    print('Loading...')
    fut = fut.Core(*answers)
    print('You have logged in successfully.')
# Keepailve
## Run this every ~10 mins so the program continues to run
def keepalive():
    """Ping the EA session so it stays alive; call roughly every 10 minutes.

    Stores the returned coin balance in the module-level ``coins`` and
    also returns it.
    """
    global coins
    coins = fut.keepalive()
    return(coins)
# Sold
## Clean up tradepile of those who sold
def sold():
    """Clear sold items from the tradepile and report the proceeds.

    For each closed trade: record the player and winning bid, print a
    per-sale message, and delete the trade from the pile.

    Returns:
        A summary string with the number of players sold and total coins.
    """
    # Iterate items directly instead of range(len(...)); also avoid
    # shadowing this function's own name with a local list.
    sold_names = []
    bids = []
    for item in fut.tradepile():
        if item['tradeState'] != 'closed':
            continue
        player = fut.players[item['assetId']]
        sold_names.append(player['lastname'])
        bids.append(item['currentBid'])
        print('Sold %s %s for %s coins' % (player['firstname'], player['lastname'], item['currentBid']))
        fut.tradepileDelete(item['tradeId'])
    return('Sold %s players for %s coins' % (len(sold_names), sum(bids)))
# Not Sold
## Clean up tradepile of those that did not sell
def not_sold():
    """Move unsold (expired or never-listed) tradepile items back to the club."""
    # Iterate items directly instead of range(len(...)).
    for item in fut.tradepile():
        if item['tradeState'] in ('expired', None):
            player = fut.players[item['assetId']]
            print('Did not sell %s %s. Moved back to team.' % (player['firstname'], player['lastname']))
            fut.sendToClub(item['id'])
# Active
## Gets active trades in tradepile
def active():
    """Print active trades and collect their asset ids.

    Stores the asset ids of currently listed items in the module-level
    ``active_players`` list (consumed by list_players()).
    """
    global active_players
    active_players = []
    # Iterate items directly instead of range(len(...)).
    for item in fut.tradepile():
        if item['tradeState'] != 'active':
            continue
        player = fut.players[item['assetId']]
        active_players.append(item['assetId'])
        print("""Actively selling %s %s. Expires in %s minutes. %s bids so far and a current price of %s.""" %
              (player['firstname'], player['lastname'],
               int(round(item['expires']/60)), item['offers'], item['currentBid']))
# My Team
## Get names and attributes of team members, including last sale price
def my_team():
    """Build a DataFrame of the club's tradeable players.

    Cleans up the tradepile first (sold()/not_sold()), then collects each
    tradeable club member's name, position, rating, contract and a
    "my_value" (the larger of last sale price and discard value).

    Stores the DataFrame in the module-level ``team`` and returns it.
    """
    sold()
    not_sold()
    myclub = fut.club()
    my_auction = pd.DataFrame(myclub)
    # Only tradeable cards can be listed on the market.
    my_auction = my_auction[my_auction['untradeable'] == False]
    assetIds = my_auction['assetId'].tolist()
    ids = my_auction['id'].tolist()
    firstnames = []
    lastnames = []
    for i in assetIds:
        firstnames.append(fut.players[i]['firstname'])
        lastnames.append(fut.players[i]['lastname'])
    players = [i + ' ' + j for i, j in zip(firstnames, lastnames)]
    position = my_auction['position'].tolist()
    rating = my_auction['rating'].tolist()
    contract = my_auction['contract'].tolist()
    lastSalePrice = my_auction['lastSalePrice'].tolist()
    discardValue = my_auction['discardValue'].tolist()
    # A card is worth at least its quick-sell (discard) value to us.
    my_values = [max(lastSalePrice, discardValue) for lastSalePrice, discardValue in zip(lastSalePrice, discardValue)]
    global team
    team = pd.DataFrame(
        {'assetId': assetIds,
         'id': ids,
         'name': players,
         'position': position,
         'rating': rating,
         'contract': contract,
         'my_value': my_values
        }
    )
    return(team)
# Min value function
def mins(items, n):
    """Return the n smallest elements of items, in ascending order.

    If items has fewer than n elements, all of them are returned (sorted).
    Unlike the previous incremental implementation, n <= 0 now yields an
    empty list instead of raising IndexError.
    """
    return sorted(items)[:n]
# Median value function
def median(lst):
    """Return the median of lst, or None for an empty sequence.

    For an even number of elements the mean of the two middle values is
    returned (always a float); for an odd count, the middle element itself.
    """
    if not lst:
        return None
    ordered = sorted(lst)
    mid = len(ordered) // 2
    if len(ordered) % 2 == 1:
        return ordered[mid]
    return (ordered[mid - 1] + ordered[mid]) / 2.0
# My Market
## Get market for my tradeable players. Constrain by page depth (time) and strategy option.
def my_market(depth=1, strategy=1):
    """Look up market values for the tradeable team and merge them into ``team_market``.

    depth    -- number of 50-item search pages to scan per player
    strategy -- pricing strategy selector (only 1 is implemented)

    NOTE(review): all the market work sits in the ``else`` branch, so on the
    first call (when ``team`` is not yet defined) this only builds the team
    and computes no market values -- confirm that is intended.
    """
    # See if team exists yet as a variable
    try:
        team
    except NameError:
        my_team()
    else:
        mkt_value = []
        # Loop through each team member to get market values
        for i in range(0, len(team)):
            print('Getting market value for: %s' % (team['name'][i]))
            mkt_values = []
            for page in range(0, depth):
                for d in fut.search(ctype='player', assetId=str(team['assetId'][i]), page_size='50', start=page):
                    mkt_values.append({'buy': d['buyNowPrice'], 'tradeId': d['tradeId']})
            if strategy == 1:
                # Gets median of min 5 market values.
                # NOTE(review): mkt_values holds dicts, so mins()/median() rely
                # on dict ordering (Python 2 only), and the appended value is a
                # number while the print below indexes mkt_value[i]['buy'] --
                # this looks broken; verify before relying on the output.
                mkt_value.append(median(mins(mkt_values, 5)))
                print('Checked %s players. Market value of %s coins added for %s' % (depth*50, mkt_value[i]['buy'], team['name'][i]))
            if strategy == 2:
                # New strategy here
                ###
                print('Checked %s players. Market value of %s coins added for %s' % (depth*50, mkt_value[i]['buy'], team['name'][i]))
            if strategy == 3:
                # New strategy here
                ###
                print('Checked %s players. Market value of %s coins added for %s' % (depth*50, mkt_value[i]['buy'], team['name'][i]))
        # Create a dataframe of market values that merges with team members
        mkt_value = pd.Series(mkt_value).values
        sell = []
        for i in mkt_value:
            sell.append(i['buy'])
        market = pd.DataFrame(
            {'mkt_value': sell}
        )
        global team_market
        team_market = pd.merge(team, market, left_index=True, right_index=True)
# List
## Put players on the block, send to tradepile and sell
def list_players(min_value=300, strategy=1):
    """Quick-sell cheap cards and list the rest on the transfer market.

    min_value -- cards valued at or below this are quick-sold
    strategy  -- listing strategy (1: bid at my_value, buy-now at mkt_value)

    Side effects: refreshes ``team_market``, cleans the tradepile, and sets
    the module-level ``block``/``quicksell`` DataFrames.
    """
    my_market()
    print('Cleaning up tradepile...')
    sold()
    not_sold()
    active()
    tradepile = fut.tradepile()
    # Get players on the block
    global block
    block = team_market[team_market['my_value']>min_value]
    print('%s players on the block with a value to you of %s coins and a market value of %s coins' % (len(block), block['my_value'].sum(), block['mkt_value'].sum()))
    global quicksell
    quicksell = team_market[team_market['my_value']<=min_value]
    if len(quicksell) == 0:
        print('No items to quicksell.')
    else:
        for index, row in quicksell.iterrows():
            fut.quickSell(row['id'])
        print('Quick sold %s items for %s coins' % (len(quicksell), quicksell['my_value'].sum()))
    # Get available space and send players from block to tradepile
    available_space = fut.pileSize().get('tradepile') - len(tradepile)
    block = block.head(n=available_space)
    # Skip cards that are already actively listed (collected by active()).
    # NOTE(review): dropping rows from `block` while iterating it may be
    # fragile with pandas -- confirm behaviour.
    for index, row in block.iterrows():
        if row['assetId'] in active_players:
            block.drop(index, inplace=True)
    if len(block) == 0:
        print('No players to list on market.')
    else:
        print('%s players can be added to tradepile.' % (len(block)))
        for index, row in block.iterrows():
            fut.sendToTradepile(row['id'])
            sleep(2)
            print('%s added to tradepile.' % (row['name']))
        print('%s players successfully added to tradepile.' % (len(block)))
    # List players on market
    # Strategy 1: List at my value, buy now at market value.
    if strategy==1:
        for index, row in block.iterrows():
            if row['mkt_value'] > row['my_value']:
                fut.sell(item_id = row['id'], bid = row['my_value'], buy_now = row['mkt_value'], duration = 3600)
                print('%s has been listed on the market for %s coins to buy now and %s coins starting bid' % (row['name'], row['mkt_value'], row['my_value']))
            if row['mkt_value'] <= row['my_value']:
                print('The market value for %s is below or equal to what you originally paid. Moving this card back to team for now.' % (row['name']))
                fut.sendToClub(row['id'])
                sleep(10)
    print('All tradeable players have been listed on the market. Check back in an hour to see if they sold')
| gpl-3.0 |
bzbarsky/servo | tests/wpt/css-tests/tools/py/testing/path/test_cacheutil.py | 163 | 2949 | import py
from py._path import cacheutil
class BasicCacheAPITest:
    """Shared assertions exercised against each concrete cache implementation.

    Subclasses set ``cache`` to a cacheutil instance.
    """
    cache = None
    def test_getorbuild(self):
        assert self.cache.getorbuild(-42, lambda: 42) == 42
        # A cached key must not invoke the builder again.
        assert self.cache.getorbuild(-42, lambda: 23) == 42
    def test_cache_get_key_error(self):
        py.test.raises(KeyError, self.cache._getentry, -23)
    def test_delentry_non_raising(self):
        self.cache.getorbuild(100, lambda: 100)
        self.cache.delentry(100)
        py.test.raises(KeyError, self.cache._getentry, 100)
    def test_delentry_raising(self):
        self.cache.getorbuild(100, lambda: 100)
        self.cache.delentry(100)
        py.test.raises(KeyError, self.cache.delentry, 100, raising=True)
    def test_clear(self):
        self.cache.clear()
class TestBuildcostAccess(BasicCacheAPITest):
    """Runs the shared cache API tests against BuildcostAccessCache."""
    cache = cacheutil.BuildcostAccessCache(maxentries=128)
    def test_cache_works_somewhat_simple(self, monkeypatch):
        """Fill the cache, then verify eviction spares recently used entries."""
        cache = cacheutil.BuildcostAccessCache()
        # the default gettime
        # BuildcostAccessCache.build can
        # result into time()-time() == 0 which makes the below
        # test fail randomly. Let's rather use incrementing
        # numbers instead.
        l = [0]
        def counter():
            # Monotonic fake clock: each call returns the next integer.
            l[0] = l[0] + 1
            return l[0]
        monkeypatch.setattr(cacheutil, 'gettime', counter)
        for x in range(cache.maxentries):
            y = cache.getorbuild(x, lambda: x)
            assert x == y
        # Passing None as builder proves the value comes from the cache.
        for x in range(cache.maxentries):
            assert cache.getorbuild(x, None) == x
        halfentries = int(cache.maxentries / 2)
        # Touch the first half twice so they rank as recently/frequently used.
        for x in range(halfentries):
            assert cache.getorbuild(x, None) == x
            assert cache.getorbuild(x, None) == x
        # evict one entry
        val = cache.getorbuild(-1, lambda: 42)
        assert val == 42
        # check that recently used ones are still there
        # and are not build again
        for x in range(halfentries):
            assert cache.getorbuild(x, None) == x
        assert cache.getorbuild(-1, None) == 42
class TestAging(BasicCacheAPITest):
    """Runs the shared cache API tests against AgingCache."""
    # Entries older than maxsecs should be evicted.
    maxsecs = 0.10
    cache = cacheutil.AgingCache(maxentries=128, maxseconds=maxsecs)
    def test_cache_eviction(self):
        """Poll until the aged entry disappears; fail if it never does."""
        self.cache.getorbuild(17, lambda: 17)
        # Allow up to 10x the age limit for the eviction to happen.
        endtime = py.std.time.time() + self.maxsecs * 10
        while py.std.time.time() < endtime:
            try:
                self.cache._getentry(17)
            except KeyError:
                # Entry was evicted -- success.
                break
            py.std.time.sleep(self.maxsecs*0.3)
        else:
            # while/else: the loop timed out without a KeyError.
            py.test.fail("waiting for cache eviction failed")
def test_prune_lowestweight():
    """After all entries age past maxseconds, a new insert must still succeed."""
    maxsecs = 0.05
    cache = cacheutil.AgingCache(maxentries=10, maxseconds=maxsecs)
    for x in range(cache.maxentries):
        cache.getorbuild(x, lambda: x)
    # Let every entry expire, then trigger pruning via a fresh insert.
    py.std.time.sleep(maxsecs*1.1)
    cache.getorbuild(cache.maxentries+1, lambda: 42)
| mpl-2.0 |
glatard/nipype | nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py | 9 | 1497 | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.fsl.maths import IsotropicSmooth
def test_IsotropicSmooth_inputs():
    """Auto-generated check: IsotropicSmooth's input spec traits match this map."""
    input_map = dict(args=dict(argstr='%s',
    ),
    environ=dict(nohash=True,
    usedefault=True,
    ),
    fwhm=dict(argstr='-s %.5f',
    mandatory=True,
    position=4,
    xor=['sigma'],
    ),
    ignore_exception=dict(nohash=True,
    usedefault=True,
    ),
    in_file=dict(argstr='%s',
    mandatory=True,
    position=2,
    ),
    internal_datatype=dict(argstr='-dt %s',
    position=1,
    ),
    nan2zeros=dict(argstr='-nan',
    position=3,
    ),
    out_file=dict(argstr='%s',
    genfile=True,
    hash_files=False,
    position=-2,
    ),
    output_datatype=dict(argstr='-odt %s',
    position=-1,
    ),
    output_type=dict(),
    sigma=dict(argstr='-s %.5f',
    mandatory=True,
    position=4,
    xor=['fwhm'],
    ),
    terminal_output=dict(nohash=True,
    ),
    )
    inputs = IsotropicSmooth.input_spec()
    # Compare every trait's metadata against the expected map, one yield per key.
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_IsotropicSmooth_outputs():
    """Auto-generated check: IsotropicSmooth's output spec traits match this map."""
    output_map = dict(out_file=dict(),
    )
    outputs = IsotropicSmooth.output_spec()
    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
| bsd-3-clause |
drawks/ansible | test/units/modules/network/vyos/test_vyos_facts.py | 15 | 3761 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from units.compat.mock import patch
from ansible.modules.network.vyos import vyos_facts
from units.modules.utils import set_module_args
from .vyos_module import TestVyosModule, load_fixture
class TestVyosFactsModule(TestVyosModule):
    """Tests for vyos_facts gather_subset handling against mocked device I/O."""
    module = vyos_facts
    def setUp(self):
        super(TestVyosFactsModule, self).setUp()
        # Stub out the transport: commands are served from fixture files.
        self.mock_run_commands = patch('ansible.modules.network.vyos.vyos_facts.run_commands')
        self.run_commands = self.mock_run_commands.start()
        self.mock_get_capabilities = patch('ansible.modules.network.vyos.vyos_facts.get_capabilities')
        self.get_capabilities = self.mock_get_capabilities.start()
        self.get_capabilities.return_value = {
            'device_info': {
                'network_os': 'vyos',
                'network_os_hostname': 'vyos01',
                'network_os_model': 'VMware',
                'network_os_version': 'VyOS 1.1.7'
            },
            'network_api': 'cliconf'
        }
    def tearDown(self):
        super(TestVyosFactsModule, self).tearDown()
        self.mock_run_commands.stop()
        self.mock_get_capabilities.stop()
    def load_fixtures(self, commands=None):
        def load_from_file(*args, **kwargs):
            # Map each requested command to a fixture file named after it.
            module, commands = args
            output = list()
            for item in commands:
                try:
                    obj = json.loads(item)
                    command = obj['command']
                except ValueError:
                    command = item
                filename = str(command).replace(' ', '_')
                output.append(load_fixture(filename))
            return output
        self.run_commands.side_effect = load_from_file
    def _check_default_facts(self):
        """Run the module and verify the facts common to every subset test."""
        result = self.execute_module()
        facts = result.get('ansible_facts')
        self.assertEqual(len(facts), 8)
        self.assertEqual(facts['ansible_net_hostname'].strip(), 'vyos01')
        self.assertEqual(facts['ansible_net_version'], 'VyOS 1.1.7')
    def test_vyos_facts_default(self):
        set_module_args(dict(gather_subset='default'))
        self._check_default_facts()
    def test_vyos_facts_not_all(self):
        set_module_args(dict(gather_subset='!all'))
        self._check_default_facts()
    def test_vyos_facts_exclude_most(self):
        set_module_args(dict(gather_subset=['!neighbors', '!config']))
        self._check_default_facts()
    def test_vyos_facts_invalid_subset(self):
        # An unknown subset name must make the module fail.
        set_module_args(dict(gather_subset='cereal'))
        result = self.execute_module(failed=True)
| gpl-3.0 |
czcorpus/kontext | lib/kwiclib.py | 1 | 24384 | # Copyright (c) 2003-2009 Pavel Rychly
# Copyright (c) 2014 Institute of the Czech National Corpus
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# dated June, 1991.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
from typing import Any, List, Mapping, Dict, Tuple, Union
from collections import defaultdict
import re
import itertools
import math
import manatee
from structures import FixedDict
from conclib.empty import InitialConc
from kwiclib_common import tokens2strclass
from corplib.corpus import KCorpus
# Sort criterion: list of (kind, value) pairs where kind is 'n' (numeric),
# 'c' (character) or 'x' (placeholder for an empty label).
SortCritType = List[Tuple[str, Union[str, int]]]
# Rendered label map: [{'Items': [{'n': <number>, 'lab': <label>}, ...]}, ...]
LabelMapType = List[Dict[str, List[Dict[str, Union[str, int]]]]]
def lngrp_sortcrit(lab: str, separator: str = '.') -> SortCritType:
    """Build a sort criterion for a line-group label.

    Splits `lab` on `separator` (first 3 separators only) and tags each
    component: ('n', int) for a purely numeric part, ('c', str) otherwise.
    An empty label yields the placeholder [('x', 'x')].
    """
    # Compile once per call instead of once per component (was inside num2sort).
    digits = re.compile('[0-9]+$')
    def num2sort(n: str) -> Tuple[str, Union[str, int]]:
        if digits.match(n):
            return 'n', int(n)
        return 'c', n
    if not lab:
        return [('x', 'x')]
    return list(map(num2sort, lab.split(separator, 3)))
def format_labelmap(labelmap: Mapping[str, str], separator: str = '.') -> LabelMapType:
    """Group label-map entries by the first component of their label.

    Entries with empty labels are skipped. Groups (and entries within a
    group) are ordered by lngrp_sortcrit().
    """
    # TODO analyze purpose of this function (it seems to be not used)
    buckets: Dict[str, List[Tuple[SortCritType, str, str]]] = defaultdict(list)
    for num, label in labelmap.items():
        if not label:
            continue
        prefix = label.split(separator)[0]
        buckets[prefix].append((lngrp_sortcrit(label), label, num))
    out: LabelMapType = []
    for _, prefix in sorted((lngrp_sortcrit(p), p) for p in buckets):
        entries = sorted(buckets[prefix])
        out.append(
            {'Items': [{'n': num, 'lab': lab} for _, lab, num in entries]})
    return out
class EmptyKWiclines:
    """Null-object stand-in for a KWIC line iterator with no lines."""
    def nextline(self):
        # Always reports that there is no further line to read.
        return False
class Pagination(object):
    """Paging state for a concordance listing (1-based page numbers).

    prev_page/next_page stay None at the respective edges of the listing.
    """
    first_page = 1
    prev_page = None
    next_page = None
    last_page = None
    def export(self):
        """Serialize the paging state using client-side camelCase keys."""
        return {
            'firstPage': self.first_page,
            'prevPage': self.prev_page,
            'nextPage': self.next_page,
            'lastPage': self.last_page,
        }
class KwicPageData(FixedDict):
    """
    Defines data required to render a KWIC page
    """
    # concordance lines for the primary corpus
    Lines = None
    # formatted line-group labels (see format_labelmap)
    GroupNumbers = None
    # current page number (1-based)
    fromp = None
    # list of selectable page numbers (only for small page counts)
    Page = None
    # exported Pagination dict
    pagination = None
    # total number of lines in the concordance
    concsize = None
    # average reduced frequency ('' for subcorpora)
    result_arf = None
    # instances per million tokens
    result_relative_freq = None
    # names of corpora contributing KWIC lines
    KWICCorps = ()
    # column descriptors for aligned-corpora rendering
    CorporaColumns = ()
class KwicLinesArgs(object):
    """Argument bundle consumed by Kwic.kwiclines().

    See KwicPageArgs for the meaning of the individual attributes.
    """
    speech_segment = None
    fromline = None
    toline = None
    leftctx = '-5'
    rightctx = '5'
    attrs = 'word'
    ctxattrs = 'word'
    refs = '#'
    user_structs = 'p'
    labelmap: Dict[str, str] = {}
    righttoleft = False
    alignlist = ()
    attr_vmode = 'visible-kwic'
    base_attr = 'word'
    def copy(self, **kw):
        """Return a new instance carrying over this instance's attributes,
        with any keyword arguments applied as overrides."""
        clone = KwicLinesArgs()
        merged = dict(self.__dict__)
        merged.update(kw)
        for name, value in merged.items():
            setattr(clone, name, value)
        return clone
class KwicPageArgs(object):
    """Configuration for rendering one KWIC page (see kwicpage()).

    Values passed via `argmapping` are coerced to the type of the matching
    class-level default (see _import_val).
    """
    # 2-tuple sets a name of a speech attribute and structure (struct, attr) or None if speech is not present
    speech_attr = None
    # page number (starts from 1)
    fromp = 1
    # first line of the listing (starts from 0)
    line_offset = 0
    # how many characters/positions/whatever_struct_attrs display on the left side; Use 'str' type!
    leftctx = '-5'
    # how many characters/positions/whatever_struct_attrs display on the right side; Use 'str' type!
    rightctx = '5'
    # positional attributes to be displayed for KWIC (word, lemma, tag,...)
    attrs = 'word'
    # positional attributes to be displayed for non-KWIC tokens (word, lemma, tag)
    ctxattrs = 'word'
    # references (text type information derived from structural attributes) to be displayed
    refs = '#'
    # structures to be displayed
    structs = 'p'
    # number of lines per page
    pagesize = 40
    # presumably maps line-group ids to labels -- TODO confirm
    labelmap: Dict[str, str] = {}
    # whether the text flows from right to left
    righttoleft = False
    # aligned-corpora objects to show next to the primary corpus -- TODO better type
    alignlist: List[Any] = []
    # whether display ===EMPTY=== or '' in case a value is empty
    hidenone = 0
    # determine whether the non-word attributes should be rendered directly or as a meta-data
    attr_vmode = 'visible-kwic'
    def __init__(self, argmapping: Dict[str, Any], base_attr: str):
        # Only keys matching a known attribute are imported; values are
        # coerced to the type of the class-level default.
        for k, v in argmapping.items():
            if hasattr(self, k):
                setattr(self, k, self._import_val(k, v))
        self.base_attr = base_attr
        # NOTE(review): ctxattrs is forced to mirror attrs here, overriding
        # any 'ctxattrs' value provided in argmapping -- confirm intended.
        self.ctxattrs = self.attrs
    def _import_val(self, k, v):
        """Coerce `v` to the type of the class-level default for attribute `k`."""
        t = type(getattr(self, k))
        if t is int:
            return int(v)
        elif t is float:
            return float(v)
        elif t is str:
            # None becomes an empty string for string-typed attributes.
            return v if v is not None else ''
        else:
            return v
    def to_dict(self):
        """Return the instance attributes as a plain dict."""
        return self.__dict__
    def calc_fromline(self):
        """Index (0-based) of the first line on the current page."""
        return (self.fromp - 1) * self.pagesize + self.line_offset
    def calc_toline(self):
        """Index (0-based, exclusive) one past the last line on the current page."""
        return self.fromp * self.pagesize + self.line_offset
    def create_kwicline_args(self, **kw):
        """Derive a KwicLinesArgs instance from this page configuration.

        Keyword arguments override the derived attribute values.
        """
        ans = KwicLinesArgs()
        ans.speech_segment = self.speech_attr
        ans.fromline = self.calc_fromline()
        ans.toline = self.calc_toline()
        ans.leftctx = self.leftctx
        ans.rightctx = self.rightctx
        ans.attrs = self.attrs
        ans.ctxattrs = self.ctxattrs
        ans.refs = self.refs
        ans.structs = self.structs
        ans.labelmap = self.labelmap
        ans.righttoleft = self.righttoleft
        ans.alignlist = self.alignlist
        ans.attr_vmode = self.attr_vmode
        for k, v in list(kw.items()):
            setattr(ans, k, v)
        return ans
class Kwic:
"""
KWIC related data preparation utilities
arguments:
corpus --
corpus_fullname -- full (internal) name of the corpus (e.g. with path prefix if used)
conc -- a manatee.Concordance instance
"""
    def __init__(self, corpus: KCorpus, corpus_fullname, conc):
        # corpus: KCorpus wrapper the concordance belongs to
        # corpus_fullname: full (internal) corpus name (may include a path prefix)
        # conc: manatee.Concordance (or InitialConc) instance to present
        self.corpus = corpus
        self.corpus_fullname = corpus_fullname
        self.conc = conc
def kwicpage(self, args: KwicPageArgs):
"""
Generates template data for page displaying provided concordance
arguments:
args -- a KwicArgs instance
returns:
KwicPageData converted into a dict
"""
args.refs = getattr(args, 'refs', '').replace('.MAP_OUP', '') # to be removed ...
try:
fromp = int(args.fromp)
if fromp < 1:
fromp = 1
except:
fromp = 1
out = KwicPageData()
pagination = Pagination()
pagination.first_page = 1
out.Lines = self.kwiclines(args.create_kwicline_args())
self.add_aligns(out, args.create_kwicline_args(speech_segment=None))
if len(out.CorporaColumns) == 0:
out.CorporaColumns = [dict(n=self.corpus.corpname, label=self.corpus.get_conf('NAME'))]
out.KWICCorps = [self.corpus.corpname]
if args.labelmap:
out.GroupNumbers = format_labelmap(args.labelmap)
if fromp > 1:
pagination.prev_page = fromp - 1
if args.pagesize <= 0:
raise ValueError('pagesize must be > 0')
if self.conc.size() > args.pagesize:
out.fromp = fromp
numofpages = math.ceil(self.conc.size() / args.pagesize) if self.conc.size() > 0 else 1
if numofpages < 30:
out.Page = [{'page': x} for x in range(1, numofpages + 1)]
if fromp < numofpages:
pagination.next_page = fromp + 1
pagination.last_page = numofpages
else:
pagination.last_page = 1
out.concsize = self.conc.size()
if self.corpus.is_subcorpus:
out.result_arf = ''
else:
out.result_arf = round(self.conc.compute_ARF(), 2)
corpsize = self.corpus.search_size
out.result_relative_freq = round(
self.conc.size() / (float(corpsize) / 1e6), 2)
if args.hidenone:
for line, part in itertools.product(out.Lines, ('Kwic', 'Left', 'Right')):
for item in line[part]:
item['str'] = item['str'].replace('===NONE===', '')
out.pagination = pagination.export()
return dict(out)
    def add_aligns(self, result, args):
        """
        Adds lines from aligned corpora. Method modifies the passed
        KwicPageData instance by setting the respective attributes
        (KWICCorps, CorporaColumns and each line's 'Align' entry).

        arguments:
        result -- KwicPageData instance (must already contain Lines)
        args -- a KwicLinesArgs instance
        """
        def create_empty_cell():
            # Placeholder line used to pad missing translations.
            return {'rightsize': 0, 'hitlen': ';hitlen=9', 'Right': [], 'Kwic': [], 'linegroup': '_', 'leftsize': 0,
                    'ref': '', 'rightspace': '', 'leftspace': '', 'kwiclen': 0, 'toknum': None,
                    'Left': []}
        def fix_length(arr, length):
            # Pad `arr` with empty cells up to `length` items.
            return arr + [create_empty_cell() for _ in range(length - len(arr))]
        if not args.alignlist:
            return
        al_lines = []
        corps_with_colls = manatee.StrVector()
        self.conc.get_aligned(corps_with_colls)
        result.KWICCorps = [c for c in corps_with_colls]
        if self.corpus.corpname not in result.KWICCorps:
            result.KWICCorps = [self.corpus.corpname] + result.KWICCorps
        result.CorporaColumns = [dict(n=c.get_conffile(), label=c.get_conf('NAME') or c.get_conffile())
                                 for c in [self.conc.orig_corp] + args.alignlist]
        for al_corp in args.alignlist:
            al_corpname = al_corp.get_conffile()
            if al_corpname in corps_with_colls:
                # Corpus already attached to the concordance -- just switch to it.
                self.conc.switch_aligned(al_corp.get_conffile())
                al_lines.append(self.kwiclines(args))
            else:
                # Attach the aligned corpus first, then fetch plain (no-context)
                # lines from it; the switch back-and-forth order matters here.
                self.conc.switch_aligned(self.conc.orig_corp.get_conffile())
                self.conc.add_aligned(al_corp.get_conffile())
                self.conc.switch_aligned(al_corp.get_conffile())
                al_lines.append(
                    self.kwiclines(args.copy(leftctx='0', rightctx='0', attrs='word', ctxattrs=''))
                )
        # It appears that Manatee returns lists of different lengths in case some translations
        # are missing at the end of a concordance. Following block fixes this issue.
        al_lines_fixed = [fix_length(item, len(result.Lines)) for item in al_lines]
        aligns = list(zip(*al_lines_fixed))
        for i, line in enumerate(result.Lines):
            line['Align'] = aligns[i]
def separate_speech_struct_from_tag(self, speech_segment, text):
"""
Removes structural attribute related to speech file identification.
E.g. getting input "<seg foo=bar speechfile=1234.wav time=1234>lorem ipsum</seg>" and
having configuration directive "speech_segment == seg.speechfile" the function
returns "<seg foo=bar time=1234>lorem ipsum</seg>"
arguments:
speech_segment -- 2-tuple (struct_name, attr_name)
text -- a string to be processed
returns:
2-tuple (modified string, structural attribute value)
"""
import re
if self.speech_segment_has_audio(speech_segment):
pattern = r"^(<%s\s+.*)%s=([^\s>]+)(\s.+|>)$" % tuple(speech_segment)
srch = re.search(pattern, text)
if srch is not None:
return srch.group(1).rstrip() + srch.group(3), srch.group(2)
return text, ''
@staticmethod
def remove_tag_from_line(line, tag_name):
"""
arguments:
line -- list of dicts containing at least the key 'str'
line as used in postproc_kwicline
tag_name -- str
returns:
the same object as the 'line' parameter
"""
import re
for item in line:
item['str'] = re.sub('<%s[^>]*>' % tag_name, '', re.sub(
'</%s>' % tag_name, '', item['str']))
return line
@staticmethod
def line_parts_contain_speech(line_left, line_right):
"""
Tests whether the line's left and right parts contain speech information
"""
for fragment in line_left + line_right:
if 'open_link' in fragment or 'close_link' in fragment:
return True
return False
    def update_speech_boundaries(self, speech_segment, line, column, filter_speech_tag, prev_speech_id=None):
        """
        Split each token on speech-structure tags and attach speech playback
        links ('open_link' / 'close_link') to the resulting fragments.

        arguments:
        speech_segment -- 2-tuple (struct_name, attr_name)
        line -- list of dicts {'str': '...', 'class': '...'}
        column -- str, one of {'left', 'kwic', 'right'}; specifies position according to KWIC
        filter_speech_tag -- if True then whole speech tag is removed else only its 'speech attribute'
        prev_speech_id -- str identifier of the previously processed speech segment

        | left           | kwic          | right         |
        ---------------------------------------------------------------
        | <sp>....</sp> <sp>..|..</sp> <sp>..</sp> <sp>..|..</sp>     |

        returns:
        2-tuple: modified line and the last speech id (which is necessary to obtain proper speech ID in case of partial
        segment on the "left" part of a concordance line and similarly in case of a partial segment on the "right"
        part of a concordance line).
        """
        newline = []
        speech_struct_str = speech_segment[0] if speech_segment and len(
            speech_segment) > 0 else None
        fragment_separator = '<%s' % speech_struct_str
        last_fragment = None
        last_speech_id = prev_speech_id
        for item in line:
            # split on (and keep) opening/closing speech tags; drop empty pieces
            fragments = [x for x in re.split('(<%s[^>]*>|</%s>)' % (speech_struct_str, speech_struct_str), item['str'])
                         if x != '']
            for fragment in fragments:
                frag_ext, speech_id = self.separate_speech_struct_from_tag(speech_segment, fragment)
                # a fragment without its own id inherits the most recent one
                if not speech_id:
                    speech_id = last_speech_id
                else:
                    last_speech_id = speech_id
                newline_item = {
                    'str': frag_ext,
                    'class': item['class']
                }
                if frag_ext.startswith(fragment_separator):
                    newline_item['open_link'] = {'speech_path': speech_id}
                elif frag_ext.endswith('</%s>' % speech_struct_str):
                    newline_item['close_link'] = {'speech_path': speech_id}
                newline.append(newline_item)
                last_fragment = newline_item
        # we have to treat specific situations related to the end of the
        # concordance line: a dangling opening tag at the very end of the
        # right context must not produce a playback link
        if (last_fragment is not None and
                re.search('^<%s(>|[^>]+>)$' % speech_struct_str, last_fragment['str']) and
                column == 'right'):
            del(last_fragment['open_link'])
        if filter_speech_tag:
            self.remove_tag_from_line(newline, speech_struct_str)
        return newline, last_speech_id
@staticmethod
def non1hitlen(hitlen):
return '' if hitlen == 1 else '%i' % hitlen
@staticmethod
def isengword(strclass):
# return bidirectional(word[0]) in ('L', 'LRE', 'LRO')
return 'ltr' in strclass['class'].split()
    @staticmethod
    def update_right_to_left(leftwords, rightwords):
        """
        Reorder context for "English" (LTR) runs around "English" keywords in a
        right-to-left corpus: the trailing LTR run of the left context and the
        leading LTR run of the right context swap sides.

        NOTE: the input lists may be truncated in place (via del) in some
        branches; callers are expected to rebind both lists from the return
        value, as kwiclines() does.

        returns:
        2-tuple (new leftwords, new rightwords)
        """
        # preceding words: find the start of the trailing LTR run
        nprev = len(leftwords) - 1
        while nprev >= 0 and Kwic.isengword(leftwords[nprev]):
            nprev -= 1
        if nprev == -1:
            # move whole context
            moveleft = leftwords
            leftwords = []
        else:
            moveleft = leftwords[nprev + 1:]
            del leftwords[nprev + 1:]
        # following words: find the end of the leading LTR run
        nfollow = 0
        while (nfollow < len(rightwords)
               and Kwic.isengword(rightwords[nfollow])):
            nfollow += 1
        moveright = rightwords[:nfollow]
        del rightwords[:nfollow]
        # swap the two runs across the keyword
        leftwords = leftwords + moveright
        rightwords = moveleft + rightwords
        return leftwords, rightwords
def speech_segment_has_audio(self, s):
return s and s[1]
def postproc_text_chunk(self, tokens):
prev = {}
ans = []
for item in tokens:
if item.get('class') == 'attr':
# TODO configurable delimiter
# a list is used for future compatibility
prev['tail_posattrs'] = item['str'].strip('/').split('/')
else:
ans.append(item)
prev = item
return ans
    def kwiclines(self, args):
        """
        Generates a list of 'kwic' (= keyword in context) lines according to
        the provided Concordance object and additional parameters (like
        page number, width of the left and right context etc.).

        arguments:
        args -- a KwicLinesArgs instance

        returns:
        a list of dicts, one per concordance line, each containing all the
        required line data (left context, kwic, right context, refs, sizes, ...)
        """
        # add structures needed to render speech playback information
        all_structs = args.structs
        if self.speech_segment_has_audio(args.speech_segment):
            speech_struct_attr_name = '.'.join(args.speech_segment)
            speech_struct_attr = self.corpus.get_attr(speech_struct_attr_name)
            if speech_struct_attr_name not in args.structs:
                all_structs += ',' + speech_struct_attr_name
        else:
            speech_struct_attr_name = ''
            speech_struct_attr = None
        lines = []
        if args.righttoleft:
            # swap the labels so contexts are rendered mirrored on the client
            rightlabel, leftlabel = 'Left', 'Right'
            args.structs += ',ltr'
            # from unicodedata import bidirectional
        else:
            leftlabel, rightlabel = 'Left', 'Right'
        # self.conc.corp() must be used here instead of self.corpus
        # because in case of parallel corpora these two are different and only the latter one is correct
        if isinstance(self.conc, InitialConc):
            kl = EmptyKWiclines()
        else:
            kl = manatee.KWICLines(self.conc.corp(), self.conc.RS(True, args.fromline, args.toline),
                                   args.leftctx, args.rightctx,
                                   args.attrs, args.ctxattrs, all_structs, args.refs)
        # NOTE(review): 'labelmap' appears to be unused below - verify before removing
        labelmap = args.labelmap.copy()
        labelmap['_'] = '_'
        maxleftsize = 0
        maxrightsize = 0
        filter_out_speech_tag = args.speech_segment and args.speech_segment[0] not in args.structs \
            and speech_struct_attr_name in all_structs
        i = args.fromline
        while kl.nextline():
            linegroup = kl.get_linegroup()
            if not linegroup:  # manatee returns 0 in case of no group (but None will work too here)
                linegroup = -1  # client-side uses -1 as "no group"
            if self.speech_segment_has_audio(args.speech_segment):
                leftmost_speech_id = speech_struct_attr.pos2str(kl.get_ctxbeg())
            else:
                leftmost_speech_id = None
            # thread the current speech id through left -> kwic -> right so
            # partial segments keep pointing at the correct audio file
            leftwords, last_left_speech_id = self.update_speech_boundaries(args.speech_segment,
                                                                           tokens2strclass(
                                                                               kl.get_left()),
                                                                           'left', filter_out_speech_tag,
                                                                           leftmost_speech_id)
            kwicwords, last_left_speech_id = self.update_speech_boundaries(args.speech_segment,
                                                                           tokens2strclass(
                                                                               kl.get_kwic()),
                                                                           'kwic',
                                                                           filter_out_speech_tag,
                                                                           last_left_speech_id)
            rightwords = self.update_speech_boundaries(args.speech_segment, tokens2strclass(kl.get_right()), 'right',
                                                       filter_out_speech_tag, last_left_speech_id)[0]
            leftwords = self.postproc_text_chunk(leftwords)
            kwicwords = self.postproc_text_chunk(kwicwords)
            rightwords = self.postproc_text_chunk(rightwords)
            if args.righttoleft and Kwic.isengword(kwicwords[0]):
                leftwords, rightwords = Kwic.update_right_to_left(leftwords, rightwords)
            # character counts (excluding structural tokens) are used below
            # to pad every line to equal visual width
            leftsize = 0
            for w in leftwords:
                if not w['class'] == 'strc':
                    leftsize += len(w['str']) + 1
            if leftsize > maxleftsize:
                maxleftsize = leftsize
            rightsize = 0
            for w in rightwords:
                if not w['class'] == 'strc':
                    rightsize += len(w['str']) + 1
            if rightsize > maxrightsize:
                maxrightsize = rightsize
            line_data = dict(toknum=kl.get_pos(),
                             hitlen=Kwic.non1hitlen(kl.get_kwiclen()),
                             kwiclen=kl.get_kwiclen(),
                             ref=[s for s in kl.get_ref_list()],
                             Kwic=kwicwords,
                             linegroup=linegroup,
                             leftsize=leftsize,
                             rightsize=rightsize,
                             linenum=i)
            line_data[leftlabel] = leftwords
            line_data[rightlabel] = rightwords
            lines.append(line_data)
            i += 1
        for line in lines:
            line['leftspace'] = ' ' * (maxleftsize - line['leftsize'])
            line['rightspace'] = ' ' * (maxrightsize - line['rightsize'])
        return lines
def get_sort_idx(self, q=(), pagesize=20):
"""
In case sorting is active this method generates shortcuts to pages where new
first letter of sorted keys (it can be 'left', 'kwic', 'right') starts.
arguments:
q -- a query (as a list)
pagesize -- number of items per page
returns:
a list of dicts with following structure (example):
[{'page': 1, 'label': u'a'}, {'page': 1, 'label': u'A'}, {'page': 2, 'label': u'b'},...]
"""
crit = ''
for qq in q:
if qq.startswith('s') and not qq.startswith('s*'):
crit = qq[1:]
if not crit:
return []
vals = manatee.StrVector()
idx = manatee.IntVector()
if '.' in crit.split('/')[0]:
just_letters = False
else:
just_letters = True
self.conc.sort_idx(crit, vals, idx, just_letters)
out = [(v, pos / pagesize + 1) for v, pos in zip(vals, idx)]
if just_letters:
result = []
keys = []
for v, p in out:
if not v[0] in keys:
result.append((v[0], p))
keys.append(v[0])
out = result
ans = []
for v, p in out:
try:
ans.append({'page': p, 'label': v})
except UnicodeDecodeError:
# Without manatee.set_encoding, manatee appears to produce
# few extra undecodable items. Ignoring them produces
# the same result as in case of official Bonito app.
pass
return ans
| gpl-2.0 |
rsignell-usgs/notebook | pyugrid/pyugrid/util.py | 1 | 2278 | """
Miscellaneous util functions.
"""
from __future__ import (absolute_import, division, print_function)
import numpy as np
epsilon = 1.e-5
def point_in_tri(face_points, point, return_weights=False):
"""
Calculates whether point is internal/external
to element by comparing summed area of sub triangles with area of triangle
element.
"""
sub_tri_areas = np.zeros(3)
sub_tri_areas[0] = _signed_area_tri(np.vstack((face_points[(0, 1), :],
point)))
sub_tri_areas[1] = _signed_area_tri(np.vstack((face_points[(1, 2), :],
point)))
sub_tri_areas[2] = _signed_area_tri(np.vstack((face_points[(0, 2), :],
point)))
tri_area = _signed_area_tri(face_points)
if abs(np.abs(sub_tri_areas).sum()-tri_area)/tri_area <= epsilon:
if return_weights:
raise NotImplementedError
# weights = sub_tri_areas/tri_area
# weights[1] = max(0., min(1., weights[1]))
# weights[2] = max(0., min(1., weights[2]))
# if (weights[0]+weights[1]>1):
# weights[2] = 0
# weights[1] = 1-weights[0]
# else:
# weights[2] = 1-weights[0]-weights[1]
#
# return weights
else:
return True
return False
def _signed_area_tri(points):
"""
points : the coordinates of the triangle vertices -- (3x2) float array
returns signed area of the triangle.
"""
x1, y1 = points[0]
x2, y2 = points[1]
x3, y3 = points[2]
return(((x1-x3)*(y2-y3)-(x2-x3)*(y1-y3))/2)
def asarraylike(obj):
    """
    Return ``obj`` unchanged when it already quacks enough like a numpy
    array to be used in pyugrid (exposes both ``dtype`` and ``shape``);
    otherwise coerce it with ``np.array``.

    This should catch netCDF4 variables and similar array-likes without
    copying them.

    Note: this does not check that the required attributes actually behave
    correctly.

    :param obj: the object to check for array-likeness
    """
    required_attrs = ('dtype', 'shape')
    if all(hasattr(obj, attr) for attr in required_attrs):
        return obj
    return np.array(obj)
| mit |
ahmedbodi/vertcoin | test/functional/mempool_package_onemore.py | 17 | 4981 | #!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test descendant package tracking carve-out allowing one final transaction in
an otherwise-full package as long as it has only one parent and is <= 10k in
size.
"""
from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, satoshi_round
MAX_ANCESTORS = 25
MAX_DESCENDANTS = 25
class MempoolPackagesTest(BitcoinTestFramework):
    """Exercises the descendant-package carve-out: one extra transaction is
    accepted into an otherwise-full package when it has exactly one parent."""
    def set_test_params(self):
        # single node; raise the orphan-tx limit so long chains are kept
        self.num_nodes = 1
        self.extra_args = [["-maxorphantx=1000"]]
    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
    # Build a transaction that spends parent_txid:vout
    # Return amount sent
    def chain_transaction(self, node, parent_txids, vouts, value, fee, num_outputs):
        """Spend the given parent outputs into num_outputs equal outputs.

        returns: (txid, per-output value) of the newly broadcast transaction
        """
        send_value = satoshi_round((value - fee)/num_outputs)
        inputs = []
        for (txid, vout) in zip(parent_txids, vouts):
            inputs.append({'txid' : txid, 'vout' : vout})
        outputs = {}
        for i in range(num_outputs):
            outputs[node.getnewaddress()] = send_value
        # createrawtransaction(..., 0, True): locktime 0, opt-in RBF enabled
        rawtx = node.createrawtransaction(inputs, outputs, 0, True)
        signedtx = node.signrawtransactionwithwallet(rawtx)
        txid = node.sendrawtransaction(signedtx['hex'])
        fulltx = node.getrawtransaction(txid, 1)
        assert len(fulltx['vout']) == num_outputs # make sure we didn't generate a change output
        return (txid, send_value)
    def run_test(self):
        """Build a MAX_ANCESTORS-deep chain and verify the one-more carve-out."""
        # Mine some blocks and have them mature.
        self.nodes[0].generate(101)
        utxo = self.nodes[0].listunspent(10)
        txid = utxo[0]['txid']
        vout = utxo[0]['vout']
        value = utxo[0]['amount']
        fee = Decimal("0.0002")
        # MAX_ANCESTORS transactions off a confirmed tx should be fine
        chain = []
        # first 4 links create 2 outputs each so later branches are possible
        for _ in range(4):
            (txid, sent_value) = self.chain_transaction(self.nodes[0], [txid], [vout], value, fee, 2)
            vout = 0
            value = sent_value
            chain.append([txid, value])
        for _ in range(MAX_ANCESTORS - 4):
            (txid, sent_value) = self.chain_transaction(self.nodes[0], [txid], [0], value, fee, 1)
            value = sent_value
            chain.append([txid, value])
        # an independent single-tx chain used to test the two-parent case below
        (second_chain, second_chain_value) = self.chain_transaction(self.nodes[0], [utxo[1]['txid']], [utxo[1]['vout']], utxo[1]['amount'], fee, 1)
        # Check mempool has MAX_ANCESTORS + 1 transactions in it
        assert_equal(len(self.nodes[0].getrawmempool(True)), MAX_ANCESTORS + 1)
        # Adding one more transaction on to the chain should fail.
        assert_raises_rpc_error(-26, "too-long-mempool-chain, too many unconfirmed ancestors [limit: 25]", self.chain_transaction, self.nodes[0], [txid], [0], value, fee, 1)
        # ...even if it chains on from some point in the middle of the chain.
        assert_raises_rpc_error(-26, "too-long-mempool-chain, too many descendants", self.chain_transaction, self.nodes[0], [chain[2][0]], [1], chain[2][1], fee, 1)
        assert_raises_rpc_error(-26, "too-long-mempool-chain, too many descendants", self.chain_transaction, self.nodes[0], [chain[1][0]], [1], chain[1][1], fee, 1)
        # ...even if it chains on to two parent transactions with one in the chain.
        assert_raises_rpc_error(-26, "too-long-mempool-chain, too many descendants", self.chain_transaction, self.nodes[0], [chain[0][0], second_chain], [1, 0], chain[0][1] + second_chain_value, fee, 1)
        # ...especially if its > 40k weight
        assert_raises_rpc_error(-26, "too-long-mempool-chain, too many descendants", self.chain_transaction, self.nodes[0], [chain[0][0]], [1], chain[0][1], fee, 350)
        # But not if it chains directly off the first transaction
        (replacable_txid, replacable_orig_value) = self.chain_transaction(self.nodes[0], [chain[0][0]], [1], chain[0][1], fee, 1)
        # and the second chain should work just fine
        self.chain_transaction(self.nodes[0], [second_chain], [0], second_chain_value, fee, 1)
        # Make sure we can RBF the chain which used our carve-out rule
        second_tx_outputs = {self.nodes[0].getrawtransaction(replacable_txid, True)["vout"][0]['scriptPubKey']['addresses'][0]: replacable_orig_value - (Decimal(1) / Decimal(100))}
        second_tx = self.nodes[0].createrawtransaction([{'txid': chain[0][0], 'vout': 1}], second_tx_outputs)
        signed_second_tx = self.nodes[0].signrawtransactionwithwallet(second_tx)
        self.nodes[0].sendrawtransaction(signed_second_tx['hex'])
        # Finally, check that we added two transactions
        assert_equal(len(self.nodes[0].getrawmempool(True)), MAX_ANCESTORS + 3)
# Standard functional-test entry point: instantiate the test case and run it.
if __name__ == '__main__':
    MempoolPackagesTest().main()
| mit |
csparkresearch/eyes-online | app/static/scripts/ExpEYES17/pendulumVelocity.py | 1 | 5409 | import sys, time, utils, math
if utils.PQT5 == True:
from PyQt5.QtCore import Qt, QTimer
from PyQt5.QtWidgets import QApplication,QWidget, QLabel, QHBoxLayout, QVBoxLayout,\
QCheckBox, QPushButton
from PyQt5.QtGui import QPalette, QColor
else:
from PyQt4.QtCore import Qt, QTimer
from PyQt4.QtGui import QPalette, QColor, QApplication, QWidget,\
QLabel, QHBoxLayout, QVBoxLayout, QPushButton, QCheckBox
import pyqtgraph as pg
import numpy as np
import eyes17.eyemath17 as em
class Expt(QWidget):
    """ExpEYES pendulum-velocity experiment: records voltage on input A3 over
    time, plots the trace with pyqtgraph and fits a damped sine to it."""
    TIMER = 5          # polling interval for the acquisition timer (ms)
    RPWIDTH = 300      # width of the right control panel (px)
    RPGAP = 4          # spacing between right-panel widgets (px)
    running = False    # True while a measurement is in progress
    VMIN = -5          # plot voltage axis minimum (V)
    VMAX = 5           # plot voltage axis maximum (V)
    TMIN = 0           # plot time axis minimum (s)
    TMAX = 5           # plot time axis maximum / capture duration (s)
    data = [ [], [] ]  # current capture: [time samples, voltage samples]
    currentTrace = None  # pyqtgraph plot item of the trace being drawn
    traces = []        # all plot items drawn so far (for clearing)
    history = [] # Data store
    sources = ['A1','A2','A3', 'MIC']
    pencol = 2         # pen colour index, advanced per trace
    def __init__(self, device=None):
        """Build the plot area, the control panel and start the poll timer.

        device -- eyes17 hardware handle (None tolerated; errors are shown
        in the message line instead of raising)
        """
        QWidget.__init__(self)
        self.p = device # connection to the device hardware
        try:
            self.p.select_range('A1',4.0)
            self.p.configure_trigger(0, 'A1', 0)
        except:
            # device may be absent/disconnected; the GUI still comes up
            pass
        self.pwin = pg.PlotWidget() # pyqtgraph window
        self.pwin.showGrid(x=True, y=True) # with grid
        ax = self.pwin.getAxis('bottom')
        ax.setLabel('Time (mS)')
        ax = self.pwin.getAxis('left')
        ax.setLabel('Voltage (V)')
        self.pwin.disableAutoRange()
        self.pwin.setXRange(self.TMIN, self.TMAX)
        self.pwin.setYRange(self.VMIN, self.VMAX)
        self.pwin.hideButtons() # Do not show the 'A' button of pg
        right = QVBoxLayout() # right side vertical layout
        right.setAlignment(Qt.AlignTop)
        right.setSpacing(self.RPGAP)
        # duration entry row
        H = QHBoxLayout()
        l = QLabel(text=self.tr('Duration'))
        l.setMaximumWidth(80)
        H.addWidget(l)
        self.TMAXtext = utils.lineEdit(40, self.TMAX, 6, None)
        H.addWidget(self.TMAXtext)
        l = QLabel(text=self.tr('Seconds'))
        l.setMaximumWidth(60)
        H.addWidget(l)
        right.addLayout(H)
        # action buttons
        b = QPushButton(self.tr("Start"))
        right.addWidget(b)
        b.clicked.connect(self.start)
        b = QPushButton(self.tr("Stop"))
        right.addWidget(b)
        b.clicked.connect(self.stop)
        b = QPushButton(self.tr("Analyze last Trace"))
        right.addWidget(b)
        b.clicked.connect(self.fit_curve)
        b = QPushButton(self.tr("Clear Traces"))
        right.addWidget(b)
        b.clicked.connect(self.clear)
        # save row: button + target filename
        H = QHBoxLayout()
        self.SaveButton = QPushButton(self.tr("Save to"))
        self.SaveButton.setMaximumWidth(90)
        self.SaveButton.clicked.connect(self.save_data)
        H.addWidget(self.SaveButton)
        self.Filename = utils.lineEdit(150, 'pendulum-data.txt', 20, None)
        H.addWidget(self.Filename)
        right.addLayout(H)
        #------------------------end of right panel ----------------
        top = QHBoxLayout()
        top.addWidget(self.pwin)
        top.addLayout(right)
        full = QVBoxLayout()
        full.addLayout(top)
        self.msgwin = QLabel(text=self.tr(''))
        full.addWidget(self.msgwin)
        self.setLayout(full)
        # periodic acquisition tick
        self.timer = QTimer()
        self.timer.timeout.connect(self.update)
        self.timer.start(self.TIMER)
    #----------------------------- end of init ---------------
    def fit_curve(self):
        """Fit a damped sine to the last trace and report frequency/damping."""
        if self.running == True or self.data[0]==[]:
            return
        if (len(self.data[0])%2) == 1: # make it an even size, for fitting
            self.data[0] = self.data[0][:-1]
            self.data[1] = self.data[1][:-1]
        fa = em.fit_dsine(self.data[0], self.data[1], 1000.0) # fit in em expects khz
        if fa != None:
            pa = fa[1]
            self.traces.append(self.pwin.plot(self.data[0], fa[0], pen = 'w'))
            self.msg('Frequency of Oscillation = %5.2f Hz. Damping Factor = %5.3f'%(pa[1], pa[4]))
        else:
            self.msg('Analysis failed. Could not fit data')
    def update(self):
        """Timer callback: read one A3 sample and extend the live trace."""
        if self.running == False:
            return
        try:
            t,v = self.p.get_voltage_time('A3') # Read A3
        except:
            self.msg('<font color="red">Communication Error. Try Reconnect from the Device menu')
            return
        if len(self.data[0]) == 0:
            # first sample defines the time origin
            self.start_time = t
            elapsed = 0
        else:
            elapsed = t - self.start_time
        self.data[0].append(elapsed)
        self.data[1].append(v)
        if elapsed > self.TMAX:
            # capture finished: archive the trace and stop
            self.running = False
            self.history.append(self.data)
            self.traces.append(self.currentTrace)
            self.msg('Time Vs Angular velocity plot completed')
            return
        if self.index > 1: # Draw the line
            self.currentTrace.setData(self.data[0], self.data[1])
        self.index += 1
    def start(self):
        """Begin a new capture using the duration entered by the user."""
        if self.running == True: return
        try:
            val = float(self.TMAXtext.text())
        except:
            self.msg('Invalid Duration')
            return
        self.TMAX = val
        self.pwin.setXRange(self.TMIN, self.TMAX)
        self.pwin.setYRange(self.VMIN, self.VMAX)
        self.running = True
        self.data = [ [], [] ]
        self.currentTrace = self.pwin.plot([0,0],[0,0], pen = self.pencol)
        self.index = 0
        self.pencol += 2
        self.msg('Started Measurements')
    def stop(self):
        """Abort the running capture, keeping the partial trace."""
        if self.running == False: return
        self.running = False
        self.history.append(self.data)
        self.traces.append(self.currentTrace)
        self.msg('User Stopped')
    def clear(self):
        """Remove all drawn traces and discard the stored data."""
        for k in self.traces:
            self.pwin.removeItem(k)
        self.history = []
        self.pencol = 2
        self.msg('Cleared Traces and Data')
    def save_data(self):
        """Write all stored traces to the filename from the text field."""
        if self.history == []:
            self.msg('No Traces available for saving')
            return
        fn = self.Filename.text()
        self.p.save(self.history, fn)
        self.msg('Traces saved to %s'%fn)
    def msg(self, m):
        """Show a (translated) status message in the bottom message line."""
        self.msgwin.setText(self.tr(m))
# Stand-alone entry point: connect to the ExpEYES device and show the widget.
if __name__ == '__main__':
    import eyes17.eyes
    dev = eyes17.eyes.open()
    app = QApplication(sys.argv)
    mw = Expt(dev)
    mw.show()
    sys.exit(app.exec_())
| gpl-3.0 |
fldc/CouchPotatoServer | libs/chardet/hebrewprober.py | 2929 | 13359 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Shy Shalom
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe, eDetecting
from .compat import wrap_ord
# This prober doesn't actually recognize a language or a charset.
# It is a helper prober for the use of the Hebrew model probers
### General ideas of the Hebrew charset recognition ###
#
# Four main charsets exist in Hebrew:
# "ISO-8859-8" - Visual Hebrew
# "windows-1255" - Logical Hebrew
# "ISO-8859-8-I" - Logical Hebrew
# "x-mac-hebrew" - ?? Logical Hebrew ??
#
# Both "ISO" charsets use a completely identical set of code points, whereas
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
# these code points. windows-1255 defines additional characters in the range
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
# x-mac-hebrew defines similar additional code points but with a different
# mapping.
#
# As far as an average Hebrew text with no diacritics is concerned, all four
# charsets are identical with respect to code points. Meaning that for the
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
# (including final letters).
#
# The dominant difference between these charsets is their directionality.
# "Visual" directionality means that the text is ordered as if the renderer is
# not aware of a BIDI rendering algorithm. The renderer sees the text and
# draws it from left to right. The text itself when ordered naturally is read
# backwards. A buffer of Visual Hebrew generally looks like so:
# "[last word of first line spelled backwards] [whole line ordered backwards
# and spelled backwards] [first word of first line spelled backwards]
# [end of line] [last word of second line] ... etc' "
# adding punctuation marks, numbers and English text to visual text is
# naturally also "visual" and from left to right.
#
# "Logical" directionality means the text is ordered "naturally" according to
# the order it is read. It is the responsibility of the renderer to display
# the text from right to left. A BIDI algorithm is used to place general
# punctuation marks, numbers and English text in the text.
#
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
# what little evidence I could find, it seems that its general directionality
# is Logical.
#
# To sum up all of the above, the Hebrew probing mechanism knows about two
# charsets:
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
# backwards while line order is natural. For charset recognition purposes
# the line order is unimportant (In fact, for this implementation, even
# word order is unimportant).
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
#
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
# specifically identified.
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
# that contain special punctuation marks or diacritics is displayed with
# some unconverted characters showing as question marks. This problem might
# be corrected using another model prober for x-mac-hebrew. Due to the fact
# that x-mac-hebrew texts are so rare, writing another model prober isn't
# worth the effort and performance hit.
#
#### The Prober ####
#
# The prober is divided between two SBCharSetProbers and a HebrewProber,
# all of which are managed, created, fed data, inquired and deleted by the
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
# fact some kind of Hebrew, Logical or Visual. The final decision about which
# one is it is made by the HebrewProber by combining final-letter scores
# with the scores of the two SBCharSetProbers to produce a final answer.
#
# The SBCSGroupProber is responsible for stripping the original text of HTML
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
# and new lines. It reduces any sequence of such characters to a single space.
# The buffer fed to each prober in the SBCS group prober is pure text in
# high-ASCII.
# The two SBCharSetProbers (model probers) share the same language model:
# Win1255Model.
# The first SBCharSetProber uses the model normally as any other
# SBCharSetProber does, to recognize windows-1255, upon which this model was
# built. The second SBCharSetProber is told to make the pair-of-letter
# lookup in the language model backwards. This in practice exactly simulates
# a visual Hebrew model using the windows-1255 logical Hebrew model.
#
# The HebrewProber is not using any language model. All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless. HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself. Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified. If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision. In the HebrewProber, the
# decision is made according to the final-letters scores maintained and Both
# model probers scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".
# windows-1255 / ISO-8859-8 code points of interest
# Final vs. normal forms of the five Hebrew letters whose shape changes at
# the end of a word (Kaf, Mem, Nun, Pe, Tsadi); both charsets share these
# code points.
FINAL_KAF = 0xea
NORMAL_KAF = 0xeb
FINAL_MEM = 0xed
NORMAL_MEM = 0xee
FINAL_NUN = 0xef
NORMAL_NUN = 0xf0
FINAL_PE = 0xf3
NORMAL_PE = 0xf4
FINAL_TSADI = 0xf5
NORMAL_TSADI = 0xf6
# Minimum Visual vs Logical final letter score difference.
# If the difference is below this, don't rely solely on the final letter score
# distance.
MIN_FINAL_CHAR_DISTANCE = 5
# Minimum Visual vs Logical model score difference.
# If the difference is below this, don't rely at all on the model score
# distance.
MIN_MODEL_DISTANCE = 0.01
# Canonical charset names reported for the two directionality variants.
VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"
class HebrewProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mLogicalProber = None
self._mVisualProber = None
self.reset()
    def reset(self):
        """Clear the final-letter evidence scores and the lookbehind window."""
        self._mFinalCharLogicalScore = 0
        self._mFinalCharVisualScore = 0
        # The two last characters seen in the previous buffer,
        # mPrev and mBeforePrev are initialized to space in order to simulate
        # a word delimiter at the beginning of the data
        self._mPrev = ' '
        self._mBeforePrev = ' '
    # These probers are owned by the group prober; this class only keeps
    # references so it can combine their scores in the final decision.
    def set_model_probers(self, logicalProber, visualProber):
        """Store the two windows-1255 model probers (logical and visual)."""
        self._mLogicalProber = logicalProber
        self._mVisualProber = visualProber
def is_final(self, c):
return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
FINAL_TSADI]
def is_non_final(self, c):
# The normal Tsadi is not a good Non-Final letter due to words like
# 'lechotet' (to chat) containing an apostrophe after the tsadi. This
# apostrophe is converted to a space in FilterWithoutEnglishLetters
# causing the Non-Final tsadi to appear at an end of a word even
# though this is not the case in the original text.
# The letters Pe and Kaf rarely display a related behavior of not being
# a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
# for example legally end with a Non-Final Pe or Kaf. However, the
# benefit of these letters as Non-Final letters outweighs the damage
# since these words are quite rare.
return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
def feed(self, aBuf):
# Final letter analysis for logical-visual decision.
# Look for evidence that the received buffer is either logical Hebrew
# or visual Hebrew.
# The following cases are checked:
# 1) A word longer than 1 letter, ending with a final letter. This is
# an indication that the text is laid out "naturally" since the
# final letter really appears at the end. +1 for logical score.
# 2) A word longer than 1 letter, ending with a Non-Final letter. In
# normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
# should not end with the Non-Final form of that letter. Exceptions
# to this rule are mentioned above in isNonFinal(). This is an
# indication that the text is laid out backwards. +1 for visual
# score
# 3) A word longer than 1 letter, starting with a final letter. Final
# letters should not appear at the beginning of a word. This is an
# indication that the text is laid out backwards. +1 for visual
# score.
#
# The visual score and logical score are accumulated throughout the
# text and are finally checked against each other in GetCharSetName().
# No checking for final letters in the middle of words is done since
# that case is not an indication for either Logical or Visual text.
#
# We automatically filter out all 7-bit characters (replace them with
# spaces) so the word boundary detection works properly. [MAP]
if self.get_state() == eNotMe:
# Both model probers say it's not them. No reason to continue.
return eNotMe
aBuf = self.filter_high_bit_only(aBuf)
for cur in aBuf:
if cur == ' ':
# We stand on a space - a word just ended
if self._mBeforePrev != ' ':
# next-to-last char was not a space so self._mPrev is not a
# 1 letter word
if self.is_final(self._mPrev):
# case (1) [-2:not space][-1:final letter][cur:space]
self._mFinalCharLogicalScore += 1
elif self.is_non_final(self._mPrev):
# case (2) [-2:not space][-1:Non-Final letter][
# cur:space]
self._mFinalCharVisualScore += 1
else:
# Not standing on a space
if ((self._mBeforePrev == ' ') and
(self.is_final(self._mPrev)) and (cur != ' ')):
# case (3) [-2:space][-1:final letter][cur:not space]
self._mFinalCharVisualScore += 1
self._mBeforePrev = self._mPrev
self._mPrev = cur
# Forever detecting, till the end or until both model probers return
# eNotMe (handled above)
return eDetecting
def get_charset_name(self):
# Make the decision: is it Logical or Visual?
# If the final letter score distance is dominant enough, rely on it.
finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
if finalsub >= MIN_FINAL_CHAR_DISTANCE:
return LOGICAL_HEBREW_NAME
if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
return VISUAL_HEBREW_NAME
# It's not dominant enough, try to rely on the model scores instead.
modelsub = (self._mLogicalProber.get_confidence()
- self._mVisualProber.get_confidence())
if modelsub > MIN_MODEL_DISTANCE:
return LOGICAL_HEBREW_NAME
if modelsub < -MIN_MODEL_DISTANCE:
return VISUAL_HEBREW_NAME
# Still no good, back to final letter distance, maybe it'll save the
# day.
if finalsub < 0.0:
return VISUAL_HEBREW_NAME
# (finalsub > 0 - Logical) or (don't know what to do) default to
# Logical.
return LOGICAL_HEBREW_NAME
def get_state(self):
# Remain active as long as any of the model probers are active.
if (self._mLogicalProber.get_state() == eNotMe) and \
(self._mVisualProber.get_state() == eNotMe):
return eNotMe
return eDetecting
| gpl-3.0 |
edl00k/omim | 3party/protobuf/python/google/protobuf/internal/unknown_fields_test.py | 73 | 9102 | #! /usr/bin/python
# -*- coding: utf-8 -*-
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test for preservation of unknown fields in the pure Python implementation."""
__author__ = 'bohdank@google.com (Bohdan Koval)'
from google.apputils import basetest
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf.internal import encoder
from google.protobuf.internal import missing_enum_values_pb2
from google.protobuf.internal import test_util
from google.protobuf.internal import type_checkers
class UnknownFieldsTest(basetest.TestCase):
  """Tests that fields unknown to TestEmptyMessage are preserved across
  parsing, serialization, CopyFrom/MergeFrom and equality checks."""
  def setUp(self):
    # Serialize a fully-populated TestAllTypes message, then parse it into
    # TestEmptyMessage so that every field lands in _unknown_fields.
    self.descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
    self.all_fields = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(self.all_fields)
    self.all_fields_data = self.all_fields.SerializeToString()
    self.empty_message = unittest_pb2.TestEmptyMessage()
    self.empty_message.ParseFromString(self.all_fields_data)
    self.unknown_fields = self.empty_message._unknown_fields
  def GetField(self, name):
    """Decodes the unknown-field bytes stored for field *name* using the
    TestAllTypes decoder for that tag, and returns the decoded value."""
    field_descriptor = self.descriptor.fields_by_name[name]
    wire_type = type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type]
    field_tag = encoder.TagBytes(field_descriptor.number, wire_type)
    result_dict = {}
    for tag_bytes, value in self.unknown_fields:
      if tag_bytes == field_tag:
        decoder = unittest_pb2.TestAllTypes._decoders_by_tag[tag_bytes][0]
        decoder(value, 0, len(value), self.all_fields, result_dict)
    return result_dict[field_descriptor]
  def testEnum(self):
    value = self.GetField('optional_nested_enum')
    self.assertEqual(self.all_fields.optional_nested_enum, value)
  def testRepeatedEnum(self):
    value = self.GetField('repeated_nested_enum')
    self.assertEqual(self.all_fields.repeated_nested_enum, value)
  def testVarint(self):
    value = self.GetField('optional_int32')
    self.assertEqual(self.all_fields.optional_int32, value)
  def testFixed32(self):
    value = self.GetField('optional_fixed32')
    self.assertEqual(self.all_fields.optional_fixed32, value)
  def testFixed64(self):
    value = self.GetField('optional_fixed64')
    self.assertEqual(self.all_fields.optional_fixed64, value)
  def testLengthDelimited(self):
    value = self.GetField('optional_string')
    self.assertEqual(self.all_fields.optional_string, value)
  def testGroup(self):
    value = self.GetField('optionalgroup')
    self.assertEqual(self.all_fields.optionalgroup, value)
  def testSerialize(self):
    data = self.empty_message.SerializeToString()
    # Don't use assertEqual because we don't want to dump raw binary data to
    # stdout.
    self.assertTrue(data == self.all_fields_data)
  def testCopyFrom(self):
    message = unittest_pb2.TestEmptyMessage()
    message.CopyFrom(self.empty_message)
    self.assertEqual(self.unknown_fields, message._unknown_fields)
  def testMergeFrom(self):
    # MergeFrom must append the source's unknown fields after the
    # destination's existing ones.
    message = unittest_pb2.TestAllTypes()
    message.optional_int32 = 1
    message.optional_uint32 = 2
    source = unittest_pb2.TestEmptyMessage()
    source.ParseFromString(message.SerializeToString())
    message.ClearField('optional_int32')
    message.optional_int64 = 3
    message.optional_uint32 = 4
    destination = unittest_pb2.TestEmptyMessage()
    destination.ParseFromString(message.SerializeToString())
    unknown_fields = destination._unknown_fields[:]
    destination.MergeFrom(source)
    self.assertEqual(unknown_fields + source._unknown_fields,
                     destination._unknown_fields)
  def testClear(self):
    self.empty_message.Clear()
    self.assertEqual(0, len(self.empty_message._unknown_fields))
  def testByteSize(self):
    self.assertEqual(self.all_fields.ByteSize(), self.empty_message.ByteSize())
  def testUnknownExtensions(self):
    message = unittest_pb2.TestEmptyMessageWithExtensions()
    message.ParseFromString(self.all_fields_data)
    self.assertEqual(self.empty_message._unknown_fields,
                     message._unknown_fields)
  def testListFields(self):
    # Make sure ListFields doesn't return unknown fields.
    self.assertEqual(0, len(self.empty_message.ListFields()))
  def testSerializeMessageSetWireFormatUnknownExtension(self):
    # Create a message using the message set wire format with an unknown
    # message.
    raw = unittest_mset_pb2.RawMessageSet()
    # Add an unknown extension.
    item = raw.item.add()
    item.type_id = 1545009
    message1 = unittest_mset_pb2.TestMessageSetExtension1()
    message1.i = 12345
    item.message = message1.SerializeToString()
    serialized = raw.SerializeToString()
    # Parse message using the message set wire format.
    proto = unittest_mset_pb2.TestMessageSet()
    proto.MergeFromString(serialized)
    # Verify that the unknown extension is serialized unchanged
    reserialized = proto.SerializeToString()
    new_raw = unittest_mset_pb2.RawMessageSet()
    new_raw.MergeFromString(reserialized)
    self.assertEqual(raw, new_raw)
  def testEquals(self):
    message = unittest_pb2.TestEmptyMessage()
    message.ParseFromString(self.all_fields_data)
    self.assertEqual(self.empty_message, message)
    self.all_fields.ClearField('optional_string')
    message.ParseFromString(self.all_fields.SerializeToString())
    self.assertNotEqual(self.empty_message, message)
class UnknownEnumValuesTest(basetest.TestCase):
  """Tests that enum values absent from the receiving schema are preserved
  as unknown fields.

  Renamed from ``UnknownFieldsTest``: the old name shadowed the class of the
  same name defined earlier in this module, so that class's tests were never
  discovered or executed by the test runner.
  """

  def setUp(self):
    # Serialize a TestEnumValues message, then parse it into
    # TestMissingEnumValues, whose schema lacks those enum values, so they
    # are recorded in _unknown_fields.
    self.descriptor = missing_enum_values_pb2.TestEnumValues.DESCRIPTOR
    self.message = missing_enum_values_pb2.TestEnumValues()
    self.message.optional_nested_enum = (
        missing_enum_values_pb2.TestEnumValues.ZERO)
    self.message.repeated_nested_enum.extend([
        missing_enum_values_pb2.TestEnumValues.ZERO,
        missing_enum_values_pb2.TestEnumValues.ONE,
        ])
    self.message.packed_nested_enum.extend([
        missing_enum_values_pb2.TestEnumValues.ZERO,
        missing_enum_values_pb2.TestEnumValues.ONE,
        ])
    self.message_data = self.message.SerializeToString()
    self.missing_message = missing_enum_values_pb2.TestMissingEnumValues()
    self.missing_message.ParseFromString(self.message_data)
    self.unknown_fields = self.missing_message._unknown_fields

  def GetField(self, name):
    """Decodes the unknown-field bytes stored for field *name* using the
    TestEnumValues decoder for that tag, and returns the decoded value."""
    field_descriptor = self.descriptor.fields_by_name[name]
    wire_type = type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type]
    field_tag = encoder.TagBytes(field_descriptor.number, wire_type)
    result_dict = {}
    for tag_bytes, value in self.unknown_fields:
      if tag_bytes == field_tag:
        decoder = missing_enum_values_pb2.TestEnumValues._decoders_by_tag[
            tag_bytes][0]
        decoder(value, 0, len(value), self.message, result_dict)
    return result_dict[field_descriptor]

  def testUnknownEnumValue(self):
    self.assertFalse(self.missing_message.HasField('optional_nested_enum'))
    value = self.GetField('optional_nested_enum')
    self.assertEqual(self.message.optional_nested_enum, value)

  def testUnknownRepeatedEnumValue(self):
    value = self.GetField('repeated_nested_enum')
    self.assertEqual(self.message.repeated_nested_enum, value)

  def testUnknownPackedEnumValue(self):
    value = self.GetField('packed_nested_enum')
    self.assertEqual(self.message.packed_nested_enum, value)

  def testRoundTrip(self):
    # Re-serializing must carry the unknown enum values along unchanged.
    new_message = missing_enum_values_pb2.TestEnumValues()
    new_message.ParseFromString(self.missing_message.SerializeToString())
    self.assertEqual(self.message, new_message)
# Allow running the tests in this module directly.
if __name__ == '__main__':
  basetest.main()
| apache-2.0 |
jon-choi/hillsbarber | venv/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.py | 439 | 11949 | # Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import collections
import itertools
import re
from ._structures import Infinity
__all__ = [
"parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
]
# Parsed components of a PEP 440 version, as produced by Version.__init__.
_Version = collections.namedtuple(
    "_Version",
    ["epoch", "release", "dev", "pre", "post", "local"],
)
def parse(version):
    """
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    """
    try:
        return Version(version)
    except InvalidVersion:
        # Not PEP 440 compliant: fall back to the permissive legacy scheme.
        return LegacyVersion(version)
class InvalidVersion(ValueError):
    """
    An invalid version was found, users should refer to PEP 440.

    Raised by :class:`Version` when the string does not match
    ``VERSION_PATTERN``.
    """
class _BaseVersion(object):
    """Mixin providing hashing and rich comparisons for version objects.

    Subclasses must assign a totally-orderable sort key to ``self._key``;
    all comparisons below simply delegate to the two keys.  Comparing
    against a non-version object returns ``NotImplemented``.
    """

    def _compare(self, other, method):
        # Kept as a helper: apply *method* to the two sort keys, refusing
        # comparisons with objects that are not versions.
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return method(self._key, other._key)

    def __hash__(self):
        return hash(self._key)

    def __eq__(self, other):
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return self._key == other._key

    def __ne__(self, other):
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return self._key != other._key

    def __lt__(self, other):
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return self._key < other._key

    def __le__(self, other):
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return self._key <= other._key

    def __gt__(self, other):
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return self._key > other._key

    def __ge__(self, other):
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return self._key >= other._key
class LegacyVersion(_BaseVersion):
    """A non-PEP 440 version string, ordered by setuptools' legacy rules."""
    def __init__(self, version):
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)
    def __str__(self):
        return self._version
    def __repr__(self):
        return "<LegacyVersion({0})>".format(repr(str(self)))
    @property
    def public(self):
        # Legacy versions have no local segment; the whole string is public.
        return self._version
    @property
    def base_version(self):
        return self._version
    @property
    def local(self):
        # Local version segments are a PEP 440 concept only.
        return None
    @property
    def is_prerelease(self):
        # Always False: legacy versions carry no PEP 440 pre-release marker.
        return False
    @property
    def is_postrelease(self):
        # Always False: legacy versions carry no PEP 440 post-release marker.
        return False
# Splits a legacy version string into numeric runs, alphabetic runs, "." and
# "-" (the whitespace in the pattern is ignored under re.VERBOSE).
_legacy_version_component_re = re.compile(
    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)

# Canonical respellings applied to individual tokens before they become sort
# keys; "dev" maps to "@", which sorts before every lowercase letter.
_legacy_version_replacement_map = {
    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}


def _parse_version_parts(s):
    """Yield normalized, comparable tokens for the legacy version string *s*.

    Numeric runs are zero-padded to eight digits so plain string comparison
    orders them numerically; every other token is prefixed with ``"*"``.  A
    trailing ``"*final"`` token is always produced so that pre-release tags
    compare lower than the finished release.
    """
    for raw in _legacy_version_component_re.split(s):
        token = _legacy_version_replacement_map.get(raw, raw)
        # Skip the empty strings produced by re.split and bare separators.
        if not token or token == ".":
            continue
        if token[:1] in "0123456789":
            yield token.zfill(8)
        else:
            yield "*" + token
    yield "*final"
def _legacy_cmpkey(version):
    """Build the sort key for a legacy (non-PEP 440) version string.

    Returns ``(epoch, parts)`` where *parts* is a tuple of normalized string
    tokens from :func:`_parse_version_parts`.
    """
    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the defacto standard originally implemented by setuptools,
    # as before all PEP 440 versions.
    epoch = -1
    # This scheme is taken from pkg_resources.parse_version setuptools prior to
    # it's adoption of the packaging library.
    parts = []
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()
        parts.append(part)
    parts = tuple(parts)
    return epoch, parts
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
v?
(?:
(?:(?P<epoch>[0-9]+)!)? # epoch
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
(?P<pre> # pre-release
[-_\.]?
(?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
[-_\.]?
(?P<pre_n>[0-9]+)?
)?
(?P<post> # post release
(?:-(?P<post_n1>[0-9]+))
|
(?:
[-_\.]?
(?P<post_l>post|rev|r)
[-_\.]?
(?P<post_n2>[0-9]+)?
)
)?
(?P<dev> # dev release
[-_\.]?
(?P<dev_l>dev)
[-_\.]?
(?P<dev_n>[0-9]+)?
)?
)
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
"""
class Version(_BaseVersion):
    """A PEP 440 compliant version, parsed from a string.

    Raises :class:`InvalidVersion` when the string does not match
    ``VERSION_PATTERN``.
    """
    _regex = re.compile(
        r"^\s*" + VERSION_PATTERN + r"\s*$",
        re.VERBOSE | re.IGNORECASE,
    )
    def __init__(self, version):
        """Parse *version* into its PEP 440 pieces and build the sort key."""
        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))
        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(
                match.group("pre_l"),
                match.group("pre_n"),
            ),
            post=_parse_letter_version(
                match.group("post_l"),
                match.group("post_n1") or match.group("post_n2"),
            ),
            dev=_parse_letter_version(
                match.group("dev_l"),
                match.group("dev_n"),
            ),
            local=_parse_local_version(match.group("local")),
        )
        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )
    def __repr__(self):
        return "<Version({0})>".format(repr(str(self)))
    def __str__(self):
        """Re-assemble the canonical string form from the parsed pieces."""
        parts = []
        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))
        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))
        # Pre-release
        if self._version.pre is not None:
            parts.append("".join(str(x) for x in self._version.pre))
        # Post-release
        if self._version.post is not None:
            parts.append(".post{0}".format(self._version.post[1]))
        # Development release
        if self._version.dev is not None:
            parts.append(".dev{0}".format(self._version.dev[1]))
        # Local version segment
        if self._version.local is not None:
            parts.append(
                "+{0}".format(".".join(str(x) for x in self._version.local))
            )
        return "".join(parts)
    @property
    def public(self):
        # Everything before the "+" local-version separator.
        return str(self).split("+", 1)[0]
    @property
    def base_version(self):
        # Epoch plus release segment only; no pre/post/dev/local parts.
        parts = []
        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))
        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))
        return "".join(parts)
    @property
    def local(self):
        # The part after "+", or None when there is no local segment.
        version_string = str(self)
        if "+" in version_string:
            return version_string.split("+", 1)[1]
    @property
    def is_prerelease(self):
        return bool(self._version.dev or self._version.pre)
    @property
    def is_postrelease(self):
        return bool(self._version.post)
def _parse_letter_version(letter, number):
    """Normalize a (letter, number) release-segment pair.

    Returns ``(canonical_letter, int(number))``, or ``None`` when neither
    component is present.  A tag with no numeral gets an implicit ``0``
    (``"1.0a"`` == ``"1.0a0"``); a bare number with no tag is the implicit
    post-release syntax (``"1.0-1"``).
    """
    # Preferred spellings for the alternate forms PEP 440 accepts.
    aliases = {
        "alpha": "a",
        "beta": "b",
        "c": "rc",
        "pre": "rc",
        "preview": "rc",
        "rev": "post",
        "r": "post",
    }
    if letter:
        if number is None:
            # Implicit 0 when the tag carries no numeral.
            number = 0
        spelling = letter.lower()
        return aliases.get(spelling, spelling), int(number)
    if number:
        # Number without a tag: implicit post release.
        return "post", int(number)
# Characters that separate segments of a local version (the historical
# misspelling of the name is kept for compatibility).
_local_version_seperators = re.compile(r"[\._-]")


def _parse_local_version(local):
    """Split a local-version string into comparison segments.

    ``"abc.1.twelve"`` becomes ``("abc", 1, "twelve")``: purely numeric
    segments become ``int`` and the rest are lower-cased.  Returns ``None``
    when *local* is ``None``.
    """
    if local is None:
        return None
    segments = []
    for part in _local_version_seperators.split(local):
        segments.append(int(part) if part.isdigit() else part.lower())
    return tuple(segments)
def _cmpkey(epoch, release, pre, post, dev, local):
    """Build the total-ordering sort key for a parsed PEP 440 version.

    Missing segments are replaced with +/-Infinity sentinels so that
    pre/post/dev/local releases order correctly around the plain release.
    """
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    release = tuple(
        reversed(list(
            itertools.dropwhile(
                lambda x: x == 0,
                reversed(release),
            )
        ))
    )
    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        pre = -Infinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        pre = Infinity
    # Versions without a post segment should sort before those with one.
    if post is None:
        post = -Infinity
    # Versions without a development segment should sort after those with one.
    if dev is None:
        dev = Infinity
    if local is None:
        # Versions without a local segment should sort before those with one.
        local = -Infinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        local = tuple(
            (i, "") if isinstance(i, int) else (-Infinity, i)
            for i in local
        )
    return epoch, release, pre, post, dev, local
| apache-2.0 |
wwj718/edx-platform | common/djangoapps/student/management/tests/test_transfer_students.py | 122 | 6240 | """
Tests the transfer student management command
"""
from django.conf import settings
from mock import patch, call
from opaque_keys.edx import locator
import unittest
import ddt
from shoppingcart.models import Order, CertificateItem # pylint: disable=import-error
from course_modes.models import CourseMode
from student.management.commands import transfer_students
from student.models import CourseEnrollment, UNENROLL_DONE, EVENT_NAME_ENROLLMENT_DEACTIVATED, \
EVENT_NAME_ENROLLMENT_ACTIVATED, EVENT_NAME_ENROLLMENT_MODE_CHANGED
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
@ddt.ddt
class TestTransferStudents(ModuleStoreTestCase):
    """Tests for transferring students between courses."""
    PASSWORD = 'test'
    # Flipped to True by assert_unenroll_signal when UNENROLL_DONE fires.
    signal_fired = False
    def setUp(self, **kwargs):
        """Connect a stub receiver, and analytics event tracking."""
        super(TestTransferStudents, self).setUp()
        UNENROLL_DONE.connect(self.assert_unenroll_signal)
        patcher = patch('student.models.tracker')
        self.mock_tracker = patcher.start()
        self.addCleanup(patcher.stop)
        self.addCleanup(UNENROLL_DONE.disconnect, self.assert_unenroll_signal)
    def assert_unenroll_signal(self, skip_refund=False, **kwargs):  # pylint: disable=unused-argument
        """ Signal Receiver stub for testing that the unenroll signal was fired. """
        self.assertFalse(self.signal_fired)
        self.assertTrue(skip_refund)
        self.signal_fired = True
    def test_transfer_students(self):
        """ Verify the transfer student command works as intended. """
        student = UserFactory.create()
        student.set_password(self.PASSWORD)
        student.save()
        mode = 'verified'
        # Original Course
        original_course_location = locator.CourseLocator('Org0', 'Course0', 'Run0')
        course = self._create_course(original_course_location)
        # Enroll the student in 'verified'
        CourseEnrollment.enroll(student, course.id, mode="verified")
        # Create and purchase a verified cert for the original course.
        self._create_and_purchase_verified(student, course.id)
        # New Course 1
        course_location_one = locator.CourseLocator('Org1', 'Course1', 'Run1')
        new_course_one = self._create_course(course_location_one)
        # New Course 2
        course_location_two = locator.CourseLocator('Org2', 'Course2', 'Run2')
        new_course_two = self._create_course(course_location_two)
        original_key = unicode(course.id)
        new_key_one = unicode(new_course_one.id)
        new_key_two = unicode(new_course_two.id)
        # Run the actual management command
        transfer_students.Command().handle(
            source_course=original_key, dest_course_list=new_key_one + "," + new_key_two
        )
        self.assertTrue(self.signal_fired)
        # Confirm the analytics event was emitted.
        self.mock_tracker.emit.assert_has_calls(  # pylint: disable=maybe-no-member
            [
                call(
                    EVENT_NAME_ENROLLMENT_ACTIVATED,
                    {'course_id': original_key, 'user_id': student.id, 'mode': mode}
                ),
                call(
                    EVENT_NAME_ENROLLMENT_MODE_CHANGED,
                    {'course_id': original_key, 'user_id': student.id, 'mode': mode}
                ),
                call(
                    EVENT_NAME_ENROLLMENT_DEACTIVATED,
                    {'course_id': original_key, 'user_id': student.id, 'mode': mode}
                ),
                call(
                    EVENT_NAME_ENROLLMENT_ACTIVATED,
                    {'course_id': new_key_one, 'user_id': student.id, 'mode': mode}
                ),
                call(
                    EVENT_NAME_ENROLLMENT_MODE_CHANGED,
                    {'course_id': new_key_one, 'user_id': student.id, 'mode': mode}
                ),
                call(
                    EVENT_NAME_ENROLLMENT_ACTIVATED,
                    {'course_id': new_key_two, 'user_id': student.id, 'mode': mode}
                ),
                call(
                    EVENT_NAME_ENROLLMENT_MODE_CHANGED,
                    {'course_id': new_key_two, 'user_id': student.id, 'mode': mode}
                )
            ]
        )
        self.mock_tracker.reset_mock()
        # Confirm the enrollment mode is verified on the new courses, and enrollment is enabled as appropriate.
        self.assertEquals((mode, False), CourseEnrollment.enrollment_mode_for_user(student, course.id))
        self.assertEquals((mode, True), CourseEnrollment.enrollment_mode_for_user(student, new_course_one.id))
        self.assertEquals((mode, True), CourseEnrollment.enrollment_mode_for_user(student, new_course_two.id))
        # Confirm the student has not be refunded.
        target_certs = CertificateItem.objects.filter(
            course_id=course.id, user_id=student, status='purchased', mode=mode
        )
        self.assertTrue(target_certs[0])
        self.assertFalse(target_certs[0].refund_requested_time)
        self.assertEquals(target_certs[0].order.status, 'purchased')
    def _create_course(self, course_location):
        """ Creates a course """
        return CourseFactory.create(
            org=course_location.org,
            number=course_location.course,
            run=course_location.run
        )
    def _create_and_purchase_verified(self, student, course_id):
        """ Creates a verified mode for the course and purchases it for the student. """
        course_mode = CourseMode(course_id=course_id,
                                 mode_slug="verified",
                                 mode_display_name="verified cert",
                                 min_price=50)
        course_mode.save()
        # When there is no expiration date on a verified mode, the user can always get a refund
        cart = Order.get_cart_for_user(user=student)
        CertificateItem.add_to_order(cart, course_id, 50, 'verified')
        cart.purchase()
| agpl-3.0 |
dbiesecke/dbiesecke.github.io | repo/service.vpn.manager/infopopup.py | 1 | 1232 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016 Zomboided
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This module pops up a screen with a bunch of info on the system.
# It can be mapped to a button on the remote.
import xbmcaddon
import xbmcgui
from libs.sysbox import popupSysBox
from libs.utility import debugTrace, errorTrace, infoTrace, getID, getName
# Trace entry/exit so the flow is visible in the debug log.
debugTrace("-- Entered infopopup.py")
if getID() != "":
    # The add-on ID resolved, so the VPN service is available: show the box.
    popupSysBox()
else:
    errorTrace("infopopup.py", "VPN service is not ready")
debugTrace("-- Exit infopopup.py --")
guettli/django | django/conf/locale/el/formats.py | 58 | 1452 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# Display formats use Django date-format characters ('P' is the localized
# 12-hour time with a.m./p.m.; 'F'/'j' are month name and day of month).
DATE_FORMAT = 'd/m/Y'
TIME_FORMAT = 'P'
DATETIME_FORMAT = 'd/m/Y P'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd/m/Y'
SHORT_DATETIME_FORMAT = 'd/m/Y P'
FIRST_DAY_OF_WEEK = 0  # Sunday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Formats are tried in order; the first that parses wins.
DATE_INPUT_FORMATS = [
    '%d/%m/%Y', '%d/%m/%y', '%Y-%m-%d',  # '25/10/2006', '25/10/06', '2006-10-25',
]
DATETIME_INPUT_FORMATS = [
    '%d/%m/%Y %H:%M:%S',     # '25/10/2006 14:30:59'
    '%d/%m/%Y %H:%M:%S.%f',  # '25/10/2006 14:30:59.000200'
    '%d/%m/%Y %H:%M',        # '25/10/2006 14:30'
    '%d/%m/%Y',              # '25/10/2006'
    '%d/%m/%y %H:%M:%S',     # '25/10/06 14:30:59'
    '%d/%m/%y %H:%M:%S.%f',  # '25/10/06 14:30:59.000200'
    '%d/%m/%y %H:%M',        # '25/10/06 14:30'
    '%d/%m/%y',              # '25/10/06'
    '%Y-%m-%d %H:%M:%S',     # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f',  # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M',        # '2006-10-25 14:30'
    '%Y-%m-%d',              # '2006-10-25'
]
# Greek number formatting: comma decimal mark, dot thousands separator.
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| bsd-3-clause |
manqala/erpnext | erpnext/setup/doctype/currency_exchange/test_currency_exchange.py | 12 | 3584 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, unittest
from erpnext.setup.utils import get_exchange_rate
test_records = frappe.get_test_records('Currency Exchange')
def save_new_records(test_records):
	"""Create or update Currency Exchange fixtures from *test_records*.

	For each record, first try to update the exchange_rate of the existing
	document (named "<date>-<from_currency>-<to_currency>"); if no such
	document exists yet, insert a new Currency Exchange document instead.
	"""
	for record in test_records:
		docname = "-".join([
			record.get("date"),
			record.get("from_currency"),
			record.get("to_currency"),
		])
		try:
			# Fast path: the document already exists, only update the rate.
			frappe.set_value(
				doctype=record.get("doctype"),
				docname=docname,
				fieldname="exchange_rate",
				value=record.get("exchange_rate"),
			)
		except frappe.DoesNotExistError:
			# First run: create the exchange document from the fixture fields.
			curr_exchange = frappe.new_doc(record.get("doctype"))
			curr_exchange.date = record["date"]
			curr_exchange.from_currency = record["from_currency"]
			curr_exchange.to_currency = record["to_currency"]
			curr_exchange.exchange_rate = record["exchange_rate"]
			curr_exchange.insert()
class TestCurrencyExchange(unittest.TestCase):
def clear_cache(self):
cache = frappe.cache()
key = "currency_exchange_rate:{0}:{1}".format("USD", "INR")
cache.delete(key)
def tearDown(self):
frappe.db.set_value("Accounts Settings", None, "allow_stale", 1)
self.clear_cache()
def test_exchange_rate(self):
save_new_records(test_records)
frappe.db.set_value("Accounts Settings", None, "allow_stale", 1)
# Start with allow_stale is True
exchange_rate = get_exchange_rate("USD", "INR", "2016-01-01")
self.assertEqual(exchange_rate, 60.0)
exchange_rate = get_exchange_rate("USD", "INR", "2016-01-15")
self.assertEqual(exchange_rate, 65.1)
exchange_rate = get_exchange_rate("USD", "INR", "2016-01-30")
self.assertEqual(exchange_rate, 62.9)
# Exchange rate as on 15th Dec, 2015, should be fetched from fixer.io
self.clear_cache()
exchange_rate = get_exchange_rate("USD", "INR", "2015-12-15")
self.assertFalse(exchange_rate == 60)
self.assertEqual(exchange_rate, 66.894)
def test_exchange_rate_strict(self):
# strict currency settings
frappe.db.set_value("Accounts Settings", None, "allow_stale", 0)
frappe.db.set_value("Accounts Settings", None, "stale_days", 1)
exchange_rate = get_exchange_rate("USD", "INR", "2016-01-01")
self.assertEqual(exchange_rate, 60.0)
# Will fetch from fixer.io
self.clear_cache()
exchange_rate = get_exchange_rate("USD", "INR", "2016-01-15")
self.assertEqual(exchange_rate, 67.79)
exchange_rate = get_exchange_rate("USD", "INR", "2016-01-30")
self.assertEqual(exchange_rate, 62.9)
# Exchange rate as on 15th Dec, 2015, should be fetched from fixer.io
self.clear_cache()
exchange_rate = get_exchange_rate("USD", "INR", "2015-12-15")
self.assertEqual(exchange_rate, 66.894)
exchange_rate = get_exchange_rate("INR", "NGN", "2016-01-10")
self.assertEqual(exchange_rate, 65.1)
# NGN is not available on fixer.io so these should return 0
exchange_rate = get_exchange_rate("INR", "NGN", "2016-01-09")
self.assertEqual(exchange_rate, 0)
exchange_rate = get_exchange_rate("INR", "NGN", "2016-01-11")
self.assertEqual(exchange_rate, 0)
def test_exchange_rate_strict_switched(self):
    """Switching allow_stale off mid-session must not keep serving the rate
    that was valid (and cached) while stale rates were still allowed."""
    # Start with allow_stale is True
    exchange_rate = get_exchange_rate("USD", "INR", "2016-01-15")
    self.assertEqual(exchange_rate, 65.1)
    frappe.db.set_value("Accounts Settings", None, "allow_stale", 0)
    frappe.db.set_value("Accounts Settings", None, "stale_days", 1)
    # Will fetch from fixer.io once the cached 65.1 value is dropped.
    self.clear_cache()
    exchange_rate = get_exchange_rate("USD", "INR", "2016-01-15")
    self.assertEqual(exchange_rate, 67.79)
Glasgow2015/team-10 | env/lib/python2.7/site-packages/django/conf/urls/i18n.py | 310 | 1196 | import warnings
from django.conf import settings
from django.conf.urls import patterns, url
from django.core.urlresolvers import LocaleRegexURLResolver
from django.utils import six
from django.utils.deprecation import RemovedInDjango110Warning
from django.views.i18n import set_language
def i18n_patterns(prefix, *args):
    """
    Prefix every URL pattern passed to this function with the active
    language code. May only be used in the root URLconf, never in an
    included URLconf.
    """
    if not isinstance(prefix, six.string_types):
        # Modern call style: a list of url() instances (prefix is the first).
        url_list = [prefix] + list(args)
    else:
        # Legacy call style with a string prefix and tuples; deprecated.
        warnings.warn(
            "Calling i18n_patterns() with the `prefix` argument and with tuples "
            "instead of django.conf.urls.url() instances is deprecated and "
            "will no longer work in Django 1.10. Use a list of "
            "django.conf.urls.url() instances instead.",
            RemovedInDjango110Warning, stacklevel=2
        )
        url_list = patterns(prefix, *args)
    if not settings.USE_I18N:
        # i18n disabled: return the patterns unwrapped, with no language prefix.
        return url_list
    return [LocaleRegexURLResolver(url_list)]
# Default i18n URLconf: POSTing to setlang/ switches the active language.
urlpatterns = [
    url(r'^setlang/$', set_language, name='set_language'),
]
| apache-2.0 |
vitaly4uk/django | tests/template_backends/test_jinja2.py | 315 | 3048 | # Since this package contains a "jinja2" directory, this is required to
# silence an ImportWarning warning on Python 2.
from __future__ import absolute_import
from unittest import skipIf
from django.template import TemplateSyntaxError
from .test_dummy import TemplateStringsTests
# jinja2 is an optional dependency: when it is missing, both names fall back
# to None so the @skipIf decorator below can skip the whole test class.
try:
    import jinja2
except ImportError:
    jinja2 = None
    Jinja2 = None
else:
    from django.template.backends.jinja2 import Jinja2
@skipIf(jinja2 is None, "this test requires jinja2")
class Jinja2Tests(TemplateStringsTests):
    """Run the shared template-backend suite against the Jinja2 backend,
    plus Jinja2-specific checks for template origins and debug info."""

    engine_class = Jinja2
    backend_name = 'jinja2'
    # keep_trailing_newline: presumably needed so rendered output matches the
    # shared TemplateStringsTests expectations — confirm against test_dummy.
    options = {'keep_trailing_newline': True}

    def test_origin(self):
        """File-loaded templates record their path and template name."""
        template = self.engine.get_template('template_backends/hello.html')
        self.assertTrue(template.origin.name.endswith('hello.html'))
        self.assertEqual(template.origin.template_name, 'template_backends/hello.html')

    def test_origin_from_string(self):
        """String templates get a placeholder origin and no template name."""
        template = self.engine.from_string('Hello!\n')
        self.assertEqual(template.origin.name, '<template>')
        self.assertEqual(template.origin.template_name, None)

    def test_self_context(self):
        """
        Using 'self' in the context should not throw errors (#24538).
        """
        # self will be overridden to be a TemplateReference, so the self
        # variable will not come through. Attempting to use one though should
        # not throw an error.
        template = self.engine.from_string('hello {{ foo }}!')
        content = template.render(context={'self': 'self', 'foo': 'world'})
        self.assertEqual(content, 'hello world!')

    def test_exception_debug_info_min_context(self):
        """A one-line broken template yields a debug window covering line 1."""
        with self.assertRaises(TemplateSyntaxError) as e:
            self.engine.get_template('template_backends/syntax_error.html')
        debug = e.exception.template_debug
        self.assertEqual(debug['after'], '')
        self.assertEqual(debug['before'], '')
        self.assertEqual(debug['during'], '{% block %}')
        self.assertEqual(debug['bottom'], 1)
        self.assertEqual(debug['top'], 0)
        self.assertEqual(debug['line'], 1)
        self.assertEqual(debug['total'], 1)
        self.assertEqual(len(debug['source_lines']), 1)
        self.assertTrue(debug['name'].endswith('syntax_error.html'))
        self.assertTrue('message' in debug)

    def test_exception_debug_info_max_context(self):
        """For a long template the debug window is capped around the bad line
        (lines 5-26 around line 16 of a 31-line template here)."""
        with self.assertRaises(TemplateSyntaxError) as e:
            self.engine.get_template('template_backends/syntax_error2.html')
        debug = e.exception.template_debug
        self.assertEqual(debug['after'], '')
        self.assertEqual(debug['before'], '')
        self.assertEqual(debug['during'], '{% block %}')
        self.assertEqual(debug['bottom'], 26)
        self.assertEqual(debug['top'], 5)
        self.assertEqual(debug['line'], 16)
        self.assertEqual(debug['total'], 31)
        self.assertEqual(len(debug['source_lines']), 21)
        self.assertTrue(debug['name'].endswith('syntax_error2.html'))
        self.assertTrue('message' in debug)
| bsd-3-clause |
devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/oauth2_provider/views/generic.py | 6 | 1035 | from django.views.generic import View
from ..settings import oauth2_settings
from .mixins import ProtectedResourceMixin, ScopedResourceMixin, ReadWriteScopedResourceMixin
class ProtectedResourceView(ProtectedResourceMixin, View):
    """
    Generic view protecting resources by providing OAuth2 authentication out of the box.
    """
    # All three collaborator classes are read from oauth2_settings so a
    # project can swap in its own server/validator/backend implementations.
    server_class = oauth2_settings.OAUTH2_SERVER_CLASS
    validator_class = oauth2_settings.OAUTH2_VALIDATOR_CLASS
    oauthlib_backend_class = oauth2_settings.OAUTH2_BACKEND_CLASS
class ScopedProtectedResourceView(ScopedResourceMixin, ProtectedResourceView):
    """
    Generic view protecting resources by providing OAuth2 authentication and Scopes handling
    out of the box.
    """
    # Behavior comes entirely from the mixin + base class; nothing to add.
    pass
class ReadWriteScopedResourceView(ReadWriteScopedResourceMixin, ProtectedResourceView):
    """
    Generic view protecting resources with OAuth2 authentication and read/write scopes.
    GET, HEAD, OPTIONS http methods require "read" scope. Otherwise "write" scope is required.
    """
    # Method-to-scope mapping is implemented in ReadWriteScopedResourceMixin.
    pass
| agpl-3.0 |
msebire/intellij-community | python/helpers/pydev/pydev_ipython/inputhookwx.py | 50 | 6386 | # encoding: utf-8
"""
Enable wxPython to be used interacive by setting PyOS_InputHook.
Authors: Robin Dunn, Brian Granger, Ondrej Certik
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import sys
import signal
from _pydev_imps._pydev_saved_modules import time
from timeit import default_timer as clock
import wx
from pydev_ipython.inputhook import stdin_ready
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def inputhook_wx1():
    """Run the wx event loop by processing pending events only.

    This approach seems to work, but its performance is not great as it
    relies on having PyOS_InputHook called regularly.
    """
    try:
        app = wx.GetApp() # @UndefinedVariable
        if app is not None:
            # Input hooks must only run on the GUI (main) thread.
            assert wx.Thread_IsMain() # @UndefinedVariable
            # Make a temporary event loop and process system events until
            # there are no more waiting, then allow idle events (which
            # will also deal with pending or posted wx events.)
            evtloop = wx.EventLoop() # @UndefinedVariable
            ea = wx.EventLoopActivator(evtloop) # @UndefinedVariable
            while evtloop.Pending():
                evtloop.Dispatch()
            app.ProcessIdle()
            # Deleting the activator restores the previously active loop.
            del ea
    except KeyboardInterrupt:
        # Ctrl-C while idle must not propagate out of the input hook.
        pass
    # Input hook protocol: return 0 on normal completion.
    return 0
class EventLoopTimer(wx.Timer): # @UndefinedVariable
    """A wx.Timer that invokes a fixed callback every time it fires."""

    def __init__(self, func):
        wx.Timer.__init__(self) # @UndefinedVariable
        self.func = func

    def Notify(self):
        # Called by wx on each timer tick.
        self.func()
class EventLoopRunner(object):
    """Run a wx event loop while periodically polling stdin; exit the loop
    as soon as user input is available."""

    def Run(self, time):
        # `time` is the polling period in milliseconds between stdin checks.
        self.evtloop = wx.EventLoop() # @UndefinedVariable
        self.timer = EventLoopTimer(self.check_stdin)
        self.timer.Start(time)
        # Blocks here until check_stdin() calls Exit().
        self.evtloop.Run()

    def check_stdin(self):
        # Timer callback: leave the wx loop once input is waiting on stdin.
        if stdin_ready():
            self.timer.Stop()
            self.evtloop.Exit()
def inputhook_wx2():
    """Run the wx event loop, polling for stdin.

    This version runs the wx eventloop for an undetermined amount of time,
    during which it periodically checks to see if anything is ready on
    stdin. If anything is ready on stdin, the event loop exits.

    The argument to elr.Run controls how often the event loop looks at stdin.
    This determines the responsiveness at the keyboard. A setting of 1000
    enables a user to type at most 1 char per second. I have found that a
    setting of 10 gives good keyboard response. We can shorten it further,
    but eventually performance would suffer from calling select/kbhit too
    often.
    """
    try:
        app = wx.GetApp() # @UndefinedVariable
        if app is not None:
            # Input hooks must only run on the GUI (main) thread.
            assert wx.Thread_IsMain() # @UndefinedVariable
            elr = EventLoopRunner()
            # As this time is made shorter, keyboard response improves, but idle
            # CPU load goes up. 10 ms seems like a good compromise.
            elr.Run(time=10) # CHANGE time here to control polling interval
    except KeyboardInterrupt:
        # Ctrl-C while idle must not propagate out of the input hook.
        pass
    # Input hook protocol: return 0 on normal completion.
    return 0
def inputhook_wx3():
    """Run the wx event loop by processing pending events only.

    This is like inputhook_wx1, but it keeps processing pending events
    until stdin is ready. After processing all pending events, a call to
    time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%.
    This sleep time should be tuned though for best performance.
    """
    # We need to protect against a user pressing Control-C when IPython is
    # idle and this is running. We trap KeyboardInterrupt and pass.
    try:
        app = wx.GetApp() # @UndefinedVariable
        if app is not None:
            assert wx.Thread_IsMain() # @UndefinedVariable

            # The import of wx on Linux sets the handler for signal.SIGINT
            # to 0. This is a bug in wx or gtk. We fix by just setting it
            # back to the Python default.
            if not callable(signal.getsignal(signal.SIGINT)):
                signal.signal(signal.SIGINT, signal.default_int_handler)

            evtloop = wx.EventLoop() # @UndefinedVariable
            ea = wx.EventLoopActivator(evtloop) # @UndefinedVariable
            # `t` tracks the time of the last dispatched GUI event; the gap
            # since then decides how long we may sleep below.
            t = clock()
            while not stdin_ready():
                while evtloop.Pending():
                    t = clock()
                    evtloop.Dispatch()
                app.ProcessIdle()
                # We need to sleep at this point to keep the idle CPU load
                # low. However, if sleep to long, GUI response is poor. As
                # a compromise, we watch how often GUI events are being processed
                # and switch between a short and long sleep time. Here are some
                # stats useful in helping to tune this.
                # time CPU load
                # 0.001 13%
                # 0.005 3%
                # 0.01 1.5%
                # 0.05 0.5%
                used_time = clock() - t
                if used_time > 10.0:
                    # print 'Sleep for 1 s' # dbg
                    time.sleep(1.0)
                elif used_time > 0.1:
                    # Few GUI events coming in, so we can sleep longer
                    # print 'Sleep for 0.05 s' # dbg
                    time.sleep(0.05)
                else:
                    # Many GUI events coming in, so sleep only very little
                    time.sleep(0.001)
            # Deleting the activator restores the previously active loop.
            del ea
    except KeyboardInterrupt:
        pass
    # Input hook protocol: return 0 on normal completion.
    return 0
# Pick the hook implementation for this platform.
if sys.platform == 'darwin':
    # On OSX, evtloop.Pending() always returns True, regardless of there being
    # any events pending. As such we can't use implementations 1 or 3 of the
    # inputhook as those depend on a pending/dispatch loop.
    inputhook_wx = inputhook_wx2
else:
    # This is our default implementation
    inputhook_wx = inputhook_wx3
| apache-2.0 |
eleonrk/SickRage | lib/pbr/hooks/metadata.py | 101 | 1076 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pbr.hooks import base
from pbr import packaging
class MetadataConfig(base.BaseConfig):
    """Setup hook that post-processes the [metadata] section of setup.cfg."""

    section = 'metadata'

    def hook(self):
        # Resolve the package version (keeping any explicitly configured one
        # as a hint) and append parsed requirements to requires_dist.
        self.config['version'] = packaging.get_version(
            self.config['name'], self.config.get('version', None))
        packaging.append_text_list(
            self.config, 'requires_dist',
            packaging.parse_requirements())

    def get_name(self):
        """Return the package name from the parsed config."""
        return self.config['name']
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.