repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
iYgnohZ/crack-geetest | geetest/geetest.py | 1 | 4035 | # -*- coding: utf-8 -*-
import time
import uuid
import StringIO
from PIL import Image
from selenium.webdriver.common.action_chains import ActionChains
class BaseGeetestCrack(object):
    """Base class for cracking the Geetest slider CAPTCHA with Selenium.

    Subclasses implement :meth:`crack` to drive the site-specific flow.
    NOTE: this module targets Python 2 (``xrange``, ``StringIO`` and
    ``uuid4().get_hex()`` are Python-2-only APIs).
    """

    def __init__(self, driver):
        # driver: a selenium WebDriver. The window is maximized so the
        # hard-coded screenshot coordinates (see crop_captcha_image) stay
        # stable between runs.
        self.driver = driver
        self.driver.maximize_window()

    def input_by_id(self, text=u"中国移动", element_id="keyword_qycx"):
        """Type the search keyword into the query input box.

        :text: unicode, the text to type
        :element_id: DOM id of the input element
        """
        input_el = self.driver.find_element_by_id(element_id)
        input_el.clear()
        input_el.send_keys(text)
        time.sleep(3.5)

    def click_by_id(self, element_id="popup-submit"):
        """Click the search button.

        :element_id: DOM id of the search button
        """
        search_el = self.driver.find_element_by_id(element_id)
        search_el.click()
        time.sleep(3.5)

    def calculate_slider_offset(self):
        """Compute the slider's target x-offset.

        Must be called after the search button has been clicked. Crops the
        CAPTCHA, nudges the slider by 5px to reveal the gap, crops again,
        and scans (starting at x=45) for the first column where the two
        images differ.

        :returns: int offset, or False if the two crops differ in size
        """
        img1 = self.crop_captcha_image()
        self.drag_and_drop(x_offset=5)
        img2 = self.crop_captcha_image()
        w1, h1 = img1.size
        w2, h2 = img2.size
        if w1 != w2 or h1 != h2:
            return False
        left = 0
        flag = False
        for i in xrange(45, w1):
            for j in xrange(h1):
                if not self.is_pixel_equal(img1, img2, i, j):
                    left = i
                    flag = True
                    break
            if flag:
                break
        if left == 45:
            # presumably compensates for the initial 5px nudge when the gap
            # sits right at the scan start -- TODO confirm
            left -= 2
        return left

    def is_pixel_equal(self, img1, img2, x, y):
        """Return True when the pixel at (x, y) matches in both images.

        Pixels match when every RGB channel differs by less than 60.
        """
        pix1 = img1.load()[x, y]
        pix2 = img2.load()[x, y]
        # Bug fix: the original wrote abs(pix1[0] - pix2[0] < 60), taking
        # abs() of a *boolean*, which wrongly reported a match whenever
        # pix1's channel was smaller than pix2's.
        return (abs(pix1[0] - pix2[0]) < 60
                and abs(pix1[1] - pix2[1]) < 60
                and abs(pix1[2] - pix2[2]) < 60)

    def crop_captcha_image(self, element_id="gt_box"):
        """Screenshot the page and crop out the CAPTCHA image.

        :element_id: CSS *class* name of the CAPTCHA element (despite the
            parameter name, the lookup is by class name)
        :returns: PIL.Image, the cropped CAPTCHA (also saved to disk)
        """
        captcha_el = self.driver.find_element_by_class_name(element_id)
        location = captcha_el.location
        size = captcha_el.size
        left = int(location['x'])
        top = int(location['y'])
        # Hard-coded crop box: the element-derived coordinates above are
        # overridden, presumably because location/size were unreliable on
        # the target page -- TODO confirm.
        left = 1010
        top = 535
        # right = left + int(size['width'])
        # bottom = top + int(size['height'])
        right = left + 523
        bottom = top + 235
        print(left, top, right, bottom)
        screenshot = self.driver.get_screenshot_as_png()
        screenshot = Image.open(StringIO.StringIO(screenshot))
        captcha = screenshot.crop((left, top, right, bottom))
        captcha.save("%s.png" % uuid.uuid4().get_hex())
        return captcha

    def get_browser_name(self):
        """Return the browser name parsed from the WebDriver's repr,
        e.g. 'selenium.webdriver.firefox...' -> 'firefox'."""
        return str(self.driver).split('.')[2]

    def drag_and_drop(self, x_offset=0, y_offset=0, element_class="gt_slider_knob"):
        """Drag the slider knob by the given offset.

        :x_offset: x offset relative to the knob
        :y_offset: y offset relative to the knob
        :element_class: CSS class name of the knob element
        """
        dragger = self.driver.find_element_by_class_name(element_class)
        action = ActionChains(self.driver)
        action.drag_and_drop_by_offset(dragger, x_offset, y_offset).perform()
        # This delay is required: wait for the widget to settle back after
        # the drag.
        time.sleep(8)

    def move_to_element(self, element_class="gt_slider_knob"):
        """Hover the mouse over the given element.

        :element_class: CSS class name of the target element
        """
        time.sleep(3)
        element = self.driver.find_element_by_class_name(element_class)
        action = ActionChains(self.driver)
        action.move_to_element(element).perform()
        time.sleep(4.5)

    def crack(self):
        """Run the cracking procedure; must be implemented by subclasses."""
        raise NotImplementedError
| mit |
scztt/supercollider | editors/sced/scedwin/py/__init__.py | 44 | 1659 | # sced (SuperCollider mode for gedit)
#
# Copyright 2012 Jakob Leben
# Copyright 2009 Artem Popov and other contributors (see AUTHORS)
#
# sced is free software:
# you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gedit
import gtk
from WindowHelper import WindowHelper
import Settings
from ConfigurationDialog import ConfigurationDialog
class ScedPlugin(gedit.Plugin):
    """gedit plugin entry point: maintains one WindowHelper per window."""

    def __init__(self):
        gedit.Plugin.__init__(self)
        # Map of gedit window -> WindowHelper driving that window.
        self.__instances = {}
        # Settings are loaded lazily on first access; see settings().
        self.__settings = None

    def activate(self, window):
        """Attach a helper to a newly-activated window."""
        self.__instances[window] = WindowHelper(self, window)

    def deactivate(self, window):
        """Tear down and forget the helper for a closing window."""
        helper = self.__instances[window]
        helper.deactivate()
        del self.__instances[window]

    def update_ui(self, window):
        """Forward UI-refresh notifications to the window's helper."""
        self.__instances[window].update_ui()

    def create_configure_dialog(self):
        """Return the plugin's configuration dialog."""
        return ConfigurationDialog(self)

    def settings(self):
        """Return the plugin settings, loading them on first use."""
        cached = self.__settings
        if cached is None:
            cached = self.__settings = Settings.load()
        return cached
| gpl-3.0 |
purpleidea/macaronic-net | django/forms/models.py | 152 | 42947 | """
Helper functions for creating Form classes from Django models
and database field objects.
"""
from django.utils.encoding import smart_unicode, force_unicode
from django.utils.datastructures import SortedDict
from django.utils.text import get_text_list, capfirst
from django.utils.translation import ugettext_lazy as _, ugettext
from django.core.exceptions import ValidationError, NON_FIELD_ERRORS, \
FieldError
from django.core.validators import EMPTY_VALUES
from util import ErrorList
from forms import BaseForm, get_declared_fields
from fields import Field, ChoiceField
from widgets import SelectMultiple, HiddenInput, MultipleHiddenInput
from widgets import media_property
from formsets import BaseFormSet, formset_factory
# Public API of this module.
__all__ = (
    'ModelForm', 'BaseModelForm', 'model_to_dict', 'fields_for_model',
    'save_instance', 'ModelChoiceField', 'ModelMultipleChoiceField',
)
def construct_instance(form, instance, fields=None, exclude=None):
    """
    Constructs and returns a model instance from the bound ``form``'s
    ``cleaned_data``, but does not save the returned instance to the
    database.
    """
    from django.db import models
    opts = instance._meta
    cleaned_data = form.cleaned_data
    deferred_files = []
    for field in opts.fields:
        # Skip non-editable fields, auto PKs, and anything the form didn't
        # clean or that the fields/exclude options filter out.
        if not field.editable or isinstance(field, models.AutoField):
            continue
        if field.name not in cleaned_data:
            continue
        if fields is not None and field.name not in fields:
            continue
        if exclude and field.name in exclude:
            continue
        if isinstance(field, models.FileField):
            # File-type fields are saved last so a callable upload_to can
            # see the values already assigned from the other fields.
            deferred_files.append(field)
        else:
            field.save_form_data(instance, cleaned_data[field.name])
    for field in deferred_files:
        field.save_form_data(instance, cleaned_data[field.name])
    return instance
def save_instance(form, instance, fields=None, fail_message='saved',
                  commit=True, exclude=None, construct=True):
    """
    Saves bound Form ``form``'s cleaned_data into model instance ``instance``.
    If commit=True, then the changes to ``instance`` will be saved to the
    database. Returns ``instance``.
    If construct=False, assume ``instance`` has already been constructed and
    just needs to be saved.
    """
    if construct:
        instance = construct_instance(form, instance, fields, exclude)
    opts = instance._meta
    if form.errors:
        raise ValueError("The %s could not be %s because the data didn't"
                         " validate." % (opts.object_name, fail_message))

    def save_m2m():
        # Saving many-to-many data requires a primary key, so it can only
        # run once the instance itself has been saved.
        cleaned_data = form.cleaned_data
        for f in opts.many_to_many:
            if fields and f.name not in fields:
                continue
            if f.name in cleaned_data:
                f.save_form_data(instance, cleaned_data[f.name])

    if not commit:
        # Not committing: expose the m2m save on the form so the caller can
        # invoke it after saving the instance themselves.
        form.save_m2m = save_m2m
    else:
        # Committing: save the instance and the m2m data immediately.
        instance.save()
        save_m2m()
    return instance
# ModelForms #################################################################
def model_to_dict(instance, fields=None, exclude=None):
    """
    Returns a dict containing the data in ``instance`` suitable for passing as
    a Form's ``initial`` keyword argument.
    ``fields`` is an optional list of field names. If provided, only the named
    fields will be included in the returned dict.
    ``exclude`` is an optional list of field names. If provided, the named
    fields will be excluded from the returned dict, even if they are listed in
    the ``fields`` argument.
    """
    # avoid a circular import
    from django.db.models.fields.related import ManyToManyField
    opts = instance._meta
    data = {}
    for f in opts.fields + opts.many_to_many:
        if not f.editable:
            continue
        if fields and f.name not in fields:
            continue
        if exclude and f.name in exclude:
            continue
        if not isinstance(f, ManyToManyField):
            data[f.name] = f.value_from_object(instance)
        elif instance.pk is None:
            # Unsaved instance: f.value_from_object would raise, so report
            # the m2m field as empty.
            data[f.name] = []
        else:
            # MultipleChoiceWidget wants a list of pks, not object instances.
            data[f.name] = [obj.pk for obj in f.value_from_object(instance)]
    return data
def fields_for_model(model, fields=None, exclude=None, widgets=None, formfield_callback=None):
    """
    Returns a ``SortedDict`` containing form fields for the given model.
    ``fields`` is an optional list of field names. If provided, only the named
    fields will be included in the returned fields.
    ``exclude`` is an optional list of field names. If provided, the named
    fields will be excluded from the returned fields, even if they are listed
    in the ``fields`` argument.
    """
    field_list = []
    ignored = []  # model fields that produced no form field
    opts = model._meta
    for f in opts.fields + opts.many_to_many:
        if not f.editable:
            continue
        if fields is not None and f.name not in fields:
            continue
        if exclude and f.name in exclude:
            continue
        kwargs = {}
        if widgets and f.name in widgets:
            kwargs['widget'] = widgets[f.name]
        if formfield_callback is None:
            formfield = f.formfield(**kwargs)
        elif not callable(formfield_callback):
            raise TypeError('formfield_callback must be a function or callable')
        else:
            formfield = formfield_callback(f, **kwargs)
        if formfield:
            field_list.append((f.name, formfield))
        else:
            ignored.append(f.name)
    field_dict = SortedDict(field_list)
    if fields:
        # Reorder to match the requested ``fields`` order, dropping excluded
        # and ignored names.
        ordered = []
        for name in fields:
            if name in ignored:
                continue
            if exclude and name in exclude:
                continue
            ordered.append((name, field_dict.get(name)))
        field_dict = SortedDict(ordered)
    return field_dict
class ModelFormOptions(object):
    """Normalized container for the options declared on a ModelForm's
    inner ``Meta`` class; every attribute defaults to None when absent."""

    def __init__(self, options=None):
        for name in ('model', 'fields', 'exclude', 'widgets'):
            setattr(self, name, getattr(options, name, None))
class ModelFormMetaclass(type):
    """Metaclass for ModelForm: merges the fields declared on the form
    class with fields generated from ``Meta.model`` into ``base_fields``."""
    def __new__(cls, name, bases, attrs):
        formfield_callback = attrs.pop('formfield_callback', None)
        try:
            parents = [b for b in bases if issubclass(b, ModelForm)]
        except NameError:
            # We are defining ModelForm itself.
            parents = None
        declared_fields = get_declared_fields(bases, attrs, False)
        new_class = super(ModelFormMetaclass, cls).__new__(cls, name, bases,
                attrs)
        if not parents:
            # ModelForm itself (or a non-ModelForm base): nothing to build.
            return new_class

        if 'media' not in attrs:
            new_class.media = media_property(new_class)
        opts = new_class._meta = ModelFormOptions(getattr(new_class, 'Meta', None))
        if opts.model:
            # If a model is defined, extract form fields from it.
            fields = fields_for_model(opts.model, opts.fields,
                                      opts.exclude, opts.widgets, formfield_callback)
            # make sure opts.fields doesn't specify an invalid field: a name
            # mapped to None by fields_for_model and not declared on the form
            # does not exist on the model.
            none_model_fields = [k for k, v in fields.iteritems() if not v]
            missing_fields = set(none_model_fields) - \
                             set(declared_fields.keys())
            if missing_fields:
                message = 'Unknown field(s) (%s) specified for %s'
                message = message % (', '.join(missing_fields),
                                     opts.model.__name__)
                raise FieldError(message)
            # Override default model fields with any custom declared ones
            # (plus, include all the other declared fields).
            fields.update(declared_fields)
        else:
            # No model: behave like a regular Form.
            fields = declared_fields
        new_class.declared_fields = declared_fields
        new_class.base_fields = fields
        return new_class
class BaseModelForm(BaseForm):
    """A Form bound to a model instance.

    Field validation runs first; then ``_post_clean`` pushes cleaned data
    onto ``self.instance`` and runs the model's own ``clean_fields()``,
    ``clean()`` and (if enabled) ``validate_unique()``.
    """
    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=':',
                 empty_permitted=False, instance=None):
        opts = self._meta
        if instance is None:
            if opts.model is None:
                raise ValueError('ModelForm has no model class specified.')
            # if we didn't get an instance, instantiate a new one
            self.instance = opts.model()
            object_data = {}
        else:
            self.instance = instance
            object_data = model_to_dict(instance, opts.fields, opts.exclude)
        # if initial was provided, it should override the values from instance
        if initial is not None:
            object_data.update(initial)
        # self._validate_unique will be set to True by BaseModelForm.clean().
        # It is False by default so overriding self.clean() and failing to call
        # super will stop validate_unique from being called.
        self._validate_unique = False
        super(BaseModelForm, self).__init__(data, files, auto_id, prefix, object_data,
                                            error_class, label_suffix, empty_permitted)

    def _update_errors(self, message_dict):
        # Merge a model-level ValidationError message dict into the form's
        # _errors, field by field.
        for k, v in message_dict.items():
            if k != NON_FIELD_ERRORS:
                self._errors.setdefault(k, self.error_class()).extend(v)
                # Remove the data from the cleaned_data dict since it was invalid
                if k in self.cleaned_data:
                    del self.cleaned_data[k]
        if NON_FIELD_ERRORS in message_dict:
            messages = message_dict[NON_FIELD_ERRORS]
            self._errors.setdefault(NON_FIELD_ERRORS, self.error_class()).extend(messages)

    def _get_validation_exclusions(self):
        """
        For backwards-compatibility, several types of fields need to be
        excluded from model validation. See the following tickets for
        details: #12507, #12521, #12553
        """
        exclude = []
        # Build up a list of fields that should be excluded from model field
        # validation and unique checks.
        for f in self.instance._meta.fields:
            field = f.name
            # Exclude fields that aren't on the form. The developer may be
            # adding these values to the model after form validation.
            if field not in self.fields:
                exclude.append(f.name)

            # Don't perform model validation on fields that were defined
            # manually on the form and excluded via the ModelForm's Meta
            # class.  See #12901.
            elif self._meta.fields and field not in self._meta.fields:
                exclude.append(f.name)
            elif self._meta.exclude and field in self._meta.exclude:
                exclude.append(f.name)

            # Exclude fields that failed form validation. There's no need for
            # the model fields to validate them as well.
            elif field in self._errors.keys():
                exclude.append(f.name)

            # Exclude empty fields that are not required by the form, if the
            # underlying model field is required. This keeps the model field
            # from raising a required error. Note: don't exclude the field from
            # validation if the model field allows blanks. If it does, the blank
            # value may be included in a unique check, so cannot be excluded
            # from validation.
            else:
                form_field = self.fields[field]
                field_value = self.cleaned_data.get(field, None)
                if not f.blank and not form_field.required and field_value in EMPTY_VALUES:
                    exclude.append(f.name)
        return exclude

    def clean(self):
        # Flag checked by _post_clean(); see the note in __init__.
        self._validate_unique = True
        return self.cleaned_data

    def _post_clean(self):
        opts = self._meta
        # Update the model instance with self.cleaned_data.
        self.instance = construct_instance(self, self.instance, opts.fields, opts.exclude)
        exclude = self._get_validation_exclusions()

        # Foreign Keys being used to represent inline relationships
        # are excluded from basic field value validation. This is for two
        # reasons: firstly, the value may not be supplied (#12507; the
        # case of providing new values to the admin); secondly the
        # object being referred to may not yet fully exist (#12749).
        # However, these fields *must* be included in uniqueness checks,
        # so this can't be part of _get_validation_exclusions().
        for f_name, field in self.fields.items():
            if isinstance(field, InlineForeignKeyField):
                exclude.append(f_name)

        # Clean the model instance's fields.
        try:
            self.instance.clean_fields(exclude=exclude)
        except ValidationError, e:
            self._update_errors(e.message_dict)

        # Call the model instance's clean method.
        try:
            self.instance.clean()
        except ValidationError, e:
            self._update_errors({NON_FIELD_ERRORS: e.messages})

        # Validate uniqueness if needed.
        if self._validate_unique:
            self.validate_unique()

    def validate_unique(self):
        """
        Calls the instance's validate_unique() method and updates the form's
        validation errors if any were raised.
        """
        exclude = self._get_validation_exclusions()
        try:
            self.instance.validate_unique(exclude=exclude)
        except ValidationError, e:
            self._update_errors(e.message_dict)

    def save(self, commit=True):
        """
        Saves this ``form``'s cleaned_data into model instance
        ``self.instance``.
        If commit=True, then the changes to ``instance`` will be saved to the
        database. Returns ``instance``.
        """
        # Pick the fail message based on whether the instance is new;
        # construct=False because _post_clean already applied cleaned_data.
        if self.instance.pk is None:
            fail_message = 'created'
        else:
            fail_message = 'changed'
        return save_instance(self, self.instance, self._meta.fields,
                             fail_message, commit, construct=False)
    save.alters_data = True
class ModelForm(BaseModelForm):
    """Concrete ModelForm base class; its fields are assembled by
    ModelFormMetaclass from the inner Meta's model."""
    __metaclass__ = ModelFormMetaclass
def modelform_factory(model, form=ModelForm, fields=None, exclude=None,
                      formfield_callback=None):
    """Return a ModelForm subclass for ``model``, built from ``form`` with a
    generated inner Meta carrying ``fields``/``exclude``."""
    # Create the inner Meta class. FIXME: ideally, we should be able to
    # construct a ModelForm without creating and passing in a temporary
    # inner class.

    # Build up a list of attributes that the Meta object will have.
    meta_attrs = {'model': model}
    if fields is not None:
        meta_attrs['fields'] = fields
    if exclude is not None:
        meta_attrs['exclude'] = exclude

    # If parent form class already has an inner Meta, the Meta we're
    # creating needs to inherit from the parent's inner meta.
    if hasattr(form, 'Meta'):
        meta_bases = (form.Meta, object)
    else:
        meta_bases = (object,)
    Meta = type('Meta', meta_bases, meta_attrs)

    # Give this new form class a reasonable name and hand the metaclass the
    # attributes it needs to build the fields.
    return ModelFormMetaclass(
        model.__name__ + 'Form',
        (form,),
        {'Meta': Meta, 'formfield_callback': formfield_callback},
    )
# ModelFormSets ##############################################################
class BaseModelFormSet(BaseFormSet):
    """
    A ``FormSet`` for editing a queryset and/or adding new objects to it.
    """
    model = None

    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 queryset=None, **kwargs):
        self.queryset = queryset
        defaults = {'data': data, 'files': files, 'auto_id': auto_id, 'prefix': prefix}
        defaults.update(kwargs)
        super(BaseModelFormSet, self).__init__(**defaults)

    def initial_form_count(self):
        """Returns the number of forms that are required in this FormSet."""
        if not (self.data or self.files):
            return len(self.get_queryset())
        return super(BaseModelFormSet, self).initial_form_count()

    def _existing_object(self, pk):
        # Cache pk -> instance for the queryset so repeated lookups during
        # form construction don't hit the database again.
        if not hasattr(self, '_object_dict'):
            self._object_dict = dict([(o.pk, o) for o in self.get_queryset()])
        return self._object_dict.get(pk)

    def _construct_form(self, i, **kwargs):
        if self.is_bound and i < self.initial_form_count():
            # Import goes here instead of module-level because importing
            # django.db has side effects.
            from django.db import connections
            pk_key = "%s-%s" % (self.add_prefix(i), self.model._meta.pk.name)
            pk = self.data[pk_key]
            pk_field = self.model._meta.pk
            pk = pk_field.get_db_prep_lookup('exact', pk,
                connection=connections[self.get_queryset().db])
            if isinstance(pk, list):
                pk = pk[0]
            kwargs['instance'] = self._existing_object(pk)
        if i < self.initial_form_count() and not kwargs.get('instance'):
            kwargs['instance'] = self.get_queryset()[i]
        return super(BaseModelFormSet, self)._construct_form(i, **kwargs)

    def get_queryset(self):
        if not hasattr(self, '_queryset'):
            if self.queryset is not None:
                qs = self.queryset
            else:
                qs = self.model._default_manager.get_query_set()

            # If the queryset isn't already ordered we need to add an
            # artificial ordering here to make sure that all formsets
            # constructed from this queryset have the same form order.
            if not qs.ordered:
                qs = qs.order_by(self.model._meta.pk.name)

            # Removed queryset limiting here. As per discussion re: #13023
            # on django-dev, max_num should not prevent existing
            # related objects/inlines from being displayed.
            self._queryset = qs
        return self._queryset

    def save_new(self, form, commit=True):
        """Saves and returns a new model instance for the given form."""
        return form.save(commit=commit)

    def save_existing(self, form, instance, commit=True):
        """Saves and returns an existing model instance for the given form."""
        return form.save(commit=commit)

    def save(self, commit=True):
        """Saves model instances for every form, adding and changing instances
        as necessary, and returns the list of instances.
        """
        if not commit:
            self.saved_forms = []
            def save_m2m():
                for form in self.saved_forms:
                    form.save_m2m()
            self.save_m2m = save_m2m
        return self.save_existing_objects(commit) + self.save_new_objects(commit)

    def clean(self):
        self.validate_unique()

    def validate_unique(self):
        # Collect unique_checks and date_checks to run from all the forms.
        all_unique_checks = set()
        all_date_checks = set()
        for form in self.forms:
            if not hasattr(form, 'cleaned_data'):
                continue
            exclude = form._get_validation_exclusions()
            unique_checks, date_checks = form.instance._get_unique_checks(exclude=exclude)
            all_unique_checks = all_unique_checks.union(set(unique_checks))
            all_date_checks = all_date_checks.union(set(date_checks))

        errors = []
        # Do each of the unique checks (unique and unique_together)
        for uclass, unique_check in all_unique_checks:
            seen_data = set()
            for form in self.forms:
                # if the form doesn't have cleaned_data then we ignore it,
                # it's already invalid
                if not hasattr(form, "cleaned_data"):
                    continue
                # get data for each field of each of unique_check
                row_data = tuple([form.cleaned_data[field] for field in unique_check if field in form.cleaned_data])
                if row_data and not None in row_data:
                    # if we've already seen it then we have a uniqueness failure
                    if row_data in seen_data:
                        # poke error messages into the right places and mark
                        # the form as invalid
                        errors.append(self.get_unique_error_message(unique_check))
                        form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
                        del form.cleaned_data
                        break
                    # mark the data as seen
                    seen_data.add(row_data)
        # iterate over each of the date checks now
        for date_check in all_date_checks:
            seen_data = set()
            uclass, lookup, field, unique_for = date_check
            for form in self.forms:
                # if the form doesn't have cleaned_data then we ignore it,
                # it's already invalid.
                # Bug fix: this previously tested hasattr(self, 'cleaned_data')
                # -- the formset instead of the current form -- so invalid
                # forms were never skipped here.
                if not hasattr(form, 'cleaned_data'):
                    continue
                # see if we have data for both fields
                if (form.cleaned_data and form.cleaned_data[field] is not None
                        and form.cleaned_data[unique_for] is not None):
                    # if it's a date lookup we need to get the data for all the fields
                    if lookup == 'date':
                        date = form.cleaned_data[unique_for]
                        date_data = (date.year, date.month, date.day)
                    # otherwise it's just the attribute on the date/datetime
                    # object
                    else:
                        date_data = (getattr(form.cleaned_data[unique_for], lookup),)
                    data = (form.cleaned_data[field],) + date_data
                    # if we've already seen it then we have a uniqueness failure
                    if data in seen_data:
                        # poke error messages into the right places and mark
                        # the form as invalid
                        errors.append(self.get_date_error_message(date_check))
                        form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
                        del form.cleaned_data
                        break
                    seen_data.add(data)
        if errors:
            raise ValidationError(errors)

    def get_unique_error_message(self, unique_check):
        if len(unique_check) == 1:
            return ugettext("Please correct the duplicate data for %(field)s.") % {
                "field": unique_check[0],
            }
        else:
            return ugettext("Please correct the duplicate data for %(field)s, "
                "which must be unique.") % {
                    "field": get_text_list(unique_check, unicode(_("and"))),
                }

    def get_date_error_message(self, date_check):
        return ugettext("Please correct the duplicate data for %(field_name)s "
            "which must be unique for the %(lookup)s in %(date_field)s.") % {
            'field_name': date_check[2],
            'date_field': date_check[3],
            'lookup': unicode(date_check[1]),
        }

    def get_form_error(self):
        return ugettext("Please correct the duplicate values below.")

    def save_existing_objects(self, commit=True):
        self.changed_objects = []
        self.deleted_objects = []
        if not self.get_queryset():
            return []
        saved_instances = []
        for form in self.initial_forms:
            pk_name = self._pk_field.name
            raw_pk_value = form._raw_value(pk_name)

            # clean() for different types of PK fields can sometimes return
            # the model instance, and sometimes the PK. Handle either.
            pk_value = form.fields[pk_name].clean(raw_pk_value)
            pk_value = getattr(pk_value, 'pk', pk_value)

            obj = self._existing_object(pk_value)
            if self.can_delete and self._should_delete_form(form):
                self.deleted_objects.append(obj)
                obj.delete()
                continue
            if form.has_changed():
                self.changed_objects.append((obj, form.changed_data))
                saved_instances.append(self.save_existing(form, obj, commit=commit))
                if not commit:
                    self.saved_forms.append(form)
        return saved_instances

    def save_new_objects(self, commit=True):
        self.new_objects = []
        for form in self.extra_forms:
            if not form.has_changed():
                continue
            # If someone has marked an add form for deletion, don't save the
            # object.
            if self.can_delete and self._should_delete_form(form):
                continue
            self.new_objects.append(self.save_new(form, commit=commit))
            if not commit:
                self.saved_forms.append(form)
        return self.new_objects

    def add_fields(self, form, index):
        """Add a hidden field for the object's primary key."""
        from django.db.models import AutoField, OneToOneField, ForeignKey
        self._pk_field = pk = self.model._meta.pk
        # If a pk isn't editable, then it won't be on the form, so we need to
        # add it here so we can tell which object is which when we get the
        # data back. Generally, pk.editable should be false, but for some
        # reason, auto_created pk fields and AutoField's editable attribute is
        # True, so check for that as well.
        def pk_is_not_editable(pk):
            return ((not pk.editable) or (pk.auto_created or isinstance(pk, AutoField))
                or (pk.rel and pk.rel.parent_link and pk_is_not_editable(pk.rel.to._meta.pk)))
        if pk_is_not_editable(pk) or pk.name not in form.fields:
            if form.is_bound:
                pk_value = form.instance.pk
            else:
                try:
                    if index is not None:
                        pk_value = self.get_queryset()[index].pk
                    else:
                        pk_value = None
                except IndexError:
                    pk_value = None
            if isinstance(pk, OneToOneField) or isinstance(pk, ForeignKey):
                qs = pk.rel.to._default_manager.get_query_set()
            else:
                qs = self.model._default_manager.get_query_set()
            qs = qs.using(form.instance._state.db)
            form.fields[self._pk_field.name] = ModelChoiceField(qs, initial=pk_value, required=False, widget=HiddenInput)
        super(BaseModelFormSet, self).add_fields(form, index)
def modelformset_factory(model, form=ModelForm, formfield_callback=None,
                         formset=BaseModelFormSet,
                         extra=1, can_delete=False, can_order=False,
                         max_num=None, fields=None, exclude=None):
    """
    Returns a FormSet class for the given Django model class.
    """
    # First derive the per-row form class, then wrap it in a formset.
    model_form = modelform_factory(model, form=form, fields=fields,
                                   exclude=exclude,
                                   formfield_callback=formfield_callback)
    formset_class = formset_factory(model_form, formset, extra=extra,
                                    max_num=max_num, can_order=can_order,
                                    can_delete=can_delete)
    formset_class.model = model
    return formset_class
# InlineFormSets #############################################################
class BaseInlineFormSet(BaseModelFormSet):
    """A formset for child objects related to a parent."""
    def __init__(self, data=None, files=None, instance=None,
                 save_as_new=False, prefix=None, queryset=None):
        from django.db.models.fields.related import RelatedObject
        if instance is None:
            # No parent supplied: work against a fresh, unsaved parent.
            self.instance = self.fk.rel.to()
        else:
            self.instance = instance
        self.save_as_new = save_as_new
        # is there a better way to get the object descriptor?
        self.rel_name = RelatedObject(self.fk.rel.to, self.model, self.fk).get_accessor_name()
        if queryset is None:
            queryset = self.model._default_manager
        # Limit the rows to children pointing at this parent instance.
        qs = queryset.filter(**{self.fk.name: self.instance})
        super(BaseInlineFormSet, self).__init__(data, files, prefix=prefix,
                                                queryset=qs)

    def initial_form_count(self):
        # "Save as new" treats every bound form as a new (extra) form.
        if self.save_as_new:
            return 0
        return super(BaseInlineFormSet, self).initial_form_count()

    def _construct_form(self, i, **kwargs):
        form = super(BaseInlineFormSet, self)._construct_form(i, **kwargs)
        if self.save_as_new:
            # Remove the primary key from the form's data, we are only
            # creating new instances
            form.data[form.add_prefix(self._pk_field.name)] = None

            # Remove the foreign key from the form's data
            form.data[form.add_prefix(self.fk.name)] = None

        # Set the fk value here so that the form can do it's validation.
        setattr(form.instance, self.fk.get_attname(), self.instance.pk)
        return form

    #@classmethod
    def get_default_prefix(cls):
        # Derive the prefix from the reverse accessor name of the fk.
        from django.db.models.fields.related import RelatedObject
        return RelatedObject(cls.fk.rel.to, cls.model, cls.fk).get_accessor_name().replace('+','')
    get_default_prefix = classmethod(get_default_prefix)

    def save_new(self, form, commit=True):
        # Use commit=False so we can assign the parent key afterwards, then
        # save the object.
        obj = form.save(commit=False)
        pk_value = getattr(self.instance, self.fk.rel.field_name)
        setattr(obj, self.fk.get_attname(), getattr(pk_value, 'pk', pk_value))
        if commit:
            obj.save()
        # form.save_m2m() can be called via the formset later on if commit=False
        if commit and hasattr(form, 'save_m2m'):
            form.save_m2m()
        return obj

    def add_fields(self, form, index):
        super(BaseInlineFormSet, self).add_fields(form, index)
        if self._pk_field == self.fk:
            # The fk *is* the pk (parent-link case): render it as the pk field.
            name = self._pk_field.name
            kwargs = {'pk_field': True}
        else:
            # The foreign key field might not be on the form, so we poke at the
            # Model field to get the label, since we need that for error messages.
            name = self.fk.name
            kwargs = {
                'label': getattr(form.fields.get(name), 'label', capfirst(self.fk.verbose_name))
            }
            if self.fk.rel.field_name != self.fk.rel.to._meta.pk.name:
                kwargs['to_field'] = self.fk.rel.field_name

        form.fields[name] = InlineForeignKeyField(self.instance, **kwargs)

        # Add the generated field to form._meta.fields if it's defined to make
        # sure validation isn't skipped on that field.
        if form._meta.fields:
            if isinstance(form._meta.fields, tuple):
                form._meta.fields = list(form._meta.fields)
            form._meta.fields.append(self.fk.name)

    def get_unique_error_message(self, unique_check):
        # The fk value is constant across the formset, so drop it from the
        # uniqueness error message.
        unique_check = [field for field in unique_check if field != self.fk.name]
        return super(BaseInlineFormSet, self).get_unique_error_message(unique_check)
def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False):
    """
    Finds and returns the ForeignKey from model to parent if there is one
    (returns None if can_fail is True and no such field exists). If fk_name is
    provided, assume it is the name of the ForeignKey field. Unless can_fail is
    True, an exception is raised if there is no ForeignKey from model to
    parent_model.
    """
    # avoid circular import
    from django.db.models import ForeignKey

    def points_to_parent(field):
        # A valid candidate is a ForeignKey targeting parent_model itself
        # or one of its ancestors (multi-table inheritance).
        return (isinstance(field, ForeignKey)
                and (field.rel.to == parent_model
                     or field.rel.to in parent_model._meta.get_parent_list()))

    opts = model._meta
    if fk_name:
        named = [f for f in opts.fields if f.name == fk_name]
        if len(named) == 0:
            raise Exception("%s has no field named '%s'" % (model, fk_name))
        if len(named) == 1:
            fk = named[0]
            if not points_to_parent(fk):
                raise Exception("fk_name '%s' is not a ForeignKey to %s" % (fk_name, parent_model))
    else:
        # Try to discover what the ForeignKey from model to parent_model is
        candidates = [f for f in opts.fields if points_to_parent(f)]
        if len(candidates) == 1:
            fk = candidates[0]
        elif len(candidates) == 0:
            if can_fail:
                return
            raise Exception("%s has no ForeignKey to %s" % (model, parent_model))
        else:
            raise Exception("%s has more than 1 ForeignKey to %s" % (model, parent_model))
    return fk
def inlineformset_factory(parent_model, model, form=ModelForm,
                          formset=BaseInlineFormSet, fk_name=None,
                          fields=None, exclude=None,
                          extra=3, can_order=False, can_delete=True, max_num=None,
                          formfield_callback=None):
    """
    Returns an ``InlineFormSet`` for the given kwargs.
    You must provide ``fk_name`` if ``model`` has more than one ``ForeignKey``
    to ``parent_model``.
    """
    fk = _get_foreign_key(parent_model, model, fk_name=fk_name)
    if fk.unique:
        # enforce a max_num=1 when the foreign key to the parent model is
        # unique: only one inline object can exist.
        max_num = 1
    FormSet = modelformset_factory(
        model,
        form=form,
        formfield_callback=formfield_callback,
        formset=formset,
        extra=extra,
        can_delete=can_delete,
        can_order=can_order,
        fields=fields,
        exclude=exclude,
        max_num=max_num,
    )
    FormSet.fk = fk
    return FormSet
# Fields #####################################################################
class InlineForeignKeyHiddenInput(HiddenInput):
    # Hidden widget for the implicit FK back to the parent instance in an
    # inline formset; its value is fixed by the parent, so it can never be
    # considered "changed" by the user.
    def _has_changed(self, initial, data):
        return False
class InlineForeignKeyField(Field):
    """
    A basic integer field that deals with validating the given value to a
    given parent instance in an inline.
    """
    default_error_messages = {
        'invalid_choice': _(u'The inline foreign key did not match the parent instance primary key.'),
    }
    def __init__(self, parent_instance, *args, **kwargs):
        # parent_instance: the model instance this inline is attached to.
        # pk_field: True when this field carries the child's own pk rather
        # than a reference to the parent; to_field: alternate parent field
        # used as the FK target instead of the pk.
        self.parent_instance = parent_instance
        self.pk_field = kwargs.pop("pk_field", False)
        self.to_field = kwargs.pop("to_field", None)
        if self.parent_instance is not None:
            # Pre-populate the (hidden) initial value from the parent.
            if self.to_field:
                kwargs["initial"] = getattr(self.parent_instance, self.to_field)
            else:
                kwargs["initial"] = self.parent_instance.pk
        # The field is rendered hidden and never required from the user.
        kwargs["required"] = False
        kwargs["widget"] = InlineForeignKeyHiddenInput
        super(InlineForeignKeyField, self).__init__(*args, **kwargs)
    def clean(self, value):
        # Validate that the submitted value still matches the parent; return
        # the parent instance itself (or None for an empty pk_field).
        if value in EMPTY_VALUES:
            if self.pk_field:
                return None
            # if there is no value act as we did before.
            return self.parent_instance
        # ensure that we compare the values as equal types.
        if self.to_field:
            orig = getattr(self.parent_instance, self.to_field)
        else:
            orig = self.parent_instance.pk
        if force_unicode(value) != force_unicode(orig):
            raise ValidationError(self.error_messages['invalid_choice'])
        return self.parent_instance
class ModelChoiceIterator(object):
    """Lazy iterable of ``(value, label)`` pairs backing a ModelChoiceField."""
    def __init__(self, field):
        self.field = field
        self.queryset = field.queryset
    def __iter__(self):
        field = self.field
        # The optional empty choice is always emitted first.
        if field.empty_label is not None:
            yield (u"", field.empty_label)
        if not field.cache_choices:
            # Re-evaluate the queryset on every iteration so new rows appear.
            for obj in self.queryset.all():
                yield self.choice(obj)
            return
        # Cached mode: materialize the choices once, then replay them.
        if field.choice_cache is None:
            field.choice_cache = [self.choice(obj) for obj in self.queryset.all()]
        for cached_choice in field.choice_cache:
            yield cached_choice
    def __len__(self):
        return len(self.queryset)
    def choice(self, obj):
        # One (form value, human-readable label) pair for a model object.
        field = self.field
        return (field.prepare_value(obj), field.label_from_instance(obj))
class ModelChoiceField(ChoiceField):
    """A ChoiceField whose choices are a model QuerySet."""
    # This class is a subclass of ChoiceField for purity, but it doesn't
    # actually use any of ChoiceField's implementation.
    default_error_messages = {
        'invalid_choice': _(u'Select a valid choice. That choice is not one of'
                            u' the available choices.'),
    }
    def __init__(self, queryset, empty_label=u"---------", cache_choices=False,
                 required=True, widget=None, label=None, initial=None,
                 help_text=None, to_field_name=None, *args, **kwargs):
        # A required field with an initial value never needs the empty
        # choice: the user can only ever pick a real object.
        if required and (initial is not None):
            self.empty_label = None
        else:
            self.empty_label = empty_label
        self.cache_choices = cache_choices
        # Call Field instead of ChoiceField __init__() because we don't need
        # ChoiceField.__init__().
        Field.__init__(self, required, widget, label, initial, help_text,
                       *args, **kwargs)
        # Assigning self.queryset goes through the property setter below,
        # which also refreshes the widget's choices.
        self.queryset = queryset
        self.choice_cache = None
        self.to_field_name = to_field_name
    def __deepcopy__(self, memo):
        result = super(ChoiceField, self).__deepcopy__(memo)
        # Need to force a new ModelChoiceIterator to be created, bug #11183
        result.queryset = result.queryset
        return result
    def _get_queryset(self):
        return self._queryset
    def _set_queryset(self, queryset):
        # Keep the widget in sync whenever the queryset is replaced.
        self._queryset = queryset
        self.widget.choices = self.choices
    queryset = property(_get_queryset, _set_queryset)
    # this method will be used to create object labels by the QuerySetIterator.
    # Override it to customize the label.
    def label_from_instance(self, obj):
        """
        This method is used to convert objects into strings; it's used to
        generate the labels for the choices presented by this object. Subclasses
        can override this method to customize the display of the choices.
        """
        return smart_unicode(obj)
    def _get_choices(self):
        # If self._choices is set, then somebody must have manually set
        # the property self.choices. In this case, just return self._choices.
        if hasattr(self, '_choices'):
            return self._choices
        # Otherwise, execute the QuerySet in self.queryset to determine the
        # choices dynamically. Return a fresh ModelChoiceIterator that has not been
        # consumed. Note that we're instantiating a new ModelChoiceIterator *each*
        # time _get_choices() is called (and, thus, each time self.choices is
        # accessed) so that we can ensure the QuerySet has not been consumed. This
        # construct might look complicated but it allows for lazy evaluation of
        # the queryset.
        return ModelChoiceIterator(self)
    choices = property(_get_choices, ChoiceField._set_choices)
    def prepare_value(self, value):
        # Model instances are rendered as their pk (or to_field value);
        # anything else is handled by the parent implementation.
        if hasattr(value, '_meta'):
            if self.to_field_name:
                return value.serializable_value(self.to_field_name)
            else:
                return value.pk
        return super(ModelChoiceField, self).prepare_value(value)
    def to_python(self, value):
        # Convert the submitted key back into a model instance, or None for
        # empty input; a bad key type or missing row means an invalid choice.
        if value in EMPTY_VALUES:
            return None
        try:
            key = self.to_field_name or 'pk'
            value = self.queryset.get(**{key: value})
        except (ValueError, self.queryset.model.DoesNotExist):
            raise ValidationError(self.error_messages['invalid_choice'])
        return value
    def validate(self, value):
        # Skip ChoiceField.validate(): membership was already checked by
        # the queryset lookup in to_python().
        return Field.validate(self, value)
class ModelMultipleChoiceField(ModelChoiceField):
    """A MultipleChoiceField whose choices are a model QuerySet."""
    widget = SelectMultiple
    hidden_widget = MultipleHiddenInput
    default_error_messages = {
        'list': _(u'Enter a list of values.'),
        'invalid_choice': _(u'Select a valid choice. %s is not one of the'
                            u' available choices.'),
        'invalid_pk_value': _(u'"%s" is not a valid value for a primary key.')
    }
    def __init__(self, queryset, cache_choices=False, required=True,
                 widget=None, label=None, initial=None,
                 help_text=None, *args, **kwargs):
        # empty_label is forced to None: a multi-select has no blank row.
        super(ModelMultipleChoiceField, self).__init__(queryset, None,
            cache_choices, required, widget, label, initial, help_text,
            *args, **kwargs)
    def clean(self, value):
        # Validate a list of keys and return the matching queryset.
        if self.required and not value:
            raise ValidationError(self.error_messages['required'])
        elif not self.required and not value:
            return []
        if not isinstance(value, (list, tuple)):
            raise ValidationError(self.error_messages['list'])
        key = self.to_field_name or 'pk'
        # First pass: each key must at least be coercible to the field type.
        for pk in value:
            try:
                self.queryset.filter(**{key: pk})
            except ValueError:
                raise ValidationError(self.error_messages['invalid_pk_value'] % pk)
        # Second pass: every submitted key must match an existing row.
        qs = self.queryset.filter(**{'%s__in' % key: value})
        pks = set([force_unicode(getattr(o, key)) for o in qs])
        for val in value:
            if force_unicode(val) not in pks:
                raise ValidationError(self.error_messages['invalid_choice'] % val)
        # Since this overrides the inherited ModelChoiceField.clean
        # we run custom validators here
        self.run_validators(value)
        return qs
    def prepare_value(self, value):
        # Prepare each element of an iterable individually.
        if hasattr(value, '__iter__'):
            return [super(ModelMultipleChoiceField, self).prepare_value(v) for v in value]
        return super(ModelMultipleChoiceField, self).prepare_value(value)
| agpl-3.0 |
jordanemedlock/psychtruths | temboo/core/Library/Utilities/HTTP/Get.py | 4 | 4767 | # -*- coding: utf-8 -*-
###############################################################################
#
# Get
# Generates a HTTP GET request.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class Get(Choreography):
    """Choreo that performs an HTTP GET request via the Temboo platform."""
    def __init__(self, temboo_session):
        """
        Create a new instance of the Get Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(Get, self).__init__(temboo_session, '/Library/Utilities/HTTP/Get')
    def new_input_set(self):
        # Factory for the input container used to parameterize an execution.
        return GetInputSet()
    def _make_result_set(self, result, path):
        # Wrap a raw execution result in the typed result accessor.
        return GetResultSet(result, path)
    def _make_execution(self, session, exec_id, path):
        # Wrap an in-flight execution handle.
        return GetChoreographyExecution(session, exec_id, path)
class GetInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the Get
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    # Each setter simply records a named input; validation happens server-side.
    def set_Debug(self, value):
        """
        Set the value of the Debug input for this Choreo. ((optional, boolean) When set to "true", the HTTP debug log will be returned.)
        """
        super(GetInputSet, self)._set_input('Debug', value)
    def set_Password(self, value):
        """
        Set the value of the Password input for this Choreo. ((optional, password) A valid password. This is used if the request required basic authentication.)
        """
        super(GetInputSet, self)._set_input('Password', value)
    def set_RequestHeaders(self, value):
        """
        Set the value of the RequestHeaders input for this Choreo. ((optional, json) A JSON object containing up to 10 key/value pairs that will be mapped to the HTTP request headers.)
        """
        super(GetInputSet, self)._set_input('RequestHeaders', value)
    def set_RequestParameters(self, value):
        """
        Set the value of the RequestParameters input for this Choreo. ((optional, json) A JSON object containing up to 10 key/value pairs that will be mapped to the url string as http parameters.)
        """
        super(GetInputSet, self)._set_input('RequestParameters', value)
    def set_URL(self, value):
        """
        Set the value of the URL input for this Choreo. ((required, string) The base URL for the request (including http:// or https://).)
        """
        super(GetInputSet, self)._set_input('URL', value)
    def set_Username(self, value):
        """
        Set the value of the Username input for this Choreo. ((optional, string) A valid username. This is used if the request required basic authentication.)
        """
        super(GetInputSet, self)._set_input('Username', value)
class GetResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the Get Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """
    def getJSONFromString(self, str):
        # NOTE(review): the parameter name shadows the builtin ``str``;
        # left unchanged because renaming could break keyword callers.
        return json.loads(str)
    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. (The response from the server.)
        """
        return self._output.get('Response', None)
    def get_HTTPLog(self):
        """
        Retrieve the value for the "HTTPLog" output from this Choreo execution. ((string) A debug log for the http request that was sent. This is only returned when Debug is set to "true".)
        """
        return self._output.get('HTTPLog', None)
    def get_ResponseStatusCode(self):
        """
        Retrieve the value for the "ResponseStatusCode" output from this Choreo execution. ((integer) The response status code.)
        """
        return self._output.get('ResponseStatusCode', None)
class GetChoreographyExecution(ChoreographyExecution):
    """Handle to an asynchronous execution of the Get Choreo."""
    def _make_result_set(self, response, path):
        # Produce the typed result accessor for a completed execution.
        return GetResultSet(response, path)
| apache-2.0 |
sitsbeyou/Misago | misago/threads/migrations/0001_initial.py | 8 | 11910 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import models, migrations
import django.db.models.deletion
import django.utils.timezone
from misago.core.pgutils import CreatePartialIndex, CreatePartialCompositeIndex
class Migration(migrations.Migration):
    """
    Initial schema for misago.threads: Label, Post, Thread,
    ThreadParticipant, Event and Report models, plus PostgreSQL partial
    indexes for the frequently-filtered boolean flags.
    """
    dependencies = [
        ('misago_forums', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Label',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('slug', models.SlugField(max_length=255)),
                ('css_class', models.CharField(max_length=255, null=True, blank=True)),
                ('forums', models.ManyToManyField(to='misago_forums.Forum')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # *_name/*_slug columns denormalize author identity so posts
                # survive user deletion (FKs below use SET_NULL).
                ('poster_name', models.CharField(max_length=255)),
                ('poster_ip', models.GenericIPAddressField()),
                ('original', models.TextField()),
                ('parsed', models.TextField()),
                ('checksum', models.CharField(max_length=64, default='-')),
                ('has_attachments', models.BooleanField(default=False)),
                ('pickled_attachments', models.TextField(null=True, blank=True)),
                ('posted_on', models.DateTimeField()),
                ('updated_on', models.DateTimeField()),
                ('edits', models.PositiveIntegerField(default=0)),
                ('last_editor_name', models.CharField(max_length=255, null=True, blank=True)),
                ('last_editor_slug', models.SlugField(max_length=255, null=True, blank=True)),
                ('hidden_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True)),
                ('hidden_by_name', models.CharField(max_length=255, null=True, blank=True)),
                ('hidden_by_slug', models.SlugField(max_length=255, null=True, blank=True)),
                ('hidden_on', models.DateTimeField(default=django.utils.timezone.now)),
                ('has_reports', models.BooleanField(default=False)),
                ('has_open_reports', models.BooleanField(default=False)),
                ('is_moderated', models.BooleanField(default=False, db_index=True)),
                ('is_hidden', models.BooleanField(default=False)),
                ('is_protected', models.BooleanField(default=False)),
                ('forum', models.ForeignKey(to='misago_forums.Forum')),
                ('last_editor', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True)),
                ('mentions', models.ManyToManyField(related_name='mention_set', to=settings.AUTH_USER_MODEL)),
                ('poster', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Partial indexes: only index the rare TRUE/FALSE rows that queries
        # actually filter on, keeping the index small.
        CreatePartialIndex(
            field='Post.has_open_reports',
            index_name='misago_post_has_open_reports_partial',
            condition='has_open_reports = TRUE',
        ),
        CreatePartialIndex(
            field='Post.is_hidden',
            index_name='misago_post_is_hidden_partial',
            condition='is_hidden = FALSE',
        ),
        migrations.CreateModel(
            name='Thread',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=255)),
                ('slug', models.CharField(max_length=255)),
                ('replies', models.PositiveIntegerField(default=0, db_index=True)),
                ('has_reported_posts', models.BooleanField(default=False)),
                ('has_open_reports', models.BooleanField(default=False)),
                ('has_moderated_posts', models.BooleanField(default=False)),
                ('has_hidden_posts', models.BooleanField(default=False)),
                ('has_events', models.BooleanField(default=False)),
                ('started_on', models.DateTimeField(db_index=True)),
                ('starter_name', models.CharField(max_length=255)),
                ('starter_slug', models.CharField(max_length=255)),
                ('last_post_on', models.DateTimeField(db_index=True)),
                ('last_poster_name', models.CharField(max_length=255, null=True, blank=True)),
                ('last_poster_slug', models.CharField(max_length=255, null=True, blank=True)),
                ('is_pinned', models.BooleanField(default=False, db_index=True)),
                ('is_poll', models.BooleanField(default=False)),
                ('is_moderated', models.BooleanField(default=False, db_index=True)),
                ('is_hidden', models.BooleanField(default=False)),
                ('is_closed', models.BooleanField(default=False)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='ThreadParticipant',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('thread', models.ForeignKey(to='misago_threads.Thread')),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
                ('is_owner', models.BooleanField(default=False)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.AddField(
            model_name='thread',
            name='participants',
            field=models.ManyToManyField(related_name='private_thread_set', through='misago_threads.ThreadParticipant', through_fields=('thread', 'user'), to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('author_name', models.CharField(max_length=255)),
                ('author_slug', models.CharField(max_length=255)),
                ('icon', models.CharField(max_length=255)),
                ('occured_on', models.DateTimeField(default=django.utils.timezone.now, db_index=True)),
                ('message', models.CharField(max_length=255)),
                ('checksum', models.CharField(max_length=64, default='-')),
                ('is_hidden', models.BooleanField(default=False)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True)),
                ('forum', models.ForeignKey(to='misago_forums.Forum')),
                ('thread', models.ForeignKey(to='misago_threads.Thread')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Report',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('reported_by_name', models.CharField(max_length=255)),
                ('reported_by_slug', models.CharField(max_length=255)),
                ('reported_by_ip', models.GenericIPAddressField()),
                ('reported_on', models.DateTimeField(default=django.utils.timezone.now)),
                ('message', models.TextField()),
                # NOTE(review): b'-' (bytes) is inconsistent with the '-'
                # defaults on Post.checksum and Event.checksum above; left
                # as-is because migrations are historical records.
                ('checksum', models.CharField(default=b'-', max_length=64)),
                ('is_closed', models.BooleanField(default=False)),
                ('closed_by_name', models.CharField(max_length=255)),
                ('closed_by_slug', models.CharField(max_length=255)),
                ('closed_by', models.ForeignKey(related_name='closedreport_set', on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True)),
                ('closed_on', models.DateTimeField(default=django.utils.timezone.now)),
                ('forum', models.ForeignKey(to='misago_forums.Forum')),
                ('post', models.ForeignKey(to='misago_threads.Post')),
                ('reported_by', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True)),
                ('thread', models.ForeignKey(to='misago_threads.Thread')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        CreatePartialIndex(
            field='Thread.has_reported_posts',
            index_name='misago_thread_has_reported_posts_partial',
            condition='has_reported_posts = TRUE',
        ),
        CreatePartialIndex(
            field='Thread.has_moderated_posts',
            index_name='misago_thread_has_moderated_posts_partial',
            condition='has_moderated_posts = TRUE',
        ),
        CreatePartialIndex(
            field='Thread.is_hidden',
            index_name='misago_thread_is_hidden_partial',
            condition='is_hidden = FALSE',
        ),
        migrations.AddField(
            model_name='post',
            name='thread',
            field=models.ForeignKey(to='misago_threads.Thread'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='thread',
            name='first_post',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='misago_threads.Post', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='thread',
            name='forum',
            field=models.ForeignKey(to='misago_forums.Forum'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='thread',
            name='last_post',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='misago_threads.Post', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='thread',
            name='last_poster',
            field=models.ForeignKey(related_name='last_poster_set', on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='thread',
            name='label',
            field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to='misago_threads.Label', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='thread',
            name='starter',
            field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True),
            preserve_default=True,
        ),
        migrations.AlterIndexTogether(
            name='thread',
            index_together=set([
                ('forum', 'id'),
                ('forum', 'last_post_on'),
                ('forum', 'replies'),
            ]),
        ),
        CreatePartialCompositeIndex(
            model='Report',
            fields=('post_id', 'is_closed'),
            index_name='misago_report_active_reports',
            condition='is_closed = FALSE',
        ),
    ]
| gpl-2.0 |
rcarrillocruz/ansible | lib/ansible/modules/commands/command.py | 4 | 7952 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>, and others
# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: command
short_description: Executes a command on a remote node
version_added: historical
description:
- The C(command) module takes the command name followed by a list of space-delimited arguments.
- The given command will be executed on all selected nodes. It will not be
processed through the shell, so variables like C($HOME) and operations
like C("<"), C(">"), C("|"), C(";") and C("&") will not work (use the M(shell)
module if you need these features).
- For Windows targets, use the M(win_command) module instead.
options:
free_form:
description:
- the command module takes a free form command to run. There is no parameter actually named 'free form'.
See the examples!
required: true
default: null
creates:
description:
- a filename or (since 2.0) glob pattern, when it already exists, this step will B(not) be run.
required: no
default: null
removes:
description:
- a filename or (since 2.0) glob pattern, when it does not exist, this step will B(not) be run.
version_added: "0.8"
required: no
default: null
chdir:
description:
- cd into this directory before running the command
version_added: "0.6"
required: false
default: null
executable:
description:
- change the shell used to execute the command. Should be an absolute path to the executable.
required: false
default: null
version_added: "0.9"
warn:
version_added: "1.8"
default: yes
description:
- if command warnings are on in ansible.cfg, do not warn about this particular line if set to no/false.
required: false
notes:
- If you want to run a command through the shell (say you are using C(<), C(>), C(|), etc), you actually want the M(shell) module instead.
The C(command) module is much more secure as it's not affected by the user's environment.
- " C(creates), C(removes), and C(chdir) can be specified after the command.
For instance, if you only want to run a command if a certain file does not exist, use this."
- For Windows targets, use the M(win_command) module instead.
author:
- Ansible Core Team
- Michael DeHaan
'''
EXAMPLES = '''
- name: return motd to registered var
command: cat /etc/motd
register: mymotd
- name: Run the command if the specified file does not exist.
command: /usr/bin/make_database.sh arg1 arg2 creates=/path/to/database
# You can also use the 'args' form to provide the options.
- name: This command will change the working directory to somedir/ and will only run when /path/to/database doesn't exist.
command: /usr/bin/make_database.sh arg1 arg2
args:
chdir: somedir/
creates: /path/to/database
- name: safely use templated variable to run command. Always use the quote filter to avoid injection issues.
command: cat {{ myfile|quote }}
register: myoutput
'''
import datetime
import glob
import shlex
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import b
def check_command(commandline):
    """
    Inspect a raw command line and return a list of warning strings that
    suggest dedicated Ansible modules (or 'become') covering the same ground.
    An empty list means no suggestion applies.
    """
    # Commands whose effect maps onto an argument of the file module.
    file_module_args = {
        'chown': 'owner', 'chmod': 'mode', 'chgrp': 'group',
        'ln': 'state=link', 'mkdir': 'state=directory',
        'rmdir': 'state=absent', 'rm': 'state=absent', 'touch': 'state=touch',
    }
    # Commands that have a dedicated module of their own.
    dedicated_modules = {
        'hg': 'hg', 'curl': 'get_url or uri', 'wget': 'get_url or uri',
        'svn': 'subversion', 'service': 'service',
        'mount': 'mount', 'rpm': 'yum, dnf or zypper', 'yum': 'yum', 'apt-get': 'apt',
        'tar': 'unarchive', 'unzip': 'unarchive', 'sed': 'template or lineinfile',
        'dnf': 'dnf', 'zypper': 'zypper',
    }
    # Privilege-escalation wrappers better expressed via 'become'.
    become_methods = ('sudo', 'su', 'pbrun', 'pfexec', 'runas', 'pmrun')
    warnings = []
    # Only the executable name matters, not its path or arguments.
    executable = os.path.basename(commandline.split()[0])
    if executable in file_module_args:
        warnings.append("Consider using file module with %s rather than running %s" % (file_module_args[executable], executable))
    if executable in dedicated_modules:
        warnings.append("Consider using %s module rather than running %s" % (dedicated_modules[executable], executable))
    if executable in become_methods:
        warnings.append("Consider using 'become', 'become_method', and 'become_user' rather than running %s" % (executable,))
    return warnings
def main():
    # the command module is the one ansible module that does not take key=value args
    # hence don't copy this one if you are looking to build others!
    module = AnsibleModule(
        argument_spec=dict(
            _raw_params = dict(),
            _uses_shell = dict(type='bool', default=False),
            chdir = dict(type='path'),
            executable = dict(),
            creates = dict(type='path'),
            removes = dict(type='path'),
            warn = dict(type='bool', default=True),
        )
    )
    shell = module.params['_uses_shell']
    chdir = module.params['chdir']
    executable = module.params['executable']
    args = module.params['_raw_params']
    creates = module.params['creates']
    removes = module.params['removes']
    warn = module.params['warn']
    if args.strip() == '':
        module.fail_json(rc=256, msg="no command given")
    if chdir:
        # Change into the requested working directory before running.
        chdir = os.path.abspath(chdir)
        os.chdir(chdir)
    if creates:
        # do not run the command if the line contains creates=filename
        # and the filename already exists. This allows idempotence
        # of command executions.
        if glob.glob(creates):
            module.exit_json(
                cmd=args,
                stdout="skipped, since %s exists" % creates,
                changed=False,
                rc=0
            )
    if removes:
        # do not run the command if the line contains removes=filename
        # and the filename does not exist. This allows idempotence
        # of command executions.
        if not glob.glob(removes):
            module.exit_json(
                cmd=args,
                stdout="skipped, since %s does not exist" % removes,
                changed=False,
                rc=0
            )
    warnings = list()
    if warn:
        # Suggest dedicated modules when the command duplicates one.
        warnings = check_command(args)
    if not shell:
        # Without a shell, split the command line into an argv list so no
        # shell metacharacter interpretation takes place.
        args = shlex.split(args)
    startd = datetime.datetime.now()
    rc, out, err = module.run_command(args, executable=executable, use_unsafe_shell=shell, encoding=None)
    endd = datetime.datetime.now()
    delta = endd - startd
    # Normalize missing streams to empty bytes before stripping newlines.
    if out is None:
        out = b('')
    if err is None:
        err = b('')
    result = dict(
        cmd = args,
        stdout = out.rstrip(b("\r\n")),
        stderr = err.rstrip(b("\r\n")),
        rc = rc,
        start = str(startd),
        end = str(endd),
        delta = str(delta),
        changed = True,
        warnings = warnings
    )
    if rc != 0:
        module.fail_json(msg='non-zero return code', **result)
    module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
drnextgis/QGIS | python/ext-libs/requests/packages/urllib3/__init__.py | 152 | 2854 | """
urllib3 - Thread-safe connection pooling and re-using.
"""
from __future__ import absolute_import
import warnings
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url
)
from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util.request import make_headers
from .util.url import get_host
from .util.timeout import Timeout
from .util.retry import Retry
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Fallback for Python 2.6, whose logging module lacks NullHandler:
    # a handler that silently discards every record.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
__version__ = '1.15.1'
__all__ = (
'HTTPConnectionPool',
'HTTPSConnectionPool',
'PoolManager',
'ProxyManager',
'HTTPResponse',
'Retry',
'Timeout',
'add_stderr_logger',
'connection_from_url',
'disable_warnings',
'encode_multipart_formdata',
'get_host',
'make_headers',
'proxy_from_url',
)
logging.getLogger(__name__).addHandler(NullHandler())
def add_stderr_logger(level=logging.DEBUG):
    """
    Attach a StreamHandler (stderr by default) to this package's logger —
    useful for quick debugging — and return the handler that was added.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    pkg_logger = logging.getLogger(__name__)
    stream_handler = logging.StreamHandler()
    fmt = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    stream_handler.setFormatter(fmt)
    pkg_logger.addHandler(stream_handler)
    pkg_logger.setLevel(level)
    pkg_logger.debug('Added a stderr logging handler to logger: %s', __name__)
    return stream_handler
# ... Clean up.
del NullHandler
# All warning filters *must* be appended unless you're really certain that they
# shouldn't be: otherwise, it's very hard for users to use most Python
# mechanisms to silence them.
# SecurityWarning's always go off by default.
warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
# SubjectAltNameWarning's should go off once per host
warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
append=True)
# SNIMissingWarnings should go off only once.
warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True)
def disable_warnings(category=exceptions.HTTPWarning):
    """
    Helper for quickly disabling all urllib3 warnings.
    """
    # HTTPWarning is the root of urllib3's warning hierarchy, so the default
    # silences every urllib3-issued warning at once.
    warnings.simplefilter('ignore', category)
| gpl-2.0 |
cgstudiomap/cgstudiomap | main/parts/odoo/addons/l10n_be/wizard/__init__.py | 438 | 1145 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import l10n_be_partner_vat_listing
import l10n_be_vat_intra
import l10n_be_account_vat_declaration
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
40223201/w16b_test | static/Brython3.1.1-20150328-091302/Lib/multiprocessing/pool.py | 694 | 23263 | #
# Module providing the `Pool` class for managing a process pool
#
# multiprocessing/pool.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
__all__ = ['Pool']
#
# Imports
#
import threading
import queue
import itertools
import collections
import time
from multiprocessing import Process, cpu_count, TimeoutError
from multiprocessing.util import Finalize, debug
#
# Constants representing the state of a pool
#
RUN = 0
CLOSE = 1
TERMINATE = 2
#
# Miscellaneous
#
job_counter = itertools.count()
def mapstar(args):
    """Unpack ``args`` into map() — used to ship a whole map call as one task."""
    return [item for item in map(*args)]
def starmapstar(args):
    """Apply ``args[0]`` to each argument tuple in ``args[1]`` (a starmap task)."""
    fn, argument_tuples = args[0], args[1]
    return [fn(*argument_tuple) for argument_tuple in argument_tuples]
#
# Code run by worker processes
#
class MaybeEncodingError(Exception):
    """Wraps possible unpickleable errors, so they can be
    safely sent through the socket."""
    def __init__(self, exc, value):
        # Store repr() strings rather than the objects themselves: the
        # originals may be exactly what failed to pickle.
        self.exc = repr(exc)
        self.value = repr(value)
        super(MaybeEncodingError, self).__init__(self.exc, self.value)
    def __str__(self):
        return "Error sending result: '{0}'. Reason: '{1}'".format(
            self.value, self.exc)
    def __repr__(self):
        return "<MaybeEncodingError: %s>" % str(self)
def worker(inqueue, outqueue, initializer=None, initargs=(), maxtasks=None):
    """Main loop executed inside each pool process.

    Repeatedly pulls ``(job, i, func, args, kwds)`` tasks off *inqueue*,
    runs them, and puts ``(job, i, (success, value))`` tuples on
    *outqueue*.  A ``None`` task is the shutdown sentinel.  When
    *maxtasks* is given, the worker exits after completing that many
    tasks (the pool's worker handler then starts a replacement).
    """
    assert maxtasks is None or (type(maxtasks) == int and maxtasks > 0)
    put = outqueue.put
    get = inqueue.get
    if hasattr(inqueue, '_writer'):
        # Process-backed queues: close the pipe ends this process never uses.
        inqueue._writer.close()
        outqueue._reader.close()
    if initializer is not None:
        initializer(*initargs)
    completed = 0
    while maxtasks is None or (maxtasks and completed < maxtasks):
        try:
            task = get()
        except (EOFError, IOError):
            debug('worker got EOFError or IOError -- exiting')
            break
        if task is None:
            debug('worker got sentinel -- exiting')
            break
        job, i, func, args, kwds = task
        try:
            result = (True, func(*args, **kwds))
        except Exception as e:
            # Failure is reported as data, not raised, so the parent can
            # deliver it to the matching ApplyResult/MapResult.
            result = (False, e)
        try:
            put((job, i, result))
        except Exception as e:
            # The result itself may be unpicklable; send a picklable
            # stand-in instead of silently losing the job.
            wrapped = MaybeEncodingError(e, result[1])
            debug("Possible encoding error while sending result: %s" % (
                wrapped))
            put((job, i, (False, wrapped)))
        completed += 1
    debug('worker exiting after %d tasks' % completed)
#
# Class representing a process pool
#
class Pool(object):
    '''
    Class which supports an async version of applying functions to arguments.
    '''
    # Overridable worker factory; ThreadPool substitutes a dummy (thread)
    # Process here.
    Process = Process

    def __init__(self, processes=None, initializer=None, initargs=(),
                 maxtasksperchild=None):
        self._setup_queues()
        self._taskqueue = queue.Queue()
        # Maps job id -> pending ApplyResult / MapResult / IMapIterator.
        self._cache = {}
        self._state = RUN
        self._maxtasksperchild = maxtasksperchild
        self._initializer = initializer
        self._initargs = initargs

        if processes is None:
            try:
                processes = cpu_count()
            except NotImplementedError:
                processes = 1
        if processes < 1:
            raise ValueError("Number of processes must be at least 1")

        if initializer is not None and not callable(initializer):
            raise TypeError('initializer must be a callable')

        self._processes = processes
        self._pool = []
        self._repopulate_pool()

        # Three daemon threads service the pool: one replaces retired
        # workers, one feeds tasks to workers, one dispatches results.
        self._worker_handler = threading.Thread(
            target=Pool._handle_workers,
            args=(self, )
            )
        self._worker_handler.daemon = True
        self._worker_handler._state = RUN
        self._worker_handler.start()

        self._task_handler = threading.Thread(
            target=Pool._handle_tasks,
            args=(self._taskqueue, self._quick_put, self._outqueue, self._pool)
            )
        self._task_handler.daemon = True
        self._task_handler._state = RUN
        self._task_handler.start()

        self._result_handler = threading.Thread(
            target=Pool._handle_results,
            args=(self._outqueue, self._quick_get, self._cache)
            )
        self._result_handler.daemon = True
        self._result_handler._state = RUN
        self._result_handler.start()

        # Registered cleanup runs at interpreter exit (or via terminate()).
        self._terminate = Finalize(
            self, self._terminate_pool,
            args=(self._taskqueue, self._inqueue, self._outqueue, self._pool,
                  self._worker_handler, self._task_handler,
                  self._result_handler, self._cache),
            exitpriority=15
            )

    def _join_exited_workers(self):
        """Cleanup after any worker processes which have exited due to reaching
        their specified lifetime. Returns True if any workers were cleaned up.
        """
        cleaned = False
        for i in reversed(range(len(self._pool))):
            worker = self._pool[i]
            if worker.exitcode is not None:
                # worker exited
                debug('cleaning up worker %d' % i)
                worker.join()
                cleaned = True
                del self._pool[i]
        return cleaned

    def _repopulate_pool(self):
        """Bring the number of pool processes up to the specified number,
        for use after reaping workers which have exited.
        """
        for i in range(self._processes - len(self._pool)):
            w = self.Process(target=worker,
                             args=(self._inqueue, self._outqueue,
                                   self._initializer,
                                   self._initargs, self._maxtasksperchild)
                            )
            self._pool.append(w)
            w.name = w.name.replace('Process', 'PoolWorker')
            w.daemon = True
            w.start()
            debug('added worker')

    def _maintain_pool(self):
        """Clean up any exited workers and start replacements for them.
        """
        if self._join_exited_workers():
            self._repopulate_pool()

    def _setup_queues(self):
        # Pipe-backed queues; _quick_put/_quick_get skip the queue wrapper
        # and talk to the underlying connection directly.
        from .queues import SimpleQueue
        self._inqueue = SimpleQueue()
        self._outqueue = SimpleQueue()
        self._quick_put = self._inqueue._writer.send
        self._quick_get = self._outqueue._reader.recv

    def apply(self, func, args=(), kwds={}):
        '''
        Equivalent of `func(*args, **kwds)`.
        '''
        assert self._state == RUN
        return self.apply_async(func, args, kwds).get()

    def map(self, func, iterable, chunksize=None):
        '''
        Apply `func` to each element in `iterable`, collecting the results
        in a list that is returned.
        '''
        return self._map_async(func, iterable, mapstar, chunksize).get()

    def starmap(self, func, iterable, chunksize=None):
        '''
        Like `map()` method but the elements of the `iterable` are expected to
        be iterables as well and will be unpacked as arguments. Hence
        `func` and (a, b) becomes func(a, b).
        '''
        return self._map_async(func, iterable, starmapstar, chunksize).get()

    def starmap_async(self, func, iterable, chunksize=None, callback=None,
            error_callback=None):
        '''
        Asynchronous version of `starmap()` method.
        '''
        return self._map_async(func, iterable, starmapstar, chunksize,
                               callback, error_callback)

    def imap(self, func, iterable, chunksize=1):
        '''
        Equivalent of `map()` -- can be MUCH slower than `Pool.map()`.
        '''
        if self._state != RUN:
            raise ValueError("Pool not running")
        if chunksize == 1:
            result = IMapIterator(self._cache)
            self._taskqueue.put((((result._job, i, func, (x,), {})
                         for i, x in enumerate(iterable)), result._set_length))
            return result
        else:
            assert chunksize > 1
            task_batches = Pool._get_tasks(func, iterable, chunksize)
            result = IMapIterator(self._cache)
            self._taskqueue.put((((result._job, i, mapstar, (x,), {})
                     for i, x in enumerate(task_batches)), result._set_length))
            # Flatten per-chunk result lists back to individual items.
            return (item for chunk in result for item in chunk)

    def imap_unordered(self, func, iterable, chunksize=1):
        '''
        Like `imap()` method but ordering of results is arbitrary.
        '''
        if self._state != RUN:
            raise ValueError("Pool not running")
        if chunksize == 1:
            result = IMapUnorderedIterator(self._cache)
            self._taskqueue.put((((result._job, i, func, (x,), {})
                         for i, x in enumerate(iterable)), result._set_length))
            return result
        else:
            assert chunksize > 1
            task_batches = Pool._get_tasks(func, iterable, chunksize)
            result = IMapUnorderedIterator(self._cache)
            self._taskqueue.put((((result._job, i, mapstar, (x,), {})
                     for i, x in enumerate(task_batches)), result._set_length))
            return (item for chunk in result for item in chunk)

    def apply_async(self, func, args=(), kwds={}, callback=None,
            error_callback=None):
        '''
        Asynchronous version of `apply()` method.
        '''
        if self._state != RUN:
            raise ValueError("Pool not running")
        result = ApplyResult(self._cache, callback, error_callback)
        self._taskqueue.put(([(result._job, None, func, args, kwds)], None))
        return result

    def map_async(self, func, iterable, chunksize=None, callback=None,
            error_callback=None):
        '''
        Asynchronous version of `map()` method.
        '''
        return self._map_async(func, iterable, mapstar, chunksize, callback,
                               error_callback)

    def _map_async(self, func, iterable, mapper, chunksize=None, callback=None,
            error_callback=None):
        '''
        Helper function to implement map, starmap and their async counterparts.
        '''
        if self._state != RUN:
            raise ValueError("Pool not running")
        if not hasattr(iterable, '__len__'):
            iterable = list(iterable)

        if chunksize is None:
            # Default heuristic: ~4 chunks per worker, rounded up.
            chunksize, extra = divmod(len(iterable), len(self._pool) * 4)
            if extra:
                chunksize += 1
        if len(iterable) == 0:
            chunksize = 0

        task_batches = Pool._get_tasks(func, iterable, chunksize)
        result = MapResult(self._cache, chunksize, len(iterable), callback,
                           error_callback=error_callback)
        self._taskqueue.put((((result._job, i, mapper, (x,), {})
                              for i, x in enumerate(task_batches)), None))
        return result

    @staticmethod
    def _handle_workers(pool):
        """Worker-handler thread body: keep the worker set replenished until
        shutdown, then send the sentinel that stops the task handler."""
        thread = threading.current_thread()

        # Keep maintaining workers until the cache gets drained, unless the pool
        # is terminated.
        while thread._state == RUN or (pool._cache and thread._state != TERMINATE):
            pool._maintain_pool()
            time.sleep(0.1)
        # send sentinel to stop workers
        pool._taskqueue.put(None)
        debug('worker handler exiting')

    @staticmethod
    def _handle_tasks(taskqueue, put, outqueue, pool):
        """Task-handler thread body: move tasks from the task queue onto the
        workers' inqueue; on the None sentinel, propagate shutdown."""
        thread = threading.current_thread()

        for taskseq, set_length in iter(taskqueue.get, None):
            i = -1
            for i, task in enumerate(taskseq):
                if thread._state:
                    debug('task handler found thread._state != RUN')
                    break
                try:
                    put(task)
                except IOError:
                    debug('could not put task on queue')
                    break
            else:
                # Inner loop completed without break: report how many tasks
                # were actually submitted (used by IMapIterator).
                if set_length:
                    debug('doing set_length()')
                    set_length(i+1)
                continue
            break
        else:
            debug('task handler got sentinel')

        try:
            # tell result handler to finish when cache is empty
            debug('task handler sending sentinel to result handler')
            outqueue.put(None)

            # tell workers there is no more work
            debug('task handler sending sentinel to workers')
            for p in pool:
                put(None)
        except IOError:
            debug('task handler got IOError when sending sentinels')

        debug('task handler exiting')

    @staticmethod
    def _handle_results(outqueue, get, cache):
        """Result-handler thread body: deliver ``(job, i, obj)`` results from
        the workers to the matching entry in *cache*."""
        thread = threading.current_thread()

        while 1:
            try:
                task = get()
            except (IOError, EOFError):
                debug('result handler got EOFError/IOError -- exiting')
                return

            if thread._state:
                assert thread._state == TERMINATE
                debug('result handler found thread._state=TERMINATE')
                break

            if task is None:
                debug('result handler got sentinel')
                break

            job, i, obj = task
            try:
                cache[job]._set(i, obj)
            except KeyError:
                # Result for a job that is no longer waited on; drop it.
                pass

        # Drain remaining results after the sentinel/terminate.
        while cache and thread._state != TERMINATE:
            try:
                task = get()
            except (IOError, EOFError):
                debug('result handler got EOFError/IOError -- exiting')
                return

            if task is None:
                debug('result handler ignoring extra sentinel')
                continue
            job, i, obj = task
            try:
                cache[job]._set(i, obj)
            except KeyError:
                pass

        if hasattr(outqueue, '_reader'):
            debug('ensuring that outqueue is not full')
            # If we don't make room available in outqueue then
            # attempts to add the sentinel (None) to outqueue may
            # block.  There is guaranteed to be no more than 2 sentinels.
            try:
                for i in range(10):
                    if not outqueue._reader.poll():
                        break
                    get()
            except (IOError, EOFError):
                pass

        debug('result handler exiting: len(cache)=%s, thread._state=%s',
              len(cache), thread._state)

    @staticmethod
    def _get_tasks(func, it, size):
        """Yield ``(func, chunk)`` pairs, slicing *it* into tuples of at most
        *size* items."""
        it = iter(it)
        while 1:
            x = tuple(itertools.islice(it, size))
            if not x:
                return
            yield (func, x)

    def __reduce__(self):
        raise NotImplementedError(
              'pool objects cannot be passed between processes or pickled'
              )

    def close(self):
        debug('closing pool')
        if self._state == RUN:
            self._state = CLOSE
            self._worker_handler._state = CLOSE

    def terminate(self):
        debug('terminating pool')
        self._state = TERMINATE
        self._worker_handler._state = TERMINATE
        self._terminate()

    def join(self):
        debug('joining pool')
        # join() is only valid after close() or terminate().
        assert self._state in (CLOSE, TERMINATE)
        self._worker_handler.join()
        self._task_handler.join()
        self._result_handler.join()
        for p in self._pool:
            p.join()

    @staticmethod
    def _help_stuff_finish(inqueue, task_handler, size):
        # task_handler may be blocked trying to put items on inqueue
        debug('removing tasks from inqueue until task handler finished')
        inqueue._rlock.acquire()
        while task_handler.is_alive() and inqueue._reader.poll():
            inqueue._reader.recv()
            time.sleep(0)

    @classmethod
    def _terminate_pool(cls, taskqueue, inqueue, outqueue, pool,
                        worker_handler, task_handler, result_handler, cache):
        """Tear down handler threads and workers; registered via Finalize.

        Arguments mirror the pool's internals so this can run even after the
        Pool instance itself is being garbage-collected.
        """
        # this is guaranteed to only be called once
        debug('finalizing pool')

        worker_handler._state = TERMINATE
        task_handler._state = TERMINATE

        debug('helping task handler/workers to finish')
        cls._help_stuff_finish(inqueue, task_handler, len(pool))

        assert result_handler.is_alive() or len(cache) == 0

        result_handler._state = TERMINATE
        outqueue.put(None)                  # sentinel

        # We must wait for the worker handler to exit before terminating
        # workers because we don't want workers to be restarted behind our back.
        debug('joining worker handler')
        if threading.current_thread() is not worker_handler:
            worker_handler.join()

        # Terminate workers which haven't already finished.
        if pool and hasattr(pool[0], 'terminate'):
            debug('terminating workers')
            for p in pool:
                if p.exitcode is None:
                    p.terminate()

        debug('joining task handler')
        if threading.current_thread() is not task_handler:
            task_handler.join()

        debug('joining result handler')
        if threading.current_thread() is not result_handler:
            result_handler.join()

        if pool and hasattr(pool[0], 'terminate'):
            debug('joining pool workers')
            for p in pool:
                if p.is_alive():
                    # worker has not yet exited
                    debug('cleaning up worker %d' % p.pid)
                    p.join()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # NOTE(review): context-manager exit terminates rather than closes;
        # pending work is discarded. This matches CPython's Pool behavior.
        self.terminate()
#
# Class whose instances are returned by `Pool.apply_async()`
#
class ApplyResult(object):
    """Handle returned by ``Pool.apply_async()``.

    Lets the caller wait for, and then fetch, a single result.  The pool's
    result-handler thread completes it by calling ``_set()``.
    """

    def __init__(self, cache, callback, error_callback):
        self._event = threading.Event()  # set once the result has arrived
        self._job = next(job_counter)    # unique id matching queue messages
        self._cache = cache
        self._callback = callback
        self._error_callback = error_callback
        # Register so the result handler can find us by job id.
        cache[self._job] = self

    def ready(self):
        """Return True if the call has completed (successfully or not)."""
        return self._event.is_set()

    def successful(self):
        """Return True if the call completed without raising; only valid
        once ready()."""
        assert self.ready()
        return self._success

    def wait(self, timeout=None):
        """Block until the result arrives or *timeout* seconds elapse."""
        self._event.wait(timeout)

    def get(self, timeout=None):
        """Return the result, re-raising the worker's exception on failure.

        Raises TimeoutError if the result is not ready within *timeout*.
        """
        self.wait(timeout)
        if not self.ready():
            raise TimeoutError
        if self._success:
            return self._value
        else:
            raise self._value

    def _set(self, i, obj):
        # Called by the result-handler thread with (success, value);
        # *i* is unused for single results.
        self._success, self._value = obj
        if self._callback and self._success:
            self._callback(self._value)
        if self._error_callback and not self._success:
            self._error_callback(self._value)
        self._event.set()
        del self._cache[self._job]
AsyncResult = ApplyResult       # backwards-compatible alias -- see issue #17805
#
# Class whose instances are returned by `Pool.map_async()`
#
class MapResult(ApplyResult):
    """Result handle for ``Pool.map_async``/``starmap_async``.

    Collects per-chunk results into a single ordered list; becomes ready
    once every chunk has arrived (or any chunk fails).
    """

    def __init__(self, cache, chunksize, length, callback, error_callback):
        ApplyResult.__init__(self, cache, callback,
                             error_callback=error_callback)
        self._success = True
        self._value = [None] * length    # pre-sized output slots
        self._chunksize = chunksize
        if chunksize <= 0:
            # Empty input: complete immediately.
            self._number_left = 0
            self._event.set()
            del cache[self._job]
        else:
            self._number_left = length//chunksize + bool(length % chunksize)

    def _set(self, i, success_result):
        # Called once per chunk; *i* is the chunk index.
        success, result = success_result
        if success:
            self._value[i*self._chunksize:(i+1)*self._chunksize] = result
            self._number_left -= 1
            if self._number_left == 0:
                if self._callback:
                    self._callback(self._value)
                del self._cache[self._job]
                self._event.set()
        else:
            # First failing chunk makes the whole map fail with that error.
            self._success = False
            self._value = result
            if self._error_callback:
                self._error_callback(self._value)
            del self._cache[self._job]
            self._event.set()
#
# Class whose instances are returned by `Pool.imap()`
#
class IMapIterator(object):
    """Iterator returned by ``Pool.imap()``.

    Yields results in task-submission order; results arriving out of order
    are buffered in ``_unsorted`` until their predecessors are delivered.
    """

    def __init__(self, cache):
        self._cond = threading.Condition(threading.Lock())
        self._job = next(job_counter)
        self._cache = cache
        self._items = collections.deque()  # results ready to be yielded, in order
        self._index = 0                    # index of the next result to deliver
        self._length = None                # total task count, once known
        self._unsorted = {}                # out-of-order results awaiting delivery
        cache[self._job] = self

    def __iter__(self):
        return self

    def next(self, timeout=None):
        """Return the next result, blocking up to *timeout* seconds.

        Raises StopIteration when exhausted, TimeoutError on timeout.
        """
        self._cond.acquire()
        try:
            try:
                item = self._items.popleft()
            except IndexError:
                if self._index == self._length:
                    raise StopIteration
                self._cond.wait(timeout)
                # Re-check after the wait: either an item arrived, we are
                # exhausted, or the wait timed out.
                try:
                    item = self._items.popleft()
                except IndexError:
                    if self._index == self._length:
                        raise StopIteration
                    raise TimeoutError
        finally:
            self._cond.release()

        success, value = item
        if success:
            return value
        raise value

    __next__ = next                    # XXX Python 3 iterator protocol alias

    def _set(self, i, obj):
        # Called by the result handler; buffer out-of-order results until
        # all predecessors have been queued for delivery.
        self._cond.acquire()
        try:
            if self._index == i:
                self._items.append(obj)
                self._index += 1
                while self._index in self._unsorted:
                    obj = self._unsorted.pop(self._index)
                    self._items.append(obj)
                    self._index += 1
                self._cond.notify()
            else:
                self._unsorted[i] = obj

            if self._index == self._length:
                del self._cache[self._job]
        finally:
            self._cond.release()

    def _set_length(self, length):
        # Called by the task handler once the true number of tasks is known.
        self._cond.acquire()
        try:
            self._length = length
            if self._index == self._length:
                self._cond.notify()
                del self._cache[self._job]
        finally:
            self._cond.release()
#
# Class whose instances are returned by `Pool.imap_unordered()`
#
class IMapUnorderedIterator(IMapIterator):
    """Iterator returned by ``Pool.imap_unordered()``.

    Like IMapIterator but delivers results in arrival order, so no
    reorder buffer is needed.
    """

    def _set(self, i, obj):
        self._cond.acquire()
        try:
            self._items.append(obj)
            self._index += 1
            self._cond.notify()
            if self._index == self._length:
                del self._cache[self._job]
        finally:
            self._cond.release()
#
#
#
class ThreadPool(Pool):
    """Pool variant whose workers are threads instead of processes.

    Reuses all of Pool's machinery; only the worker factory and the
    queue plumbing differ.
    """
    # Thread-backed stand-in for multiprocessing.Process.
    from .dummy import Process

    def __init__(self, processes=None, initializer=None, initargs=()):
        Pool.__init__(self, processes, initializer, initargs)

    def _setup_queues(self):
        # Plain thread-safe queues; no pipes, so no fast-path indirection.
        self._inqueue = queue.Queue()
        self._outqueue = queue.Queue()
        self._quick_put = self._inqueue.put
        self._quick_get = self._outqueue.get

    @staticmethod
    def _help_stuff_finish(inqueue, task_handler, size):
        # put sentinels at head of inqueue to make workers finish
        inqueue.not_empty.acquire()
        try:
            inqueue.queue.clear()
            inqueue.queue.extend([None] * size)
            inqueue.not_empty.notify_all()
        finally:
            inqueue.not_empty.release()
| agpl-3.0 |
eamuntz/Django-Tut | env/lib/python2.7/site-packages/django/contrib/gis/geos/tests/test_mutable_list.py | 109 | 14746 | # Copyright (c) 2008-2009 Aryeh Leib Taurog, http://www.aryehleib.com
# All rights reserved.
#
# Modified from original contribution by Aryeh Leib Taurog, which was
# released under the New BSD license.
from django.contrib.gis.geos.mutable_list import ListMixin
from django.utils import six
from django.utils import unittest
class UserListA(ListMixin):
    """Minimal ListMixin subclass backed by an immutable tuple.

    Implements only the required hooks (_set_list / _get_single_external),
    forcing ListMixin to rebuild the whole container on every mutation.
    """
    _mytype = tuple

    def __init__(self, i_list, *args, **kwargs):
        self._list = self._mytype(i_list)
        super(UserListA, self).__init__(*args, **kwargs)

    def __len__(self): return len(self._list)

    def __str__(self): return str(self._list)

    def __repr__(self): return repr(self._list)

    def _set_list(self, length, items):
        # this would work:
        # self._list = self._mytype(items)
        # but then we wouldn't be testing length parameter
        itemList = ['x'] * length
        for i, v in enumerate(items):
            itemList[i] = v

        self._list = self._mytype(itemList)

    def _get_single_external(self, index):
        return self._list[index]
class UserListB(UserListA):
    """Mutable-list-backed variant that also provides the optional
    _set_single() hook, letting ListMixin mutate items in place."""
    _mytype = list

    def _set_single(self, index, value):
        self._list[index] = value
def nextRange(length):
    """Return a fresh range of ``length`` consecutive integers.

    Every call advances a counter stored on the function itself by 100,
    so successive calls always produce distinct, non-overlapping values
    (useful for slice-assignment tests).
    """
    start = nextRange.start + 100
    nextRange.start = start
    return range(start, start + length)

nextRange.start = 0
class ListMixinTest(unittest.TestCase):
    """
    Tests base class ListMixin by comparing a list clone which is
    a ListMixin subclass with a real Python list.

    Method names carry numeric prefixes (test01..) so they run in a
    deterministic order under unittest's alphabetical collection.
    """
    limit = 3
    listType = UserListA  # overridden by ListMixinTestSingle

    def lists_of_len(self, length=None):
        # Return a (plain list, mixin-backed clone) pair with identical content.
        if length is None: length = self.limit
        pl = list(range(length))
        return pl, self.listType(pl)

    def limits_plus(self, b):
        # Index values spanning [-limit-b, limit+b) for boundary coverage.
        return range(-self.limit - b, self.limit + b)

    def step_range(self):
        # All non-zero slice steps in [-limit-1, limit].
        return list(range(-1 - self.limit, 0)) + list(range(1, 1 + self.limit))

    def test01_getslice(self):
        'Slice retrieval'
        pl, ul = self.lists_of_len()
        for i in self.limits_plus(1):
            self.assertEqual(pl[i:], ul[i:], 'slice [%d:]' % (i))
            self.assertEqual(pl[:i], ul[:i], 'slice [:%d]' % (i))

            for j in self.limits_plus(1):
                self.assertEqual(pl[i:j], ul[i:j], 'slice [%d:%d]' % (i,j))
                for k in self.step_range():
                    self.assertEqual(pl[i:j:k], ul[i:j:k], 'slice [%d:%d:%d]' % (i,j,k))

            for k in self.step_range():
                self.assertEqual(pl[i::k], ul[i::k], 'slice [%d::%d]' % (i,k))
                self.assertEqual(pl[:i:k], ul[:i:k], 'slice [:%d:%d]' % (i,k))

        for k in self.step_range():
            self.assertEqual(pl[::k], ul[::k], 'slice [::%d]' % (k))

    def test02_setslice(self):
        'Slice assignment'
        def setfcn(x,i,j,k,L): x[i:j:k] = range(L)
        pl, ul = self.lists_of_len()
        for slen in range(self.limit + 1):
            ssl = nextRange(slen)
            ul[:] = ssl
            pl[:] = ssl
            self.assertEqual(pl, ul[:], 'set slice [:]')

            for i in self.limits_plus(1):
                ssl = nextRange(slen)
                ul[i:] = ssl
                pl[i:] = ssl
                self.assertEqual(pl, ul[:], 'set slice [%d:]' % (i))

                ssl = nextRange(slen)
                ul[:i] = ssl
                pl[:i] = ssl
                self.assertEqual(pl, ul[:], 'set slice [:%d]' % (i))

                for j in self.limits_plus(1):
                    ssl = nextRange(slen)
                    ul[i:j] = ssl
                    pl[i:j] = ssl
                    self.assertEqual(pl, ul[:], 'set slice [%d:%d]' % (i, j))

                    for k in self.step_range():
                        # Extended slices require an exactly matching length.
                        ssl = nextRange( len(ul[i:j:k]) )
                        ul[i:j:k] = ssl
                        pl[i:j:k] = ssl
                        self.assertEqual(pl, ul[:], 'set slice [%d:%d:%d]' % (i, j, k))

                        sliceLen = len(ul[i:j:k])
                        self.assertRaises(ValueError, setfcn, ul, i, j, k, sliceLen + 1)
                        if sliceLen > 2:
                            self.assertRaises(ValueError, setfcn, ul, i, j, k, sliceLen - 1)

                for k in self.step_range():
                    ssl = nextRange( len(ul[i::k]) )
                    ul[i::k] = ssl
                    pl[i::k] = ssl
                    self.assertEqual(pl, ul[:], 'set slice [%d::%d]' % (i, k))

                    ssl = nextRange( len(ul[:i:k]) )
                    ul[:i:k] = ssl
                    pl[:i:k] = ssl
                    self.assertEqual(pl, ul[:], 'set slice [:%d:%d]' % (i, k))

            for k in self.step_range():
                ssl = nextRange(len(ul[::k]))
                ul[::k] = ssl
                pl[::k] = ssl
                self.assertEqual(pl, ul[:], 'set slice [::%d]' % (k))

    def test03_delslice(self):
        'Delete slice'
        for Len in range(self.limit):
            pl, ul = self.lists_of_len(Len)
            del pl[:]
            del ul[:]
            self.assertEqual(pl[:], ul[:], 'del slice [:]')

            for i in range(-Len - 1, Len + 1):
                pl, ul = self.lists_of_len(Len)
                del pl[i:]
                del ul[i:]
                self.assertEqual(pl[:], ul[:], 'del slice [%d:]' % (i))
                pl, ul = self.lists_of_len(Len)
                del pl[:i]
                del ul[:i]
                self.assertEqual(pl[:], ul[:], 'del slice [:%d]' % (i))
                for j in range(-Len - 1, Len + 1):
                    pl, ul = self.lists_of_len(Len)
                    del pl[i:j]
                    del ul[i:j]
                    self.assertEqual(pl[:], ul[:], 'del slice [%d:%d]' % (i,j))
                    for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
                        pl, ul = self.lists_of_len(Len)
                        del pl[i:j:k]
                        del ul[i:j:k]
                        self.assertEqual(pl[:], ul[:], 'del slice [%d:%d:%d]' % (i,j,k))

                for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
                    pl, ul = self.lists_of_len(Len)
                    del pl[:i:k]
                    del ul[:i:k]
                    self.assertEqual(pl[:], ul[:], 'del slice [:%d:%d]' % (i,k))

                    pl, ul = self.lists_of_len(Len)
                    del pl[i::k]
                    del ul[i::k]
                    self.assertEqual(pl[:], ul[:], 'del slice [%d::%d]' % (i,k))

            for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
                pl, ul = self.lists_of_len(Len)
                del pl[::k]
                del ul[::k]
                self.assertEqual(pl[:], ul[:], 'del slice [::%d]' % (k))

    def test04_get_set_del_single(self):
        'Get/set/delete single item'
        pl, ul = self.lists_of_len()
        for i in self.limits_plus(0):
            self.assertEqual(pl[i], ul[i], 'get single item [%d]' % i)

        for i in self.limits_plus(0):
            pl, ul = self.lists_of_len()
            pl[i] = 100
            ul[i] = 100
            self.assertEqual(pl[:], ul[:], 'set single item [%d]' % i)

        for i in self.limits_plus(0):
            pl, ul = self.lists_of_len()
            del pl[i]
            del ul[i]
            self.assertEqual(pl[:], ul[:], 'del single item [%d]' % i)

    def test05_out_of_range_exceptions(self):
        'Out of range exceptions'
        def setfcn(x, i): x[i] = 20
        def getfcn(x, i): return x[i]
        def delfcn(x, i): del x[i]
        pl, ul = self.lists_of_len()
        for i in (-1 - self.limit, self.limit):
            self.assertRaises(IndexError, setfcn, ul, i) # 'set index %d' % i)
            self.assertRaises(IndexError, getfcn, ul, i) # 'get index %d' % i)
            self.assertRaises(IndexError, delfcn, ul, i) # 'del index %d' % i)

    def test06_list_methods(self):
        'List methods'
        pl, ul = self.lists_of_len()
        pl.append(40)
        ul.append(40)
        self.assertEqual(pl[:], ul[:], 'append')

        pl.extend(range(50,55))
        ul.extend(range(50,55))
        self.assertEqual(pl[:], ul[:], 'extend')

        pl.reverse()
        ul.reverse()
        self.assertEqual(pl[:], ul[:], 'reverse')

        for i in self.limits_plus(1):
            pl, ul = self.lists_of_len()
            pl.insert(i,50)
            ul.insert(i,50)
            self.assertEqual(pl[:], ul[:], 'insert at %d' % i)

        for i in self.limits_plus(0):
            pl, ul = self.lists_of_len()
            self.assertEqual(pl.pop(i), ul.pop(i), 'popped value at %d' % i)
            self.assertEqual(pl[:], ul[:], 'after pop at %d' % i)

        pl, ul = self.lists_of_len()
        # NOTE(review): `i` here is the leaked loop variable from the loop
        # above (its last value equals the final index), so this happens to
        # match pl.pop(); presumably `ul.pop()` was intended -- verify.
        self.assertEqual(pl.pop(), ul.pop(i), 'popped value')
        self.assertEqual(pl[:], ul[:], 'after pop')

        pl, ul = self.lists_of_len()
        def popfcn(x, i): x.pop(i)
        self.assertRaises(IndexError, popfcn, ul, self.limit)
        self.assertRaises(IndexError, popfcn, ul, -1 - self.limit)

        pl, ul = self.lists_of_len()
        for val in range(self.limit):
            self.assertEqual(pl.index(val), ul.index(val), 'index of %d' % val)

        for val in self.limits_plus(2):
            self.assertEqual(pl.count(val), ul.count(val), 'count %d' % val)

        for val in range(self.limit):
            pl, ul = self.lists_of_len()
            pl.remove(val)
            ul.remove(val)
            self.assertEqual(pl[:], ul[:], 'after remove val %d' % val)

        def indexfcn(x, v): return x.index(v)
        def removefcn(x, v): return x.remove(v)
        self.assertRaises(ValueError, indexfcn, ul, 40)
        self.assertRaises(ValueError, removefcn, ul, 40)

    def test07_allowed_types(self):
        'Type-restricted list'
        pl, ul = self.lists_of_len()
        ul._allowed = six.integer_types
        ul[1] = 50
        ul[:2] = [60, 70, 80]
        def setfcn(x, i, v): x[i] = v
        self.assertRaises(TypeError, setfcn, ul, 2, 'hello')
        self.assertRaises(TypeError, setfcn, ul, slice(0,3,2), ('hello','goodbye'))

    def test08_min_length(self):
        'Length limits'
        pl, ul = self.lists_of_len()
        ul._minlength = 1
        def delfcn(x,i): del x[:i]
        def setfcn(x,i): x[:i] = []
        for i in range(self.limit - ul._minlength + 1, self.limit + 1):
            self.assertRaises(ValueError, delfcn, ul, i)
            self.assertRaises(ValueError, setfcn, ul, i)
        del ul[:ul._minlength]

        ul._maxlength = 4
        for i in range(0, ul._maxlength - len(ul)):
            ul.append(i)
        self.assertRaises(ValueError, ul.append, 10)

    def test09_iterable_check(self):
        'Error on assigning non-iterable to slice'
        pl, ul = self.lists_of_len(self.limit + 1)
        def setfcn(x, i, v): x[i] = v
        self.assertRaises(TypeError, setfcn, ul, slice(0,3,2), 2)

    def test10_checkindex(self):
        'Index check'
        pl, ul = self.lists_of_len()
        for i in self.limits_plus(0):
            if i < 0:
                self.assertEqual(ul._checkindex(i), i + self.limit, '_checkindex(neg index)')
            else:
                self.assertEqual(ul._checkindex(i), i, '_checkindex(pos index)')

        for i in (-self.limit - 1, self.limit):
            self.assertRaises(IndexError, ul._checkindex, i)

        # Custom exception class should be honoured by _checkindex.
        ul._IndexError = TypeError
        self.assertRaises(TypeError, ul._checkindex, -self.limit - 1)

    def test_11_sorting(self):
        'Sorting'
        pl, ul = self.lists_of_len()
        pl.insert(0, pl.pop())
        ul.insert(0, ul.pop())
        pl.sort()
        ul.sort()
        self.assertEqual(pl[:], ul[:], 'sort')

        mid = pl[len(pl) // 2]
        pl.sort(key=lambda x: (mid-x)**2)
        ul.sort(key=lambda x: (mid-x)**2)
        self.assertEqual(pl[:], ul[:], 'sort w/ key')

        pl.insert(0, pl.pop())
        ul.insert(0, ul.pop())
        pl.sort(reverse=True)
        ul.sort(reverse=True)
        self.assertEqual(pl[:], ul[:], 'sort w/ reverse')

        mid = pl[len(pl) // 2]
        pl.sort(key=lambda x: (mid-x)**2)
        ul.sort(key=lambda x: (mid-x)**2)
        self.assertEqual(pl[:], ul[:], 'sort w/ key')

    def test_12_arithmetic(self):
        'Arithmetic'
        pl, ul = self.lists_of_len()
        al = list(range(10,14))
        self.assertEqual(list(pl + al), list(ul + al), 'add')
        self.assertEqual(type(ul), type(ul + al), 'type of add result')
        self.assertEqual(list(al + pl), list(al + ul), 'radd')
        self.assertEqual(type(al), type(al + ul), 'type of radd result')
        objid = id(ul)
        pl += al
        ul += al
        self.assertEqual(pl[:], ul[:], 'in-place add')
        self.assertEqual(objid, id(ul), 'in-place add id')

        for n in (-1,0,1,3):
            pl, ul = self.lists_of_len()
            self.assertEqual(list(pl * n), list(ul * n), 'mul by %d' % n)
            self.assertEqual(type(ul), type(ul * n), 'type of mul by %d result' % n)
            self.assertEqual(list(n * pl), list(n * ul), 'rmul by %d' % n)
            self.assertEqual(type(ul), type(n * ul), 'type of rmul by %d result' % n)
            objid = id(ul)
            pl *= n
            ul *= n
            self.assertEqual(pl[:], ul[:], 'in-place mul by %d' % n)
            self.assertEqual(objid, id(ul), 'in-place mul by %d id' % n)

        pl, ul = self.lists_of_len()
        self.assertEqual(pl, ul, 'cmp for equal')
        self.assertFalse(ul == pl + [2], 'cmp for not equal')
        self.assertTrue(pl >= ul, 'cmp for gte self')
        self.assertTrue(pl <= ul, 'cmp for lte self')
        self.assertTrue(ul >= pl, 'cmp for self gte')
        self.assertTrue(ul <= pl, 'cmp for self lte')

        self.assertTrue(pl + [5] > ul, 'cmp')
        self.assertTrue(pl + [5] >= ul, 'cmp')
        self.assertTrue(pl < ul + [2], 'cmp')
        self.assertTrue(pl <= ul + [2], 'cmp')
        self.assertTrue(ul + [5] > pl, 'cmp')
        self.assertTrue(ul + [5] >= pl, 'cmp')
        self.assertTrue(ul < pl + [2], 'cmp')
        self.assertTrue(ul <= pl + [2], 'cmp')

        # Also works with a custom IndexError
        ul_longer = ul + [2]
        ul_longer._IndexError = TypeError
        ul._IndexError = TypeError
        self.assertFalse(ul_longer == pl)
        self.assertFalse(ul == ul_longer)
        self.assertTrue(ul_longer > ul)

        pl[1] = 20
        self.assertTrue(pl > ul, 'cmp for gt self')
        self.assertTrue(ul < pl, 'cmp for self lt')
        pl[1] = -20
        self.assertTrue(pl < ul, 'cmp for lt self')
        self.assertTrue(pl < ul, 'cmp for lt self')
class ListMixinTestSingle(ListMixinTest):
    """Re-run the full ListMixinTest suite against UserListB, which also
    implements the optional _set_single() mutation hook."""
    listType = UserListB
| mit |
heeraj123/oh-mainline | vendor/packages/twisted/twisted/words/test/test_jabbererror.py | 17 | 11918 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.words.protocols.jabber.error}.
"""
from twisted.trial import unittest
from twisted.words.protocols.jabber import error
from twisted.words.xish import domish
NS_XML = 'http://www.w3.org/XML/1998/namespace'          # namespace of the xml:lang attribute
NS_STREAMS = 'http://etherx.jabber.org/streams'          # XMPP stream-level element namespace
NS_XMPP_STREAMS = 'urn:ietf:params:xml:ns:xmpp-streams'  # stream error condition namespace
NS_XMPP_STANZAS = 'urn:ietf:params:xml:ns:xmpp-stanzas'  # stanza error condition namespace
class BaseErrorTest(unittest.TestCase):
    """
    Tests for L{error.BaseError}'s element serialization.
    """

    def test_getElementPlain(self):
        """
        Test getting an element for a plain error.
        """
        e = error.BaseError('feature-not-implemented')
        element = e.getElement()
        self.assertIdentical(element.uri, None)
        self.assertEquals(len(element.children), 1)

    def test_getElementText(self):
        """
        Test getting an element for an error with a text.
        """
        e = error.BaseError('feature-not-implemented', 'text')
        element = e.getElement()
        self.assertEquals(len(element.children), 2)
        self.assertEquals(unicode(element.text), 'text')
        self.assertEquals(element.text.getAttribute((NS_XML, 'lang')), None)

    def test_getElementTextLang(self):
        """
        Test getting an element for an error with a text and language.
        """
        e = error.BaseError('feature-not-implemented', 'text', 'en_US')
        element = e.getElement()
        self.assertEquals(len(element.children), 2)
        self.assertEquals(unicode(element.text), 'text')
        self.assertEquals(element.text[(NS_XML, 'lang')], 'en_US')

    def test_getElementAppCondition(self):
        """
        Test getting an element for an error with an app specific condition.
        """
        ac = domish.Element(('testns', 'myerror'))
        e = error.BaseError('feature-not-implemented', appCondition=ac)
        element = e.getElement()
        self.assertEquals(len(element.children), 2)
        self.assertEquals(element.myerror, ac)
class StreamErrorTest(unittest.TestCase):
    """
    Tests for L{error.StreamError}'s element serialization.
    """

    def test_getElementPlain(self):
        """
        Test namespace of the element representation of an error.
        """
        e = error.StreamError('feature-not-implemented')
        element = e.getElement()
        self.assertEquals(element.uri, NS_STREAMS)

    def test_getElementConditionNamespace(self):
        """
        Test that the error condition element has the correct namespace.
        """
        e = error.StreamError('feature-not-implemented')
        element = e.getElement()
        self.assertEquals(NS_XMPP_STREAMS, getattr(element, 'feature-not-implemented').uri)

    def test_getElementTextNamespace(self):
        """
        Test that the error text element has the correct namespace.
        """
        e = error.StreamError('feature-not-implemented', 'text')
        element = e.getElement()
        self.assertEquals(NS_XMPP_STREAMS, element.text.uri)
class StanzaErrorTest(unittest.TestCase):
    """
    Tests for L{error.StanzaError}.
    """

    def test_typeRemoteServerTimeout(self):
        """
        Remote Server Timeout should yield type wait, code 504.
        """
        e = error.StanzaError('remote-server-timeout')
        self.assertEquals('wait', e.type)
        self.assertEquals('504', e.code)

    def test_getElementPlain(self):
        """
        Test getting an element for a plain stanza error.
        """
        e = error.StanzaError('feature-not-implemented')
        element = e.getElement()
        self.assertEquals(element.uri, None)
        self.assertEquals(element['type'], 'cancel')
        self.assertEquals(element['code'], '501')

    def test_getElementType(self):
        """
        Test getting an element for a stanza error with a given type.
        """
        e = error.StanzaError('feature-not-implemented', 'auth')
        element = e.getElement()
        self.assertEquals(element.uri, None)
        self.assertEquals(element['type'], 'auth')
        self.assertEquals(element['code'], '501')

    def test_getElementConditionNamespace(self):
        """
        Test that the error condition element has the correct namespace.
        """
        e = error.StanzaError('feature-not-implemented')
        element = e.getElement()
        self.assertEquals(NS_XMPP_STANZAS, getattr(element, 'feature-not-implemented').uri)

    def test_getElementTextNamespace(self):
        """
        Test that the error text element has the correct namespace.
        """
        e = error.StanzaError('feature-not-implemented', text='text')
        element = e.getElement()
        self.assertEquals(NS_XMPP_STANZAS, element.text.uri)

    def test_toResponse(self):
        """
        Test an error response is generated from a stanza.

        The addressing on the (new) response stanza should be reversed, an
        error child (with proper properties) added and the type set to
        C{'error'}.
        """
        stanza = domish.Element(('jabber:client', 'message'))
        stanza['type'] = 'chat'
        stanza['to'] = 'user1@example.com'
        stanza['from'] = 'user2@example.com/resource'
        e = error.StanzaError('service-unavailable')
        response = e.toResponse(stanza)
        self.assertNotIdentical(response, stanza)
        self.assertEqual(response['from'], 'user1@example.com')
        self.assertEqual(response['to'], 'user2@example.com/resource')
        self.assertEqual(response['type'], 'error')
        self.assertEqual(response.error.children[0].name,
                         'service-unavailable')
        self.assertEqual(response.error['type'], 'cancel')
        self.assertNotEqual(stanza.children, response.children)
class ParseErrorTest(unittest.TestCase):
    """
    Tests for L{error._parseError}.
    """

    def setUp(self):
        # A bare <error/> element; each test populates it differently.
        self.error = domish.Element((None, 'error'))

    def test_empty(self):
        """
        Test parsing of the empty error element.
        """
        result = error._parseError(self.error, 'errorns')
        # all four keys must be present even when nothing was found
        self.assertEqual({'condition': None,
                          'text': None,
                          'textLang': None,
                          'appCondition': None}, result)

    def test_condition(self):
        """
        Test parsing of an error element with a condition.
        """
        self.error.addElement(('errorns', 'bad-request'))
        result = error._parseError(self.error, 'errorns')
        self.assertEqual('bad-request', result['condition'])

    def test_text(self):
        """
        Test parsing of an error element with a text.
        """
        text = self.error.addElement(('errorns', 'text'))
        text.addContent('test')
        result = error._parseError(self.error, 'errorns')
        self.assertEqual('test', result['text'])
        self.assertEqual(None, result['textLang'])

    def test_textLang(self):
        """
        Test parsing of an error element with a text with a defined language.
        """
        text = self.error.addElement(('errorns', 'text'))
        text[NS_XML, 'lang'] = 'en_US'
        text.addContent('test')
        result = error._parseError(self.error, 'errorns')
        self.assertEqual('en_US', result['textLang'])

    def test_textLangInherited(self):
        """
        Test parsing of an error element with a text with inherited language.
        """
        # xml:lang is set on the parent <error/>, not on <text/> itself
        text = self.error.addElement(('errorns', 'text'))
        self.error[NS_XML, 'lang'] = 'en_US'
        text.addContent('test')
        result = error._parseError(self.error, 'errorns')
        self.assertEqual('en_US', result['textLang'])

    # marks the test as an expected failure for trial
    test_textLangInherited.todo = "xml:lang inheritance not implemented"

    def test_appCondition(self):
        """
        Test parsing of an error element with an app specific condition.
        """
        condition = self.error.addElement(('testns', 'condition'))
        result = error._parseError(self.error, 'errorns')
        self.assertEqual(condition, result['appCondition'])

    def test_appConditionMultiple(self):
        """
        Test parsing of an error element with multiple app specific conditions.
        """
        # when several app conditions are present, the last one wins
        self.error.addElement(('testns', 'condition'))
        condition = self.error.addElement(('testns', 'condition2'))
        result = error._parseError(self.error, 'errorns')
        self.assertEquals(condition, result['appCondition'])
class ExceptionFromStanzaTest(unittest.TestCase):
    """
    Tests for L{error.exceptionFromStanza}.
    """

    def test_basic(self):
        """
        Test basic operations of exceptionFromStanza.

        Given a realistic stanza, check if a sane exception is returned.

        Using this stanza::

          <iq type='error'
              from='pubsub.shakespeare.lit'
              to='francisco@denmark.lit/barracks'
              id='subscriptions1'>
            <pubsub xmlns='http://jabber.org/protocol/pubsub'>
              <subscriptions/>
            </pubsub>
            <error type='cancel'>
              <feature-not-implemented
                xmlns='urn:ietf:params:xml:ns:xmpp-stanzas'/>
              <unsupported xmlns='http://jabber.org/protocol/pubsub#errors'
                           feature='retrieve-subscriptions'/>
            </error>
          </iq>
        """
        stanza = domish.Element((None, 'stanza'))
        p = stanza.addElement(('http://jabber.org/protocol/pubsub', 'pubsub'))
        p.addElement('subscriptions')
        e = stanza.addElement('error')
        e['type'] = 'cancel'
        e.addElement((NS_XMPP_STANZAS, 'feature-not-implemented'))
        uc = e.addElement(('http://jabber.org/protocol/pubsub#errors',
                           'unsupported'))
        uc['feature'] = 'retrieve-subscriptions'
        result = error.exceptionFromStanza(stanza)
        self.assert_(isinstance(result, error.StanzaError))
        self.assertEquals('feature-not-implemented', result.condition)
        self.assertEquals('cancel', result.type)
        self.assertEquals(uc, result.appCondition)
        # the non-error payload must be preserved on the exception
        self.assertEquals([p], result.children)

    def test_legacy(self):
        """
        Test legacy operations of exceptionFromStanza.

        Given a realistic stanza with only legacy (pre-XMPP) error information,
        check if a sane exception is returned.

        Using this stanza::

          <message type='error'
                   to='piers@pipetree.com/Home'
                   from='qmacro@jaber.org'>
            <body>Are you there?</body>
            <error code='502'>Unable to resolve hostname.</error>
          </message>
        """
        stanza = domish.Element((None, 'stanza'))
        p = stanza.addElement('body', content='Are you there?')
        e = stanza.addElement('error', content='Unable to resolve hostname.')
        e['code'] = '502'
        result = error.exceptionFromStanza(stanza)
        self.assert_(isinstance(result, error.StanzaError))
        # legacy code 502 maps to service-unavailable / wait
        self.assertEquals('service-unavailable', result.condition)
        self.assertEquals('wait', result.type)
        self.assertEquals('Unable to resolve hostname.', result.text)
        self.assertEquals([p], result.children)
class ExceptionFromStreamErrorTest(unittest.TestCase):
    """
    Tests for L{error.exceptionFromStreamError}.
    """

    def test_basic(self):
        """
        Test basic operations of exceptionFromStreamError.

        Given a realistic stream error, check if a sane exception is returned.

        Using this error::

          <stream:error xmlns:stream='http://etherx.jabber.org/streams'>
            <xml-not-well-formed xmlns='urn:ietf:params:xml:ns:xmpp-streams'/>
          </stream:error>
        """
        e = domish.Element(('http://etherx.jabber.org/streams', 'error'))
        e.addElement((NS_XMPP_STREAMS, 'xml-not-well-formed'))
        result = error.exceptionFromStreamError(e)
        self.assert_(isinstance(result, error.StreamError))
        self.assertEquals('xml-not-well-formed', result.condition)
| agpl-3.0 |
v-legoff/pa-poc3 | src/router/__init__.py | 2 | 1612 | # Copyright (c) 2012 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package containing the router for Python Aboard with Cherrypy."""
| bsd-3-clause |
bchareyre/ratchet | scripts/default-test.py | 6 | 3838 | #!/usr/local/bin/yade-trunk
# coding=UTF-8
# this must be run inside yade
#
# pass 'mail:sender@somewhere.org,recipient@elsewhere.com' as an argument so that the crash report is e-mailed
# using the default SMTP settings (sendmail?) on your system
#
import os,time,sys
from yade import *
import yade.runtime,yade.system,yade.config
simulFile='/tmp/yade-test-%d.xml'%(os.getpid()) # generated simulations here
pyCmdFile='/tmp/yade-test-%d.py'%(os.getpid()) # generated script here
msgFile='/tmp/yade-test-%d.msg'%(os.getpid()) # write message here
runSimul="""
# generated file
from yade import *
simulFile='%s'; msgFile='%s'; nIter=%d;
import time
try:
O.load(simulFile)
O.run(10); O.wait() # run first 10 iterations
start=time.time(); O.run(nIter); O.wait(); finish=time.time() # run nIter iterations, wait to finish, measure elapsed time
speed=nIter/(finish-start); open(msgFile,'w').write('%%g iter/sec'%%speed)
except:
import sys, traceback
traceback.print_exc()
sys.exit(1)
print 'main: Yade: normal exit.'
O.exitNoBacktrace()
quit()
"""%(simulFile,msgFile,100)
runGenerator="""
#generated file
from yade import *
%%s(%%s).generate('%s')
print 'main: Yade: normal exit.'
O.exitNoBacktrace()
quit()
"""%(simulFile)
def crashProofRun(cmd,quiet=True):
	"""Run python code *cmd* in a subprocess, so that a crash (e.g. a
	C++-level abort inside yade) cannot take this driver script down.

	Returns (ok, msg, output): ok is True when the child exited normally,
	msg is the first line of msgFile written by the child (if any), output
	is the child's combined stdout+stderr.
	"""
	import subprocess,os,os.path,yade.runtime
	# write the code into the temporary script the child will execute
	f=open(pyCmdFile,'w'); f.write(cmd); f.close();
	if os.path.exists(msgFile): os.remove(msgFile)
	# run the child with yade's python modules on PYTHONPATH and an empty
	# DISPLAY so that no GUI is opened
	p=subprocess.Popen([sys.executable,pyCmdFile],stdout=subprocess.PIPE,stderr=subprocess.STDOUT,env=dict(os.environ,**{'PYTHONPATH':os.path.join(yade.config.prefix,'lib','yade'+yade.config.suffix,'py'),'DISPLAY':''}))
	pout=p.communicate()[0]
	retval=p.wait()
	if not quiet: print pout
	msg=''
	if os.path.exists(msgFile): msg=open(msgFile,'r').readlines()[0]
	if retval==0: return True,msg,pout
	else:
		# handle crash at exit :-(
		# the marker string is printed by the generated scripts just before
		# O.exitNoBacktrace(); if present, the run itself succeeded even
		# though the interpreter crashed while shutting down
		if 'main: Yade: normal exit.' in pout: return True,msg,pout
		return False,msg,pout
reports=[]
summary=[]
#broken=['SDECLinkedSpheres','SDECMovingWall','SDECSpheresPlane','ThreePointBending']
broken=[]
genParams={
#'USCTGen':{'spheresFile':'examples/small.sdec.xyz'}
}
for pp in yade.system.childClasses('FileGenerator'):
if pp in broken:
summary.append(pp,'skipped (broken)','');
params='' if pp not in genParams else (","+",".join(["%s=%s"%(k,repr(genParams[pp][k])) for k in genParams[pp]]))
ok1,msg1,out1=crashProofRun(runGenerator%(pp,params))
if not ok1:
reports.append([pp,'generator CRASH',out1]); summary.append([pp,'generator CRASH'])
else:
ok2,msg2,out2=crashProofRun(runSimul)
if not ok2: reports.append([pp,'simulation CRASH',out2]); summary.append([pp,'simulation CRASH'])
else: summary.append([pp,'passed (%s)'%msg2])
print summary[-1][0]+':',summary[-1][1]
# delete temporaries
for f in simulFile,msgFile,pyCmdFile:
if os.path.exists(f): os.remove(f)
# handle crash reports, if any
if reports:
mailFrom,mailTo=None,None
for a in yade.runtime.argv:
if 'mail:' in a: mailFrom,mailTo=a.replace('mail:','').split(',')
reportText='\n'.join([80*'#'+'\n'+r[0]+': '+r[1]+'\n'+80*'#'+'\n'+r[2] for r in reports])
if mailTo and mailFrom:
from email.mime.text import MIMEText
import yade.config
msg=MIMEText(reportText)
msg['Subject']="Automated crash report for Yade "+yade.config.revision+": "+",".join([r[0] for r in reports])
msg['From']=mailFrom
msg['To']=mailTo
msg['Reply-To']='yade-dev@lists.launchpad.net'
import smtplib
s=smtplib.SMTP()
s.connect()
s.sendmail(mailFrom,[mailTo],msg.as_string())
s.close()
print "Sent crash report to ",mailTo
else:
print "\n\n=================================== PROBLEM DETAILS ===================================\n"
print reportText
print "\n\n========================================= SUMMARY ======================================\n"
for l in summary: print "%30s: %s"%(l[0],l[1])
quit()
| gpl-2.0 |
jmartinm/inspire-next | inspire/modules/audit/receivers.py | 3 | 1131 | # -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Signal receivers for audit."""
from inspire.modules.audit.models import Audit
from inspire.modules.audit.signals import audit_action_taken
def add_audit_entry(sender, logging_info, **kwargs):
    """Persist a new Audit record built from the given logging_info dict."""
    Audit(**logging_info).save()


audit_action_taken.connect(add_audit_entry)
| gpl-2.0 |
fitermay/intellij-community | python/testData/inspections/PyCompatibilityInspection/argumentsUnpackingGeneralizations.py | 33 | 1128 | def foo(*args, **kwargs):
print(args, kwargs)
foo(0,
*[1],
<warning descr="Python versions < 3.5 do not allow positional arguments after *expression">2</warning>,
<warning descr="Python versions < 3.5 do not allow duplicate *expressions">*[3]</warning>,
<warning descr="Python versions < 3.5 do not allow positional arguments after *expression">4</warning>,
<warning descr="Python versions < 2.6 do not allow keyword arguments after *expression">a='a'</warning>,
<warning descr="Python versions < 3.5 do not allow duplicate *expressions">*[6]</warning>,
<warning descr="Python versions < 2.6 do not allow keyword arguments after *expression">b='b'</warning>,
<warning descr="Python versions < 3.5 do not allow duplicate *expressions">*[7]</warning>,
<warning descr="Python versions < 2.6 do not allow keyword arguments after *expression">c='c'</warning>,
**{'d': 'd'},
<warning descr="Python versions < 2.6 do not allow keyword arguments after *expression">e='e'</warning>,
<warning descr="Python versions < 3.5 do not allow duplicate **expressions">**{'f': 'f'}</warning>)
| apache-2.0 |
Immortalin/python-for-android | python-build/python-libs/gdata/samples/oauth/oauth_on_appengine/appengine_utilities/flash.py | 130 | 4641 | """
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import os
import sys
import Cookie
import pickle
from time import strftime
from django.utils import simplejson
COOKIE_NAME = 'appengine-utilities-flash'
class Flash(object):
    """
    Send messages to the user between pages.

    When you instantiate the class, the attribute 'msg' will be set from the
    cookie, and the cookie will be deleted. If there is no flash cookie, 'msg'
    will default to None.

    To set a flash message for the next page, simply set the 'msg' attribute.

    Example psuedocode:

        if new_entity.put():
            flash = Flash()
            flash.msg = 'Your new entity has been created!'
            return redirect_to_entity_list()

    Then in the template on the next page:

        {% if flash.msg %}
            <div class="flash-msg">{{ flash.msg }}</div>
        {% endif %}

    NOTE(review): this class writes HTTP headers by printing to stdout,
    CGI-style (see no_cache_headers and the print statements below) --
    it appears to assume it runs before the response body is emitted.
    """

    def __init__(self, cookie=None):
        """
        Load the flash message and clear the cookie.

        :param cookie: optional pre-parsed Cookie.SimpleCookie; when None,
            the cookie is read from the HTTP_COOKIE environment variable.
        """
        self.no_cache_headers()
        # load cookie
        if cookie is None:
            browser_cookie = os.environ.get('HTTP_COOKIE', '')
            self.cookie = Cookie.SimpleCookie()
            self.cookie.load(browser_cookie)
        else:
            self.cookie = cookie
        # check for flash data
        if self.cookie.get(COOKIE_NAME):
            # set 'msg' attribute
            cookie_val = self.cookie[COOKIE_NAME].value
            # we don't want to trigger __setattr__(), which creates a cookie
            try:
                self.__dict__['msg'] = simplejson.loads(cookie_val)
            except:
                # not able to load the json, so do not set message. This should
                # catch for when the browser doesn't delete the cookie in time for
                # the next request, and only blanks out the content.
                pass
            # clear the cookie by expiring it immediately; the print emits the
            # Set-Cookie header to stdout (CGI-style response)
            self.cookie[COOKIE_NAME] = ''
            self.cookie[COOKIE_NAME]['path'] = '/'
            self.cookie[COOKIE_NAME]['expires'] = 0
            print self.cookie[COOKIE_NAME]
        else:
            # default 'msg' attribute to None
            self.__dict__['msg'] = None

    def __setattr__(self, name, value):
        """
        Create a cookie when setting the 'msg' attribute.

        Only 'cookie' and 'msg' may be assigned; anything else raises
        ValueError.
        """
        if name == 'cookie':
            self.__dict__['cookie'] = value
        elif name == 'msg':
            self.__dict__['msg'] = value
            # serialize the message into the flash cookie for the next request
            self.__dict__['cookie'][COOKIE_NAME] = simplejson.dumps(value)
            self.__dict__['cookie'][COOKIE_NAME]['path'] = '/'
            # emit the Set-Cookie header (CGI-style, printed to stdout)
            print self.cookie
        else:
            raise ValueError('You can only set the "msg" attribute.')

    def no_cache_headers(self):
        """
        Adds headers, avoiding any page caching in the browser. Useful for highly
        dynamic sites.

        Headers are written straight to stdout (CGI-style response).
        """
        print "Expires: Tue, 03 Jul 2001 06:00:00 GMT"
        print strftime("Last-Modified: %a, %d %b %y %H:%M:%S %Z")
        print "Cache-Control: no-store, no-cache, must-revalidate, max-age=0"
        print "Cache-Control: post-check=0, pre-check=0"
        print "Pragma: no-cache"
| apache-2.0 |
SOKP/external_chromium_org | tools/site_compare/operators/equals_with_mask.py | 189 | 1589 | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Compare two images for equality, subject to a mask."""
from PIL import Image
from PIL import ImageChops
import os.path
def Compare(file1, file2, **kwargs):
  """Compares two images to see if they're identical subject to a mask.

  An optional directory containing masks is supplied. If a mask exists
  which matches file1's name, areas under the mask where it's black
  are ignored.

  Args:
    file1: path to first image to compare
    file2: path to second image to compare
    kwargs: ["maskdir"] contains the directory holding the masks

  Returns:
    None if the images are identical
    A tuple of (errorstring, image) if they're not
  """
  maskdir = kwargs.get("maskdir")

  im1 = Image.open(file1)
  im2 = Image.open(file2)

  if im1.size != im2.size:
    return ("The images are of different size (%r vs %r)" %
            (im1.size, im2.size), im1)

  diff = ImageChops.difference(im1, im2)

  if maskdir:
    maskfile = os.path.join(maskdir, os.path.basename(file1))
    if os.path.exists(maskfile):
      mask = Image.open(maskfile)
      if mask.size != im1.size:
        return ("The mask is of a different size than the images (%r vs %r)" %
                (mask.size, im1.size), mask)
      # black mask pixels zero out the difference, hiding those areas
      diff = ImageChops.multiply(diff, mask.convert(diff.mode))

  if max(diff.getextrema()) == (0, 0):
    return None
  return ("The images differ", diff)
| bsd-3-clause |
golismero/golismero-devel | plugins/testing/recon/punkspider.py | 2 | 5681 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__license__ = """
GoLismero 2.0 - The web knife - Copyright (C) 2011-2014
Golismero project site: https://github.com/golismero
Golismero project mail: contact@golismero-project.com
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from golismero.api.data.resource.domain import Domain
from golismero.api.data.resource.url import URL
from golismero.api.data.vulnerability.injection.sql import SQLInjection
from golismero.api.data.vulnerability.injection.xss import XSS
from golismero.api.logger import Logger
from golismero.api.plugin import TestingPlugin
from golismero.api.text.text_utils import to_utf8
from golismero.api.net.web_utils import parse_url
import requests
import traceback
#------------------------------------------------------------------------------
class PunkSPIDER(TestingPlugin):
    """
    This plugin tries to perform passive reconnaissance on a target using
    the PunkSPIDER vulnerability lookup engine.
    """


    #--------------------------------------------------------------------------
    def get_accepted_types(self):
        # Only Domain resources trigger this plugin.
        return [Domain]


    #--------------------------------------------------------------------------
    def run(self, info):
        """Query PunkSPIDER for the domain and convert each reported finding
        into a URL resource plus an XSS/SQLInjection vulnerability object.

        :param info: Domain resource to look up.
        :returns: list alternating URL and vulnerability objects, or None
            when there was nothing to report.
        """

        # Query PunkSPIDER.
        # PunkSPIDER host ids use reversed dotted notation
        # (e.g. "www.example.com" -> "com.example.www").
        host_id = info.hostname
        host_id = parse_url(host_id).hostname
        host_id = ".".join(reversed(host_id.split(".")))
        d = self.query_punkspider(host_id)

        # Stop if we have no results.
        if not d:
            Logger.log("No results found for host: %s" % info.hostname)
            return

        # This is where we'll collect the data we'll return.
        results = []

        # For each vulnerability...
        for v in d["data"]:
            try:

                # Future-proof checks.
                if v["protocol"] not in ("http", "https"):
                    Logger.log_more_verbose(
                        "Skipped non-web vulnerability: %s"
                        % to_utf8(v["id"]))
                    continue
                if v["bugType"] not in ("xss", "sqli", "bsqli"):
                    Logger.log_more_verbose(
                        "Skipped unknown vulnerability type: %s"
                        % to_utf8(v["bugType"]))
                    continue

                # Get the vulnerable URL, parameter and payload.
                url = to_utf8(v["vulnerabilityUrl"])
                param = to_utf8(v["parameter"])
                parsed = parse_url(url)
                payload = parsed.query_params[param]

                # Get the level.
                level = to_utf8(v["level"])

                # Create the URL object.
                # NOTE: this is the imported URL resource class, not the
                # class attribute URL defined below (method scope does not
                # see class attributes without self./cls.).
                url_o = URL(url)
                results.append(url_o)

                # Get the vulnerability class.
                # "sqli" and "bsqli" both map to SQLInjection.
                if v["bugType"] == "xss":
                    clazz = XSS
                else:
                    clazz = SQLInjection

                # Create the Vulnerability object.
                vuln = clazz(
                    url_o,
                    vulnerable_params = { param: payload },
                    injection_point = clazz.INJECTION_POINT_URL,
                    injection_type = to_utf8(v["bugType"]), # FIXME
                    level = level,
                    tool_id = to_utf8(v["id"]),
                )
                results.append(vuln)

            # Log errors.
            except Exception, e:
                tb = traceback.format_exc()
                Logger.log_error_verbose(str(e))
                Logger.log_error_more_verbose(tb)

        # Log how many vulnerabilities we found.
        # results holds (URL, vulnerability) pairs, hence the division by 2.
        count = int(len(results) / 2)
        if count == 0:
            Logger.log("No vulnerabilities found for host: " + info.hostname)
        elif count == 1:
            Logger.log("Found one vulnerability for host: " + info.hostname)
        else:
            Logger.log("Found %d vulnerabilities for host: %s"
                       % (count, info.hostname))

        # Return the results.
        return results


    #--------------------------------------------------------------------------
    # The PunkSPIDER API.

    # Endpoint template; %s is filled with the reversed host id.
    URL = (
        "http://punkspider.hyperiongray.com/service/search/detail/%s"
    )
    # Browser-like headers; the service expects an AJAX-style request.
    HEADERS = {
        "Accept": "*/*",
        "Referer": "http://punkspider.hyperiongray.com/",
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64)"
                      " AppleWebKit/537.36 (KHTML, like Gecko)"
                      " Chrome/31.0.1650.63 Safari/537.36",
        "X-Requested-With": "XMLHttpRequest",
    }

    def query_punkspider(self, host_id):
        """Fetch the JSON vulnerability report for *host_id*.

        Returns the decoded JSON dict, or None (implicitly) when the HTTP
        request fails.
        """
        try:
            r = requests.get(self.URL % host_id,
                             headers = self.HEADERS)
            assert r.headers["Content-Type"].startswith("application/json"),\
                "Response from server is not a JSON encoded object"
            return r.json()
        except requests.RequestException, e:
            Logger.log_error(
                "Query to PunkSPIDER failed, reason: %s" % str(e))
| gpl-2.0 |
j5shi/Thruster | pylibs/idlelib/IdleHistory.py | 1 | 4239 | "Implement Idle Shell history mechanism with History class"
from idlelib.configHandler import idleConf
class History:
    ''' Implement Idle Shell history mechanism.

    store - Store source statement (called from PyShell.resetoutput).
    fetch - Fetch stored statement matching prefix already entered.
    history_next - Bound to <<history-next>> event (default Alt-N).
    history_prev - Bound to <<history-prev>> event (default Alt-P).
    '''
    def __init__(self, text):
        '''Initialize data attributes and bind event methods.

        .text - Idle wrapper of tk Text widget, with .bell().
        .history - source statements, possibly with multiple lines.
        .prefix - source already entered at prompt; filters history list.
        .pointer - index into history.
        .cyclic - wrap around history list (or not).
        '''
        self.text = text
        self.history = []
        self.prefix = None
        self.pointer = None
        self.cyclic = idleConf.GetOption("main", "History", "cyclic", 1, "bool")
        text.bind("<<history-previous>>", self.history_prev)
        text.bind("<<history-next>>", self.history_next)

    def history_next(self, event):
        "Fetch later statement; start with earliest if cyclic."
        self.fetch(reverse=False)
        return "break"  # stop further tk event processing

    def history_prev(self, event):
        "Fetch earlier statement; start with most recent."
        self.fetch(reverse=True)
        return "break"  # stop further tk event processing

    def fetch(self, reverse):
        '''Fetch statement and replace current line in text widget.

        Set prefix and pointer as needed for successive fetches.
        Reset them to None, None when returning to the start line.
        Sound bell when return to start line or cannot leave a line
        because cyclic is False.
        '''
        nhist = len(self.history)
        pointer = self.pointer
        prefix = self.prefix
        if pointer is not None and prefix is not None:
            # If the user moved the cursor or edited the line since the last
            # fetch, restart the history walk from scratch.
            if self.text.compare("insert", "!=", "end-1c") or \
                    self.text.get("iomark", "end-1c") != self.history[pointer]:
                pointer = prefix = None
                self.text.mark_set("insert", "end-1c")  # != after cursor move
        if pointer is None or prefix is None:
            # Starting a new walk: whatever is typed so far becomes the filter.
            prefix = self.text.get("iomark", "end-1c")
            if reverse:
                pointer = nhist  # will be decremented
            else:
                if self.cyclic:
                    pointer = -1  # will be incremented
                else:  # abort history_next
                    self.text.bell()
                    return
        nprefix = len(prefix)
        while 1:
            pointer += -1 if reverse else 1
            if pointer < 0 or pointer >= nhist:
                # Walked off either end of the list.
                self.text.bell()
                if not self.cyclic and pointer < 0:  # abort history_prev
                    return
                else:
                    # Back at the start line: restore the original prefix.
                    if self.text.get("iomark", "end-1c") != prefix:
                        self.text.delete("iomark", "end-1c")
                        self.text.insert("iomark", prefix)
                    pointer = prefix = None
                    break
            item = self.history[pointer]
            # Only offer items that extend the typed prefix.
            if item[:nprefix] == prefix and len(item) > nprefix:
                self.text.delete("iomark", "end-1c")
                self.text.insert("iomark", item)
                break
        self.text.see("insert")
        self.text.tag_remove("sel", "1.0", "end")
        self.pointer = pointer
        self.prefix = prefix

    def store(self, source):
        "Store Shell input statement into history list."
        source = source.strip()
        if len(source) > 2:
            # avoid duplicates
            try:
                self.history.remove(source)
            except ValueError:
                pass
            self.history.append(source)
        # Any new statement resets the history walk.
        self.pointer = None
        self.prefix = None
if __name__ == "__main__":
from test import test_support as support
support.use_resources = ['gui']
from unittest import main
main('idlelib.idle_test.test_idlehistory', verbosity=2, exit=False)
| gpl-2.0 |
Youwotma/splash | splash/kernel/kernel.py | 1 | 9476 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import six
import sys
import lupa
from ipykernel.kernelapp import IPKernelApp
from ipykernel.eventloops import loop_qt5
from jupyter_client.kernelspec import install_kernel_spec
from twisted.internet import defer
import splash
from splash.lua import get_version, get_main_sandboxed, get_main
from splash.browser_tab import BrowserTab
from splash.lua_runtime import SplashLuaRuntime
from splash.qtrender_lua import (
Splash, MainCoroutineRunner, StoredExceptions, Extras
)
from splash.qtutils import init_qt_app
from splash.render_options import RenderOptions
from splash import defaults
from splash.kernel.kernelbase import Kernel
from splash.utils import BinaryCapsule
from splash.kernel.completer import Completer
from splash.kernel.inspections import Inspector
from splash.kernel.errors import error_repr
import splash.server as server
def install(user=True):
    """Register the Splash kernel specification with Jupyter.

    Picks the Python-2 or Python-3 kernelspec folder shipped next to this
    module and installs it under the name "splash".
    """
    kernel_dir = os.path.join(
        os.path.dirname(__file__),
        'kernels',
        'splash-py2' if six.PY2 else 'splash-py3',
    )
    install_kernel_spec(kernel_dir, kernel_name="splash", user=user,
                        replace=True)
def init_browser(network_manager_factory):
    """Create and return a visible BrowserTab for an interactive session.

    :param network_manager_factory: zero-argument callable producing the
        network manager the tab should use.
    """
    # TODO: support the same command-line options as HTTP server.
    # from splash.server import start_logging
    # class opts(object):
    #     logfile = "./kernel.log"
    # start_logging(opts)
    proxy_factory = None  # TODO
    data = {}
    data['uid'] = id(data)  # any value unique to this dict works as uid
    tab = BrowserTab(
        network_manager=network_manager_factory(),
        splash_proxy_factory=proxy_factory,
        verbosity=2,  # TODO
        render_options=RenderOptions(data, defaults.MAX_TIMEOUT),  # TODO: timeout
        visible=True,
    )
    return tab
class DeferredSplashRunner(object):
    """Adapter that drives a Lua "main" coroutine through
    MainCoroutineRunner and exposes the outcome as a Twisted Deferred.
    """

    def __init__(self, lua, splash, sandboxed, log=None, render_options=None):
        # NOTE(review): render_options is accepted but never used here.
        self.lua = lua
        self.splash = splash
        self.sandboxed = sandboxed
        if log is None:
            # default to the browser tab's logger
            self.log = self.splash.tab.logger.log
        else:
            self.log = log
        self.runner = MainCoroutineRunner(
            lua=self.lua,
            log=self.log,
            splash=splash,
            sandboxed=self.sandboxed,
        )

    def run(self, main_coro):
        """
        Run main_coro Lua coroutine, passing it a Splash
        instance as an argument. Return a Deferred.
        """
        d = defer.Deferred()

        # bridge the runner's success/error callbacks onto the Deferred
        def return_result(result):
            d.callback(result)

        def return_error(err):
            d.errback(err)

        self.runner.start(
            main_coro=main_coro,
            return_result=return_result,
            return_error=return_error,
        )
        return d
class SplashKernel(Kernel):
    # Kernel metadata reported to Jupyter clients via kernel_info_reply.
    implementation = 'Splash'
    implementation_version = splash.__version__
    language = 'Lua'
    language_version = get_version()
    # Drives syntax highlighting / file handling in notebook frontends.
    language_info = {
        'name': 'Splash',
        'mimetype': 'application/x-lua',
        'display_name': 'Splash',
        'language': 'lua',
        'codemirror_mode': {
            "name": "text/x-lua",
        },
        'file_extension': '.lua',
        'pygments_lexer': 'lua',
        'version': get_version(),
    }
    banner = "Splash kernel - write browser automation scripts interactively"
    # Shown in the notebook "Help" menu.
    help_links = [
        {
            'text': "Splash Tutorial",
            'url': 'http://splash.readthedocs.org/en/latest/scripting-tutorial.html'
        },
        {
            'text': "Splash Reference",
            'url': 'http://splash.readthedocs.org/en/latest/scripting-ref.html'
        },
        {
            'text': "Programming in Lua",
            'url': 'http://www.lua.org/pil/contents.html'
        },
        {
            'text': "Lua 5.2 Manual",
            'url': 'http://www.lua.org/manual/5.2/'
        },
    ]
    # When True, scripts run in the restricted Lua sandbox.
    sandboxed = False
def __init__(self, **kwargs):
    super(SplashKernel, self).__init__(**kwargs)
    # NOTE(review): network_manager_factory is not defined in this class
    # body; presumably it is attached to the class elsewhere -- verify.
    self.tab = init_browser(SplashKernel.network_manager_factory)
    # Lua runtime plus the objects exposed to user scripts.
    self.lua = SplashLuaRuntime(self.sandboxed, "", ())
    self.exceptions = StoredExceptions()
    self.splash = Splash(
        lua=self.lua,
        exceptions=self.exceptions,
        tab=self.tab
    )
    # make the `splash` object available as a Lua global
    self.lua.add_to_globals("splash", self.splash.get_wrapped())
    self.extras = Extras(self.lua, self.exceptions)
    self.extras.inject_to_globals()
    self.runner = DeferredSplashRunner(self.lua, self.splash, self.sandboxed) #, self.log_msg)
    # completion and inspection helpers for do_complete/do_inspect
    self.completer = Completer(self.lua)
    self.inspector = Inspector(self.lua)
    #
    # try:
    #     sys.stdout.write = self._print
    #     sys.stderr.write = self._print
    # except:
    #     pass # Can't change stdout
def send_execute_reply(self, stream, ident, parent, md, reply_content):
    """Send the execute_reply once the Deferred result is available.

    Unlike the base class, reply_content here is a Deferred that fires
    with a (reply, result, content_type) tuple produced by do_execute.
    """
    def done(result):
        reply, result, ct = result
        if result:
            data = {
                'text/plain': result if isinstance(result, six.text_type) else str(result),
            }
            # binary results (e.g. screenshots) are also published inline
            if isinstance(result, BinaryCapsule):
                if result.content_type in {'image/png', 'image/jpeg'}:
                    data[result.content_type] = result.as_b64()
            self._publish_execute_result(parent, data, {}, self.execution_count)

        super(SplashKernel, self).send_execute_reply(stream, ident, parent, md, reply)

    assert isinstance(reply_content, defer.Deferred)
    reply_content.addCallback(done)
def do_execute(self, code, silent, store_history=True, user_expressions=None,
               allow_stdin=False):
    """Compile the cell into a Lua ``main`` function and run it.

    Three compilation strategies are tried in turn so that both
    expressions and statement blocks work:
    1. the whole cell as a single expression (wrapped in repr);
    2. all lines as statements, last line as the returned expression;
    3. the whole cell as statements with no return value.

    Returns a Deferred firing with (reply_dict, result, content_type),
    consumed by send_execute_reply above.
    """
    def success(res):
        result, content_type, headers, status_code = res
        reply = {
            'status': 'ok',
            'execution_count': self.execution_count,
            'payload': [],
            'user_expressions': {},
        }
        return reply, result, content_type or 'text/plain'

    def error(failure):
        text = "<unknown error>"
        try:
            # re-raise to recover the original exception from the Failure
            failure.raiseException()
        except Exception as e:
            text = error_repr(e)
        reply = {
            'status': 'error',
            'execution_count': self.execution_count,
            'ename': '',
            'evalue': text,
            'traceback': []
        }
        return reply, text, 'text/plain'

    try:
        try:
            # XXX: this ugly formatting is important for exception
            # line numbers to be displayed properly!
            lua_source = 'local repr = require("repr"); function main(splash) return repr(%s) end' % code
            main_coro = self._get_main(lua_source)
        except lupa.LuaSyntaxError:
            # not a single expression; try statements + trailing expression
            try:
                lines = code.splitlines(False)
                lua_source = '''local repr = require("repr"); function main(splash) %s
return repr(%s)
end
''' % ("\n".join(lines[:-1]), lines[-1])
                main_coro = self._get_main(lua_source)
            except lupa.LuaSyntaxError:
                # plain statement block with no value to display
                lua_source = "function main(splash) %s end" % code
                main_coro = self._get_main(lua_source)
    except (lupa.LuaSyntaxError, lupa.LuaError) as e:
        # compilation failed: report through the same Deferred pipeline
        d = defer.Deferred()
        d.addCallbacks(success, error)
        d.errback(e)
        return d
    except Exception:
        d = defer.Deferred()
        d.addCallbacks(success, error)
        d.errback()
        return d

    d = self.runner.run(main_coro)
    d.addCallbacks(success, error)
    return d
def do_complete(self, code, cursor_pos):
    """Delegate tab-completion requests to the Lua-aware Completer."""
    return self.completer.complete(code, cursor_pos)
def do_inspect(self, code, cursor_pos, detail_level=0):
    """Delegate object-introspection (Shift-Tab help) to the Inspector."""
    return self.inspector.help(code, cursor_pos, detail_level)
def _publish_execute_result(self, parent, data, metadata, execution_count):
    """Send an 'execute_result' message on the IOPub channel."""
    msg = {
        u'data': data,
        u'metadata': metadata,
        u'execution_count': execution_count
    }
    self.session.send(self.iopub_socket, u'execute_result', msg,
                      parent=parent, ident=self._topic('execute_result')
                      )
def log_msg(self, text, min_level=2):
    """Write a log line to the notebook's stdout stream."""
    # NOTE(review): min_level is currently ignored -- confirm before
    # relying on it for filtering.
    self._print(text + "\n")
def _print(self, message):
    """Publish `message` as a stdout 'stream' message to the front-end."""
    stream_content = {'name': 'stdout', 'text': message, 'metadata': dict()}
    self.log.debug('Write: %s' % message)
    self.send_response(self.iopub_socket, 'stream', stream_content)
def _get_main(self, lua_source):
    """Compile `lua_source` and return a coroutine for its main().

    Raises lupa.LuaSyntaxError on invalid source (used by do_execute's
    fallback chain).
    """
    if self.sandboxed:
        main, env = get_main_sandboxed(self.lua, lua_source)
    else:
        main, env = get_main(self.lua, lua_source)
    return self.lua.create_coroutine(main)
def server_factory(network_manager_factory, verbosity, **kwargs):
    """Boot the Qt application and start an IPython kernel app using
    SplashKernel as the kernel class."""
    init_qt_app(verbose=verbosity >= 5)
    # The kernel's __init__ reads this class attribute to build its tab.
    SplashKernel.network_manager_factory = network_manager_factory
    kernel = IPKernelApp.instance(kernel_class=SplashKernel)
    kernel.initialize()
    # Integrate ZMQ message handling with the Qt5 event loop.
    kernel.kernel.eventloop = loop_qt5
    kernel.start()
def start():
    """Entry point: run Splash in Jupyter-kernel mode.

    Extra Splash options are taken from the SPLASH_ARGS environment
    variable (whitespace-separated).
    """
    splash_args = os.environ.get('SPLASH_ARGS', '').split()
    server.main(jupyter=True, argv=splash_args, server_factory=server_factory)
| bsd-3-clause |
thfield/sf-base-election-data | venv/lib/python3.4/site-packages/pip/_vendor/progress/spinner.py | 404 | 1341 | # -*- coding: utf-8 -*-
# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import unicode_literals
from . import Infinite
from .helpers import WriteMixin
class Spinner(WriteMixin, Infinite):
    """Endless progress indicator that cycles through a set of glyphs."""
    message = ''
    phases = ('-', '\\', '|', '/')
    hide_cursor = True

    def update(self):
        # Wrap the running index around the phase sequence and draw it.
        self.write(self.phases[self.index % len(self.phases)])
class PieSpinner(Spinner):
    # Quarter-circle glyphs: a rotating pie.
    phases = ['◷', '◶', '◵', '◴']

class MoonSpinner(Spinner):
    # Moon-phase glyphs.
    phases = ['◑', '◒', '◐', '◓']

class LineSpinner(Spinner):
    # Horizontal bar sweeping down and back up (sequence reverses).
    phases = ['⎺', '⎻', '⎼', '⎽', '⎼', '⎻']
| bsd-3-clause |
TzarIvan/profitpy | profit/workbench/sessionreplay.py | 18 | 4671 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2007 Troy Melhase
# Distributed under the terms of the GNU General Public License v2
# Author: Troy Melhase <troy@gci.net>
##
# This module defines the SessionReplay dialog class.
#
# SessionReplay dialogs offer the user widgets to control the replay
# of a session. It includes a delivery interval spinner and
# associated slider, and also a button to restart the session replay.
##
from PyQt4.QtCore import QTimer
from PyQt4.QtGui import QDialog, QMessageBox
from profit.lib import logging
from profit.lib import Signals
from profit.workbench.widgets.ui_sessionreplay import Ui_SessionReplayWidget
class SessionReplay(QDialog, Ui_SessionReplayWidget):
    """ Dialog for controlling the replay of a session.

    After the dialog instance is constructed, clients should call
    the 'setSession' to associate the dialog with a session.

    Clients should use 'exec_()' to display the dialog, not 'show'.
    """
    # NOTE(review): the docstring mentions 'setSession' but the public
    # API below is setImportParameters() -- confirm and reconcile.

    def __init__(self, interval=50, parent=None):
        """ Initializer.

        @param interval=50 milliseconds between message delivery
        @param parent=None ancestor of this dialog
        """
        QDialog.__init__(self, parent)
        self.setupUi(self)
        self.interval = interval
        # Filled in later by setImportParameters() / importSession().
        self.session = None
        self.filename = None
        self.types = None
        self.loader = None
        self.importer = None
        self.timer = QTimer()

    def exec_(self):
        """ Dialog main loop.

        @return QDialog.DialogCode result
        """
        connect = self.connect
        setInterval = self.timer.setInterval
        # Keep the slider, the spin box and the timer interval in sync.
        connect(self.timerSlider, Signals.intValueChanged, setInterval)
        connect(self.timerSpin, Signals.intValueChanged, setInterval)
        connect(self.timer, Signals.timeout, self.on_timer_timeout)
        self.timer.start(self.interval)
        return QDialog.exec_(self)

    def on_restartButton_clicked(self):
        """ Signal handler for restart button clicked signals.
        """
        if self.importer:
            self.timer.setInterval(self.timerSpin.value())
            # Re-invoking the importer restarts iteration from message 0.
            self.loader = self.importer()

    def on_timer_timeout(self):
        """ Signal handler for the delivery timer timeout signal.

        If the instance has a session but no loader, it will attempt
        to import the session object and initiate the replay.

        If a loader is present (possibly added by importSession), the
        the next message is requested from the loader.

        @return None
        """
        if self.session and not self.loader:
            try:
                self.importSession(self.session, self.filename, self.types)
            except (Exception, ), ex:
                QMessageBox.critical(
                    self, 'Import Exception',
                    'Exception "%s" during import. '
                    'Import not completed.' % ex)
                self.close()
        if self.loader:
            try:
                msgid = self.loader.next()
            except (StopIteration, ):
                # Replay finished; slow the idle timer so it does not spin.
                self.timer.setInterval(max(self.timer.interval(), 50))
            else:
                self.importProgress.setValue(msgid)
                if msgid == self.last:
                    logging.debug(
                        'Imported %s messages from file "%s".',
                        self.count, self.filename)

    def importSession(self, session, filename, types):
        """ Initiates session import.

        @param session Session instance
        @param filename name of file with serialized messages
        @param types sequence of message types to import
        @return None
        """
        importer = session.importMessages(str(filename), types)
        loader = importer()
        try:
            # The loader's first yielded value is the total message count.
            self.count = loader.next()
            self.last = self.count - 1
            if not self.count:
                raise StopIteration()
        except (StopIteration, ):
            self.loader = self.count = self.last = None
            logging.debug('Warning messages not imported from "%s"', filename)
        else:
            self.importProgress.setMaximum(self.last)
            self.importer = importer
            self.loader = loader

    def setImportParameters(self, session, filename, types):
        """ Sets the parameters for the import (replay) of session messages.

        @param session Session instance
        @param filename name of file with serialized messages
        @param types sequence of message types to import
        @return None
        """
        self.session = session
        self.filename = filename
        self.types = types
| gpl-2.0 |
nicksergeant/emilyapp | emilyapp/monitor.py | 9 | 2933 | import os
import sys
import time
import signal
import threading
import atexit
import Queue
# Polling interval (seconds) used by the monitor thread.
_interval = 1.0
# Last-known modification time per tracked file path.
_times = {}
# Extra files registered via track() on top of sys.modules.
_files = []
_running = False
# Wakes the monitor thread early and signals shutdown.
_queue = Queue.Queue()
_lock = threading.Lock()
def _restart(path):
    """Trigger a process restart because `path` changed.

    Posts to the queue (ends the monitor loop) and sends SIGINT to our
    own process so the hosting server shuts down and gets relaunched.
    """
    _queue.put(True)
    prefix = 'monitor (pid=%d):' % os.getpid()
    print >> sys.stderr, '%s Change detected to \'%s\'.' % (prefix, path)
    print >> sys.stderr, '%s Triggering process restart.' % prefix
    os.kill(os.getpid(), signal.SIGINT)
def _modified(path):
    """Return True if `path` should trigger a restart.

    A path counts as modified when its mtime differs from the one
    recorded on a previous call, or when a previously-tracked file has
    disappeared or changed type.
    """
    try:
        # If path doesn't denote a file and were previously
        # tracking it, then it has been removed or the file type
        # has changed so force a restart. If not previously
        # tracking the file then we can ignore it as probably
        # pseudo reference such as when file extracted from a
        # collection of modules contained in a zip file.
        if not os.path.isfile(path):
            return path in _times

        # Check for when file last modified.
        mtime = os.stat(path).st_mtime
        if path not in _times:
            _times[path] = mtime

        # Force restart when modification time has changed, even
        # if time now older, as that could indicate older file
        # has been restored.
        if mtime != _times[path]:
            return True
    except Exception:
        # If any exception occurred, likely the file was removed just
        # before stat(), so force a restart.  Narrowed from a bare
        # `except:` so KeyboardInterrupt/SystemExit are not treated as
        # a file modification.
        return True

    return False
def _monitor():
    """Monitor-thread main loop.

    Scans all loaded modules plus explicitly tracked files once per
    `_interval`, and returns (ending the thread) either after
    triggering a restart or when shutdown posts to `_queue`.
    """
    while 1:
        # Check modification times on all files in sys.modules.
        for module in sys.modules.values():
            if not hasattr(module, '__file__'):
                continue
            path = getattr(module, '__file__')
            if not path:
                continue
            # Map compiled artifacts back to the source file they came from.
            if os.path.splitext(path)[1] in ['.pyc', '.pyo', '.pyd']:
                path = path[:-1]
            if _modified(path):
                return _restart(path)

        # Check modification times on files which have
        # specifically been registered for monitoring.
        for path in _files:
            if _modified(path):
                return _restart(path)

        # Go to sleep for specified interval.  A queued item (posted by
        # _restart() or _exiting()) wakes us early and ends the loop;
        # Queue.Empty on timeout just continues scanning.
        try:
            return _queue.get(timeout=_interval)
        except:
            pass
# Daemon thread so a hung monitor never blocks interpreter exit.
_thread = threading.Thread(target=_monitor)
_thread.setDaemon(True)

def _exiting():
    """atexit hook: wake the monitor thread and wait for it to finish."""
    try:
        _queue.put(True)
    except:
        pass
    _thread.join()

atexit.register(_exiting)
def track(path):
    """Register an extra file (e.g. a config file) for change monitoring."""
    # Idiomatic membership test (`path not in`, PEP 8) instead of
    # `not path in`; behavior is unchanged.
    if path not in _files:
        _files.append(path)
def start(interval=1.0):
    """Start the background change monitor (idempotent).

    The smallest interval ever requested wins.
    """
    global _interval
    if interval < _interval:
        _interval = interval

    global _running
    # Lock so concurrent start() calls spawn the thread only once.
    _lock.acquire()
    if not _running:
        prefix = 'monitor (pid=%d):' % os.getpid()
        print >> sys.stderr, '%s Starting change monitor.' % prefix
        _running = True
        _thread.start()
    _lock.release()
| mit |
web2py/pydal | pydal/adapters/db2.py | 2 | 2000 | from .._compat import integer_types, long
from .base import SQLAdapter
from . import adapters, with_connection_or_raise
class DB2(SQLAdapter):
    """Base adapter for IBM DB2, shared by the ibm_db_dbi and pyodbc drivers."""
    dbengine = "db2"

    def _initialize_(self):
        super(DB2, self)._initialize_()
        # Strip the "db2://" scheme; drivers get the raw connection string.
        self.ruri = self.uri.split("://", 1)[1]

    @with_connection_or_raise
    def execute(self, *args, **kwargs):
        """Run a SQL command, stripping a trailing ';' which DB2 rejects.

        An optional `placeholders` keyword is forwarded to the driver
        as an extra positional argument.
        """
        command = self.filter_sql_command(args[0])
        if command[-1:] == ";":
            command = command[:-1]
        handlers = self._build_handlers_for_execution()
        for handler in handlers:
            handler.before_execute(command)
        # BUGFIX: `args` is a tuple; the original called args.append(),
        # which raised AttributeError whenever `placeholders` was
        # supplied.  Copy to a list before appending.
        args = list(args)
        if kwargs.get("placeholders"):
            args.append(kwargs["placeholders"])
            del kwargs["placeholders"]
        rv = self.cursor.execute(command, *args[1:], **kwargs)
        for handler in handlers:
            handler.after_execute(command)
        return rv

    def lastrowid(self, table):
        """Return the last identity value generated on `table`."""
        # NOTE(review): when table._rname is falsy this passes the table
        # object itself as the command (relying on its string form) --
        # confirm against callers.
        self.execute(
            "SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;" % table._rname
            if table._rname
            else table
        )
        return long(self.cursor.fetchone()[0])

    def rowslice(self, rows, minimum=0, maximum=None):
        """Return rows[minimum:maximum]; open-ended when maximum is None."""
        if maximum is None:
            return rows[minimum:]
        return rows[minimum:maximum]

    def test_connection(self):
        # SYSIBM.SYSDUMMY1 is DB2's one-row dummy table (like Oracle's DUAL).
        self.execute("select * from sysibm.sysdummy1")
@adapters.register_for("db2:ibm_db_dbi")
class DB2IBM(DB2):
    """DB2 adapter speaking through the ibm_db_dbi driver."""
    drivers = ("ibm_db_dbi",)

    def connector(self):
        # ruri looks like "dsn=X;uid=Y;pwd=Z;...": build a lower-cased
        # key -> value mapping and hand the pieces to the driver.
        fields = (item.split("=") for item in self.ruri.split(";"))
        cnxn = {field[0].lower(): field[1] for field in fields}
        return self.driver.connect(
            cnxn["dsn"], cnxn["uid"], cnxn["pwd"], **self.driver_args
        )
@adapters.register_for("db2:pyodbc")
class DB2Pyodbc(DB2):
    """DB2 adapter speaking through pyodbc."""
    drivers = ("pyodbc",)

    def connector(self):
        connection = self.driver.connect(self.ruri, **self.driver_args)
        # Force UTF-8 text regardless of the ODBC default encoding.
        connection.setencoding(encoding="utf-8")
        return connection
mwiencek/picard | picard/ui/util.py | 2 | 1556 | # -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
# Copyright (C) 2007 Lukáš Lalinský
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import sys
from PyQt4 import QtGui
class StandardButton(QtGui.QPushButton):
    """Push button with a localized standard label (Ok/Cancel/Help) and,
    on platforms other than Windows/macOS, the matching QStyle icon."""

    OK = 0
    CANCEL = 1
    HELP = 2

    # Maps button type -> (translatable label, QStyle standard-icon name).
    __types = {
        OK: (N_('&Ok'), 'SP_DialogOkButton'),
        CANCEL: (N_('&Cancel'), 'SP_DialogCancelButton'),
        HELP: (N_('&Help'), 'SP_DialogHelpButton'),
    }

    def __init__(self, btntype):
        # N_ marks the string for extraction; _ translates at runtime
        # (both presumably installed as builtins by the application --
        # TODO confirm).
        label = _(self.__types[btntype][0])
        args = [label]
        if sys.platform != 'win32' and sys.platform != 'darwin':
            iconname = self.__types[btntype][1]
            if hasattr(QtGui.QStyle, iconname):
                # NOTE(review): `self.tagger` looks like an attribute
                # injected on QObject by the application -- verify it is
                # available before __init__ completes.
                icon = self.tagger.style().standardIcon(getattr(QtGui.QStyle, iconname))
                args = [icon, label]
        QtGui.QPushButton.__init__(self, *args)
shaileshgoogler/pyglet | pyglet/input/directinput.py | 35 | 6896 | #!/usr/bin/python
# $Id:$
import ctypes
import pyglet
from pyglet.input import base
from pyglet.libs import win32
from pyglet.libs.win32 import dinput
from pyglet.libs.win32 import _kernel32
# These instance names are not defined anywhere, obtained by experiment. The
# GUID names (which seem to be ideally what are needed) are wrong/missing for
# most of my devices.

# Absolute-axis instance index -> pyglet axis name.
_abs_instance_names = {
    0: 'x',
    1: 'y',
    2: 'z',
    3: 'rx',
    4: 'ry',
    5: 'rz',
}

# Relative-axis instance index -> pyglet axis name.
_rel_instance_names = {
    0: 'x',
    1: 'y',
    2: 'wheel',
}

# Buttons currently have no experimentally-known names.
_btn_instance_names = {}
def _create_control(object_instance):
    """Translate a DirectInput object instance into a pyglet Control.

    Returns None for object types that are not exposed (the caller
    skips those).  `object_instance` is a DIDEVICEOBJECTINSTANCE.
    """
    raw_name = object_instance.tszName
    # NOTE: `type` shadows the builtin here; kept for byte-compatibility.
    type = object_instance.dwType
    instance = dinput.DIDFT_GETINSTANCE(type)

    # Dispatch on the DIDFT_* type flags embedded in dwType.
    if type & dinput.DIDFT_ABSAXIS:
        name = _abs_instance_names.get(instance)
        control = base.AbsoluteAxis(name, 0, 0xffff, raw_name)
    elif type & dinput.DIDFT_RELAXIS:
        name = _rel_instance_names.get(instance)
        control = base.RelativeAxis(name, raw_name)
    elif type & dinput.DIDFT_BUTTON:
        name = _btn_instance_names.get(instance)
        control = base.Button(name, raw_name)
    elif type & dinput.DIDFT_POV:
        control = base.AbsoluteAxis(base.AbsoluteAxis.HAT,
                                    0, 0xffffffff, raw_name)
    else:
        return

    # Remember the raw DirectInput type; _set_format needs it later.
    control._type = object_instance.dwType
    return control
class DirectInputDevice(base.Device):
    """A DirectInput device exposed through pyglet's input API."""

    def __init__(self, display, device, device_instance):
        name = device_instance.tszInstanceName
        super(DirectInputDevice, self).__init__(display, name)

        # Low byte of dwDevType is the device type, next byte the subtype.
        self._type = device_instance.dwDevType & 0xff
        self._subtype = device_instance.dwDevType & 0xff00

        self._device = device
        self._init_controls()
        self._set_format()

    def _init_controls(self):
        """Enumerate the device's axes/buttons into self.controls."""
        self.controls = []
        self._device.EnumObjects(
            dinput.LPDIENUMDEVICEOBJECTSCALLBACK(self._object_enum),
            None, dinput.DIDFT_ALL)

    def _object_enum(self, object_instance, arg):
        # EnumObjects callback: wrap each supported object in a Control.
        control = _create_control(object_instance.contents)
        if control:
            self.controls.append(control)
        return dinput.DIENUM_CONTINUE

    def _set_format(self):
        """Tell DirectInput the data layout: one DWORD (4 bytes) per control."""
        if not self.controls:
            return

        object_formats = (dinput.DIOBJECTDATAFORMAT * len(self.controls))()
        offset = 0
        for object_format, control in zip(object_formats, self.controls):
            object_format.dwOfs = offset
            object_format.dwType = control._type
            # 4 bytes per control; _dispatch_events relies on this stride.
            offset += 4
        format = dinput.DIDATAFORMAT()
        format.dwSize = ctypes.sizeof(format)
        format.dwObjSize = ctypes.sizeof(dinput.DIOBJECTDATAFORMAT)
        format.dwFlags = 0
        format.dwDataSize = offset
        format.dwNumObjs = len(object_formats)
        format.rgodf = ctypes.cast(ctypes.pointer(object_formats),
                                   dinput.LPDIOBJECTDATAFORMAT)
        self._device.SetDataFormat(format)

        # Size the device's event buffer (64 entries).
        prop = dinput.DIPROPDWORD()
        prop.diph.dwSize = ctypes.sizeof(prop)
        prop.diph.dwHeaderSize = ctypes.sizeof(prop.diph)
        prop.diph.dwObj = 0
        prop.diph.dwHow = dinput.DIPH_DEVICE
        prop.dwData = 64 * ctypes.sizeof(dinput.DIDATAFORMAT)
        self._device.SetProperty(dinput.DIPROP_BUFFERSIZE,
                                 ctypes.byref(prop.diph))

    def open(self, window=None, exclusive=False):
        """Acquire the device and hook its events into the event loop."""
        if not self.controls:
            return

        if window is None:
            # Pick any open window, or the shadow window if no windows
            # have been created yet.
            window = pyglet.gl._shadow_window
            for window in pyglet.app.windows:
                break

        flags = dinput.DISCL_BACKGROUND
        if exclusive:
            flags |= dinput.DISCL_EXCLUSIVE
        else:
            flags |= dinput.DISCL_NONEXCLUSIVE

        # DirectInput signals this Win32 event when buffered data arrives.
        self._wait_object = _kernel32.CreateEventW(None, False, False, None)
        self._device.SetEventNotification(self._wait_object)
        pyglet.app.platform_event_loop.add_wait_object(self._wait_object,
                                                       self._dispatch_events)

        self._device.SetCooperativeLevel(window._hwnd, flags)
        self._device.Acquire()

    def close(self):
        """Release the device and undo everything open() set up."""
        if not self.controls:
            return

        pyglet.app.platform_event_loop.remove_wait_object(self._wait_object)

        self._device.Unacquire()
        self._device.SetEventNotification(None)

        _kernel32.CloseHandle(self._wait_object)

    def get_controls(self):
        return self.controls

    def _dispatch_events(self):
        """Drain the buffered device data and update the controls."""
        if not self.controls:
            return

        events = (dinput.DIDEVICEOBJECTDATA * 64)()
        n_events = win32.DWORD(len(events))
        self._device.GetDeviceData(ctypes.sizeof(dinput.DIDEVICEOBJECTDATA),
                                   ctypes.cast(ctypes.pointer(events),
                                               dinput.LPDIDEVICEOBJECTDATA),
                                   ctypes.byref(n_events),
                                   0)
        for event in events[:n_events.value]:
            # dwOfs was assigned as 4 * control index in _set_format.
            index = event.dwOfs // 4
            self.controls[index]._set_value(event.dwData)
# Process-wide IDirectInput8 COM interface, created lazily.
_i_dinput = None

def _init_directinput():
    """Create the IDirectInput8 interface on first use (idempotent)."""
    global _i_dinput
    if _i_dinput:
        return

    _i_dinput = dinput.IDirectInput8()
    module = _kernel32.GetModuleHandleW(None)
    dinput.DirectInput8Create(module, dinput.DIRECTINPUT_VERSION,
                              dinput.IID_IDirectInput8W,
                              ctypes.byref(_i_dinput), None)
def get_devices(display=None):
    """Return DirectInputDevice wrappers for all attached input devices."""
    _init_directinput()
    _devices = []

    def _device_enum(device_instance, arg):
        # EnumDevices callback: create a COM device object and wrap it.
        device = dinput.IDirectInputDevice8()
        _i_dinput.CreateDevice(device_instance.contents.guidInstance,
                               ctypes.byref(device),
                               None)
        _devices.append(DirectInputDevice(display,
                                          device, device_instance.contents))
        return dinput.DIENUM_CONTINUE

    _i_dinput.EnumDevices(dinput.DI8DEVCLASS_ALL,
                          dinput.LPDIENUMDEVICESCALLBACK(_device_enum),
                          None, dinput.DIEDFL_ATTACHEDONLY)
    return _devices
def _create_joystick(device):
    """Wrap `device` in a Joystick if it is a joystick/gamepad, else None."""
    if device._type in (dinput.DI8DEVTYPE_JOYSTICK,
                        dinput.DI8DEVTYPE_GAMEPAD):
        return base.Joystick(device)
def get_joysticks(display=None):
    """Return Joystick wrappers for all attached joystick/gamepad devices."""
    # filter(None, ...) drops the Nones from _create_joystick.
    # NOTE(review): on Python 2 this returns a list; on Python 3 it would
    # be a lazy iterator -- confirm which interpreter this targets.
    return filter(None, [_create_joystick(d) for d in get_devices(display)])
| bsd-3-clause |
nickhdamico/py | lib/unidecode/x07b.py | 252 | 4669 | data = (
'Mang ', # 0x00
'Zhu ', # 0x01
'Utsubo ', # 0x02
'Du ', # 0x03
'Ji ', # 0x04
'Xiao ', # 0x05
'Ba ', # 0x06
'Suan ', # 0x07
'Ji ', # 0x08
'Zhen ', # 0x09
'Zhao ', # 0x0a
'Sun ', # 0x0b
'Ya ', # 0x0c
'Zhui ', # 0x0d
'Yuan ', # 0x0e
'Hu ', # 0x0f
'Gang ', # 0x10
'Xiao ', # 0x11
'Cen ', # 0x12
'Pi ', # 0x13
'Bi ', # 0x14
'Jian ', # 0x15
'Yi ', # 0x16
'Dong ', # 0x17
'Shan ', # 0x18
'Sheng ', # 0x19
'Xia ', # 0x1a
'Di ', # 0x1b
'Zhu ', # 0x1c
'Na ', # 0x1d
'Chi ', # 0x1e
'Gu ', # 0x1f
'Li ', # 0x20
'Qie ', # 0x21
'Min ', # 0x22
'Bao ', # 0x23
'Tiao ', # 0x24
'Si ', # 0x25
'Fu ', # 0x26
'Ce ', # 0x27
'Ben ', # 0x28
'Pei ', # 0x29
'Da ', # 0x2a
'Zi ', # 0x2b
'Di ', # 0x2c
'Ling ', # 0x2d
'Ze ', # 0x2e
'Nu ', # 0x2f
'Fu ', # 0x30
'Gou ', # 0x31
'Fan ', # 0x32
'Jia ', # 0x33
'Ge ', # 0x34
'Fan ', # 0x35
'Shi ', # 0x36
'Mao ', # 0x37
'Po ', # 0x38
'Sey ', # 0x39
'Jian ', # 0x3a
'Qiong ', # 0x3b
'Long ', # 0x3c
'Souke ', # 0x3d
'Bian ', # 0x3e
'Luo ', # 0x3f
'Gui ', # 0x40
'Qu ', # 0x41
'Chi ', # 0x42
'Yin ', # 0x43
'Yao ', # 0x44
'Xian ', # 0x45
'Bi ', # 0x46
'Qiong ', # 0x47
'Gua ', # 0x48
'Deng ', # 0x49
'Jiao ', # 0x4a
'Jin ', # 0x4b
'Quan ', # 0x4c
'Sun ', # 0x4d
'Ru ', # 0x4e
'Fa ', # 0x4f
'Kuang ', # 0x50
'Zhu ', # 0x51
'Tong ', # 0x52
'Ji ', # 0x53
'Da ', # 0x54
'Xing ', # 0x55
'Ce ', # 0x56
'Zhong ', # 0x57
'Kou ', # 0x58
'Lai ', # 0x59
'Bi ', # 0x5a
'Shai ', # 0x5b
'Dang ', # 0x5c
'Zheng ', # 0x5d
'Ce ', # 0x5e
'Fu ', # 0x5f
'Yun ', # 0x60
'Tu ', # 0x61
'Pa ', # 0x62
'Li ', # 0x63
'Lang ', # 0x64
'Ju ', # 0x65
'Guan ', # 0x66
'Jian ', # 0x67
'Han ', # 0x68
'Tong ', # 0x69
'Xia ', # 0x6a
'Zhi ', # 0x6b
'Cheng ', # 0x6c
'Suan ', # 0x6d
'Shi ', # 0x6e
'Zhu ', # 0x6f
'Zuo ', # 0x70
'Xiao ', # 0x71
'Shao ', # 0x72
'Ting ', # 0x73
'Ce ', # 0x74
'Yan ', # 0x75
'Gao ', # 0x76
'Kuai ', # 0x77
'Gan ', # 0x78
'Chou ', # 0x79
'Kago ', # 0x7a
'Gang ', # 0x7b
'Yun ', # 0x7c
'O ', # 0x7d
'Qian ', # 0x7e
'Xiao ', # 0x7f
'Jian ', # 0x80
'Pu ', # 0x81
'Lai ', # 0x82
'Zou ', # 0x83
'Bi ', # 0x84
'Bi ', # 0x85
'Bi ', # 0x86
'Ge ', # 0x87
'Chi ', # 0x88
'Guai ', # 0x89
'Yu ', # 0x8a
'Jian ', # 0x8b
'Zhao ', # 0x8c
'Gu ', # 0x8d
'Chi ', # 0x8e
'Zheng ', # 0x8f
'Jing ', # 0x90
'Sha ', # 0x91
'Zhou ', # 0x92
'Lu ', # 0x93
'Bo ', # 0x94
'Ji ', # 0x95
'Lin ', # 0x96
'Suan ', # 0x97
'Jun ', # 0x98
'Fu ', # 0x99
'Zha ', # 0x9a
'Gu ', # 0x9b
'Kong ', # 0x9c
'Qian ', # 0x9d
'Quan ', # 0x9e
'Jun ', # 0x9f
'Chui ', # 0xa0
'Guan ', # 0xa1
'Yuan ', # 0xa2
'Ce ', # 0xa3
'Ju ', # 0xa4
'Bo ', # 0xa5
'Ze ', # 0xa6
'Qie ', # 0xa7
'Tuo ', # 0xa8
'Luo ', # 0xa9
'Dan ', # 0xaa
'Xiao ', # 0xab
'Ruo ', # 0xac
'Jian ', # 0xad
'Xuan ', # 0xae
'Bian ', # 0xaf
'Sun ', # 0xb0
'Xiang ', # 0xb1
'Xian ', # 0xb2
'Ping ', # 0xb3
'Zhen ', # 0xb4
'Sheng ', # 0xb5
'Hu ', # 0xb6
'Shi ', # 0xb7
'Zhu ', # 0xb8
'Yue ', # 0xb9
'Chun ', # 0xba
'Lu ', # 0xbb
'Wu ', # 0xbc
'Dong ', # 0xbd
'Xiao ', # 0xbe
'Ji ', # 0xbf
'Jie ', # 0xc0
'Huang ', # 0xc1
'Xing ', # 0xc2
'Mei ', # 0xc3
'Fan ', # 0xc4
'Chui ', # 0xc5
'Zhuan ', # 0xc6
'Pian ', # 0xc7
'Feng ', # 0xc8
'Zhu ', # 0xc9
'Hong ', # 0xca
'Qie ', # 0xcb
'Hou ', # 0xcc
'Qiu ', # 0xcd
'Miao ', # 0xce
'Qian ', # 0xcf
'[?] ', # 0xd0
'Kui ', # 0xd1
'Sik ', # 0xd2
'Lou ', # 0xd3
'Yun ', # 0xd4
'He ', # 0xd5
'Tang ', # 0xd6
'Yue ', # 0xd7
'Chou ', # 0xd8
'Gao ', # 0xd9
'Fei ', # 0xda
'Ruo ', # 0xdb
'Zheng ', # 0xdc
'Gou ', # 0xdd
'Nie ', # 0xde
'Qian ', # 0xdf
'Xiao ', # 0xe0
'Cuan ', # 0xe1
'Gong ', # 0xe2
'Pang ', # 0xe3
'Du ', # 0xe4
'Li ', # 0xe5
'Bi ', # 0xe6
'Zhuo ', # 0xe7
'Chu ', # 0xe8
'Shai ', # 0xe9
'Chi ', # 0xea
'Zhu ', # 0xeb
'Qiang ', # 0xec
'Long ', # 0xed
'Lan ', # 0xee
'Jian ', # 0xef
'Bu ', # 0xf0
'Li ', # 0xf1
'Hui ', # 0xf2
'Bi ', # 0xf3
'Di ', # 0xf4
'Cong ', # 0xf5
'Yan ', # 0xf6
'Peng ', # 0xf7
'Sen ', # 0xf8
'Zhuan ', # 0xf9
'Pai ', # 0xfa
'Piao ', # 0xfb
'Dou ', # 0xfc
'Yu ', # 0xfd
'Mie ', # 0xfe
'Zhuan ', # 0xff
)
| gpl-3.0 |
konsP/synnefo | snf-cyclades-app/synnefo/helpdesk/templatetags/helpdesk_tags.py | 10 | 3434 | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import template
register = template.Library()
@register.filter(name="vm_public_ip")
def vm_public_ip(vm):
    """
    Identify if vm is connected to ``public`` network and return the ipv4
    address
    """
    public_nics = vm.nics.filter(network__public=True)
    try:
        return public_nics[0].ipv4_address
    except IndexError:
        return "No public ip"
# Maps a VM/network state string to a Bootstrap badge style suffix
# (rendered as "badge badge-<suffix>" by object_status_badge).
VM_STATE_CSS_MAP = {
    'BUILD': 'warning',
    'PENDING': 'warning',
    'ERROR': 'important',
    'STOPPED': 'notice',
    'STARTED': 'success',
    'ACTIVE': 'success',
    'DESTROYED': 'inverse'
}
@register.filter(name="object_status_badge", is_safe=True)
def object_status_badge(vm_or_net):
    """
    Return a span badge styled based on the vm current status
    """
    # VMs expose `operstate`; networks expose `state`.
    if hasattr(vm_or_net, 'operstate'):
        state = vm_or_net.operstate
    else:
        state = vm_or_net.state
    badge_cls = "badge badge-%s" % VM_STATE_CSS_MAP.get(state, 'notice')
    if vm_or_net.deleted:
        deleted_badge = '<span class="badge badge-important">Deleted</span>'
    else:
        deleted_badge = ""
    return '%s\n<span class="%s">%s</span>' % (deleted_badge, badge_cls, state)
@register.filter(name="network_deleted_badge", is_safe=True)
def network_deleted_badge(network):
    """
    Return a span badge styled based on the vm current status
    """
    if network.deleted:
        return '<span class="badge badge-important">Deleted</span>'
    return ""
@register.filter(name="get_os", is_safe=True)
def get_os(vm):
    """Return the VM's "OS" metadata value, or "unknown" when unavailable."""
    try:
        return vm.metadata.filter(meta_key="OS").get().meta_value
    except Exception:
        # Missing or duplicate metadata rows raise ORM exceptions; render
        # "unknown" instead of breaking the template.  Narrowed from a
        # bare `except:` so KeyboardInterrupt/SystemExit pass through.
        return "unknown"
@register.filter(name="network_vms", is_safe=True)
def network_vms(network, account, show_deleted=False):
    """Return the machines of `account` attached to `network`."""
    nics = network.nics.filter(machine__userid=account)
    if not show_deleted:
        nics = nics.filter(machine__deleted=False).distinct()
    return [nic.machine for nic in nics]
@register.filter(name="network_nics")
def network_nics(network, account, show_deleted=False):
    """Return the NICs of `network` owned by `account` (live NICs only
    unless show_deleted is True)."""
    # Removed the unused `vms = []` local that the original never touched.
    nics = network.nics.filter(machine__userid=account)
    if not show_deleted:
        nics = nics.filter(machine__deleted=False).distinct()
    return nics
@register.filter(name="backend_info", is_safe=True)
def backend_info(vm):
    """Render <dt>/<dd> pairs for every backend field except credentials."""
    backend = vm.backend
    if not backend:
        return "No backend"
    hidden_fields = ['password_hash', 'hash', 'username']
    rendered = ""
    for field in vm.backend._meta.fields:
        if field.name in hidden_fields:
            continue
        rendered += '<dt>Backend ' + field.name + '</dt><dd>' + \
            str(getattr(backend, field.name)) + '</dd>'
    return rendered
| gpl-3.0 |
grlee77/nipype | nipype/interfaces/ants/tests/test_auto_GenWarpFields.py | 9 | 1793 | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.ants.legacy import GenWarpFields
def test_GenWarpFields_inputs():
input_map = dict(args=dict(argstr='%s',
),
bias_field_correction=dict(argstr='-n 1',
),
dimension=dict(argstr='-d %d',
position=1,
usedefault=True,
),
environ=dict(nohash=True,
usedefault=True,
),
force_proceed=dict(argstr='-f 1',
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
input_image=dict(argstr='-i %s',
copyfile=False,
mandatory=True,
),
inverse_warp_template_labels=dict(argstr='-l',
),
max_iterations=dict(argstr='-m %s',
sep='x',
),
num_threads=dict(nohash=True,
usedefault=True,
),
out_prefix=dict(argstr='-o %s',
usedefault=True,
),
quality_check=dict(argstr='-q 1',
),
reference_image=dict(argstr='-r %s',
copyfile=True,
mandatory=True,
),
similarity_metric=dict(argstr='-s %s',
),
terminal_output=dict(nohash=True,
),
transformation_model=dict(argstr='-t %s',
usedefault=True,
),
)
inputs = GenWarpFields.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_GenWarpFields_outputs():
output_map = dict(affine_transformation=dict(),
input_file=dict(),
inverse_warp_field=dict(),
output_file=dict(),
warp_field=dict(),
)
outputs = GenWarpFields.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
| bsd-3-clause |
MotorolaMobilityLLC/external-chromium_org | third_party/jinja2/exceptions.py | 398 | 4530 | # -*- coding: utf-8 -*-
"""
jinja2.exceptions
~~~~~~~~~~~~~~~~~
Jinja exceptions.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
class TemplateError(Exception):
    """Baseclass for all template errors."""

    def __init__(self, message=None):
        # Store the message UTF-8 encoded so Exception machinery works
        # with byte strings on Python 2.
        if message is not None:
            message = unicode(message).encode('utf-8')
        Exception.__init__(self, message)

    @property
    def message(self):
        # Decode back to unicode for display, replacing invalid bytes.
        # Returns None implicitly when there is no message.
        if self.args:
            message = self.args[0]
            if message is not None:
                return message.decode('utf-8', 'replace')
class TemplateNotFound(IOError, LookupError, TemplateError):
    """Raised if a template does not exist."""

    # looks weird, but removes the warning descriptor that just
    # bogusly warns us about message being deprecated
    message = None

    def __init__(self, name, message=None):
        IOError.__init__(self)
        if message is None:
            message = name
        self.message = message
        self.name = name
        # Subclasses may report several attempted template names.
        self.templates = [name]

    def __str__(self):
        return self.message.encode('utf-8')

    # unicode goes after __str__ because we configured 2to3 to rename
    # __unicode__ to __str__. because the 2to3 tree is not designed to
    # remove nodes from it, we leave the above __str__ around and let
    # it override at runtime.
    def __unicode__(self):
        return self.message
class TemplatesNotFound(TemplateNotFound):
    """Like :class:`TemplateNotFound` but raised if multiple templates
    are selected. This is a subclass of :class:`TemplateNotFound`
    exception, so just catching the base exception will catch both.

    .. versionadded:: 2.2
    """

    def __init__(self, names=(), message=None):
        if message is None:
            # Fixed the default message's typo ("non of the templates").
            message = u'none of the templates given were found: ' + \
                u', '.join(map(unicode, names))
        # Report the last tried name as the primary one, if any.
        TemplateNotFound.__init__(self, names and names[-1] or None, message)
        self.templates = list(names)
class TemplateSyntaxError(TemplateError):
    """Raised to tell the user that there is a problem with the template."""

    def __init__(self, message, lineno, name=None, filename=None):
        TemplateError.__init__(self, message)
        self.lineno = lineno
        self.name = name
        self.filename = filename
        # Template source text; set later by the debugging machinery.
        self.source = None

        # this is set to True if the debug.translate_syntax_error
        # function translated the syntax error into a new traceback
        self.translated = False

    def __str__(self):
        return unicode(self).encode('utf-8')

    # unicode goes after __str__ because we configured 2to3 to rename
    # __unicode__ to __str__. because the 2to3 tree is not designed to
    # remove nodes from it, we leave the above __str__ around and let
    # it override at runtime.
    def __unicode__(self):
        # for translated errors we only return the message
        if self.translated:
            return self.message

        # otherwise attach some stuff: "File ..., line N" location info
        location = 'line %d' % self.lineno
        name = self.filename or self.name
        if name:
            location = 'File "%s", %s' % (name, location)
        lines = [self.message, ' ' + location]

        # if the source is set, add the line to the output
        if self.source is not None:
            try:
                line = self.source.splitlines()[self.lineno - 1]
            except IndexError:
                line = None
            if line:
                lines.append(' ' + line.strip())

        return u'\n'.join(lines)
class TemplateAssertionError(TemplateSyntaxError):
    """Like a template syntax error, but covers cases where something in the
    template caused an error at compile time that wasn't necessarily caused
    by a syntax error.  However it's a direct subclass of
    :exc:`TemplateSyntaxError` and has the same attributes.
    """
    # No body needed: only the type distinguishes it from its base.

class TemplateRuntimeError(TemplateError):
    """A generic runtime error in the template engine.  Under some situations
    Jinja may raise this exception.
    """

class UndefinedError(TemplateRuntimeError):
    """Raised if a template tries to operate on :class:`Undefined`."""

class SecurityError(TemplateRuntimeError):
    """Raised if a template tries to do something insecure if the
    sandbox is enabled.
    """

class FilterArgumentError(TemplateRuntimeError):
    """This error is raised if a filter was called with inappropriate
    arguments
    """
cmclaughlin/cabot | setup.py | 1 | 1818 | #!/usr/bin/env python
from setuptools import setup, find_packages
from os import environ as env
# Alert plugins installed alongside cabot.  The set can be overridden via
# the CABOT_PLUGINS_ENABLED environment variable (a comma-separated list
# of package names); otherwise these defaults are used.
DEFAULT_PLUGINS = ["cabot_alert_hipchat", "cabot_alert_twilio", "cabot_alert_email"]
if 'CABOT_PLUGINS_ENABLED' in env:
    plugins = env['CABOT_PLUGINS_ENABLED'].split(',')
else:
    plugins = DEFAULT_PLUGINS

# Read the long description with a context manager so the file handle is
# closed deterministically (`open('README.md').read()` leaked it before).
with open('README.md') as readme:
    long_description = readme.read()

setup(
    name='cabot',
    version='0.0.1-dev',
    description="Self-hosted, easily-deployable monitoring and alerts service"
                " - like a lightweight PagerDuty",
    long_description=long_description,
    author="Arachnys",
    author_email='info@arachnys.com',
    url='http://cabotapp.com',
    license='MIT',
    # Pinned versions: this project predates modern dependency resolvers,
    # so every requirement is pinned exactly.
    install_requires=[
        'Django==1.6.8',
        'Markdown==2.5',
        'PyJWT==0.1.2',
        'South==1.0',
        'amqp==1.4.9',
        'anyjson==0.3.3',
        'argparse==1.2.1',
        'billiard==3.3.0.23',
        'celery==3.1.23',
        'distribute==0.7.3',
        'dj-database-url==0.2.2',
        'django-appconf==0.6',
        'django-celery==3.1.1',
        'django-celery-with-redis==3.0',
        'django-compressor==1.4',
        'django-filter==0.7',
        'django-jsonify==0.2.1',
        'django-polymorphic==0.5.6',
        'django-redis==1.4.5',
        'django-smtp-ssl==1.0',
        'djangorestframework==2.4.2',
        'gunicorn==18.0',
        'gevent==1.0.1',
        'hiredis==0.1.1',
        'httplib2==0.7.7',
        'icalendar==3.2',
        'kombu==3.0.34',
        'mock==1.0.1',
        'psycogreen==1.0',
        'psycopg2==2.5.1',
        'pytz==2014.10',
        'redis==2.9.0',
        'requests==2.9.1',
        'six==1.5.1',
        'twilio==3.4.1',
        'wsgiref==0.1.2',
        'python-dateutil==2.1',
        'django-auth-ldap==1.2.6',
    ] + plugins,
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
)
| mit |
thnee/ansible | lib/ansible/modules/monitoring/zabbix/zabbix_map.py | 2 | 32525 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017-2018, Antony Alekseyev <antony.alekseyev@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: zabbix_map
author:
- "Antony Alekseyev (@Akint)"
short_description: Create/update/delete Zabbix maps
description:
- "This module allows you to create, modify and delete Zabbix map entries,
using Graphviz binaries and text description written in DOT language.
Nodes of the graph will become map elements and edges will become links between map elements.
See U(https://en.wikipedia.org/wiki/DOT_(graph_description_language)) and U(https://www.graphviz.org/) for details.
Inspired by U(http://blog.zabbix.com/maps-for-the-lazy/)."
- "The following extra node attributes are supported:
C(zbx_host) contains name of the host in Zabbix. Use this if desired type of map element is C(host).
C(zbx_group) contains name of the host group in Zabbix. Use this if desired type of map element is C(host group).
C(zbx_map) contains name of the map in Zabbix. Use this if desired type of map element is C(map).
C(zbx_label) contains label of map element.
C(zbx_image) contains name of the image used to display the element in default state.
C(zbx_image_disabled) contains name of the image used to display disabled map element.
C(zbx_image_maintenance) contains name of the image used to display map element in maintenance.
C(zbx_image_problem) contains name of the image used to display map element with problems.
C(zbx_url) contains map element URL in C(name:url) format.
More than one URL could be specified by adding a postfix (e.g., C(zbx_url1), C(zbx_url2))."
- "The following extra link attributes are supported:
C(zbx_draw_style) contains link line draw style. Possible values: C(line), C(bold), C(dotted), C(dashed).
C(zbx_trigger) contains name of the trigger used as a link indicator in C(host_name:trigger_name) format.
More than one trigger could be specified by adding a postfix (e.g., C(zbx_trigger1), C(zbx_trigger2)).
C(zbx_trigger_color) contains indicator color specified either as CSS3 name or as a hexadecimal code starting with C(#).
C(zbx_trigger_draw_style) contains indicator draw style. Possible values are the same as for C(zbx_draw_style)."
requirements:
- "python >= 2.6"
- "zabbix-api >= 0.5.4"
- pydotplus
- webcolors
- Pillow
- Graphviz
version_added: "2.8"
options:
name:
description:
- Name of the map.
required: true
aliases: [ "map_name" ]
data:
description:
- Graph written in DOT language.
required: false
aliases: [ "dot_data" ]
state:
description:
- State of the map.
- On C(present), it will create if map does not exist or update the map if the associated data is different.
- On C(absent) will remove the map if it exists.
required: false
choices: ['present', 'absent']
default: "present"
width:
description:
- Width of the map.
required: false
default: 800
height:
description:
- Height of the map.
required: false
default: 600
margin:
description:
- Size of white space between map's borders and its elements.
required: false
default: 40
expand_problem:
description:
- Whether the problem trigger will be displayed for elements with a single problem.
required: false
type: bool
default: true
highlight:
description:
- Whether icon highlighting is enabled.
required: false
type: bool
default: true
label_type:
description:
- Map element label type.
required: false
choices: ['label', 'ip', 'name', 'status', 'nothing', 'custom']
default: "name"
default_image:
description:
- Name of the Zabbix image used to display the element if this element doesn't have the C(zbx_image) attribute defined.
required: false
aliases: [ "image" ]
extends_documentation_fragment:
- zabbix
'''
RETURN = ''' # '''
EXAMPLES = '''
###
### Example inventory:
# [web]
# web[01:03].example.com ansible_host=127.0.0.1
# [db]
# db.example.com ansible_host=127.0.0.1
# [backup]
# backup.example.com ansible_host=127.0.0.1
###
### Each inventory host is present in Zabbix with a matching name.
###
### Contents of 'map.j2':
# digraph G {
# graph [layout=dot splines=false overlap=scale]
# INTERNET [zbx_url="Google:https://google.com" zbx_image="Cloud_(96)"]
# {% for web_host in groups.web %}
# {% set web_loop = loop %}
# web{{ '%03d' % web_loop.index }} [zbx_host="{{ web_host }}"]
# INTERNET -> web{{ '%03d' % web_loop.index }} [zbx_trigger="{{ web_host }}:Zabbix agent on {HOST.NAME} is unreachable for 5 minutes"]
# {% for db_host in groups.db %}
# {% set db_loop = loop %}
# web{{ '%03d' % web_loop.index }} -> db{{ '%03d' % db_loop.index }}
# {% endfor %}
# {% endfor %}
# { rank=same
# {% for db_host in groups.db %}
# {% set db_loop = loop %}
# db{{ '%03d' % db_loop.index }} [zbx_host="{{ db_host }}"]
# {% for backup_host in groups.backup %}
# {% set backup_loop = loop %}
# db{{ '%03d' % db_loop.index }} -> backup{{ '%03d' % backup_loop.index }} [color="blue"]
# {% endfor %}
# {% endfor %}
# {% for backup_host in groups.backup %}
# {% set backup_loop = loop %}
# backup{{ '%03d' % backup_loop.index }} [zbx_host="{{ backup_host }}"]
# {% endfor %}
# }
# }
###
### Create Zabbix map "Demo Map" made of template 'map.j2'
- name: Create Zabbix map
zabbix_map:
server_url: http://zabbix.example.com
login_user: username
login_password: password
name: Demo map
state: present
data: "{{ lookup('template', 'map.j2') }}"
default_image: Server_(64)
expand_problem: no
highlight: no
label_type: label
delegate_to: localhost
run_once: yes
'''
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'supported_by': 'community',
'status': ['preview']
}
import atexit
import base64
import traceback
from io import BytesIO
from operator import itemgetter
from distutils.version import StrictVersion
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
try:
import pydotplus
HAS_PYDOTPLUS = True
except ImportError:
PYDOT_IMP_ERR = traceback.format_exc()
HAS_PYDOTPLUS = False
try:
import webcolors
HAS_WEBCOLORS = True
except ImportError:
WEBCOLORS_IMP_ERR = traceback.format_exc()
HAS_WEBCOLORS = False
try:
from zabbix_api import ZabbixAPI
HAS_ZABBIX_API = True
except ImportError:
ZBX_IMP_ERR = traceback.format_exc()
HAS_ZABBIX_API = False
try:
from PIL import Image
HAS_PIL = True
except ImportError:
PIL_IMP_ERR = traceback.format_exc()
HAS_PIL = False
class Map():
def __init__(self, module, zbx):
self._module = module
self._zapi = zbx
self.map_name = module.params['name']
self.dot_data = module.params['data']
self.width = module.params['width']
self.height = module.params['height']
self.state = module.params['state']
self.default_image = module.params['default_image']
self.map_id = self._get_sysmap_id(self.map_name)
self.margin = module.params['margin']
self.expand_problem = module.params['expand_problem']
self.highlight = module.params['highlight']
self.label_type = module.params['label_type']
self.api_version = self._zapi.api_version()
self.selements_sort_keys = self._get_selements_sort_keys()
def _build_graph(self):
try:
graph_without_positions = pydotplus.graph_from_dot_data(self.dot_data)
dot_data_with_positions = graph_without_positions.create_dot()
graph_with_positions = pydotplus.graph_from_dot_data(dot_data_with_positions)
if graph_with_positions:
return graph_with_positions
except Exception as e:
self._module.fail_json(msg="Failed to build graph from DOT data: %s" % e)
def get_map_config(self):
if not self.dot_data:
self._module.fail_json(msg="'data' is mandatory with state 'present'")
graph = self._build_graph()
nodes = self._get_graph_nodes(graph)
edges = self._get_graph_edges(graph)
icon_ids = self._get_icon_ids()
map_config = {
'name': self.map_name,
'label_type': self._get_label_type_id(self.label_type),
'expandproblem': int(self.expand_problem),
'highlight': int(self.highlight),
'width': self.width,
'height': self.height,
'selements': self._get_selements(graph, nodes, icon_ids),
'links': self._get_links(nodes, edges),
}
return map_config
def _get_label_type_id(self, label_type):
label_type_ids = {
'label': 0,
'ip': 1,
'name': 2,
'status': 3,
'nothing': 4,
'custom': 5,
}
try:
label_type_id = label_type_ids[label_type]
except Exception as e:
self._module.fail_json(msg="Failed to find id for label type '%s': %s" % (label_type, e))
return label_type_id
def _get_images_info(self, data, icon_ids):
images = [
{
'dot_tag': 'zbx_image',
'zbx_property': 'iconid_off',
'mandatory': True
},
{
'dot_tag': 'zbx_image_disabled',
'zbx_property': 'iconid_disabled',
'mandatory': False
},
{
'dot_tag': 'zbx_image_maintenance',
'zbx_property': 'iconid_maintenance',
'mandatory': False
},
{
'dot_tag': 'zbx_image_problem',
'zbx_property': 'iconid_on',
'mandatory': False
}
]
images_info = {}
default_image = self.default_image if self.default_image else sorted(icon_ids.items())[0][0]
for image in images:
image_name = data.get(image['dot_tag'], None)
if not image_name:
if image['mandatory']:
image_name = default_image
else:
continue
image_name = remove_quotes(image_name)
if image_name in icon_ids:
images_info[image['zbx_property']] = icon_ids[image_name]
if not image['mandatory']:
images_info['use_iconmap'] = 0
else:
self._module.fail_json(msg="Failed to find id for image '%s'" % image_name)
return images_info
def _get_element_type(self, data):
types = {
'host': 0,
'sysmap': 1,
'trigger': 2,
'group': 3,
'image': 4
}
element_type = {
'elementtype': types['image'],
}
if StrictVersion(self.api_version) < StrictVersion('3.4'):
element_type.update({
'elementid': "0",
})
for type_name, type_id in sorted(types.items()):
field_name = 'zbx_' + type_name
if field_name in data:
method_name = '_get_' + type_name + '_id'
element_name = remove_quotes(data[field_name])
get_element_id = getattr(self, method_name, None)
if get_element_id:
elementid = get_element_id(element_name)
if elementid and int(elementid) > 0:
element_type.update({
'elementtype': type_id,
'label': element_name
})
if StrictVersion(self.api_version) < StrictVersion('3.4'):
element_type.update({
'elementid': elementid,
})
else:
element_type.update({
'elements': [{
type_name + 'id': elementid,
}],
})
break
else:
self._module.fail_json(msg="Failed to find id for %s '%s'" % (type_name, element_name))
return element_type
# get list of map elements (nodes)
def _get_selements(self, graph, nodes, icon_ids):
selements = []
icon_sizes = {}
scales = self._get_scales(graph)
for selementid, (node, data) in enumerate(nodes.items(), start=1):
selement = {
'selementid': selementid
}
data['selementid'] = selementid
images_info = self._get_images_info(data, icon_ids)
selement.update(images_info)
image_id = images_info['iconid_off']
if image_id not in icon_sizes:
icon_sizes[image_id] = self._get_icon_size(image_id)
pos = self._convert_coordinates(data['pos'], scales, icon_sizes[image_id])
selement.update(pos)
selement['label'] = remove_quotes(node)
element_type = self._get_element_type(data)
selement.update(element_type)
label = self._get_label(data)
if label:
selement['label'] = label
urls = self._get_urls(data)
if urls:
selement['urls'] = urls
selements.append(selement)
return selements
def _get_links(self, nodes, edges):
links = {}
for edge in edges:
link_id = tuple(sorted(edge.obj_dict['points']))
node1, node2 = link_id
data = edge.obj_dict['attributes']
if "style" in data and data['style'] == "invis":
continue
if link_id not in links:
links[link_id] = {
'selementid1': min(nodes[node1]['selementid'], nodes[node2]['selementid']),
'selementid2': max(nodes[node1]['selementid'], nodes[node2]['selementid']),
}
link = links[link_id]
if "color" not in link:
link['color'] = self._get_color_hex(remove_quotes(data.get('color', 'green')))
if "zbx_draw_style" not in link:
link['drawtype'] = self._get_link_draw_style_id(remove_quotes(data.get('zbx_draw_style', 'line')))
label = self._get_label(data)
if label and "label" not in link:
link['label'] = label
triggers = self._get_triggers(data)
if triggers:
if "linktriggers" not in link:
link['linktriggers'] = []
link['linktriggers'] += triggers
return list(links.values())
def _get_urls(self, data):
urls = []
for url_raw in [remove_quotes(value) for key, value in data.items() if key.startswith("zbx_url")]:
try:
name, url = url_raw.split(':', 1)
except Exception as e:
self._module.fail_json(msg="Failed to parse zbx_url='%s': %s" % (url_raw, e))
urls.append({
'name': name,
'url': url,
})
return urls
def _get_triggers(self, data):
triggers = []
for trigger_definition in [remove_quotes(value) for key, value in data.items() if key.startswith("zbx_trigger")]:
triggerid = self._get_trigger_id(trigger_definition)
if triggerid:
triggers.append({
'triggerid': triggerid,
'color': self._get_color_hex(remove_quotes(data.get('zbx_trigger_color', 'red'))),
'drawtype': self._get_link_draw_style_id(remove_quotes(data.get('zbx_trigger_draw_style', 'bold'))),
})
else:
self._module.fail_json(msg="Failed to find trigger '%s'" % (trigger_definition))
return triggers
@staticmethod
def _get_label(data, default=None):
if "zbx_label" in data:
label = remove_quotes(data['zbx_label']).replace('\\n', '\n')
elif "label" in data:
label = remove_quotes(data['label'])
else:
label = default
return label
def _get_sysmap_id(self, map_name):
exist_map = self._zapi.map.get({'filter': {'name': map_name}})
if exist_map:
return exist_map[0]['sysmapid']
return None
def _get_group_id(self, group_name):
exist_group = self._zapi.hostgroup.get({'filter': {'name': group_name}})
if exist_group:
return exist_group[0]['groupid']
return None
    def map_exists(self):
        # map_id is resolved in __init__ via _get_sysmap_id(); it is None
        # when no map with the configured name exists in Zabbix.
        return bool(self.map_id)
def create_map(self, map_config):
try:
if self._module.check_mode:
self._module.exit_json(changed=True)
result = self._zapi.map.create(map_config)
if result:
return result
except Exception as e:
self._module.fail_json(msg="Failed to create map: %s" % e)
def update_map(self, map_config):
if not self.map_id:
self._module.fail_json(msg="Failed to update map: map_id is unknown. Try to create_map instead.")
try:
if self._module.check_mode:
self._module.exit_json(changed=True)
map_config['sysmapid'] = self.map_id
result = self._zapi.map.update(map_config)
if result:
return result
except Exception as e:
self._module.fail_json(msg="Failed to update map: %s" % e)
def delete_map(self):
if not self.map_id:
self._module.fail_json(msg="Failed to delete map: map_id is unknown.")
try:
if self._module.check_mode:
self._module.exit_json(changed=True)
self._zapi.map.delete([self.map_id])
except Exception as e:
self._module.fail_json(msg="Failed to delete map, Exception: %s" % e)
def is_exist_map_correct(self, generated_map_config):
exist_map_configs = self._zapi.map.get({
'sysmapids': self.map_id,
'selectLinks': 'extend',
'selectSelements': 'extend'
})
exist_map_config = exist_map_configs[0]
if not self._is_dicts_equal(generated_map_config, exist_map_config):
return False
if not self._is_selements_equal(generated_map_config['selements'], exist_map_config['selements']):
return False
self._update_ids(generated_map_config, exist_map_config)
if not self._is_links_equal(generated_map_config['links'], exist_map_config['links']):
return False
return True
def _get_selements_sort_keys(self):
keys_to_sort = ['label']
if StrictVersion(self.api_version) < StrictVersion('3.4'):
keys_to_sort.insert(0, 'elementid')
return keys_to_sort
def _is_selements_equal(self, generated_selements, exist_selements):
if len(generated_selements) != len(exist_selements):
return False
generated_selements_sorted = sorted(generated_selements, key=itemgetter(*self.selements_sort_keys))
exist_selements_sorted = sorted(exist_selements, key=itemgetter(*self.selements_sort_keys))
for (generated_selement, exist_selement) in zip(generated_selements_sorted, exist_selements_sorted):
if StrictVersion(self.api_version) >= StrictVersion("3.4"):
if not self._is_elements_equal(generated_selement.get('elements', []), exist_selement.get('elements', [])):
return False
if not self._is_dicts_equal(generated_selement, exist_selement, ['selementid']):
return False
if not self._is_urls_equal(generated_selement.get('urls', []), exist_selement.get('urls', [])):
return False
return True
def _is_urls_equal(self, generated_urls, exist_urls):
if len(generated_urls) != len(exist_urls):
return False
generated_urls_sorted = sorted(generated_urls, key=itemgetter('name', 'url'))
exist_urls_sorted = sorted(exist_urls, key=itemgetter('name', 'url'))
for (generated_url, exist_url) in zip(generated_urls_sorted, exist_urls_sorted):
if not self._is_dicts_equal(generated_url, exist_url, ['selementid']):
return False
return True
def _is_elements_equal(self, generated_elements, exist_elements):
if len(generated_elements) != len(exist_elements):
return False
generated_elements_sorted = sorted(generated_elements, key=lambda k: k.values()[0])
exist_elements_sorted = sorted(exist_elements, key=lambda k: k.values()[0])
for (generated_element, exist_element) in zip(generated_elements_sorted, exist_elements_sorted):
if not self._is_dicts_equal(generated_element, exist_element, ['selementid']):
return False
return True
# since generated IDs differ from real Zabbix ones, make real IDs match generated ones
def _update_ids(self, generated_map_config, exist_map_config):
generated_selements_sorted = sorted(generated_map_config['selements'], key=itemgetter(*self.selements_sort_keys))
exist_selements_sorted = sorted(exist_map_config['selements'], key=itemgetter(*self.selements_sort_keys))
id_mapping = {}
for (generated_selement, exist_selement) in zip(generated_selements_sorted, exist_selements_sorted):
id_mapping[exist_selement['selementid']] = generated_selement['selementid']
for link in exist_map_config['links']:
link['selementid1'] = id_mapping[link['selementid1']]
link['selementid2'] = id_mapping[link['selementid2']]
if link['selementid2'] < link['selementid1']:
link['selementid1'], link['selementid2'] = link['selementid2'], link['selementid1']
def _is_links_equal(self, generated_links, exist_links):
if len(generated_links) != len(exist_links):
return False
generated_links_sorted = sorted(generated_links, key=itemgetter('selementid1', 'selementid2', 'color', 'drawtype'))
exist_links_sorted = sorted(exist_links, key=itemgetter('selementid1', 'selementid2', 'color', 'drawtype'))
for (generated_link, exist_link) in zip(generated_links_sorted, exist_links_sorted):
if not self._is_dicts_equal(generated_link, exist_link, ['selementid1', 'selementid2']):
return False
if not self._is_triggers_equal(generated_link.get('linktriggers', []), exist_link.get('linktriggers', [])):
return False
return True
def _is_triggers_equal(self, generated_triggers, exist_triggers):
if len(generated_triggers) != len(exist_triggers):
return False
generated_triggers_sorted = sorted(generated_triggers, key=itemgetter('triggerid'))
exist_triggers_sorted = sorted(exist_triggers, key=itemgetter('triggerid'))
for (generated_trigger, exist_trigger) in zip(generated_triggers_sorted, exist_triggers_sorted):
if not self._is_dicts_equal(generated_trigger, exist_trigger):
return False
return True
@staticmethod
def _is_dicts_equal(d1, d2, exclude_keys=None):
if exclude_keys is None:
exclude_keys = []
for key in d1.keys():
if isinstance(d1[key], dict) or isinstance(d1[key], list):
continue
if key in exclude_keys:
continue
# compare as strings since Zabbix API returns everything as strings
if key not in d2 or str(d2[key]) != str(d1[key]):
return False
return True
def _get_host_id(self, hostname):
hostid = self._zapi.host.get({'filter': {'host': hostname}})
if hostid:
return str(hostid[0]['hostid'])
def _get_trigger_id(self, trigger_definition):
try:
host, trigger = trigger_definition.split(':', 1)
except Exception as e:
self._module.fail_json(msg="Failed to parse zbx_trigger='%s': %s" % (trigger_definition, e))
triggerid = self._zapi.trigger.get({
'host': host,
'filter': {
'description': trigger
}
})
if triggerid:
return str(triggerid[0]['triggerid'])
def _get_icon_ids(self):
icons_list = self._zapi.image.get({})
icon_ids = {}
for icon in icons_list:
icon_ids[icon['name']] = icon['imageid']
return icon_ids
def _get_icon_size(self, icon_id):
icons_list = self._zapi.image.get({
'imageids': [
icon_id
],
'select_image': True
})
if len(icons_list) > 0:
icon_base64 = icons_list[0]['image']
else:
self._module.fail_json(msg="Failed to find image with id %s" % icon_id)
image = Image.open(BytesIO(base64.b64decode(icon_base64)))
icon_width, icon_height = image.size
return icon_width, icon_height
@staticmethod
def _get_node_attributes(node):
attr = {}
if "attributes" in node.obj_dict:
attr.update(node.obj_dict['attributes'])
pos = node.get_pos()
if pos is not None:
pos = remove_quotes(pos)
xx, yy = pos.split(",")
attr['pos'] = (float(xx), float(yy))
return attr
def _get_graph_nodes(self, parent):
nodes = {}
for node in parent.get_nodes():
node_name = node.get_name()
if node_name in ('node', 'graph', 'edge'):
continue
nodes[node_name] = self._get_node_attributes(node)
for subgraph in parent.get_subgraphs():
nodes.update(self._get_graph_nodes(subgraph))
return nodes
def _get_graph_edges(self, parent):
edges = []
for edge in parent.get_edges():
edges.append(edge)
for subgraph in parent.get_subgraphs():
edges += self._get_graph_edges(subgraph)
return edges
def _get_scales(self, graph):
bb = remove_quotes(graph.get_bb())
min_x, min_y, max_x, max_y = bb.split(",")
scale_x = (self.width - self.margin * 2) / (float(max_x) - float(min_x)) if float(max_x) != float(min_x) else 0
scale_y = (self.height - self.margin * 2) / (float(max_y) - float(min_y)) if float(max_y) != float(min_y) else 0
return {
'min_x': float(min_x),
'min_y': float(min_y),
'max_x': float(max_x),
'max_y': float(max_y),
'scale_x': float(scale_x),
'scale_y': float(scale_y),
}
# transform Graphviz coordinates to Zabbix's ones
def _convert_coordinates(self, pos, scales, icon_size):
return {
'x': int((pos[0] - scales['min_x']) * scales['scale_x'] - icon_size[0] / 2 + self.margin),
'y': int((scales['max_y'] - pos[1] + scales['min_y']) * scales['scale_y'] - icon_size[1] / 2 + self.margin),
}
def _get_color_hex(self, color_name):
if color_name.startswith('#'):
color_hex = color_name
else:
try:
color_hex = webcolors.name_to_hex(color_name)
except Exception as e:
self._module.fail_json(msg="Failed to get RGB hex for color '%s': %s" % (color_name, e))
color_hex = color_hex.strip('#').upper()
return color_hex
def _get_link_draw_style_id(self, draw_style):
draw_style_ids = {
'line': 0,
'bold': 2,
'dotted': 3,
'dashed': 4
}
try:
draw_style_id = draw_style_ids[draw_style]
except Exception as e:
self._module.fail_json(msg="Failed to find id for draw type '%s': %s" % (draw_style, e))
return draw_style_id
# If a string has single or double quotes around it, remove them.
def remove_quotes(s):
    """Strip one layer of matching single or double quotes from *s*.

    Returns *s* unchanged when it is empty or not wrapped in a matching
    pair of quotes.  The emptiness guard fixes an IndexError the original
    raised on ``s == ''`` (``s[0]`` on an empty string).
    """
    if s and (s[0] == s[-1]) and s.startswith(("'", '"')):
        s = s[1:-1]
    return s
def main():
    """Entry point: parse module arguments, connect to Zabbix and
    create/update/delete the map according to ``state``.
    """
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            timeout=dict(type='int', default=10),
            validate_certs=dict(type='bool', required=False, default=True),
            name=dict(type='str', required=True, aliases=['map_name']),
            data=dict(type='str', required=False, aliases=['dot_data']),
            width=dict(type='int', default=800),
            height=dict(type='int', default=600),
            state=dict(default="present", choices=['present', 'absent']),
            default_image=dict(type='str', required=False, aliases=['image']),
            margin=dict(type='int', default=40),
            expand_problem=dict(type='bool', default=True),
            highlight=dict(type='bool', default=True),
            label_type=dict(type='str', default='name', choices=['label', 'ip', 'name', 'status', 'nothing', 'custom']),
        ),
        supports_check_mode=True
    )
    # Fail early with an actionable message for each missing optional
    # dependency (imports at the top of the file only record the error).
    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)
    if not HAS_PYDOTPLUS:
        module.fail_json(msg=missing_required_lib('pydotplus', url='https://pypi.org/project/pydotplus/'), exception=PYDOT_IMP_ERR)
    if not HAS_WEBCOLORS:
        module.fail_json(msg=missing_required_lib('webcolors', url='https://pypi.org/project/webcolors/'), exception=WEBCOLORS_IMP_ERR)
    if not HAS_PIL:
        module.fail_json(msg=missing_required_lib('Pillow', url='https://pypi.org/project/Pillow/'), exception=PIL_IMP_ERR)
    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    timeout = module.params['timeout']
    validate_certs = module.params['validate_certs']
    zbx = None
    # login to zabbix
    try:
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # ensure the API session is closed however the module exits
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)
    sysmap = Map(module, zbx)
    if sysmap.state == "absent":
        if sysmap.map_exists():
            sysmap.delete_map()
            module.exit_json(changed=True, result="Successfully deleted map: %s" % sysmap.map_name)
        else:
            module.exit_json(changed=False)
    else:
        # state == "present": create the map, or update it only when the
        # generated configuration differs from what Zabbix already has.
        map_config = sysmap.get_map_config()
        if sysmap.map_exists():
            if sysmap.is_exist_map_correct(map_config):
                module.exit_json(changed=False)
            else:
                sysmap.update_map(map_config)
                module.exit_json(changed=True, result="Successfully updated map: %s" % sysmap.map_name)
        else:
            sysmap.create_map(map_config)
            module.exit_json(changed=True, result="Successfully created map: %s" % sysmap.map_name)
if __name__ == '__main__':
main()
| gpl-3.0 |
urbaniak/gunicorn | gunicorn/pidfile.py | 7 | 2273 | # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from __future__ import with_statement
import errno
import os
import tempfile
class Pidfile(object):
    """\
    Manage a PID file. If a specific name is provided
    it and '"%s.oldpid" % name' will be used. Otherwise
    we create a temp file using os.mkstemp.
    """

    def __init__(self, fname):
        self.fname = fname
        self.pid = None

    def create(self, pid):
        """Record *pid* in the pidfile.

        Raises RuntimeError when the pidfile already belongs to another
        live process, or when the target directory does not exist.
        """
        oldpid = self.validate()
        if oldpid:
            if oldpid == os.getpid():
                return
            # Report the PID found in the file; the original printed
            # os.getpid() (our own PID), which was misleading.
            raise RuntimeError("Already running on PID %s " \
                "(or pid file '%s' is stale)" % (oldpid, self.fname))
        self.pid = pid
        # Write the pidfile via a temp file in the same directory, then
        # rename it over the target so the update is atomic.
        fdir = os.path.dirname(self.fname)
        if fdir and not os.path.isdir(fdir):
            raise RuntimeError("%s doesn't exist. Can't create pidfile." % fdir)
        fd, fname = tempfile.mkstemp(dir=fdir)
        # os.write() requires bytes on Python 3; .encode() is effectively
        # a no-op on a Python 2 str, so this stays compatible with both.
        os.write(fd, ("%s\n" % self.pid).encode('utf-8'))
        if self.fname:
            os.rename(fname, self.fname)
        else:
            self.fname = fname
        os.close(fd)
        # set permissions to -rw-r--r--
        os.chmod(self.fname, 420)

    def rename(self, path):
        """Move the pidfile to *path*, rewriting its contents."""
        self.unlink()
        self.fname = path
        self.create(self.pid)

    def unlink(self):
        """ delete pidfile"""
        # Best effort: only remove the file if it still records our own
        # PID.  A missing/unreadable/garbled pidfile is not an error at
        # shutdown, but we no longer swallow arbitrary exceptions.
        try:
            with open(self.fname, "r") as f:
                pid1 = int(f.read() or 0)
            if pid1 == self.pid:
                os.unlink(self.fname)
        except (IOError, OSError, ValueError):
            pass

    def validate(self):
        """ Validate pidfile and make it stale if needed

        Returns the recorded PID when that process is alive, otherwise
        None (no pidfile, empty/invalid PID, or no such process).
        """
        if not self.fname:
            return
        try:
            with open(self.fname, "r") as f:
                wpid = int(f.read() or 0)
                if wpid <= 0:
                    return
                try:
                    # Signal 0 only checks for process existence.
                    os.kill(wpid, 0)
                    return wpid
                except OSError as e:
                    if e.args[0] == errno.ESRCH:
                        # no such process: the pidfile is stale
                        return
                    raise
        except IOError as e:
            if e.args[0] == errno.ENOENT:
                return
            raise
| mit |
bureau14/qdb-benchmark | thirdparty/boost/libs/python/pyste/tests/SmartFileUT.py | 54 | 2479 | # Copyright Bruno da Silva de Oliveira 2003. Use, modification and
# distribution is subject to the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import sys
sys.path.append('../src')
from SmartFile import *
import unittest
import tempfile
import os
import time
class SmartFileTest(unittest.TestCase):
    """Tests for SmartFile, a file wrapper that (per these tests) only
    rewrites the target file when the new contents differ from what is
    already on disk, and flushes its contents when closed or collected.
    """
    # NOTE(review): tempfile.mktemp() is race-prone and deprecated; kept
    # as-is because this suite targets Python 2-era tooling (Pyste).
    FILENAME = tempfile.mktemp()
    def setUp(self):
        self._Clean()
    def tearDown(self):
        self._Clean()
    def _Clean(self):
        # Remove the scratch file; a missing file is fine.
        try:
            os.remove(self.FILENAME)
        except OSError: pass
    def testNonExistant(self):
        "Must override the file, as there's no file in the disk yet"
        self.assert_(not os.path.isfile(self.FILENAME))
        f = SmartFile(self.FILENAME, 'w')
        f.write('Testing 123\nTesting again.')
        f.close()
        self.assert_(os.path.isfile(self.FILENAME))
    def testOverride(self):
        "Must override the file, because the contents are different"
        contents = 'Contents!\nContents!'
        # create the file normally first
        # NOTE(review): `file()` is the Python 2 builtin; this suite does
        # not run on Python 3 as written.
        f = file(self.FILENAME, 'w')
        f.write(contents)
        f.close()
        file_time = os.path.getmtime(self.FILENAME)
        self.assert_(os.path.isfile(self.FILENAME))
        # sleep so a rewrite produces a visibly different mtime
        time.sleep(2)
        f = SmartFile(self.FILENAME, 'w')
        f.write(contents + '_')
        f.close()
        new_file_time = os.path.getmtime(self.FILENAME)
        self.assert_(new_file_time != file_time)
    def testNoOverride(self):
        "Must not override the file, because the contents are the same"
        contents = 'Contents!\nContents!'
        # create the file normally first
        f = file(self.FILENAME, 'w')
        f.write(contents)
        f.close()
        file_time = os.path.getmtime(self.FILENAME)
        self.assert_(os.path.isfile(self.FILENAME))
        time.sleep(2)
        # identical contents: SmartFile must leave the mtime untouched
        f = SmartFile(self.FILENAME, 'w')
        f.write(contents)
        f.close()
        new_file_time = os.path.getmtime(self.FILENAME)
        self.assert_(new_file_time == file_time)
    def testAutoClose(self):
        "Must be closed when garbage-collected"
        def foo():
            f = SmartFile(self.FILENAME)
            f.write('testing')
            # nothing written to disk while the SmartFile is alive
            self.assert_(not os.path.isfile(self.FILENAME))
        foo()
        # leaving foo() drops the last reference, which must flush
        self.assert_(os.path.isfile(self.FILENAME))
if __name__ == '__main__':
unittest.main()
| bsd-2-clause |
foresterre/wren | util/libuv.py | 3 | 4582 | #!/usr/bin/env python
# Downloads and compiles libuv.
from __future__ import print_function
import os
import os.path
import platform
import shutil
import subprocess
import sys
LIB_UV_VERSION = "v1.10.0"
LIB_UV_DIR = "deps/libuv"
def python2_binary():
  """Return the name or path of a Python 2 interpreter to invoke."""
  # sys.version_info[0] (rather than .major) keeps Python 2.6 support.
  running_py2 = sys.version_info[0] == 2
  if running_py2:
    return sys.executable or "python"
  return "python2"
def ensure_dir(dir):
  """Creates dir (including missing parents) if not already there."""
  if not os.path.isdir(dir):
    os.makedirs(dir)
def remove_dir(dir):
  """Recursively removes dir."""
  if platform.system() == "Windows":
    # rmtree gives up on readonly files on Windows
    # rd doesn't like paths with forward slashes
    subprocess.check_call(
        ['cmd', '/c', 'rd', '/s', '/q', dir.replace('/', '\\')])
  else:
    shutil.rmtree(dir)
def download_libuv():
    """Clones libuv into deps/libuv and checks out the right version."""
    # Delete it if already there so we ensure we get the correct version if the
    # version number in this script changes.
    if os.path.isdir(LIB_UV_DIR):
        print("Cleaning output directory...")
        remove_dir(LIB_UV_DIR)

    ensure_dir("deps")

    print("Cloning libuv...")
    # Shallow clone: only the branch tip is fetched here; the release tag we
    # actually want is fetched separately below.
    run([
        "git", "clone", "--quiet", "--depth=1",
        "https://github.com/libuv/libuv.git",
        LIB_UV_DIR
    ])

    print("Getting tags...")
    run([
        "git", "fetch", "--quiet", "--depth=1", "--tags"
    ], cwd=LIB_UV_DIR)

    print("Checking out libuv " + LIB_UV_VERSION + "...")
    run([
        "git", "checkout", "--quiet", LIB_UV_VERSION
    ], cwd=LIB_UV_DIR)

    # TODO: Pin gyp to a known-good commit. Update a previously downloaded gyp
    # if it doesn't match that commit.
    print("Downloading gyp...")
    # gyp is libuv's build-file generator; it expects to live in build/gyp.
    run([
        "git", "clone", "--quiet", "--depth=1",
        "https://chromium.googlesource.com/external/gyp.git",
        LIB_UV_DIR + "/build/gyp"
    ])
def build_libuv_mac():
    """Generates an Xcode project via gyp and builds the Release config."""
    # Create the XCode project.
    run([
        python2_binary(), LIB_UV_DIR + "/gyp_uv.py", "-f", "xcode"
    ])

    # Compile it.
    # TODO: Support debug builds too.
    run([
        "xcodebuild",
        # Build a 32-bit + 64-bit universal binary:
        "ARCHS=i386 x86_64", "ONLY_ACTIVE_ARCH=NO",
        "BUILD_DIR=out",
        "-project", LIB_UV_DIR + "/uv.xcodeproj",
        "-configuration", "Release",
        "-target", "All"
    ])
def build_libuv_linux(arch):
    """Generates Makefiles via gyp for the given arch and builds Release."""
    # Set up the Makefile to build for the right architecture.
    gyp_args = [python2_binary(), "gyp_uv.py", "-f", "make"]
    arch_defines = {"-32": "-Dtarget_arch=ia32", "-64": "-Dtarget_arch=x64"}
    if arch in arch_defines:
        gyp_args.append(arch_defines[arch])
    run(gyp_args, cwd=LIB_UV_DIR)

    run(["make", "-C", "out", "BUILDTYPE=Release"], cwd=LIB_UV_DIR)
def build_libuv_windows(arch):
    """Builds libuv on Windows with vcbuild.bat in release mode."""
    build_args = ["cmd", "/c", "vcbuild.bat", "release"]
    arch_option = {"-32": "x86", "-64": "x64"}.get(arch)
    if arch_option is not None:
        build_args.append(arch_option)
    run(build_args, cwd=LIB_UV_DIR)
def build_libuv(arch, out):
    """Builds libuv for the host platform and copies the library to out."""
    system = platform.system()
    builders = {
        "Darwin": lambda: build_libuv_mac(),
        "Linux": lambda: build_libuv_linux(arch),
        "Windows": lambda: build_libuv_windows(arch),
    }
    builder = builders.get(system)
    if builder is None:
        print("Unsupported platform: " + system)
        sys.exit(1)
    builder()

    # Copy the build library to the build directory for Mac and Linux where we
    # support building for multiple architectures.
    if system != "Windows":
        ensure_dir(os.path.dirname(out))
        built_library = os.path.join(LIB_UV_DIR, "out", "Release", "libuv.a")
        shutil.copyfile(built_library, out)
def run(args, cwd=None):
    """Spawn a process to invoke [args] and mute its output.

    On failure, prints the child's captured output and exits this process
    with the child's return code.
    """
    try:
        # check_output() was added in Python 2.7.
        has_check_output = (sys.version_info[0] > 2 or
            (sys.version_info[0] == 2 and sys.version_info[1] >= 7))
        if has_check_output:
            subprocess.check_output(args, cwd=cwd, stderr=subprocess.STDOUT)
        else:
            proc = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            output = proc.communicate()[0]
            # Bug fix: Popen does not raise on a non-zero exit code, so the
            # old fallback silently ignored failed commands.  Mirror the
            # check_output() error behaviour instead.
            if proc.returncode != 0:
                print(output)
                sys.exit(proc.returncode)
    except subprocess.CalledProcessError as error:
        print(error.output)
        sys.exit(error.returncode)
def main():
    """Entry point: dispatch to the download or build action from argv."""
    expect_usage(len(sys.argv) >= 2)

    if sys.argv[1] == "download":
        download_libuv()
    elif sys.argv[1] == "build":
        # "build" takes at most one optional architecture flag.
        expect_usage(len(sys.argv) <= 3)

        arch = ""
        if len(sys.argv) == 3:
            arch = sys.argv[2]
        out = os.path.join("build", "libuv" + arch + ".a")
        build_libuv(arch, out)
    else:
        # Bug fix: this previously read ``expect_usage(false)``, which
        # raised a NameError at runtime instead of printing usage.
        expect_usage(False)
def expect_usage(condition):
    """Exit with a usage message unless condition holds."""
    if not condition:
        print("Usage: libuv.py download")
        print(" libuv.py build [-32|-64]")
        sys.exit(1)
main()
| mit |
Chouser/compose | compose/cli/docopt_command.py | 9 | 1601 | from __future__ import unicode_literals
from __future__ import absolute_import
import sys
from inspect import getdoc
from docopt import docopt, DocoptExit
def docopt_full_help(docstring, *args, **kwargs):
    """Parse with docopt, but show the full docstring on usage errors.

    docopt's DocoptExit only carries the short usage section; re-raise as
    a SystemExit carrying the complete docstring instead.
    """
    try:
        return docopt(docstring, *args, **kwargs)
    except DocoptExit:
        raise SystemExit(docstring)
class DocoptCommand(object):
    """Base class for a docopt-driven CLI with git-style subcommands.

    Subclasses supply the top-level usage text as their class docstring and
    one method per subcommand; each handler's docstring is used as that
    subcommand's usage text.
    """

    def docopt_options(self):
        # options_first=True stops top-level parsing at the first positional
        # argument, leaving subcommand options intact for the second pass.
        return {'options_first': True}

    def sys_dispatch(self):
        """Dispatch using the process argv (drops the program name)."""
        self.dispatch(sys.argv[1:], None)

    def dispatch(self, argv, global_options):
        self.perform_command(*self.parse(argv, global_options))

    def perform_command(self, options, handler, command_options):
        handler(command_options)

    def parse(self, argv, global_options):
        """Resolve argv into (options, handler method, command options).

        Raises SystemExit with the usage text when no command was given and
        NoSuchCommand when the command does not name a handler method.
        """
        options = docopt_full_help(getdoc(self), argv, **self.docopt_options())
        command = options['COMMAND']

        if command is None:
            raise SystemExit(getdoc(self))

        # CLI commands use dashes; handler methods use underscores.
        command = command.replace('-', '_')

        if not hasattr(self, command):
            raise NoSuchCommand(command, self)

        handler = getattr(self, command)
        docstring = getdoc(handler)

        # A handler without a docstring has no usage text, so it cannot be
        # treated as a real subcommand.
        if docstring is None:
            raise NoSuchCommand(command, self)

        command_options = docopt_full_help(docstring, options['ARGS'], options_first=True)

        return options, handler, command_options
class NoSuchCommand(Exception):
    """Raised when a CLI subcommand does not match any handler method."""

    def __init__(self, command, supercommand):
        message = "No such command: {0}".format(command)
        super(NoSuchCommand, self).__init__(message)
        self.command = command
        self.supercommand = supercommand
| apache-2.0 |
philoniare/horizon | openstack_dashboard/dashboards/admin/volumes/tests.py | 27 | 4466 | # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox3.mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.api import cinder
from openstack_dashboard.api import keystone
from openstack_dashboard.test import helpers as test
class VolumeTests(test.BaseAdminViewTests):
    """Tests for the admin volumes panel, using mox-stubbed API clients."""

    @test.create_stubs({api.nova: ('server_list',),
                        cinder: ('volume_list',
                                 'volume_snapshot_list'),
                        keystone: ('tenant_list',)})
    def test_index(self):
        """The index view must list volumes from every tenant."""
        # Admin views query across tenants, so every expectation below pins
        # search_opts={'all_tenants': True}.
        cinder.volume_list(IsA(http.HttpRequest), search_opts={
            'all_tenants': True}).AndReturn(self.cinder_volumes.list())
        cinder.volume_snapshot_list(IsA(http.HttpRequest), search_opts={
            'all_tenants': True}).AndReturn([])
        api.nova.server_list(IsA(http.HttpRequest), search_opts={
            'all_tenants': True}) \
            .AndReturn([self.servers.list(), False])
        keystone.tenant_list(IsA(http.HttpRequest)) \
            .AndReturn([self.tenants.list(), False])
        self.mox.ReplayAll()

        res = self.client.get(reverse('horizon:admin:volumes:index'))

        self.assertTemplateUsed(res, 'admin/volumes/index.html')
        volumes = res.context['volumes_table'].data
        self.assertItemsEqual(volumes, self.cinder_volumes.list())

    @test.create_stubs({cinder: ('volume_type_list_with_qos_associations',
                                 'qos_spec_list',
                                 'extension_supported',
                                 'volume_encryption_type_list')})
    def test_volume_types_tab(self):
        """The volume-types tab must show types, QoS specs and encryption."""
        encryption_list = (self.cinder_volume_encryption_types.list()[0],
                           self.cinder_volume_encryption_types.list()[1])

        cinder.volume_type_list_with_qos_associations(
            IsA(http.HttpRequest)).\
            AndReturn(self.volume_types.list())
        cinder.qos_spec_list(IsA(http.HttpRequest)).\
            AndReturn(self.cinder_qos_specs.list())
        cinder.volume_encryption_type_list(IsA(http.HttpRequest))\
            .AndReturn(encryption_list)
        # The view probes for encryption support repeatedly, hence
        # MultipleTimes() rather than a single expectation.
        cinder.extension_supported(IsA(http.HttpRequest),
                                   'VolumeTypeEncryption').MultipleTimes()\
            .AndReturn(True)
        self.mox.ReplayAll()

        res = self.client.get(reverse(
            'horizon:admin:volumes:volume_types_tab'))

        self.assertEqual(res.status_code, 200)
        self.assertTemplateUsed(
            res, 'admin/volumes/volume_types/volume_types_tables.html')
        volume_types = res.context['volume_types_table'].data
        self.assertItemsEqual(volume_types, self.volume_types.list())
        qos_specs = res.context['qos_specs_table'].data
        self.assertItemsEqual(qos_specs, self.cinder_qos_specs.list())

    @test.create_stubs({cinder: ('volume_list',
                                 'volume_snapshot_list',),
                        keystone: ('tenant_list',)})
    def test_snapshots_tab(self):
        """The snapshots tab must list snapshots across all tenants."""
        cinder.volume_snapshot_list(IsA(http.HttpRequest), search_opts={
            'all_tenants': True}). \
            AndReturn(self.cinder_volume_snapshots.list())
        cinder.volume_list(IsA(http.HttpRequest), search_opts={
            'all_tenants': True}).\
            AndReturn(self.cinder_volumes.list())
        keystone.tenant_list(IsA(http.HttpRequest)). \
            AndReturn([self.tenants.list(), False])
        self.mox.ReplayAll()

        res = self.client.get(reverse('horizon:admin:volumes:snapshots_tab'))

        self.assertEqual(res.status_code, 200)
        self.assertTemplateUsed(res, 'horizon/common/_detail_table.html')
        snapshots = res.context['volume_snapshots_table'].data
        self.assertItemsEqual(snapshots, self.cinder_volume_snapshots.list())
| apache-2.0 |
smARTLab-liv/smartlabatwork-release | slaw_smach/src/slaw_smach/slaw_smach.py | 1 | 11996 | #!/usr/bin/env python
import rospy
from ArmStates import *
from MoveStates import *
from ObjectDetectState import *
from DecisionStates import *
from std_srvs.srv import Empty, EmptyResponse
from std_msgs.msg import Bool
## TODO after Eindhoven: Add failsafe if hole not detected
## add states if object too far or too close to gripper
class Smach():
    """Builds and runs the RoboCup@Work SMACH state machine.

    Wires navigation, grasping and placing states together with the
    competition-specific sub-flows visible below (BNT, CBT, PPT, RV20),
    publishes the machine on an introspection server for smach_viewer,
    and starts execution through the ``/start_SMACH`` service.
    """

    def __init__(self):
        rospy.init_node('slaw_smach')

        self.sm = smach.StateMachine(outcomes=['end'])

        with self.sm:
            ### MOVE STATE WITH RECOVER
            smach.StateMachine.add('MoveToNext', MoveStateUserData(), transitions = {'reached':'DecideAfterMove', 'not_reached': 'RecoverMove', 'failed': 'DeleteCurGoal'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
            smach.StateMachine.add('RecoverMove', RecoverState(), transitions = {'done':'MoveToNext'}, remapping = {'pose_in':'pose', 'pose_out': 'pose'})
            ### END MOVE STATE WITH RECOVER

            ##Decision state after Move:
            smach.StateMachine.add('DecideAfterMove', DecideAfterMoveState(),transitions = {'BNT': 'ScanMatcher_BNT', 'Pickup':'ScanMatcher_Pickup', 'Place':'ScanMatcher_Place', 'End':'end'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})

            ######BNT SPECIFIC
            smach.StateMachine.add('ScanMatcher_BNT', ScanMatcher(), transitions = {'reached':'SleepState', 'not_reached':'ScanMatcher_BNT', 'failed':'SleepState'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
            smach.StateMachine.add('SleepState', SleepState(), transitions = {'done':'DeleteCurGoal'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
            ########END BNT

            ##### DELETE CURRENT GOAL OR GET NEXT GOAL
            smach.StateMachine.add('DeleteCurGoal', DeleteCurrentGoalState(), transitions = {'done':'MoveToNext'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
            smach.StateMachine.add('GetNextGoal', GetNextGoalState(), transitions = {'done':'MoveToNext'}, remapping = {'pose_in':'pose','object_in':'object', 'pose_out':'pose'})
            ##### END DELETE CURRENT GOAL OR GET NEXT GOAL

            ### PICKUP
            smach.StateMachine.add('ScanMatcher_Pickup', ScanMatcher(), transitions = {'reached':'DecideBeforePreGrip', 'not_reached':'ScanMatcher_Pickup', 'failed':'MoveToNext'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
            #smach.StateMachine.add('ScanMatcher_Pickup', ScanMatcher(), transitions = {'reached':'ScanMatcher_Align', 'not_reached':'ScanMatcher_Pickup', 'failed':'MoveToNext'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
            #smach.StateMachine.add('ScanMatcher_Align', AlignState(), transitions = {'done':'DecideBeforePreGrip'})
            ##
            #Either CBT Pickup or normal Pickup
            smach.StateMachine.add('DecideBeforePreGrip', DecideBeforePreGripState(),transitions = {'CBT': 'PreGrip_CBT', 'Pickup':'PreGrip'}, remapping = {'pose_in':'pose', 'pose_out':'pose', 'dist_out':'dist'})

            ######CBT STUFF
            smach.StateMachine.add('PreGrip_CBT', PreGripCBT(), transitions = {'success':'ScanForObjectCBT', 'failed':'TuckArmPreGripCBT'},remapping = {'pose_in':'pose', 'pose_out':'pose'})
            smach.StateMachine.add('TuckArmPreGripCBT', TuckArm(), transitions = {'success':'PreGrip_CBT', 'not_reached':'TuckArmPreGripCBT','failed':'end'})
            smach.StateMachine.add('ScanForObjectCBT', ScanForObjectCBT(), transitions = {'success':'GripCBT'})
            smach.StateMachine.add('GripCBT', GripCBT(), transitions = {'end':'DeleteCurGoal'})
            #### END CBT Stuff

            ### NORMAL PICKUP
            smach.StateMachine.add('PreGrip', PreGrip(), transitions = {'success':'Scan', 'failed':'TuckArmPreGrip'},remapping = {'pose_in':'pose', 'pose_out':'pose'})
            smach.StateMachine.add('TuckArmPreGrip', TuckArm(), transitions = {'success':'PreGrip', 'not_reached':'TuckArmPreGrip','failed':'end'})

            #scan
            smach.StateMachine.add("Scan", ScanForObjectsState(), transitions = {'success': 'Grip', 'failed':'TuckArmMoveNext','nothing_found': 'TuckArmDelete'}, remapping = {'pose_in':'pose', 'pose_out':'pose', 'object_out':'object', 'point_out':'point', 'dist_in':'dist','dist_out':'dist'})

            #if misdetection try again
            smach.StateMachine.add('TuckArmMoveNext', TuckArm(), transitions = {'success':'MoveToNext', 'not_reached':'TuckArmMoveNext','failed':'end'})
            #if nothing found try next Goal
            smach.StateMachine.add('TuckArmDelete', TuckArm(), transitions = {'success':'DeleteCurGoal', 'not_reached':'TuckArmDelete','failed':'end'})

            #Grip Object
            smach.StateMachine.add("Grip", Grip(), transitions = {'success':'DecideRV20', 'too_far':'ScanMatcher_Pickup', 'failed':'TuckArmFailGrip', 'failed_after_grip':'TuckArmGrip'}, remapping = {'pose_in':'pose', 'object_in':'object', 'point_in':'point','pose_out':'pose', 'object_out':'object', 'point_out':'point'})

            #Decide RV20:
            smach.StateMachine.add('DecideRV20', DecideRV20State(),transitions = {'RV20': 'TuckForDriveAfterGrip', 'Normal':'TuckForDriveAfterGrip'}, remapping = {'object_in':'object', 'object_out':'object'})
            #smach.StateMachine.add('DecideRV20', DecideRV20State(),transitions = {'RV20': 'RV20CheckArm', 'Normal':'TuckForDriveAfterGrip'}, remapping = {'object_in':'object', 'object_out':'object', 'pose_out':'pose'})

            ####CHECK if RV20 which one
            smach.StateMachine.add('RV20CheckArm', RV20CheckState(), transitions = {'success':'RV20CheckVision','failed':'TuckArmPreCheckArm'}, remapping = {'pose_in':'pose'})
            smach.StateMachine.add('TuckArmPreCheckArm', TuckArm(), transitions = {'success':'RV20CheckArm', 'not_reached':'TuckArmPreCheckArm','failed':'end'})

            #smach.StateMachine.add('RV20CheckVision', RV20CheckVision(), transitions = {'success':'RV20RotateTake','failed':'RV20RotateReplace'}, remapping = {'pose_in':'pose', 'object_in':'object', 'pose_out':'pose'})
            smach.StateMachine.add('RV20CheckVision', RV20CheckVision(), transitions = {'success':'RV20RotateTake','failed':'RV20Trash'}, remapping = {'pose_in':'pose', 'object_in':'object', 'pose_out':'pose'})

            smach.StateMachine.add('RV20Trash', RV20Trash(), transitions = {'done':'PreGrip'})

            #smach.StateMachine.add('RV20RotateReplace', RV20ReplaceObjectRotate(), transitions = {'success':'RV20Replace','failed':'RV20Replace'}, remapping = {'pose_in':'pose'})
            smach.StateMachine.add('RV20RotateTake', RV20ReplaceObjectRotate(), transitions = {'success':'TuckForDriveAfterGrip','failed':'TuckForDriveAfterGrip'}, remapping = {'pose_in':'pose'})

            #smach.StateMachine.add('RV20Replace', FinePlace(), transitions = {'success':'RV20ReplaceUp', 'failed':'TuckArmFailPlace_RV20', 'too_far':'RV20Replace','failed_after_place':'TuckArmFailPlace_RV20'}, remapping = {'object_in':'object','pose_in':'pose', 'pose_out':'pose', 'point_in':'point'})
            #smach.StateMachine.add('TuckArmFailPlace_RV20', TuckArm(), transitions = {'success':'RV20Replace', 'not_reached':'TuckArmFailPlace_RV20','failed':'end'})
            #smach.StateMachine.add('RV20ReplaceUp', RV20ReplaceUp(), transitions = {'done':'MoveBack10'})

            #MoveBack 10 to skip object and resume scanning
            #smach.StateMachine.add('MoveBack10', MoveBack(0.10), transitions = {'done':'Remove10'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
            #smach.StateMachine.add('Remove10', RemoveDist(0.10), transitions = {'done':'PreGrip'}, remapping = {'dist_in':'dist', 'dist_out':'dist'})

            #Tuck and Move away
            ##Tuck For Drive
            smach.StateMachine.add('TuckForDriveAfterGrip', TuckForDrive(), transitions={'done':'MoveAwayFromPlatform'}, remapping = {'pose_in':'pose'} )

            smach.StateMachine.add('TuckArmGrip', TuckArm(), transitions = {'success':'MoveAwayFromPlatform', 'not_reached':'TuckArmGrip','failed':'end'})
            smach.StateMachine.add('TuckArmFailGrip', TuckArm(), transitions = {'success':'MoveToNext', 'not_reached':'TuckArmFailGrip','failed':'end'})
            smach.StateMachine.add('MoveAwayFromPlatform', RecoverState(), transitions = {'done':'MoveToPlace'})

            ### Move to Place location
            smach.StateMachine.add('MoveToPlace', MoveStateUserData(), transitions = {'reached': 'ScanMatcher_Place', 'not_reached': 'MoveAwayFromPlatform', 'failed': 'MoveAwayFromPlatform'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})

            smach.StateMachine.add('ScanMatcher_Place', ScanMatcher(), transitions = {'reached':'DecideBeforePlace', 'not_reached':'ScanMatcher_Place', 'failed':'DecideBeforePlace'}, remapping = {'pose_in':'pose', 'suffix_in':'suffix', 'pose_out':'pose'})

            #### Decide either Normal place or PPT place
            smach.StateMachine.add('DecideBeforePlace', DecideBeforePlaceState(),transitions = {'PPT': 'PreScanHole', 'Normal':'MoveBack'}, remapping = {'object_in':'object', 'object_out':'object'})

            ####PPT
            smach.StateMachine.add('PreScanHole', PreGrip(), transitions = {'success':'ScanHole', 'failed':'TuckArmPreScan'},remapping = {'pose_in':'pose', 'pose_out':'pose'})
            smach.StateMachine.add('TuckArmPreScan', TuckArm(), transitions = {'success':'PreScanHole', 'not_reached':'TuckArmPreScan','failed':'end'})

            smach.StateMachine.add("ScanHole", ScanForHoles(), transitions = {'success': 'FinePlace', 'failed':'ScanMatcher_Place','nothing_found': 'ScanMatcher_Place'}, remapping = {'pose_in':'pose', 'pose_out':'pose', 'object_in':'object', 'object_out':'object', 'point_out':'point'})
            smach.StateMachine.add('FinePlace', FinePlace(), transitions = {'success':'TuckForDriveAfterPlace', 'failed':'TuckArmFailPlace_PPT', 'too_far':'ScanMatcher_Place','failed_after_place':'TuckArmFailPlace_PPT'}, remapping = {'object_in':'object','pose_in':'pose', 'pose_out':'pose', 'point_in':'point'})
            smach.StateMachine.add('TuckArmFailPlace_PPT', TuckArm(), transitions = {'success':'FinePlace', 'not_reached':'TuckArmFailPlace_PPT','failed':'end'})
            ### END PPT

            ##NORMAL PLACE
            smach.StateMachine.add('MoveBack', MoveBack(0.25), transitions = {'done':'Place'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
            smach.StateMachine.add('Place', Place(), transitions = {'success':'TuckForDriveAfterPlace', 'failed':'TuckArmFailPlace'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})

            ##Tuck For Drive
            smach.StateMachine.add('TuckForDriveAfterPlace', TuckForDrive(), transitions={'done':'MoveAwayFromPlatformAfterPlace'}, remapping = {'pose_in':'pose'} )

            smach.StateMachine.add('TuckArmFailPlace', TuckArm(), transitions = {'success':'Place', 'not_reached':'TuckArmFailPlace','failed':'end'})
            smach.StateMachine.add('MoveAwayFromPlatformAfterPlace', RecoverState(), transitions = {'done':'GetNextGoal'})

        # Create and start the introspection server
        self.sis = smach_ros.IntrospectionServer('server_name', self.sm, '/SLAW_SMACH')
        self.sis.start()
        self.serv = rospy.Service("/start_SMACH", Empty, self.go)

    def go(self, req):
        """Service callback for /start_SMACH: seed userdata and run once."""
        #sm.userdata.pose = "D2"
        print "Starting SMACH"
        locations = rospy.get_param('locations')
        # Start at the first configured location.
        self.sm.userdata.pose = locations[0]
        #self.sm.userdata.suffix = "_grip"
        self.sm.execute()
        return EmptyResponse()

    def stop(self):
        """Shut down the introspection server."""
        self.sis.stop()
if __name__ == '__main__':
    # NOTE(review): this rebinds the name ``smach`` (the imported SMACH
    # module used above).  Harmless here because the state machine is fully
    # constructed before the rebinding, but worth renaming the variable.
    smach = Smach()
    rospy.spin()
    smach.stop()
| mit |
jantman/GitPython | setup.py | 3 | 2972 | #!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from distutils.command.build_py import build_py as _build_py
from setuptools.command.sdist import sdist as _sdist
import os
import sys
from os import path
# Read the package version from the VERSION file next to this script.
# A with-statement guarantees the handle is closed even if reading fails.
with open(path.join(path.dirname(__file__), 'VERSION')) as v:
    VERSION = v.readline().strip()
class build_py(_build_py):
    """build_py command that stamps the real version into git/__init__.py."""

    def run(self):
        init = path.join(self.build_lib, 'git', '__init__.py')
        # Remove any stale copy so _build_py.run() re-copies the pristine
        # source file before we stamp it below.
        if path.exists(init):
            os.unlink(init)
        _build_py.run(self)
        _stamp_version(init)
        # Re-compile after stamping so the cached bytecode matches the
        # edited source.
        self.byte_compile([init])
class sdist(_sdist):
    """sdist command that stamps the version into the release tree."""

    def make_release_tree (self, base_dir, files):
        _sdist.make_release_tree(self, base_dir, files)
        orig = path.join('git', '__init__.py')
        assert path.exists(orig), orig
        dest = path.join(base_dir, orig)
        # make_release_tree may hard-link files into the tree; break the
        # link first so stamping does not modify the working copy.
        if hasattr(os, 'link') and path.exists(dest):
            os.unlink(dest)
        self.copy_file(orig, dest)
        _stamp_version(dest)
def _stamp_version(filename):
found, out = False, list()
try:
f = open(filename, 'r')
except (IOError, OSError):
print >> sys.stderr, "Couldn't find file %s to stamp version" % filename
return
#END handle error, usually happens during binary builds
for line in f:
if '__version__ =' in line:
line = line.replace("'git'", "'%s'" % VERSION)
found = True
out.append(line)
f.close()
if found:
f = open(filename, 'w')
f.writelines(out)
f.close()
else:
print >> sys.stderr, "WARNING: Couldn't find version line in file %s" % filename
# Package metadata; the custom build_py/sdist commands above stamp VERSION
# into git/__init__.py at build time.
setup(name = "GitPython",
      cmdclass={'build_py': build_py, 'sdist': sdist},
      version = VERSION,
      description = "Python Git Library",
      author = "Sebastian Thiel, Michael Trier",
      author_email = "byronimo@gmail.com, mtrier@gmail.com",
      url = "http://gitorious.org/projects/git-python/",
      packages = find_packages('.'),
      py_modules = ['git.'+f[:-3] for f in os.listdir('./git') if f.endswith('.py')],
      package_data = {'git.test' : ['fixtures/*']},
      package_dir = {'git':'git'},
      license = "BSD License",
      requires=('gitdb (>=0.5.1)',),
      install_requires='gitdb >= 0.5.1',
      zip_safe=False,
      long_description = """\
GitPython is a python library used to interact with Git repositories""",
      classifiers = [
          "Development Status :: 4 - Beta",
          "Intended Audience :: Developers",
          "License :: OSI Approved :: BSD License",
          "Operating System :: OS Independent",
          "Programming Language :: Python",
          "Programming Language :: Python :: 2.5",
          "Programming Language :: Python :: 2.6",
          "Topic :: Software Development :: Libraries :: Python Modules",
          ]
      )
| bsd-3-clause |
webmasterraj/GaSiProMo | flask/lib/python2.7/site-packages/docutils/readers/__init__.py | 170 | 3465 | # $Id: __init__.py 7648 2013-04-18 07:36:22Z milde $
# Authors: David Goodger <goodger@python.org>; Ueli Schlaepfer
# Copyright: This module has been placed in the public domain.
"""
This package contains Docutils Reader modules.
"""
__docformat__ = 'reStructuredText'
import sys
from docutils import utils, parsers, Component
from docutils.transforms import universal
if sys.version_info < (2,5):
from docutils._compat import __import__
class Reader(Component):

    """
    Abstract base class for docutils Readers.

    Each reader module or package must export a subclass also called 'Reader'.

    The two steps of a Reader's responsibility are `scan()` and
    `parse()`.  Call `read()` to process a document.
    """

    component_type = 'reader'
    config_section = 'readers'

    def get_transforms(self):
        # Transforms shared by every reader, applied after parsing.
        return Component.get_transforms(self) + [
            universal.Decorations,
            universal.ExposeInternals,
            universal.StripComments,]

    def __init__(self, parser=None, parser_name=None):
        """
        Initialize the Reader instance.

        Several instance attributes are defined with dummy initial values.
        Subclasses may use these attributes as they wish.
        """

        self.parser = parser
        """A `parsers.Parser` instance shared by all doctrees.  May be left
        unspecified if the document source determines the parser."""

        if parser is None and parser_name:
            self.set_parser(parser_name)

        self.source = None
        """`docutils.io` IO object, source of input data."""

        self.input = None
        """Raw text input; either a single string or, for more complex cases,
        a collection of strings."""

    def set_parser(self, parser_name):
        """Set `self.parser` by name."""
        parser_class = parsers.get_parser_class(parser_name)
        self.parser = parser_class()

    def read(self, source, parser, settings):
        """Read from `source`, parse, and return the document tree."""
        self.source = source
        # Fall back to the parser supplied by the caller only when the
        # constructor did not already fix one.
        if not self.parser:
            self.parser = parser
        self.settings = settings
        self.input = self.source.read()
        self.parse()
        return self.document

    def parse(self):
        """Parse `self.input` into a document tree."""
        self.document = document = self.new_document()
        self.parser.parse(self.input, document)
        # Clear transient parse-position bookkeeping on the finished tree.
        document.current_source = document.current_line = None

    def new_document(self):
        """Create and return a new empty document tree (root node)."""
        document = utils.new_document(self.source.source_path, self.settings)
        return document
class ReReader(Reader):

    """
    A reader which rereads an existing document tree (e.g. a
    deserializer).

    Often used in conjunction with `writers.UnfilteredWriter`.
    """

    def get_transforms(self):
        # Do not add any transforms.  They have already been applied
        # by the reader which originally created the document.
        return Component.get_transforms(self)
# Mapping of alternative reader names to canonical module names.
_reader_aliases = {}

def get_reader_class(reader_name):
    """Return the Reader class from the `reader_name` module."""
    name = reader_name.lower()
    name = _reader_aliases.get(name, name)
    # Prefer a package-relative import (level=1); fall back to an absolute
    # import for reader modules living outside this package.
    try:
        module = __import__(name, globals(), locals(), level=1)
    except ImportError:
        module = __import__(name, globals(), locals(), level=0)
    return module.Reader
| gpl-2.0 |
axinging/chromium-crosswalk | tools/perf/page_sets/tough_energy_cases.py | 35 | 4626 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry import story
class ToughEnergyCasesPage(page_module.Page):
  """Base page for the power-draw benchmark; wires in shared credentials."""

  def __init__(self, url, page_set):
    super(ToughEnergyCasesPage, self).__init__(
        url=url, page_set=page_set, credentials_path = 'data/credentials.json')
class CodePenPage(ToughEnergyCasesPage):
  """A CodePen debug page, logged in with the 'codepen' credential entry."""

  def __init__(self, url, page_set):
    super(CodePenPage, self).__init__(url, page_set)
    self.credentials = 'codepen'
class GooglePage(ToughEnergyCasesPage):
  """A Google property page, logged in via the 'google2' credential entry."""

  def __init__(self, url, page_set):
    super(GooglePage, self).__init__(
        url=url,
        page_set=page_set)
    self.credentials = 'google2'

  def RunNavigateSteps(self, action_runner):
    super(GooglePage, self).RunNavigateSteps(action_runner)
    # Wait until Gmail's JS API object and the top bar exist, i.e. the app
    # has finished booting, before measurement begins.
    action_runner.WaitForJavaScriptCondition(
        'window.gmonkey !== undefined &&'
        'document.getElementById("gb") !== null')
class ToughEnergyCasesPageSet(story.StorySet):

  """Pages for measuring Chrome power draw."""

  def __init__(self):
    super(ToughEnergyCasesPageSet, self).__init__(
        archive_data_file='data/tough_energy_cases.json',
        cloud_storage_bucket=story.PARTNER_BUCKET)

    # TODO: this part of the test is disabled because it fails when
    # run with replay data and not with live data. See crbug.com/465692
    # for complete details.
    # Why: productivity, top google properties
    #self.AddStory(GooglePage('https://mail.google.com/mail/', self))

    # Disabled: pegs CPU too much to get meaningful results.
    # Why: Image constantly changed in the background, above the fold
    # self.AddStory(CodePenPage(
    #   'http://codepen.io/testificate364/debug/eIutG', self))

    # Disabled: pegs CPU too much to get meaningful results.
    # Why: Image constantly changed in the background, below the fold
    # self.AddStory(CodePenPage(
    #    'http://codepen.io/testificate364/debug/zcDdv', self))

    # Why: CSS Animation, above the fold
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/nrbDc', self))

    # Why: CSS Animation, below the fold
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/fhKCg', self))

    # Why: requestAnimationFrame, above the fold
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/paJhg',self))

    # Why: requestAnimationFrame, below the fold
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/yaosK', self))

    # Why: setTimeout animation, above the fold
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/DLbxg', self))

    # Why: setTimeout animation, below the fold
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/kFvpd', self))

    # Why: setInterval animation, above the fold
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/lEhyw', self))

    # Why: setInterval animation, below the fold
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/zhgBD', self))

    # Why: Animated GIF, above the fold
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/jetyn', self))

    # Why: Animated GIF, below the fold
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/Kvdxs', self))

    # Why: HTML5 video, above the fold
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/lJAiH', self))

    # Why: HTML5 video, below the fold
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/EFceH', self))

    # Disabled: pegs CPU too much to get meaningful results.
    # Why: PostMessage between frames, above the fold
    # self.AddStory(CodePenPage(
    #    'http://codepen.io/testificate364/debug/pgBHu', self))

    # Disabled: pegs CPU too much to get meaningful results.
    # Why: Asynchronous XHR continually running
    # self.AddStory(CodePenPage(
    #    'http://codepen.io/testificate364/debug/iwAfJ', self))

    # Disabled: pegs CPU too much to get meaningful results.
    # Why: Web Worker continually running
    # self.AddStory(CodePenPage(
    #    'http://codepen.io/testificate364/debug/ckItK', self))

    # Why: flash video
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/slBue', self))

    # Why: Blank page in the foreground
    self.AddStory(CodePenPage(
        'http://codepen.io/testificate364/debug/HdIgr', self))
| bsd-3-clause |
Pexego/sale_commission | __unported__/production_costs/wizard/product_percent_struct_costs.py | 3 | 3498 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""
Auxiliary object to mate products with structural costs
"""
from osv import osv, fields
import time
import decimal_precision as dp
import calendar
from tools.translate import _
class product_percent_struct_costs(osv.osv_memory):
    """
    Auxiliar object to associate percentually costs to products
    """

    def onchange_product_id(self, cr, uid, ids, prev_fyear_id, prev_period_id, product_id):
        """
        Gets total sales for this product, fiscal year and period.

        Returns an on_change dict setting both total_sales and
        forecasted_sales to the units sold in the selected range.
        """
        res ={}
        total_sales = 0.0
        sales_facade = self.pool.get('sale.order')
        fiscalyear = self.pool.get('account.fiscalyear').browse(cr, uid, prev_fyear_id)
        # Without an explicit period, fall back to the whole fiscal year.
        if not prev_period_id:
            from_date =fiscalyear.date_start
            to_date = fiscalyear.date_stop
        else:
            period = self.pool.get('account.period').browse(cr, uid, prev_period_id)
            from_date = period.date_start
            to_date = period.date_stop
        # Confirmed orders only (draft and cancelled are excluded).
        period_sales = sales_facade.browse(cr, uid, sales_facade.search(cr, uid, [('state','not in',['draft','cancel']),('date_order','<=', to_date), ('date_order','>=', from_date)]))
        for sale in period_sales:
            for line in sale.order_line:
                if line.product_id:
                    if line.product_id.id == product_id:
                        total_sales += line.product_uom_qty
        res['total_sales'] = total_sales
        res['forecasted_sales'] = total_sales
        return {'value': res}

    def onchange_total_sales(self, cr, uid, ids, total_sales):
        """
        Refresh forecasted values according total sales
        """
        res ={}
        if total_sales:
            res['forecasted_sales'] = total_sales
        else:
            res['forecasted_sales'] = 0.0
        return {'value': res}

    _name = 'product.percent.struct.costs'
    _description = 'Structural products cost'
    _columns = {
        'product_id': fields.many2one('product.product','Product',required=True),
        'total_sales': fields.float('Sold Units', digits_compute=dp.get_precision('Account'), required=True),
        'forecasted_sales': fields.float('Forecasted Sold Units', digits_compute=dp.get_precision('Account')),
        'wizard_id': fields.many2one('structural.costs.impact.wizard','Wizard'),
    }
    _defaults = {
        # Link back to the parent wizard when launched from its context.
        'wizard_id': lambda self, cr, uid, context: context.get('parent_id') and context['parent_id'] or False,
    }
product_percent_struct_costs() | agpl-3.0 |
mtrdesign/pylogwatch | pylogwatch/logwlib.py | 1 | 5917 | # Python 2.5 compatibility
from __future__ import with_statement
# Python version
import sys
# Enforce the minimum supported interpreter version.  Raising a bare string
# is not a valid exception (string exceptions were removed in Python 2.6 and
# raise a TypeError themselves); use a real exception type instead.
if sys.version_info < (2, 5):
    raise ImportError("Required python 2.5 or greater")
import os, sqlite3, itertools, time
from datetime import datetime
PROJECT_DIR = os.path.abspath(os.path.dirname(__file__))
proj_path = lambda x: os.path.abspath(os.path.join(PROJECT_DIR,x))
# Check if we are bundled together with raven, and add our dir to the pythonpath if we are
if os.path.exists(proj_path( 'raven')):
sys.path.append(PROJECT_DIR)
from raven import Client
def item_import(name):
    """Resolve a dotted path like ``pkg.mod.attr`` and return the attribute."""
    dot = name.rfind(".")
    module_name, attr = name[:dot], name[dot + 1:]
    module = __import__(module_name, globals(), locals(), [attr])
    return getattr(module, attr)
class PyLog (object):
    """Tracks read positions for a set of log files in a small sqlite
    database and feeds new, complete lines to :meth:`process_lines`
    (implemented by subclasses), handling log rotation along the way."""

    def __init__ (self, filenames, dbname = 'logw.db', filetable = 'file_cursor', eventtable = 'events'):
        # NOTE(review): the table-name parameters are stored but the SQL in
        # this class hardcodes 'file_cursor'/'events' - confirm before
        # relying on custom table names.
        self._filetable = filetable
        self._eventtable = eventtable
        self.conn = self.init_db(dbname)
        self.curs = self.conn.cursor()
        self.fnames = filenames

    def init_db (self, dbname):
        """Set up the DB: create the state tables if missing and return the
        open sqlite3 connection."""
        conn = sqlite3.connect (dbname)
        curs = conn.cursor()
        sql = 'create table if not exists file_cursor (filename TEXT PRIMARY KEY, inode INTEGER, lastbyte INTEGER, updated INTEGER)'
        curs.execute (sql)
        sql = 'create table if not exists events (event TEXT PRIMARY KEY, args TEXT, updated INTEGER)'
        curs.execute (sql)
        conn.commit()
        return conn

    def readlines (self, f, lastpos = 0):
        """Generate full lines from the file object ``f`` starting at byte
        offset ``lastpos``.

        A trailing partial line (no newline yet) is not yielded; the file
        position is rewound to its start so it can be re-read later.
        """
        self.save_fileinfo (f.name, os.stat(f.name)[1], lastpos)
        f.seek(lastpos)
        while True:
            start = f.tell()
            line = f.readline()
            if not line:
                return
            if not line.endswith('\n'):
                # Unread the incomplete line.  The original code seeked to
                # the *end* of the partial line and raised StopIteration
                # inside the generator (a RuntimeError under PEP 479).
                f.seek(start)
                return
            yield line

    def get_fileinfo (self, fname):
        """Return the stored ``(inode, lastbyte)`` for ``fname``, or
        ``(None, 0)`` when the file has not been seen yet."""
        self.curs.execute ('SELECT filename, inode, lastbyte from file_cursor where filename=?', [fname,])
        result = self.curs.fetchone()
        if result and len(result)==3:
            f, inode, lastbyte = result
            return inode,lastbyte
        else:
            return None,0

    def save_fileinfo (self, fname, inode, lastbyte):
        """Insert or replace the full cursor record for ``fname``."""
        self.curs.execute ("REPLACE into file_cursor (filename, inode, lastbyte, updated) \
            values (?,?,?,datetime())", [fname,inode, lastbyte ])
        self.conn.commit()
        return

    def update_bytes (self, fname, lastbyte):
        """
        Only updates the lastbyte property of a file, without touching the
        inode.  Meant for calling after each line is processed.

        Bug fix: the original body merely *defined* a nested function (so
        nothing ever ran) whose SQL ("UPDATE into ...") was invalid and whose
        parameter list did not match its placeholders.
        """
        self.curs.execute ("UPDATE file_cursor SET lastbyte=? WHERE filename=?",
            [lastbyte, fname])
        self.conn.commit()
        return

    def process_lines (self, fname, fileobject, lines):
        """Dummy line processor - should be overridden.  Signature matches
        how :meth:`run` invokes it (and how PyLogConf overrides it)."""
        raise NotImplementedError

    def open_rotated_version(self, fname):
        """Try the common rotated names for ``fname`` and return an open file
        object for the first that exists, or None.

        NOTE(review): the '.1.gz' candidate is opened as a plain file - the
        gzip payload is not decompressed here; confirm intended.
        """
        sufxs = ['.1','.1.gz','.0']
        for sufx in sufxs:
            newname = fname + sufx
            if not os.path.exists (newname):
                continue
            try:
                f = open(newname)
                return f
            except:
                continue

    def run (self):
        """Process any new lines in every tracked file, following rotation
        (inode change) by first draining the rotated-away file."""
        for fn in self.fnames:
            if not os.path.exists (fn):
                continue
            newlines = []
            rotated = None
            lastinode, lastbyte = self.get_fileinfo (fn)
            if lastbyte and not lastinode == os.stat(fn)[1]:
                # The inode changed: finish reading the rotated file from the
                # saved offset, then restart the live file from byte 0.
                rotated = self.open_rotated_version(fn)
                if rotated:
                    newlines = self.readlines (rotated, lastbyte)
                    lastbyte = 0
                    self.process_lines (fn, rotated, newlines)
            try:
                f = open(fn)
            except:
                continue
            self.process_lines (fn, f, self.readlines (f, lastbyte))
            lastbyte = f.tell()
            lastinode = os.stat(fn)[1]
            f.close()
            self.save_fileinfo (fn, lastinode, lastbyte)
            if rotated:
                rotated.close()
class PyLogConf (PyLog):
    def __init__ (self, conf):
        """
        Initialize object based on the provided configuration module.

        ``conf`` is expected to provide ``RAVEN['dsn']``, ``FILE_FORMATTERS``
        (mapping of log file name -> list/tuple of dotted formatter class
        paths) and ``DEBUG``.
        """
        self.conf = conf
        self.client = Client (conf.RAVEN['dsn'])
        self.formatters = {}
        for k,v in self.conf.FILE_FORMATTERS.iteritems():
            if isinstance(v,str):
                raise ValueError ('Please use a list or a tuple for the file formatters values')
            # Instantiate each formatter class named by its dotted path.
            self.formatters[k] = [item_import(i)() for i in v]
        # The state database lives next to the configuration module.
        dbname = os.path.join(os.path.dirname(conf.__file__),'pylogwatch.db')
        return super(PyLogConf, self).__init__ (self.conf.FILE_FORMATTERS.keys(), dbname = dbname)

    def process_lines (self, fname, fileobject, lines):
        """Main workhorse. Called with the filename that is being logged, the
        open file object and an iterable of new complete lines."""
        for line in lines:
            paramdict = {}
            # '%' is doubled because the message is treated as a
            # printf-style format string downstream.
            data = {'event_type':'Message', 'message': line.replace('%','%%'), 'data' :{'logger':fname}}
            for fobj in self.formatters[fname]:
                fobj.format_line(line, data, paramdict)
            if not data.pop('_do_not_send', False): # Skip lines that have the '_do_not_send' key
                if paramdict:
                    # Parameters are ordered by their sorted keys.
                    data['params'] = tuple([paramdict[i] for i in sorted(paramdict.keys())])
                if self.conf.DEBUG:
                    print data
                self.client.capture(**data)
            # Persist progress after each line so a crash does not re-send it.
            self.update_bytes(fname, fileobject.tell())
| gpl-3.0 |
dkao-cb/perfrunner | perfSanity/scripts/query_benchmark_beta.py | 2 | 16516 | import requests
import json
import re
import fileinput
from optparse import OptionParser
import subprocess
import os
import sys
import time
from perfrunner.settings import ClusterSpec
from perfrunner.utils.install import CouchbaseInstaller
from perfrunner.utils.cluster import TestConfig, ClusterManager
from logger import logger
import urllib3
from perfrunner.helpers.rest import RestHelper
import paramiko
#from couchbase.bucket import Bucket
#import couchbase
from couchbase import Couchbase
from couchbase.exceptions import CouchbaseError
"""
# An evolving thing - takes as input:
- a build version
- a spec file
What it does:
- install the spec file on the build version
- activate the beer sample bucket
- run the tests from Keshav -flag an error if they deviate
"""
UPPER_BOUND = 1.10
LOWER_BOUND = 0.90
ARGS = None
# global variables - yuck
version = None
runStartTime = None
couchbaseConnection = None
def get_time_in_millisec(t):
    """Convert a query-service timing string into milliseconds.

    Supported forms: '12ms', u'250µs', '1m30.5s' (minutes+seconds) and
    '3.1s'.  Prints a diagnostic and returns None for unparsable input.
    """
    try:
        time_unit = t[-2:]
        if time_unit == 'ms':
            return float(t[:-2])
        elif time_unit == u"\u00b5s":  # microseconds
            return float(t[:-2]) / 1000
        elif 'm' in t and 'ms' not in t:
            # minutes+seconds form such as '1m30.5s'
            t1 = t.split('m')
            return int(t1[0]) * 60000 + float(t1[1][:-1]) * 1000
        elif time_unit[0].isdigit() and time_unit[1] == 's':
            # Plain seconds, e.g. '3.1s'.  Bug fix: the original was missing
            # the () on isdigit, so the method object made the check always
            # truthy.
            return float(t[:-1]) * 1000
        else:
            print('********unknown time unit %s' % t)
    except Exception:
        # Narrowed from a bare except: (which also swallowed SystemExit /
        # KeyboardInterrupt).
        print('bad time %s' % t)
def generate_query(stmt):
    """Decorate the query dict ``stmt`` with the standard run settings:
    serial execution plus any globally configured positional args."""
    stmt.update(max_parallelism=1)
    if ARGS:
        stmt.update(args=ARGS)
    return stmt
def generate_prepared_query(conn, q):
    """PREPARE statement ``q`` on the query service and return a query dict
    that executes the prepared plan by name."""
    prepare_stmt = {'statement': 'PREPARE ' + q, 'max_parallelism': 1}
    response = conn.request('POST', '/query/service', fields=prepare_stmt, encode_multipart=False)
    body = json.loads(response.data.decode('utf8'))
    prepared_name = str(body['results'][0]['name'])
    return generate_query({'prepared': '"' + prepared_name + '"'})
def run_query(conn, request_desc, debug=False):
    """Execute request_desc['query'] as a prepared statement
    request_desc['execution_count'] times and check the average elapsed /
    execution times against the expected values, within the global
    LOWER_BOUND..UPPER_BOUND tolerance.  Returns True when within bounds.
    """
    succeeded = True
    query = generate_prepared_query(conn, request_desc['query'])
    total_elapsed_time = 0.0
    total_execution_time = 0.0
    for i in range(0, request_desc['execution_count']):
        response = conn.request('POST', '/query/service', fields=query, encode_multipart=False)
        response.read(cache_content=False)
        body = json.loads(response.data.decode('utf8'))
        total_elapsed_time = total_elapsed_time + get_time_in_millisec(body['metrics']['elapsedTime'])
        total_execution_time = total_execution_time + get_time_in_millisec(body['metrics']['executionTime'])
    avg_elapsed = float('{0:.2f}'.format(total_elapsed_time / request_desc['execution_count']))
    avg_execution = float('{0:.2f}'.format(total_execution_time / request_desc['execution_count']))
    log = 'Query {0} - average elapsed {1}, average execution time {2}.'.format(request_desc['query'], avg_elapsed,
                                                                               avg_execution)
    # Bug fix: the failure messages previously reported the *measured*
    # average as the "expected" value; report the configured expectation
    # instead, consistent with execute_commands().
    if avg_elapsed > (UPPER_BOUND * request_desc['expected_elapsed_time']):
        log += ' Elapsed too long - expected {0}.'.format(request_desc['expected_elapsed_time'])
        succeeded = False
    if avg_execution > (UPPER_BOUND * request_desc['expected_execution_time']):
        log += ' Execution too long - expected {0}.'.format(request_desc['expected_execution_time'])
        succeeded = False
    if avg_elapsed < (LOWER_BOUND * request_desc['expected_elapsed_time']):
        log += ' Elapsed too short - expected {0}.'.format(request_desc['expected_elapsed_time'])
        succeeded = False
    if avg_execution < (LOWER_BOUND * request_desc['expected_execution_time']):
        log += ' Execution too short - expected {0}.'.format(request_desc['expected_execution_time'])
        succeeded = False
    if succeeded:
        logger.info(log)
    else:
        logger.error(log)
    return succeeded
def execute_commands(conn, command_list, rest, host_ip, testName):
    """Run a list of index/query command dicts, compare the measured average
    elapsed/execution times against the expectations (within LOWER_BOUND..
    UPPER_BOUND) and persist one result document per command to the
    reporting bucket.  Returns True when every command passed."""
    global couchbaseConnection, version, runStartTime
    failure_count = 0
    for command in command_list:
        # print 'command', command
        command_succeeded = True
        total_elapsed_time = 0.0
        total_execution_time = 0.0
        if 'index' in command:
            # DDL statements are executed once through the REST helper.
            key = 'index'
            response = rest.exec_n1ql_stmnt(host_ip, command['index'])
            body = response.json()  # json.loads(response.data.decode('utf8'))
            avg_elapsed = total_elapsed_time + get_time_in_millisec(body['metrics']['elapsedTime'])
            avg_execution = total_execution_time + get_time_in_millisec(body['metrics']['executionTime'])
        elif 'query' in command:
            # Queries are prepared once, then executed execution_count times.
            key = 'query'
            query = generate_prepared_query(conn, command['query'])
            for i in range(0, command['execution_count']):
                response = conn.request('POST', '/query/service', fields=query, encode_multipart=False)
                response.read(cache_content=False)
                body = json.loads(response.data.decode('utf8'))
                total_elapsed_time = total_elapsed_time + get_time_in_millisec(body['metrics']['elapsedTime'])
                total_execution_time = total_execution_time + get_time_in_millisec(body['metrics']['executionTime'])
            avg_elapsed = float('{0:.2f}'.format(total_elapsed_time / command['execution_count']))
            avg_execution = float('{0:.2f}'.format(total_execution_time / command['execution_count']))
        log = key + ' {0} - average elapsed {1}, average execution time {2}.'.format(command[key], avg_elapsed,
                                                                                    avg_execution)
        if avg_elapsed > (UPPER_BOUND * command['expected_elapsed_time']):
            log += ' Elapsed too long - expected {0}.'.format(command['expected_elapsed_time'])
            command_succeeded = False
        elif avg_elapsed < (LOWER_BOUND * command['expected_elapsed_time']):
            log += ' Elapsed too short - expected {0}.'.format(command['expected_elapsed_time'])
            command_succeeded = False
        if avg_execution > (UPPER_BOUND * command['expected_execution_time']):
            log += ' Execution too long - expected {0}.'.format(command['expected_execution_time'])
            command_succeeded = False
        elif avg_execution < (LOWER_BOUND * command['expected_execution_time']):
            log += ' Execution too short - expected {0}.'.format(command['expected_execution_time'])
            command_succeeded = False
        if command_succeeded:
            logger.info(log)
        else:
            failure_count = failure_count + 1
            logger.error(log)
        # Persist the result for trend reporting, keyed by run/test/metric.
        # NOTE(review): 'index' commands carry no 'queryDesc' key, so this
        # lookup appears to assume only 'query' commands reach here - an
        # index command would raise KeyError; confirm intended placement.
        # (Note 'key' is also reused here, clobbering the 'index'/'query'
        # discriminator above.)
        val = {
            "actualValue": avg_execution,
            "expectedValue": command['expected_execution_time'],
            "build": version,
            "runStartTime": runStartTime,
            "pass": command_succeeded,
            "testMetric": command['queryDesc'],
            "testStartTime": time.strftime("%m/%d/%y-%H:%M:%S", time.strptime(time.ctime() )),
            "testName": testName
        }
        key = runStartTime + '-' + testName +'-' + command['queryDesc']
        couchbaseConnection.add(key, val)
    return failure_count == 0
def do_beer_queries(conn, rest, host_ip, remote):
    """Load the beer-sample bucket, build the supporting secondary indexes
    and run the beer query suite via execute_commands().  Returns True when
    every query stays within its expected timing bounds."""
    remote.install_beer_samples()
    rest.exec_n1ql_stmnt(host_ip, 'CREATE INDEX city ON `beer-sample`(city);')
    rest.exec_n1ql_stmnt(host_ip, 'CREATE INDEX style ON `beer-sample`(style);')
    # Each entry: the query, its expected average timings (ms) and how many
    # times it is executed to compute the average.
    command_list = []
    command_list.append(
        {'queryDesc':'select *', 'query': 'SELECT * FROM `beer-sample` USE KEYS["21st_amendment_brewery_cafe-amendment_pale_ale"];',
         'expected_elapsed_time': 0.71, 'expected_execution_time': 0.65, 'execution_count': 10000})
    command_list.append({'queryDesc':'select * with where', 'query': 'select * from `beer-sample` where city = "Lawton";', 'expected_elapsed_time': 1.42,
                         'expected_execution_time': 1.42, 'execution_count': 10000})
    command_list.append(
        {'queryDesc':'select * where with or', 'query': 'select abv, brewery_id from `beer-sample` where style = "Imperial or Double India Pale Ale";',
         'expected_elapsed_time': 11,
         'expected_execution_time': 11, 'execution_count': 10000})
    command_list.append(
        {'queryDesc':'select count with where', 'query': 'select COUNT(*) from `beer-sample` where style = "Imperial or Double India Pale Ale";',
         'expected_elapsed_time': 3.4, 'expected_execution_time': 3.4, 'execution_count': 10000})
    command_list.append(
        {'queryDesc':'select sum where', 'query': 'select SUM(abv) from `beer-sample` where style = "Imperial or Double India Pale Ale";',
         'expected_elapsed_time': 11, 'expected_execution_time': 11, 'execution_count': 10000})
    command_list.append({
        'queryDesc':'select fields order by', 'query': 'select abv, brewery_id from `beer-sample` where style = "Imperial or Double India Pale Ale" order by abv;',
        'expected_elapsed_time': 14, 'expected_execution_time': 14, 'execution_count': 10000})
    return execute_commands(conn, command_list, rest, host_ip, 'beer-queries')
def do_airline_benchmarks(conn, rest, host_ip, remote, cluster_spec):
    """Create a fresh 'ods' bucket, restore the canned airline data set onto
    the server via ssh/cbrestore, then build the indexes and run the three
    'United' benchmark queries through execute_commands()."""
    if True:  # NOTE(review): apparent leftover toggle for skipping the load
        resp = rest.create_bucket(host_ip + ':8091', 'ods', 1000, 0, 0, 'valueOnly', 4, None)
        time.sleep(10)
        # Restore the data set directly on the server node over ssh.
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        try:
            ssh.connect(host_ip, username=cluster_spec.ssh_credentials[0], password=cluster_spec.ssh_credentials[1])
        except paramiko.SSHException:
            print "ssh Connection Failed"
            return False
        cmd = '/opt/couchbase/bin/cbrestore /root/airline-test-data-updated couchbase://127.0.0.1:8091 -b ods -B ods -u {0} -p {1}'.format(
            rest.rest_username, rest.rest_password)
        stdin, stdout, stderr = ssh.exec_command(cmd)
        for line in stdout.readlines():
            pass  # drain output so the remote command completes
        ssh.close()
    command_list = []
    command_list.append(
        {'index': 'create primary index on ods;', 'expected_elapsed_time': 27000, 'expected_execution_time': 27000})
    command_list.append(
        {'index': 'CREATE INDEX IDX_ODS_TAIL_NBR ON ods(`TAIL_NBR`) WHERE (`TYPE` = "OPS_FLT_LEG") USING GSI;',
         'expected_elapsed_time': 38000, 'expected_execution_time': 38000})
    command_list.append(
        {'queryDesc':'Q1', 'query': "SELECT * FROM ods WHERE TYPE = 'OPS_FLT_LEG' AND TAIL_NBR = 'N518LR' ORDER BY GMT_EST_DEP_DTM ;",
         'expected_elapsed_time': 6.1, 'expected_execution_time': 6.1, 'execution_count': 10})
    # query 2
    big_long_query2 = """
select
pilot.FILEN as pilot_filen,
min([p.PRFL_ACT_GMT_DEP_DTM, meta(p).id]) PRFL_ACT_GMT_DEP_DTM
from
( SELECT x.*
FROM ods x
where x.TYPE="CREW_ON_FLIGHT" AND
(
x.PRFL_ACT_GMT_DEP_DTM <= "2015-07-23T18:49:00Z"
)
) as p unnest array_concat(p.PILOT,p.CREW) pilot
WHERE
pilot.FILEN in (
select raw pilot1.FILEN
from ods f use keys [ "UA_22-07-2015_EWR_IAD_6049" ]
unnest array_concat(f.PILOT,f.CREW) pilot1
)
group by pilot.FILEN
UNION ALL
select
pilot.FILEN as pilot_filen,
min([p.GMT_EST_DEP_DTM, meta(p).id]) GMT_EST_DEP_DTM
from
(
SELECT y.*
FROM ods y
where y.TYPE="CREW_ON_FLIGHT" AND
(
y.GMT_EST_DEP_DTM <= "2015-07-23T18:49:00Z"
)
) as p unnest array_concat(y.PILOT,y.CREW) pilot
where
pilot.FILEN in (
select raw pilot1.FILEN
from ods f use keys [ "UA_22-07-2015_EWR_IAD_6049" ]
unnest array_concat(f.PILOT,f.CREW) pilot1
)"""
    command_list.append({
        'index': 'CREATE INDEX IDX_GMT_EST_DEP_DTM ON ods(`GMT_EST_DEP_DTM`) WHERE (`TYPE`="CREW_ON_FLIGHT") USING GSI;',
        'expected_elapsed_time': 38000, 'expected_execution_time': 38000})
    command_list.append({
        'index': 'CREATE INDEX IDX_PRFL_ACT_GMT_DEP_DTM ON ods(`PRFL_ACT_GMT_DEP_DTM`) WHERE (`TYPE`="CREW_ON_FLIGHT") USING GSI;',
        'expected_elapsed_time': 41000, 'expected_execution_time': 41000})
    command_list.append(
        {'queryDesc':'Q2', 'query': big_long_query2, 'expected_elapsed_time': 536, 'expected_execution_time': 536, 'execution_count': 10})
    # query 3
    big_long_index3 = """
create index idx_query3 on ods(INBND_LCL_EST_ARR_DTM)
where TYPE="AIRCRAFT_ROUTING"
and substr(INBND_LCL_EST_ARR_DTM, 11) < "20:00:00"
and case when OUTBND_LCL_EST_DEP_DTM is missing then true else substr(OUTBND_LCL_EST_DEP_DTM, 11) > "08:00:00" end;
"""
    big_long_query3 = """
SELECT INBND_DEST_ARPT_CD
from ods
where TYPE = "AIRCRAFT_ROUTING"
and INBND_LCL_EST_ARR_DTM > "2015-07-17"
and INBND_LCL_EST_ARR_DTM < "2015-07-25"
and substr(INBND_LCL_EST_ARR_DTM, 11) < "20:00:00"
and case when OUTBND_LCL_EST_DEP_DTM is missing then true else substr(OUTBND_LCL_EST_DEP_DTM, 11) > "08:00:00" end
order by INBND_DEST_ARPT_CD
limit 10;
"""
    command_list.append({'index': big_long_index3, 'expected_elapsed_time': 64000, 'expected_execution_time': 64000})
    command_list.append({'queryDesc':'Q3', 'query': big_long_query3, 'expected_elapsed_time': 2500, 'expected_execution_time': 2500,
                         'execution_count': 10})
    return execute_commands(conn, command_list, rest, host_ip, 'United-Queries')
def main():
    """Entry point: install the requested build on the cluster described by
    the spec file, configure it, run the airline and beer query benchmarks
    and exit non-zero when either fails (always terminates via sys.exit)."""
    global couchbaseConnection, version, runStartTime
    usage = '%prog -v version -c cluster-spec'
    parser = OptionParser(usage)
    parser.add_option('-v', '--version', dest='version')
    parser.add_option('-c', dest='cluster_spec_fname',
                      help='path to cluster specification file',
                      metavar='cluster.spec')
    parser.add_option('--verbose', dest='verbose', action='store_true',
                      help='enable verbose logging')
    parser.add_option('-o', dest='toy',
                      help='optional toy build ID', metavar='couchstore')
    parser.add_option('-t', dest='test_config_fname',
                      help='path to test configuration file',
                      metavar='my_test.test')
    parser.add_option('-e', '--edition', dest='cluster_edition', default='enterprise',
                      help='the cluster edition (community or enterprise)')
    parser.add_option('--url', dest='url', default=None,
                      help='The http URL to a Couchbase RPM that should be'
                           ' installed. This overrides the URL to be installed.')
    options, args = parser.parse_args()
    cluster_spec = ClusterSpec()
    cluster_spec.parse(options.cluster_spec_fname, args)
    test_config = TestConfig()
    test_config.parse(options.test_config_fname, args)
    cm = ClusterManager(cluster_spec, test_config, options.verbose)
    # Reporting sink for the per-query pass/fail documents written by
    # execute_commands().
    couchbaseConnection = Couchbase.connect(bucket='Daily-Performance', host='172.23.105.177')
    runStartTime = time.strftime("%m/%d/%y-%H:%M:%S", time.strptime(time.ctime() ))
    version = options.version
    #bucket = Bucket('couchbase://'+ '172.23.105.177:8091/Daily-Performance')
    installer = CouchbaseInstaller(cluster_spec, options)
    if True:  # NOTE(review): leftover toggle for skipping installation
        installer.install()
    # NOTE(review): source indentation was lost - the remote-only tuning is
    # assumed to end at disable_moxi(); confirm against perfrunner upstream.
    if cm.remote:
        cm.tune_logging()
        cm.restart_with_sfwi()
        cm.restart_with_alternative_num_vbuckets()
        cm.restart_with_alternative_num_cpus()
        cm.restart_with_tcmalloc_aggressive_decommit()
        cm.disable_moxi()
    cm.configure_internal_settings()
    cm.set_data_path()
    cm.set_services()
    cm.set_mem_quota()
    cm.set_index_mem_quota()
    cm.set_auth()
    time.sleep(30)
    """host = cluster_spec.yield_masters().next()"""
    host_ip = cluster_spec.yield_masters().next().split(':')[0]
    URL = 'http://' + host_ip + ':8093'
    logger.info('logging the URL: {}'.format(URL))
    conn = urllib3.connection_from_url(URL)
    rest = RestHelper(cluster_spec)
    airline_result = do_airline_benchmarks(conn, rest, host_ip, installer.remote, cluster_spec)
    beer_result = do_beer_queries(conn, rest, host_ip, installer.remote)
    print 'beer_result is', beer_result
    # Exit status 0 only when both suites passed.
    sys.exit(not (airline_result and beer_result))
if __name__ == "__main__":
    # main() always terminates via sys.exit(...), so calling it directly is
    # sufficient.  The previous "if not main(): sys.exit(1)" was unreachable
    # and, had main() ever returned normally (None), would have exited with
    # status 1 even on success.
    main()
| apache-2.0 |
UManPychron/pychron | pychron/hardware/watlow/headless_ezzone.py | 2 | 1248 | # ===============================================================================
# Copyright 2016 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
# ============= standard library imports ========================
# ============= local library imports ==========================
from __future__ import absolute_import
from pychron.hardware.core.headless.core_device import HeadlessCoreDevice
from pychron.hardware.watlow.base_ezzone import BaseWatlowEZZone
class HeadlessWatlowEZZone(BaseWatlowEZZone, HeadlessCoreDevice):
    """Headless (no-UI) variant of the Watlow EZ-Zone controller driver.

    All behavior comes from combining BaseWatlowEZZone with
    HeadlessCoreDevice; no overrides are needed here.
    """
    pass
# ============= EOF =============================================
| apache-2.0 |
sekikn/incubator-airflow | tests/providers/google/cloud/operators/test_datastore_system.py | 10 | 1815 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import pytest
from tests.providers.google.cloud.utils.gcp_authenticator import GCP_DATASTORE_KEY
from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
BUCKET = os.environ.get("GCP_DATASTORE_BUCKET", "datastore-system-test")
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_DATASTORE_KEY)
class GcpDatastoreSystemTest(GoogleSystemTest):
    """System tests running the example Datastore DAGs against a real GCP
    project; a scratch GCS bucket is created and torn down around each test."""

    @provide_gcp_context(GCP_DATASTORE_KEY)
    def setUp(self):
        super().setUp()
        # Bucket used by the example DAGs for Datastore export/import.
        self.create_gcs_bucket(BUCKET, location="europe-north1")

    @provide_gcp_context(GCP_DATASTORE_KEY)
    def tearDown(self):
        self.delete_gcs_bucket(BUCKET)
        super().tearDown()

    @provide_gcp_context(GCP_DATASTORE_KEY)
    def test_run_example_dag(self):
        self.run_dag('example_gcp_datastore', CLOUD_DAG_FOLDER)

    @provide_gcp_context(GCP_DATASTORE_KEY)
    def test_run_example_dag_operations(self):
        self.run_dag('example_gcp_datastore_operations', CLOUD_DAG_FOLDER)
| apache-2.0 |
markwal/OctoPrint | src/octoprint/slicing/__init__.py | 6 | 22876 | # coding=utf-8
"""
In this module the slicing support of OctoPrint is encapsulated.
.. autoclass:: SlicingProfile
:members:
.. autoclass:: TemporaryProfile
:members:
.. autoclass:: SlicingManager
:members:
"""
from __future__ import absolute_import
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import os
import octoprint.plugin
import octoprint.events
from octoprint.settings import settings
from .exceptions import *
class SlicingProfile(object):
    """
    Bundles a slicing profile's metadata with its slicer specific profile data.

    Arguments:
        slicer (str): Identifier of the slicer this profile belongs to.
        name (str): Identifier of this slicing profile.
        data (object): Profile data; the structure depends on the slicer.
        display_name (str): Human readable name for this slicing profile.
        description (str): Description of this slicing profile.
    """

    def __init__(self, slicer, name, data, display_name=None, description=None):
        # Plain attribute storage - no validation is performed here.
        self.slicer, self.name, self.data = slicer, name, data
        self.display_name, self.description = display_name, description
class TemporaryProfile(object):
    """
    Context manager producing a temporary on-disk slicing profile for one
    slicing job, derived from a :class:`SlicingProfile` with optional
    ``overrides`` applied through the supplied ``save_profile`` callable.

    Entering the context creates a temp file, writes the profile into it via
    ``save_profile(path, profile, overrides=overrides)`` and returns the
    path; leaving the context removes the file again (best effort).
    """

    def __init__(self, save_profile, profile, overrides=None):
        self.save_profile = save_profile
        self.profile = profile
        self.overrides = overrides

    def __enter__(self):
        import tempfile
        handle = tempfile.NamedTemporaryFile(prefix="slicing-profile-temp-", suffix=".profile", delete=False)
        handle.close()
        self.temp_path = handle.name
        # Delegate the actual (override-aware) serialization to the caller.
        self.save_profile(self.temp_path, self.profile, overrides=self.overrides)
        return self.temp_path

    def __exit__(self, exc_type, exc_value, exc_tb):
        import os
        try:
            os.remove(self.temp_path)
        except:
            # Best-effort cleanup - a leftover temp file is harmless.
            pass
class SlicingManager(object):
"""
The :class:`SlicingManager` is responsible for managing available slicers and slicing profiles.
Arguments:
profile_path (str): Absolute path to the base folder where all slicing profiles are stored.
printer_profile_manager (~octoprint.printer.profile.PrinterProfileManager): :class:`~octoprint.printer.profile.PrinterProfileManager`
instance to use for accessing available printer profiles, most importantly the currently selected one.
"""
    def __init__(self, profile_path, printer_profile_manager):
        self._profile_path = profile_path  # base folder for stored slicing profiles
        self._printer_profile_manager = printer_profile_manager
        # slicer type identifier -> SlicerPlugin implementation
        self._slicers = dict()
        # NOTE(review): _slicer_names appears unused in the visible code -
        # confirm before removing.
        self._slicer_names = dict()
    def initialize(self):
        """
        Initializes the slicing manager by loading and initializing all available
        :class:`~octoprint.plugin.SlicerPlugin` implementations (simply
        delegates to :meth:`reload_slicers`).
        """
        self.reload_slicers()
def reload_slicers(self):
"""
Retrieves all registered :class:`~octoprint.plugin.SlicerPlugin` implementations and registers them as
available slicers.
"""
plugins = octoprint.plugin.plugin_manager().get_implementations(octoprint.plugin.SlicerPlugin)
slicers = dict()
for plugin in plugins:
slicers[plugin.get_slicer_properties()["type"]] = plugin
self._slicers = slicers
    @property
    def slicing_enabled(self):
        """
        Returns:
            boolean: True if there is at least one configured slicer available, False otherwise.
        """
        # configured_slicers is built with map() - a list under Python 2,
        # which this codebase targets (presumably; confirm for Python 3).
        return len(self.configured_slicers) > 0
    @property
    def registered_slicers(self):
        """
        Returns:
            list of str: Identifiers of all registered slicers, whether or
            not they are configured yet.
        """
        return self._slicers.keys()
@property
def configured_slicers(self):
"""
Returns:
list of str: Identifiers of all available configured slicers.
"""
return map(lambda slicer: slicer.get_slicer_properties()["type"], filter(lambda slicer: slicer.is_slicer_configured(), self._slicers.values()))
@property
def default_slicer(self):
"""
Retrieves the default slicer.
Returns:
str: The identifier of the default slicer or ``None`` if the default slicer is not registered in the
system.
"""
slicer_name = settings().get(["slicing", "defaultSlicer"])
if slicer_name in self.registered_slicers:
return slicer_name
else:
return None
def get_slicer(self, slicer, require_configured=True):
"""
Retrieves the slicer named ``slicer``. If ``require_configured`` is set to True (the default) an exception
will be raised if the slicer is not yet configured.
Arguments:
slicer (str): Identifier of the slicer to return
require_configured (boolean): Whether to raise an exception if the slicer has not been configured yet (True,
the default), or also return an unconfigured slicer (False).
Raises:
~octoprint.slicing.exceptions.UnknownSlicer: The ``slicer`` is unknown.
~octoprint.slicing.exceptions.SlicerNotConfigured: The ``slicer`` is not yet configured and ``require_configured`` was set to True.
"""
if not slicer in self._slicers:
raise UnknownSlicer(slicer)
if require_configured and not self._slicers[slicer].is_slicer_configured():
raise SlicerNotConfigured(slicer)
return self._slicers[slicer]
    def slice(self, slicer_name, source_path, dest_path, profile_name, callback,
              callback_args=None, callback_kwargs=None, overrides=None,
              on_progress=None, on_progress_args=None, on_progress_kwargs=None, printer_profile_id=None, position=None):
        """
        Slices ``source_path`` to ``dest_path`` using slicer ``slicer_name`` and slicing profile ``profile_name``.
        Since slicing happens asynchronously, ``callback`` will be called when slicing has finished (either successfully
        or not), with ``callback_args`` and ``callback_kwargs`` supplied.

        If ``callback_args`` is left out, an empty argument list will be assumed for the callback. If ``callback_kwargs``
        is left out, likewise an empty keyword argument list will be assumed for the callback. Note that in any case
        the callback *must* support being called with the following optional keyword arguments:

        _analysis
            If the slicer returned analysis data of the created machine code as part of its slicing result, this keyword
            argument will contain that data.
        _error
            If there was an error while slicing this keyword argument will contain the error message as returned from
            the slicer.
        _cancelled
            If the slicing job was cancelled this keyword argument will be set to True.

        Additionally callees may specify ``overrides`` for the specified slicing profile, e.g. a different extrusion
        temperature than defined in the profile or a different layer height.

        With ``on_progress``, ``on_progress_args`` and ``on_progress_kwargs``, callees may specify a callback plus
        arguments and keyword arguments to call upon progress reports from the slicing job. The progress callback will
        be called with a keyword argument ``_progress`` containing the current slicing progress as a value between 0
        and 1 plus all additionally specified args and kwargs.

        If a different printer profile than the currently selected one is to be used for slicing, its id can be provided
        via the keyword argument ``printer_profile_id``.

        If the ``source_path`` is to be a sliced at a different position than the print bed center, this ``position`` can
        be supplied as a dictionary defining the ``x`` and ``y`` coordinate in print bed coordinates of the model's center.

        Arguments:
            slicer_name (str): The identifier of the slicer to use for slicing.
            source_path (str): The absolute path to the source file to slice.
            dest_path (str): The absolute path to the destination file to slice to.
            profile_name (str): The name of the slicing profile to use.
            callback (callable): A callback to call after slicing has finished.
            callback_args (list or tuple): Arguments of the callback to call after slicing has finished. Defaults to
                an empty list.
            callback_kwargs (dict): Keyword arguments for the callback to call after slicing has finished, will be
                extended by ``_analysis``, ``_error`` or ``_cancelled`` as described above! Defaults to an empty
                dictionary.
            overrides (dict): Overrides for the printer profile to apply.
            on_progress (callable): Callback to call upon slicing progress.
            on_progress_args (list or tuple): Arguments of the progress callback. Defaults to an empty list.
            on_progress_kwargs (dict): Keyword arguments of the progress callback, will be extended by ``_progress``
                as described above! Defaults to an empty dictionary.
            printer_profile_id (str): Identifier of the printer profile for which to slice, if another than the
                one currently selected is to be used.
            position (dict): Dictionary containing the ``x`` and ``y`` coordinate in the print bed's coordinate system
                of the sliced model's center. If not provided the model will be positioned at the print bed's center.
                Example: ``dict(x=10,y=20)``.

        Raises:
            ~octoprint.slicing.exceptions.UnknownSlicer: The slicer specified via ``slicer_name`` is unknown.
            ~octoprint.slicing.exceptions.SlicerNotConfigured: The slice specified via ``slicer_name`` is not configured yet.
        """
        if callback_args is None:
            callback_args = ()
        if callback_kwargs is None:
            callback_kwargs = dict()

        if not slicer_name in self.configured_slicers:
            if not slicer_name in self.registered_slicers:
                error = "No such slicer: {slicer_name}".format(**locals())
                exc = UnknownSlicer(slicer_name)
            else:
                error = "Slicer not configured: {slicer_name}".format(**locals())
                exc = SlicerNotConfigured(slicer_name)
            # Notify through the callback as well as raising, so callers that
            # only listen on the callback still learn about the failure.
            callback_kwargs.update(dict(_error=error, _exc=exc))
            callback(*callback_args, **callback_kwargs)
            raise exc

        slicer = self.get_slicer(slicer_name)

        printer_profile = None
        if printer_profile_id is not None:
            printer_profile = self._printer_profile_manager.get(printer_profile_id)
        if printer_profile is None:
            # Fall back to the currently selected (or default) printer profile.
            printer_profile = self._printer_profile_manager.get_current_or_default()

        def slicer_worker(slicer, model_path, machinecode_path, profile_name, overrides, printer_profile, position, callback, callback_args, callback_kwargs):
            # Runs on a daemon thread; always reports back via the callback.
            try:
                slicer_name = slicer.get_slicer_properties()["type"]
                with self._temporary_profile(slicer_name, name=profile_name, overrides=overrides) as profile_path:
                    ok, result = slicer.do_slice(
                        model_path,
                        printer_profile,
                        machinecode_path=machinecode_path,
                        profile_path=profile_path,
                        position=position,
                        on_progress=on_progress,
                        on_progress_args=on_progress_args,
                        on_progress_kwargs=on_progress_kwargs
                    )

                if not ok:
                    callback_kwargs.update(dict(_error=result))
                elif result is not None and isinstance(result, dict) and "analysis" in result:
                    callback_kwargs.update(dict(_analysis=result["analysis"]))
            except SlicingCancelled:
                callback_kwargs.update(dict(_cancelled=True))
            finally:
                callback(*callback_args, **callback_kwargs)

        import threading
        slicer_worker_thread = threading.Thread(target=slicer_worker,
                                                args=(slicer, source_path, dest_path, profile_name, overrides, printer_profile, position, callback, callback_args, callback_kwargs))
        slicer_worker_thread.daemon = True
        slicer_worker_thread.start()
def cancel_slicing(self, slicer_name, source_path, dest_path):
"""
Cancels the slicing job on slicer ``slicer_name`` from ``source_path`` to ``dest_path``.
Arguments:
slicer_name (str): Identifier of the slicer on which to cancel the job.
source_path (str): The absolute path to the source file being sliced.
dest_path (str): The absolute path to the destination file being sliced to.
Raises:
~octoprint.slicing.exceptions.UnknownSlicer: The slicer specified via ``slicer_name`` is unknown.
"""
slicer = self.get_slicer(slicer_name)
slicer.cancel_slicing(dest_path)
def load_profile(self, slicer, name, require_configured=True):
"""
Loads the slicing profile for ``slicer`` with the given profile ``name`` and returns it. If it can't be loaded
due to an :class:`IOError` ``None`` will be returned instead.
If ``require_configured`` is True (the default) a :class:`SlicerNotConfigured` exception will be raised
if the indicated ``slicer`` has not yet been configured.
Returns:
SlicingProfile: The requested slicing profile or None if it could not be loaded.
Raises:
~octoprint.slicing.exceptions.UnknownSlicer: The slicer specified via ``slicer`` is unknown.
~octoprint.slicing.exceptions.SlicerNotConfigured: The slicer specified via ``slicer`` has not yet been configured and
``require_configured`` was True.
~octoprint.slicing.exceptions.UnknownProfile: The profile for slicer ``slicer`` named ``name`` does not exist.
"""
if not slicer in self.registered_slicers:
raise UnknownSlicer(slicer)
try:
path = self.get_profile_path(slicer, name, must_exist=True)
except IOError:
return None
return self._load_profile_from_path(slicer, path, require_configured=require_configured)
def save_profile(self, slicer, name, profile, overrides=None, allow_overwrite=True, display_name=None, description=None):
"""
Saves the slicer profile ``profile`` for slicer ``slicer`` under name ``name``.
``profile`` may be either a :class:`SlicingProfile` or a :class:`dict`.
If it's a :class:`SlicingProfile`, its :attr:`~SlicingProfile.slicer``, :attr:`~SlicingProfile.name` and - if
provided - :attr:`~SlicingProfile.display_name` and :attr:`~SlicingProfile.description` attributes will be
overwritten with the supplied values.
If it's a :class:`dict`, a new :class:`SlicingProfile` instance will be created with the supplied meta data and
the profile data as the :attr:`~SlicingProfile.data` attribute.
Arguments:
slicer (str): Identifier of the slicer for which to save the ``profile``.
name (str): Identifier under which to save the ``profile``.
profile (SlicingProfile or dict): The :class:`SlicingProfile` or a :class:`dict` containing the profile
data of the profile the save.
overrides (dict): Overrides to apply to the ``profile`` before saving it.
allow_overwrite (boolean): If True (default) if a profile for the same ``slicer`` of the same ``name``
already exists, it will be overwritten. Otherwise an exception will be thrown.
display_name (str): The name to display to the user for the profile.
description (str): A description of the profile.
Returns:
SlicingProfile: The saved profile (including the applied overrides).
Raises:
ValueError: The supplied ``profile`` is neither a :class:`SlicingProfile` nor a :class:`dict`.
~octoprint.slicing.exceptions.UnknownSlicer: The slicer ``slicer`` is unknown.
~octoprint.slicing.exceptions.ProfileAlreadyExists: A profile with name ``name`` already exists for ``slicer`` and ``allow_overwrite`` is
False.
"""
if not slicer in self.registered_slicers:
raise UnknownSlicer(slicer)
if not isinstance(profile, SlicingProfile):
if isinstance(profile, dict):
profile = SlicingProfile(slicer, name, profile, display_name=display_name, description=description)
else:
raise ValueError("profile must be a SlicingProfile or a dict")
else:
profile.slicer = slicer
profile.name = name
if display_name is not None:
profile.display_name = display_name
if description is not None:
profile.description = description
path = self.get_profile_path(slicer, name)
self._save_profile_to_path(slicer, path, profile, overrides=overrides, allow_overwrite=allow_overwrite)
return profile
def _temporary_profile(self, slicer, name=None, overrides=None):
if not slicer in self.registered_slicers:
raise UnknownSlicer(slicer)
profile = self._get_default_profile(slicer)
if name:
try:
profile = self.load_profile(slicer, name)
except (UnknownProfile, IOError):
# in that case we'll use the default profile
pass
return TemporaryProfile(self.get_slicer(slicer).save_slicer_profile, profile, overrides=overrides)
def delete_profile(self, slicer, name):
"""
Deletes the profile ``name`` for the specified ``slicer``.
If the profile does not exist, nothing will happen.
Arguments:
slicer (str): Identifier of the slicer for which to delete the profile.
name (str): Identifier of the profile to delete.
Raises:
~octoprint.slicing.exceptions.UnknownSlicer: The slicer ``slicer`` is unknown.
"""
if not slicer in self.registered_slicers:
raise UnknownSlicer(slicer)
if not name:
raise ValueError("name must be set")
try:
path = self.get_profile_path(slicer, name, must_exist=True)
except UnknownProfile:
return
os.remove(path)
def all_profiles(self, slicer, require_configured=False):
"""
Retrieves all profiles for slicer ``slicer``.
If ``require_configured`` is set to True (default is False), only will return the profiles if the ``slicer``
is already configured, otherwise a :class:`SlicerNotConfigured` exception will be raised.
Arguments:
slicer (str): Identifier of the slicer for which to retrieve all slicer profiles
require_configured (boolean): Whether to require the slicer ``slicer`` to be already configured (True)
or not (False, default). If False and the slicer is not yet configured, a :class:`~octoprint.slicing.exceptions.SlicerNotConfigured`
exception will be raised.
Returns:
list of SlicingProfile: A list of all :class:`SlicingProfile` instances available for the slicer ``slicer``.
Raises:
~octoprint.slicing.exceptions.UnknownSlicer: The slicer ``slicer`` is unknown.
~octoprint.slicing.exceptions.SlicerNotConfigured: The slicer ``slicer`` is not configured and ``require_configured`` was True.
"""
if not slicer in self.registered_slicers:
raise UnknownSlicer(slicer)
if require_configured and not slicer in self.configured_slicers:
raise SlicerNotConfigured(slicer)
profiles = dict()
slicer_profile_path = self.get_slicer_profile_path(slicer)
for entry in os.listdir(slicer_profile_path):
if not entry.endswith(".profile") or entry.startswith("."):
# we are only interested in profiles and no hidden files
continue
path = os.path.join(slicer_profile_path, entry)
profile_name = entry[:-len(".profile")]
profiles[profile_name] = self._load_profile_from_path(slicer, path, require_configured=require_configured)
return profiles
def get_slicer_profile_path(self, slicer):
"""
Retrieves the path where the profiles for slicer ``slicer`` are stored.
Arguments:
slicer (str): Identifier of the slicer for which to retrieve the path.
Returns:
str: The absolute path to the folder where the slicer's profiles are stored.
Raises:
~octoprint.slicing.exceptions.UnknownSlicer: The slicer ``slicer`` is unknown.
"""
if not slicer in self.registered_slicers:
raise UnknownSlicer(slicer)
path = os.path.join(self._profile_path, slicer)
if not os.path.exists(path):
os.makedirs(path)
return path
def get_profile_path(self, slicer, name, must_exist=False):
"""
Retrieves the path to the profile named ``name`` for slicer ``slicer``.
If ``must_exist`` is set to True (defaults to False) a :class:`UnknownProfile` exception will be raised if the
profile doesn't exist yet.
Arguments:
slicer (str): Identifier of the slicer to which the profile belongs to.
name (str): Identifier of the profile for which to retrieve the path.
must_exist (boolean): Whether the path must exist (True) or not (False, default).
Returns:
str: The absolute path to the profile identified by ``name`` for slicer ``slicer``.
Raises:
~octoprint.slicing.exceptions.UnknownSlicer: The slicer ``slicer`` is unknown.
~octoprint.slicing.exceptions.UnknownProfile: The profile named ``name`` doesn't exist and ``must_exist`` was True.
"""
if not slicer in self.registered_slicers:
raise UnknownSlicer(slicer)
if not name:
raise ValueError("name must be set")
name = self._sanitize(name)
path = os.path.join(self.get_slicer_profile_path(slicer), "{name}.profile".format(name=name))
if not os.path.realpath(path).startswith(self._profile_path):
raise IOError("Path to profile {name} tried to break out of allows sub path".format(**locals()))
if must_exist and not (os.path.exists(path) and os.path.isfile(path)):
raise UnknownProfile(slicer, name)
return path
def _sanitize(self, name):
if name is None:
return None
if "/" in name or "\\" in name:
raise ValueError("name must not contain / or \\")
import string
valid_chars = "-_.() {ascii}{digits}".format(ascii=string.ascii_letters, digits=string.digits)
sanitized_name = ''.join(c for c in name if c in valid_chars)
sanitized_name = sanitized_name.replace(" ", "_")
return sanitized_name
def _load_profile_from_path(self, slicer, path, require_configured=False):
return self.get_slicer(slicer, require_configured=require_configured).get_slicer_profile(path)
def _save_profile_to_path(self, slicer, path, profile, allow_overwrite=True, overrides=None, require_configured=False):
self.get_slicer(slicer, require_configured=require_configured).save_slicer_profile(path, profile, allow_overwrite=allow_overwrite, overrides=overrides)
def _get_default_profile(self, slicer):
default_profiles = settings().get(["slicing", "defaultProfiles"])
if default_profiles and slicer in default_profiles:
try:
return self.load_profile(slicer, default_profiles[slicer])
except (UnknownProfile, IOError):
# in that case we'll use the slicers predefined default profile
pass
return self.get_slicer(slicer).get_slicer_default_profile()
| agpl-3.0 |
wzod/fsf | fsf-server/conf/disposition.py | 3 | 3787 | #!/usr/bin/env python
#
# This is the Python 'module' that contains the
# disposition criteria for Yara and jq filters the scanner framework
# will work on. Each member is the name of a
# high fidelity detection.
#
# default - Modules that are always run on a returned buffer value
# triggers - List of tuples that are configured to drive the flow of execution
# as the file itself it scanned recursively. They consist of Yara rule names
# that (if evaluated to true) may then run zero, one or more modules and optionally
# set the alert flag.
# post_processor - List of tuples that are configured to capture observations
# concerning the JSON output. These consist of jq filters that ultimately produce
# a boolean value dictating if a given condition is true. If 'true' then the
# observation is captured and the alert flag is optionally set.
default = ['META_BASIC_INFO',
'EXTRACT_EMBEDDED',
'SCAN_YARA']
# STRUCTURE: List of tuples such that...
# Types: [('string', 'list', boolean'), ...]
# Variables: [('rule name', ['module_1' , 'module_2'] , 'alert_flag'), ...]
triggers = [('ft_zip', ['EXTRACT_ZIP'], False),
('ft_exe', ['META_PE'], False),
('ft_rar', ['EXTRACT_RAR'], False),
('ft_ole_cf', ['META_OLECF', 'EXTRACT_VBA_MACRO'], False),
('ft_pdf', ['META_PDF'], False),
('misc_ooxml_core_properties', ['META_OOXML'], False),
('ft_swf', ['EXTRACT_SWF'], False),
('misc_upx_packed_binary', ['EXTRACT_UPX'], False),
('ft_rtf', ['EXTRACT_RTF_OBJ'], False),
('ft_tar', ['EXTRACT_TAR'], False),
('ft_gzip', ['EXTRACT_GZIP'], False),
('misc_pe_signature', ['META_PE_SIGNATURE'], False),
('ft_cab', ['EXTRACT_CAB'], False),
('ft_elf', ['META_ELF'], False),
('ft_java_class', ['META_JAVA_CLASS'], False),
('misc_hexascii_pe_in_html', ['EXTRACT_HEXASCII_PE'], False),
('misc_no_dosmode_header', '', False),
('ft_macho', ['META_MACHO'], False),
]
# STRUCTURE: List of tuples such that...
# Types: [('string', 'string', boolean'), ...]
# Variables: [('jq script', 'observation' , 'alert_flag'), ...]
post_processor = [('one_module.jq', 'Only one kind of module was run on for this report.', False),
('no_yara_hits.jq', 'There doesn\'t appear to be any Yara signature hits for this scan.', False),
('exe_in_zip.jq', 'An executable was found inside a ZIP file.', False),
('embedded_sfx_rar_w_exe.jq', 'An embedded file contained a self-extracting RAR that itself contained an executable payload.', False),
('many_objects.jq', 'More than 10 unique objects were observed in this file.', False),
('vt_match_found.jq', 'At least one file was found to have results in VirusTotal\'s database.', False),
('vt_match_not_found.jq', 'There were no matches found when VirusTotal was queried.', False),
('macro_gt_five_suspicious.jq', 'A macro was found with more than five suspicious traits.', False),
('vt_broadbased_detections_found.jq', 'Some AV products have detected this as a PUP threat.', False),
('vt_exploit_detections_found.jq', 'Some AV products have detected this as an exploit.', False),
('more_than_ten_yara.jq', 'More than 10 unique Yara signatures fired when processing this file!', False),
('fresh_vt_scan.jq', 'One of the VirusTotal results contains an object that was scanned less than 24 hours ago.', False),
('pe_recently_compiled.jq', 'An executable has a compile time less than a week old.', False),
]
| apache-2.0 |
dongguangming/django-oscar | src/oscar/apps/dashboard/vouchers/app.py | 49 | 1247 | from django.conf.urls import url
from oscar.core.application import Application
from oscar.core.loading import get_class
class VoucherDashboardApplication(Application):
name = None
default_permissions = ['is_staff', ]
list_view = get_class('dashboard.vouchers.views', 'VoucherListView')
create_view = get_class('dashboard.vouchers.views', 'VoucherCreateView')
update_view = get_class('dashboard.vouchers.views', 'VoucherUpdateView')
delete_view = get_class('dashboard.vouchers.views', 'VoucherDeleteView')
stats_view = get_class('dashboard.vouchers.views', 'VoucherStatsView')
def get_urls(self):
urls = [
url(r'^$', self.list_view.as_view(), name='voucher-list'),
url(r'^create/$', self.create_view.as_view(),
name='voucher-create'),
url(r'^update/(?P<pk>\d+)/$', self.update_view.as_view(),
name='voucher-update'),
url(r'^delete/(?P<pk>\d+)/$', self.delete_view.as_view(),
name='voucher-delete'),
url(r'^stats/(?P<pk>\d+)/$', self.stats_view.as_view(),
name='voucher-stats'),
]
return self.post_process_urls(urls)
application = VoucherDashboardApplication()
| bsd-3-clause |
googlemaps/google-maps-services-python | googlemaps/convert.py | 1 | 10197 | #
# Copyright 2014 Google Inc. All rights reserved.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
"""Converts Python types to string representations suitable for Maps API server.
For example:
sydney = {
"lat" : -33.8674869,
"lng" : 151.2069902
}
convert.latlng(sydney)
# '-33.8674869,151.2069902'
"""
def format_float(arg):
"""Formats a float value to be as short as possible.
Truncates float to 8 decimal places and trims extraneous
trailing zeros and period to give API args the best
possible chance of fitting within 2000 char URL length
restrictions.
For example:
format_float(40) -> "40"
format_float(40.0) -> "40"
format_float(40.1) -> "40.1"
format_float(40.001) -> "40.001"
format_float(40.0010) -> "40.001"
format_float(40.000000001) -> "40"
format_float(40.000000009) -> "40.00000001"
:param arg: The lat or lng float.
:type arg: float
:rtype: string
"""
return ("%.8f" % float(arg)).rstrip("0").rstrip(".")
def latlng(arg):
"""Converts a lat/lon pair to a comma-separated string.
For example:
sydney = {
"lat" : -33.8674869,
"lng" : 151.2069902
}
convert.latlng(sydney)
# '-33.8674869,151.2069902'
For convenience, also accepts lat/lon pair as a string, in
which case it's returned unchanged.
:param arg: The lat/lon pair.
:type arg: string or dict or list or tuple
"""
if is_string(arg):
return arg
normalized = normalize_lat_lng(arg)
return "%s,%s" % (format_float(normalized[0]), format_float(normalized[1]))
def normalize_lat_lng(arg):
"""Take the various lat/lng representations and return a tuple.
Accepts various representations:
1) dict with two entries - "lat" and "lng"
2) list or tuple - e.g. (-33, 151) or [-33, 151]
:param arg: The lat/lng pair.
:type arg: dict or list or tuple
:rtype: tuple (lat, lng)
"""
if isinstance(arg, dict):
if "lat" in arg and "lng" in arg:
return arg["lat"], arg["lng"]
if "latitude" in arg and "longitude" in arg:
return arg["latitude"], arg["longitude"]
# List or tuple.
if _is_list(arg):
return arg[0], arg[1]
raise TypeError(
"Expected a lat/lng dict or tuple, "
"but got %s" % type(arg).__name__)
def location_list(arg):
"""Joins a list of locations into a pipe separated string, handling
the various formats supported for lat/lng values.
For example:
p = [{"lat" : -33.867486, "lng" : 151.206990}, "Sydney"]
convert.waypoint(p)
# '-33.867486,151.206990|Sydney'
:param arg: The lat/lng list.
:type arg: list
:rtype: string
"""
if isinstance(arg, tuple):
# Handle the single-tuple lat/lng case.
return latlng(arg)
else:
return "|".join([latlng(location) for location in as_list(arg)])
def join_list(sep, arg):
"""If arg is list-like, then joins it with sep.
:param sep: Separator string.
:type sep: string
:param arg: Value to coerce into a list.
:type arg: string or list of strings
:rtype: string
"""
return sep.join(as_list(arg))
def as_list(arg):
"""Coerces arg into a list. If arg is already list-like, returns arg.
Otherwise, returns a one-element list containing arg.
:rtype: list
"""
if _is_list(arg):
return arg
return [arg]
def _is_list(arg):
"""Checks if arg is list-like. This excludes strings and dicts."""
if isinstance(arg, dict):
return False
if isinstance(arg, str): # Python 3-only, as str has __iter__
return False
return _has_method(arg, "__getitem__") if not _has_method(arg, "strip") else _has_method(arg, "__iter__")
def is_string(val):
"""Determines whether the passed value is a string, safe for 2/3."""
try:
basestring
except NameError:
return isinstance(val, str)
return isinstance(val, basestring)
def time(arg):
"""Converts the value into a unix time (seconds since unix epoch).
For example:
convert.time(datetime.now())
# '1409810596'
:param arg: The time.
:type arg: datetime.datetime or int
"""
# handle datetime instances.
if _has_method(arg, "timestamp"):
arg = arg.timestamp()
if isinstance(arg, float):
arg = int(arg)
return str(arg)
def _has_method(arg, method):
"""Returns true if the given object has a method with the given name.
:param arg: the object
:param method: the method name
:type method: string
:rtype: bool
"""
return hasattr(arg, method) and callable(getattr(arg, method))
def components(arg):
"""Converts a dict of components to the format expected by the Google Maps
server.
For example:
c = {"country": "US", "postal_code": "94043"}
convert.components(c)
# 'country:US|postal_code:94043'
:param arg: The component filter.
:type arg: dict
:rtype: basestring
"""
# Components may have multiple values per type, here we
# expand them into individual key/value items, eg:
# {"country": ["US", "AU"], "foo": 1} -> "country:AU", "country:US", "foo:1"
def expand(arg):
for k, v in arg.items():
for item in as_list(v):
yield "%s:%s" % (k, item)
if isinstance(arg, dict):
return "|".join(sorted(expand(arg)))
raise TypeError(
"Expected a dict for components, "
"but got %s" % type(arg).__name__)
def bounds(arg):
"""Converts a lat/lon bounds to a comma- and pipe-separated string.
Accepts two representations:
1) string: pipe-separated pair of comma-separated lat/lon pairs.
2) dict with two entries - "southwest" and "northeast". See convert.latlng
for information on how these can be represented.
For example:
sydney_bounds = {
"northeast" : {
"lat" : -33.4245981,
"lng" : 151.3426361
},
"southwest" : {
"lat" : -34.1692489,
"lng" : 150.502229
}
}
convert.bounds(sydney_bounds)
# '-34.169249,150.502229|-33.424598,151.342636'
:param arg: The bounds.
:type arg: dict
"""
if is_string(arg) and arg.count("|") == 1 and arg.count(",") == 2:
return arg
elif isinstance(arg, dict):
if "southwest" in arg and "northeast" in arg:
return "%s|%s" % (latlng(arg["southwest"]),
latlng(arg["northeast"]))
raise TypeError(
"Expected a bounds (southwest/northeast) dict, "
"but got %s" % type(arg).__name__)
def size(arg):
if isinstance(arg, int):
return "%sx%s" % (arg, arg)
elif _is_list(arg):
return "%sx%s" % (arg[0], arg[1])
raise TypeError(
"Expected a size int or list, "
"but got %s" % type(arg).__name__)
def decode_polyline(polyline):
"""Decodes a Polyline string into a list of lat/lng dicts.
See the developer docs for a detailed description of this encoding:
https://developers.google.com/maps/documentation/utilities/polylinealgorithm
:param polyline: An encoded polyline
:type polyline: string
:rtype: list of dicts with lat/lng keys
"""
points = []
index = lat = lng = 0
while index < len(polyline):
result = 1
shift = 0
while True:
b = ord(polyline[index]) - 63 - 1
index += 1
result += b << shift
shift += 5
if b < 0x1f:
break
lat += (~result >> 1) if (result & 1) != 0 else (result >> 1)
result = 1
shift = 0
while True:
b = ord(polyline[index]) - 63 - 1
index += 1
result += b << shift
shift += 5
if b < 0x1f:
break
lng += ~(result >> 1) if (result & 1) != 0 else (result >> 1)
points.append({"lat": lat * 1e-5, "lng": lng * 1e-5})
return points
def encode_polyline(points):
"""Encodes a list of points into a polyline string.
See the developer docs for a detailed description of this encoding:
https://developers.google.com/maps/documentation/utilities/polylinealgorithm
:param points: a list of lat/lng pairs
:type points: list of dicts or tuples
:rtype: string
"""
last_lat = last_lng = 0
result = ""
for point in points:
ll = normalize_lat_lng(point)
lat = int(round(ll[0] * 1e5))
lng = int(round(ll[1] * 1e5))
d_lat = lat - last_lat
d_lng = lng - last_lng
for v in [d_lat, d_lng]:
v = ~(v << 1) if v < 0 else v << 1
while v >= 0x20:
result += (chr((0x20 | (v & 0x1f)) + 63))
v >>= 5
result += (chr(v + 63))
last_lat = lat
last_lng = lng
return result
def shortest_path(locations):
"""Returns the shortest representation of the given locations.
The Elevations API limits requests to 2000 characters, and accepts
multiple locations either as pipe-delimited lat/lng values, or
an encoded polyline, so we determine which is shortest and use it.
:param locations: The lat/lng list.
:type locations: list
:rtype: string
"""
if isinstance(locations, tuple):
# Handle the single-tuple lat/lng case.
locations = [locations]
encoded = "enc:%s" % encode_polyline(locations)
unencoded = location_list(locations)
if len(encoded) < len(unencoded):
return encoded
else:
return unencoded
| apache-2.0 |
eleonrk/SickRage | lib/sqlalchemy/dialects/postgresql/ext.py | 8 | 6860 | # postgresql/ext.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from ...sql import expression
from ...sql import elements
from ...sql import functions
from ...sql.schema import ColumnCollectionConstraint
from .array import ARRAY
class aggregate_order_by(expression.ColumnElement):
"""Represent a PostgreSQL aggregate order by expression.
E.g.::
from sqlalchemy.dialects.postgresql import aggregate_order_by
expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
stmt = select([expr])
would represent the expression::
SELECT array_agg(a ORDER BY b DESC) FROM table;
Similarly::
expr = func.string_agg(
table.c.a,
aggregate_order_by(literal_column("','"), table.c.a)
)
stmt = select([expr])
Would represent::
SELECT string_agg(a, ',' ORDER BY a) FROM table;
.. versionadded:: 1.1
.. seealso::
:class:`.array_agg`
"""
__visit_name__ = 'aggregate_order_by'
def __init__(self, target, order_by):
self.target = elements._literal_as_binds(target)
self.order_by = elements._literal_as_binds(order_by)
def self_group(self, against=None):
return self
def get_children(self, **kwargs):
return self.target, self.order_by
def _copy_internals(self, clone=elements._clone, **kw):
self.target = clone(self.target, **kw)
self.order_by = clone(self.order_by, **kw)
@property
def _from_objects(self):
return self.target._from_objects + self.order_by._from_objects
class ExcludeConstraint(ColumnCollectionConstraint):
"""A table-level EXCLUDE constraint.
Defines an EXCLUDE constraint as described in the `postgres
documentation`__.
__ http://www.postgresql.org/docs/9.0/\
static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE
"""
__visit_name__ = 'exclude_constraint'
where = None
def __init__(self, *elements, **kw):
r"""
Create an :class:`.ExcludeConstraint` object.
E.g.::
const = ExcludeConstraint(
(Column('period'), '&&'),
(Column('group'), '='),
where=(Column('group') != 'some group')
)
The constraint is normally embedded into the :class:`.Table` construct
directly, or added later using :meth:`.append_constraint`::
some_table = Table(
'some_table', metadata,
Column('id', Integer, primary_key=True),
Column('period', TSRANGE()),
Column('group', String)
)
some_table.append_constraint(
ExcludeConstraint(
(some_table.c.period, '&&'),
(some_table.c.group, '='),
where=some_table.c.group != 'some group',
name='some_table_excl_const'
)
)
:param \*elements:
A sequence of two tuples of the form ``(column, operator)`` where
"column" is a SQL expression element or a raw SQL string, most
typically a :class:`.Column` object,
and "operator" is a string containing the operator to use.
.. note::
A plain string passed for the value of "column" is interpreted
as an arbitrary SQL expression; when passing a plain string,
any necessary quoting and escaping syntaxes must be applied
manually. In order to specify a column name when a
:class:`.Column` object is not available, while ensuring that
any necessary quoting rules take effect, an ad-hoc
:class:`.Column` or :func:`.sql.expression.column` object may
be used.
:param name:
Optional, the in-database name of this constraint.
:param deferrable:
Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
issuing DDL for this constraint.
:param initially:
Optional string. If set, emit INITIALLY <value> when issuing DDL
for this constraint.
:param using:
Optional string. If set, emit USING <index_method> when issuing DDL
for this constraint. Defaults to 'gist'.
:param where:
Optional SQL expression construct or literal SQL string.
If set, emit WHERE <predicate> when issuing DDL
for this constraint.
.. note::
A plain string passed here is interpreted as an arbitrary SQL
expression; when passing a plain string, any necessary quoting
and escaping syntaxes must be applied manually.
"""
columns = []
render_exprs = []
self.operators = {}
expressions, operators = zip(*elements)
for (expr, column, strname, add_element), operator in zip(
self._extract_col_expression_collection(expressions),
operators
):
if add_element is not None:
columns.append(add_element)
name = column.name if column is not None else strname
if name is not None:
# backwards compat
self.operators[name] = operator
expr = expression._literal_as_text(expr)
render_exprs.append(
(expr, name, operator)
)
self._render_exprs = render_exprs
ColumnCollectionConstraint.__init__(
self,
*columns,
name=kw.get('name'),
deferrable=kw.get('deferrable'),
initially=kw.get('initially')
)
self.using = kw.get('using', 'gist')
where = kw.get('where')
if where is not None:
self.where = expression._literal_as_text(where)
def copy(self, **kw):
elements = [(col, self.operators[col])
for col in self.columns.keys()]
c = self.__class__(*elements,
name=self.name,
deferrable=self.deferrable,
initially=self.initially,
where=self.where,
using=self.using)
c.dispatch._update(self.dispatch)
return c
def array_agg(*arg, **kw):
"""PostgreSQL-specific form of :class:`.array_agg`, ensures
return type is :class:`.postgresql.ARRAY` and not
the plain :class:`.types.ARRAY`.
.. versionadded:: 1.1
"""
kw['type_'] = ARRAY(functions._type_from_args(arg))
return functions.func.array_agg(*arg, **kw)
| gpl-3.0 |
mo-ki/pypgpwords | pypgpwords.py | 1 | 13257 | #!/usr/bin/python3
"""Define a PGPWords object inherited from bytearray.
Adding initialization via hex-, or pgp-word-string,
adding .hex() method and
overriding __str__
Mainline code:
Convert pgp words to hex strings and vice versa.
Example:
$ pypgpwords.py DEAD 1337
tactics perceptive Aztec consensus
or
$ pypgpwords.py absurd bodyguard baboon unicorn
0116 14EC
moki@posteo.de
"""
from __future__ import print_function
import sys
SEPARATOR = " "
EVEN = ("aardvark",
"absurd",
"accrue",
"acme",
"adrift",
"adult",
"afflict",
"ahead",
"aimless",
"Algol",
"allow",
"alone",
"ammo",
"ancient",
"apple",
"artist",
"assume",
"Athens",
"atlas",
"Aztec",
"baboon",
"backfield",
"backward",
"banjo",
"beaming",
"bedlamp",
"beehive",
"beeswax",
"befriend",
"Belfast",
"berserk",
"billiard",
"bison",
"blackjack",
"blockade",
"blowtorch",
"bluebird",
"bombast",
"bookshelf",
"brackish",
"breadline",
"breakup",
"brickyard",
"briefcase",
"Burbank",
"button",
"buzzard",
"cement",
"chairlift",
"chatter",
"checkup",
"chisel",
"choking",
"chopper",
"Christmas",
"clamshell",
"classic",
"classroom",
"cleanup",
"clockwork",
"cobra",
"commence",
"concert",
"cowbell",
"crackdown",
"cranky",
"crowfoot",
"crucial",
"crumpled",
"crusade",
"cubic",
"dashboard",
"deadbolt",
"deckhand",
"dogsled",
"dragnet",
"drainage",
"dreadful",
"drifter",
"dropper",
"drumbeat",
"drunken",
"Dupont",
"dwelling",
"eating",
"edict",
"egghead",
"eightball",
"endorse",
"endow",
"enlist",
"erase",
"escape",
"exceed",
"eyeglass",
"eyetooth",
"facial",
"fallout",
"flagpole",
"flatfoot",
"flytrap",
"fracture",
"framework",
"freedom",
"frighten",
"gazelle",
"Geiger",
"glitter",
"glucose",
"goggles",
"goldfish",
"gremlin",
"guidance",
"hamlet",
"highchair",
"hockey",
"indoors",
"indulge",
"inverse",
"involve",
"island",
"jawbone",
"keyboard",
"kickoff",
"kiwi",
"klaxon",
"locale",
"lockup",
"merit",
"minnow",
"miser",
"Mohawk",
"mural",
"music",
"necklace",
"Neptune",
"newborn",
"nightbird",
"Oakland",
"obtuse",
"offload",
"optic",
"orca",
"payday",
"peachy",
"pheasant",
"physique",
"playhouse",
"Pluto",
"preclude",
"prefer",
"preshrunk",
"printer",
"prowler",
"pupil",
"puppy",
"python",
"quadrant",
"quiver",
"quota",
"ragtime",
"ratchet",
"rebirth",
"reform",
"regain",
"reindeer",
"rematch",
"repay",
"retouch",
"revenge",
"reward",
"rhythm",
"ribcage",
"ringbolt",
"robust",
"rocker",
"ruffled",
"sailboat",
"sawdust",
"scallion",
"scenic",
"scorecard",
"Scotland",
"seabird",
"select",
"sentence",
"shadow",
"shamrock",
"showgirl",
"skullcap",
"skydive",
"slingshot",
"slowdown",
"snapline",
"snapshot",
"snowcap",
"snowslide",
"solo",
"southward",
"soybean",
"spaniel",
"spearhead",
"spellbind",
"spheroid",
"spigot",
"spindle",
"spyglass",
"stagehand",
"stagnate",
"stairway",
"standard",
"stapler",
"steamship",
"sterling",
"stockman",
"stopwatch",
"stormy",
"sugar",
"surmount",
"suspense",
"sweatband",
"swelter",
"tactics",
"talon",
"tapeworm",
"tempest",
"tiger",
"tissue",
"tonic",
"topmost",
"tracker",
"transit",
"trauma",
"treadmill",
"Trojan",
"trouble",
"tumor",
"tunnel",
"tycoon",
"uncut",
"unearth",
"unwind",
"uproot",
"upset",
"upshot",
"vapor",
"village",
"virus",
"Vulcan",
"waffle",
"wallet",
"watchword",
"wayside",
"willow",
"woodlark",
"Zulu")
ODD = ("adroitness",
"adviser",
"aftermath",
"aggregate",
"alkali",
"almighty",
"amulet",
"amusement",
"antenna",
"applicant",
"Apollo",
"armistice",
"article",
"asteroid",
"Atlantic",
"atmosphere",
"autopsy",
"Babylon",
"backwater",
"barbecue",
"belowground",
"bifocals",
"bodyguard",
"bookseller",
"borderline",
"bottomless",
"Bradbury",
"bravado",
"Brazilian",
"breakaway",
"Burlington",
"businessman",
"butterfat",
"Camelot",
"candidate",
"cannonball",
"Capricorn",
"caravan",
"caretaker",
"celebrate",
"cellulose",
"certify",
"chambermaid",
"Cherokee",
"Chicago",
"clergyman",
"coherence",
"combustion",
"commando",
"company",
"component",
"concurrent",
"confidence",
"conformist",
"congregate",
"consensus",
"consulting",
"corporate",
"corrosion",
"councilman",
"crossover",
"crucifix",
"cumbersome",
"customer",
"Dakota",
"decadence",
"December",
"decimal",
"designing",
"detector",
"detergent",
"determine",
"dictator",
"dinosaur",
"direction",
"disable",
"disbelief",
"disruptive",
"distortion",
"document",
"embezzle",
"enchanting",
"enrollment",
"enterprise",
"equation",
"equipment",
"escapade",
"Eskimo",
"everyday",
"examine",
"existence",
"exodus",
"fascinate",
"filament",
"finicky",
"forever",
"fortitude",
"frequency",
"gadgetry",
"Galveston",
"getaway",
"glossary",
"gossamer",
"graduate",
"gravity",
"guitarist",
"hamburger",
"Hamilton",
"handiwork",
"hazardous",
"headwaters",
"hemisphere",
"hesitate",
"hideaway",
"holiness",
"hurricane",
"hydraulic",
"impartial",
"impetus",
"inception",
"indigo",
"inertia",
"infancy",
"inferno",
"informant",
"insincere",
"insurgent",
"integrate",
"intention",
"inventive",
"Istanbul",
"Jamaica",
"Jupiter",
"leprosy",
"letterhead",
"liberty",
"maritime",
"matchmaker",
"maverick",
"Medusa",
"megaton",
"microscope",
"microwave",
"midsummer",
"millionaire",
"miracle",
"misnomer",
"molasses",
"molecule",
"Montana",
"monument",
"mosquito",
"narrative",
"nebula",
"newsletter",
"Norwegian",
"October",
"Ohio",
"onlooker",
"opulent",
"Orlando",
"outfielder",
"Pacific",
"pandemic",
"Pandora",
"paperweight",
"paragon",
"paragraph",
"paramount",
"passenger",
"pedigree",
"Pegasus",
"penetrate",
"perceptive",
"performance",
"pharmacy",
"phonetic",
"photograph",
"pioneer",
"pocketful",
"politeness",
"positive",
"potato",
"processor",
"provincial",
"proximate",
"puberty",
"publisher",
"pyramid",
"quantity",
"racketeer",
"rebellion",
"recipe",
"recover",
"repellent",
"replica",
"reproduce",
"resistor",
"responsive",
"retraction",
"retrieval",
"retrospect",
"revenue",
"revival",
"revolver",
"sandalwood",
"sardonic",
"Saturday",
"savagery",
"scavenger",
"sensation",
"sociable",
"souvenir",
"specialist",
"speculate",
"stethoscope",
"stupendous",
"supportive",
"surrender",
"suspicious",
"sympathy",
"tambourine",
"telephone",
"therapist",
"tobacco",
"tolerance",
"tomorrow",
"torpedo",
"tradition",
"travesty",
"trombonist",
"truncated",
"typewriter",
"ultimate",
"undaunted",
"underfoot",
"unicorn",
"unify",
"universe",
"unravel",
"upcoming",
"vacancy",
"vagabond",
"vertigo",
"Virginia",
"visitor",
"vocalist",
"voyager",
"warranty",
"Waterloo",
"whimsical",
"Wichita",
"Wilmington",
"Wyoming",
"yesteryear",
"Yucatan")
class InvalidWordError(ValueError):
    """Raised when a token is not found in the expected PGP word list."""
    pass
def words_to_int(word_iter, odd=False):
    """Generator yielding the word-list index (0-255) of each PGP word.

    Words alternate between the even and odd word lists.

    :param word_iter: iterable of pgp words
    :type word_iter: iterable
    :param odd: start with odd word list
    :type odd: boolean
    :return: integer per word
    :rtype: generator
    :raises InvalidWordError: if a word is missing from its word list
    """
    use_odd = odd
    for word in word_iter:
        wordlist = ODD if use_odd else EVEN
        try:
            yield wordlist.index(word)
        except ValueError:
            which = "odd" if use_odd else "even"
            raise InvalidWordError(
                "not in {} word list: '{}'".format(which, word))
        # alternate between the two word lists
        use_odd = not use_odd
def ints_to_word(int_iter, odd=False):
    """Generator yielding the PGP word for each byte/int in *int_iter*.

    Words alternate between the even and odd word lists.

    :param int_iter: iterable of integers between 0 and 255
    :type int_iter: iterable
    :param odd: start with odd word list
    :type odd: boolean
    :return: pgp words
    :rtype: generator
    """
    use_odd = odd
    for value in int_iter:
        wordlist = ODD if use_odd else EVEN
        yield wordlist[value]
        # alternate between the two word lists
        use_odd = not use_odd
class PGPWords(bytearray):
    """bytearray subclass adding PGP-word and hex conversions.

    Adds initialization from a hex string or a PGP-word string,
    a ``hex()`` method, and a ``__str__`` that renders the bytes
    as PGP words.
    """

    def __init__(self, source, **kwargs):
        """Initiate bytearray. Added initialization styles:

        E.g.:
        p = PGPWords("absurd bodyguard baboon", encoding="pgp-words")
        p = PGPWords("DEAD 1337", encoding="hex")
        p = PGPWords("0xDEAD")            # hex inferred from '0x' prefix

        All other (source, **kwargs) combinations are passed straight
        to bytearray, so e.g. PGPWords(b"\\x01") still works.
        """
        enc = kwargs.get("encoding")
        # Guard with isinstance: non-str sources (bytes, int, iterables)
        # have no startswith() accepting a str, and must fall through to
        # the plain bytearray constructor.
        if enc == "pgp-words":
            kwargs.pop("encoding")
            source = words_to_int(source.split(SEPARATOR), **kwargs)
            kwargs = {}
        elif enc == "hex" or (isinstance(source, str)
                              and source.startswith('0x')):
            # 'encoding' may be absent when triggered only by the '0x'
            # prefix -- pop with a default to avoid a KeyError.
            kwargs.pop("encoding", None)
            tmp = source.replace("0x", '').replace(' ', '')
            source = (int(tmp[i:i + 2], 16) for i in range(0, len(tmp), 2))
        super(PGPWords, self).__init__(source, **kwargs)

    def __str__(self):
        """Return corresponding pgp words, separated by SEPARATOR."""
        return SEPARATOR.join(ints_to_word(self))

    def hex(self):
        """Return corresponding hex representation as string.

        Bytes are rendered uppercase and grouped 2-bytes-at-a-time,
        groups separated by SEPARATOR (e.g. "DEAD 1337").
        """
        tmp = ''.join(['{:02X}'.format(i) for i in self])
        gen = (tmp[i:i + 4] for i in range(0, len(tmp), 4))
        return SEPARATOR.join(gen)
def main():
    """Try to convert arguments in either direction."""
    # No arguments, or something option-like: print the usage section of
    # the module docstring and exit with an error status.
    if len(sys.argv) < 2 or sys.argv[1].startswith('-'):
        print(__doc__.split("Mainline code:\n\n")[1], file=sys.stderr)
        exit(-1)
    arg_str = ' '.join(sys.argv[1:])
    try:
        # First try to interpret the arguments as hex bytes...
        result = PGPWords(arg_str, encoding="hex")
        print(result)
    except ValueError as err1:
        try:
            # ...then fall back to interpreting them as PGP words.
            result = PGPWords(arg_str, encoding="pgp-words").hex()
            print(result)
        except InvalidWordError as err2:
            # Neither interpretation worked: report both failures.
            print(err1, file=sys.stderr)
            print(err2, file=sys.stderr)
            exit(-1)


if __name__ == "__main__":
    main()
| mit |
ESSICS/cs-studio | applications/scan/scan-plugins/org.csstudio.scan/jython/scan_client.py | 3 | 10449 | """
Scan Client Tools
DEPRECATED.
This API is based on the Java org.csstudio.scan.client.ScanClient
and thus limited to Jython.
For a Python scan client library that can be used by
both Jython and C-Python,
see https://github.com/PythonScanClient/PyScanClient#pyscanclient
@author: Kay Kasemir
"""
import sys, os, glob
import logging
# To debug the setup, directly execute this file.
# Or, from other file that tries to use it:
#
# import logging
# logging.basicConfig(level=logging.DEBUG)
# from scan_client import *
# Enable debug logging when this module is executed directly.
# (The duplicate logging.basicConfig() call was removed: basicConfig is a
# no-op once the root logger is configured, so the second call did nothing.)
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
# Example for displaying debug info:
#from org.eclipse.jface.dialogs import MessageDialog
# for p in sys.path:
# print p
# MessageDialog.openWarning(None, "Debug", "Using " + p)
# -------------------------------------------------------
# Scan Server connection setup
import org.csstudio.scan.client.ScanClient as JavaScanClient
import java.lang.System as System
# Python packages are different from Java Packages
# There can be issues with 'package scanning' that cause
# jython to not find classes when using
# from org.csstudio.scan.command import *
# or
# import org.csstudio.scan.command
#
# The most dependable way is to explicitly import one-by-one
import org.csstudio.scan.command.CommandSequence as CommandSequence
import org.csstudio.scan.command.CommentCommand as CommentCommand
import org.csstudio.scan.command.IncludeCommand as IncludeCommand
import org.csstudio.scan.command.LoopCommand as LoopCommand
import org.csstudio.scan.command.Comparison as Comparison
import org.csstudio.scan.command.ScanCommand as ScanCommand
import org.csstudio.scan.command.WaitCommand as WaitCommand
import org.csstudio.scan.command.DelayCommand as DelayCommand
import org.csstudio.scan.command.LogCommand as LogCommand
import org.csstudio.scan.command.SetCommand as SetCommand
import org.csstudio.scan.command.ScriptCommand as ScriptCommand
import org.csstudio.scan.data.ScanDataIterator as ScanDataIterator
import time
class ScanClient(object):
    """
    Base class for a scan client

    Can submit scans to the server and monitor them
    """

    def __init__(self):
        # Connection to the scan server
        self.client = JavaScanClient()
        # ID of the most recently submitted scan, -1 until one is submitted
        self.id = -1

    def checkServer(self):
        """
        Attempt to call the server, and try to re-connect on error.

        The server could be restarted, or there could have been
        a network issue between the time we originally connected
        to the server and now, which would invalidate the original
        server connection.
        """
        try:
            self.client.getServerInfo()
        except:
            # Bare except on purpose: under Jython the failure surfaces
            # as a Java exception, which need not be a Python Exception.
            self.client = JavaScanClient()

    def _commandsToXML(self, commands):
        """
        Turn a scan specification into XML text.

        @param commands CommandSequence or string with XML text
        @return XML text describing the scan
        """
        if isinstance(commands, str):
            return commands
        if isinstance(commands, CommandSequence):
            return commands.getXML()
        raise Exception('Expecting CommandSequence or XML-text')

    def simulate(self, commands):
        """
        Submit a CommandSequence to the server for simulation

        @param commands CommandSequence or string with XML text
        @return Simulation info
        """
        self.checkServer()
        return self.client.simulateScan(self._commandsToXML(commands))

    def submit(self, name, commands, queue=True):
        """
        Submit a CommandSequence to the server for execution

        @param name Name of the scan
        @param commands CommandSequence or string with XML text
        @param queue Submit to scan server queue, or execute as soon as possible?
        @return Scan ID
        """
        self.checkServer()
        self.id = self.client.submitScan(name, self._commandsToXML(commands),
                                         queue)
        return self.id

    def getScanInfo(self, id=-1):
        """
        Get scan info

        @param id Scan ID, defaulting to the last submitted scan
        """
        self.checkServer()
        if id == -1:
            id = self.id
        return self.client.getScanInfo(id)

    def printData(self, id=-1, *devices):
        """
        Print scan data

        @param id: Scan ID, defaulting to the last submitted scan
        @param devices: One or more device names. Default: All devices in scan.
        """
        self.checkServer()
        if id == -1:
            id = self.id
        data = self.client.getScanData(id)
        if devices:
            sheet = ScanDataIterator(data, devices)
        else:
            sheet = ScanDataIterator(data)
        sheet.printTable(System.out)

    def waitUntilDone(self, id=-1):
        """
        Wait until a submitted scan has finished

        Polls the server once per second and prints progress.

        @param id: Scan ID, defaulting to the last submitted scan
        """
        while True:
            info = self.getScanInfo(id)
            # print(x) with a single argument is valid Jython 2 and Python 3
            print(info)
            if info.getState().isDone():
                break
            time.sleep(1.0)

    def __str__(self):
        # Fixed: previously read 'self.server.getInfo()', but no 'server'
        # attribute exists -- the connection is held in 'self.client'.
        return "Scan client, connected to %s" % self.client.getServerInfo()
class ScanNd(ScanClient):
    """
    N-dimensional scan that logs arbitrary number of readings
    based on nested loops.

    Arguments:

    * First argument can be scan name. Optional.
    * Loop specification for all following arguments: ('device', start, end[, step])
    * Names of device to log in addition to loop'ed devices
    * Basic ScanCommand to perform: SetCommand, WaitCommand, ...

    All the devices used in loops, mentioned as device names or
    accessed in specific SetCommands will be logged in the innermost loop.

    Examples:

    # Scan 'xpos' from 1 to 10, stepping 1. xpos will be logged.
    scan('My first one', ('xpos', 1, 10) )

    # Scan name is optional. Loop of xpos from 1 to 10.
    scan( ('xpos', 1, 10) )

    # Log the 'readback' together with 'xpos' from the loop.
    scan( ('xpos', 1, 10), 'readback')

    # Scan 'xpos' from 1 to 10, stepping 1,
    # inside that looping 'ypos' from 1 to 5 by 0.2,
    # logging 'readback' with 'xpos' and 'ypos'.
    scan('XY Example', ('xpos', 1, 10), ('ypos', 1, 5, 0.2), 'readback')

    # Scan 'xpos' and 'ypos', set something to '1' and then '3' (with readback)
    # Will log 'xpos', 'ypos', 'setpoint', 'readback'
    scan('XY Example', ('xpos', 1, 10), ('ypos', 1, 5, 0.2),
         SetCommand('setpoint', 1, 'readback'),
         SetCommand('setpoint', 3, 'readback'))
    """

    def __init__(self):
        ScanClient.__init__(self)

    def _decodeLoop(self, parms):
        """ Normalize a loop specification.

        Check for ('device', start, end, step) or ('device', start, end),
        the latter getting a default step size of 1.

        @return ('device', start, end, step)
        """
        if (len(parms) == 4):
            return (parms[0], parms[1], parms[2], parms[3])
        elif (len(parms) == 3):
            return (parms[0], parms[1], parms[2], 1)
        else:
            # Message quoting fixed: the former doubled '' rendered as
            # "(device, ..." instead of "('device', ...".
            raise Exception("Scan parameters should be ('device', start, "
                            "end, step), not %s" % str(parms))

    def _decodeScan(self, log, args):
        """ Recursively build commands from scan arguments.

        Note: consumes 'args' (pops from the front) and appends to 'log'.

        @param log: Devices to log so far while going down the argument list
        @param args: Remaining scan arguments
        @return List of commands
        """
        if len(args) <= 0:
            # Reached innermost layer, no arguments left.
            # Log what needs to be logged. May be nothing.
            if len(log) <= 0:
                return []
            # Remove duplicate device names from list,
            # but preserve the list order
            cleaned_log = []
            for device in log:
                if device not in cleaned_log:
                    cleaned_log.append(device)
            return [ LogCommand(cleaned_log) ]
        # Analyze next argument
        arg = args.pop(0)
        if isinstance(arg, str):
            # Remember device to log, move on
            log.append(arg)
            return self._decodeScan(log, args)
        elif isinstance(arg, tuple):
            # Loop specification
            scan = self._decodeLoop(arg)
            # Remember loop variable for log
            log.append(scan[0])
            # Create loop with remaining arguments as body
            return [ LoopCommand(scan[0], scan[1], scan[2], scan[3],
                                 self._decodeScan(log, args)) ]
        elif isinstance(arg, ScanCommand):
            if isinstance(arg, SetCommand):
                # Log device affected by 'set'
                log.append(arg.getDeviceName())
            # Create list of commands
            cmds = [ arg ]
            cmds.extend(self._decodeScan(log, args))
            return cmds
        else:
            raise Exception('Cannot handle scan parameter of type %s'
                            % arg.__class__.__name__)

    def __call__(self, *args):
        """ N-dimensional scan command.

        @return ID of scan that was scheduled on the scan server
        """
        # Turn args into modifyable list
        args = list(args)
        # First string is optional scan title
        if len(args) > 0 and isinstance(args[0], str):
            name = args[0]
            args.pop(0)
        else:
            name = "Scan"
        # End result, overall scan
        cmds = self._decodeScan([], args)
        if len(cmds) <= 0:
            raise Exception('Empty scan')
        seq = CommandSequence(cmds)
        id = self.submit(name, seq)
        # Extra debug output when this module runs as a script.
        if __name__ == '__main__':
            seq.dump()
            self.waitUntilDone()
        return id
# Create the module-level 'scan' command instance
scan = ScanNd()

if __name__ == '__main__':
    print('Welcome to the scan system')
    # print('Running in %s' % os.getcwd())
    print('Connected to %s' % scan.client.getServerInfo())
    # Use a distinct loop variable: the original reused 'scan', which
    # rebound the name and clobbered the ScanNd instance created above.
    for info in scan.client.getScanInfos():
        print(info)
    # 'Normal' loops
    #scan('Normal 2D', ('xpos', 1, 10), ('ypos', 1, 10, 0.5), 'readback')
    # 'Reversing' inner loop
    #scan('Reversing 2D', ('xpos', 1, 10), ('ypos', 1, 10, -0.5), 'readback')
| epl-1.0 |
thnee/ansible | test/units/modules/net_tools/nios/test_nios_zone.py | 68 | 10787 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.modules.net_tools.nios import nios_zone
from ansible.module_utils.net_tools.nios import api
from units.compat.mock import patch, MagicMock, Mock
from .test_nios_module import TestNiosModule, load_fixture
class TestNiosZoneModule(TestNiosModule):
    """Unit tests for the nios_zone module's WAPI interactions."""

    module = nios_zone

    def setUp(self):
        super(TestNiosZoneModule, self).setUp()
        self.module = MagicMock(name='ansible.modules.net_tools.nios.nios_zone.WapiModule')
        self.module.check_mode = False
        self.module.params = {'provider': None}
        self.mock_wapi = patch('ansible.modules.net_tools.nios.nios_zone.WapiModule')
        self.exec_command = self.mock_wapi.start()
        self.mock_wapi_run = patch('ansible.modules.net_tools.nios.nios_zone.WapiModule.run')
        # Start the patcher exactly once: the previous extra start() call
        # patched the target twice, and the single stop() in tearDown left
        # one patch active, leaking into later tests.
        self.load_config = self.mock_wapi_run.start()

    def tearDown(self):
        super(TestNiosZoneModule, self).tearDown()
        self.mock_wapi.stop()
        self.mock_wapi_run.stop()

    def _get_wapi(self, test_object):
        """Build a WapiModule whose object CRUD methods are mocked out."""
        wapi = api.WapiModule(self.module)
        wapi.get_object = Mock(name='get_object', return_value=test_object)
        wapi.create_object = Mock(name='create_object')
        wapi.update_object = Mock(name='update_object')
        wapi.delete_object = Mock(name='delete_object')
        return wapi

    def load_fixtures(self, commands=None):
        self.exec_command.return_value = (0, load_fixture('nios_result.txt').strip(), None)
        self.load_config.return_value = dict(diff=None, session='session')

    def test_nios_zone_create(self):
        """A zone absent on the server is created."""
        self.module.params = {'provider': None, 'state': 'present', 'fqdn': 'ansible.com',
                              'comment': None, 'extattrs': None}
        test_object = None
        test_spec = {
            "fqdn": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        wapi.create_object.assert_called_once_with('testobject', {'fqdn': 'ansible.com'})

    def test_nios_zone_remove(self):
        """An existing zone is deleted when state is absent."""
        self.module.params = {'provider': None, 'state': 'absent', 'fqdn': 'ansible.com',
                              'comment': None, 'extattrs': None}
        ref = "zone/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/false"
        test_object = [{
            "comment": "test comment",
            "_ref": ref,
            "fqdn": "ansible.com",
            "extattrs": {'Site': {'value': 'test'}}
        }]
        test_spec = {
            "fqdn": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        wapi.delete_object.assert_called_once_with(ref)

    def test_nios_zone_update_comment(self):
        """Changing only the comment still reports a change."""
        self.module.params = {'provider': None, 'state': 'present', 'fqdn': 'ansible.com',
                              'comment': 'updated comment', 'extattrs': None}
        test_object = [
            {
                "comment": "test comment",
                "_ref": "zone/ZG5zLm5ldHdvcmtfdmlldyQw:default/true",
                "fqdn": "ansible.com",
                "extattrs": {'Site': {'value': 'test'}}
            }
        ]
        test_spec = {
            "fqdn": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])

    def test_nios_zone_create_using_grid_primary_secondaries(self):
        """Creation passes grid primary/secondary servers through."""
        self.module.params = {'provider': None, 'state': 'present', 'fqdn': 'ansible.com',
                              'grid_primary': [{"name": "gridprimary.grid.com"}],
                              'grid_secondaries': [{"name": "gridsecondary1.grid.com"},
                                                   {"name": "gridsecondary2.grid.com"}],
                              'restart_if_needed': True,
                              'comment': None, 'extattrs': None}
        test_object = None
        test_spec = {
            "fqdn": {"ib_req": True},
            "grid_primary": {},
            "grid_secondaries": {},
            "restart_if_needed": {},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        wapi.create_object.assert_called_once_with('testobject', {'fqdn': 'ansible.com',
                                                                  "grid_primary": [{"name": "gridprimary.grid.com"}],
                                                                  "grid_secondaries": [{"name": "gridsecondary1.grid.com"},
                                                                                       {"name": "gridsecondary2.grid.com"}],
                                                                  "restart_if_needed": True
                                                                  })

    def test_nios_zone_remove_using_grid_primary_secondaries(self):
        """Deletion works for a zone configured with grid servers."""
        self.module.params = {'provider': None, 'state': 'absent', 'fqdn': 'ansible.com',
                              'grid_primary': [{"name": "gridprimary.grid.com"}],
                              'grid_secondaries': [{"name": "gridsecondary1.grid.com"},
                                                   {"name": "gridsecondary2.grid.com"}],
                              'restart_if_needed': True,
                              'comment': None, 'extattrs': None}
        ref = "zone/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/false"
        test_object = [{
            "comment": "test comment",
            "_ref": ref,
            "fqdn": "ansible.com",
            "grid_primary": [{"name": "gridprimary.grid.com"}],
            "grid_secondaries": [{"name": "gridsecondary1.grid.com"}, {"name": "gridsecondary2.grid.com"}],
            "restart_if_needed": True,
            "extattrs": {'Site': {'value': 'test'}}
        }]
        test_spec = {
            "fqdn": {"ib_req": True},
            "grid_primary": {},
            "grid_secondaries": {},
            "restart_if_needed": {},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        wapi.delete_object.assert_called_once_with(ref)

    def test_nios_zone_create_using_name_server_group(self):
        """Creation passes the name server group through."""
        self.module.params = {'provider': None, 'state': 'present', 'fqdn': 'ansible.com',
                              'ns_group': 'examplensg', 'comment': None, 'extattrs': None}
        test_object = None
        test_spec = {
            "fqdn": {"ib_req": True},
            "ns_group": {},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        wapi.create_object.assert_called_once_with('testobject', {'fqdn': 'ansible.com',
                                                                  'ns_group': 'examplensg'})

    def test_nios_zone_remove_using_name_server_group(self):
        """Deletion works for a zone configured with a name server group."""
        self.module.params = {'provider': None, 'state': 'absent', 'fqdn': 'ansible.com',
                              'ns_group': 'examplensg', 'comment': None, 'extattrs': None}
        ref = "zone/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/false"
        test_object = [{
            "comment": "test comment",
            "_ref": ref,
            "fqdn": "ansible.com",
            "ns_group": "examplensg",
            "extattrs": {'Site': {'value': 'test'}}
        }]
        test_spec = {
            "fqdn": {"ib_req": True},
            "ns_group": {},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        wapi.delete_object.assert_called_once_with(ref)

    def test_nios_zone_create_using_zone_format(self):
        """Creation passes the reverse-zone format through."""
        self.module.params = {'provider': None, 'state': 'present', 'fqdn': '10.10.10.in-addr.arpa',
                              'zone_format': 'IPV4', 'comment': None, 'extattrs': None}
        test_object = None
        test_spec = {
            "fqdn": {"ib_req": True},
            "zone_format": {},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        wapi.create_object.assert_called_once_with('testobject', {'fqdn': '10.10.10.in-addr.arpa',
                                                                  'zone_format': 'IPV4'})

    def test_nios_zone_remove_using_using_zone_format(self):
        """Deletion works for a zone configured with a zone format."""
        self.module.params = {'provider': None, 'state': 'absent', 'fqdn': 'ansible.com',
                              'zone_format': 'IPV4', 'comment': None, 'extattrs': None}
        ref = "zone/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/false"
        test_object = [{
            "comment": "test comment",
            "_ref": ref,
            "fqdn": "ansible.com",
            "zone_format": "IPV4",
            "extattrs": {'Site': {'value': 'test'}}
        }]
        test_spec = {
            "fqdn": {"ib_req": True},
            "zone_format": {},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        wapi.delete_object.assert_called_once_with(ref)
| gpl-3.0 |
ChronoMonochrome/android_external_chromium_org | tools/perf/metrics/statistics.py | 23 | 7779 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A collection of statistical utility functions to be used by metrics."""
import bisect
import math
def Clamp(value, low=0.0, high=1.0):
    """Clamp *value* to the inclusive range [low, high]."""
    # Raise to at least `low`, then cap at `high`.
    return min(high, max(value, low))
def NormalizeSamples(samples):
    """Sorts the samples, and map them linearly to the range [0,1].

    They're mapped such that for the N samples, the first sample is 0.5/N and
    the last sample is (N-0.5)/N.

    Background: The discrepancy of the sample set i/(N-1); i=0, ..., N-1 is
    2/N, twice the discrepancy of the sample set (i+1/2)/N; i=0, ..., N-1. In
    our case we don't want to distinguish between these two cases, as our
    original domain is not bounded (it is for Monte Carlo integration, where
    discrepancy was first used).

    Args:
      samples: A list of numerical values.

    Returns:
      A tuple (normalized_samples, scale) where scale is the linear factor
      that was applied; ([], 1.0) for empty input, (sorted_samples, 1.0)
      when all samples are equal.
    """
    if not samples:
        return samples, 1.0
    ordered = sorted(samples)
    count = len(ordered)
    low = ordered[0]
    high = ordered[-1]
    new_low = 0.5 / count
    new_high = (count - 0.5) / count
    if high - low == 0.0:
        # All samples identical: nothing to rescale.
        return ordered, 1.0
    scale = (new_high - new_low) / (high - low)
    # Py2-only xrange() replaced with a comprehension so this also runs
    # under Python 3 (identical results under Python 2).
    rescaled = [float(s - low) * scale + new_low for s in ordered]
    return rescaled, scale
def Discrepancy(samples, interval_multiplier=10000):
    """Computes the discrepancy of a set of 1D samples from the interval [0,1].

    The samples must be sorted.

    http://en.wikipedia.org/wiki/Low-discrepancy_sequence
    http://mathworld.wolfram.com/Discrepancy.html

    Args:
      samples: A sorted list of values in [0,1].
      interval_multiplier: Controls how many candidate intervals are
          examined (more intervals -> more precise, slower).

    Returns:
      The maximum local discrepancy over the examined intervals; 1.0 for
      empty input.
    """
    if not samples:
        return 1.0
    max_local_discrepancy = 0
    locations = []
    # For each location, stores the number of samples less than that location.
    left = []
    # For each location, stores the number of samples less than or equal to
    # that location.
    right = []
    interval_count = len(samples) * interval_multiplier
    # Compute number of locations that will roughly result in the requested
    # number of intervals.
    location_count = int(math.ceil(math.sqrt(interval_count * 2)))
    inv_sample_count = 1.0 / len(samples)
    # Generate list of equally spaced locations.
    # Py2-only xrange() replaced with range() so this also runs on Python 3
    # (identical results under Python 2).
    for i in range(location_count):
        location = float(i) / (location_count - 1)
        locations.append(location)
        left.append(bisect.bisect_left(samples, location))
        right.append(bisect.bisect_right(samples, location))
    # Iterate over the intervals defined by any pair of locations.
    for i in range(len(locations)):
        for j in range(i, len(locations)):
            # Compute length of interval and number of samples in the interval.
            length = locations[j] - locations[i]
            count = right[j] - left[i]
            # Compute local discrepancy and update max_local_discrepancy.
            local_discrepancy = abs(float(count) * inv_sample_count - length)
            max_local_discrepancy = max(local_discrepancy,
                                        max_local_discrepancy)
    return max_local_discrepancy
def FrameDiscrepancy(frame_timestamps, absolute=True,
                     interval_multiplier=10000):
    """A discrepancy based metric for measuring jank.

    FrameDiscrepancy quantifies the largest area of jank observed in a series
    of timestamps. Note that this is different form metrics based on the
    max_frame_time. For example, the time stamp series A = [0,1,2,3,5,6] and
    B = [0,1,2,3,5,7] have the same max_frame_time = 2, but
    Discrepancy(B) > Discrepancy(A).

    Two variants of discrepancy can be computed:

    Relative discrepancy is following the original definition of
    discrepancy. It characterized the largest area of jank, relative to the
    duration of the entire time stamp series. We normalize the raw results,
    because the best case discrepancy for a set of N samples is 1/N (for
    equally spaced samples), and we want our metric to report 0.0 in that
    case.

    Absolute discrepancy also characterizes the largest area of jank, but its
    value wouldn't change (except for imprecisions due to a low
    interval_multiplier) if additional 'good' frames were added to an
    exisiting list of time stamps. Its range is [0,inf] and the unit is
    milliseconds.

    The time stamp series C = [0,2,3,4] and D = [0,2,3,4,5] have the same
    absolute discrepancy, but D has lower relative discrepancy than C.
    """
    # Empty input: report maximum (worst-case) discrepancy, matching the
    # convention of Discrepancy().
    if not frame_timestamps:
        return 1.0
    samples, sample_scale = NormalizeSamples(frame_timestamps)
    discrepancy = Discrepancy(samples, interval_multiplier)
    inv_sample_count = 1.0 / len(samples)
    if absolute:
        # Compute absolute discrepancy: undo the normalization scale so the
        # result is expressed in the original time units.
        discrepancy /= sample_scale
    else:
        # Compute relative discrepancy: shift/rescale so that perfectly
        # even spacing (raw discrepancy 1/N) maps to 0.0, clamped to [0,1].
        discrepancy = Clamp((discrepancy-inv_sample_count) / (1.0-inv_sample_count))
    return discrepancy
def ArithmeticMean(numerator, denominator):
    """Calculates arithmetic mean.

    Both numerator and denominator can be given as either individual
    values or lists of values which will be summed.

    Args:
      numerator: A quantity that represents a sum total value.
      denominator: A quantity that represents a count of the number of things.

    Returns:
      The arithmetic mean value, or 0 if the denominator value was 0.
    """
    # Collapse each argument to a single float (numbers pass through,
    # lists are summed), then divide with a zero-denominator guard.
    numerator_total = Total(numerator)
    denominator_total = Total(denominator)
    return DivideIfPossibleOrZero(numerator_total, denominator_total)
def Total(data):
    """Returns the float value of a number or the sum of a list.

    Args:
      data: An int, float, or list of numbers.

    Returns:
      The value as a float (lists are summed).

    Raises:
      TypeError: If data is any other type.
    """
    # isinstance() instead of type() == comparisons: idiomatic, and also
    # accepts int/float subclasses.
    if isinstance(data, (int, float)):
        return float(data)
    if isinstance(data, list):
        return float(sum(data))
    raise TypeError('Expected a number or a list, got %s'
                    % type(data).__name__)
def DivideIfPossibleOrZero(numerator, denominator):
    """Returns the quotient, or zero if the denominator is zero."""
    return (numerator / denominator) if denominator else 0.0
def GeneralizedMean(values, exponent):
    """See http://en.wikipedia.org/wiki/Generalized_mean"""
    if not values:
        return 0.0
    # Mean of the exponent-th powers, then the inverse power.
    power_total = sum(v ** exponent for v in values)
    return (power_total / len(values)) ** (1.0 / exponent)
def Median(values):
    """Gets the median of a list of values."""
    # The median is by definition the 50th percentile.
    return Percentile(values, 50)
def Percentile(values, percentile):
    """Calculates the value below which a given percentage of values fall.

    For example, if 17% of the values are less than 5.0, then 5.0 is the 17th
    percentile for this set of values. When the percentage doesn't exactly
    match a rank in the list of values, the percentile is computed using
    linear interpolation between closest ranks.

    Args:
      values: A list of numerical values.
      percentile: A number between 0 and 100.

    Returns:
      The Nth percentile for the list of values, where N is the given
      percentage.
    """
    if not values:
        return 0.0
    ordered = sorted(values)
    count = len(ordered)
    fraction = percentile / 100.0
    # Below the center of the first sample / above the center of the last:
    # clamp to the extremes.
    if fraction <= 0.5 / count:
        return ordered[0]
    if fraction >= (count - 0.5) / count:
        return ordered[-1]
    # Fractional rank between two adjacent samples; interpolate linearly.
    rank = count * fraction - 0.5
    lower_index = int(math.floor(rank))
    alpha = rank - lower_index
    lower_value = ordered[lower_index]
    upper_value = ordered[lower_index + 1]
    return lower_value + alpha * (upper_value - lower_value)
def GeometricMean(values):
  """Compute a rounded geometric mean from an array of values."""
  if not values:
    return None
  # Clamp everything to at least 0.001 so math.log never sees a
  # non-positive argument.
  clamped = [value if value > 0.001 else 0.001 for value in values]
  # Geometric mean via logs: exp(mean(log(v))). math.pow(math.e, ...) is
  # kept (rather than math.exp) for bit-identical results.
  log_total = sum(math.log(value) for value in clamped)
  return int(round(math.pow(math.e, log_total / len(clamped))))
| bsd-3-clause |
oldstylejoe/pychess-timed | lib/pychess/Players/PyChess.py | 2 | 7735 | #!/usr/bin/pypy -u
from __future__ import print_function
#if __name__ == "__main__":
# print("feature done=0")
import os
import random
import sys
from time import time
this_dir = os.path.dirname(os.path.abspath(__file__))
if os.path.join(this_dir, "../..") not in sys.path:
sys.path = [os.path.join(this_dir, "../..")] + sys.path
from pychess.compat import PY2
from pychess.Utils import const
from pychess.Utils.book import getOpenings
from pychess.Utils.const import *
from pychess.Utils.lutils import lsearch
from pychess.Utils.lutils.ldata import MAXPLY
from pychess.Utils.lutils.lsearch import alphaBeta
from pychess.Utils.lutils.LBoard import LBoard
from pychess.Utils.lutils.lmove import listToSan, toSAN
from pychess.System.Log import log
class PyChess (object):
    """Base class for the PyChess engine.

    Holds clock and search configuration, and implements move selection:
    first from the opening book, then via iterative-deepening alpha-beta
    search (lsearch/alphaBeta).
    """

    def __init__ (self):
        self.sd = MAXPLY                 # maximum search depth
        self.skipPruneChance = 0
        self.clock = [0, 0]              # remaining time per color (seconds)
        self.increment = [0, 0]          # per-move increment per color
        self.movestogo = 0
        self.searchtime = 0              # fixed per-move search time, if > 0
        self.scr = 0 # The current predicted score. Used when accepting draw offers
        self.playingAs = WHITE
        self.ponder = False # Currently unused
        self.post = False                # whether to post thinking lines (CECP "post")
        self.debug = True
        self.outOfBook = False           # set once the opening book has no move

    def print(self, text):
        # Mirror all engine output to stdout (flushed, for CECP pipe I/O)
        # and to the debug log.
        print(text)
        sys.stdout.flush()
        log.debug(text, extra={"task": "stdout"})

    #===========================================================================
    #    Play related
    #===========================================================================

    def __remainingMovesA (self):
        # Estimate of moves left in the game.
        # Based on regression of a 180k games pgn
        x = self.board.plyCount
        return -1.71086e-12*x**6 \
               +1.69103e-9*x**5 \
               -6.00801e-7*x**4 \
               +8.17741e-5*x**3 \
               +2.91858e-4*x**2 \
               -0.94497*x \
               +78.8979

    def __remainingMovesB (self):
        # Simpler fallback estimate (unused by __go, kept as an alternative).
        # We bet a game will be around 80 moves
        x = self.board.plyCount
        return max(80-x,4)

    def __getBestOpening (self):
        # Pick a book move weighted by its popularity; sets outOfBook when
        # the book has nothing for this position/variant.
        totalWeight = 0
        choice = None
        if self.board.variant not in (ASEANCHESS, CAMBODIANCHESS, MAKRUKCHESS, \
                                      SITTUYINCHESS, LOSERSCHESS, SUICIDECHESS,\
                                      ATOMICCHESS, KINGOFTHEHILLCHESS, THREECHECKCHESS):
            for move, weight, histGames, histScore in getOpenings(self.board):
                totalWeight += weight
                if totalWeight == 0:
                    break
                # Reservoir-style weighted pick over the book moves.
                if not move or random.randrange(totalWeight) < weight:
                    choice = move
        if choice is None:
            self.outOfBook = True
        return choice

    def __go (self, ondone=None):
        """ Finds and prints the best move from the current position """
        # Try the book first; fall back to timed iterative-deepening search.
        mv = False if self.outOfBook else self.__getBestOpening()
        if mv:
            mvs = [mv]
        if not mv:
            lsearch.skipPruneChance = self.skipPruneChance
            lsearch.searching = True
            # NOTE(review): self.basetime is not set in __init__ -- presumably
            # assigned by the CECP frontend before __go runs; confirm.
            timed = self.basetime > 0
            if self.searchtime > 0:
                usetime = self.searchtime
            else:
                usetime = self.clock[self.playingAs] / self.__remainingMovesA()
                if self.clock[self.playingAs] > 10:
                    # If we have time, we assume 40 moves rather than 80
                    usetime *= 2
                # The increment is a constant. We'll use this always
                usetime += self.increment[self.playingAs]
            prevtime = 0
            starttime = time()
            lsearch.endtime = starttime + usetime if timed else sys.maxsize
            if self.debug:
                if timed:
                    self.print("# Time left: %3.2f s; Planing to think for %3.2f s" % (self.clock[self.playingAs], usetime))
                else:
                    self.print("# Searching to depth %d without timelimit" % self.sd)
            for depth in range(1, self.sd+1):
                # Heuristic time saving
                # Don't waste time, if the estimated isn't enough to complete next depth
                if timed and usetime <= prevtime*4 and usetime > 1:
                    break
                lsearch.timecheck_counter = lsearch.TIMECHECK_FREQ
                search_result = alphaBeta(self.board, depth)
                if lsearch.searching:
                    mvs, self.scr = search_result
                    if time() > lsearch.endtime:
                        break
                    if self.post:
                        pv = " ".join(listToSan(self.board, mvs))
                        time_cs = int(100 * (time()-starttime))
                        self.print("%s %s %s %s %s" % (depth, self.scr, time_cs, lsearch.nodes, pv))
                else:
                    # We were interrupted
                    if depth == 1:
                        mvs, self.scr = search_result
                    break
                prevtime = time()-starttime - prevtime
            # Charge the elapsed time to our clock (the increment is refunded).
            self.clock[self.playingAs] -= time() - starttime - self.increment[self.playingAs]
            if not mvs:
                if not lsearch.searching:
                    # We were interupted
                    lsearch.nodes = 0
                    return
                # No moves found means the game is over.
                # This should only happen in terminal mode
                if self.scr == 0:
                    self.print("result %s" % reprResult[DRAW])
                elif self.scr < 0:
                    if self.board.color == WHITE:
                        self.print("result %s" % reprResult[BLACKWON])
                    else: self.print("result %s" % reprResult[WHITEWON])
                else:
                    if self.board.color == WHITE:
                        self.print("result %s" % reprResult[WHITEWON])
                    else: self.print("result %s" % reprResult[BLACKWON])
                return
            lsearch.nodes = 0
            lsearch.searching = False
        move = mvs[0]
        sanmove = toSAN(self.board, move)
        if ondone: ondone(sanmove)
        return sanmove

    def __analyze (self):
        """ Searches, and prints info from, the position as stated in the cecp
            protocol """
        # Unbounded-time analysis loop; emits one "depth score time nodes pv"
        # line per completed depth until lsearch.searching is cleared.
        start = time()
        lsearch.endtime = sys.maxsize
        lsearch.searching = True
        for depth in range (1, self.sd):
            if not lsearch.searching:
                break
            t = time()
            board = self.board.clone()
            mvs, scr = alphaBeta (board, depth)
            pv = " ".join(listToSan(board, mvs))
            time_cs = int(100 * (time() - start))
            self.print("%s %s %s %s %s" % (depth, scr, time_cs, lsearch.nodes, pv))
            lsearch.nodes = 0
################################################################################
# main #
################################################################################
if __name__ == "__main__":
    import logging
    from pychess.Players.PyChessCECP import PyChessCECP
    from pychess.System.Log import log
    # No arguments, or the single argument "debug", starts the CECP engine;
    # "debug" additionally raises log verbosity to DEBUG.
    if len(sys.argv) == 1 or sys.argv[1:] == ["debug"]:
        if "debug" in sys.argv[1:]:
            log.logger.setLevel(logging.DEBUG)
        else:
            log.logger.setLevel(logging.WARNING)
        pychess = PyChessCECP()
    else:
        print("Unknown argument(s):", repr(sys.argv))
        sys.exit(0)
    pychess.makeReady()
    pychess.run()
| gpl-3.0 |
frank10704/DF_GCS_W | MissionPlanner-master/Lib/site-packages/scipy/optimize/info.py | 55 | 2661 | """
Optimization Tools
==================
General-purpose Optimization Routines
-------------------------------------
.. autosummary::
:toctree: generated/
fmin - Nelder-Mead Simplex algorithm
fmin_powell - Powell's (modified) level set method
fmin_cg - Non-linear (Polak-Ribiere) conjugate gradient algorithm
   fmin_bfgs - Quasi-Newton method (Broyden-Fletcher-Goldfarb-Shanno)
fmin_ncg - Line-search Newton Conjugate Gradient
leastsq - Minimize the sum of squares of M equations in N unknowns
Constrained Optimizers (Multivariate)
-------------------------------------
.. autosummary::
:toctree: generated/
fmin_l_bfgs_b - Zhu, Byrd, and Nocedal's constrained optimizer
fmin_tnc - Truncated Newton code
fmin_cobyla - Constrained optimization by linear approximation
fmin_slsqp - Minimization using sequential least-squares programming
nnls - Linear least-squares problem with non-negativity constraint
Global Optimizers
-----------------
.. autosummary::
:toctree: generated/
anneal - Simulated annealing
brute - Brute force searching optimizer
Scalar Function Minimizers
--------------------------
.. autosummary::
:toctree: generated/
fminbound - Bounded minimization of a scalar function
brent - 1-D function minimization using Brent method
golden - 1-D function minimization using Golden Section method
bracket - Bracket a minimum, given two starting points
Fitting
=======
.. autosummary::
:toctree: generated/
curve_fit
Root finding
============
Scalar functions
----------------
.. autosummary::
:toctree: generated/
   brentq - Brent's method using inverse quadratic interpolation
brenth - Brent method, modified by Harris with hyperbolic extrapolation
ridder - Ridder's method
bisect - Bisection method
newton - Secant method or Newton's method
Fixed point finding:
.. autosummary::
:toctree: generated/
fixed_point - Single-variable fixed-point solver
Multidimensional
----------------
General nonlinear solvers:
.. autosummary::
:toctree: generated/
fsolve - Non-linear multi-variable equation solver
broyden1 - Broyden's first method
broyden2 - Broyden's second method
Large-scale nonlinear solvers:
.. autosummary::
:toctree: generated/
newton_krylov
anderson
Simple iterations:
.. autosummary::
:toctree: generated/
excitingmixing
linearmixing
diagbroyden
Utility Functions
=================
.. autosummary::
:toctree: generated/
line_search - Return a step that satisfies the strong Wolfe conditions
check_grad - Check the supplied derivative using finite differences
"""
postpone_import = 1
| gpl-3.0 |
benfinkelcbt/CPD200 | CPD200-Lab10-Python/simplejson/encoder.py | 43 | 26764 | """Implementation of JSONEncoder
"""
from __future__ import absolute_import
import re
from operator import itemgetter
# Do not import Decimal directly to avoid reload issues
import decimal
from .compat import u, unichr, binary_type, string_types, integer_types, PY3
def _import_speedups():
try:
from . import _speedups
return _speedups.encode_basestring_ascii, _speedups.make_encoder
except ImportError:
return None, None
# Bind the C speedups (or None, None when the extension is missing).
c_encode_basestring_ascii, c_make_encoder = _import_speedups()
from simplejson.decoder import PosInf
#ESCAPE = re.compile(ur'[\x00-\x1f\\"\b\f\n\r\t\u2028\u2029]')
# This is required because u() will mangle the string and ur'' isn't valid
# python3 syntax
ESCAPE = re.compile(u'[\\x00-\\x1f\\\\"\\b\\f\\n\\r\\t\u2028\u2029]')
# Matches characters that need escaping in ASCII-only output: backslash,
# double quote, and anything outside printable ASCII.
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Detects any non-ASCII byte (used to decide when a str needs decoding).
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Short two-character escapes for the common control characters.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
# Fill in \uXXXX escapes for the remaining C0 control characters.
for i in range(0x20):
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# U+2028/U+2029 are legal JSON but break JavaScript string literals,
# so they are always escaped too.
for i in [0x2028, 0x2029]:
    ESCAPE_DCT.setdefault(unichr(i), '\\u%04x' % (i,))
FLOAT_REPR = repr
def encode_basestring(s, _PY3=PY3, _q=u('"')):
    """Return a JSON representation of a Python string
    """
    # Normalize byte input to text before escaping.
    if _PY3:
        if isinstance(s, binary_type):
            s = s.decode('utf-8')
    else:
        if isinstance(s, str) and HAS_UTF8.search(s) is not None:
            s = s.decode('utf-8')
    escaped = ESCAPE.sub(lambda match: ESCAPE_DCT[match.group(0)], s)
    return _q + escaped + _q
def py_encode_basestring_ascii(s, _PY3=PY3):
    """Return an ASCII-only JSON representation of a Python string
    """
    # Normalize byte input to text before escaping.
    if _PY3:
        if isinstance(s, binary_type):
            s = s.decode('utf-8')
    else:
        if isinstance(s, str) and HAS_UTF8.search(s) is not None:
            s = s.decode('utf-8')

    def _escape(match):
        ch = match.group(0)
        mapped = ESCAPE_DCT.get(ch)
        if mapped is not None:
            return mapped
        code = ord(ch)
        if code < 0x10000:
            #return '\\u{0:04x}'.format(code)
            return '\\u%04x' % (code,)
        # Astral character: emit a UTF-16 surrogate pair.
        code -= 0x10000
        high = 0xd800 | ((code >> 10) & 0x3ff)
        low = 0xdc00 | (code & 0x3ff)
        return '\\u%04x\\u%04x' % (high, low)

    return '"' + str(ESCAPE_ASCII.sub(_escape, s)) + '"'
# Prefer the C implementation when the _speedups extension is available.
encode_basestring_ascii = (
    c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict, namedtuple  | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).

    """
    # Default separators; __init__ may override them per-instance.
    item_separator = ', '
    key_separator = ': '

    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None,
            use_decimal=True, namedtuple_as_object=True,
            tuple_as_array=True, bigint_as_string=False,
            item_sort_key=None, for_json=False, ignore_nan=False,
            int_as_string_bitcount=None, iterable_as_array=False):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None.  If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped.  If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such.  This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a string, then JSON array elements and object members
        will be pretty-printed with a newline followed by that string repeated
        for each level of nesting. ``None`` (the default) selects the most compact
        representation without any newlines. For backwards compatibility with
        versions of simplejson earlier than 2.1.0, an integer is also accepted
        and is converted to a string with that many spaces.

        If specified, separators should be an (item_separator, key_separator)
        tuple.  The default is (', ', ': ') if *indent* is ``None`` and
        (',', ': ') otherwise.  To get the most compact JSON representation,
        you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized.  It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.

        If use_decimal is true (not the default), ``decimal.Decimal`` will
        be supported directly by the encoder. For the inverse, decode JSON
        with ``parse_float=decimal.Decimal``.

        If namedtuple_as_object is true (the default), objects with
        ``_asdict()`` methods will be encoded as JSON objects.

        If tuple_as_array is true (the default), tuple (and subclasses) will
        be encoded as JSON arrays.

        If *iterable_as_array* is true (default: ``False``),
        any object not in the above table that implements ``__iter__()``
        will be encoded as a JSON array.

        If bigint_as_string is true (not the default), ints 2**53 and higher
        or lower than -2**53 will be encoded as strings. This is to avoid the
        rounding that happens in Javascript otherwise.

        If int_as_string_bitcount is a positive number (n), then int of size
        greater than or equal to 2**n or lower than or equal to -2**n will be
        encoded as strings.

        If specified, item_sort_key is a callable used to sort the items in
        each dictionary. This is useful if you want to sort items other than
        in alphabetical order by key.

        If for_json is true (not the default), objects with a ``for_json()``
        method will use the return value of that method for encoding as JSON
        instead of the object.

        If *ignore_nan* is true (default: ``False``), then out of range
        :class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized
        as ``null`` in compliance with the ECMA-262 specification. If true,
        this will override *allow_nan*.

        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.use_decimal = use_decimal
        self.namedtuple_as_object = namedtuple_as_object
        self.tuple_as_array = tuple_as_array
        self.iterable_as_array = iterable_as_array
        self.bigint_as_string = bigint_as_string
        self.item_sort_key = item_sort_key
        self.for_json = for_json
        self.ignore_nan = ignore_nan
        self.int_as_string_bitcount = int_as_string_bitcount
        # Backwards compatibility: an integer indent means that many spaces.
        if indent is not None and not isinstance(indent, string_types):
            indent = indent * ' '
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        elif indent is not None:
            # Pretty-printed output drops the space after the item comma.
            self.item_separator = ','
        if default is not None:
            self.default = default
        self.encoding = encoding

    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)

        """
        raise TypeError(repr(o) + " is not JSON serializable")

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> from simplejson import JSONEncoder
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'

        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, binary_type):
            _encoding = self.encoding
            if (_encoding is not None and not (_encoding == 'utf-8')):
                o = o.decode(_encoding)
        if isinstance(o, string_types):
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        if self.ensure_ascii:
            return ''.join(chunks)
        else:
            return u''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)

        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder so byte strings are decoded with the
            # configured (non-UTF-8) encoding first.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, binary_type):
                    o = o.decode(_encoding)
                return _orig_encoder(o)

        def floatstr(o, allow_nan=self.allow_nan, ignore_nan=self.ignore_nan,
                _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
            # Check for specials. Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on
            # the internals.
            # (o != o) is the NaN self-inequality test.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                if type(o) != float:
                    # See #118, do not trust custom str/repr
                    o = float(o)
                return _repr(o)
            if ignore_nan:
                text = 'null'
            elif not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))
            return text

        key_memo = {}
        int_as_string_bitcount = (
            53 if self.bigint_as_string else self.int_as_string_bitcount)
        # Use the C encoder only for compact one-shot encoding; every other
        # configuration goes through the pure-Python closure factory.
        if (_one_shot and c_make_encoder is not None
                and self.indent is None):
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan, key_memo, self.use_decimal,
                self.namedtuple_as_object, self.tuple_as_array,
                int_as_string_bitcount,
                self.item_sort_key, self.encoding, self.for_json,
                self.ignore_nan, decimal.Decimal, self.iterable_as_array)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot, self.use_decimal,
                self.namedtuple_as_object, self.tuple_as_array,
                int_as_string_bitcount,
                self.item_sort_key, self.encoding, self.for_json,
                self.iterable_as_array, Decimal=decimal.Decimal)
        try:
            return _iterencode(o, 0)
        finally:
            # Drop memoized keys so one encode() call can't leak into another.
            key_memo.clear()
class JSONEncoderForHTML(JSONEncoder):
    """An encoder that produces JSON safe to embed in HTML.

    The characters &, < and > are emitted as \\u-escapes so the output can
    be placed inside a <script> tag, where HTML entities (e.g. ``&amp;``)
    are not expanded.
    """

    def encode(self, o):
        # Bypass JSONEncoder.encode()'s fast paths and always go through
        # iterencode(), so every chunk receives the HTML-safe escaping.
        pieces = self.iterencode(o, True)
        if self.ensure_ascii:
            return ''.join(pieces)
        return u''.join(pieces)

    def iterencode(self, o, _one_shot=False):
        for chunk in super(JSONEncoderForHTML, self).iterencode(o, _one_shot):
            for raw, escaped in (('&', '\\u0026'),
                                 ('<', '\\u003c'),
                                 ('>', '\\u003e')):
                chunk = chunk.replace(raw, escaped)
            yield chunk
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
        _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        _use_decimal, _namedtuple_as_object, _tuple_as_array,
        _int_as_string_bitcount, _item_sort_key,
        _encoding, _for_json,
        _iterable_as_array,
        ## HACK: hand-optimized bytecode; turn globals into locals
        _PY3=PY3,
        ValueError=ValueError,
        string_types=string_types,
        Decimal=None,
        dict=dict,
        float=float,
        id=id,
        integer_types=integer_types,
        isinstance=isinstance,
        list=list,
        str=str,
        tuple=tuple,
        iter=iter,
        ):
    """Build and return the pure-Python ``_iterencode(o, indent_level)``
    generator, with all configuration captured in the closure.

    The trailing keyword arguments intentionally shadow builtins so the
    nested generators resolve them as fast locals rather than globals.
    """
    if _use_decimal and Decimal is None:
        Decimal = decimal.Decimal
    if _item_sort_key and not callable(_item_sort_key):
        raise TypeError("item_sort_key must be None or callable")
    elif _sort_keys and not _item_sort_key:
        # sort_keys without a custom key sorts items by their key (index 0).
        _item_sort_key = itemgetter(0)
    if (_int_as_string_bitcount is not None and
            (_int_as_string_bitcount <= 0 or
             not isinstance(_int_as_string_bitcount, integer_types))):
        raise TypeError("int_as_string_bitcount must be a positive integer")

    def _encode_int(value):
        # Render an int; quote it as a string when it falls outside the
        # +/- 2**bitcount range (JavaScript number-precision workaround).
        skip_quoting = (
            _int_as_string_bitcount is None
            or
            _int_as_string_bitcount < 1
        )
        if type(value) not in integer_types:
            # See #118, do not trust custom str/repr
            value = int(value)
        if (
            skip_quoting or
            (-1 << _int_as_string_bitcount)
            < value <
            (1 << _int_as_string_bitcount)
        ):
            return str(value)
        return '"' + str(value) + '"'

    def _iterencode_list(lst, _current_indent_level):
        # Yield the JSON chunks for a list/tuple/iterable.
        if not lst:
            yield '['
            return
        if markers is not None:
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            # buf carries the opening bracket on the first iteration and
            # the separator afterwards; it is prefixed to scalar chunks.
            if first:
                first = False
            else:
                buf = separator
            if (isinstance(value, string_types) or
                (_PY3 and isinstance(value, binary_type))):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, integer_types):
                yield buf + _encode_int(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            elif _use_decimal and isinstance(value, Decimal):
                yield buf + str(value)
            else:
                yield buf
                for_json = _for_json and getattr(value, 'for_json', None)
                if for_json and callable(for_json):
                    chunks = _iterencode(for_json(), _current_indent_level)
                elif isinstance(value, list):
                    chunks = _iterencode_list(value, _current_indent_level)
                else:
                    _asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
                    if _asdict and callable(_asdict):
                        chunks = _iterencode_dict(_asdict(),
                                                  _current_indent_level)
                    elif _tuple_as_array and isinstance(value, tuple):
                        chunks = _iterencode_list(value, _current_indent_level)
                    elif isinstance(value, dict):
                        chunks = _iterencode_dict(value, _current_indent_level)
                    else:
                        chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if first:
            # iterable_as_array misses the fast path at the top
            yield '[]'
        else:
            if newline_indent is not None:
                _current_indent_level -= 1
                yield '\n' + (_indent * _current_indent_level)
            yield ']'
        if markers is not None:
            del markers[markerid]

    def _stringify_key(key):
        # Coerce a non-string dict key to its JSON string form, or None
        # when _skipkeys says to drop it.
        if isinstance(key, string_types): # pragma: no cover
            pass
        elif isinstance(key, binary_type):
            key = key.decode(_encoding)
        elif isinstance(key, float):
            key = _floatstr(key)
        elif key is True:
            key = 'true'
        elif key is False:
            key = 'false'
        elif key is None:
            key = 'null'
        elif isinstance(key, integer_types):
            if type(key) not in integer_types:
                # See #118, do not trust custom str/repr
                key = int(key)
            key = str(key)
        elif _use_decimal and isinstance(key, Decimal):
            key = str(key)
        elif _skipkeys:
            key = None
        else:
            raise TypeError("key " + repr(key) + " is not a string")
        return key

    def _iterencode_dict(dct, _current_indent_level):
        # Yield the JSON chunks for a dict (or namedtuple _asdict()).
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _PY3:
            iteritems = dct.items()
        else:
            iteritems = dct.iteritems()
        if _item_sort_key:
            # Keys must be stringified *before* sorting so the sort key
            # sees the final JSON key text.
            items = []
            for k, v in dct.items():
                if not isinstance(k, string_types):
                    k = _stringify_key(k)
                    if k is None:
                        continue
                items.append((k, v))
            items.sort(key=_item_sort_key)
        else:
            items = iteritems
        for key, value in items:
            if not (_item_sort_key or isinstance(key, string_types)):
                key = _stringify_key(key)
                if key is None:
                    # _skipkeys must be True
                    continue
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if (isinstance(value, string_types) or
                (_PY3 and isinstance(value, binary_type))):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, integer_types):
                yield _encode_int(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            elif _use_decimal and isinstance(value, Decimal):
                yield str(value)
            else:
                for_json = _for_json and getattr(value, 'for_json', None)
                if for_json and callable(for_json):
                    chunks = _iterencode(for_json(), _current_indent_level)
                elif isinstance(value, list):
                    chunks = _iterencode_list(value, _current_indent_level)
                else:
                    _asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
                    if _asdict and callable(_asdict):
                        chunks = _iterencode_dict(_asdict(),
                                                  _current_indent_level)
                    elif _tuple_as_array and isinstance(value, tuple):
                        chunks = _iterencode_list(value, _current_indent_level)
                    elif isinstance(value, dict):
                        chunks = _iterencode_dict(value, _current_indent_level)
                    else:
                        chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode(o, _current_indent_level):
        # Top-level dispatch over all supported types.
        if (isinstance(o, string_types) or
            (_PY3 and isinstance(o, binary_type))):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, integer_types):
            yield _encode_int(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        else:
            for_json = _for_json and getattr(o, 'for_json', None)
            if for_json and callable(for_json):
                for chunk in _iterencode(for_json(), _current_indent_level):
                    yield chunk
            elif isinstance(o, list):
                for chunk in _iterencode_list(o, _current_indent_level):
                    yield chunk
            else:
                _asdict = _namedtuple_as_object and getattr(o, '_asdict', None)
                if _asdict and callable(_asdict):
                    for chunk in _iterencode_dict(_asdict(),
                                                  _current_indent_level):
                        yield chunk
                elif (_tuple_as_array and isinstance(o, tuple)):
                    for chunk in _iterencode_list(o, _current_indent_level):
                        yield chunk
                elif isinstance(o, dict):
                    for chunk in _iterencode_dict(o, _current_indent_level):
                        yield chunk
                elif _use_decimal and isinstance(o, Decimal):
                    yield str(o)
                else:
                    # 'while' used as a breakable 'if': fall through to
                    # _default() when the object is not iterable.
                    while _iterable_as_array:
                        # Markers are not checked here because it is valid for
                        # an iterable to return self.
                        try:
                            o = iter(o)
                        except TypeError:
                            break
                        for chunk in _iterencode_list(o, _current_indent_level):
                            yield chunk
                        return
                    if markers is not None:
                        markerid = id(o)
                        if markerid in markers:
                            raise ValueError("Circular reference detected")
                        markers[markerid] = o
                    o = _default(o)
                    for chunk in _iterencode(o, _current_indent_level):
                        yield chunk
                    if markers is not None:
                        del markers[markerid]
    return _iterencode
| gpl-3.0 |
danrouse/cvimage | test.py | 2 | 1323 | import unittest
import numpy as np
import cv2
from cvimage import CvImage
def gen_sample(dims = (512, 512, 3)):
    """Return a random uint8 image-like array of the given shape.

    np.random.rand() yields floats in [0, 1); they must be scaled before
    the uint8 cast -- casting the raw floats truncates every pixel to 0,
    which made the "random" sample a solid black image and the tests
    exercising it vacuous.
    """
    return np.uint8(np.random.rand(*dims) * 255)
class TestCvMethods(unittest.TestCase):
    """Checks that CvImage's chained wrappers match raw cv2 calls."""

    def test_filter_chain(self):
        # Same pipeline (blur -> grayscale -> Canny -> dilate) run through
        # raw cv2 and through CvImage's fluent API must agree.
        sample = gen_sample()
        b_kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
        baseline = sample.copy()
        baseline = cv2.blur(baseline, (3, 3))
        baseline = cv2.cvtColor(baseline, cv2.COLOR_BGR2GRAY)
        baseline = cv2.Canny(baseline, 100, 200)
        baseline = cv2.dilate(baseline, b_kernel)
        p_kernel = CvImage.kernel('ellipse', (3, 3))
        patient = CvImage(sample.copy())\
            .blur((3, 3))\
            .cvtColor(cv2.COLOR_BGR2GRAY)\
            .canny(100, 200)\
            .dilate(p_kernel)
        # Arrays are compared via their string form to avoid numpy's
        # ambiguous-truth-value behavior in assertEqual.
        self.assertEqual(str(baseline), str(patient.image))

    def test_data_return(self):
        # Auxiliary data from cv2.threshold should be exposed as im.data.
        sample = gen_sample()
        result, threshold = cv2.threshold(sample, 100, 200, cv2.THRESH_BINARY)
        im = CvImage(sample).threshold(100, 200, cv2.THRESH_BINARY)
        # NOTE(review): unlike the other tests this compares without str();
        # if im.data is an array this assertEqual is ambiguous -- confirm.
        self.assertEqual(result, im.data)

    def test_constants(self):
        # String constant names (e.g. 'binary') should map to cv2 flags.
        sample = gen_sample()
        result, threshold = cv2.threshold(sample, 100, 200, cv2.THRESH_BINARY)
        im = CvImage(sample).threshold(100, 200, 'binary')
        self.assertEqual(str(threshold), str(im.image))

    #def test_kernel(self):
    #def test_contrib_methods(self):
#def test_kernel(self):
#def test_contrib_methods(self):
# Run the suite when executed directly.
if __name__ == '__main__':
    unittest.main()
| mit |
adw0rd/lettuce | tests/integration/lib/Django-1.3/tests/modeltests/update/tests.py | 89 | 4252 | from django.test import TestCase
from models import A, B, C, D, DataPoint, RelatedPoint
class SimpleTest(TestCase):
    """QuerySet.update() row counts for related sets, with and without
    model inheritance (D inherits from a base whose table holds y)."""

    def setUp(self):
        # a1 gets 20 B children and 20 D children; a2 gets none.
        self.a1 = A.objects.create()
        self.a2 = A.objects.create()
        for x in range(20):
            B.objects.create(a=self.a1)
            D.objects.create(a=self.a1)

    def test_nonempty_update(self):
        """
        Test that update changes the right number of rows for a nonempty queryset
        """
        num_updated = self.a1.b_set.update(y=100)
        self.assertEqual(num_updated, 20)
        cnt = B.objects.filter(y=100).count()
        self.assertEqual(cnt, 20)

    def test_empty_update(self):
        """
        Test that update changes the right number of rows for an empty queryset
        """
        num_updated = self.a2.b_set.update(y=100)
        self.assertEqual(num_updated, 0)
        cnt = B.objects.filter(y=100).count()
        self.assertEqual(cnt, 0)

    def test_nonempty_update_with_inheritance(self):
        """
        Test that update changes the right number of rows for a nonempty
        queryset when the update affects only a base table
        """
        num_updated = self.a1.d_set.update(y=100)
        self.assertEqual(num_updated, 20)
        cnt = D.objects.filter(y=100).count()
        self.assertEqual(cnt, 20)

    def test_empty_update_with_inheritance(self):
        """
        Test that update changes the right number of rows for an empty queryset
        when the update affects only a base table
        """
        num_updated = self.a2.d_set.update(y=100)
        self.assertEqual(num_updated, 0)
        cnt = D.objects.filter(y=100).count()
        self.assertEqual(cnt, 0)
class AdvancedTests(TestCase):
    """update() semantics: multiple rows/fields, foreign keys, manager-level
    updates, and the sliced-queryset restriction."""

    def setUp(self):
        self.d0 = DataPoint.objects.create(name="d0", value="apple")
        self.d2 = DataPoint.objects.create(name="d2", value="banana")
        self.d3 = DataPoint.objects.create(name="d3", value="banana")
        self.r1 = RelatedPoint.objects.create(name="r1", data=self.d3)

    def test_update(self):
        """
        Objects are updated by first filtering the candidates into a queryset
        and then calling the update() method. It executes immediately and
        returns nothing.
        """
        resp = DataPoint.objects.filter(value="apple").update(name="d1")
        self.assertEqual(resp, 1)
        resp = DataPoint.objects.filter(value="apple")
        self.assertEqual(list(resp), [self.d0])

    def test_update_multiple_objects(self):
        """
        We can update multiple objects at once.
        """
        resp = DataPoint.objects.filter(value="banana").update(
            value="pineapple")
        self.assertEqual(resp, 2)
        self.assertEqual(DataPoint.objects.get(name="d2").value, u'pineapple')

    def test_update_fk(self):
        """
        Foreign key fields can also be updated, although you can only update
        the object referred to, not anything inside the related object.
        """
        resp = RelatedPoint.objects.filter(name="r1").update(data=self.d0)
        self.assertEqual(resp, 1)
        resp = RelatedPoint.objects.filter(data__name="d0")
        self.assertEqual(list(resp), [self.r1])

    def test_update_multiple_fields(self):
        """
        Multiple fields can be updated at once
        """
        resp = DataPoint.objects.filter(value="apple").update(
            value="fruit", another_value="peach")
        self.assertEqual(resp, 1)
        d = DataPoint.objects.get(name="d0")
        self.assertEqual(d.value, u'fruit')
        self.assertEqual(d.another_value, u'peach')

    def test_update_all(self):
        """
        In the rare case you want to update every instance of a model, update()
        is also a manager method.
        """
        self.assertEqual(DataPoint.objects.update(value='thing'), 3)
        resp = DataPoint.objects.values('value').distinct()
        self.assertEqual(list(resp), [{'value': u'thing'}])

    def test_update_slice_fail(self):
        """
        We do not support update on already sliced query sets.
        """
        # Calling update on a sliced queryset must raise AssertionError.
        method = DataPoint.objects.all()[:2].update
        self.assertRaises(AssertionError, method,
                another_value='another thing')
| gpl-3.0 |
chauhanhardik/populo | common/djangoapps/track/views/tests/test_segmentio.py | 74 | 21978 | """Ensure we can parse events sent to us from the segment.io webhook integration"""
from datetime import datetime
import json
from ddt import ddt, data, unpack
from mock import sentinel
from django.contrib.auth.models import User
from django.test.client import RequestFactory
from django.test.utils import override_settings
from openedx.core.lib.tests.assertions.events import assert_event_matches
from track.middleware import TrackMiddleware
from track.tests import EventTrackingTestCase
from track.views import segmentio
# Shared secret the fake segment.io requests must present via ?key=.
SECRET = 'anything'
# URL the webhook is mounted at for these tests.
ENDPOINT = '/segmentio/test/event'
# Primary key of the User most tests create/expect.
USER_ID = 10
# Processor chain mirroring the production mobile pipeline: first map the
# legacy fields, then rewrite video events into tracking-log form.
MOBILE_SHIM_PROCESSOR = [
    {
        'ENGINE': 'track.shim.LegacyFieldMappingProcessor'
    },
    {
        'ENGINE': 'track.shim.VideoEventProcessor'
    }
]
def expect_failure_with_message(message):
    """Decorator factory: assert the test raises EventValidationError.

    The decorated test must raise ``segmentio.EventValidationError`` whose
    text matches ``message`` (a regex), and must not emit any tracking event.
    """
    from functools import wraps

    def test_decorator(func):
        @wraps(func)  # preserve the test's __name__/__doc__ for reporting
        def test_decorated(self, *args, **kwargs):
            self.assertRaisesRegexp(segmentio.EventValidationError, message, func, self, *args, **kwargs)
            self.assert_no_events_emitted()
        return test_decorated
    return test_decorator
@ddt
@override_settings(
TRACKING_SEGMENTIO_WEBHOOK_SECRET=SECRET,
TRACKING_IGNORE_URL_PATTERNS=[ENDPOINT],
TRACKING_SEGMENTIO_ALLOWED_TYPES=['track'],
TRACKING_SEGMENTIO_DISALLOWED_SUBSTRING_NAMES=['.bi.'],
TRACKING_SEGMENTIO_SOURCE_MAP={'test-app': 'mobile'},
EVENT_TRACKING_PROCESSORS=MOBILE_SHIM_PROCESSOR,
)
class SegmentIOTrackingTestCase(EventTrackingTestCase):
"""Test processing of segment.io events"""
    def setUp(self):
        """Prepare a request factory; show full diffs on assertion mismatch."""
        super(SegmentIOTrackingTestCase, self).setUp()
        self.maxDiff = None  # pylint: disable=invalid-name
        self.request_factory = RequestFactory()
def test_get_request(self):
request = self.request_factory.get(ENDPOINT)
response = segmentio.segmentio_event(request)
self.assertEquals(response.status_code, 405)
self.assert_no_events_emitted()
@override_settings(
TRACKING_SEGMENTIO_WEBHOOK_SECRET=None
)
def test_no_secret_config(self):
request = self.request_factory.post(ENDPOINT)
response = segmentio.segmentio_event(request)
self.assertEquals(response.status_code, 401)
self.assert_no_events_emitted()
def test_no_secret_provided(self):
request = self.request_factory.post(ENDPOINT)
response = segmentio.segmentio_event(request)
self.assertEquals(response.status_code, 401)
self.assert_no_events_emitted()
def test_secret_mismatch(self):
request = self.create_request(key='y')
response = segmentio.segmentio_event(request)
self.assertEquals(response.status_code, 401)
self.assert_no_events_emitted()
    def create_request(self, key=None, **kwargs):
        """Create a fake request that emulates a request from the segment.io servers to ours"""
        if key is None:
            key = SECRET
        request = self.request_factory.post(ENDPOINT + "?key=" + key, **kwargs)
        # Mirror the parsed JSON body on request.json, as the view expects.
        if 'data' in kwargs:
            request.json = json.loads(kwargs['data'])
        return request
@data('identify', 'Group', 'Alias', 'Page', 'identify', 'screen')
@expect_failure_with_message(segmentio.WARNING_IGNORED_TYPE)
def test_segmentio_ignore_actions(self, action):
self.post_segmentio_event(action=action)
    @data('edx.bi.some_name', 'EDX.BI.CAPITAL_NAME')
    @expect_failure_with_message(segmentio.WARNING_IGNORED_TYPE)
    def test_segmentio_ignore_names(self, name):
        # Names containing '.bi.' (any casing) are dropped, per
        # TRACKING_SEGMENTIO_DISALLOWED_SUBSTRING_NAMES.
        self.post_segmentio_event(name=name)
    def post_segmentio_event(self, **kwargs):
        """Post a fake segment.io event to the view that processes it"""
        request = self.create_request(
            data=self.create_segmentio_event_json(**kwargs),
            content_type='application/json'
        )
        # track_segmentio_event raises EventValidationError for bad input,
        # which the expect_failure_with_message decorator relies on.
        segmentio.track_segmentio_event(request)
    def create_segmentio_event(self, **kwargs):
        """Populate a fake segment.io event with data of interest"""
        action = kwargs.get('action', 'Track')
        sample_event = {
            "userId": kwargs.get('user_id', USER_ID),
            "event": "Did something",
            "properties": {
                'name': kwargs.get('name', str(sentinel.name)),
                'data': kwargs.get('data', {}),
                'context': {
                    'course_id': kwargs.get('course_id') or '',
                    'app_name': 'edx.mobile.android',
                }
            },
            "channel": 'server',
            "context": {
                "library": {
                    # 'test-app' maps to source 'mobile' via
                    # TRACKING_SEGMENTIO_SOURCE_MAP in the test settings.
                    "name": kwargs.get('library_name', 'test-app'),
                    "version": "unknown"
                },
                "app": {
                    "version": "1.0.1",
                },
                'userAgent': str(sentinel.user_agent),
            },
            "receivedAt": "2014-08-27T16:33:39.100Z",
            "timestamp": "2014-08-27T16:33:39.215Z",
            "type": action.lower(),
            "projectId": "u0j33yjkr8",
            "messageId": "qy52hwp4",
            "version": 2,
            "integrations": {},
            "options": {
                "library": "unknown",
                "providers": {}
            },
            "action": action
        }
        # Caller-provided context entries override/extend the defaults above.
        if 'context' in kwargs:
            sample_event['properties']['context'].update(kwargs['context'])
        return sample_event
def create_segmentio_event_json(self, **kwargs):
"""Return a json string containing a fake segment.io event"""
return json.dumps(self.create_segmentio_event(**kwargs))
    @expect_failure_with_message(segmentio.WARNING_IGNORED_SOURCE)
    def test_segmentio_ignore_unknown_libraries(self):
        # Only libraries listed in TRACKING_SEGMENTIO_SOURCE_MAP are accepted.
        self.post_segmentio_event(library_name='foo')
    @expect_failure_with_message(segmentio.ERROR_USER_NOT_EXIST)
    def test_no_user_for_user_id(self):
        # 40 is a well-formed id, but no User row with that pk was created.
        self.post_segmentio_event(user_id=40)
    @expect_failure_with_message(segmentio.ERROR_INVALID_USER_ID)
    def test_invalid_user_id(self):
        # A non-numeric id cannot be mapped to a User at all.
        self.post_segmentio_event(user_id='foobar')
    @data('foo/bar/baz', 'course-v1:foo+bar+baz')
    def test_success(self, course_id):
        # Happy path: a well-formed event is accepted (200) and re-emitted
        # with the edX tracking-log fields filled in from the payload.
        middleware = TrackMiddleware()
        request = self.create_request(
            data=self.create_segmentio_event_json(data={'foo': 'bar'}, course_id=course_id),
            content_type='application/json'
        )
        User.objects.create(pk=USER_ID, username=str(sentinel.username))
        middleware.process_request(request)
        # The middleware normally emits an event, make sure it doesn't in this case.
        self.assert_no_events_emitted()
        try:
            response = segmentio.segmentio_event(request)
            self.assertEquals(response.status_code, 200)
            expected_event = {
                'accept_language': '',
                'referer': '',
                'username': str(sentinel.username),
                'ip': '',
                'session': '',
                'event_source': 'mobile',
                'event_type': str(sentinel.name),
                'name': str(sentinel.name),
                'event': {'foo': 'bar'},
                'agent': str(sentinel.user_agent),
                'page': None,
                'time': datetime.strptime("2014-08-27T16:33:39.215Z", "%Y-%m-%dT%H:%M:%S.%fZ"),
                'host': 'testserver',
                'context': {
                    'application': {
                        'name': 'edx.mobile.android',
                        'version': '1.0.1',
                    },
                    'user_id': USER_ID,
                    'course_id': course_id,
                    'org_id': u'foo',
                    'path': ENDPOINT,
                    'client': {
                        'library': {
                            'name': 'test-app',
                            'version': 'unknown'
                        },
                        'app': {
                            'version': '1.0.1',
                        },
                    },
                    'received_at': datetime.strptime("2014-08-27T16:33:39.100Z", "%Y-%m-%dT%H:%M:%S.%fZ"),
                },
            }
        finally:
            # Always unwind the middleware so request-scoped state is cleared.
            middleware.process_response(request, None)
        assert_event_matches(expected_event, self.get_event())
    def test_invalid_course_id(self):
        # An unparseable course_id is not fatal: the event is still emitted,
        # just without course/org context.
        request = self.create_request(
            data=self.create_segmentio_event_json(course_id='invalid'),
            content_type='application/json'
        )
        User.objects.create(pk=USER_ID, username=str(sentinel.username))
        segmentio.track_segmentio_event(request)
        self.assert_events_emitted()
    @expect_failure_with_message(segmentio.ERROR_MISSING_NAME)
    def test_missing_name(self):
        # 'name' is nested under properties, so the top-level
        # create_event_without_fields() helper cannot be used here.
        sample_event_raw = self.create_segmentio_event()
        del sample_event_raw['properties']['name']
        request = self.create_request(
            data=json.dumps(sample_event_raw),
            content_type='application/json'
        )
        User.objects.create(pk=USER_ID, username=str(sentinel.username))
        segmentio.track_segmentio_event(request)
    @expect_failure_with_message(segmentio.ERROR_MISSING_DATA)
    def test_missing_data(self):
        # 'data' is nested under properties, so the top-level
        # create_event_without_fields() helper cannot be used here.
        sample_event_raw = self.create_segmentio_event()
        del sample_event_raw['properties']['data']
        request = self.create_request(
            data=json.dumps(sample_event_raw),
            content_type='application/json'
        )
        User.objects.create(pk=USER_ID, username=str(sentinel.username))
        segmentio.track_segmentio_event(request)
@expect_failure_with_message(segmentio.ERROR_MISSING_TIMESTAMP)
def test_missing_timestamp(self):
sample_event_raw = self.create_event_without_fields('timestamp')
request = self.create_request(
data=json.dumps(sample_event_raw),
content_type='application/json'
)
User.objects.create(pk=USER_ID, username=str(sentinel.username))
segmentio.track_segmentio_event(request)
@expect_failure_with_message(segmentio.ERROR_MISSING_RECEIVED_AT)
def test_missing_received_at(self):
sample_event_raw = self.create_event_without_fields('receivedAt')
request = self.create_request(
data=json.dumps(sample_event_raw),
content_type='application/json'
)
User.objects.create(pk=USER_ID, username=str(sentinel.username))
segmentio.track_segmentio_event(request)
def create_event_without_fields(self, *fields):
"""Create a fake event and remove some fields from it"""
event = self.create_segmentio_event()
for field in fields:
if field in event:
del event[field]
return event
    def test_string_user_id(self):
        # Numeric user ids are accepted even when serialized as strings.
        User.objects.create(pk=USER_ID, username=str(sentinel.username))
        self.post_segmentio_event(user_id=str(USER_ID))
        self.assert_events_emitted()
    def test_hiding_failure(self):
        # Invalid events are silently dropped, but the endpoint still answers
        # 200 so segment.io does not keep retrying the delivery.
        sample_event_raw = self.create_event_without_fields('timestamp')
        request = self.create_request(
            data=json.dumps(sample_event_raw),
            content_type='application/json'
        )
        User.objects.create(pk=USER_ID, username=str(sentinel.username))
        response = segmentio.segmentio_event(request)
        self.assertEquals(response.status_code, 200)
        self.assert_no_events_emitted()
    @data(
        ('edx.video.played', 'play_video'),
        ('edx.video.paused', 'pause_video'),
        ('edx.video.stopped', 'stop_video'),
        ('edx.video.loaded', 'load_video'),
        ('edx.video.position.changed', 'seek_video'),
        ('edx.video.transcript.shown', 'show_transcript'),
        ('edx.video.transcript.hidden', 'hide_transcript'),
    )
    @unpack
    def test_video_event(self, name, event_type):
        # Mobile video events must be renamed to their tracking-log
        # event_type, with the payload translated by VideoEventProcessor.
        course_id = 'foo/bar/baz'
        middleware = TrackMiddleware()
        input_payload = {
            'current_time': 132.134456,
            'module_id': 'i4x://foo/bar/baz/some_module',
            'code': 'mobile'
        }
        if name == 'edx.video.loaded':
            # We use the same expected payload for all of these types of events, but the load video event is the only
            # one that is not actually expected to contain a "current time" field. So we remove it from the expected
            # event here.
            del input_payload['current_time']
        request = self.create_request(
            data=self.create_segmentio_event_json(
                name=name,
                data=input_payload,
                context={
                    'open_in_browser_url': 'https://testserver/courses/foo/bar/baz/courseware/Week_1/Activity/2',
                    'course_id': course_id,
                    'application': {
                        'name': 'edx.mobileapp.android',
                        'version': '29',
                        'component': 'videoplayer'
                    }
                }),
            content_type='application/json'
        )
        User.objects.create(pk=USER_ID, username=str(sentinel.username))
        middleware.process_request(request)
        try:
            response = segmentio.segmentio_event(request)
            self.assertEquals(response.status_code, 200)
            expected_event = {
                'accept_language': '',
                'referer': '',
                'username': str(sentinel.username),
                'ip': '',
                'session': '',
                'event_source': 'mobile',
                'event_type': event_type,
                'name': name,
                'agent': str(sentinel.user_agent),
                # Note: 'page' is the browser URL with the trailing position
                # segment stripped.
                'page': 'https://testserver/courses/foo/bar/baz/courseware/Week_1/Activity',
                'time': datetime.strptime("2014-08-27T16:33:39.215Z", "%Y-%m-%dT%H:%M:%S.%fZ"),
                'host': 'testserver',
                'context': {
                    'user_id': USER_ID,
                    'course_id': course_id,
                    'org_id': 'foo',
                    'path': ENDPOINT,
                    'client': {
                        'library': {
                            'name': 'test-app',
                            'version': 'unknown'
                        },
                        'app': {
                            'version': '1.0.1',
                        },
                    },
                    'application': {
                        'name': 'edx.mobileapp.android',
                        'version': '29',
                        'component': 'videoplayer'
                    },
                    'received_at': datetime.strptime("2014-08-27T16:33:39.100Z", "%Y-%m-%dT%H:%M:%S.%fZ"),
                },
                'event': {
                    'currentTime': 132.134456,
                    'id': 'i4x-foo-bar-baz-some_module',
                    'code': 'mobile'
                }
            }
            if name == 'edx.video.loaded':
                # We use the same expected payload for all of these types of events, but the load video event is the
                # only one that is not actually expected to contain a "current time" field. So we remove it from the
                # expected event here.
                del expected_event['event']['currentTime']
        finally:
            middleware.process_response(request, None)
        actual_event = self.get_event()
        assert_event_matches(expected_event, actual_event)
    @data(
        # Verify positive slide case. Verify slide to onSlideSeek. Verify edx.video.seeked emitted from iOS v1.0.02 is changed to edx.video.position.changed.
        (1, 1, "seek_type", "slide", "onSlideSeek", "edx.video.seeked", "edx.video.position.changed", 'edx.mobileapp.iOS', '1.0.02'),
        # Verify negative slide case. Verify slide to onSlideSeek. Verify edx.video.seeked to edx.video.position.changed.
        (-2, -2, "seek_type", "slide", "onSlideSeek", "edx.video.seeked", "edx.video.position.changed", 'edx.mobileapp.iOS', '1.0.02'),
        # Verify +30 is changed to -30 which is incorrectly emitted in iOS v1.0.02. Verify skip to onSkipSeek
        (30, -30, "seek_type", "skip", "onSkipSeek", "edx.video.position.changed", "edx.video.position.changed", 'edx.mobileapp.iOS', '1.0.02'),
        # Verify the correct case of -30 is also handled as well. Verify skip to onSkipSeek
        (-30, -30, "seek_type", "skip", "onSkipSeek", "edx.video.position.changed", "edx.video.position.changed", 'edx.mobileapp.iOS', '1.0.02'),
        # Verify positive slide case where onSkipSeek is changed to onSlideSkip. Verify edx.video.seeked emitted from Android v1.0.02 is changed to edx.video.position.changed.
        (1, 1, "type", "onSkipSeek", "onSlideSeek", "edx.video.seeked", "edx.video.position.changed", 'edx.mobileapp.android', '1.0.02'),
        # Verify positive slide case where onSkipSeek is changed to onSlideSkip. Verify edx.video.seeked emitted from Android v1.0.02 is changed to edx.video.position.changed.
        (-2, -2, "type", "onSkipSeek", "onSlideSeek", "edx.video.seeked", "edx.video.position.changed", 'edx.mobileapp.android', '1.0.02'),
        # Verify positive skip case where onSkipSeek is not changed and does not become negative.
        (30, 30, "type", "onSkipSeek", "onSkipSeek", "edx.video.position.changed", "edx.video.position.changed", 'edx.mobileapp.android', '1.0.02'),
        # Verify positive skip case where onSkipSeek is not changed.
        (-30, -30, "type", "onSkipSeek", "onSkipSeek", "edx.video.position.changed", "edx.video.position.changed", 'edx.mobileapp.android', '1.0.02')
    )
    @unpack
    def test_previous_builds(self,
                             requested_skip_interval,
                             expected_skip_interval,
                             seek_type_key,
                             seek_type,
                             expected_seek_type,
                             name,
                             expected_name,
                             platform,
                             version,
                             ):
        """
        Test backwards compatibility of previous app builds
        iOS version 1.0.02: Incorrectly emits the skip back 30 seconds as +30
        instead of -30.
        Android version 1.0.02: Skip and slide were both being returned as a
        skip. Skip or slide is determined by checking if the skip time is == -30
        Additionally, for both of the above mentioned versions, edx.video.seeked
        was sent instead of edx.video.position.changed
        """
        course_id = 'foo/bar/baz'
        middleware = TrackMiddleware()
        # Seek payload exactly as emitted by the old app builds.
        input_payload = {
            "code": "mobile",
            "new_time": 89.699177437,
            "old_time": 119.699177437,
            seek_type_key: seek_type,
            "requested_skip_interval": requested_skip_interval,
            'module_id': 'i4x://foo/bar/baz/some_module',
        }
        request = self.create_request(
            data=self.create_segmentio_event_json(
                name=name,
                data=input_payload,
                context={
                    'open_in_browser_url': 'https://testserver/courses/foo/bar/baz/courseware/Week_1/Activity/2',
                    'course_id': course_id,
                    'application': {
                        'name': platform,
                        'version': version,
                        'component': 'videoplayer'
                    }
                },
            ),
            content_type='application/json'
        )
        User.objects.create(pk=USER_ID, username=str(sentinel.username))
        middleware.process_request(request)
        try:
            response = segmentio.segmentio_event(request)
            self.assertEquals(response.status_code, 200)
            expected_event = {
                'accept_language': '',
                'referer': '',
                'username': str(sentinel.username),
                'ip': '',
                'session': '',
                'event_source': 'mobile',
                'event_type': "seek_video",
                'name': expected_name,
                'agent': str(sentinel.user_agent),
                'page': 'https://testserver/courses/foo/bar/baz/courseware/Week_1/Activity',
                'time': datetime.strptime("2014-08-27T16:33:39.215Z", "%Y-%m-%dT%H:%M:%S.%fZ"),
                'host': 'testserver',
                'context': {
                    'user_id': USER_ID,
                    'course_id': course_id,
                    'org_id': 'foo',
                    'path': ENDPOINT,
                    'client': {
                        'library': {
                            'name': 'test-app',
                            'version': 'unknown'
                        },
                        'app': {
                            'version': '1.0.1',
                        },
                    },
                    'application': {
                        'name': platform,
                        'version': version,
                        'component': 'videoplayer'
                    },
                    'received_at': datetime.strptime("2014-08-27T16:33:39.100Z", "%Y-%m-%dT%H:%M:%S.%fZ"),
                },
                'event': {
                    "code": "mobile",
                    "new_time": 89.699177437,
                    "old_time": 119.699177437,
                    "type": expected_seek_type,
                    "requested_skip_interval": expected_skip_interval,
                    'id': 'i4x-foo-bar-baz-some_module',
                }
            }
        finally:
            middleware.process_response(request, None)
        actual_event = self.get_event()
        assert_event_matches(expected_event, actual_event)
| agpl-3.0 |
chauhanhardik/populo | common/static/js/vendor/mathjax-MathJax-c9db6ac/docs/source/conf.py | 104 | 6453 | # -*- coding: utf-8 -*-
#
# MathJax documentation build configuration file, created by
# sphinx-quickstart on Sun May 16 23:18:19 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MathJax'
copyright = u'2012 Design Science'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
#highlight_language = 'javascript'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# 'mjtheme' is a custom theme shipped alongside this file (see html_theme_path).
html_theme = 'mjtheme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['.']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
html_show_sourcelink = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'MathJaxdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    (
        'index',
        'MathJax.tex',
        u'MathJax Documentation',
        u'Davide Cervone, Casey Stark, Robert Miner, Paul Topping',
        'manual',
    ),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
| agpl-3.0 |
sauloal/cnidaria | scripts/venv/lib/python2.7/site-packages/setuptools/archive_util.py | 520 | 6609 | """Utilities for extracting common archive formats"""
# Public API of this module; unpack_archive() is the main entry point.
__all__ = [
    "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
    "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
]
import zipfile
import tarfile
import os
import shutil
import posixpath
import contextlib
from pkg_resources import ensure_directory, ContextualZipFile
from distutils.errors import DistutilsError
class UnrecognizedFormat(DistutilsError):
    """Raised when an extraction driver cannot handle the archive type."""
def default_filter(src, dst):
    """The default progress/filter callback: keep every archive member.

    Returns ``dst`` (the proposed extraction path) unchanged — not ``True``
    as the old docstring claimed; ``unpack_*`` use the return value as the
    actual destination, with a falsy value meaning "skip".

    :param src: '/'-separated path of the member inside the archive (unused)
    :param dst: filesystem path the member would be extracted to
    """
    return dst
def unpack_archive(filename, extract_dir, progress_filter=default_filter,
                   drivers=None):
    """Unpack ``filename`` into ``extract_dir`` or raise ``UnrecognizedFormat``.

    ``progress_filter`` is called with each member's '/'-separated source
    path and the filesystem path it is about to be extracted to; it returns
    the (possibly altered) destination, or ``None`` to skip that member.  It
    can therefore serve both progress reporting and filtering.

    ``drivers``, if supplied, must be a non-empty sequence of callables with
    the same signature as this function (minus ``drivers``); each should
    raise ``UnrecognizedFormat`` for archives it does not support.  They are
    tried in order, and if every driver declines, ``UnrecognizedFormat`` is
    raised.  When not supplied, the module-level ``extraction_drivers`` are
    used (directory, zipfile, tarfile — in that order).
    """
    candidates = drivers if drivers else extraction_drivers
    for driver in candidates:
        try:
            driver(filename, extract_dir, progress_filter)
        except UnrecognizedFormat:
            # This driver declined; try the next one.
            continue
        # Driver succeeded (any other exception propagates to the caller).
        return
    raise UnrecognizedFormat(
        "Not a recognized archive type: %s" % filename
    )
def unpack_directory(filename, extract_dir, progress_filter=default_filter):
    """"Unpack" an existing directory using the archive-driver interface.

    Raises ``UnrecognizedFormat`` if ``filename`` is not a directory.
    """
    if not os.path.isdir(filename):
        raise UnrecognizedFormat("%s is not a directory" % filename)
    # Map each directory we will visit to its (archive-style '/'-separated
    # prefix, destination directory) pair; os.walk visits parents first, so
    # entries are always present before they are needed.
    mapping = {filename: ('', extract_dir)}
    for base, dirs, files in os.walk(filename):
        prefix, dest = mapping[base]
        for name in dirs:
            mapping[os.path.join(base, name)] = (
                prefix + name + '/', os.path.join(dest, name))
        for name in files:
            target = progress_filter(prefix + name, os.path.join(dest, name))
            if not target:
                # Skipped by the filter callback.
                continue
            ensure_directory(target)
            source = os.path.join(base, name)
            shutil.copyfile(source, target)
            shutil.copystat(source, target)
def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack the zip archive ``filename`` into ``extract_dir``.

    Raises ``UnrecognizedFormat`` when ``zipfile.is_zipfile()`` rejects the
    file.  See ``unpack_archive()`` for the ``progress_filter`` contract.
    """
    if not zipfile.is_zipfile(filename):
        raise UnrecognizedFormat("%s is not a zip file" % (filename,))
    with ContextualZipFile(filename) as archive:
        for info in archive.infolist():
            name = info.filename
            # Refuse absolute paths and parent-directory escapes.
            if name.startswith('/') or '..' in name.split('/'):
                continue
            target = progress_filter(
                name, os.path.join(extract_dir, *name.split('/')))
            if not target:
                continue
            if name.endswith('/'):
                # Directory entry: just make sure it exists.
                ensure_directory(target)
                continue
            # Regular file entry.
            ensure_directory(target)
            with open(target, 'wb') as f:
                f.write(archive.read(info.filename))
            # Restore Unix permission bits stashed in the high word of
            # external_attr, when present.
            unix_attributes = info.external_attr >> 16
            if unix_attributes:
                os.chmod(target, unix_attributes)
def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
    Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
    by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError:
        raise UnrecognizedFormat(
            "%s is not a compressed or uncompressed tar file" % (filename,)
        )
    with contextlib.closing(tarobj):
        # don't do any chowning!
        tarobj.chown = lambda *args: None
        for member in tarobj:
            name = member.name
            # don't extract absolute paths or ones with .. in them
            if not name.startswith('/') and '..' not in name.split('/'):
                prelim_dst = os.path.join(extract_dir, *name.split('/'))
                # resolve any links and extract the link targets as normal
                # files; a chain of links is followed until a real member
                # (or a dangling target, i.e. None) is reached
                while member is not None and (member.islnk() or member.issym()):
                    linkpath = member.linkname
                    if member.issym():
                        # symlink targets are relative to the link's directory
                        base = posixpath.dirname(member.name)
                        linkpath = posixpath.join(base, linkpath)
                        linkpath = posixpath.normpath(linkpath)
                    member = tarobj._getmember(linkpath)
                # only files and directories are extracted; devices, fifos
                # and dangling links are silently skipped
                if member is not None and (member.isfile() or member.isdir()):
                    final_dst = progress_filter(name, prelim_dst)
                    if final_dst:
                        if final_dst.endswith(os.sep):
                            final_dst = final_dst[:-1]
                        try:
                            # XXX Ugh -- reaches into tarfile's private API
                            tarobj._extract_member(member, final_dst)
                        except tarfile.ExtractError:
                            # chown/chmod/mkfifo/mknode/makedev failed
                            pass
    return True
# Default driver order for unpack_archive(): cheapest check (directory) first.
extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
| mit |
ahhda/sympy | sympy/printing/preview.py | 29 | 12243 | from __future__ import print_function, division
from os.path import join
from itertools import chain
import tempfile
import shutil
import sys
import io
from io import BytesIO
try:
from subprocess import STDOUT, CalledProcessError
from sympy.core.compatibility import check_output
except ImportError:
pass
from sympy.core.compatibility import unicode, u_decode
from sympy.utilities.exceptions import SymPyDeprecationWarning
from sympy.utilities.misc import find_executable
from .latex import latex
from sympy.utilities.decorator import doctest_depends_on
@doctest_depends_on(exe=('latex', 'dvipng'), modules=('pyglet',),
                    disable_viewers=('evince', 'gimp', 'superior-dvi-viewer'))
def preview(expr, output='png', viewer=None, euler=True, packages=(),
            filename=None, outputbuffer=None, preamble=None, dvioptions=None,
            outputTexFile=None, **latex_settings):
    r"""
    View expression or LaTeX markup in PNG, DVI, PostScript or PDF form.
    If the expr argument is an expression, it will be exported to LaTeX and
    then compiled using the available TeX distribution. The first argument,
    'expr', may also be a LaTeX string. The function will then run the
    appropriate viewer for the given output format or use the user defined
    one. By default png output is generated.
    By default pretty Euler fonts are used for typesetting (they were used to
    typeset the well known "Concrete Mathematics" book). For that to work, you
    need the 'eulervm.sty' LaTeX style (in Debian/Ubuntu, install the
    texlive-fonts-extra package). If you prefer default AMS fonts or your
    system lacks 'eulervm' LaTeX package then unset the 'euler' keyword
    argument.
    To use viewer auto-detection, lets say for 'png' output, issue
    >>> from sympy import symbols, preview, Symbol
    >>> x, y = symbols("x,y")
    >>> preview(x + y, output='png')
    This will choose 'pyglet' by default. To select a different one, do
    >>> preview(x + y, output='png', viewer='gimp')
    The 'png' format is considered special. For all other formats the rules
    are slightly different. As an example we will take 'dvi' output format. If
    you would run
    >>> preview(x + y, output='dvi')
    then 'view' will look for available 'dvi' viewers on your system
    (predefined in the function, so it will try evince, first, then kdvi and
    xdvi). If nothing is found you will need to set the viewer explicitly.
    >>> preview(x + y, output='dvi', viewer='superior-dvi-viewer')
    This will skip auto-detection and will run user specified
    'superior-dvi-viewer'. If 'view' fails to find it on your system it will
    gracefully raise an exception.
    You may also enter 'file' for the viewer argument. Doing so will cause
    this function to return a file object in read-only mode, if 'filename'
    is unset. However, if it was set, then 'preview' writes the genereted
    file to this filename instead.
    There is also support for writing to a BytesIO like object, which needs
    to be passed to the 'outputbuffer' argument.
    >>> from io import BytesIO
    >>> obj = BytesIO()
    >>> preview(x + y, output='png', viewer='BytesIO',
    ...     outputbuffer=obj)
    The LaTeX preamble can be customized by setting the 'preamble' keyword
    argument. This can be used, e.g., to set a different font size, use a
    custom documentclass or import certain set of LaTeX packages.
    >>> preamble = "\\documentclass[10pt]{article}\n" \
    ... "\\usepackage{amsmath,amsfonts}\\begin{document}"
    >>> preview(x + y, output='png', preamble=preamble)
    If the value of 'output' is different from 'dvi' then command line
    options can be set ('dvioptions' argument) for the execution of the
    'dvi'+output conversion tool. These options have to be in the form of a
    list of strings (see subprocess.Popen).
    Additional keyword args will be passed to the latex call, e.g., the
    symbol_names flag.
    >>> phidd = Symbol('phidd')
    >>> preview(phidd, symbol_names={phidd:r'\ddot{\varphi}'})
    For post-processing the generated TeX File can be written to a file by
    passing the desired filename to the 'outputTexFile' keyword
    argument. To write the TeX code to a file named
    "sample.tex" and run the default png viewer to display the resulting
    bitmap, do
    >>> preview(x + y, outputTexFile="sample.tex")
    """
    # Viewers handled in-process rather than by spawning an executable.
    special = [ 'pyglet' ]
    if viewer is None:
        if output == "png":
            viewer = "pyglet"
        else:
            # sorted in order from most pretty to most ugly
            # very discussable, but indeed 'gv' looks awful :)
            # TODO add candidates for windows to list
            candidates = {
                "dvi": [ "evince", "okular", "kdvi", "xdvi" ],
                "ps": [ "evince", "okular", "gsview", "gv" ],
                "pdf": [ "evince", "okular", "kpdf", "acroread", "xpdf", "gv" ],
            }
            try:
                # First candidate found on PATH wins.
                for candidate in candidates[output]:
                    path = find_executable(candidate)
                    if path is not None:
                        viewer = path
                        break
                else:
                    raise SystemError(
                        "No viewers found for '%s' output format." % output)
            except KeyError:
                raise SystemError("Invalid output format: %s" % output)
    else:
        # Validate the explicitly requested viewer before doing any work.
        if viewer == "file":
            if filename is None:
                SymPyDeprecationWarning(feature="Using viewer=\"file\" without a "
                    "specified filename", deprecated_since_version="0.7.3",
                    useinstead="viewer=\"file\" and filename=\"desiredname\"",
                    issue=7018).warn()
        elif viewer == "StringIO":
            SymPyDeprecationWarning(feature="The preview() viewer StringIO",
                useinstead="BytesIO", deprecated_since_version="0.7.4",
                issue=7083).warn()
            viewer = "BytesIO"
            if outputbuffer is None:
                raise ValueError("outputbuffer has to be a BytesIO "
                                 "compatible object if viewer=\"StringIO\"")
        elif viewer == "BytesIO":
            if outputbuffer is None:
                raise ValueError("outputbuffer has to be a BytesIO "
                                 "compatible object if viewer=\"BytesIO\"")
        elif viewer not in special and not find_executable(viewer):
            raise SystemError("Unrecognized viewer: %s" % viewer)
    if preamble is None:
        # Build a default preamble; 'euler' adds the Euler font package.
        actual_packages = packages + ("amsmath", "amsfonts")
        if euler:
            actual_packages += ("euler",)
        package_includes = "\n" + "\n".join(["\\usepackage{%s}" % p
                                             for p in actual_packages])
        preamble = r"""\documentclass[12pt]{article}
\pagestyle{empty}
%s
\begin{document}
""" % (package_includes)
    else:
        if len(packages) > 0:
            raise ValueError("The \"packages\" keyword must not be set if a "
                             "custom LaTeX preamble was specified")
    # '%s' placeholder is filled with the rendered expression below.
    latex_main = preamble + '\n%s\n\n' + r"\end{document}"
    if isinstance(expr, str):
        latex_string = expr
    else:
        latex_string = latex(expr, mode='inline', **latex_settings)
    # All intermediate files live in a throwaway working directory which is
    # removed in the 'finally' clause below.
    try:
        workdir = tempfile.mkdtemp()
        with io.open(join(workdir, 'texput.tex'), 'w', encoding='utf-8') as fh:
            fh.write(unicode(latex_main) % u_decode(latex_string))
        if outputTexFile is not None:
            shutil.copyfile(join(workdir, 'texput.tex'), outputTexFile)
        if not find_executable('latex'):
            raise RuntimeError("latex program is not installed")
        try:
            check_output(['latex', '-halt-on-error', '-interaction=nonstopmode',
                          'texput.tex'], cwd=workdir, stderr=STDOUT)
        except CalledProcessError as e:
            raise RuntimeError(
                "'latex' exited abnormally with the following output:\n%s" %
                e.output)
        if output != "dvi":
            # Convert texput.dvi with dvips/dvipdf/dvipng/dvisvgm as needed.
            defaultoptions = {
                "ps": [],
                "pdf": [],
                "png": ["-T", "tight", "-z", "9", "--truecolor"],
                "svg": ["--no-fonts"],
            }
            commandend = {
                "ps": ["-o", "texput.ps", "texput.dvi"],
                "pdf": ["texput.dvi", "texput.pdf"],
                "png": ["-o", "texput.png", "texput.dvi"],
                "svg": ["-o", "texput.svg", "texput.dvi"],
            }
            if output == "svg":
                cmd = ["dvisvgm"]
            else:
                cmd = ["dvi" + output]
            if not find_executable(cmd[0]):
                raise RuntimeError("%s is not installed" % cmd[0])
            try:
                if dvioptions is not None:
                    cmd.extend(dvioptions)
                else:
                    cmd.extend(defaultoptions[output])
                cmd.extend(commandend[output])
            except KeyError:
                raise SystemError("Invalid output format: %s" % output)
            try:
                check_output(cmd, cwd=workdir, stderr=STDOUT)
            except CalledProcessError as e:
                raise RuntimeError(
                    "'%s' exited abnormally with the following output:\n%s" %
                    (' '.join(cmd), e.output))
        src = "texput.%s" % (output)
        if viewer == "file":
            if filename is None:
                buffer = BytesIO()
                with open(join(workdir, src), 'rb') as fh:
                    buffer.write(fh.read())
                return buffer
            else:
                shutil.move(join(workdir,src), filename)
        elif viewer == "BytesIO":
            with open(join(workdir, src), 'rb') as fh:
                outputbuffer.write(fh.read())
        elif viewer == "pyglet":
            try:
                from pyglet import window, image, gl
                from pyglet.window import key
            except ImportError:
                raise ImportError("pyglet is required for preview.\n visit http://www.pyglet.org/")
            if output == "png":
                from pyglet.image.codecs.png import PNGImageDecoder
                img = image.load(join(workdir, src), decoder=PNGImageDecoder())
            else:
                raise SystemError("pyglet preview works only for 'png' files.")
            offset = 25
            config = gl.Config(double_buffer=False)
            win = window.Window(
                width=img.width + 2*offset,
                height=img.height + 2*offset,
                caption="sympy",
                resizable=False,
                config=config
            )
            win.set_vsync(False)
            # Minimal event loop: close on Q/Escape, redraw the image centred.
            try:
                def on_close():
                    win.has_exit = True
                win.on_close = on_close
                def on_key_press(symbol, modifiers):
                    if symbol in [key.Q, key.ESCAPE]:
                        on_close()
                win.on_key_press = on_key_press
                def on_expose():
                    gl.glClearColor(1.0, 1.0, 1.0, 1.0)
                    gl.glClear(gl.GL_COLOR_BUFFER_BIT)
                    img.blit(
                        (win.width - img.width) / 2,
                        (win.height - img.height) / 2
                    )
                win.on_expose = on_expose
                while not win.has_exit:
                    win.dispatch_events()
                    win.flip()
            except KeyboardInterrupt:
                pass
            win.close()
        else:
            # External viewer executable; run it on the produced file.
            try:
                check_output([viewer, src], cwd=workdir, stderr=STDOUT)
            except CalledProcessError as e:
                raise RuntimeError(
                    "'%s %s' exited abnormally with the following output:\n%s" %
                    (viewer, src, e.output))
    finally:
        try:
            shutil.rmtree(workdir) # delete directory
        except OSError as e:
            if e.errno != 2: # code 2 - no such file or directory
                raise
| bsd-3-clause |
mpasternak/pyglet-fix-issue-552 | pyglet/app/__init__.py | 14 | 5406 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''Application-wide functionality.
Most applications need only call `run` after creating one or more windows
to begin processing events. For example, a simple application consisting of
one window is::
import pyglet
win = pyglet.window.Window()
pyglet.app.run()
To handle events on the main event loop, instantiate it manually. The
following example exits the application as soon as any window is closed (the
default policy is to wait until all windows are closed)::
event_loop = pyglet.app.EventLoop()
@event_loop.event
def on_window_close(window):
event_loop.exit()
:since: pyglet 1.1
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import sys
import weakref
_is_epydoc = hasattr(sys, 'is_epydoc') and sys.is_epydoc
class AppException(Exception):
    '''Base class for exceptions raised by the pyglet application machinery.'''
class WeakSet(object):
    '''Set of objects, referenced weakly.

    Adding an object to this set does not prevent it from being garbage
    collected.  Upon being garbage collected, the object is automatically
    removed from the set.
    '''
    def __init__(self):
        # Members are the keys; the stored value (True) is meaningless.
        self._dict = weakref.WeakKeyDictionary()

    def add(self, value):
        '''Add `value` to the set (held by weak reference only).'''
        self._dict[value] = True

    def remove(self, value):
        '''Remove `value` from the set; raises KeyError if absent.'''
        del self._dict[value]

    def __iter__(self):
        # Snapshot the keys before yielding: iterating a live
        # WeakKeyDictionary view while members are garbage collected can
        # raise "dictionary changed size during iteration" on Python 3.
        # The snapshot also keeps each member alive while it is yielded.
        for key in list(self._dict.keys()):
            yield key

    def __contains__(self, other):
        return other in self._dict

    def __len__(self):
        return len(self._dict)
#: Set of all open displays. Instances of `Display` are automatically added
#: to this set upon construction. The set uses weak references, so displays
#: are removed from the set when they are no longer referenced.
#:
#: :deprecated: Use `pyglet.canvas.get_display`.
#:
#: :type: `WeakSet`
displays = WeakSet()
#: Set of all open windows (including invisible windows). Instances of
#: `Window` are automatically added to this set upon construction. The set
#: uses weak references, so windows are removed from the set when they are no
#: longer referenced or are closed explicitly.
#:
#: :type: `WeakSet`
windows = WeakSet()
def run():
    '''Begin processing events, scheduled functions and window updates.
    This is a convenience function, equivalent to::
        pyglet.app.event_loop.run()
    '''
    # Delegates to the module-level `event_loop` created at import time below.
    event_loop.run()
def exit():
    '''Exit the application event loop.
    Causes the application event loop to finish, if an event loop is currently
    running. The application may not necessarily exit (for example, there may
    be additional code following the `run` invocation).
    This is a convenience function, equivalent to::
        event_loop.exit()
    '''
    # Delegates to the module-level `event_loop`; shadows the builtin exit()
    # within this module, which is intentional for the pyglet.app API.
    event_loop.exit()
from pyglet.app.base import EventLoop
if _is_epydoc:
from pyglet.app.base import PlatformEventLoop
else:
if sys.platform == 'darwin':
from pyglet import options as pyglet_options
if pyglet_options['darwin_cocoa']:
from pyglet.app.cocoa import CocoaEventLoop as PlatformEventLoop
else:
from pyglet.app.carbon import CarbonEventLoop as PlatformEventLoop
elif sys.platform in ('win32', 'cygwin'):
from pyglet.app.win32 import Win32EventLoop as PlatformEventLoop
else:
from pyglet.app.xlib import XlibEventLoop as PlatformEventLoop
#: The global event loop. Applications can replace this with their own
#: subclass of `pyglet.app.base.EventLoop` before calling `EventLoop.run`.
#:
#: :type: `EventLoop`
event_loop = EventLoop()
#: The platform-dependent event loop. Applications must not subclass or
#: replace this object.
#:
#: :since: pyglet 1.2
#:
#: :type: `PlatformEventLoop`
platform_event_loop = PlatformEventLoop()
| bsd-3-clause |
petezybrick/iote2e | iote2e-pyclient/src/iote2epyclient/test/testhatsensors.py | 1 | 3137 | # Copyright 2016, 2017 Peter Zybrick and others.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
testhatsensors - Test RPi HAT sensors
:author: Pete Zybrick
:contact: pzybrick@gmail.com
:version: 1.0.0
"""
import sys
import datetime
from sense_hat import SenseHat
from time import sleep
def main(conf_file):
    """Configure logging from *conf_file* and run the selected Sense HAT demo."""
    import logging.config
    logging.config.fileConfig( conf_file, disable_existing_loggers=False)
    logger = logging.getLogger(__name__)
    logger.info('Starting')
    sense = SenseHat()
    # One demo is enabled at a time; currently only the temperature demo runs.
    #showMessages(sense)
    #showLetters(sense)
    #showPixels(sense)
    showTemperature(sense)
    #showJoystickPoll(sense)
    #showJoystickWait(sense)
    sense.clear()
    logger.info('Done')
def showJoystickPoll(sense):
    """Poll the joystick every 250 ms and print queued events; loops forever."""
    while True:
        for event in sense.stick.get_events():
            print("The joystick was {} {}".format(event.action,event.direction))
        sleep(.25)
        print('poll')
def showJoystickWait(sense):
    """Block on joystick events; print "1"/"0" on middle-button press/release.
    Loops forever; events for other directions are ignored.
    """
    while True:
        event = sense.stick.wait_for_event()
        if "middle" == event.direction:
            if "pressed" == event.action:
                print("1");
            elif "released" == event.action:
                print("0");
        #print("The joystick was {} {}".format(event.action,event.direction))
def showTemperature(sense):
    """Sample the HAT temperature five times; print and scroll each reading."""
    for _ in range(5):
        temp_c = round(sense.get_temperature(), 2)
        print(temp_c)
        sense.show_message("{}".format(temp_c), scroll_speed=.1)
        sleep(1)
def showMessages(sense):
    """Scroll a fixed demo message across the LED matrix."""
    sense.show_message("Watson, come here. I need you.", scroll_speed=.025);
def showLetters(sense):
    """Flash the letters R, G and B on the LED matrix, each in its own colour."""
    letter_colours = (
        ("R", [255, 0, 0]),
        ("G", [0, 255, 0]),
        ("B", [0, 0, 255]),
    )
    for letter, colour in letter_colours:
        sense.show_letter(letter, text_colour=colour, back_colour=[0, 0, 0])
        sleep(1.5)
def showPixels(sense):
    """Draw a fixed 8x8 blue/yellow pattern, then spin it through two full
    rotations (four 90-degree steps each, 2 s per step)."""
    b = [0,0,255]    # blue
    y = [255,255,0]  # yellow
    e = [0,0,0]      # off / black
    image = [
    b,b,e,b,b,e,y,y,
    b,b,e,b,b,e,y,y,
    e,e,e,e,e,e,e,e,
    b,b,e,b,b,e,b,b,
    b,b,e,b,b,e,b,b,
    e,e,e,e,e,e,e,e,
    b,b,e,b,b,e,b,b,
    b,b,e,b,b,e,b,b
    ]
    sense.set_pixels(image)
    angles = [0,90,180,270,0,90,180,270]
    for angle in angles:
        sense.set_rotation(angle)
        sleep(2)
if __name__ == '__main__':
    # NOTE(review): sys.argv is overwritten with a hard-coded test path, so the
    # length check below can never fail; remove the assignment for real CLI use.
    sys.argv = ['testhatsensors.py', '/home/pete/iote2epyclient/log-configs/client_consoleonly.conf']
    if( len(sys.argv) < 2 ):
        print('Invalid format, execution cancelled')
        print('Correct format: python <consoleConfigFile.conf>')
        sys.exit(8)
    main(sys.argv[1])
| apache-2.0 |
Elico-Corp/odoo_OCB | addons/point_of_sale/report/pos_report.py | 44 | 5423 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.osv import fields, osv
from openerp import tools
class report_transaction_pos(osv.osv):
    # Read-only reporting model backed by a SQL view (_auto=False means no
    # table is created by the ORM; init() builds the view instead).
    _name = "report.transaction.pos"
    _description = "transaction for the pos"
    _auto = False
    _columns = {
        'date_create': fields.char('Date', size=16, readonly=True),
        'journal_id': fields.many2one('account.journal', 'Sales Journal', readonly=True),
        'jl_id': fields.many2one('account.journal', 'Cash Journals', readonly=True),
        'user_id': fields.many2one('res.users', 'User', readonly=True),
        'no_trans': fields.float('Number of Transaction', readonly=True),
        'amount': fields.float('Amount', readonly=True),
        'invoice_id': fields.float('Nbr Invoice', readonly=True),
        'invoice_am': fields.float('Invoice Amount', readonly=True),
        'product_nb': fields.float('Product Nb.', readonly=True),
        'disc': fields.float('Disc.', readonly=True),
    }
    def init(self, cr):
        # (Re)create the view aggregating POS bank-statement lines per
        # user / sales journal / cash journal / day.
        tools.drop_view_if_exists(cr, 'report_transaction_pos')
        cr.execute("""
            create or replace view report_transaction_pos as (
                select
                    min(absl.id) as id,
                    count(absl.id) as no_trans,
                    sum(absl.amount) as amount,
                    sum((100.0-line.discount) * line.price_unit * line.qty / 100.0) as disc,
                    to_char(date_trunc('day',absl.create_date),'YYYY-MM-DD')::text as date_create,
                    po.user_id as user_id,
                    po.sale_journal as journal_id,
                    abs.journal_id as jl_id,
                    count(po.invoice_id) as invoice_id,
                    count(p.id) as product_nb
                from
                    account_bank_statement_line as absl,
                    account_bank_statement as abs,
                    product_product as p,
                    pos_order_line as line,
                    pos_order as po
                where
                    absl.pos_statement_id = po.id and
                    line.order_id=po.id and
                    line.product_id=p.id and
                    absl.statement_id=abs.id
                group by
                    po.user_id,po.sale_journal, abs.journal_id,
                    to_char(date_trunc('day',absl.create_date),'YYYY-MM-DD')::text
            )
            """)
    #to_char(date_trunc('day',absl.create_date),'YYYY-MM-DD')
    #to_char(date_trunc('day',absl.create_date),'YYYY-MM-DD')::text as date_create,
class report_sales_by_user_pos(osv.osv):
    # SQL-view-backed reporting model: POS sales totals per user per day.
    _name = "report.sales.by.user.pos"
    _description = "Sales by user"
    _auto = False
    _columns = {
        'date_order': fields.date('Order Date',required=True, select=True),
        'amount': fields.float('Total', readonly=True, select=True),
        'qty': fields.float('Quantity', readonly=True, select=True),
        'user_id': fields.many2one('res.users', 'User', readonly=True, select=True),
    }
    def init(self, cr):
        # (Re)create the daily-granularity aggregation view.
        tools.drop_view_if_exists(cr, 'report_sales_by_user_pos')
        cr.execute("""
            create or replace view report_sales_by_user_pos as (
                select
                    min(po.id) as id,
                    to_char(date_trunc('day',po.date_order),'YYYY-MM-DD')::text as date_order,
                    po.user_id as user_id,
                    sum(pol.qty)as qty,
                    sum((pol.price_unit * pol.qty * (1 - (pol.discount) / 100.0))) as amount
                from
                    pos_order as po,pos_order_line as pol,product_product as pp,product_template as pt
                where
                    pt.id=pp.product_tmpl_id and pp.id=pol.product_id and po.id = pol.order_id
                group by
                    to_char(date_trunc('day',po.date_order),'YYYY-MM-DD')::text,
                    po.user_id
            )
            """)
class report_sales_by_user_pos_month(osv.osv):
    # Same aggregation as report.sales.by.user.pos, but truncated to month.
    _name = "report.sales.by.user.pos.month"
    _description = "Sales by user monthly"
    _auto = False
    _columns = {
        'date_order': fields.date('Order Date',required=True, select=True),
        'amount': fields.float('Total', readonly=True, select=True),
        'qty': fields.float('Quantity', readonly=True, select=True),
        'user_id': fields.many2one('res.users', 'User', readonly=True, select=True),
    }
    def init(self, cr):
        # (Re)create the monthly-granularity aggregation view.
        tools.drop_view_if_exists(cr, 'report_sales_by_user_pos_month')
        cr.execute("""
            create or replace view report_sales_by_user_pos_month as (
                select
                    min(po.id) as id,
                    to_char(date_trunc('month',po.date_order),'YYYY-MM-DD')::text as date_order,
                    po.user_id as user_id,
                    sum(pol.qty)as qty,
                    sum((pol.price_unit * pol.qty * (1 - (pol.discount) / 100.0))) as amount
                from
                    pos_order as po,pos_order_line as pol,product_product as pp,product_template as pt
                where
                    pt.id=pp.product_tmpl_id and pp.id=pol.product_id and po.id = pol.order_id
                group by
                    to_char(date_trunc('month',po.date_order),'YYYY-MM-DD')::text,
                    po.user_id
            )
            """)
| agpl-3.0 |
tensorflow/ngraph-bridge | test/python/test_fusedConv2D.py | 1 | 5531 | # ==============================================================================
# Copyright 2018-2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""nGraph TensorFlow bridge fusedConv2D tests.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pytest
import platform
import tensorflow as tf
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import nn_impl
from tensorflow.python.ops import array_ops
from common import NgraphTest
from tensorflow.python.framework import dtypes
import numpy as np
class TestFusedConv2D(NgraphTest):
    """Compare fused Conv2D patterns (bias-add, batch-norm, squeeze) between
    the nGraph bridge and stock TensorFlow on random NHWC inputs."""
    # Fixed shapes: NHWC input, HWIO filter, per-output-channel bias.
    INPUT_SIZES = [3, 1, 6, 2]
    FILTER_SIZES = [1, 1, 2, 2]
    BIAS_SIZES = [2]
    def get_relu_op(self, relutype):
        # Map the parametrized name to an activation op; '' means identity.
        return {
            'relu': nn_ops.relu,
            'relu6': nn_ops.relu6,
            '': (lambda x: x)
        }[relutype]
    @pytest.mark.parametrize(("relutype",), (
        ('relu',),
        ('relu6',),
        ('',),
    ))
    @pytest.mark.skipif(platform.system() == 'Darwin', reason='Only for Linux')
    def test_fusedconv2d_bias_relu(self, relutype):
        """conv2d -> bias_add -> (optional relu) must match stock TF."""
        inp_values = np.random.rand(*self.INPUT_SIZES)
        filt_values = np.random.rand(*self.FILTER_SIZES)
        bias_values = np.random.rand(*self.BIAS_SIZES)
        def run_test(sess):
            inp = array_ops.placeholder(dtypes.float32)
            filt = array_ops.placeholder(dtypes.float32)
            bias = array_ops.placeholder(dtypes.float32)
            relu_op = self.get_relu_op(relutype)
            return sess.run(
                relu_op(
                    nn_ops.bias_add(
                        nn_ops.conv2d(
                            inp, filt, strides=[1, 1, 1, 1], padding="SAME"),
                        bias)), {
                            inp: inp_values,
                            filt: filt_values,
                            bias: bias_values,
                        })
        assert np.allclose(
            self.without_ngraph(run_test), self.with_ngraph(run_test))
    @pytest.mark.parametrize(("relutype",), (
        ('relu',),
        ('relu6',),
        ('',),
    ))
    @pytest.mark.skipif(platform.system() == 'Darwin', reason='Only for Linux')
    def test_fusedconv2d_batchnorm(self, relutype):
        """conv2d -> fused_batch_norm (inference) -> (optional relu)."""
        inp_values = np.random.rand(*self.INPUT_SIZES)
        filt_values = np.random.rand(*self.FILTER_SIZES)
        scale_values = np.random.rand(*self.BIAS_SIZES)
        offset_values = np.random.rand(*self.BIAS_SIZES)
        mean_values = np.random.rand(*self.BIAS_SIZES)
        variance_values = np.random.rand(*self.BIAS_SIZES)
        def run_test(sess):
            inp = array_ops.placeholder(dtypes.float32)
            filt = array_ops.placeholder(dtypes.float32)
            scale = array_ops.placeholder(dtypes.float32)
            offset = array_ops.placeholder(dtypes.float32)
            mean = array_ops.placeholder(dtypes.float32)
            variance = array_ops.placeholder(dtypes.float32)
            relu_op = self.get_relu_op(relutype)
            bn, _, _ = nn_impl.fused_batch_norm(
                nn_ops.conv2d(inp, filt, strides=[1, 1, 1, 1], padding="SAME"),
                scale,
                offset,
                mean,
                variance,
                epsilon=0.02,
                is_training=False)
            return sess.run(
                relu_op(bn), {
                    inp: inp_values,
                    filt: filt_values,
                    scale: scale_values,
                    offset: offset_values,
                    mean: mean_values,
                    variance: variance_values,
                })
        # Looser tolerance than the other tests: batch-norm arithmetic
        # accumulates more floating-point error.
        assert np.allclose(
            self.without_ngraph(run_test),
            self.with_ngraph(run_test),
            rtol=0,
            atol=5e-5)
    @pytest.mark.skipif(platform.system() == 'Darwin', reason='Only for Linux')
    def test_fusedconv2d_squeeze_bias(self):
        """conv2d -> squeeze(dim 1) -> bias_add must match stock TF."""
        inp_values = np.random.rand(*self.INPUT_SIZES)
        filt_values = np.random.rand(*self.FILTER_SIZES)
        bias_values = np.random.rand(*self.BIAS_SIZES)
        squeeze_dim = [1]
        def run_test(sess):
            inp = array_ops.placeholder(dtypes.float32)
            filt = array_ops.placeholder(dtypes.float32)
            bias = array_ops.placeholder(dtypes.float32)
            return sess.run(
                nn_ops.bias_add(
                    array_ops.squeeze(
                        nn_ops.conv2d(
                            inp, filt, strides=[1, 1, 1, 1], padding="SAME"),
                        squeeze_dim), bias), {
                            inp: inp_values,
                            filt: filt_values,
                            bias: bias_values,
                        })
        assert np.allclose(
            self.without_ngraph(run_test), self.with_ngraph(run_test))
| apache-2.0 |
rackerlabs/blueflood | ops/rackspace-agent-plugins/bf-rollups-delay.py | 6 | 7501 | #!/usr/bin/env python
'''Blueflood Rollup Delay'''
'''For each rollup level, lists the number of slots which need to processed by blueflood. For the 5m range, one day is 288 slots.'''
# Licensed to Rackspace under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# Rackspace licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License."
#
# The following is an example 'criteria' for a Rackspace Monitoring Alarm:
#
# if (metric['metrics_5m_delay'] > 300 ) {
# return new AlarmStatus( WARNING, 'metrics_5m_delay has > 300 slots waiting to be rolled up.' );
# }
#
import pycassa
import sys
import time
import logging
import os
import argparse
from collections import defaultdict
# Number of base (5-minute) slots in the rollup ring: 288 slots/day * 14 days.
SLOTS = 4032
# Length of one base slot in milliseconds (5 minutes).
MILLIS_IN_BASE_SLOT = 300000
# Per-granularity ring sizes; for every entry max_slots * milliseconds_in_slot
# equals the same 14-day window covered by the base ring above.
GRAN_MAPPINGS = {
    'metrics_5m': {'max_slots': 4032, 'milliseconds_in_slot': 300000},
    'metrics_20m': {'max_slots': 1008, 'milliseconds_in_slot': 1200000},
    'metrics_60m': {'max_slots': 336, 'milliseconds_in_slot': 3600000},
    'metrics_240m': {'max_slots': 84, 'milliseconds_in_slot': 14400000},
    'metrics_1440m': {'max_slots': 14, 'milliseconds_in_slot': 86400000}
}
def __is_more_available(len_fetched, page_size):
    """Return True when a full page came back, i.e. more columns may remain."""
    return len_fetched >= page_size
def get_metrics_state_for_shard(shard, cf):
    """Fetch every column of the metrics_state row for *shard*, one page
    (100 columns) at a time, and return them as a dict.

    NOTE(review): pycassa's column_start is inclusive, so each new page
    re-fetches the previous page's last column; harmless here because
    states.update() simply overwrites it - confirm against pycassa docs.
    """
    page_size = 100 # Pycassa has an implicit max limit of 100
    start = ''
    states = {}
    while True:
        batch = cf.get(shard, column_start=start,
                       column_finish='', column_count=page_size)
        keys = batch.keys()
        states.update(batch)
        if not __is_more_available(len(batch), page_size):
            # there are no more columns left
            break
        start = keys[len(batch) - 1]
    return states
def get_metrics_state_for_shards(shards, servers):
    """Return {shard: {column: value}} for all *shards*, reading the
    metrics_state column family from the DATA keyspace on *servers*."""
    # One shared connection pool serves all per-shard reads.
    pool = pycassa.ConnectionPool('DATA',
                                  server_list=servers)
    cf = pycassa.ColumnFamily(pool, 'metrics_state')
    metrics_state_for_shards = {}
    for shard in shards:
        metrics_state_for_shards[shard] = get_metrics_state_for_shard(shard,
                                                                      cf)
    return metrics_state_for_shards
def _millis_to_slot(now_millis):
    # Map an epoch-milliseconds timestamp onto the ring of SLOTS base
    # (5-minute) slots; the modulo makes the ring wrap every 14 days.
    return int((now_millis % (SLOTS * MILLIS_IN_BASE_SLOT))
               / MILLIS_IN_BASE_SLOT)
def _get_slot_for_time(now_millis, gran):
    # Scale the base-slot index down to granularity *gran*'s smaller ring.
    full_slot = _millis_to_slot(now_millis)
    return (GRAN_MAPPINGS[gran]['max_slots'] * full_slot) / SLOTS
def print_stats_for_metrics_state(metrics_state_for_shards, print_res):
    """For every granularity, find slots whose last-active timestamp ('A'
    column) is newer than their rolled-up timestamp ('X' column) and print
    one 'metric <granularity>_delay uint32 <slots>' line per granularity
    with the worst backlog across all shards.

    If *print_res* names a granularity, per-slot diagnostics for that
    granularity are printed as well (Python 2 print statements).
    """
    delayed_slots = defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))
    now = int(time.time() * 1000)
    for shard in metrics_state_for_shards:
        states_per_shard = metrics_state_for_shards[shard]
        for resolution in GRAN_MAPPINGS.keys():
            max_slots = GRAN_MAPPINGS[resolution]['max_slots']
            for slot in range(max_slots):
                # Column names look like "<resolution>,<slot>,A" / ",X".
                last_active_key = ',' .join([resolution, str(slot), 'A'])
                rolled_up_at_key = ',' .join([resolution, str(slot), 'X'])
                last_active_timestamp = states_per_shard[last_active_key] if last_active_key in states_per_shard else 0
                rolled_up_at_timestamp = states_per_shard[rolled_up_at_key] if rolled_up_at_key in states_per_shard else 0
                current_slot = _get_slot_for_time(now, resolution)
                if (current_slot > slot
                        and rolled_up_at_timestamp < last_active_timestamp):
                    # if slot is not rolled up yet, delay measured in slots
                    delayed_slots[
                        resolution][shard][slot] = current_slot - slot
                    if ( print_res == resolution ):
                        print "shard: %4s last_active_key: %19s rolled_up_at_key: %19s current_slot: %s slot: %s" % ( shard, last_active_key, rolled_up_at_key, current_slot, slot)
                        print "            last_active_timestamp: %19s rolled_up_at_timestamp: %19s" % (last_active_timestamp, rolled_up_at_timestamp)
                        print "            last_active_timestamp: %19s rolled_up_at_timestamp: %19s" % ( time.strftime( '%Y-%m-%d %H:%M:%S', time.localtime( last_active_timestamp/1000)), time.strftime( '%Y-%m-%d %H:%M:%S', time.localtime(rolled_up_at_timestamp/1000)) )
                if ( print_res == resolution ):
                    if (last_active_key not in states_per_shard):
                        print "WARNING: %s does not exist in shard %s" % (last_active_key, shard)
                    if (rolled_up_at_key not in states_per_shard):
                        print "WARNING: %s does not exist in shard %s" % (rolled_up_at_key, shard)
    output = {}
    for resolution in GRAN_MAPPINGS.keys():
        across_shards_most_delay = []
        for shard in delayed_slots[resolution].keys():
            max_delay = max(delayed_slots[resolution][shard].values())
            # print 'Most delay: %d, Res: %s' % (float(max_delay/(1000*60)),
            # resolution)
            across_shards_most_delay.append(max_delay)
        if (len(across_shards_most_delay)):
            output[resolution] = max(across_shards_most_delay)
        else:
            output[resolution] = 0
    for resol, delay in output.items():
        print 'metric %s uint32 %u' % ('_'.join([resol, 'delay']), delay)
def main():
    """Entry point: parse CLI args, read rollup state from Cassandra and
    print agent-plugin style 'status'/'metric' lines (Python 2)."""
    parser = argparse.ArgumentParser(description='For each rollup level, lists the number of slots which need to '
                                     'be processed by blueflood. One day is approximately 300 slots.')
    parser.add_argument( '-s', '--servers', help='Cassandra server IP addresses, space separated', required=True, nargs="+")
    parser.add_argument( '-v', '--verbose', help='Print out the unprocessed slots for each shard, for the given granuality. Default: metrics_5m',
                         required=False, nargs="?", choices=['metrics_5m', 'metrics_20m', 'metrics_60m', 'metrics_240m', 'metrics_1440m'], const='metrics_5m' )
    args = parser.parse_args()
    try:
        # Diagnostics go to a log file so stdout stays clean for the
        # status/metric lines the monitoring agent parses.
        logfile = os.path.expanduser('~') + '/bf-rollup.log'
        logging.basicConfig(format='%(asctime)s %(message)s',
                            filename=logfile, level=logging.DEBUG)
        shards = range(128)
        logging.debug('getting metrics state for shards')
        metrics_state_for_shards = get_metrics_state_for_shards(shards,
                                                                args.servers)
        print 'status ok bf_health_check'
        logging.debug('printing stats for metrics state')
        print_stats_for_metrics_state(metrics_state_for_shards,
                                      args.verbose)
    except Exception, ex:
        # Report the failure on stdout for the agent, then re-raise so the
        # process exits non-zero.
        logging.exception(ex)
        print "status error", ex
        raise ex
| apache-2.0 |
tvalacarta/tvalacarta | python/main-classic/lib/youtube_dl/extractor/historicfilms.py | 64 | 1581 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import parse_duration
class HistoricFilmsIE(InfoExtractor):
    """Extractor for historicfilms.com tape pages (/tapes/<id> or /play<id>)."""
    _VALID_URL = r'https?://(?:www\.)?historicfilms\.com/(?:tapes/|play)(?P<id>\d+)'
    _TEST = {
        'url': 'http://www.historicfilms.com/tapes/4728',
        'md5': 'd4a437aec45d8d796a38a215db064e9a',
        'info_dict': {
            'id': '4728',
            'ext': 'mov',
            'title': 'Historic Films: GP-7',
            'description': 'md5:1a86a0f3ac54024e419aba97210d959a',
            'thumbnail': r're:^https?://.*\.jpg$',
            'duration': 2096,
        },
    }
    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # Tape id appears either in a class="tapeId" element or a JS variable.
        tape_id = self._search_regex(
            [r'class="tapeId"[^>]*>([^<]+)<', r'tapeId\s*:\s*"([^"]+)"'],
            webpage, 'tape id')
        title = self._og_search_title(webpage)
        description = self._og_search_description(webpage)
        thumbnail = self._html_search_meta(
            'thumbnailUrl', webpage, 'thumbnails') or self._og_search_thumbnail(webpage)
        duration = parse_duration(self._html_search_meta(
            'duration', webpage, 'duration'))
        # The direct .mov URL is derived from the tape id and the video id.
        video_url = 'http://www.historicfilms.com/video/%s_%s_web.mov' % (tape_id, video_id)
        return {
            'id': video_id,
            'url': video_url,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'duration': duration,
        }
| gpl-3.0 |
kuiche/chromium | tools/grit/grit/gather/regexp.py | 3 | 7474 | #!/usr/bin/python2.4
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''A baseclass for simple gatherers based on regular expressions.
'''
import re
import types

from grit import clique
from grit import exception
from grit import tclib
from grit.gather import interface
class RegexpGatherer(interface.GathererBase):
  '''Common functionality of gatherers based on parsing using a single
  regular expression.

  Parsing produces a "skeleton": a list whose items are either plain
  (nontranslateable) strings or clique.MessageClique objects for the
  translateable messages.  Translate() reassembles the document from the
  skeleton, substituting translated messages back in.
  '''

  # Maps resource-script keywords to translator-facing descriptions that
  # provide context for the extracted message.
  DescriptionMapping_ = {
    'CAPTION' : 'This is a caption for a dialog',
    'CHECKBOX' : 'This is a label for a checkbox',
    'CONTROL': 'This is the text on a control',
    'CTEXT': 'This is a label for a control',
    'DEFPUSHBUTTON': 'This is a button definition',
    'GROUPBOX': 'This is a label for a grouping',
    'ICON': 'This is a label for an icon',
    'LTEXT': 'This is the text for a label',
    'PUSHBUTTON': 'This is the text for a button',
  }

  def __init__(self, text):
    '''Initializes the gatherer with the (stripped) text to parse.'''
    interface.GathererBase.__init__(self)
    # Original text of what we're parsing
    self.text_ = text.strip()
    # List of parts of the document. Translateable parts are clique.MessageClique
    # objects, nontranslateable parts are plain strings. Translated messages are
    # inserted back into the skeleton using the quoting rules defined by
    # self.Escape()
    self.skeleton_ = []
    # A list of the names of IDs that need to be defined for this resource
    # section to compile correctly.
    self.ids_ = []
    # True if Parse() has already been called.
    self.have_parsed_ = False
    # True if a translatable chunk has been added
    self.translatable_chunk_ = False
    # If not None, all parts of the document will be put into this single
    # message; otherwise the normal skeleton approach is used.
    self.single_message_ = None
    # Number to use for the next placeholder name. Used only if single_message
    # is not None
    self.ph_counter_ = 1

  def GetText(self):
    '''Returns the original text of the section'''
    return self.text_

  def Escape(self, text):
    '''Subclasses can override. Base impl is identity.
    '''
    return text

  def UnEscape(self, text):
    '''Subclasses can override. Base impl is identity.
    '''
    return text

  def GetTextualIds(self):
    '''Returns the list of textual IDs that need to be defined for this
    resource section to compile correctly.'''
    return self.ids_

  def GetCliques(self):
    '''Returns the message cliques for each translateable message in the
    resource section.'''
    return filter(lambda x: isinstance(x, clique.MessageClique), self.skeleton_)

  def Translate(self, lang, pseudo_if_not_available=True,
                skeleton_gatherer=None, fallback_to_english=False):
    '''Returns this section translated into 'lang', reassembled from the
    skeleton.  When 'skeleton_gatherer' is supplied its nontranslateable
    parts are used instead of this gatherer's own; the two skeletons must
    be structurally identical.  Raises exception.NotReady if Parse() has
    not produced a skeleton yet.'''
    if len(self.skeleton_) == 0:
      raise exception.NotReady()
    if skeleton_gatherer:
      assert len(skeleton_gatherer.skeleton_) == len(self.skeleton_)

    out = []
    for ix in range(len(self.skeleton_)):
      if isinstance(self.skeleton_[ix], types.StringTypes):
        if skeleton_gatherer:
          # Make sure the skeleton is like the original
          assert(isinstance(skeleton_gatherer.skeleton_[ix], types.StringTypes))
          out.append(skeleton_gatherer.skeleton_[ix])
        else:
          out.append(self.skeleton_[ix])
      else:
        if skeleton_gatherer:  # Make sure the skeleton is like the original
          assert(not isinstance(skeleton_gatherer.skeleton_[ix],
                                types.StringTypes))
        msg = self.skeleton_[ix].MessageForLanguage(lang,
                                                    pseudo_if_not_available,
                                                    fallback_to_english)

        def MyEscape(text):
          return self.Escape(text)
        text = msg.GetRealContent(escaping_function=MyEscape)
        out.append(text)
    return ''.join(out)

  # Contextualization elements. Used for adding additional information
  # to the message bundle description string from RC files.
  def AddDescriptionElement(self, string):
    '''Attaches a description (mapped through DescriptionMapping_ when the
    keyword is known) to the current message.'''
    if self.DescriptionMapping_.has_key(string):
      description = self.DescriptionMapping_[string]
    else:
      description = string
    if self.single_message_:
      self.single_message_.SetDescription(description)
    else:
      if (self.translatable_chunk_):
        # Attach the description to the most recently added message.
        message = self.skeleton_[len(self.skeleton_) - 1].GetMessage()
        message.SetDescription(description)

  def Parse(self):
    '''Parses the section. Implemented by subclasses. Idempotent.'''
    raise NotImplementedError()

  def _AddNontranslateableChunk(self, chunk):
    '''Adds a nontranslateable chunk.'''
    if self.single_message_:
      # In single-message mode, nontranslateable text becomes a numbered
      # placeholder inside the one message instead of a skeleton entry.
      ph = tclib.Placeholder('XX%02dXX' % self.ph_counter_, chunk, chunk)
      self.ph_counter_ += 1
      self.single_message_.AppendPlaceholder(ph)
    else:
      self.skeleton_.append(chunk)

  def _AddTranslateableChunk(self, chunk):
    '''Adds a translateable chunk. It will be unescaped before being added.'''
    # We don't want empty messages since they are redundant and the TC
    # doesn't allow them.
    if chunk == '':
      return

    unescaped_text = self.UnEscape(chunk)
    if self.single_message_:
      self.single_message_.AppendText(unescaped_text)
    else:
      self.skeleton_.append(self.uberclique.MakeClique(
        tclib.Message(text=unescaped_text)))
      self.translatable_chunk_ = True

  def _AddTextualId(self, id):
    # Records a textual ID that this section requires to compile.
    self.ids_.append(id)

  def _RegExpParse(self, regexp, text_to_parse):
    '''An implementation of Parse() that can be used for resource sections that
    can be parsed using a single multi-line regular expression.

    All translateables must be in named groups that have names starting with
    'text'. All textual IDs must be in named groups that have names starting
    with 'id'. All type definitions that can be included in the description
    field for contextualization purposes should have a name that starts with
    'type'.

    Args:
      regexp: re.compile('...', re.MULTILINE)
      text_to_parse:
    '''
    if self.have_parsed_:
      return
    self.have_parsed_ = True

    chunk_start = 0
    for match in regexp.finditer(text_to_parse):
      groups = match.groupdict()
      keys = groups.keys()
      keys.sort()
      self.translatable_chunk_ = False
      for group in keys:
        if group.startswith('id') and groups[group]:
          self._AddTextualId(groups[group])
        elif group.startswith('text') and groups[group]:
          self._AddNontranslateableChunk(
            text_to_parse[chunk_start : match.start(group)])
          chunk_start = match.end(group)  # Next chunk will start after the match
          self._AddTranslateableChunk(groups[group])
        elif group.startswith('type') and groups[group]:
          # Add the description to the skeleton_ list. This works because
          # we are using a sort set of keys, and because we assume that the
          # group name used for descriptions (type) will come after the "text"
          # group in alphabetical order. We also assume that there cannot be
          # more than one description per regular expression match.
          self.AddDescriptionElement(groups[group])
    # Trailing text after the final match.
    self._AddNontranslateableChunk(text_to_parse[chunk_start:])

    if self.single_message_:
      self.skeleton_.append(self.uberclique.MakeClique(self.single_message_))
| bsd-3-clause |
"""
Py2ChainMap
==================
A backport of ChainMap from Python 3 to Python 2.
Get source from https://github.com/justanr/Py2ChainMap
"""
from setuptools import setup

# The module docstring above doubles as the package's long_description, so
# its wording is user-visible on PyPI (the truncated "Python ." is fixed).
setup(
    name='Py2ChainMap',
    version='0.1.0',
    author='kkxue',
    author_email='xueguanwen@gmail.com',
    description='A backport of ChainMap from Python 3 to Python 2.',
    long_description=__doc__,
    url='https://github.com/kkxue/Py2ChainMap/',
    license='BSD',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Python Software Foundation License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'License :: OSI Approved :: BSD License',
    ],
    platforms='any',
    py_modules=['py2chainmap', 'test_py2chainmap'],
    test_suite='test_py2chainmap.suite',
)
| bsd-3-clause |
andrejb/cloudant_bigcouch | couchjs/scons/scons-local-2.0.1/SCons/Tool/f77.py | 61 | 2056 | """engine.SCons.Tool.f77
Tool-specific initialization for the generic Posix f77 Fortran compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/f77.py 5134 2010/08/16 23:02:40 bdeegan"
import SCons.Defaults
import SCons.Scanner.Fortran
import SCons.Tool
import SCons.Util
from SCons.Tool.FortranCommon import add_all_to_env, add_f77_to_env
compilers = ['f77']

def generate(env):
    """Add Builders and construction variables for the generic POSIX f77
    Fortran compiler to the given Environment."""
    add_all_to_env(env)
    add_f77_to_env(env)

    detected = env.Detect(compilers) or 'f77'
    # Point every Fortran-related compiler variable at the same executable.
    for variable in ('F77', 'SHF77', 'FORTRAN', 'SHFORTRAN'):
        env[variable] = detected
def exists(env):
    """Return a true value when an f77 compiler can be detected."""
    found = env.Detect(compilers)
    return found
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 |
liorvh/Empire | lib/stagers/hop_php.py | 22 | 2123 | from lib.common import helpers
class Stager:
    """Empire stager that generates a hop.php redirector for a listener."""

    def __init__(self, mainMenu, params=[]):
        # Metadata shown in the Empire UI.
        self.info = {
            'Name': 'Launcher',

            'Author': ['@harmj0y'],

            'Description': ('Generates a hop.php redirector for an Empire listener.'),

            'Comments': [
                ''
            ]
        }

        # any options needed by the stager, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Listener' : {
                'Description'   :   'Listener to generate stager for.',
                'Required'      :   True,
                'Value'         :   ''
            },
            'OutFile' : {
                'Description'   :   'File to output php redirector to.',
                'Required'      :   True,
                'Value'         :   '/tmp/hop.php'
            }
        }

        # save off a copy of the mainMenu object to access external functionality
        # like listeners/agent handlers/etc.
        self.mainMenu = mainMenu

        for param in params:
            # parameter format is [Name, Value]; unknown names are ignored
            option, value = param
            if option in self.options:
                self.options[option]['Value'] = value

    def generate(self):
        """Build the hop.php redirector source for the configured listener.

        Returns the PHP source as a string, or '' when the listener cannot
        be resolved.  (Dead per-field unpacking of the listener tuple was
        removed; only host and profile are actually used.)
        """
        listenerID = self.options['Listener']['Value']

        # extract out the listener config information
        listener = self.mainMenu.listeners.get_listener(listenerID)
        if listener:
            host = listener[2]
            profile = listener[8]

            # the first profile segment holds the request resource URIs
            resources = profile.split("|")[0]

            return self.mainMenu.stagers.generate_hop_php(host, resources)
        else:
            # print(x) with a single argument behaves identically on
            # Python 2 and Python 3.
            print(helpers.color("[!] Error in hop.php generation."))
            return ""
| bsd-3-clause |
RealTimeWeb/Blockpy-Server | static/blockly-games/storage.py | 3 | 2721 | """Blockly Games: Storage
Copyright 2012 Google Inc.
https://github.com/google/blockly-games
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""Store and retrieve XML with App Engine.
"""
__author__ = "q.neutron@gmail.com (Quynh Neutron)"
import cgi
import logging
from random import randint
from google.appengine.ext import db
from google.appengine.api import memcache
def keyGen():
  """Generate a random 6-character key.

  The alphabet is lowercase letters plus digits, excluding the easily
  confused characters l, 0 and 1.
  """
  chars = "abcdefghijkmnopqrstuvwxyz23456789"
  key_length = 6
  pieces = []
  for _ in range(key_length):
    pieces.append(chars[randint(0, len(chars) - 1)])
  return "".join(pieces)
class Xml(db.Model):
  '''A row in the datastore: an XML document keyed by a short generated key.'''
  # Hash of xml_content, used to deduplicate identical documents.
  xml_hash = db.IntegerProperty()
  # The XML document itself.
  xml_content = db.TextProperty()
def xmlToKey(xml_content):
  '''Store XML and return a generated key.

  Deduplicates by content hash: if a row with the same hash already
  exists its key is reused; otherwise a fresh random key is generated
  (retrying on collisions, giving up after 100 attempts).
  '''
  xml_hash = hash(xml_content)
  lookup_query = db.Query(Xml)
  lookup_query.filter("xml_hash =", xml_hash)
  lookup_result = lookup_query.get()
  if lookup_result:
    # NOTE(review): a hash collision between *different* documents would
    # silently return the earlier document's key -- presumably an accepted
    # risk here; confirm.
    xml_key = lookup_result.key().name()
  else:
    trials = 0
    result = True
    # Keep generating keys until one is unused in the datastore.
    while result:
      trials += 1
      if trials == 100:
        raise Exception("Sorry, the generator failed to get a key for you.")
      xml_key = keyGen()
      result = db.get(db.Key.from_path("Xml", xml_key))
    xml = db.Text(xml_content, encoding="utf_8")
    row = Xml(key_name = xml_key, xml_hash = xml_hash, xml_content = xml)
    row.put()
  return xml_key
def keyToXml(key_provided):
  '''Retrieve stored XML based on the provided key.

  Checks memcache first and falls back to the datastore, repopulating the
  cache on a miss.  Returns the document as a UTF-8 encoded byte string,
  or "" (also cached) when the key is unknown.
  '''
  # Normalize the string.
  key_provided = key_provided.lower().strip()
  # Check memcache for a quick match.
  xml = memcache.get("XML_" + key_provided)
  if xml is None:
    # Check datastore for a definitive match.
    result = db.get(db.Key.from_path("Xml", key_provided))
    if not result:
      xml = ""
    else:
      xml = result.xml_content
    # Save to memcache for next hit (one-hour expiry).
    if not memcache.add("XML_" + key_provided, xml, 3600):
      logging.error("Memcache set failed.")
  return xml.encode("utf-8")
if __name__ == "__main__":
  # CGI entry point: emit a plain-text response for either operation.
  print "Content-Type: text/plain\n"
  forms = cgi.FieldStorage()
  # "xml" parameter: store the document and echo its generated key.
  if "xml" in forms:
    print(xmlToKey(forms["xml"].value))
  # "key" parameter: look up and echo the stored document.
  if "key" in forms:
    print(keyToXml(forms["key"].value))
| mit |
SirkkaNiittyllae/FaPraCSCW20172018 | node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py | 1446 | 65937 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the MSVSSettings.py file."""
import StringIO
import unittest
import gyp.MSVSSettings as MSVSSettings
class TestSequenceFunctions(unittest.TestCase):
  def setUp(self):
    # Capture warning output in an in-memory buffer so each test can assert
    # on exactly what was emitted.
    self.stderr = StringIO.StringIO()
def _ExpectedWarnings(self, expected):
"""Compares recorded lines to expected warnings."""
self.stderr.seek(0)
actual = self.stderr.read().split('\n')
actual = [line for line in actual if line]
self.assertEqual(sorted(expected), sorted(actual))
  def testValidateMSVSSettings_tool_names(self):
    """Tests that only MSVS tool names are allowed."""
    # 'foo' is not a tool name at all, and 'ClCompile' is an MSBuild (not
    # MSVS) tool name -- both must be flagged.
    MSVSSettings.ValidateMSVSSettings(
        {'VCCLCompilerTool': {},
         'VCLinkerTool': {},
         'VCMIDLTool': {},
         'foo': {},
         'VCResourceCompilerTool': {},
         'VCLibrarianTool': {},
         'VCManifestTool': {},
         'ClCompile': {}},
        self.stderr)
    self._ExpectedWarnings([
        'Warning: unrecognized tool foo',
        'Warning: unrecognized tool ClCompile'])
  def testValidateMSVSSettings_settings(self):
    """Tests validation warnings for invalid MSVS settings."""
    # Most values below are valid; the deliberately broken ones (out-of-range
    # indices, non-numeric strings, misspelled/bogus keys, bad booleans) must
    # each produce exactly one of the warnings asserted at the end.
    MSVSSettings.ValidateMSVSSettings(
        {'VCCLCompilerTool': {
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'AdditionalOptions': ['string1', 'string2'],
            'AdditionalUsingDirectories': 'folder1;folder2',
            'AssemblerListingLocation': 'a_file_name',
            'AssemblerOutput': '0',
            'BasicRuntimeChecks': '5',
            'BrowseInformation': 'fdkslj',
            'BrowseInformationFile': 'a_file_name',
            'BufferSecurityCheck': 'true',
            'CallingConvention': '-1',
            'CompileAs': '1',
            'DebugInformationFormat': '2',
            'DefaultCharIsUnsigned': 'true',
            'Detect64BitPortabilityProblems': 'true',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'string1;string2',
            'EnableEnhancedInstructionSet': '1',
            'EnableFiberSafeOptimizations': 'true',
            'EnableFunctionLevelLinking': 'true',
            'EnableIntrinsicFunctions': 'true',
            'EnablePREfast': 'true',
            'Enableprefast': 'bogus',
            'ErrorReporting': '1',
            'ExceptionHandling': '1',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': '1',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': '1',
            'ForceConformanceInForLoopScope': 'true',
            'ForcedIncludeFiles': 'file1;file2',
            'ForcedUsingFiles': 'file1;file2',
            'GeneratePreprocessedFile': '1',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': '1',
            'KeepComments': 'true',
            'MinimalRebuild': 'true',
            'ObjectFile': 'a_file_name',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMP': 'true',
            'Optimization': '1',
            'PrecompiledHeaderFile': 'a_file_name',
            'PrecompiledHeaderThrough': 'a_file_name',
            'PreprocessorDefinitions': 'string1;string2',
            'ProgramDataBaseFileName': 'a_file_name',
            'RuntimeLibrary': '1',
            'RuntimeTypeInfo': 'true',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '1',
            'SuppressStartupBanner': 'true',
            'TreatWChar_tAsBuiltInType': 'true',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'string1;string2',
            'UseFullPaths': 'true',
            'UsePrecompiledHeader': '1',
            'UseUnicodeResponseFiles': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '1',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': 'a_file_name',
            'ZZXYZ': 'bogus'},
         'VCLinkerTool': {
            'AdditionalDependencies': 'file1;file2',
            'AdditionalDependencies_excluded': 'file3',
            'AdditionalLibraryDirectories': 'folder1;folder2',
            'AdditionalManifestDependencies': 'file1;file2',
            'AdditionalOptions': 'a string1',
            'AddModuleNamesToAssembly': 'file1;file2',
            'AllowIsolation': 'true',
            'AssemblyDebug': '2',
            'AssemblyLinkResource': 'file1;file2',
            'BaseAddress': 'a string1',
            'CLRImageType': '2',
            'CLRThreadAttribute': '2',
            'CLRUnmanagedCodeCheck': 'true',
            'DataExecutionPrevention': '2',
            'DelayLoadDLLs': 'file1;file2',
            'DelaySign': 'true',
            'Driver': '2',
            'EmbedManagedResourceFile': 'file1;file2',
            'EnableCOMDATFolding': '2',
            'EnableUAC': 'true',
            'EntryPointSymbol': 'a string1',
            'ErrorReporting': '2',
            'FixedBaseAddress': '2',
            'ForceSymbolReferences': 'file1;file2',
            'FunctionOrder': 'a_file_name',
            'GenerateDebugInformation': 'true',
            'GenerateManifest': 'true',
            'GenerateMapFile': 'true',
            'HeapCommitSize': 'a string1',
            'HeapReserveSize': 'a string1',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreDefaultLibraryNames': 'file1;file2',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreImportLibrary': 'true',
            'ImportLibrary': 'a_file_name',
            'KeyContainer': 'a_file_name',
            'KeyFile': 'a_file_name',
            'LargeAddressAware': '2',
            'LinkIncremental': '2',
            'LinkLibraryDependencies': 'true',
            'LinkTimeCodeGeneration': '2',
            'ManifestFile': 'a_file_name',
            'MapExports': 'true',
            'MapFileName': 'a_file_name',
            'MergedIDLBaseFileName': 'a_file_name',
            'MergeSections': 'a string1',
            'MidlCommandFile': 'a_file_name',
            'ModuleDefinitionFile': 'a_file_name',
            'OptimizeForWindows98': '1',
            'OptimizeReferences': '2',
            'OutputFile': 'a_file_name',
            'PerUserRedirection': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': 'a_file_name',
            'ProgramDatabaseFile': 'a_file_name',
            'RandomizedBaseAddress': '2',
            'RegisterOutput': 'true',
            'ResourceOnlyDLL': 'true',
            'SetChecksum': 'true',
            'ShowProgress': '2',
            'StackCommitSize': 'a string1',
            'StackReserveSize': 'a string1',
            'StripPrivateSymbols': 'a_file_name',
            'SubSystem': '2',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'true',
            'SwapRunFromCD': 'true',
            'SwapRunFromNet': 'true',
            'TargetMachine': '2',
            'TerminalServerAware': '2',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'a_file_name',
            'TypeLibraryResourceID': '33',
            'UACExecutionLevel': '2',
            'UACUIAccess': 'true',
            'UseLibraryDependencyInputs': 'true',
            'UseUnicodeResponseFiles': 'true',
            'Version': 'a string1'},
         'VCMIDLTool': {
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'AdditionalOptions': 'a string1',
            'CPreprocessOptions': 'a string1',
            'DefaultCharType': '1',
            'DLLDataFileName': 'a_file_name',
            'EnableErrorChecks': '1',
            'ErrorCheckAllocations': 'true',
            'ErrorCheckBounds': 'true',
            'ErrorCheckEnumRange': 'true',
            'ErrorCheckRefPointers': 'true',
            'ErrorCheckStubData': 'true',
            'GenerateStublessProxies': 'true',
            'GenerateTypeLibrary': 'true',
            'HeaderFileName': 'a_file_name',
            'IgnoreStandardIncludePath': 'true',
            'InterfaceIdentifierFileName': 'a_file_name',
            'MkTypLibCompatible': 'true',
            'notgood': 'bogus',
            'OutputDirectory': 'a string1',
            'PreprocessorDefinitions': 'string1;string2',
            'ProxyFileName': 'a_file_name',
            'RedirectOutputAndErrors': 'a_file_name',
            'StructMemberAlignment': '1',
            'SuppressStartupBanner': 'true',
            'TargetEnvironment': '1',
            'TypeLibraryName': 'a_file_name',
            'UndefinePreprocessorDefinitions': 'string1;string2',
            'ValidateParameters': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '1'},
         'VCResourceCompilerTool': {
            'AdditionalOptions': 'a string1',
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'Culture': '1003',
            'IgnoreStandardIncludePath': 'true',
            'notgood2': 'bogus',
            'PreprocessorDefinitions': 'string1;string2',
            'ResourceOutputFileName': 'a string1',
            'ShowProgress': 'true',
            'SuppressStartupBanner': 'true',
            'UndefinePreprocessorDefinitions': 'string1;string2'},
         'VCLibrarianTool': {
            'AdditionalDependencies': 'file1;file2',
            'AdditionalLibraryDirectories': 'folder1;folder2',
            'AdditionalOptions': 'a string1',
            'ExportNamedFunctions': 'string1;string2',
            'ForceSymbolReferences': 'a string1',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreSpecificDefaultLibraries': 'file1;file2',
            'LinkLibraryDependencies': 'true',
            'ModuleDefinitionFile': 'a_file_name',
            'OutputFile': 'a_file_name',
            'SuppressStartupBanner': 'true',
            'UseUnicodeResponseFiles': 'true'},
         'VCManifestTool': {
            'AdditionalManifestFiles': 'file1;file2',
            'AdditionalOptions': 'a string1',
            'AssemblyIdentity': 'a string1',
            'ComponentFileName': 'a_file_name',
            'DependencyInformationFile': 'a_file_name',
            'GenerateCatalogFiles': 'true',
            'InputResourceManifests': 'a string1',
            'ManifestResourceFile': 'a_file_name',
            'OutputManifestFile': 'a_file_name',
            'RegistrarScriptFile': 'a_file_name',
            'ReplacementsFile': 'a_file_name',
            'SuppressStartupBanner': 'true',
            'TypeLibraryFile': 'a_file_name',
            'UpdateFileHashes': 'truel',
            'UpdateFileHashesSearchPath': 'a_file_name',
            'UseFAT32Workaround': 'true',
            'UseUnicodeResponseFiles': 'true',
            'VerboseOutput': 'true'}},
        self.stderr)
    self._ExpectedWarnings([
        'Warning: for VCCLCompilerTool/BasicRuntimeChecks, '
        'index value (5) not in expected range [0, 4)',
        'Warning: for VCCLCompilerTool/BrowseInformation, '
        "invalid literal for int() with base 10: 'fdkslj'",
        'Warning: for VCCLCompilerTool/CallingConvention, '
        'index value (-1) not in expected range [0, 4)',
        'Warning: for VCCLCompilerTool/DebugInformationFormat, '
        'converted value for 2 not specified.',
        'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
        'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
        'Warning: for VCLinkerTool/TargetMachine, '
        'converted value for 2 not specified.',
        'Warning: unrecognized setting VCMIDLTool/notgood',
        'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
        'Warning: for VCManifestTool/UpdateFileHashes, '
        "expected bool; got 'truel'"
        ''])
  def testValidateMSBuildSettings_settings(self):
    """Tests validation warnings for invalid MSBuild settings."""
    # Mirror of the MSVS validation test using MSBuild tool/setting names;
    # the invalid entries (bogus keys, bad booleans, unknown enum values)
    # must each produce exactly one of the warnings asserted at the end.
    MSVSSettings.ValidateMSBuildSettings(
        {'ClCompile': {
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'AdditionalOptions': ['string1', 'string2'],
            'AdditionalUsingDirectories': 'folder1;folder2',
            'AssemblerListingLocation': 'a_file_name',
            'AssemblerOutput': 'NoListing',
            'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
            'BrowseInformation': 'false',
            'BrowseInformationFile': 'a_file_name',
            'BufferSecurityCheck': 'true',
            'BuildingInIDE': 'true',
            'CallingConvention': 'Cdecl',
            'CompileAs': 'CompileAsC',
            'CompileAsManaged': 'true',
            'CreateHotpatchableImage': 'true',
            'DebugInformationFormat': 'ProgramDatabase',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'string1;string2',
            'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
            'EnableFiberSafeOptimizations': 'true',
            'EnablePREfast': 'true',
            'Enableprefast': 'bogus',
            'ErrorReporting': 'Prompt',
            'ExceptionHandling': 'SyncCThrow',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': 'Neither',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': 'Precise',
            'ForceConformanceInForLoopScope': 'true',
            'ForcedIncludeFiles': 'file1;file2',
            'ForcedUsingFiles': 'file1;file2',
            'FunctionLevelLinking': 'false',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': 'OnlyExplicitInline',
            'IntrinsicFunctions': 'false',
            'MinimalRebuild': 'true',
            'MultiProcessorCompilation': 'true',
            'ObjectFileName': 'a_file_name',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMPSupport': 'true',
            'Optimization': 'Disabled',
            'PrecompiledHeader': 'NotUsing',
            'PrecompiledHeaderFile': 'a_file_name',
            'PrecompiledHeaderOutputFile': 'a_file_name',
            'PreprocessKeepComments': 'true',
            'PreprocessorDefinitions': 'string1;string2',
            'PreprocessOutputPath': 'a string1',
            'PreprocessSuppressLineNumbers': 'false',
            'PreprocessToFile': 'false',
            'ProcessorNumber': '33',
            'ProgramDataBaseFileName': 'a_file_name',
            'RuntimeLibrary': 'MultiThreaded',
            'RuntimeTypeInfo': 'true',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '1Byte',
            'SuppressStartupBanner': 'true',
            'TrackerLogDirectory': 'a_folder',
            'TreatSpecificWarningsAsErrors': 'string1;string2',
            'TreatWarningAsError': 'true',
            'TreatWChar_tAsBuiltInType': 'true',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'string1;string2',
            'UseFullPaths': 'true',
            'UseUnicodeForAssemblerListing': 'true',
            'WarningLevel': 'TurnOffAllWarnings',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': 'a_file_name',
            'ZZXYZ': 'bogus'},
         'Link': {
            'AdditionalDependencies': 'file1;file2',
            'AdditionalLibraryDirectories': 'folder1;folder2',
            'AdditionalManifestDependencies': 'file1;file2',
            'AdditionalOptions': 'a string1',
            'AddModuleNamesToAssembly': 'file1;file2',
            'AllowIsolation': 'true',
            'AssemblyDebug': '',
            'AssemblyLinkResource': 'file1;file2',
            'BaseAddress': 'a string1',
            'BuildingInIDE': 'true',
            'CLRImageType': 'ForceIJWImage',
            'CLRSupportLastError': 'Enabled',
            'CLRThreadAttribute': 'MTAThreadingAttribute',
            'CLRUnmanagedCodeCheck': 'true',
            'CreateHotPatchableImage': 'X86Image',
            'DataExecutionPrevention': 'false',
            'DelayLoadDLLs': 'file1;file2',
            'DelaySign': 'true',
            'Driver': 'NotSet',
            'EmbedManagedResourceFile': 'file1;file2',
            'EnableCOMDATFolding': 'false',
            'EnableUAC': 'true',
            'EntryPointSymbol': 'a string1',
            'FixedBaseAddress': 'false',
            'ForceFileOutput': 'Enabled',
            'ForceSymbolReferences': 'file1;file2',
            'FunctionOrder': 'a_file_name',
            'GenerateDebugInformation': 'true',
            'GenerateMapFile': 'true',
            'HeapCommitSize': 'a string1',
            'HeapReserveSize': 'a string1',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreSpecificDefaultLibraries': 'a_file_list',
            'ImageHasSafeExceptionHandlers': 'true',
            'ImportLibrary': 'a_file_name',
            'KeyContainer': 'a_file_name',
            'KeyFile': 'a_file_name',
            'LargeAddressAware': 'false',
            'LinkDLL': 'true',
            'LinkErrorReporting': 'SendErrorReport',
            'LinkStatus': 'true',
            'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
            'ManifestFile': 'a_file_name',
            'MapExports': 'true',
            'MapFileName': 'a_file_name',
            'MergedIDLBaseFileName': 'a_file_name',
            'MergeSections': 'a string1',
            'MidlCommandFile': 'a_file_name',
            'MinimumRequiredVersion': 'a string1',
            'ModuleDefinitionFile': 'a_file_name',
            'MSDOSStubFileName': 'a_file_name',
            'NoEntryPoint': 'true',
            'OptimizeReferences': 'false',
            'OutputFile': 'a_file_name',
            'PerUserRedirection': 'true',
            'PreventDllBinding': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': 'a_file_name',
            'ProgramDatabaseFile': 'a_file_name',
            'RandomizedBaseAddress': 'false',
            'RegisterOutput': 'true',
            'SectionAlignment': '33',
            'SetChecksum': 'true',
            'ShowProgress': 'LinkVerboseREF',
            'SpecifySectionAttributes': 'a string1',
            'StackCommitSize': 'a string1',
            'StackReserveSize': 'a string1',
            'StripPrivateSymbols': 'a_file_name',
            'SubSystem': 'Console',
            'SupportNobindOfDelayLoadedDLL': 'true',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'true',
            'SwapRunFromCD': 'true',
            'SwapRunFromNET': 'true',
            'TargetMachine': 'MachineX86',
            'TerminalServerAware': 'false',
            'TrackerLogDirectory': 'a_folder',
            'TreatLinkerWarningAsErrors': 'true',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'a_file_name',
            'TypeLibraryResourceID': '33',
            'UACExecutionLevel': 'AsInvoker',
            'UACUIAccess': 'true',
            'Version': 'a string1'},
         'ResourceCompile': {
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'AdditionalOptions': 'a string1',
            'Culture': '0x236',
            'IgnoreStandardIncludePath': 'true',
            'NullTerminateStrings': 'true',
            'PreprocessorDefinitions': 'string1;string2',
            'ResourceOutputFileName': 'a string1',
            'ShowProgress': 'true',
            'SuppressStartupBanner': 'true',
            'TrackerLogDirectory': 'a_folder',
            'UndefinePreprocessorDefinitions': 'string1;string2'},
         'Midl': {
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'AdditionalOptions': 'a string1',
            'ApplicationConfigurationMode': 'true',
            'ClientStubFile': 'a_file_name',
            'CPreprocessOptions': 'a string1',
            'DefaultCharType': 'Signed',
            'DllDataFileName': 'a_file_name',
            'EnableErrorChecks': 'EnableCustom',
            'ErrorCheckAllocations': 'true',
            'ErrorCheckBounds': 'true',
            'ErrorCheckEnumRange': 'true',
            'ErrorCheckRefPointers': 'true',
            'ErrorCheckStubData': 'true',
            'GenerateClientFiles': 'Stub',
            'GenerateServerFiles': 'None',
            'GenerateStublessProxies': 'true',
            'GenerateTypeLibrary': 'true',
            'HeaderFileName': 'a_file_name',
            'IgnoreStandardIncludePath': 'true',
            'InterfaceIdentifierFileName': 'a_file_name',
            'LocaleID': '33',
            'MkTypLibCompatible': 'true',
            'OutputDirectory': 'a string1',
            'PreprocessorDefinitions': 'string1;string2',
            'ProxyFileName': 'a_file_name',
            'RedirectOutputAndErrors': 'a_file_name',
            'ServerStubFile': 'a_file_name',
            'StructMemberAlignment': 'NotSet',
            'SuppressCompilerWarnings': 'true',
            'SuppressStartupBanner': 'true',
            'TargetEnvironment': 'Itanium',
            'TrackerLogDirectory': 'a_folder',
            'TypeLibFormat': 'NewFormat',
            'TypeLibraryName': 'a_file_name',
            'UndefinePreprocessorDefinitions': 'string1;string2',
            'ValidateAllParameters': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '1'},
         'Lib': {
            'AdditionalDependencies': 'file1;file2',
            'AdditionalLibraryDirectories': 'folder1;folder2',
            'AdditionalOptions': 'a string1',
            'DisplayLibrary': 'a string1',
            'ErrorReporting': 'PromptImmediately',
            'ExportNamedFunctions': 'string1;string2',
            'ForceSymbolReferences': 'a string1',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreSpecificDefaultLibraries': 'file1;file2',
            'LinkTimeCodeGeneration': 'true',
            'MinimumRequiredVersion': 'a string1',
            'ModuleDefinitionFile': 'a_file_name',
            'Name': 'a_file_name',
            'OutputFile': 'a_file_name',
            'RemoveObjects': 'file1;file2',
            'SubSystem': 'Console',
            'SuppressStartupBanner': 'true',
            'TargetMachine': 'MachineX86i',
            'TrackerLogDirectory': 'a_folder',
            'TreatLibWarningAsErrors': 'true',
            'UseUnicodeResponseFiles': 'true',
            'Verbose': 'true'},
         'Manifest': {
            'AdditionalManifestFiles': 'file1;file2',
            'AdditionalOptions': 'a string1',
            'AssemblyIdentity': 'a string1',
            'ComponentFileName': 'a_file_name',
            'EnableDPIAwareness': 'fal',
            'GenerateCatalogFiles': 'truel',
            'GenerateCategoryTags': 'true',
            'InputResourceManifests': 'a string1',
            'ManifestFromManagedAssembly': 'a_file_name',
            'notgood3': 'bogus',
            'OutputManifestFile': 'a_file_name',
            'OutputResourceManifests': 'a string1',
            'RegistrarScriptFile': 'a_file_name',
            'ReplacementsFile': 'a_file_name',
            'SuppressDependencyElement': 'true',
            'SuppressStartupBanner': 'true',
            'TrackerLogDirectory': 'a_folder',
            'TypeLibraryFile': 'a_file_name',
            'UpdateFileHashes': 'true',
            'UpdateFileHashesSearchPath': 'a_file_name',
            'VerboseOutput': 'true'},
         'ProjectReference': {
            'LinkLibraryDependencies': 'true',
            'UseLibraryDependencyInputs': 'true'},
         'ManifestResourceCompile': {
            'ResourceOutputFileName': 'a_file_name'},
         '': {
            'EmbedManifest': 'true',
            'GenerateManifest': 'true',
            'IgnoreImportLibrary': 'true',
            'LinkIncremental': 'false'}},
        self.stderr)
    self._ExpectedWarnings([
        'Warning: unrecognized setting ClCompile/Enableprefast',
        'Warning: unrecognized setting ClCompile/ZZXYZ',
        'Warning: unrecognized setting Manifest/notgood3',
        'Warning: for Manifest/GenerateCatalogFiles, '
        "expected bool; got 'truel'",
        'Warning: for Lib/TargetMachine, unrecognized enumerated value '
        'MachineX86i',
        "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"])
def testConvertToMSBuildSettings_empty(self):
    """Tests an empty conversion."""
    # An empty MSVS settings dict must convert to an empty MSBuild settings
    # dict without producing any warnings.
    source_settings = {}
    converted = MSVSSettings.ConvertToMSBuildSettings(source_settings,
                                                      self.stderr)
    self.assertEqual({}, converted)
    self._ExpectedWarnings([])
def testConvertToMSBuildSettings_minimal(self):
    """Tests a minimal conversion."""
    # A small but representative selection of compiler and linker settings:
    # pass-through strings, an enumerated index, and boolean-ish values.
    source = {
        'VCCLCompilerTool': {
            'AdditionalIncludeDirectories': 'dir1',
            'AdditionalOptions': '/foo',
            'BasicRuntimeChecks': '0',
        },
        'VCLinkerTool': {
            'LinkTimeCodeGeneration': '1',
            'ErrorReporting': '1',
            'DataExecutionPrevention': '2',
        },
    }
    expected = {
        'ClCompile': {
            'AdditionalIncludeDirectories': 'dir1',
            'AdditionalOptions': '/foo',
            'BasicRuntimeChecks': 'Default',
        },
        'Link': {
            'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
            'LinkErrorReporting': 'PromptImmediately',
            'DataExecutionPrevention': 'true',
        },
    }
    converted = MSVSSettings.ConvertToMSBuildSettings(source, self.stderr)
    self.assertEqual(expected, converted)
    self._ExpectedWarnings([])
def testConvertToMSBuildSettings_warnings(self):
    """Tests conversion that generates warnings."""
    # Every tool below mixes valid settings with values that are outside the
    # converter's accepted range (or enumerated set).  The converter must
    # keep the valid settings, drop the bad ones, and emit one warning per
    # dropped value.
    msvs_settings = {
        'VCCLCompilerTool': {
            'AdditionalIncludeDirectories': '1',
            'AdditionalOptions': '2',
            # These are incorrect values:
            'BasicRuntimeChecks': '12',
            'BrowseInformation': '21',
            'UsePrecompiledHeader': '13',
            'GeneratePreprocessedFile': '14'},
        'VCLinkerTool': {
            # These are incorrect values:
            'Driver': '10',
            'LinkTimeCodeGeneration': '31',
            'ErrorReporting': '21',
            'FixedBaseAddress': '6'},
        'VCResourceCompilerTool': {
            # Custom
            'Culture': '1003'}}
    # Only valid settings survive; Culture is converted to its hex form.
    expected_msbuild_settings = {
        'ClCompile': {
            'AdditionalIncludeDirectories': '1',
            'AdditionalOptions': '2'},
        'Link': {},
        'ResourceCompile': {
            # Custom
            'Culture': '0x03eb'}}
    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
        msvs_settings,
        self.stderr)
    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
    self._ExpectedWarnings([
        'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to '
        'MSBuild, index value (12) not in expected range [0, 4)',
        'Warning: while converting VCCLCompilerTool/BrowseInformation to '
        'MSBuild, index value (21) not in expected range [0, 3)',
        'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to '
        'MSBuild, index value (13) not in expected range [0, 3)',
        'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to '
        'MSBuild, value must be one of [0, 1, 2]; got 14',
        'Warning: while converting VCLinkerTool/Driver to '
        'MSBuild, index value (10) not in expected range [0, 4)',
        'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to '
        'MSBuild, index value (31) not in expected range [0, 5)',
        'Warning: while converting VCLinkerTool/ErrorReporting to '
        'MSBuild, index value (21) not in expected range [0, 3)',
        'Warning: while converting VCLinkerTool/FixedBaseAddress to '
        'MSBuild, index value (6) not in expected range [0, 3)',
        ])
def testConvertToMSBuildSettings_full_synthetic(self):
    """Tests conversion of all the MSBuild settings.

    Synthetic fixture exercising every convertible setting of every MSVS
    (VS2008 .vcproj) tool and the MSBuild (VS2010 .vcxproj) setting each
    one must translate to.  No warnings are expected.
    """
    # Input: one entry per MSVS tool, with every supported setting present.
    msvs_settings = {
        'VCCLCompilerTool': {
            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'AdditionalUsingDirectories': 'folder1;folder2;folder3',
            'AssemblerListingLocation': 'a_file_name',
            'AssemblerOutput': '0',
            'BasicRuntimeChecks': '1',
            'BrowseInformation': '2',
            'BrowseInformationFile': 'a_file_name',
            'BufferSecurityCheck': 'true',
            'CallingConvention': '0',
            'CompileAs': '1',
            'DebugInformationFormat': '4',
            'DefaultCharIsUnsigned': 'true',
            'Detect64BitPortabilityProblems': 'true',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'd1;d2;d3',
            'EnableEnhancedInstructionSet': '0',
            'EnableFiberSafeOptimizations': 'true',
            'EnableFunctionLevelLinking': 'true',
            'EnableIntrinsicFunctions': 'true',
            'EnablePREfast': 'true',
            'ErrorReporting': '1',
            'ExceptionHandling': '2',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': '0',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': '1',
            'ForceConformanceInForLoopScope': 'true',
            'ForcedIncludeFiles': 'file1;file2;file3',
            'ForcedUsingFiles': 'file1;file2;file3',
            'GeneratePreprocessedFile': '1',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': '2',
            'KeepComments': 'true',
            'MinimalRebuild': 'true',
            'ObjectFile': 'a_file_name',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMP': 'true',
            'Optimization': '3',
            'PrecompiledHeaderFile': 'a_file_name',
            'PrecompiledHeaderThrough': 'a_file_name',
            'PreprocessorDefinitions': 'd1;d2;d3',
            'ProgramDataBaseFileName': 'a_file_name',
            'RuntimeLibrary': '0',
            'RuntimeTypeInfo': 'true',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '1',
            'SuppressStartupBanner': 'true',
            'TreatWChar_tAsBuiltInType': 'true',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
            'UseFullPaths': 'true',
            'UsePrecompiledHeader': '1',
            'UseUnicodeResponseFiles': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '2',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': 'a_file_name'},
        'VCLinkerTool': {
            'AdditionalDependencies': 'file1;file2;file3',
            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
            'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
            'AdditionalManifestDependencies': 'file1;file2;file3',
            'AdditionalOptions': 'a_string',
            'AddModuleNamesToAssembly': 'file1;file2;file3',
            'AllowIsolation': 'true',
            'AssemblyDebug': '0',
            'AssemblyLinkResource': 'file1;file2;file3',
            'BaseAddress': 'a_string',
            'CLRImageType': '1',
            'CLRThreadAttribute': '2',
            'CLRUnmanagedCodeCheck': 'true',
            'DataExecutionPrevention': '0',
            'DelayLoadDLLs': 'file1;file2;file3',
            'DelaySign': 'true',
            'Driver': '1',
            'EmbedManagedResourceFile': 'file1;file2;file3',
            'EnableCOMDATFolding': '0',
            'EnableUAC': 'true',
            'EntryPointSymbol': 'a_string',
            'ErrorReporting': '0',
            'FixedBaseAddress': '1',
            'ForceSymbolReferences': 'file1;file2;file3',
            'FunctionOrder': 'a_file_name',
            'GenerateDebugInformation': 'true',
            'GenerateManifest': 'true',
            'GenerateMapFile': 'true',
            'HeapCommitSize': 'a_string',
            'HeapReserveSize': 'a_string',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreDefaultLibraryNames': 'file1;file2;file3',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreImportLibrary': 'true',
            'ImportLibrary': 'a_file_name',
            'KeyContainer': 'a_file_name',
            'KeyFile': 'a_file_name',
            'LargeAddressAware': '2',
            'LinkIncremental': '1',
            'LinkLibraryDependencies': 'true',
            'LinkTimeCodeGeneration': '2',
            'ManifestFile': 'a_file_name',
            'MapExports': 'true',
            'MapFileName': 'a_file_name',
            'MergedIDLBaseFileName': 'a_file_name',
            'MergeSections': 'a_string',
            'MidlCommandFile': 'a_file_name',
            'ModuleDefinitionFile': 'a_file_name',
            'OptimizeForWindows98': '1',
            'OptimizeReferences': '0',
            'OutputFile': 'a_file_name',
            'PerUserRedirection': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': 'a_file_name',
            'ProgramDatabaseFile': 'a_file_name',
            'RandomizedBaseAddress': '1',
            'RegisterOutput': 'true',
            'ResourceOnlyDLL': 'true',
            'SetChecksum': 'true',
            'ShowProgress': '0',
            'StackCommitSize': 'a_string',
            'StackReserveSize': 'a_string',
            'StripPrivateSymbols': 'a_file_name',
            'SubSystem': '2',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'true',
            'SwapRunFromCD': 'true',
            'SwapRunFromNet': 'true',
            'TargetMachine': '3',
            'TerminalServerAware': '2',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'a_file_name',
            'TypeLibraryResourceID': '33',
            'UACExecutionLevel': '1',
            'UACUIAccess': 'true',
            'UseLibraryDependencyInputs': 'false',
            'UseUnicodeResponseFiles': 'true',
            'Version': 'a_string'},
        'VCResourceCompilerTool': {
            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'Culture': '1003',
            'IgnoreStandardIncludePath': 'true',
            'PreprocessorDefinitions': 'd1;d2;d3',
            'ResourceOutputFileName': 'a_string',
            'ShowProgress': 'true',
            'SuppressStartupBanner': 'true',
            'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
        'VCMIDLTool': {
            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'CPreprocessOptions': 'a_string',
            'DefaultCharType': '0',
            'DLLDataFileName': 'a_file_name',
            'EnableErrorChecks': '2',
            'ErrorCheckAllocations': 'true',
            'ErrorCheckBounds': 'true',
            'ErrorCheckEnumRange': 'true',
            'ErrorCheckRefPointers': 'true',
            'ErrorCheckStubData': 'true',
            'GenerateStublessProxies': 'true',
            'GenerateTypeLibrary': 'true',
            'HeaderFileName': 'a_file_name',
            'IgnoreStandardIncludePath': 'true',
            'InterfaceIdentifierFileName': 'a_file_name',
            'MkTypLibCompatible': 'true',
            'OutputDirectory': 'a_string',
            'PreprocessorDefinitions': 'd1;d2;d3',
            'ProxyFileName': 'a_file_name',
            'RedirectOutputAndErrors': 'a_file_name',
            'StructMemberAlignment': '3',
            'SuppressStartupBanner': 'true',
            'TargetEnvironment': '1',
            'TypeLibraryName': 'a_file_name',
            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
            'ValidateParameters': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '4'},
        'VCLibrarianTool': {
            'AdditionalDependencies': 'file1;file2;file3',
            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
            'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'ExportNamedFunctions': 'd1;d2;d3',
            'ForceSymbolReferences': 'a_string',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
            'LinkLibraryDependencies': 'true',
            'ModuleDefinitionFile': 'a_file_name',
            'OutputFile': 'a_file_name',
            'SuppressStartupBanner': 'true',
            'UseUnicodeResponseFiles': 'true'},
        'VCManifestTool': {
            'AdditionalManifestFiles': 'file1;file2;file3',
            'AdditionalOptions': 'a_string',
            'AssemblyIdentity': 'a_string',
            'ComponentFileName': 'a_file_name',
            'DependencyInformationFile': 'a_file_name',
            'EmbedManifest': 'true',
            'GenerateCatalogFiles': 'true',
            'InputResourceManifests': 'a_string',
            'ManifestResourceFile': 'my_name',
            'OutputManifestFile': 'a_file_name',
            'RegistrarScriptFile': 'a_file_name',
            'ReplacementsFile': 'a_file_name',
            'SuppressStartupBanner': 'true',
            'TypeLibraryFile': 'a_file_name',
            'UpdateFileHashes': 'true',
            'UpdateFileHashesSearchPath': 'a_file_name',
            'UseFAT32Workaround': 'true',
            'UseUnicodeResponseFiles': 'true',
            'VerboseOutput': 'true'}}
    # Expected output: indices become enumeration names, booleans stay
    # strings, Culture becomes hex, and some settings move to other tools
    # (e.g. EmbedManifest/GenerateManifest land in the '' pseudo-tool).
    expected_msbuild_settings = {
        'ClCompile': {
            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string /J',
            'AdditionalUsingDirectories': 'folder1;folder2;folder3',
            'AssemblerListingLocation': 'a_file_name',
            'AssemblerOutput': 'NoListing',
            'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
            'BrowseInformation': 'true',
            'BrowseInformationFile': 'a_file_name',
            'BufferSecurityCheck': 'true',
            'CallingConvention': 'Cdecl',
            'CompileAs': 'CompileAsC',
            'DebugInformationFormat': 'EditAndContinue',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'd1;d2;d3',
            'EnableEnhancedInstructionSet': 'NotSet',
            'EnableFiberSafeOptimizations': 'true',
            'EnablePREfast': 'true',
            'ErrorReporting': 'Prompt',
            'ExceptionHandling': 'Async',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': 'Neither',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': 'Strict',
            'ForceConformanceInForLoopScope': 'true',
            'ForcedIncludeFiles': 'file1;file2;file3',
            'ForcedUsingFiles': 'file1;file2;file3',
            'FunctionLevelLinking': 'true',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': 'AnySuitable',
            'IntrinsicFunctions': 'true',
            'MinimalRebuild': 'true',
            'ObjectFileName': 'a_file_name',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMPSupport': 'true',
            'Optimization': 'Full',
            'PrecompiledHeader': 'Create',
            'PrecompiledHeaderFile': 'a_file_name',
            'PrecompiledHeaderOutputFile': 'a_file_name',
            'PreprocessKeepComments': 'true',
            'PreprocessorDefinitions': 'd1;d2;d3',
            'PreprocessSuppressLineNumbers': 'false',
            'PreprocessToFile': 'true',
            'ProgramDataBaseFileName': 'a_file_name',
            'RuntimeLibrary': 'MultiThreaded',
            'RuntimeTypeInfo': 'true',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '1Byte',
            'SuppressStartupBanner': 'true',
            'TreatWarningAsError': 'true',
            'TreatWChar_tAsBuiltInType': 'true',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
            'UseFullPaths': 'true',
            'WarningLevel': 'Level2',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': 'a_file_name'},
        'Link': {
            'AdditionalDependencies': 'file1;file2;file3',
            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
            'AdditionalManifestDependencies': 'file1;file2;file3',
            'AdditionalOptions': 'a_string',
            'AddModuleNamesToAssembly': 'file1;file2;file3',
            'AllowIsolation': 'true',
            'AssemblyDebug': '',
            'AssemblyLinkResource': 'file1;file2;file3',
            'BaseAddress': 'a_string',
            'CLRImageType': 'ForceIJWImage',
            'CLRThreadAttribute': 'STAThreadingAttribute',
            'CLRUnmanagedCodeCheck': 'true',
            'DataExecutionPrevention': '',
            'DelayLoadDLLs': 'file1;file2;file3',
            'DelaySign': 'true',
            'Driver': 'Driver',
            'EmbedManagedResourceFile': 'file1;file2;file3',
            'EnableCOMDATFolding': '',
            'EnableUAC': 'true',
            'EntryPointSymbol': 'a_string',
            'FixedBaseAddress': 'false',
            'ForceSymbolReferences': 'file1;file2;file3',
            'FunctionOrder': 'a_file_name',
            'GenerateDebugInformation': 'true',
            'GenerateMapFile': 'true',
            'HeapCommitSize': 'a_string',
            'HeapReserveSize': 'a_string',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
            'ImportLibrary': 'a_file_name',
            'KeyContainer': 'a_file_name',
            'KeyFile': 'a_file_name',
            'LargeAddressAware': 'true',
            'LinkErrorReporting': 'NoErrorReport',
            'LinkTimeCodeGeneration': 'PGInstrument',
            'ManifestFile': 'a_file_name',
            'MapExports': 'true',
            'MapFileName': 'a_file_name',
            'MergedIDLBaseFileName': 'a_file_name',
            'MergeSections': 'a_string',
            'MidlCommandFile': 'a_file_name',
            'ModuleDefinitionFile': 'a_file_name',
            'NoEntryPoint': 'true',
            'OptimizeReferences': '',
            'OutputFile': 'a_file_name',
            'PerUserRedirection': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': 'a_file_name',
            'ProgramDatabaseFile': 'a_file_name',
            'RandomizedBaseAddress': 'false',
            'RegisterOutput': 'true',
            'SetChecksum': 'true',
            'ShowProgress': 'NotSet',
            'StackCommitSize': 'a_string',
            'StackReserveSize': 'a_string',
            'StripPrivateSymbols': 'a_file_name',
            'SubSystem': 'Windows',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'true',
            'SwapRunFromCD': 'true',
            'SwapRunFromNET': 'true',
            'TargetMachine': 'MachineARM',
            'TerminalServerAware': 'true',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'a_file_name',
            'TypeLibraryResourceID': '33',
            'UACExecutionLevel': 'HighestAvailable',
            'UACUIAccess': 'true',
            'Version': 'a_string'},
        'ResourceCompile': {
            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'Culture': '0x03eb',
            'IgnoreStandardIncludePath': 'true',
            'PreprocessorDefinitions': 'd1;d2;d3',
            'ResourceOutputFileName': 'a_string',
            'ShowProgress': 'true',
            'SuppressStartupBanner': 'true',
            'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
        'Midl': {
            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'CPreprocessOptions': 'a_string',
            'DefaultCharType': 'Unsigned',
            'DllDataFileName': 'a_file_name',
            'EnableErrorChecks': 'All',
            'ErrorCheckAllocations': 'true',
            'ErrorCheckBounds': 'true',
            'ErrorCheckEnumRange': 'true',
            'ErrorCheckRefPointers': 'true',
            'ErrorCheckStubData': 'true',
            'GenerateStublessProxies': 'true',
            'GenerateTypeLibrary': 'true',
            'HeaderFileName': 'a_file_name',
            'IgnoreStandardIncludePath': 'true',
            'InterfaceIdentifierFileName': 'a_file_name',
            'MkTypLibCompatible': 'true',
            'OutputDirectory': 'a_string',
            'PreprocessorDefinitions': 'd1;d2;d3',
            'ProxyFileName': 'a_file_name',
            'RedirectOutputAndErrors': 'a_file_name',
            'StructMemberAlignment': '4',
            'SuppressStartupBanner': 'true',
            'TargetEnvironment': 'Win32',
            'TypeLibraryName': 'a_file_name',
            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
            'ValidateAllParameters': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '4'},
        'Lib': {
            'AdditionalDependencies': 'file1;file2;file3',
            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'ExportNamedFunctions': 'd1;d2;d3',
            'ForceSymbolReferences': 'a_string',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
            'ModuleDefinitionFile': 'a_file_name',
            'OutputFile': 'a_file_name',
            'SuppressStartupBanner': 'true',
            'UseUnicodeResponseFiles': 'true'},
        'Manifest': {
            'AdditionalManifestFiles': 'file1;file2;file3',
            'AdditionalOptions': 'a_string',
            'AssemblyIdentity': 'a_string',
            'ComponentFileName': 'a_file_name',
            'GenerateCatalogFiles': 'true',
            'InputResourceManifests': 'a_string',
            'OutputManifestFile': 'a_file_name',
            'RegistrarScriptFile': 'a_file_name',
            'ReplacementsFile': 'a_file_name',
            'SuppressStartupBanner': 'true',
            'TypeLibraryFile': 'a_file_name',
            'UpdateFileHashes': 'true',
            'UpdateFileHashesSearchPath': 'a_file_name',
            'VerboseOutput': 'true'},
        'ManifestResourceCompile': {
            'ResourceOutputFileName': 'my_name'},
        'ProjectReference': {
            'LinkLibraryDependencies': 'true',
            'UseLibraryDependencyInputs': 'false'},
        '': {
            'EmbedManifest': 'true',
            'GenerateManifest': 'true',
            'IgnoreImportLibrary': 'true',
            'LinkIncremental': 'false'}}
    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
        msvs_settings,
        self.stderr)
    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
    self._ExpectedWarnings([])
def testConvertToMSBuildSettings_actual(self):
    """Tests the conversion of an actual project.

    A VS2008 project with most of the options defined was created through the
    VS2008 IDE. It was then converted to VS2010. The tool settings found in
    the .vcproj and .vcxproj files were converted to the two dictionaries
    msvs_settings and expected_msbuild_settings.

    Note that for many settings, the VS2010 converter adds macros like
    %(AdditionalIncludeDirectories) to make sure than inherited values are
    included. Since the Gyp projects we generate do not use inheritance,
    we removed these macros. They were:
        ClCompile:
            AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
            AdditionalOptions: ' %(AdditionalOptions)'
            AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
            DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
            ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
            ForcedUsingFiles: ';%(ForcedUsingFiles)',
            PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
            UndefinePreprocessorDefinitions:
                ';%(UndefinePreprocessorDefinitions)',
        Link:
            AdditionalDependencies: ';%(AdditionalDependencies)',
            AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
            AdditionalManifestDependencies:
                ';%(AdditionalManifestDependencies)',
            AdditionalOptions: ' %(AdditionalOptions)',
            AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
            AssemblyLinkResource: ';%(AssemblyLinkResource)',
            DelayLoadDLLs: ';%(DelayLoadDLLs)',
            EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
            ForceSymbolReferences: ';%(ForceSymbolReferences)',
            IgnoreSpecificDefaultLibraries:
                ';%(IgnoreSpecificDefaultLibraries)',
        ResourceCompile:
            AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
            AdditionalOptions: ' %(AdditionalOptions)',
            PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
        Manifest:
            AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
            AdditionalOptions: ' %(AdditionalOptions)',
            InputResourceManifests: ';%(InputResourceManifests)',
    """
    # Input: the tool settings exactly as found in the VS2008 .vcproj.
    msvs_settings = {
        'VCCLCompilerTool': {
            'AdditionalIncludeDirectories': 'dir1',
            'AdditionalOptions': '/more',
            'AdditionalUsingDirectories': 'test',
            'AssemblerListingLocation': '$(IntDir)\\a',
            'AssemblerOutput': '1',
            'BasicRuntimeChecks': '3',
            'BrowseInformation': '1',
            'BrowseInformationFile': '$(IntDir)\\e',
            'BufferSecurityCheck': 'false',
            'CallingConvention': '1',
            'CompileAs': '1',
            'DebugInformationFormat': '4',
            'DefaultCharIsUnsigned': 'true',
            'Detect64BitPortabilityProblems': 'true',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'abc',
            'EnableEnhancedInstructionSet': '1',
            'EnableFiberSafeOptimizations': 'true',
            'EnableFunctionLevelLinking': 'true',
            'EnableIntrinsicFunctions': 'true',
            'EnablePREfast': 'true',
            'ErrorReporting': '2',
            'ExceptionHandling': '2',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': '2',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': '1',
            'ForceConformanceInForLoopScope': 'false',
            'ForcedIncludeFiles': 'def',
            'ForcedUsingFiles': 'ge',
            'GeneratePreprocessedFile': '2',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': '1',
            'KeepComments': 'true',
            'MinimalRebuild': 'true',
            'ObjectFile': '$(IntDir)\\b',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMP': 'true',
            'Optimization': '3',
            'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
            'PrecompiledHeaderThrough': 'StdAfx.hd',
            'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
            'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
            'RuntimeLibrary': '3',
            'RuntimeTypeInfo': 'false',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '3',
            'SuppressStartupBanner': 'false',
            'TreatWChar_tAsBuiltInType': 'false',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'wer',
            'UseFullPaths': 'true',
            'UsePrecompiledHeader': '0',
            'UseUnicodeResponseFiles': 'false',
            'WarnAsError': 'true',
            'WarningLevel': '3',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': '$(IntDir)\\c'},
        'VCLinkerTool': {
            'AdditionalDependencies': 'zx',
            'AdditionalLibraryDirectories': 'asd',
            'AdditionalManifestDependencies': 's2',
            'AdditionalOptions': '/mor2',
            'AddModuleNamesToAssembly': 'd1',
            'AllowIsolation': 'false',
            'AssemblyDebug': '1',
            'AssemblyLinkResource': 'd5',
            'BaseAddress': '23423',
            'CLRImageType': '3',
            'CLRThreadAttribute': '1',
            'CLRUnmanagedCodeCheck': 'true',
            'DataExecutionPrevention': '0',
            'DelayLoadDLLs': 'd4',
            'DelaySign': 'true',
            'Driver': '2',
            'EmbedManagedResourceFile': 'd2',
            'EnableCOMDATFolding': '1',
            'EnableUAC': 'false',
            'EntryPointSymbol': 'f5',
            'ErrorReporting': '2',
            'FixedBaseAddress': '1',
            'ForceSymbolReferences': 'd3',
            'FunctionOrder': 'fssdfsd',
            'GenerateDebugInformation': 'true',
            'GenerateManifest': 'false',
            'GenerateMapFile': 'true',
            'HeapCommitSize': '13',
            'HeapReserveSize': '12',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreDefaultLibraryNames': 'flob;flok',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreImportLibrary': 'true',
            'ImportLibrary': 'f4',
            'KeyContainer': 'f7',
            'KeyFile': 'f6',
            'LargeAddressAware': '2',
            'LinkIncremental': '0',
            'LinkLibraryDependencies': 'false',
            'LinkTimeCodeGeneration': '1',
            'ManifestFile':
                '$(IntDir)\\$(TargetFileName).2intermediate.manifest',
            'MapExports': 'true',
            'MapFileName': 'd5',
            'MergedIDLBaseFileName': 'f2',
            'MergeSections': 'f5',
            'MidlCommandFile': 'f1',
            'ModuleDefinitionFile': 'sdsd',
            'OptimizeForWindows98': '2',
            'OptimizeReferences': '2',
            'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
            'PerUserRedirection': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
            'ProgramDatabaseFile': 'Flob.pdb',
            'RandomizedBaseAddress': '1',
            'RegisterOutput': 'true',
            'ResourceOnlyDLL': 'true',
            'SetChecksum': 'false',
            'ShowProgress': '1',
            'StackCommitSize': '15',
            'StackReserveSize': '14',
            'StripPrivateSymbols': 'd3',
            'SubSystem': '1',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'false',
            'SwapRunFromCD': 'true',
            'SwapRunFromNet': 'true',
            'TargetMachine': '1',
            'TerminalServerAware': '1',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'f3',
            'TypeLibraryResourceID': '12',
            'UACExecutionLevel': '2',
            'UACUIAccess': 'true',
            'UseLibraryDependencyInputs': 'true',
            'UseUnicodeResponseFiles': 'false',
            'Version': '333'},
        'VCResourceCompilerTool': {
            'AdditionalIncludeDirectories': 'f3',
            'AdditionalOptions': '/more3',
            'Culture': '3084',
            'IgnoreStandardIncludePath': 'true',
            'PreprocessorDefinitions': '_UNICODE;UNICODE2',
            'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
            'ShowProgress': 'true'},
        'VCManifestTool': {
            'AdditionalManifestFiles': 'sfsdfsd',
            'AdditionalOptions': 'afdsdafsd',
            'AssemblyIdentity': 'sddfdsadfsa',
            'ComponentFileName': 'fsdfds',
            'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
            'EmbedManifest': 'false',
            'GenerateCatalogFiles': 'true',
            'InputResourceManifests': 'asfsfdafs',
            'ManifestResourceFile':
                '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
            'OutputManifestFile': '$(TargetPath).manifestdfs',
            'RegistrarScriptFile': 'sdfsfd',
            'ReplacementsFile': 'sdffsd',
            'SuppressStartupBanner': 'false',
            'TypeLibraryFile': 'sfsd',
            'UpdateFileHashes': 'true',
            'UpdateFileHashesSearchPath': 'sfsd',
            'UseFAT32Workaround': 'true',
            'UseUnicodeResponseFiles': 'false',
            'VerboseOutput': 'true'}}
    # Expected output: the settings from the VS2010-converted .vcxproj
    # (with the inheritance macros removed, as explained in the docstring).
    expected_msbuild_settings = {
        'ClCompile': {
            'AdditionalIncludeDirectories': 'dir1',
            'AdditionalOptions': '/more /J',
            'AdditionalUsingDirectories': 'test',
            'AssemblerListingLocation': '$(IntDir)a',
            'AssemblerOutput': 'AssemblyCode',
            'BasicRuntimeChecks': 'EnableFastChecks',
            'BrowseInformation': 'true',
            'BrowseInformationFile': '$(IntDir)e',
            'BufferSecurityCheck': 'false',
            'CallingConvention': 'FastCall',
            'CompileAs': 'CompileAsC',
            'DebugInformationFormat': 'EditAndContinue',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'abc',
            'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
            'EnableFiberSafeOptimizations': 'true',
            'EnablePREfast': 'true',
            'ErrorReporting': 'Queue',
            'ExceptionHandling': 'Async',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': 'Size',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': 'Strict',
            'ForceConformanceInForLoopScope': 'false',
            'ForcedIncludeFiles': 'def',
            'ForcedUsingFiles': 'ge',
            'FunctionLevelLinking': 'true',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': 'OnlyExplicitInline',
            'IntrinsicFunctions': 'true',
            'MinimalRebuild': 'true',
            'ObjectFileName': '$(IntDir)b',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMPSupport': 'true',
            'Optimization': 'Full',
            'PrecompiledHeader': 'NotUsing',  # Actual conversion gives ''
            'PrecompiledHeaderFile': 'StdAfx.hd',
            'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
            'PreprocessKeepComments': 'true',
            'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
            'PreprocessSuppressLineNumbers': 'true',
            'PreprocessToFile': 'true',
            'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
            'RuntimeLibrary': 'MultiThreadedDebugDLL',
            'RuntimeTypeInfo': 'false',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '4Bytes',
            'SuppressStartupBanner': 'false',
            'TreatWarningAsError': 'true',
            'TreatWChar_tAsBuiltInType': 'false',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'wer',
            'UseFullPaths': 'true',
            'WarningLevel': 'Level3',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': '$(IntDir)c'},
        'Link': {
            'AdditionalDependencies': 'zx',
            'AdditionalLibraryDirectories': 'asd',
            'AdditionalManifestDependencies': 's2',
            'AdditionalOptions': '/mor2',
            'AddModuleNamesToAssembly': 'd1',
            'AllowIsolation': 'false',
            'AssemblyDebug': 'true',
            'AssemblyLinkResource': 'd5',
            'BaseAddress': '23423',
            'CLRImageType': 'ForceSafeILImage',
            'CLRThreadAttribute': 'MTAThreadingAttribute',
            'CLRUnmanagedCodeCheck': 'true',
            'DataExecutionPrevention': '',
            'DelayLoadDLLs': 'd4',
            'DelaySign': 'true',
            'Driver': 'UpOnly',
            'EmbedManagedResourceFile': 'd2',
            'EnableCOMDATFolding': 'false',
            'EnableUAC': 'false',
            'EntryPointSymbol': 'f5',
            'FixedBaseAddress': 'false',
            'ForceSymbolReferences': 'd3',
            'FunctionOrder': 'fssdfsd',
            'GenerateDebugInformation': 'true',
            'GenerateMapFile': 'true',
            'HeapCommitSize': '13',
            'HeapReserveSize': '12',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreSpecificDefaultLibraries': 'flob;flok',
            'ImportLibrary': 'f4',
            'KeyContainer': 'f7',
            'KeyFile': 'f6',
            'LargeAddressAware': 'true',
            'LinkErrorReporting': 'QueueForNextLogin',
            'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
            'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
            'MapExports': 'true',
            'MapFileName': 'd5',
            'MergedIDLBaseFileName': 'f2',
            'MergeSections': 'f5',
            'MidlCommandFile': 'f1',
            'ModuleDefinitionFile': 'sdsd',
            'NoEntryPoint': 'true',
            'OptimizeReferences': 'true',
            'OutputFile': '$(OutDir)$(ProjectName)2.exe',
            'PerUserRedirection': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
            'ProgramDatabaseFile': 'Flob.pdb',
            'RandomizedBaseAddress': 'false',
            'RegisterOutput': 'true',
            'SetChecksum': 'false',
            'ShowProgress': 'LinkVerbose',
            'StackCommitSize': '15',
            'StackReserveSize': '14',
            'StripPrivateSymbols': 'd3',
            'SubSystem': 'Console',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'false',
            'SwapRunFromCD': 'true',
            'SwapRunFromNET': 'true',
            'TargetMachine': 'MachineX86',
            'TerminalServerAware': 'false',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'f3',
            'TypeLibraryResourceID': '12',
            'UACExecutionLevel': 'RequireAdministrator',
            'UACUIAccess': 'true',
            'Version': '333'},
        'ResourceCompile': {
            'AdditionalIncludeDirectories': 'f3',
            'AdditionalOptions': '/more3',
            'Culture': '0x0c0c',
            'IgnoreStandardIncludePath': 'true',
            'PreprocessorDefinitions': '_UNICODE;UNICODE2',
            'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
            'ShowProgress': 'true'},
        'Manifest': {
            'AdditionalManifestFiles': 'sfsdfsd',
            'AdditionalOptions': 'afdsdafsd',
            'AssemblyIdentity': 'sddfdsadfsa',
            'ComponentFileName': 'fsdfds',
            'GenerateCatalogFiles': 'true',
            'InputResourceManifests': 'asfsfdafs',
            'OutputManifestFile': '$(TargetPath).manifestdfs',
            'RegistrarScriptFile': 'sdfsfd',
            'ReplacementsFile': 'sdffsd',
            'SuppressStartupBanner': 'false',
            'TypeLibraryFile': 'sfsd',
            'UpdateFileHashes': 'true',
            'UpdateFileHashesSearchPath': 'sfsd',
            'VerboseOutput': 'true'},
        'ProjectReference': {
            'LinkLibraryDependencies': 'false',
            'UseLibraryDependencyInputs': 'true'},
        '': {
            'EmbedManifest': 'false',
            'GenerateManifest': 'false',
            'IgnoreImportLibrary': 'true',
            'LinkIncremental': ''
        },
        'ManifestResourceCompile': {
            'ResourceOutputFileName':
                '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
    }
    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
        msvs_settings,
        self.stderr)
    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
    self._ExpectedWarnings([])
# Allow running this test file directly: discover and run all test cases.
if __name__ == '__main__':
    unittest.main()
| mit |
jarodsun/wesnoth1.12.6 | data/tools/wesnoth/libgithub.py | 33 | 24064 | # vim: tabstop=4: shiftwidth=4: expandtab: softtabstop=4: autoindent:
"""
This library provides an interface to github, the interface is build upon
the command line git tool.
"""
import logging
import os
try:
# Externally distributed, usually more up-to-date
import simplejson as json
except ImportError:
# Distributed with python since 2.6
import json
import shutil
import subprocess
import tempfile
import urllib2
#TODO: document and log where missing
class Error(StandardError):
    """Root of the exception hierarchy used by this module."""
    pass
class AddonError(Error):
    """Class for exceptions that belong to an add-on."""

    def __init__(self, addon, message):
        # Record both pieces individually and expose them through args so
        # pickling/re-raising behaves like a normal exception.
        self.addon = addon
        self.message = message
        self.args = (addon, message)

    def __str__(self):
        # %s applies str() to each operand, matching the original
        # "{0}: {1}".format(str(addon), str(message)) output exactly.
        return "%s: %s" % (self.addon, self.message)
class _execresult(object):
"""Store the results of GitHub._execute and Addon._execute"""
def __init__(self, out, err, returncode):
self.out = out
self.err = err
self.returncode = returncode
def __iter__(self):
yield self.out
yield self.err
yield self.returncode
class Addon(object):
    """Represents an add-on from a github directory.
    Each Addon object belongs to GitHub object and should not be created manually.
    """
    def __init__(self, github, name, readonly):
        """Initialize an Addon object.
        Do NOT use this constructor directly.
        github: Parent GitHub object that created this object.
        name: Name of the add-on that this object represents.
        readonly: Whether the add-on has been checked out over git: instead of ssh:
        """
        logging.debug("Addon created with name {0} and version {1}{2}".format(name, github.version, ". It is read-only" if readonly else ""))
        self.github = github
        self.name = name
        self.readonly = readonly
    def update(self):
        """Update this add-on.
        Returns whether anything changed.
        """
        logging.debug("Updating add-on {0}".format(self.name))
        out, err, ret = self._execute(["git", "pull"], check_error=False)
        if len(err):
            real_errs = []
            for line in err.splitlines():
                if line in ["Your configuration specifies to merge with the ref 'master'", "from the remote, but no such ref was fetched."]:
                    # This means the repository has no commits yet
                    pass
                elif "From" in line or "origin/master" in line:
                    # Regular fetch stuff
                    pass
                elif "Checking out files" in line:
                    # Irregular fetch stuff
                    # not being attached to a terminal *should* squelch progress reports
                    pass
                else:
                    real_errs.append(line)
            if real_errs:
                raise AddonError(self.name, "Error pulling:\n{0}".format("\n".join(real_errs)))
        def remove_untracked():
            # Helper: delete every path that "git status" reports as untracked.
            untracked = [line.replace("?? ","",1) for line in self._status() if line.startswith("??")]
            for item in untracked:
                try:
                    path = os.path.join(self.get_dir(), item)
                    if item.endswith("/"):
                        shutil.rmtree(path)
                    else:
                        os.remove(path)
                except:
                    logging.error("Failed to remove {0}".format(item))
        if "Already up-to-date." in out:
            return False
        elif "Fast-forward" in out:
            return True
        elif "Merge made by recursive." in out:
            logging.warn("Merge done in add-on {0}.".format(self.name))
            return True
        elif "CONFLICT" in out:
            #This means that a conflicting local commit was done
            #Its author will have to fix it
            logging.error("CONFLICT in add-on {0}. Please merge".format(self.name))
            return False
        elif "local changes" in err:
            logging.error("Found local changes in add-on {0}.".format(self.name))
            # If this is a read-write repo, leave the files be
            # If it's read-only, they're not supposed to be here
            if self.readonly:
                logging.warn("Attempting to fix.")
                # Get rid of local modifications
                self._execute(["git", "reset", "--hard"], check_error=False)
                status = self._status()
                untracked = [line for line in status if "??" in line]
                # I don't want to recursively delete directories
                if len(untracked) > 0:
                    logging.warn("Untracked files found. Attempting to remove...")
                    remove_untracked()
            return False
        elif "Untracked working tree" in err:
            if self.readonly:
                logging.error("Untracked files blocking pull of {0}. Attempting to remove...".format(self.name))
                remove_untracked()
            else:
                logging.error("Untracked files blocking pull of {0}. Please remove.".format(self.name))
            return False
        elif "Your configuration specifies to merge with the ref 'master'" in err:
            logging.info("Pulled from still-empty (not initialized) repository {0}.".format(self.name))
            return False
        else:
            logging.error("Unknown pull result in add-on {0}:\nOut: {1}\nErr: {2}".format(self.name, out, err))
            return False
    def sync_from(self, src, exclude):
        """Synchronises add-on from another directory.
        src: Directory with new add-on version.
        exclude: List of files to ignore.
        Returns whether anything changed.
        Raises libgithub.Error if the checkout is not clean.
        """
        logging.debug("Syncing add-on {0} from add-on server ({1})".format(self.name, src))
        status = self._status()
        if status:
            raise AddonError(self.name, "Checkout is not clean:\n{0}".format("\n".join(status)))
        self._rmtree(".", exclude)
        #actual copying
        self._copytree(src, self.get_dir(), ignore=lambda src,names: [n for n in names if n in exclude])
        self._execute(["git", "add", "."], check_error=True)
        status = self._status()
        return len(status) > 0
    def commit(self, message):
        """Commits and pushes add-on to git repo.
        message: Commit message.
        Raises libgithub.Error if something went wrong
        """
        logging.debug("Committing and pushing add-on {0}".format(self.name))
        # Pass the message through a temporary file so arbitrary content is
        # safe to use; the finally-clause guarantees the file is removed even
        # when the commit fails (previously a failed commit leaked the file).
        tmpfile = tempfile.NamedTemporaryFile(delete=False)
        try:
            tmpfile.write(message)
            tmpfile.close()
            self._execute(["git", "commit", "-F", tmpfile.name], check_error=True)
        finally:
            os.remove(tmpfile.name)
        out, err, ret = self._execute(["git", "push", "-u", "--porcelain", "origin", "master"], check_error=False)
        statusline = [x for x in out.splitlines() if "refs/heads/master" in x]
        if not statusline:
            raise AddonError(self.name, "No statusline produced by git push")
        else:
            # --porcelain output: first char is a status flag, rest is
            # "<refspec> <summary>"
            status = statusline[0][0]
            refs, summary = statusline[0][1:].split(None, 1)
            if status == " ":
                # Fast forward
                pass
            elif status == "*":
                # Freshly initiated repository
                pass
            elif status == "=":
                # Up to date?
                logging.warn("Commit to add-on {0} with message '{1}' has not made any changes".format(self.name, message))
            elif status == "!":
                raise AddonError(self.name, "Commit with message '{0}' failed for reason {1}".format(message, summary))
            else:
                raise AddonError(self.name, "Commit with message '{0}' has done something unexpected: {1}".format(message, statusline[0]))
    def get_dir(self):
        """Return the directory this add-on's checkout is in.
        """
        return os.path.join(self.github.directory, self.name)
    # Internal functions
    def _rmtree(self, directory, exclude):
        """Recursively "git rm" everything under directory except excluded
        entries and the .git metadata directory."""
        logging.debug("Deleting tree {0}, except for {1}".format(self.name, ",".join(exclude)))
        # Ensure the os calls all happen in the right directory
        # not needed for _execute, as that does the cwd manipulation itself
        # so only the os.chdir and os.path.isdir here need it
        # Another option would be to os.path.join with self.get_dir
        os.chdir(self.get_dir())
        for entry in os.listdir(directory):
            if entry in exclude:
                continue
            if entry == ".git":
                continue
            relpath = os.path.join(directory, entry)
            if os.path.isdir(relpath):
                self._rmtree(relpath, exclude)
                # git rm removes directories that it empties
                if os.path.exists(relpath):
                    self._execute(["rmdir", "--ignore-fail-on-non-empty", relpath])
            else:
                self._execute(["git", "rm", relpath], check_error=True)
    def _copytree(self, src, dst, ignore=None):
        """Recursively copy a directory tree using copy2().
        Based on shutil.copytree
        """
        names = os.listdir(src)
        if ignore is not None:
            ignored_names = ignore(src, names)
        else:
            ignored_names = set()
        if not os.path.exists(dst):
            os.makedirs(dst)
        # Collect human-readable error strings. (The previous version mixed
        # tuples -- and, via extend() on a string, single characters -- into
        # this list, which made the "\n".join() below raise a TypeError and
        # mask the real errors.)
        errors = []
        for name in names:
            if name in ignored_names:
                continue
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            try:
                if os.path.isdir(srcname):
                    self._copytree(srcname, dstname, ignore)
                else:
                    shutil.copy2(srcname, dstname)
                # XXX What about devices, sockets etc.?
            except (IOError, os.error) as why:
                errors.append("{0} -> {1}: {2}".format(srcname, dstname, str(why)))
            # catch the Error from the recursive copytree so that we can
            # continue with other files
            except Error as err:
                errors.append(str(err))
        try:
            shutil.copystat(src, dst)
        except OSError as why:
            if shutil.WindowsError is not None and isinstance(why, shutil.WindowsError):
                # Copying file access times may fail on Windows
                pass
            else:
                errors.append("{0} -> {1}: {2}".format(src, dst, str(why)))
        if errors:
            raise AddonError(self.name, "Errors attempting to sync:\n{0}".format("\n".join(errors)))
    def _status(self):
        """Return "git status --porcelain" output as a list of non-empty lines."""
        out, err, ret = self._execute(["git", "status", "--porcelain"])
        if err:
            raise AddonError(self.name, "Status failed with message: {0}".format(err))
        return [line for line in out.split('\n') if len(line)]
    def _execute(self, command, check_error = False):
        """Run a command inside this add-on's checkout (see GitHub._execute)."""
        return self.github._execute(command, cwd=self.get_dir(), check_error=check_error)
# GitHub REST API endpoints used below. All add-on repositories live in the
# "wescamp" organization; numbered placeholders are filled via str.format().
_GITHUB_API_BASE = "https://api.github.com/"
_GITHUB_API_REPOS = "orgs/wescamp/repos"
_GITHUB_API_TEAMS = "orgs/wescamp/teams"
# PUT /teams/:id/repos/:org/:repo
_GITHUB_API_TEAM_REPO = "teams/{0}/repos/wescamp/{1}"
# POST /repos/:user/:repo/hooks
_GITHUB_API_HOOKS = "repos/wescamp/{0}/hooks"
class GitHub(object):
    """Interface to a github checkout directory. Such a directory contains all translatable add-ons for a certain wesnoth version.
    Every GitHub object is specific to a directory and wesnoth version.
    """
    def __init__(self, directory, version, authorization=None):
        """Initializes a GitHub object.
        directory: Directory in which the git repos for this wesnoth branch live.
        version: The version of this wesnoth branch.
        authorization: Optional "user:password" pair or OAuth token for the API.
        """
        logging.debug("GitHub created with directory {0} and version {1}, {2} authentication data".format(directory, version, "with" if authorization else "without"))
        self.directory = directory
        self.version = version
        self.authorization = authorization
    def update(self):
        """Update all add-ons.
        Returns whether anything changed.
        """
        logging.debug("Updating in directory {0}".format(self.directory))
        changed = False
        changed |= self._get_new_addons()
        for addon in self._get_local_addons():
            changed |= self.addon(addon).update()
        return changed
    def addon(self, name, readonly=False):
        """Returns an add-on object for the given name.
        name: Name of the add-on.
        readonly: If set, and the add-on needs to be freshly cloned, use a read-only protocol
        Raises libgithub.Error if no such add-on exists.
        """
        logging.debug("Generating add-on object for {0}".format(name))
        if not os.path.isdir(self._absolute_path(name)):
            logging.debug("Add-on {0} not found locally, checking github.".format(name))
            github_list = self._github_repos_list(readonly=readonly)
            matches = [x for x in github_list if x[0] == name]
            if matches:
                repo = matches[0]
                self._clone(repo[0], repo[1])
            else:
                raise AddonError(name, "Add-on not found")
        return Addon(self, name, readonly)
    def create_addon(self, name):
        """Creates a new add-on on github.
        name: Name of the add-on.
        Returns an Addon object for the new add-on.
        """
        logging.debug("Creating new add-on {0}".format(name))
        response = self._github_repos_create(name)
        self._clone(name, response["ssh_url"])
        return self.addon(name)
    def addon_exists(self, name):
        """Checks whether an add-on exists on github.
        name: Name of the add-on.
        Returns a bool representing the existence of the add-on.
        """
        logging.debug("Checking whether add-on {0} exists".format(name))
        github_list = self._github_repos_list()
        return name in [repo[0] for repo in github_list]
    def list_addons(self):
        """Returns a list of valid add-on names.
        Returns a list of names that can be passed to self.addon()
        """
        logging.debug("Generating list of add-on names for version {0}".format(self.version))
        github_list = self._github_repos_list()
        return [repo[0] for repo in github_list]
    def _absolute_path(self, name):
        """Return the local checkout path for the given add-on name."""
        return os.path.join(self.directory, name)
    def _clone(self, name, url):
        """Clone the repository at url into the local checkout directory."""
        target = self._absolute_path(name)
        out, err, ret = self._execute(["git", "clone", url, target])
        # Rather hacky: git reports progress on stderr, so only treat
        # output other than the empty-repository warning as an error.
        if len(err):
            errors = [line.strip() for line in err.split('\n') if len(line)]
            got_error = False
            for error in errors:
                if error != "warning: You appear to have cloned an empty repository.":
                    got_error = True
                    break
            if got_error:
                raise AddonError(name, "Error cloning: " + err)
    def _get_new_addons(self):
        """Check out any new add-ons.
        Returns whether anything changed.
        """
        changed = False
        github_list = self._github_repos_list()
        local_list = self._get_local_addons()
        for repo in github_list:
            if repo[0] not in local_list:
                self._clone(repo[0], repo[1])
                changed = True
        return changed
    def _get_local_addons(self):
        """Returns list of local add-ons (entries of the checkout directory)."""
        return os.listdir(self.directory)
    # Class-level cache for the repository list fetched from the GitHub API.
    _github_repos_memo = None
    def _github_repos_list(self, readonly=False):
        """Get a list of repositories.
        readonly: Should the tuples have ssh urls or readonly urls.
        Returns a list of tuples that contain the add-on name and the url.
        """
        if not self._github_repos_memo:
            url = _GITHUB_API_BASE + _GITHUB_API_REPOS
            self._github_repos_memo = self._github_api_request(url)
        version_suffix = "-{0}".format(self.version)
        return [(repo["name"][:-len(version_suffix)], repo["git_url"] if readonly else repo["ssh_url"])
                for repo in self._github_repos_memo if repo["name"].endswith(version_suffix)]
    def _github_repos_create(self, name):
        """Create a new repository.
        name: The name of the add-on for which the repository will be created.
        """
        reponame = "{0}-{1}".format(name, self.version)
        # Create the repository
        url = _GITHUB_API_BASE + _GITHUB_API_REPOS
        requestdata = { "name" : reponame }
        repodata = self._github_api_request(url, requestdata, authenticate=True)
        # Request the teams
        url = _GITHUB_API_BASE + _GITHUB_API_TEAMS
        teams = self._github_api_request(url, authenticate=True)
        # Find the right team number
        # This can probably be cleaner
        team_number = [team["id"] for team in teams if team["name"] == "Developers"][0]
        # Add the repository to the team
        # PUT /teams/:id/repos/:org/:repo
        baseurl = _GITHUB_API_BASE + _GITHUB_API_TEAM_REPO
        url = baseurl.format(team_number, reponame)
        # Github requires data for every modifying request, even if there is none
        self._github_api_request(url, data="", method="PUT", authenticate=True)
        # Add commit hook
        baseurl = _GITHUB_API_BASE + _GITHUB_API_HOOKS
        url = baseurl.format(reponame)
        requestdata = { "name" : "web", "events" : ["push"], "active" : True,
            "config" : {
                "url" : "http://ai0867.net:6660/wescamp",
                "content_type" : "json"
            }
        }
        self._github_api_request(url, requestdata, authenticate=True)
        return repodata
    def _github_api_request(self, url, data=None, method=None, authenticate=False):
        """Perform one GitHub API call and return the parsed JSON response.
        url: Full API url.
        data: Optional request body; "" forces an empty body (PUT quirk).
        method: Optional HTTP verb override (e.g. "PUT").
        authenticate: Force sending the Authorization header.
        Follows rel="next" Link headers to collect paginated results.
        """
        logging.debug("Making github API request {0}".format(url))
        request = urllib2.Request(url)
        if method:
            request.get_method = lambda: method
        if data == "":
            # Workaround for PUTs requiring data, even if you have nothing to pass
            request.add_data(data)
        elif data:
            request.add_data(json.dumps(data))
        # Manually adding authentication data
        # Basic works in curl, but urllib2 doesn't
        # probably because github's API doesn't send a www-authenticate header
        if authenticate or self._github_have_authorization():
            from base64 import encodestring
            auth = self._github_authorization()
            if ":" in auth:
                # username:password
                base64string = encodestring(auth).replace('\n','')
                request.add_header("Authorization", "Basic {0}".format(base64string))
            else:
                # token
                request.add_header("Authorization", "Bearer {0}".format(auth))
        try:
            response = urllib2.urlopen(request)
        except IOError as e:
            raise Error("GitHub API failure: " + str(e))
        if response.code == 204:
            # 204 = No content
            return None
        json_parsed = json.load(response)
        link_headers = response.info().getallmatchingheaders("Link")
        if link_headers:
            logging.debug("Found a Link header in response, analyzing...")
            # A Link header looks like:
            #   Link: <https://...&page=2>; rel="next", <https://...&page=5>; rel="last"
            # str.lstrip()/rstrip() strip a *set of characters*, not a prefix,
            # so the previous lstrip('rel="') corrupted relation names that
            # start with one of those characters (e.g. rel="last" -> "ast").
            # Split on the actual delimiters instead.
            link_header = link_headers[0].split(":", 1)[1]
            links_dict = {}
            for raw_link in link_header.split(","):
                url_part, _, rel_part = raw_link.partition(";")
                target = url_part.strip().lstrip("<").rstrip(">")
                rel = rel_part.strip()
                if rel.startswith('rel="'):
                    rel = rel[len('rel="'):].rstrip('"')
                links_dict[rel] = target
            if "next" in links_dict:
                logging.debug("Link with rel=\"next\" found, recursing to deal with pagination")
                rest = self._github_api_request(links_dict["next"], data, method, authenticate)
                json_parsed += rest
        return json_parsed
    def _github_have_authorization(self):
        """Whether API credentials were supplied to this object."""
        return self.authorization != None
    def _github_authorization(self):
        """Return the stored credentials, raising Error when absent."""
        if self.authorization:
            return self.authorization
        else:
            raise Error("Authentication required")
    def _execute(self, command, cwd=None, check_error=False):
        #TODO: have an errorcheck that actually checks the returncode?
        """Executes a command.
        command: The command to execute.
        cwd: Directory to execute the command from.
        check_error: Whether to raise an exception if there's stderr output.
        Returns stdout, stderr.
        Raises libgithub.Error if check_error and len(err).
        """
        logging.debug("execute command = '%s'", command)
        p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, cwd=cwd)
        # communicate() drains stdout and stderr concurrently until EOF and
        # waits for the process to exit; the previous poll()/read() loop could
        # deadlock when one pipe's buffer filled while the other was read.
        out, err = p.communicate()
        logging.debug("===== stdout ====\n%s\n===== stdout ====", out)
        logging.debug("===== stderr ====\n%s\n===== stderr ====", err)
        if check_error and len(err):
            raise Error("Failure executing command '{0}': {1}".format(" ".join(command), err))
        return _execresult(out, err, p.returncode)
def _gen(possible_dirs):
    """Generator producing a shared build-system checkout.

    Yields the same Addon object on every advance; on close (GeneratorExit)
    it removes the checkout again if it had to be freshly cloned into a
    temporary directory.  Note: uses the Python 2-only three-argument
    raise syntax below.
    """
    def _get_build_system(possible_dirs):
        """Find a valid build-system checkout among possible_dirs, or clone
        a fresh read-only one into a temporary directory.
        Returns a tuple (Addon, created_fresh_clone)."""
        logging.debug("get_build_system with paths: %s", ";".join(possible_dirs))
        if not isinstance(possible_dirs, list):
            raise Error("Incorrect argument type passed, {0} instead of {1}".format(str(type(possible_dirs)), str(list)))
        def is_good_checkout(addon):
            # A valid checkout has the wescamp/build-system remote configured.
            try:
                out, err, ret = addon._execute(["git", "remote", "-v"], check_error=True)
                test = "wescamp/build-system"
                return test in out
            except:
                return False
        for path in possible_dirs:
            base, rest = os.path.split(path.rstrip(os.sep))
            # Wrap the candidate directory in throwaway GitHub/Addon objects
            # so the git helpers can be reused for the probe.
            fake_github = GitHub(base, "system")
            fake_build = Addon(fake_github, rest, True)
            if is_good_checkout(fake_build):
                logging.debug("Found {0} to be valid build-system checkout".format(path))
                return fake_build, False
            else:
                logging.debug("Discarded possible checkout {0}".format(path))
        logging.debug("No candidates left, creating new checkout")
        realish_github = GitHub(tempfile.mkdtemp(),"system")
        build_system = realish_github.addon("build", readonly=True)
        return build_system, True
    try:
        bs, fresh = _get_build_system(possible_dirs)
        bs.update()
    except Error as e:
        # Exception to make sure nobody catches it
        # Use raise ... from syntax in python3
        import sys
        raise Exception(str(e)), None, sys.exc_info()[2]
    # Add references to shutil and os to ensure we're destructed before they are
    stored_shutil = shutil
    stored_os = os
    try:
        while True:
            # Don't make a fresh clone every call
            yield bs
    except GeneratorExit:
        # Clean up our temporary clone
        if fresh:
            stored_shutil.rmtree(bs.get_dir())
            stored_os.rmdir(os.path.dirname(bs.get_dir()))
# Module-level cache holding the generator created by _gen(); initialized
# lazily so repeated calls share a single build-system checkout.
_g = None
def get_build_system(possible_dirs=[]):
    """Create a special 'add-on', containing the wescamp build system.
    possible_dirs: List of paths to possible existing checkouts.
    Returns: The Addon object of the build-system
    """
    # NOTE(review): the mutable default argument is only read, never mutated,
    # and only the first call's value is ever used (later calls hit the cache).
    global _g
    if _g == None:
        _g = _gen(possible_dirs)
    return _g.next()
| gpl-2.0 |
xamfoo/thumbor-docker | setup/thumbor/thumbor/optimizers/__init__.py | 13 | 1083 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/globocom/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
import os
from tempfile import NamedTemporaryFile
class BaseOptimizer(object):
    """Base class for thumbor image optimizers.

    Subclasses implement optimize(buffer, input_file, output_file) and may
    override should_run() to restrict which images they handle.
    """
    def __init__(self, context):
        # thumbor request/server context, kept around for subclasses
        self.context = context

    def should_run(self, image_extension, buffer):
        """Whether this optimizer applies to the given image. Default: always."""
        return True

    def run_optimizer(self, image_extension, buffer):
        """Run optimize() over *buffer* via temporary files.

        Returns the optimized bytes, or the buffer unchanged when
        should_run() declines it. The temporary files are always removed.
        """
        if not self.should_run(image_extension, buffer):
            return buffer
        src = NamedTemporaryFile(delete=False)
        dst = NamedTemporaryFile(delete=False)
        try:
            src.write(buffer)
            src.close()
            dst.close()
            self.optimize(buffer, src.name, dst.name)
            # reopen to pick up whatever the optimizer wrote
            dst = open(dst.name, 'rb')
            return dst.read()
        finally:
            os.unlink(src.name)
            os.unlink(dst.name)
| mit |
Goyatuzo/Challenges | HackerRank/Algorithms/Sorting/Insertion Sort Part 1/insertion_sort_p1.py | 1 | 1113 | def insertion_sort(lst):
"""Instead of just inserting the value where it should be at,
it shifts the entire array until the location is found. It prints
out all the intermediate steps, but the final step is actually just
returned, so the output must be manually printed.
:param lst: The list of values to be sorted by insertion."""
# The value to be inserted.
to_insert = lst[-1]
n = len(lst)
# Remove the element to be added and replace with last element.
del lst[-1]
lst.append(lst[-1])
print(" ".join(map(str, lst)))
for i in range(n - 2, -1, -1):
# If it's at the beginning of the list, just insert it.
if i <= 0:
lst.insert(0, to_insert)
del lst[1]
break
# If it's in the middle of the list.
elif lst[i - 1] <= to_insert and lst[i] >= to_insert:
lst.insert(i, to_insert)
del lst[i + 1]
break
else:
lst.insert(i, lst[i - 1])
del lst[i + 1]
print(" ".join(map(str, lst)))
return " ".join(map(str, lst))
| mit |
cshallue/models | research/gan/stargan/ops_test.py | 2 | 3109 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
import ops
class OpsTest(tf.test.TestCase):
  """Unit tests for the padding utilities in ops.py (StarGAN)."""

  def test_padding_arg(self):
    # NHWC: batch and channel axes get zero padding.
    pad_h = 2
    pad_w = 3
    self.assertListEqual([[0, 0], [pad_h, pad_h], [pad_w, pad_w], [0, 0]],
                         ops._padding_arg(pad_h, pad_w, 'NHWC'))

  def test_padding_arg_specify_format(self):
    # Without a batch axis ('HWC') only three padding pairs are produced.
    pad_h = 2
    pad_w = 3
    self.assertListEqual([[pad_h, pad_h], [pad_w, pad_w], [0, 0]],
                         ops._padding_arg(pad_h, pad_w, 'HWC'))

  def test_padding_arg_invalid_format(self):
    # Unknown layout strings are rejected.
    pad_h = 2
    pad_w = 3
    with self.assertRaises(ValueError):
      ops._padding_arg(pad_h, pad_w, 'INVALID')

  def test_padding(self):
    # ops.pad grows height and width by 2*pad on a 4-D (NHWC) tensor.
    n = 2
    h = 128
    w = 64
    c = 3
    pad = 3
    test_input_tensor = tf.random_uniform((n, h, w, c))
    test_output_tensor = ops.pad(test_input_tensor, padding_size=pad)
    with self.test_session() as sess:
      output = sess.run(test_output_tensor)
      self.assertTupleEqual((n, h + pad * 2, w + pad * 2, c), output.shape)

  def test_padding_with_3D_tensor(self):
    # Rank-3 (HWC) tensors are padded the same way, without a batch axis.
    h = 128
    w = 64
    c = 3
    pad = 3
    test_input_tensor = tf.random_uniform((h, w, c))
    test_output_tensor = ops.pad(test_input_tensor, padding_size=pad)
    with self.test_session() as sess:
      output = sess.run(test_output_tensor)
      self.assertTupleEqual((h + pad * 2, w + pad * 2, c), output.shape)

  def test_padding_with_tensor_of_invalid_shape(self):
    # Rank-5 input is rejected.
    n = 2
    invalid_rank = 1
    h = 128
    w = 64
    c = 3
    pad = 3
    test_input_tensor = tf.random_uniform((n, invalid_rank, h, w, c))
    with self.assertRaises(ValueError):
      ops.pad(test_input_tensor, padding_size=pad)

  def test_condition_input_with_pixel_padding(self):
    # The per-sample label vector should be broadcast onto every pixel as
    # num_label extra channels.
    n = 2
    h = 128
    w = h
    c = 3
    num_label = 5
    input_tensor = tf.random_uniform((n, h, w, c))
    label_tensor = tf.random_uniform((n, num_label))
    output_tensor = ops.condition_input_with_pixel_padding(
        input_tensor, label_tensor)
    with self.test_session() as sess:
      labels, outputs = sess.run([label_tensor, output_tensor])
      self.assertTupleEqual((n, h, w, c + num_label), outputs.shape)
      # Every pixel's trailing channels must equal the sample's label.
      for label, output in zip(labels, outputs):
        for i in range(output.shape[0]):
          for j in range(output.shape[1]):
            self.assertListEqual(label.tolist(), output[i, j, c:].tolist())
# Allow running this test file directly.
if __name__ == '__main__':
  tf.test.main()
| apache-2.0 |
x1957/thrift | lib/py/src/TSerialization.py | 111 | 1389 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from .protocol import TBinaryProtocol
from .transport import TTransport
def serialize(thrift_object,
              protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()):
    """Render *thrift_object* into its serialized byte representation.

    protocol_factory selects the wire protocol; defaults to the binary
    protocol.
    """
    buf = TTransport.TMemoryBuffer()
    thrift_object.write(protocol_factory.getProtocol(buf))
    return buf.getvalue()
def deserialize(base,
                buf,
                protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()):
    """Populate *base* in place from the serialized bytes in *buf*.

    Returns the same *base* object for caller convenience.
    """
    transport = TTransport.TMemoryBuffer(buf)
    base.read(protocol_factory.getProtocol(transport))
    return base
Loisel/tmr3 | tmr.py | 1 | 15096 | #!/usr/bin/python
"""
A module to calculate the current, the conductance and the TMR from
a set of rate arrays.
The rate arrays are supposed to be stored in a h5 file in the job directory.
The result is stored in a h5 file. The name of the dataset contains all
parameters. They are also stored as attributes in the dataset.
The conductance in the two lead configurations (parallel/anti-parallel)
are stored in arrays in the dataset.
Usage:
./tmr.py <jobname>
"""
import numpy as np
from numpy import linalg
import time
import sys
import getopt
import h5py
import os
# We are picky about possible floating point overflows
# to avoid calculating NaNs
np.seterr(divide="raise")
np.seterr(invalid="raise")
# A helper module to calculate the populations.
import pop
# The configuration module
import cfg
# path to the dat directory
datpath = "dat/"
# name of the temporary file where the rates are stored
ratefile = "running_calc.h5"
# name of the h5 file to store the conductance for the two configuration
# and the configuraion parameters.
hdffile = "simdata_new.h5"
def save_hdf5(fname,G_P,G_AP):
    """
    Store the conductance and the configuration to the h5 file.
    Args:
        fname: filename of the h5 file
        G_P: the conductance for leads with parallel magnetization
        G_AP: the conductance for leads with anti-parallel magnetization
    """
    print("Shape of GP {}".format(G_P.shape))
    # The with-statement guarantees the file is closed even when writing
    # fails; the previous explicit close() was skipped on any exception
    # other than KeyError.
    with h5py.File(fname, "a") as fileh:
        # Note that the selection of parameters to construct the name of the
        # dataset should be chosen such that this string is unique!
        # That is, it should contain all running parameters.
        dset_name = "G={}_kbT={}_Ec={}_E0={}_Pol={}_PolOrb={}_SO={}_tau={}_DS={}_B_P={}_B_AP={}_B_ORB_P={}_B_ORB_AP={}_W_e={}_W_0={}".format(cfg.conf['G_scale'],cfg.conf['kBT'],cfg.conf['E_C'],cfg.conf['E_0'],cfg.conf['Pol'],cfg.conf['OrbPol'],cfg.conf['SO'],cfg.conf['tau_r'],cfg.conf['D_S_factor'],cfg.conf['B_P'],cfg.conf['B_AP'],cfg.conf['B_ORB_P'],cfg.conf['B_ORB_AP'],cfg.conf['W_E'],cfg.conf['W_0'])
        try:
            # we create the dataset: parallel and anti-parallel conductance
            # stacked into a single two-row array
            dset = fileh.create_dataset(dset_name,data=np.vstack((G_P,G_AP)))
            # and store the config attributes
            dset.attrs['alpha'] = cfg.conf['ALPHA']
            dset.attrs['temperature'] = cfg.conf['kBT']
            dset.attrs['coupling'] = cfg.conf['G_scale']
            dset.attrs['electron_number'] = cfg.conf['N_0']
            dset.attrs['charging_energy'] = cfg.conf['E_C']
            dset.attrs['level_spacing'] = cfg.conf['E_0']
            dset.attrs['polarization_spin'] = cfg.conf['Pol']
            dset.attrs['polarization_orbit'] = cfg.conf['OrbPol']
            dset.attrs['spinorbit'] = cfg.conf['SO']
            dset.attrs['stonershift'] = cfg.conf['D_S_factor']
            dset.attrs['tau_r'] = cfg.conf['tau_r']
            dset.attrs['vg_min'] = cfg.conf['V_g_min']
            dset.attrs['vg_max'] = cfg.conf['V_g_max']
            dset.attrs['b_p'] = cfg.conf['B_P']
            dset.attrs['b_ap'] = cfg.conf['B_AP']
            dset.attrs['b_orb_p'] = cfg.conf['B_ORB_P']
            dset.attrs['b_orb_ap'] = cfg.conf['B_ORB_AP']
            dset.attrs['w_0'] = cfg.conf['W_0']
            dset.attrs['w_e'] = cfg.conf['W_E']
            dset.attrs['timestamp'] = time.time()
        except KeyError:
            # If the choice was not unique we complain but continue.
            print("Dataset exists.")
def eval_DENKER(GM,GP,configuration):
    """
    Evaluate the density matrix kernel using the in- and out-tunneling rates.
    Args:
        GM,GP: numpy arrays containing in- and out-tunneling rates
               in the order of cfg.TLIST.
        configuration: integer determining parallel (0) or anti-parallel(1)
                       configuration
    Returns:
        the density matrix as a square 2-d numpy array that is NP**2 in size,
        where NP is the number of states in the groundstatespace.
    """
    # we get a view on the transition list and, for simplicity, its transpose
    TLIST = cfg.TLIST[configuration]
    TLIST_T = np.transpose(TLIST)
    # from all transitions we extract all groundstates in the statespace
    # this is probably a complicated way to do it
    PLIST = list(set(TLIST_T[0]).union(TLIST_T[1]))
    # ... and sort it by index
    PLIST.sort()
    # the number of groundstates
    NP = len(PLIST)
    # let's create an empty density matrix
    ME = np.zeros((NP,NP))
    # we create a version of the transition list that does not contain
    # the indices in terms of the energy array (see cfg.py), but
    # in terms of the number in the state list (plist)
    # (the transition list can then be used to denote non-zero matrix elements)
    # NOTE: the mask compares against the *original* TLIST, so earlier
    # relabelling passes cannot alias with later state labels.
    TMP = np.copy(TLIST)
    for idx,val in enumerate(PLIST):
        TMP[TLIST == val] = idx
    # We calculate diagonal elements of the density matrix:
    # TLIST_T[1] == num selects the correct in-tunneling rates for the
    # state with label num
    # have a look at numpy.where to understand this line
    for idx,num in enumerate(PLIST):
        ME[idx,idx] = -np.sum(np.where(TLIST_T[1] == num,GP,0.)) - np.sum(np.where(TLIST_T[0] == num,GM,0.))
    # for the off diagonal elements we can directly use the generated TMP
    # transition list
    for k,tup in enumerate(TMP):
        ME[tup[0],tup[1]] = GP[k]
        ME[tup[1],tup[0]] = GM[k]
        # print "tup: {} and matrix element {}".format(tup,ME[tuple(tup)])
    return ME
def eval_CURKER(GM,GP,configuration):
    """
    Evaluate the current kernel using the in- and out-tunneling rates.
    Args:
        GM,GP: numpy arrays containing in- and out-tunneling rates
               in the order of cfg.TLIST.
        configuration: integer determining parallel (0) or anti-parallel(1)
                       configuration
    Returns:
        the current kernel as a 1-d numpy array.
    """
    # We get a view on the transition list and its transpose
    TLIST = cfg.TLIST[configuration]
    TLIST_T = np.transpose(TLIST)
    # ... and extract the list of groundstates (see also eval_DENKER)
    PLIST = list(set(TLIST_T[0]).union(TLIST_T[1]))
    PLIST.sort()
    # this determines the size of the statespace
    NP = len(PLIST)
    CUR = np.zeros(NP)
    # Note that the current kernel can be calculated by summing the diagonal elements
    # of the density matrix with opposite sign
    # compare eval_DENKER
    for idx,num in enumerate(PLIST):
        CUR[idx] = np.sum(np.where(TLIST_T[1] == num,GP,0.)) - np.sum(np.where(TLIST_T[0] == num,GM,0.))
    return CUR
def current(GP, GM, POP, configuration):
    """
    Calculate the current using the rates and populations.
    Args:
        GP, GM: np-arrays containing in- and out-tunneling rates.
        POP: np-array for the populations
        configuration: integer determining parallel (0) or anti-parallel(1)
            configuration
    Returns:
        current as a float.
    """
    # Build the current kernel from the rates ...
    kernel = eval_CURKER(GM, GP, configuration)
    # ... and contract it with the population vector; the elementary
    # charge from the configuration sets the overall scale.
    return -np.sum(cfg.conf["ELE"] * np.dot(kernel, POP))
def eval_tmr(fname,plotname,pop):
    """
    Calculates the TMR by evaluating conductance through
    parallel and anti-parallel polarized contacts.
    Args:
        fname: the h5 file to load the rates from.
        plotname: A name for the pdf output to produce.
        pop: If True, we plot the populations, too.
    """
    # The parameter `pop` shadows the module-level `import pop`; re-import
    # the population helper under a private name so the boolean flag and
    # the module can coexist (previously `pop.asymptotic_ssp` raised
    # AttributeError whenever the documented boolean was passed).
    import pop as _pop
    # We prepare the current and conductance vectors for different
    # values of gate and bias voltage
    C_p = np.zeros((cfg.conf['NV'],cfg.conf['NVb']))
    C_ap = np.zeros((cfg.conf['NV'],cfg.conf['NVb']))
    G_p = np.zeros((cfg.conf['NV'],cfg.conf['NVb']-1))
    G_ap = np.zeros((cfg.conf['NV'],cfg.conf['NVb']-1))
    # bias step, used for the numerical derivative dI/dV below
    dVb = cfg.conf['Vb_range'][1]- cfg.conf['Vb_range'][0]
    # the population vectors, for all values of gate and bias
    POP_p = np.zeros((cfg.conf['NVb'],cfg.conf['NV'],cfg.N_GS[0]))
    POP_ap = np.zeros((cfg.conf['NVb'],cfg.conf['NV'],cfg.N_GS[1]))
    # We iterate over two bias values first
    for nV,Vb in enumerate(cfg.conf["Vb_range"]):
        # now the rates are loaded from the h5 file
        # note that the label of the specific rate arrays are fixed
        # (renamed from `file` to avoid shadowing the builtin)
        with h5py.File(fname) as ratefh:
            GP0_p = np.array(ratefh['par_P0_V{}'.format(Vb)])
            GP0_ap = np.array(ratefh['apa_P0_V{}'.format(Vb)])
            GP1_p = np.array(ratefh['par_P1_V{}'.format(Vb)])
            GP1_ap = np.array(ratefh['apa_P1_V{}'.format(Vb)])
            GM0_p = np.array(ratefh['par_M0_V{}'.format(Vb)])
            GM0_ap = np.array(ratefh['apa_M0_V{}'.format(Vb)])
            GM1_p = np.array(ratefh['par_M1_V{}'.format(Vb)])
            GM1_ap = np.array(ratefh['apa_M1_V{}'.format(Vb)])
        # for the density kernel, we sum all rates over both leads
        DENKER_p = np.array([eval_DENKER(GM0_p[n]+GM1_p[n],GP0_p[n]+GP1_p[n],0) for n in range(cfg.conf["NV"])])
        DENKER_ap = np.array([eval_DENKER(GM0_ap[n]+GM1_ap[n],GP0_ap[n]+GP1_ap[n],1) for n in range(cfg.conf["NV"])])
        # the populations are calculated from the density kernel by an asymptotic
        # approximation scheme
        POP_ap[nV] = np.array([_pop.asymptotic_ssp(DENKER_ap[n]) for n in range(cfg.conf["NV"])])
        POP_p[nV] = np.array([_pop.asymptotic_ssp(DENKER_p[n]) for n in range(cfg.conf["NV"])])
        # note that the current is calculated from the rates in one of the leads only
        C_p[:,nV] = np.array([ current(GP0_p[n],GM0_p[n],POP_p[nV,n],0) for n in np.arange(cfg.conf["NV"]) ])
        C_ap[:,nV] = np.array([ current(GP0_ap[n],GM0_ap[n],POP_ap[nV,n],1) for n in np.arange(cfg.conf["NV"]) ])
    # the numerical derivative gives the conductance
    G_p = np.diff(C_p).flatten()/dVb
    G_ap = np.diff(C_ap).flatten()/dVb
    # we save the conductance traces to a h5 file specified as a global variable
    # hdffile in the path datpath
    # It is possible that the dataset already exists. In this case, we issue a warning.
    try:
        save_hdf5("{}{}".format(datpath,hdffile),G_p,G_ap)
    except RuntimeError:
        print("Unable to save to {}, maybe there is already a dataset with similar parameters...".format(hdffile))
    # the tmr and conductance graphs are plotted to a pdf file for review.
    plot_tmr_pdf(G_p,G_ap,plotname)
    # if the pop flag is set, we also plot the population for one bias value
    if pop:
        plot_population([POP_p[0],POP_ap[0]],os.path.splitext(plotname)[0]+"_POP.pdf")
def plot_tmr_pdf(C_p,C_ap,fname):
"""
A helper routine to plot the conductance and TMR to a pdf file in the datpath.
Args:
C_p, C_ap: the parallel and anti-parallel conductance.
fname: the filename to plot to
"""
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
# we plot the conductance graph on top, p and ap with different colors
Axes1 = plt.subplot(2,1,1)
Axes1.set_xticklabels([])
plt.ylabel("Conductance (e^2/h)")
plt.title("Conductance at zero bias")
# parallel is plotted in red, and anti-parallel as blue dashed line
plt.plot( cfg.conf["V_g"],C_p,'r',cfg.conf["V_g"], C_ap, 'b--')
# on the second panel, the TMR is plotted
Axes2 = plt.subplot(2,1,2)
plt.xlabel("gate voltage (V)")
plt.ylabel("TMR")
plt.title("TMR")
plt.ylim((-0.3,1.5))
TMR = np.zeros(cfg.conf["NV"])
for i in range(cfg.conf["NV"]):
try:
TMR[i] = C_p[i]/C_ap[i]-1.
except ZeroDivisionError:
print "Zero Division, returning null."
TMR[i] = 0.
plt.plot( cfg.conf["V_g"], TMR)
plt.savefig(fname, bbox_inches='tight')
def plot_population(POP, fname):
"""
Calculates and plots selected populations of the quantum dot
with gate voltage. The edge states N=-1 and 5 are neglected.
Args:
POP: a list with the two population vectors
for parallel and anti-parallel configurations
fname: the filename to plot to
"""
import matplotlib.pyplot as plt
NV = cfg.conf["NV"]
print "Calculating populations..."
# We plot the populations for both configurations
# the parallel populations on top
# the anti-parallel on bottom
Ax = [plt.subplot(2,1,1),plt.subplot(2,1,2)]
cm = plt.get_cmap('gist_rainbow')
PopPlots = [1,4,8,12,17,18]
NP = len(PopPlots)
for gamidx in range(2):
TLIST = cfg.TLIST[gamidx]
TLIST_T = np.transpose(TLIST)
PLIST = list(set(TLIST_T[0]).union(TLIST_T[1]))
PLIST.sort()
# we cycle through the linecolors to distinguish the different
# groundstates
Ax[gamidx].set_color_cycle([cm(1.*k/NP) for k in range(NP)])
for i in PopPlots:
color = cm(1.*i/NP)
LABEL = "P_{}".format(cfg.int_to_state(PLIST[i]))
Ax[gamidx].plot( cfg.conf["V_g"], POP[gamidx][:,i],label=LABEL)
lines =Ax[gamidx].get_lines()
labels = [l.get_label() for l in lines]
leg = plt.figlegend(lines,labels,loc='upper right')
plt.savefig(fname)
plt.show()
class Usage(Exception):
    """Raised to report a command-line usage error back to main()."""

    def __init__(self, msg):
        # Keep the message around so main() can print it to stderr.
        self.msg = msg
def main(argv=None):
    """
    Interface routine to call the tmr module.

    Example:
        ./tmr.py <jobname>

    In principle, there were routines to plot rates, populations,
    conductances etc. but apart from the population plotting,
    none of the use cases was needed anymore.
    """
    # POP: whether to additionally plot the dot populations (-P/--pop flag).
    POP = False
    # The default config file is called cnt.conf
    cfile = "cnt.conf"
    # NOTE(review): rlist appears to be unused in this routine -- confirm
    # against the rest of the module before removing.
    rlist = [0.,]
    if argv is None:
        argv = sys.argv
    try:
        try:
            # Short options: -h (help), -c <file> (config), -P (populations).
            opts, args = getopt.getopt(argv[1:], "hc:P", ["help","config=","pop"])
        except getopt.error, msg:
            # Re-raise as Usage so the outer handler prints a uniform message.
            raise Usage(msg)
        for o,a in opts:
            if o in ('-h','--help'):
                usage()
                exit()
            elif o in ('-c','--config'):
                cfile = a
            elif o in ('-P','--pop'):
                POP = True
            else:
                raise Usage('Invalid argument.')
        # we parse the config and initialize it
        # (args[0] is the jobname, i.e. the directory under dat/)
        cfg.parse_conf("dat/{0}/{1}".format(args[0],cfile))
        cfg.init()
        # rate input file and pdf output file are derived from the jobname
        h5file = "{}{}/{}".format(datpath,args[0],ratefile)
        pdffile = "{}{}.pdf".format(datpath,args[0])
        print "Try to open {}".format(h5file)
        eval_tmr(h5file,pdffile,POP)
    except Usage, err:
        # Print the usage error and exit with a non-zero status.
        print >>sys.stderr, err.msg
        print >>sys.stderr, "for help use --help"
        return 2
def usage():
    """Print the command-line help text for tmr.py to stdout."""
    print "This is a tool to process rate files.\n\
\n\
usage: tmr.py [-hP] [--pop] jobname\n\
\n\
--pop or -P: Plot the populations.\n\
\n\
jobname: The name of the directory for the rate files.\n\
\n\
The script searches for files dat/jobname/running_calc.h5\n\
and dat/jobname/cnt.conf"
if __name__ == "__main__":
    # Delegate to main() and propagate its return value as the exit status.
    sys.exit(main())
| gpl-3.0 |
ryfeus/lambda-packs | Keras_tensorflow/source/tensorflow/contrib/integrate/python/ops/odes.py | 69 | 20508 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ODE solvers for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import tensor_array_ops
# Butcher tableau describing one explicit embedded Runge-Kutta scheme:
#   alpha: fractional time offsets of the intermediate stages,
#   beta: per-stage coefficients combining the earlier stage derivatives,
#   c_sol: coefficients producing the 5th order solution,
#   c_mid: coefficients evaluating the solution at the interval mid-point
#     (used below for dense-output interpolation),
#   c_error: coefficients of the embedded error estimate
#     (difference between the 5th and 4th order solutions).
_ButcherTableau = collections.namedtuple(
    '_ButcherTableau', 'alpha beta c_sol c_mid c_error')

# Parameters from Shampine (1986), section 4.
# (true division from __future__ makes the integer ratios exact floats)
_DORMAND_PRINCE_TABLEAU = _ButcherTableau(
    alpha=[1/5, 3/10, 4/5, 8/9, 1., 1.],
    beta=[[1/5],
          [3/40, 9/40],
          [44/45, -56/15, 32/9],
          [19372/6561, -25360/2187, 64448/6561, -212/729],
          [9017/3168, -355/33, 46732/5247, 49/176, -5103/18656],
          [35/384, 0, 500/1113, 125/192, -2187/6784, 11/84]],
    c_sol=[35/384, 0, 500/1113, 125/192, -2187/6784, 11/84, 0],
    c_mid=[6025192743/30085553152 / 2, 0, 51252292925/65400821598 / 2,
           -2691868925/45128329728 / 2, 187940372067/1594534317056 / 2,
           -1776094331/19743644256 / 2, 11237099/235043384 / 2],
    c_error=[1951/21600 - 35/384,
             0,
             22642/50085 - 500/1113,
             451/720 - 125/192,
             -12231/42400 - -2187/6784,
             649/6300 - 11/84,
             1/60],
)
def _possibly_nonzero(x):
  """Return True unless `x` is a plain Python number known to equal zero."""
  # A Tensor's value is unknown at graph construction time, so it must be
  # treated as possibly nonzero; only concrete numbers can be tested here.
  if isinstance(x, ops.Tensor):
    return True
  return x != 0
def _scaled_dot_product(scale, xs, ys, name=None):
  """Calculate a scaled, vector inner product between lists of Tensors."""
  with ops.name_scope(name, 'scaled_dot_product', [scale, xs, ys]) as scope:
    # Some Butcher tableau entries are exactly zero; filtering those pairs
    # out with _possibly_nonzero avoids building useless multiply nodes.
    terms = [(scale * x) * y
             for x, y in zip(xs, ys)
             if _possibly_nonzero(x) or _possibly_nonzero(y)]
    return math_ops.add_n(terms, name=scope)
def _dot_product(xs, ys, name=None):
  """Calculate the vector inner product between two lists of Tensors."""
  with ops.name_scope(name, 'dot_product', [xs, ys]) as scope:
    products = [x * y for x, y in zip(xs, ys)]
    return math_ops.add_n(products, name=scope)
def _runge_kutta_step(func, y0, f0, t0, dt, tableau=_DORMAND_PRINCE_TABLEAU,
                      name=None):
  """Take an arbitrary Runge-Kutta step and estimate error.

  Args:
    func: Function to evaluate like `func(y, t)` to compute the time derivative
      of `y`.
    y0: Tensor initial value for the state.
    f0: Tensor initial value for the derivative, computed from `func(y0, t0)`.
    t0: float64 scalar Tensor giving the initial time.
    dt: float64 scalar Tensor giving the size of the desired time step.
    tableau: optional _ButcherTableau describing how to take the Runge-Kutta
      step.
    name: optional name for the operation.

  Returns:
    Tuple `(y1, f1, y1_error, k)` giving the estimated function value after
    the Runge-Kutta step at `t1 = t0 + dt`, the derivative of the state at `t1`,
    estimated error at `t1`, and a list of Runge-Kutta coefficients `k` used for
    calculating these terms.
  """
  with ops.name_scope(name, 'runge_kutta_step', [y0, f0, t0, dt]) as scope:
    y0 = ops.convert_to_tensor(y0, name='y0')
    f0 = ops.convert_to_tensor(f0, name='f0')
    t0 = ops.convert_to_tensor(t0, name='t0')
    dt = ops.convert_to_tensor(dt, name='dt')
    # dt is cast to the state dtype so it can scale the (possibly complex)
    # stage derivatives; time itself may use a different, real dtype.
    dt_cast = math_ops.cast(dt, y0.dtype)

    # k accumulates the derivative evaluated at each intermediate stage;
    # each stage state is a tableau-weighted combination of earlier stages.
    k = [f0]
    for alpha_i, beta_i in zip(tableau.alpha, tableau.beta):
      ti = t0 + alpha_i * dt
      yi = y0 + _scaled_dot_product(dt_cast, beta_i, k)
      k.append(func(yi, ti))

    if not (tableau.c_sol[-1] == 0 and tableau.c_sol == tableau.beta[-1]):
      # This property (true for Dormand-Prince) lets us save a few FLOPs.
      yi = y0 + _scaled_dot_product(dt_cast, tableau.c_sol, k)

    y1 = array_ops.identity(yi, name='%s/y1' % scope)
    f1 = array_ops.identity(k[-1], name='%s/f1' % scope)
    # Embedded error estimate from the difference of the two solution orders.
    y1_error = _scaled_dot_product(dt_cast, tableau.c_error, k,
                                   name='%s/y1_error' % scope)
    return (y1, f1, y1_error, k)
def _interp_fit(y0, y1, y_mid, f0, f1, dt):
  """Fit coefficients for 4th order polynomial interpolation.

  Args:
    y0: function value at the start of the interval.
    y1: function value at the end of the interval.
    y_mid: function value at the mid-point of the interval.
    f0: derivative value at the start of the interval.
    f1: derivative value at the end of the interval.
    dt: width of the interval.

  Returns:
    List of coefficients `[a, b, c, d, e]` for interpolating with the polynomial
    `p = a * x ** 4 + b * x ** 3 + c * x ** 2 + d * x + e` for values of `x`
    between 0 (start of interval) and 1 (end of interval).
  """
  # The closed-form coefficients below solve the five constraints (values at
  # x = 0, 1/2, 1 and scaled derivatives at x = 0, 1), as derived with sympy:
  # a, b, c, d, e = sympy.symbols('a b c d e')
  # x, dt, y0, y1, y_mid, f0, f1 = sympy.symbols('x dt y0 y1 y_mid f0 f1')
  # p = a * x ** 4 + b * x ** 3 + c * x ** 2 + d * x + e
  # sympy.solve([p.subs(x, 0) - y0,
  #              p.subs(x, 1 / 2) - y_mid,
  #              p.subs(x, 1) - y1,
  #              (p.diff(x) / dt).subs(x, 0) - f0,
  #              (p.diff(x) / dt).subs(x, 1) - f1],
  #             [a, b, c, d, e])
  # {a: -2.0*dt*f0 + 2.0*dt*f1 - 8.0*y0 - 8.0*y1 + 16.0*y_mid,
  #  b: 5.0*dt*f0 - 3.0*dt*f1 + 18.0*y0 + 14.0*y1 - 32.0*y_mid,
  #  c: -4.0*dt*f0 + dt*f1 - 11.0*y0 - 5.0*y1 + 16.0*y_mid,
  #  d: dt*f0,
  #  e: y0}
  a = _dot_product([-2 * dt, 2 * dt, -8, -8, 16], [f0, f1, y0, y1, y_mid])
  b = _dot_product([5 * dt, -3 * dt, 18, 14, -32], [f0, f1, y0, y1, y_mid])
  c = _dot_product([-4 * dt, dt, -11, -5, 16], [f0, f1, y0, y1, y_mid])
  d = dt * f0
  e = y0
  return [a, b, c, d, e]
def _interp_fit_rk(y0, y1, k, dt, tableau=_DORMAND_PRINCE_TABLEAU):
  """Fit an interpolating polynomial to the results of a Runge-Kutta step."""
  with ops.name_scope('interp_fit_rk'):
    # Evaluate the interval mid-point from the tableau's c_mid row, then fit
    # the quartic through the two end-points, the mid-point and the slopes
    # at both ends (k[0] and k[-1]).
    dt = math_ops.cast(dt, y0.dtype)
    y_mid = y0 + _scaled_dot_product(dt, tableau.c_mid, k)
    return _interp_fit(y0, y1, y_mid, k[0], k[-1], dt)
def _interp_evaluate(coefficients, t0, t1, t):
  """Evaluate polynomial interpolation at the given time point.

  Args:
    coefficients: list of Tensor coefficients as created by `interp_fit`.
    t0: scalar float64 Tensor giving the start of the interval.
    t1: scalar float64 Tensor giving the end of the interval.
    t: scalar float64 Tensor giving the desired interpolation point.

  Returns:
    Polynomial interpolation of the coefficients at time `t`.
  """
  with ops.name_scope('interp_evaluate'):
    t0 = ops.convert_to_tensor(t0)
    t1 = ops.convert_to_tensor(t1)
    t = ops.convert_to_tensor(t)

    dtype = coefficients[0].dtype

    # Refuse to extrapolate: `t` must lie inside the fitted interval.
    assert_op = control_flow_ops.Assert(
        (t0 <= t) & (t <= t1),
        ['invalid interpolation, fails `t0 <= t <= t1`:', t0, t, t1])
    with ops.control_dependencies([assert_op]):
      # Map t onto [0, 1], build the powers [1, x, x**2, ...], and evaluate
      # the polynomial as a dot product; coefficients are stored highest
      # power first, hence `reversed`.
      x = math_ops.cast((t - t0) / (t1 - t0), dtype)
      xs = [constant_op.constant(1, dtype), x]
      for _ in range(2, len(coefficients)):
        xs.append(xs[-1] * x)
      return _dot_product(coefficients, reversed(xs))
def _optimal_step_size(last_step,
                       error_ratio,
                       safety=0.9,
                       ifactor=10.0,
                       dfactor=0.2,
                       order=5,
                       name=None):
  """Calculate the optimal size for the next Runge-Kutta step."""
  with ops.name_scope(
      name, 'optimal_step_size', [last_step, error_ratio]) as scope:
    error_ratio = math_ops.cast(error_ratio, last_step.dtype)
    exponent = math_ops.cast(1 / order, last_step.dtype)
    # Keeping error_ratio in the numerator guarantees we can never divide
    # by zero, at the cost of a slightly less direct formula.
    raw_factor = error_ratio ** exponent / safety
    shrink_limit = 1 / dfactor
    grow_limit = 1 / ifactor
    factor = math_ops.maximum(grow_limit,
                              math_ops.minimum(raw_factor, shrink_limit))
    return math_ops.div(last_step, factor, name=scope)
def _abs_square(x):
  """Return the squared magnitude of `x`, supporting complex dtypes."""
  if not x.dtype.is_complex:
    return math_ops.square(x)
  # |z|^2 = Re(z)^2 + Im(z)^2, computed without an explicit abs/sqrt.
  real_part = math_ops.real(x)
  imag_part = math_ops.imag(x)
  return math_ops.square(real_part) + math_ops.square(imag_part)
def _ta_append(tensor_array, value):
  """Append a value to the end of a tf.TensorArray."""
  # Writing at index size() grows the (dynamically sized) array by one.
  next_index = tensor_array.size()
  return tensor_array.write(next_index, value)
class _RungeKuttaState(collections.namedtuple(
    '_RungeKuttaState', 'y1, f1, t0, t1, dt, interp_coeff')):
  """Saved state of the Runge Kutta solver.

  Attributes:
    y1: Tensor giving the function value at the end of the last time step.
    f1: Tensor giving derivative at the end of the last time step.
    t0: scalar float64 Tensor giving start of the last time step.
    t1: scalar float64 Tensor giving end of the last time step.
    dt: scalar float64 Tensor giving the size for the next time step.
    interp_coeff: list of Tensors giving coefficients for polynomial
      interpolation between `t0` and `t1`.
  """
class _History(collections.namedtuple(
    '_History', 'integrate_points, error_ratio')):
  """Saved integration history for use in `info_dict`.

  Attributes:
    integrate_points: tf.TensorArray storing integrating time points.
    error_ratio: tf.TensorArray storing computed error ratios at each
      integration step.
  """
  # Both arrays are created with dynamic_size=True in _dopri5, since the
  # number of (accepted or rejected) steps is not known in advance.
def _dopri5(func,
            y0,
            t,
            rtol,
            atol,
            full_output=False,
            first_step=None,
            safety=0.9,
            ifactor=10.0,
            dfactor=0.2,
            max_num_steps=1000,
            name=None):
  """Solve an ODE for `odeint` using method='dopri5'."""

  if first_step is None:
    # at some point, we might want to switch to picking the step size
    # automatically
    first_step = 1.0

  with ops.name_scope(
      name, 'dopri5',
      [y0, t, rtol, atol, safety, ifactor, dfactor, max_num_steps]) as scope:

    # Step-control parameters share the time dtype; max_num_steps is an
    # int32 so it can be compared against the loop counter below.
    first_step = ops.convert_to_tensor(first_step, dtype=t.dtype,
                                       name='first_step')
    safety = ops.convert_to_tensor(safety, dtype=t.dtype, name='safety')
    ifactor = ops.convert_to_tensor(ifactor, dtype=t.dtype, name='ifactor')
    dfactor = ops.convert_to_tensor(dfactor, dtype=t.dtype, name='dfactor')
    max_num_steps = ops.convert_to_tensor(max_num_steps, dtype=dtypes.int32,
                                          name='max_num_steps')

    def adaptive_runge_kutta_step(rk_state, history, n_steps):
      """Take an adaptive Runge-Kutta step to integrate the ODE."""
      y0, f0, _, t0, dt, interp_coeff = rk_state
      with ops.name_scope('assertions'):
        # Fail loudly if dt underflows to zero, the step budget is spent,
        # or the state has gone non-finite.
        check_underflow = control_flow_ops.Assert(
            t0 + dt > t0, ['underflow in dt', dt])
        check_max_num_steps = control_flow_ops.Assert(
            n_steps < max_num_steps, ['max_num_steps exceeded'])
        check_numerics = control_flow_ops.Assert(
            math_ops.reduce_all(math_ops.is_finite(abs(y0))),
            ['non-finite values in state `y`', y0])
      with ops.control_dependencies(
          [check_underflow, check_max_num_steps, check_numerics]):
        y1, f1, y1_error, k = _runge_kutta_step(func, y0, f0, t0, dt)

      with ops.name_scope('error_ratio'):
        # We use the same approach as the dopri5 fortran code.
        error_tol = atol + rtol * math_ops.maximum(abs(y0), abs(y1))
        tensor_error_ratio = _abs_square(y1_error) / _abs_square(error_tol)
        # Could also use reduce_maximum here.
        error_ratio = math_ops.sqrt(math_ops.reduce_mean(tensor_error_ratio))
        accept_step = error_ratio <= 1

      with ops.name_scope('update/rk_state'):
        # If we don't accept the step, the _RungeKuttaState will be useless
        # (covering a time-interval of size 0), but that's OK, because in such
        # cases we always immediately take another Runge-Kutta step.
        y_next = control_flow_ops.cond(accept_step, lambda: y1, lambda: y0)
        f_next = control_flow_ops.cond(accept_step, lambda: f1, lambda: f0)
        t_next = control_flow_ops.cond(accept_step, lambda: t0 + dt, lambda: t0)
        interp_coeff = control_flow_ops.cond(
            accept_step,
            lambda: _interp_fit_rk(y0, y1, k, dt),
            lambda: interp_coeff)
        # The next dt shrinks on rejection and may grow on acceptance.
        dt_next = _optimal_step_size(dt, error_ratio, safety, ifactor, dfactor)
        rk_state = _RungeKuttaState(
            y_next, f_next, t0, t_next, dt_next, interp_coeff)

      with ops.name_scope('update/history'):
        # Record attempted step end-points and error ratios for info_dict.
        history = _History(_ta_append(history.integrate_points, t0 + dt),
                           _ta_append(history.error_ratio, error_ratio))
      return rk_state, history, n_steps + 1

    def interpolate(solution, history, rk_state, i):
      """Interpolate through the next time point, integrating as necessary."""
      with ops.name_scope('interpolate'):
        # Keep stepping until the solver interval [t0, t1] covers t[i],
        # then evaluate the dense-output polynomial at t[i].
        rk_state, history, _ = control_flow_ops.while_loop(
            lambda rk_state, *_: t[i] > rk_state.t1,
            adaptive_runge_kutta_step,
            (rk_state, history, 0),
            name='integrate_loop')
        y = _interp_evaluate(
            rk_state.interp_coeff, rk_state.t0, rk_state.t1, t[i])
        solution = solution.write(i, y)
        return solution, history, rk_state, i + 1

    assert_increasing = control_flow_ops.Assert(
        math_ops.reduce_all(t[1:] > t[:-1]),
        ['`t` must be monotonic increasing'])
    with ops.control_dependencies([assert_increasing]):
      num_times = array_ops.size(t)

    # Slot 0 holds the initial condition; the loop below fills 1..num_times-1.
    solution = tensor_array_ops.TensorArray(
        y0.dtype, size=num_times).write(0, y0)
    history = _History(
        integrate_points=tensor_array_ops.TensorArray(
            t.dtype, size=0, dynamic_size=True),
        error_ratio=tensor_array_ops.TensorArray(
            rtol.dtype, size=0, dynamic_size=True))
    # Initial state covers the degenerate interval [t[0], t[0]]; the constant
    # interpolation coefficients [y0] * 5 are placeholders overwritten on the
    # first accepted step.
    rk_state = _RungeKuttaState(
        y0, func(y0, t[0]), t[0], t[0], first_step, interp_coeff=[y0] * 5)

    solution, history, _, _ = control_flow_ops.while_loop(
        lambda _, __, ___, i: i < num_times,
        interpolate,
        (solution, history, rk_state, 1),
        name='interpolate_loop')

    y = solution.stack(name=scope)
    y.set_shape(t.get_shape().concatenate(y0.get_shape()))
    if not full_output:
      return y
    else:
      integrate_points = history.integrate_points.stack()
      # Each Runge-Kutta step costs 6 evaluations (FSAL reuses the 7th),
      # plus the single initial func(y0, t[0]) evaluation.
      info_dict = {'num_func_evals': 6 * array_ops.size(integrate_points) + 1,
                   'integrate_points': integrate_points,
                   'error_ratio': history.error_ratio.stack()}
      return (y, info_dict)
def odeint(func,
           y0,
           t,
           rtol=1e-6,
           atol=1e-12,
           method=None,
           options=None,
           full_output=False,
           name=None):
  """Integrate a system of ordinary differential equations.

  Solves the initial value problem for a non-stiff system of first order ode-s:

    ```
    dy/dt = func(y, t), y(t[0]) = y0
    ```

  where y is a Tensor of any shape.

  For example:

    ```
    # solve `dy/dt = -y`, corresponding to exponential decay
    tf.contrib.integrate.odeint(lambda y, _: -y, 1.0, [0, 1, 2])
    => [1, exp(-1), exp(-2)]
    ```

  Output dtypes and numerical precision are based on the dtypes of the inputs
  `y0` and `t`.

  Currently, implements 5th order Runge-Kutta with adaptive step size control
  and dense output, using the Dormand-Prince method. Similar to the 'dopri5'
  method of `scipy.integrate.ode` and MATLAB's `ode45`.

  Based on: Shampine, Lawrence F. (1986), "Some Practical Runge-Kutta Formulas",
  Mathematics of Computation, American Mathematical Society, 46 (173): 135-150,
  doi:10.2307/2008219

  Args:
    func: Function that maps a Tensor holding the state `y` and a scalar Tensor
      `t` into a Tensor of state derivatives with respect to time.
    y0: N-D Tensor giving starting value of `y` at time point `t[0]`. May
      have any floating point or complex dtype.
    t: 1-D Tensor holding a sequence of time points for which to solve for
      `y`. The initial time point should be the first element of this sequence,
      and each time must be larger than the previous time. May have any floating
      point dtype. If not provided as a Tensor, converted to a Tensor with
      float64 dtype.
    rtol: optional float64 Tensor specifying an upper bound on relative error,
      per element of `y`.
    atol: optional float64 Tensor specifying an upper bound on absolute error,
      per element of `y`.
    method: optional string indicating the integration method to use. Currently,
      the only valid option is `'dopri5'`.
    options: optional dict of configuring options for the indicated integration
      method. Can only be provided if a `method` is explicitly set. For
      `'dopri5'`, valid options include:
      * first_step: an initial guess for the size of the first integration
        (current default: 1.0, but may later be changed to use heuristics based
        on the gradient).
      * safety: safety factor for adaptive step control, generally a constant
        in the range 0.8-1 (default: 0.9).
      * ifactor: maximum factor by which the adaptive step may be increased
        (default: 10.0).
      * dfactor: maximum factor by which the adpative step may be decreased
        (default: 0.2).
      * max_num_steps: integer maximum number of integrate steps between time
        points in `t` (default: 1000).
    full_output: optional boolean. If True, `odeint` returns a tuple
      `(y, info_dict)` describing the integration process.
    name: Optional name for this operation.

  Returns:
    y: (N+1)-D tensor, where the first dimension corresponds to different
      time points. Contains the solved value of y for each desired time point in
      `t`, with the initial value `y0` being the first element along the first
      dimension.
    info_dict: only if `full_output == True`. A dict with the following values:
      * num_func_evals: integer Tensor counting the number of function
        evaluations.
      * integrate_points: 1D float64 Tensor with the upper bound of each
        integration time step.
      * error_ratio: 1D float Tensor with the estimated ratio of the integration
        error to the error tolerance at each integration step. An ratio greater
        than 1 corresponds to rejected steps.

  Raises:
    ValueError: if an invalid `method` is provided.
    TypeError: if `options` is supplied without `method`, or if `t` or `y0` has
      an invalid dtype.
  """
  if method is not None and method != 'dopri5':
    raise ValueError('invalid method: %r' % method)

  if options is None:
    options = {}
  elif method is None:
    # NOTE(review): the docstring promises TypeError for this case but the
    # code raises ValueError -- callers catching TypeError will miss it.
    raise ValueError('cannot supply `options` without specifying `method`')

  with ops.name_scope(name, 'odeint', [y0, t, rtol, atol]) as scope:
    # TODO(shoyer): use nest.flatten (like tf.while_loop) to allow `y0` to be an
    # arbitrarily nested tuple. This will help performance and usability by
    # avoiding the need to pack/unpack in user functions.
    y0 = ops.convert_to_tensor(y0, name='y0')
    if not (y0.dtype.is_floating or y0.dtype.is_complex):
      raise TypeError('`y0` must have a floating point or complex floating '
                      'point dtype')

    t = ops.convert_to_tensor(t, preferred_dtype=dtypes.float64, name='t')
    if not t.dtype.is_floating:
      raise TypeError('`t` must have a floating point dtype')

    # Tolerances are expressed in the real dtype matching y0 (abs() of a
    # complex tensor yields a real-valued tensor).
    error_dtype = abs(y0).dtype
    rtol = ops.convert_to_tensor(rtol, dtype=error_dtype, name='rtol')
    atol = ops.convert_to_tensor(atol, dtype=error_dtype, name='atol')

    return _dopri5(func, y0, t,
                   rtol=rtol,
                   atol=atol,
                   full_output=full_output,
                   name=scope,
                   **options)
| mit |
werthen/django-rest-framework | rest_framework/mixins.py | 93 | 2745 | """
Basic building blocks for generic class based views.
We don't bind behaviour to http method handlers yet,
which allows mixin classes to be composed in interesting ways.
"""
from __future__ import unicode_literals
from rest_framework import status
from rest_framework.response import Response
from rest_framework.settings import api_settings
class CreateModelMixin(object):
    """
    Create a model instance.
    """
    def create(self, request, *args, **kwargs):
        # Validate the incoming payload, persist it, then echo it back with
        # a 201 status and a Location header pointing at the new resource.
        new_serializer = self.get_serializer(data=request.data)
        new_serializer.is_valid(raise_exception=True)
        self.perform_create(new_serializer)
        response_headers = self.get_success_headers(new_serializer.data)
        return Response(
            new_serializer.data,
            status=status.HTTP_201_CREATED,
            headers=response_headers,
        )

    def perform_create(self, serializer):
        # Hook point for subclasses that need to set extra fields on save.
        serializer.save()

    def get_success_headers(self, data):
        # The serialized representation may not expose a URL field at all,
        # in which case no Location header is emitted.
        try:
            return {'Location': data[api_settings.URL_FIELD_NAME]}
        except (TypeError, KeyError):
            return {}
class ListModelMixin(object):
    """
    List a queryset.
    """
    def list(self, request, *args, **kwargs):
        filtered = self.filter_queryset(self.get_queryset())
        page = self.paginate_queryset(filtered)
        # Unpaginated case: serialize the whole filtered queryset.
        if page is None:
            serializer = self.get_serializer(filtered, many=True)
            return Response(serializer.data)
        # Paginated case: serialize just the current page and wrap it in
        # the paginator's envelope response.
        serializer = self.get_serializer(page, many=True)
        return self.get_paginated_response(serializer.data)
class RetrieveModelMixin(object):
    """
    Retrieve a model instance.
    """
    def retrieve(self, request, *args, **kwargs):
        # Serialize the single object addressed by the URL and return it.
        serialized = self.get_serializer(self.get_object())
        return Response(serialized.data)
class UpdateModelMixin(object):
    """
    Update a model instance.
    """
    def update(self, request, *args, **kwargs):
        # `partial` is consumed from kwargs; partial_update() injects it.
        is_partial = kwargs.pop('partial', False)
        target = self.get_object()
        serializer = self.get_serializer(
            target, data=request.data, partial=is_partial)
        serializer.is_valid(raise_exception=True)
        self.perform_update(serializer)
        return Response(serializer.data)

    def perform_update(self, serializer):
        # Hook point for subclasses that need custom save behaviour.
        serializer.save()

    def partial_update(self, request, *args, **kwargs):
        # PATCH delegates to update() with partial validation enabled.
        kwargs['partial'] = True
        return self.update(request, *args, **kwargs)
class DestroyModelMixin(object):
    """
    Destroy a model instance.
    """
    def destroy(self, request, *args, **kwargs):
        # Delete the addressed object and answer with an empty 204 response.
        target = self.get_object()
        self.perform_destroy(target)
        return Response(status=status.HTTP_204_NO_CONTENT)

    def perform_destroy(self, instance):
        # Hook point for subclasses (soft delete, cascade handling, ...).
        instance.delete()
| bsd-2-clause |
mahmutf/dupeguru | core/pe/cache_shelve.py | 3 | 4189 | # Copyright 2016 Virgil Dupras
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
import os
import os.path as op
import shelve
import tempfile
from collections import namedtuple
from .cache import string_to_colors, colors_to_string
def wrap_path(path):
    """Build the shelve key that stores a cache row under its file path."""
    return "path:%s" % (path,)
def unwrap_path(key):
    """Strip the ``path:`` prefix from a shelve key, returning the path."""
    return key[len("path:"):]
def wrap_id(path):
    """Build the shelve key that stores a path under its numeric row id."""
    return "id:%s" % (path,)
def unwrap_id(key):
    """Recover the integer row id from an ``id:``-prefixed shelve key."""
    return int(key[len("id:"):])
# One cached picture: numeric row id, file path, serialized color blocks and
# the file's mtime at insertion time (0 if the file was missing then).
CacheRow = namedtuple("CacheRow", "id path blocks mtime")
class ShelveCache:
    """A class to cache picture blocks in a shelve backend.

    Each picture is stored under two shelve keys: ``path:<path>`` maps to
    the full CacheRow, and ``id:<rowid>`` maps back to the ``path:`` key,
    so rows can be resolved either by file path or by integer row id.
    """

    def __init__(self, db=None, readonly=False):
        # With no explicit db path, fall back to a throwaway shelve in a
        # temporary directory which close() removes again.
        self.istmp = db is None
        if self.istmp:
            self.dtmp = tempfile.mkdtemp()
            self.ftmp = db = op.join(self.dtmp, "tmpdb")
        flag = "r" if readonly else "c"
        self.shelve = shelve.open(db, flag)
        self.maxid = self._compute_maxid()

    def __contains__(self, key):
        # `key` is a raw file path, not a wrapped shelve key.
        return wrap_path(key) in self.shelve

    def __delitem__(self, key):
        # Remove both directions of the mapping (path -> row, id -> path).
        row = self.shelve[wrap_path(key)]
        del self.shelve[wrap_path(key)]
        del self.shelve[wrap_id(row.id)]

    def __getitem__(self, key):
        # Accept either an integer row id or a file path.
        if isinstance(key, int):
            skey = self.shelve[wrap_id(key)]
        else:
            skey = wrap_path(key)
        return string_to_colors(self.shelve[skey].blocks)

    def __iter__(self):
        # Iterate over file paths only, skipping the reverse id -> path keys.
        return (unwrap_path(k) for k in self.shelve if k.startswith("path:"))

    def __len__(self):
        # Count only the path-keyed entries (each row also has an id key).
        return sum(1 for k in self.shelve if k.startswith("path:"))

    def __setitem__(self, path_str, blocks):
        blocks = colors_to_string(blocks)
        if op.exists(path_str):
            mtime = int(os.stat(path_str).st_mtime)
        else:
            # mtime 0 marks rows whose file was missing at insertion time.
            mtime = 0
        if path_str in self:
            # Overwriting an existing entry keeps its row id stable.
            rowid = self.shelve[wrap_path(path_str)].id
        else:
            rowid = self._get_new_id()
        row = CacheRow(rowid, path_str, blocks, mtime)
        self.shelve[wrap_path(path_str)] = row
        self.shelve[wrap_id(rowid)] = wrap_path(path_str)

    def _compute_maxid(self):
        # Highest row id currently stored; defaults to 1 for an empty cache.
        return max(
            (unwrap_id(k) for k in self.shelve if k.startswith("id:")), default=1
        )

    def _get_new_id(self):
        self.maxid += 1
        return self.maxid

    def clear(self):
        self.shelve.clear()

    def close(self):
        # Safe to call more than once; a temporary backing db is deleted.
        if self.shelve is not None:
            self.shelve.close()
            if self.istmp:
                os.remove(self.ftmp)
                os.rmdir(self.dtmp)
        self.shelve = None

    def filter(self, func):
        # Drop every entry whose path does not satisfy the predicate.
        to_delete = [key for key in self if not func(key)]
        for key in to_delete:
            del self[key]

    def get_id(self, path):
        # Return the row id for `path`, raising ValueError if unknown.
        if path in self:
            return self.shelve[wrap_path(path)].id
        else:
            raise ValueError(path)

    def get_multiple(self, rowids):
        # Yield (rowid, blocks) pairs, silently skipping unknown ids.
        for rowid in rowids:
            try:
                skey = self.shelve[wrap_id(rowid)]
            except KeyError:
                continue
            yield (rowid, string_to_colors(self.shelve[skey].blocks))

    def purge_outdated(self):
        """Go through the cache and purge outdated records.

        A record is outdated if the picture doesn't exist or if its mtime is greater than the one in
        the db.
        """
        todelete = []
        for path in self:
            row = self.shelve[wrap_path(path)]
            if row.mtime and op.exists(path):
                picture_mtime = os.stat(path).st_mtime
                if int(picture_mtime) <= row.mtime:
                    # not outdated
                    continue
            todelete.append(path)
        for path in todelete:
            try:
                del self[path]
            except KeyError:
                # I have no idea why a KeyError sometimes happen, but it does, as we can see in
                # #402 and #439. I don't think it hurts to silently ignore the error, so that's
                # what we do
                pass
| gpl-3.0 |
robk5uj/invenio | modules/webcomment/lib/webcomment_templates.py | 2 | 113838 | # -*- coding: utf-8 -*-
## Comments and reviews for records.
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""HTML Templates for commenting features """
__revision__ = "$Id$"
import cgi
# Invenio imports
from invenio.urlutils import create_html_link
from invenio.webuser import get_user_info, collect_user_info, isGuestUser, get_email
from invenio.dateutils import convert_datetext_to_dategui
from invenio.webmessage_mailutils import email_quoted_txt2html
from invenio.config import CFG_SITE_URL, \
CFG_SITE_SECURE_URL, \
CFG_SITE_LANG, \
CFG_SITE_NAME, \
CFG_SITE_NAME_INTL,\
CFG_SITE_SUPPORT_EMAIL,\
CFG_WEBCOMMENT_ALLOW_REVIEWS, \
CFG_WEBCOMMENT_ALLOW_COMMENTS, \
CFG_WEBCOMMENT_USE_RICH_TEXT_EDITOR, \
CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN, \
CFG_WEBCOMMENT_AUTHOR_DELETE_COMMENT_OPTION, \
CFG_CERN_SITE, \
CFG_SITE_RECORD, \
CFG_WEBCOMMENT_MAX_ATTACHED_FILES, \
CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE
from invenio.htmlutils import get_html_text_editor
from invenio.messages import gettext_set_language
from invenio.bibformat import format_record
from invenio.access_control_engine import acc_authorize_action
from invenio.search_engine_utils import get_fieldvalues
class Template:
"""templating class, refer to webcomment.py for examples of call"""
def tmpl_get_first_comments_without_ranking(self, recID, ln, comments, nb_comments_total, warnings):
    """
    Render the short "latest comments" box shown on a record page.

    @param recID: record id
    @param ln: language
    @param comments: tuple as returned from webcomment.py/query_retrieve_comments_or_remarks
    @param nb_comments_total: total number of comments for this record
    @param warnings: list of warning tuples (warning_text, warning_color)
    @return: html of comments
    """
    # load the right message language
    _ = gettext_set_language(ln)
    # naming data fields of comments (indices into each comment tuple)
    c_nickname = 0
    c_user_id = 1
    c_date_creation = 2
    c_body = 3
    c_id = 6
    warnings = self.tmpl_warnings(warnings, ln)
    # write button: a small hidden form posting to the "add comment" page
    write_button_label = _("Write a comment")
    write_button_link = '%s/%s/%s/comments/add' % (CFG_SITE_URL, CFG_SITE_RECORD, recID)
    write_button_form = '<input type="hidden" name="ln" value="%s"/>' % ln
    write_button_form = self.createhiddenform(action=write_button_link, method="get", text=write_button_form, button=write_button_label)
    # comments
    comment_rows = ''
    # NOTE(review): last_comment_round_name is computed but never used in
    # this method -- confirm before removing.
    last_comment_round_name = None
    comment_round_names = [comment[0] for comment in comments]
    if comment_round_names:
        last_comment_round_name = comment_round_names[-1]
    # one <div> per comment round, each holding its comments
    for comment_round_name, comments_list in comments:
        comment_rows += '<div id="cmtRound%s" class="cmtRound">' % (comment_round_name)
        if comment_round_name:
            comment_rows += _('<div class="webcomment_comment_round_header">%(x_nb)i Comments for round "%(x_name)s"') % {'x_nb': len(comments_list), 'x_name': comment_round_name} + "</div>"
        else:
            comment_rows += _('<div class="webcomment_comment_round_header">%(x_nb)i Comments') % {'x_nb': len(comments_list),} + "</div>"
        for comment in comments_list:
            # prefer the stored nickname; otherwise resolve it from the uid
            if comment[c_nickname]:
                nickname = comment[c_nickname]
                display = nickname
            else:
                (uid, nickname, display) = get_user_info(comment[c_user_id])
            messaging_link = self.create_messaging_link(nickname, display, ln)
            comment_rows += """
<tr>
<td>"""
            report_link = '%s/%s/%s/comments/report?ln=%s&comid=%s' % (CFG_SITE_URL, CFG_SITE_RECORD, recID, ln, comment[c_id])
            reply_link = '%s/%s/%s/comments/add?ln=%s&comid=%s&action=REPLY' % (CFG_SITE_URL, CFG_SITE_RECORD, recID, ln, comment[c_id])
            comment_rows += self.tmpl_get_comment_without_ranking(req=None, ln=ln, nickname=messaging_link, comment_uid=comment[c_user_id],
                                                                  date_creation=comment[c_date_creation],
                                                                  body=comment[c_body], status='', nb_reports=0,
                                                                  report_link=report_link, reply_link=reply_link, recID=recID)
            comment_rows += """
<br />
<br />
</td>
</tr>"""
        # Close comment round
        comment_rows += '</div>'
    # output: either the latest-comments box or an invitation to start
    # the discussion when there are no comments yet
    if nb_comments_total > 0:
        out = warnings
        comments_label = len(comments) > 1 and _("Showing the latest %i comments:") % len(comments) \
                         or ""
        out += """
<div class="video_content_clear"></div>
<table class="webcomment_header_comments">
<tr>
<td class="blocknote">%(comment_title)s</td>
</tr>
</table>
<div class="websomment_header_comments_label">%(comments_label)s</div>
%(comment_rows)s
%(view_all_comments_link)s
%(write_button_form)s<br />""" % \
            {'comment_title': _("Discuss this document"),
             'comments_label': comments_label,
             'nb_comments_total' : nb_comments_total,
             'recID': recID,
             'comment_rows': comment_rows,
             'tab': ' '*4,
             'siteurl': CFG_SITE_URL,
             's': nb_comments_total>1 and 's' or "",
             'view_all_comments_link': nb_comments_total>0 and '''<a class="webcomment_view_all_comments" href="%s/%s/%s/comments/display">View all %s comments</a>''' \
                 % (CFG_SITE_URL, CFG_SITE_RECORD, recID, nb_comments_total) or "",
             'write_button_form': write_button_form,
             'nb_comments': len(comments)
             }
    if not comments:
        out = """
<!-- comments title table -->
<table class="webcomment_header_comments">
<tr>
<td class="blocknote">%(discuss_label)s:</td>
</tr>
</table>
<div class="webcomment_header_details">%(detailed_info)s
<br />
</div>
%(form)s
""" % {'form': write_button_form,
       'discuss_label': _("Discuss this document"),
       'detailed_info': _("Start a discussion about any aspect of this document.")
       }
    return out
def tmpl_record_not_found(self, status='missing', recID="", ln=CFG_SITE_LANG):
"""
Displays a page when bad or missing record ID was given.
@param status: 'missing' : no recID was given
'inexistant': recID doesn't have an entry in the database
'nan' : recID is not a number
'invalid' : recID is an error code, i.e. in the interval [-99,-1]
@param return: body of the page
"""
_ = gettext_set_language(ln)
if status == 'inexistant':
body = _("Sorry, the record %s does not seem to exist.") % (recID,)
elif status in ('nan', 'invalid'):
body = _("Sorry, %s is not a valid ID value.") % (recID,)
else:
body = _("Sorry, no record ID was provided.")
body += "<br /><br />"
link = "<a href=\"%s?ln=%s\">%s</a>." % (CFG_SITE_URL, ln, CFG_SITE_NAME_INTL.get(ln, CFG_SITE_NAME))
body += _("You may want to start browsing from %s") % link
return body
def tmpl_get_first_comments_with_ranking(self, recID, ln, comments=None, nb_comments_total=None, avg_score=None, warnings=[]):
"""
@param recID: record id
@param ln: language
@param comments: tuple as returned from webcomment.py/query_retrieve_comments_or_remarks
@param nb_comments_total: total number of comments for this record
@param avg_score: average score of all reviews
@param warnings: list of warning tuples (warning_text, warning_color)
@return: html of comments
"""
# load the right message language
_ = gettext_set_language(ln)
# naming data fields of comments
c_nickname = 0
c_user_id = 1
c_date_creation = 2
c_body = 3
c_nb_votes_yes = 4
c_nb_votes_total = 5
c_star_score = 6
c_title = 7
c_id = 8
warnings = self.tmpl_warnings(warnings, ln)
#stars
if avg_score > 0:
avg_score_img = 'stars-' + str(avg_score).split('.')[0] + '-' + str(avg_score).split('.')[1] + '.png'
else:
avg_score_img = "stars-0-0.png"
# voting links
useful_dict = { 'siteurl' : CFG_SITE_URL,
'CFG_SITE_RECORD' : CFG_SITE_RECORD,
'recID' : recID,
'ln' : ln,
'yes_img' : 'smchk_gr.gif', #'yes.gif',
'no_img' : 'iconcross.gif' #'no.gif'
}
link = '<a href="%(siteurl)s/%(CFG_SITE_RECORD)s/%(recID)s/reviews/vote?ln=%(ln)s&comid=%%(comid)s' % useful_dict
useful_yes = link + '&com_value=1">' + _("Yes") + '</a>'
useful_no = link + '&com_value=-1">' + _("No") + '</a>'
#comment row
comment_rows = ' '
last_comment_round_name = None
comment_round_names = [comment[0] for comment in comments]
if comment_round_names:
last_comment_round_name = comment_round_names[-1]
for comment_round_name, comments_list in comments:
comment_rows += '<div id="cmtRound%s" class="cmtRound">' % (comment_round_name)
comment_rows += _('%(x_nb)i comments for round "%(x_name)s"') % {'x_nb': len(comments_list), 'x_name': comment_round_name} + "<br/>"
for comment in comments_list:
if comment[c_nickname]:
nickname = comment[c_nickname]
display = nickname
else:
(uid, nickname, display) = get_user_info(comment[c_user_id])
messaging_link = self.create_messaging_link(nickname, display, ln)
comment_rows += '''
<tr>
<td>'''
report_link = '%s/%s/%s/reviews/report?ln=%s&comid=%s' % (CFG_SITE_URL, CFG_SITE_RECORD, recID, ln, comment[c_id])
comment_rows += self.tmpl_get_comment_with_ranking(None, ln=ln, nickname=messaging_link,
comment_uid=comment[c_user_id],
date_creation=comment[c_date_creation],
body=comment[c_body],
status='', nb_reports=0,
nb_votes_total=comment[c_nb_votes_total],
nb_votes_yes=comment[c_nb_votes_yes],
star_score=comment[c_star_score],
title=comment[c_title], report_link=report_link, recID=recID)
comment_rows += '''
%s %s / %s<br />''' % (_("Was this review helpful?"), useful_yes % {'comid':comment[c_id]}, useful_no % {'comid':comment[c_id]})
comment_rows += '''
<br />
</td>
</tr>'''
# Close comment round
comment_rows += '</div>'
# write button
write_button_link = '''%s/%s/%s/reviews/add''' % (CFG_SITE_URL, CFG_SITE_RECORD, recID)
write_button_form = ' <input type="hidden" name="ln" value="%s"/>' % ln
write_button_form = self.createhiddenform(action=write_button_link, method="get", text=write_button_form, button=_("Write a review"))
if nb_comments_total > 0:
avg_score_img = str(avg_score_img)
avg_score = str(avg_score)
nb_comments_total = str(nb_comments_total)
score = '<b>'
score += _("Average review score: %(x_nb_score)s based on %(x_nb_reviews)s reviews") % \
{'x_nb_score': '</b><img src="' + CFG_SITE_URL + '/img/' + avg_score_img + '" alt="' + avg_score + '" />',
'x_nb_reviews': nb_comments_total}
useful_label = _("Readers found the following %s reviews to be most helpful.")
useful_label %= len(comments) > 1 and len(comments) or ""
view_all_comments_link ='<a class"webcomment_view_all_reviews" href="%s/%s/%s/reviews/display?ln=%s&do=hh">' % (CFG_SITE_URL, CFG_SITE_RECORD, recID, ln)
view_all_comments_link += _("View all %s reviews") % nb_comments_total
view_all_comments_link += '</a><br />'
out = warnings + """
<!-- review title table -->
<table class="webcomment_header_ratings">
<tr>
<td class="blocknote">%(comment_title)s:</td>
</tr>
</table>
%(score_label)s<br />
%(useful_label)s
<!-- review table -->
<table class="webcomment_review_title_table">
%(comment_rows)s
</table>
%(view_all_comments_link)s
%(write_button_form)s<br />
""" % \
{ 'comment_title' : _("Rate this document"),
'score_label' : score,
'useful_label' : useful_label,
'recID' : recID,
'view_all_comments' : _("View all %s reviews") % (nb_comments_total,),
'write_comment' : _("Write a review"),
'comment_rows' : comment_rows,
'tab' : ' '*4,
'siteurl' : CFG_SITE_URL,
'view_all_comments_link': nb_comments_total>0 and view_all_comments_link or "",
'write_button_form' : write_button_form
}
else:
out = '''
<!-- review title table -->
<table class="webcomment_header_ratings">
<tr>
<td class="blocknote">%s:</td>
</tr>
</table>
%s<br />
%s
<br />''' % (_("Rate this document"),
_('<div class="webcomment_review_first_introduction">Be the first to review this document.</div>'),
write_button_form)
return out
    def tmpl_get_comment_without_ranking(self, req, ln, nickname, comment_uid, date_creation, body, status, nb_reports, reply_link=None, report_link=None, undelete_link=None, delete_links=None, unreport_link=None, recID=-1, com_id='', attached_files=None):
        """
        Render a single (non-review) comment box, including moderator links.
        private function
        @param req: request object to fetch user info
        @param ln: language
        @param nickname: nickname
        @param comment_uid: uid of the comment's author (compared against the
            viewing user to decide whether the author may delete it)
        @param date_creation: date comment was written
        @param body: comment body
        @param status: status of the comment:
                       da: deleted by author
                       dm: deleted by moderator
                       ok: active
        @param nb_reports: number of reports the comment has
        @param reply_link: if want reply and report, give the http links
        @param report_link: if want reply and report, give the http links
        @param undelete_link: http link to delete the message
        @param delete_links: http links to delete the message
        @param unreport_link: http link to unreport the comment
        @param recID: recID where the comment is posted
        @param com_id: ID of the comment displayed
        @param attached_files: list of attached files
        @return: html table of comment
        """
        from invenio.search_engine import guess_primary_collection_of_a_record
        # load the right message language
        _ = gettext_set_language(ln)
        date_creation = convert_datetext_to_dategui(date_creation, ln=ln)
        if attached_files is None:
            attached_files = []
        out = ''
        # Convert quoted plain-text body into HTML blockquotes.
        final_body = email_quoted_txt2html(body)
        title = _('%(x_name)s') % {'x_name': nickname,}
        # Anchor so the comment can be linked to directly (#<com_id>).
        title += '<a name=%s></a>' % com_id
        links = ''
        moderator_links = ''
        if reply_link:
            links += '<a class="webcomment_comment_reply" href="' + reply_link +'">' + _("Reply") +'</a>'
        # 'ap' status (pending approval) hides the "Report abuse" link.
        if report_link and status != 'ap':
            links += '<a class="webcomment_comment_report" href="' + report_link +'">' + _("Report abuse") + '</a>'
        # Check if user is a comment moderator (auth_code == 0 means authorized)
        record_primary_collection = guess_primary_collection_of_a_record(recID)
        user_info = collect_user_info(req)
        (auth_code, auth_msg) = acc_authorize_action(user_info, 'moderatecomments', collection=record_primary_collection)
        if status in ['dm', 'da'] and req:
            # Deleted comment: moderators see the body plus an undelete link;
            # everyone else only sees a deletion notice.
            if not auth_code:
                if status == 'dm':
                    final_body = '<div class="webcomment_deleted_comment_message">(Comment deleted by the moderator) - not visible for users<br /><br />' +\
                                 final_body + '</div>'
                else:
                    final_body = '<div class="webcomment_deleted_comment_message">(Comment deleted by the author) - not visible for users<br /><br />' +\
                                 final_body + '</div>'
                links = ''
                # NOTE(review): assumes undelete_link is provided whenever a
                # moderator views a deleted comment -- confirm at call sites.
                moderator_links += '<a class="webcomment_deleted_comment_undelete" href="' + undelete_link + '">' + _("Undelete comment") + '</a>'
            else:
                if status == 'dm':
                    final_body = '<div class="webcomment_deleted_comment_message">Comment deleted by the moderator</div>'
                else:
                    final_body = '<div class="webcomment_deleted_comment_message">Comment deleted by the author</div>'
                links = ''
        else:
            # Active comment: moderators (or, optionally, the author) may delete it.
            if not auth_code:
                moderator_links += '<a class="webcomment_comment_delete" href="' + delete_links['mod'] +'">' + _("Delete comment") + '</a>'
            elif (user_info['uid'] == comment_uid) and CFG_WEBCOMMENT_AUTHOR_DELETE_COMMENT_OPTION:
                moderator_links += '<a class="webcomment_comment_delete" href="' + delete_links['auth'] +'">' + _("Delete comment") + '</a>'
            # Heavily-reported comments are hidden pending moderation.
            if nb_reports >= CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN:
                if not auth_code:
                    final_body = '<div class="webcomment_reported_comment_message">(Comment reported. Pending approval) - not visible for users<br /><br />' + final_body + '</div>'
                    links = ''
                    moderator_links += '<a class="webcomment_reported_comment_unreport" href="' + unreport_link +'">' + _("Unreport comment") + '</a>'
                else:
                    final_body = '<div class="webcomment_comment_pending_approval_message">This comment is pending approval due to user reports</div>'
                    links = ''
        # Merge regular links with moderator-only links.
        if links and moderator_links:
            links = links + moderator_links
        elif not links:
            links = moderator_links
        attached_files_html = ''
        if attached_files:
            attached_files_html = '<div class="cmtfilesblock"><b>%s:</b><br/>' % (len(attached_files) == 1 and _("Attached file") or _("Attached files"))
            for (filename, filepath, fileurl) in attached_files:
                attached_files_html += create_html_link(urlbase=fileurl, urlargd={},
                                                        link_label=cgi.escape(filename)) + '<br />'
            attached_files_html += '</div>'
        # Assemble the comment box markup.
        out += """
<div class="webcomment_comment_box">
  <div class="webcomment_comment_avatar"><img class="webcomment_comment_avatar_default" src="%(site_url)s/img/user-icon-1-24x24.gif" alt="avatar" /></div>
  <div class="webcomment_comment_content">
    <div class="webcomment_comment_title">
      %(title)s
      <div class="webcomment_comment_date">%(date)s</div>
    </div>
    <blockquote>
      %(body)s
    </blockquote>
    %(attached_files_html)s
    <div class="webcomment_comment_options">%(links)s</div>
    <div class="clearer"></div>
  </div>
  <div class="clearer"></div>
</div>""" % \
                {'title' : title,
                 'body' : final_body,
                 'links' : links,
                 'attached_files_html': attached_files_html,
                 'date': date_creation,
                 'site_url': CFG_SITE_URL,
                 }
        return out
def tmpl_get_comment_with_ranking(self, req, ln, nickname, comment_uid, date_creation, body, status, nb_reports, nb_votes_total, nb_votes_yes, star_score, title, report_link=None, delete_links=None, undelete_link=None, unreport_link=None, recID=-1):
"""
private function
@param req: request object to fetch user info
@param ln: language
@param nickname: nickname
@param date_creation: date comment was written
@param body: comment body
@param status: status of the comment
@param nb_reports: number of reports the comment has
@param nb_votes_total: total number of votes for this review
@param nb_votes_yes: number of positive votes for this record
@param star_score: star score for this record
@param title: title of review
@param report_link: if want reply and report, give the http links
@param undelete_link: http link to delete the message
@param delete_link: http link to delete the message
@param unreport_link: http link to unreport the comment
@param recID: recID where the comment is posted
@return: html table of review
"""
from invenio.search_engine import guess_primary_collection_of_a_record
# load the right message language
_ = gettext_set_language(ln)
if star_score > 0:
star_score_img = 'stars-' + str(star_score) + '-0.png'
else:
star_score_img = 'stars-0-0.png'
out = ""
date_creation = convert_datetext_to_dategui(date_creation, ln=ln)
reviewed_label = _("Reviewed by %(x_nickname)s on %(x_date)s") % {'x_nickname': nickname, 'x_date':date_creation}
## FIX
nb_votes_yes = str(nb_votes_yes)
nb_votes_total = str(nb_votes_total)
useful_label = _("%(x_nb_people)s out of %(x_nb_total)s people found this review useful") % {'x_nb_people': nb_votes_yes,
'x_nb_total': nb_votes_total}
links = ''
_body = ''
if body != '':
_body = '''
<blockquote>
%s
</blockquote>''' % email_quoted_txt2html(body, linebreak_html='')
# Check if user is a comment moderator
record_primary_collection = guess_primary_collection_of_a_record(recID)
user_info = collect_user_info(req)
(auth_code, auth_msg) = acc_authorize_action(user_info, 'moderatecomments', collection=record_primary_collection)
if status in ['dm', 'da'] and req:
if not auth_code:
if status == 'dm':
_body = '<div class="webcomment_deleted_review_message">(Review deleted by moderator) - not visible for users<br /><br />' +\
_body + '</div>'
else:
_body = '<div class="webcomment_deleted_review_message">(Review deleted by author) - not visible for users<br /><br />' +\
_body + '</div>'
links = '<a class="webcomment_deleted_review_undelete" href="' + undelete_link + '">' + _("Undelete review") + '</a>'
else:
if status == 'dm':
_body = '<div class="webcomment_deleted_review_message">Review deleted by moderator</div>'
else:
_body = '<div class="webcomment_deleted_review_message">Review deleted by author</div>'
links = ''
else:
if not auth_code:
links += '<a class="webcomment_review_delete" href="' + delete_links['mod'] +'">' + _("Delete review") + '</a>'
if nb_reports >= CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN:
if not auth_code:
_body = '<div class="webcomment_review_pending_approval_message">(Review reported. Pending approval) - not visible for users<br /><br />' + _body + '</div>'
links += ' | '
links += '<a class="webcomment_reported_review_unreport" href="' + unreport_link +'">' + _("Unreport review") + '</a>'
else:
_body = '<div class="webcomment_review_pending_approval_message">This review is pending approval due to user reports.</div>'
links = ''
out += '''
<div class="webcomment_review_box">
<div class="webcomment_review_box_inner">
<img src="%(siteurl)s/img/%(star_score_img)s" alt="%(star_score)s/>
<div class="webcomment_review_title">%(title)s</div>
<div class="webcomment_review_label_reviewed">%(reviewed_label)s</div>
<div class="webcomment_review_label_useful">%(useful_label)s</div>
%(body)s
</div>
</div>
%(abuse)s''' % {'siteurl' : CFG_SITE_URL,
'star_score_img': star_score_img,
'star_score' : star_score,
'title' : title,
'reviewed_label': reviewed_label,
'useful_label' : useful_label,
'body' : _body,
'abuse' : links
}
return out
def tmpl_get_comments(self, req, recID, ln,
nb_per_page, page, nb_pages,
display_order, display_since,
CFG_WEBCOMMENT_ALLOW_REVIEWS,
comments, total_nb_comments,
avg_score,
warnings,
border=0, reviews=0,
total_nb_reviews=0,
nickname='', uid=-1, note='',score=5,
can_send_comments=False,
can_attach_files=False,
user_is_subscribed_to_discussion=False,
user_can_unsubscribe_from_discussion=False,
display_comment_rounds=None):
"""
Get table of all comments
@param recID: record id
@param ln: language
@param nb_per_page: number of results per page
@param page: page number
@param display_order: hh = highest helpful score, review only
lh = lowest helpful score, review only
hs = highest star score, review only
ls = lowest star score, review only
od = oldest date
nd = newest date
@param display_since: all= no filtering by date
nd = n days ago
nw = n weeks ago
nm = n months ago
ny = n years ago
where n is a single digit integer between 0 and 9
@param CFG_WEBCOMMENT_ALLOW_REVIEWS: is ranking enable, get from config.py/CFG_WEBCOMMENT_ALLOW_REVIEWS
@param comments: tuple as returned from webcomment.py/query_retrieve_comments_or_remarks
@param total_nb_comments: total number of comments for this record
@param avg_score: average score of reviews for this record
@param warnings: list of warning tuples (warning_text, warning_color)
@param border: boolean, active if want to show border around each comment/review
@param reviews: boolean, enabled for reviews, disabled for comments
@param can_send_comments: boolean, if user can send comments or not
@param can_attach_files: boolean, if user can attach file to comment or not
@param user_is_subscribed_to_discussion: True if user already receives new comments by email
@param user_can_unsubscribe_from_discussion: True is user is allowed to unsubscribe from discussion
"""
# load the right message language
_ = gettext_set_language(ln)
# CERN hack begins: display full ATLAS user name. Check further below too.
current_user_fullname = ""
override_nickname_p = False
if CFG_CERN_SITE:
from invenio.search_engine import get_all_collections_of_a_record
user_info = collect_user_info(uid)
if 'atlas-readaccess-active-members [CERN]' in user_info['group']:
# An ATLAS member is never anonymous to its colleagues
# when commenting inside ATLAS collections
recid_collections = get_all_collections_of_a_record(recID)
if 'ATLAS' in str(recid_collections):
override_nickname_p = True
current_user_fullname = user_info.get('external_fullname', '')
# CERN hack ends
# naming data fields of comments
if reviews:
c_nickname = 0
c_user_id = 1
c_date_creation = 2
c_body = 3
c_status = 4
c_nb_reports = 5
c_nb_votes_yes = 6
c_nb_votes_total = 7
c_star_score = 8
c_title = 9
c_id = 10
c_round_name = 11
c_restriction = 12
reply_to = 13
discussion = 'reviews'
comments_link = '<a href="%s/%s/%s/comments/">%s</a> (%i)' % (CFG_SITE_URL, CFG_SITE_RECORD, recID, _('Comments'), total_nb_comments)
reviews_link = '<b>%s (%i)</b>' % (_('Reviews'), total_nb_reviews)
add_comment_or_review = self.tmpl_add_comment_form_with_ranking(recID, uid, current_user_fullname or nickname, ln, '', score, note, warnings, show_title_p=True, can_attach_files=can_attach_files)
else:
c_nickname = 0
c_user_id = 1
c_date_creation = 2
c_body = 3
c_status = 4
c_nb_reports = 5
c_id = 6
c_round_name = 7
c_restriction = 8
reply_to = 9
discussion = 'comments'
comments_link = '<b>%s (%i)</b>' % (_('Comments'), total_nb_comments)
reviews_link = '<a href="%s/%s/%s/reviews/">%s</a> (%i)' % (CFG_SITE_URL, CFG_SITE_RECORD, recID, _('Reviews'), total_nb_reviews)
add_comment_or_review = self.tmpl_add_comment_form(recID, uid, nickname, ln, note, warnings, can_attach_files=can_attach_files, user_is_subscribed_to_discussion=user_is_subscribed_to_discussion)
# voting links
useful_dict = { 'siteurl' : CFG_SITE_URL,
'CFG_SITE_RECORD' : CFG_SITE_RECORD,
'recID' : recID,
'ln' : ln,
'do' : display_order,
'ds' : display_since,
'nb' : nb_per_page,
'p' : page,
'reviews' : reviews,
'discussion' : discussion
}
useful_yes = '<a href="%(siteurl)s/%(CFG_SITE_RECORD)s/%(recID)s/%(discussion)s/vote?ln=%(ln)s&comid=%%(comid)s&com_value=1&do=%(do)s&ds=%(ds)s&nb=%(nb)s&p=%(p)s&referer=%(siteurl)s/%(CFG_SITE_RECORD)s/%(recID)s/%(discussion)s/display">' + _("Yes") + '</a>'
useful_yes %= useful_dict
useful_no = '<a href="%(siteurl)s/%(CFG_SITE_RECORD)s/%(recID)s/%(discussion)s/vote?ln=%(ln)s&comid=%%(comid)s&com_value=-1&do=%(do)s&ds=%(ds)s&nb=%(nb)s&p=%(p)s&referer=%(siteurl)s/%(CFG_SITE_RECORD)s/%(recID)s/%(discussion)s/display">' + _("No") + '</a>'
useful_no %= useful_dict
warnings = self.tmpl_warnings(warnings, ln)
link_dic = { 'siteurl' : CFG_SITE_URL,
'CFG_SITE_RECORD' : CFG_SITE_RECORD,
'module' : 'comments',
'function' : 'index',
'discussion': discussion,
'arguments' : 'do=%s&ds=%s&nb=%s' % (display_order, display_since, nb_per_page),
'arg_page' : '&p=%s' % page,
'page' : page,
'rec_id' : recID}
if not req:
req = None
## comments table
comments_rows = ''
last_comment_round_name = None
comment_round_names = [comment[0] for comment in comments]
if comment_round_names:
last_comment_round_name = comment_round_names[-1]
for comment_round_name, comments_list in comments:
comment_round_style = "display:none;"
comment_round_is_open = False
if comment_round_name in display_comment_rounds:
comment_round_is_open = True
comment_round_style = ""
comments_rows += '<div id="cmtRound%s" class="cmtround">' % (comment_round_name)
if not comment_round_is_open and \
(comment_round_name or len(comment_round_names) > 1):
new_cmtgrp = list(display_comment_rounds)
new_cmtgrp.append(comment_round_name)
comments_rows += '''<img src="/img/right-trans.gif" id="cmtarrowiconright%(grp_id)s" alt="Open group" /><img src="/img/down-trans.gif" id="cmtarrowicondown%(grp_id)s" alt="Close group" style="display:none" />
<a class="cmtgrpswitch" name="cmtgrpLink%(grp_id)s" onclick="var cmtarrowicondown=document.getElementById('cmtarrowicondown%(grp_id)s');var cmtarrowiconright=document.getElementById('cmtarrowiconright%(grp_id)s');var subgrp=document.getElementById('cmtSubRound%(grp_id)s');if (subgrp.style.display==''){subgrp.style.display='none';cmtarrowiconright.style.display='';cmtarrowicondown.style.display='none';}else{subgrp.style.display='';cmtarrowiconright.style.display='none';cmtarrowicondown.style.display='';};return false;"''' % {'grp_id': comment_round_name}
comments_rows += 'href=\"%(siteurl)s/%(CFG_SITE_RECORD)s/%(rec_id)s/%(discussion)s/%(function)s?%(arguments)s&%(arg_page)s' % link_dic
comments_rows += '&' + '&'.join(["cmtgrp=" + grp for grp in new_cmtgrp if grp != 'none']) + \
'#cmtgrpLink%s' % (comment_round_name) + '\">'
comments_rows += _('%(x_nb)i comments for round "%(x_name)s"') % {'x_nb': len(comments_list), 'x_name': comment_round_name} + "</a><br/>"
elif comment_round_name or len(comment_round_names) > 1:
new_cmtgrp = list(display_comment_rounds)
new_cmtgrp.remove(comment_round_name)
comments_rows += '''<img src="/img/right-trans.gif" id="cmtarrowiconright%(grp_id)s" alt="Open group" style="display:none" /><img src="/img/down-trans.gif" id="cmtarrowicondown%(grp_id)s" alt="Close group" />
<a class="cmtgrpswitch" name="cmtgrpLink%(grp_id)s" onclick="var cmtarrowicondown=document.getElementById('cmtarrowicondown%(grp_id)s');var cmtarrowiconright=document.getElementById('cmtarrowiconright%(grp_id)s');var subgrp=document.getElementById('cmtSubRound%(grp_id)s');if (subgrp.style.display==''){subgrp.style.display='none';cmtarrowiconright.style.display='';cmtarrowicondown.style.display='none';}else{subgrp.style.display='';cmtarrowiconright.style.display='none';cmtarrowicondown.style.display='';};return false;"''' % {'grp_id': comment_round_name}
comments_rows += 'href=\"%(siteurl)s/%(CFG_SITE_RECORD)s/%(rec_id)s/%(discussion)s/%(function)s?%(arguments)s&%(arg_page)s' % link_dic
comments_rows += '&' + ('&'.join(["cmtgrp=" + grp for grp in new_cmtgrp if grp != 'none']) or 'cmtgrp=none' ) + \
'#cmtgrpLink%s' % (comment_round_name) + '\">'
comments_rows += _('%(x_nb)i comments for round "%(x_name)s"') % {'x_nb': len(comments_list), 'x_name': comment_round_name}+ "</a><br/>"
comments_rows += '<div id="cmtSubRound%s" class="cmtsubround" style="%s">' % (comment_round_name,
comment_round_style)
thread_history = [0]
for comment in comments_list:
if comment[reply_to] not in thread_history:
# Going one level down in the thread
thread_history.append(comment[reply_to])
depth = thread_history.index(comment[reply_to])
else:
depth = thread_history.index(comment[reply_to])
thread_history = thread_history[:depth + 1]
# CERN hack begins: display full ATLAS user name.
comment_user_fullname = ""
if CFG_CERN_SITE and override_nickname_p:
comment_user_fullname = get_email(comment[c_user_id])
# CERN hack ends
if comment[c_nickname]:
_nickname = comment[c_nickname]
display = _nickname
else:
(uid, _nickname, display) = get_user_info(comment[c_user_id])
messaging_link = self.create_messaging_link(_nickname, comment_user_fullname or display, ln)
from invenio.webcomment import get_attached_files # FIXME
files = get_attached_files(recID, comment[c_id])
# do NOT delete the HTML comment below. It is used for parsing... (I plead unguilty!)
comments_rows += """
<!-- start comment row -->
<div class="webcomment_comment_depth_%s">""" % (depth)
delete_links = {}
if not reviews:
report_link = '%(siteurl)s/%(CFG_SITE_RECORD)s/%(recID)s/comments/report?ln=%(ln)s&comid=%%(comid)s&do=%(do)s&ds=%(ds)s&nb=%(nb)s&p=%(p)s&referer=%(siteurl)s/%(CFG_SITE_RECORD)s/%(recID)s/comments/display' % useful_dict % {'comid':comment[c_id]}
reply_link = '%(siteurl)s/%(CFG_SITE_RECORD)s/%(recID)s/comments/add?ln=%(ln)s&action=REPLY&comid=%%(comid)s' % useful_dict % {'comid':comment[c_id]}
delete_links['mod'] = "%s/admin/webcomment/webcommentadmin.py/del_single_com_mod?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
delete_links['auth'] = "%s/admin/webcomment/webcommentadmin.py/del_single_com_auth?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
undelete_link = "%s/admin/webcomment/webcommentadmin.py/undel_com?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
unreport_link = "%s/admin/webcomment/webcommentadmin.py/unreport_com?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
comments_rows += self.tmpl_get_comment_without_ranking(req, ln, messaging_link, comment[c_user_id], comment[c_date_creation], comment[c_body], comment[c_status], comment[c_nb_reports], reply_link, report_link, undelete_link, delete_links, unreport_link, recID, comment[c_id], files)
else:
report_link = '%(siteurl)s/%(CFG_SITE_RECORD)s/%(recID)s/reviews/report?ln=%(ln)s&comid=%%(comid)s&do=%(do)s&ds=%(ds)s&nb=%(nb)s&p=%(p)s&referer=%(siteurl)s/%(CFG_SITE_RECORD)s/%(recID)s/reviews/display' % useful_dict % {'comid': comment[c_id]}
delete_links['mod'] = "%s/admin/webcomment/webcommentadmin.py/del_single_com_mod?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
delete_links['auth'] = "%s/admin/webcomment/webcommentadmin.py/del_single_com_auth?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
undelete_link = "%s/admin/webcomment/webcommentadmin.py/undel_com?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
unreport_link = "%s/admin/webcomment/webcommentadmin.py/unreport_com?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
comments_rows += self.tmpl_get_comment_with_ranking(req, ln, messaging_link, comment[c_user_id], comment[c_date_creation], comment[c_body], comment[c_status], comment[c_nb_reports], comment[c_nb_votes_total], comment[c_nb_votes_yes], comment[c_star_score], comment[c_title], report_link, delete_links, undelete_link, unreport_link, recID)
helpful_label = _("Was this review helpful?")
report_abuse_label = "(" + _("Report abuse") + ")"
yes_no_separator = '<td> / </td>'
if comment[c_nb_reports] >= CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN or comment[c_status] in ['dm', 'da']:
report_abuse_label = ""
helpful_label = ""
useful_yes = ""
useful_no = ""
yes_no_separator = ""
comments_rows += """
<table>
<tr>
<td>%(helpful_label)s %(tab)s</td>
<td> %(yes)s </td>
%(yes_no_separator)s
<td> %(no)s </td>
<td class="reportabuse">%(tab)s%(tab)s<a href="%(report)s">%(report_abuse_label)s</a></td>
</tr>
</table>""" \
% {'helpful_label': helpful_label,
'yes' : useful_yes % {'comid':comment[c_id]},
'yes_no_separator': yes_no_separator,
'no' : useful_no % {'comid':comment[c_id]},
'report' : report_link % {'comid':comment[c_id]},
'report_abuse_label': comment[c_nb_reports] >= CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN and '' or report_abuse_label,
'tab' : ' '*2}
# do NOT remove HTML comment below. It is used for parsing...
comments_rows += """
</div>
<!-- end comment row -->"""
comments_rows += '</div></div>'
## page links
page_links = ''
# Previous
if page != 1:
link_dic['arg_page'] = 'p=%s' % (page - 1)
page_links += '<a href=\"%(siteurl)s/%(CFG_SITE_RECORD)s/%(rec_id)s/%(discussion)s/%(function)s?%(arguments)s&%(arg_page)s\"><<</a> ' % link_dic
else:
page_links += ' %s ' % (' '*(len(_('Previous'))+7))
# Page Numbers
for i in range(1, nb_pages+1):
link_dic['arg_page'] = 'p=%s' % i
link_dic['page'] = '%s' % i
if i != page:
page_links += '''
<a href=\"%(siteurl)s/%(CFG_SITE_RECORD)s/%(rec_id)s/%(discussion)s/%(function)s?%(arguments)s&%(arg_page)s\">%(page)s</a> ''' % link_dic
else:
page_links += ''' <b>%s</b> ''' % i
# Next
if page != nb_pages:
link_dic['arg_page'] = 'p=%s' % (page + 1)
page_links += '''
<a href=\"%(siteurl)s/%(CFG_SITE_RECORD)s/%(rec_id)s/%(discussion)s/%(function)s?%(arguments)s&%(arg_page)s\">>></a> ''' % link_dic
else:
page_links += '%s' % (' '*(len(_('Next'))+7))
## stuff for ranking if enabled
if reviews:
if avg_score > 0:
avg_score_img = 'stars-' + str(avg_score).split('.')[0] + '-' + str(avg_score).split('.')[1] + '.png'
else:
avg_score_img = "stars-0-0.png"
ranking_average = '<br /><b>'
ranking_average += _("Average review score: %(x_nb_score)s based on %(x_nb_reviews)s reviews") % \
{'x_nb_score': '</b><img src="' + CFG_SITE_URL + '/img/' + avg_score_img + '" alt="' + str(avg_score) + '" />',
'x_nb_reviews': str(total_nb_reviews)}
ranking_average += '<br />'
else:
ranking_average = ""
write_button_link = '''%s/%s/%s/%s/add''' % (CFG_SITE_URL, CFG_SITE_RECORD, recID, discussion)
write_button_form = '<input type="hidden" name="ln" value="%s"/>'
write_button_form = self.createhiddenform(action=write_button_link,
method="get",
text=write_button_form,
button = reviews and _('Write a review') or _('Write a comment'))
if reviews:
total_label = _("There is a total of %s reviews")
else:
total_label = _("There is a total of %s comments")
total_label %= total_nb_comments
review_or_comment_first = ''
if reviews == 0 and total_nb_comments == 0 and can_send_comments:
review_or_comment_first = _("Start a discussion about any aspect of this document.") + '<br />'
elif reviews == 1 and total_nb_reviews == 0 and can_send_comments:
review_or_comment_first = _("Be the first to review this document.") + '<br />'
# do NOT remove the HTML comments below. Used for parsing
body = '''
%(comments_and_review_tabs)s
<!-- start comments table -->
<div class="webcomment_comment_table">
%(comments_rows)s
</div>
<!-- end comments table -->
%(review_or_comment_first)s
<br />''' % \
{ 'record_label': _("Record"),
'back_label': _("Back to search results"),
'total_label': total_label,
'write_button_form' : write_button_form,
'write_button_form_again' : total_nb_comments>3 and write_button_form or "",
'comments_rows' : comments_rows,
'total_nb_comments' : total_nb_comments,
'comments_or_reviews' : reviews and _('review') or _('comment'),
'comments_or_reviews_title' : reviews and _('Review') or _('Comment'),
'siteurl' : CFG_SITE_URL,
'module' : "comments",
'recid' : recID,
'ln' : ln,
#'border' : border,
'ranking_avg' : ranking_average,
'comments_and_review_tabs' : CFG_WEBCOMMENT_ALLOW_REVIEWS and \
CFG_WEBCOMMENT_ALLOW_COMMENTS and \
'%s | %s <br />' % \
(comments_link, reviews_link) or '',
'review_or_comment_first' : review_or_comment_first
}
# form is not currently used. reserved for an eventual purpose
#form = """
# Display <select name="nb" size="1"> per page
# <option value="all">All</option>
# <option value="10">10</option>
# <option value="25">20</option>
# <option value="50">50</option>
# <option value="100" selected="selected">100</option>
# </select>
# comments per page that are <select name="ds" size="1">
# <option value="all" selected="selected">Any age</option>
# <option value="1d">1 day old</option>
# <option value="3d">3 days old</option>
# <option value="1w">1 week old</option>
# <option value="2w">2 weeks old</option>
# <option value="1m">1 month old</option>
# <option value="3m">3 months old</option>
# <option value="6m">6 months old</option>
# <option value="1y">1 year old</option>
# </select>
# and sorted by <select name="do" size="1">
# <option value="od" selected="selected">Oldest first</option>
# <option value="nd">Newest first</option>
# %s
# </select>
# """ % \
# (reviews==1 and '''
# <option value=\"hh\">most helpful</option>
# <option value=\"lh\">least helpful</option>
# <option value=\"hs\">highest star ranking</option>
# <option value=\"ls\">lowest star ranking</option>
# </select>''' or '''
# </select>''')
#
#form_link = "%(siteurl)s/%(module)s/%(function)s" % link_dic
#form = self.createhiddenform(action=form_link, method="get", text=form, button='Go', recid=recID, p=1)
pages = """
<div>
%(v_label)s %(comments_or_reviews)s %(results_nb_lower)s-%(results_nb_higher)s <br />
%(page_links)s
</div>
""" % \
{'v_label': _("Viewing"),
'page_links': _("Page:") + page_links ,
'comments_or_reviews': reviews and _('review') or _('comment'),
'results_nb_lower': len(comments)>0 and ((page-1) * nb_per_page)+1 or 0,
'results_nb_higher': page == nb_pages and (((page-1) * nb_per_page) + len(comments)) or (page * nb_per_page)}
if nb_pages > 1:
#body = warnings + body + form + pages
body = warnings + body + pages
else:
body = warnings + body
if reviews == 0:
if not user_is_subscribed_to_discussion:
body += '<small>'
body += '<div class="comment-subscribe">' + '<img src="%s/img/mail-icon-12x8.gif" border="0" alt="" />' % CFG_SITE_URL + \
' ' + '<b>' + create_html_link(urlbase=CFG_SITE_URL + '/'+ CFG_SITE_RECORD +'/' + \
str(recID) + '/comments/subscribe',
urlargd={},
link_label=_('Subscribe')) + \
'</b>' + ' to this discussion. You will then receive all new comments by email.' + '</div>'
body += '</small><br />'
elif user_can_unsubscribe_from_discussion:
body += '<small>'
body += '<div class="comment-subscribe">' + '<img src="%s/img/mail-icon-12x8.gif" border="0" alt="" />' % CFG_SITE_URL + \
' ' + '<b>' + create_html_link(urlbase=CFG_SITE_URL + '/'+ CFG_SITE_RECORD +'/' + \
str(recID) + '/comments/unsubscribe',
urlargd={},
link_label=_('Unsubscribe')) + \
'</b>' + ' from this discussion. You will no longer receive emails about new comments.' + '</div>'
body += '</small><br />'
if can_send_comments:
body += add_comment_or_review
else:
body += '<br/><em>' + _("You are not authorized to comment or review.") + '</em>'
return '<div class="webcomment_container">' + body + '</div>'
def create_messaging_link(self, to, display_name, ln=CFG_SITE_LANG):
    """Return an HTML link to the messaging system for user *to*.

    Falls back to the bare *display_name* when no recipient is given.
    """
    if not to:
        return display_name
    target = "%s/yourmessages/write?msg_to=%s&ln=%s" % (CFG_SITE_URL, to, ln)
    return '<a href="%s" class="maillink">%s</a>' % (target, display_name)
def createhiddenform(self, action="", method="get", text="", button="confirm", cnfrm='', **hidden):
    """
    Build an HTML form carrying **hidden as hidden inputs.

    @param action: name of the action to perform on submit
    @param method: 'get' or 'post' (anything else falls back to 'get')
    @param text: additional text, can also be used to add non hidden input
    @param button: value/caption on the submit button
    @param cnfrm: if given, must check checkbox to confirm
    @param **hidden: dictionary with name=value pairs for hidden input
    @return: html form
    """
    # Only GET and POST are accepted; any other verb degrades to GET.
    safe_method = method if method.lower().strip() in ['get', 'post'] else 'get'
    parts = []
    parts.append("""
<form action="%s" method="%s">""" % (action, safe_method))
    parts.append("""
<table style="width:90%">
<tr>
<td style="vertical-align: top">
""")
    parts.append(text + '\n')
    if cnfrm:
        parts.append("""
<input type="checkbox" name="confirm" value="1" />""")
    for key, val in hidden.items():
        # A list value expands into one hidden input per element.
        for value in (val if isinstance(val, list) else [val]):
            parts.append("""
<input type="hidden" name="%s" value="%s" />""" % (key, value))
    parts.append("""
</td>
</tr>
<tr>
<td>""")
    parts.append("""
<input class="adminbutton" type="submit" value="%s" />""" % (button, ))
    parts.append("""
</td>
</tr>
</table>
</form>""")
    return ''.join(parts)
def create_write_comment_hiddenform(self, action="", method="get", text="", button="confirm", cnfrm='',
                                    enctype='', form_id=None, form_name=None, **hidden):
    """
    create select with hidden values and submit button
    @param action: name of the action to perform on submit
    @param method: 'get' or 'post' (anything else falls back to 'get')
    @param text: additional text, can also be used to add non hidden input
    @param button: value/caption on the submit button
    @param cnfrm: if given, must check checkbox to confirm
    @param enctype: encoding type of the form (e.g. 'multipart/form-data')
    @param form_id: HTML 'id' attribute of the form tag
    @param form_name: HTML 'name' attribute of the form tag
    @param **hidden: dictionary with name=value pairs for hidden input
    @return: html form
    """
    enctype_attr = ''
    if enctype:
        # BUGFIX: quote the attribute value; an unquoted value such as
        # multipart/form-data (containing '/') is not valid HTML.
        enctype_attr = 'enctype="%s"' % enctype
    output = """
<form action="%s" method="%s" %s%s%s>""" % \
        (action, method.lower().strip() in ['get', 'post'] and method or 'get',
         enctype_attr, form_name and ' name="%s"' % form_name or '',
         form_id and ' id="%s"' % form_id or '')
    if cnfrm:
        output += """
<input type="checkbox" name="confirm" value="1" />"""
    for key in hidden.keys():
        # A list value expands into one hidden input per element.
        if type(hidden[key]) is list:
            for value in hidden[key]:
                output += """
<input type="hidden" name="%s" value="%s" />""" % (key, value)
        else:
            output += """
<input type="hidden" name="%s" value="%s" />""" % (key, hidden[key])
    output += text + '\n'
    output += """
</form>"""
    return output
def tmpl_warnings(self, warnings=None, ln=CFG_SITE_LANG):
    """
    Display len(warnings) warning fields
    @param warnings: a single warning tuple or a list of warning tuples
                     (warning_text, warning_color)
    @param ln: language
    @return: html output
    """
    # BUGFIX (idiom): avoid the mutable default argument [];
    # None behaves exactly like the old empty-list default.
    if warnings is None:
        warnings = []
    # A single warning tuple may be passed directly: wrap it in a list.
    if not isinstance(warnings, list):
        warnings = [warnings]
    if not warnings:
        return ""
    warningbox = ""
    for warning in warnings:
        # Index (not unpack) so tuples with extra trailing elements
        # keep working, as with the original code.
        warning_text = warning[0]
        warning_color = warning[1]
        # Green warnings are informational; anything else is important.
        if warning_color == 'green':
            span_class = 'exampleleader'
        else:
            span_class = 'important'
        warningbox += '''
<span class="%(span_class)s">%(warning)s</span><br />''' % \
            { 'span_class' : span_class,
              'warning' : warning_text }
    return warningbox
def tmpl_error(self, error, ln=CFG_SITE_LANG):
    """
    Display error
    @param error: string
    @param ln: language
    @return: html output (empty string when there is no error)
    """
    _ = gettext_set_language(ln)
    if error == "":
        return ""
    return ("<div class=\"errorbox\">\n <b>Error:</b>\n"
            " <p>" + error + " </p>"
            "</div><br />\n")
def tmpl_add_comment_form(self, recID, uid, nickname, ln, msg,
                          warnings, textual_msg=None, can_attach_files=False,
                          user_is_subscribed_to_discussion=False, reply_to=None):
    """
    Add form for comments
    @param recID: record id
    @param uid: user id
    @param nickname: nickname of the user, if already known
    @param ln: language
    @param msg: comment body contents for when refreshing due to
                warning, or when replying to a comment
    @param textual_msg: same as 'msg', but contains the textual
                        version in case user cannot display CKeditor
    @param warnings: list of warning tuples (warning_text, warning_color)
    @param can_attach_files: if user can upload attach file to record or not
    @param user_is_subscribed_to_discussion: True if user already receives new comments by email
    @param reply_to: the ID of the comment we are replying to. None if not replying
    @return html add comment form
    """
    _ = gettext_set_language(ln)
    link_dic = { 'siteurl'    : CFG_SITE_URL,
                 'CFG_SITE_RECORD' : CFG_SITE_RECORD,
                 'module'     : 'comments',
                 'function'   : 'add',
                 'arguments'  : 'ln=%s&action=%s' % (ln, 'SUBMIT'),
                 'recID'      : recID}
    if textual_msg is None:
        textual_msg = msg
    # FIXME a cleaner handling of nicknames is needed.
    if not nickname:
        (uid, nickname, display) = get_user_info(uid)
    if nickname:
        note = _("Note: Your nickname, %s, will be displayed as author of this comment.") % ('<i>' + nickname + '</i>')
    else:
        (uid, nickname, display) = get_user_info(uid)
        link = '<a href="%s/youraccount/edit">' % CFG_SITE_SECURE_URL
        note = _("Note: you have not %(x_url_open)sdefined your nickname%(x_url_close)s. %(x_nickname)s will be displayed as the author of this comment.") % \
               {'x_url_open': link,
                'x_url_close': '</a>',
                'x_nickname': ' <br /><i>' + display + '</i>'}
    if not CFG_WEBCOMMENT_USE_RICH_TEXT_EDITOR:
        note += '<br />' + ' '*10 + cgi.escape('You can use some HTML tags: <a href>, <strong>, <blockquote>, <br />, <p>, <em>, <ul>, <li>, <b>, <i>')
    #from invenio.search_engine import print_record
    #record_details = print_record(recID=recID, format='hb', ln=ln)
    warnings = self.tmpl_warnings(warnings, ln)
    # Prepare file upload settings. We must enable file upload in
    # the ckeditor + a simple file upload interface (independant from editor)
    file_upload_url = None
    simple_attach_file_interface = ''
    if isGuestUser(uid):
        simple_attach_file_interface = "<small><em>%s</em></small><br/>" % _("Once logged in, authorized users can also attach files.")
    if can_attach_files:
        # Note that files can be uploaded only when user is logged in
        #file_upload_url = '%s/%s/%i/comments/attachments/put' % \
        #                  (CFG_SITE_URL, CFG_SITE_RECORD, recID)
        simple_attach_file_interface = '''
<div id="uploadcommentattachmentsinterface">
<small>%(attach_msg)s: <em>(%(nb_files_limit_msg)s. %(file_size_limit_msg)s)</em></small><br />
<input class="multi max-%(CFG_WEBCOMMENT_MAX_ATTACHED_FILES)s" type="file" name="commentattachment[]"/><br />
<noscript>
<input type="file" name="commentattachment[]" /><br />
</noscript>
</div>
''' % \
        {'CFG_WEBCOMMENT_MAX_ATTACHED_FILES': CFG_WEBCOMMENT_MAX_ATTACHED_FILES,
         'attach_msg': CFG_WEBCOMMENT_MAX_ATTACHED_FILES == 1 and _("Optionally, attach a file to this comment") or \
                       _("Optionally, attach files to this comment"),
         # BUGFIX: the operands of 'and' were reversed
         # (_("Max one file") and CFG_... == 1 or ...), so with a limit of
         # one file this rendered the boolean True instead of the label.
         'nb_files_limit_msg': CFG_WEBCOMMENT_MAX_ATTACHED_FILES == 1 and _("Max one file") or \
                               _("Max %i files") % CFG_WEBCOMMENT_MAX_ATTACHED_FILES,
         'file_size_limit_msg': CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE > 0 and _("Max %(x_nb_bytes)s per file") % {'x_nb_bytes': (CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE < 1024*1024 and (str(CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE/1024) + 'KB') or (str(CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE/(1024*1024)) + 'MB'))} or ''}
    editor = get_html_text_editor(name='msg',
                                  content=msg,
                                  textual_content=textual_msg,
                                  width='100%',
                                  height='400px',
                                  enabled=CFG_WEBCOMMENT_USE_RICH_TEXT_EDITOR,
                                  file_upload_url=file_upload_url,
                                  toolbar_set="WebComment",
                                  ln=ln)
    subscribe_to_discussion = ''
    if not user_is_subscribed_to_discussion:
        # Offer to subscribe to discussion
        subscribe_to_discussion = '<small><input type="checkbox" name="subscribe" id="subscribe"/><label for="subscribe">%s</label></small>' % _("Send me an email when a new comment is posted")
    form = """<div id="comment-write"><h2>%(add_comment)s</h2>
%(editor)s
<br />
%(simple_attach_file_interface)s
<span class="reportabuse">%(note)s</span>
<div class="submit-area">
%(subscribe_to_discussion)s<br />
<input class="adminbutton" type="submit" value="Add comment" onclick="user_must_confirm_before_leaving_page = false;return true;"/>
%(reply_to)s
</div>
""" % {'note': note,
       'record_label': _("Article") + ":",
       'comment_label': _("Comment") + ":",
       'add_comment': _('Add comment'),
       'editor': editor,
       'subscribe_to_discussion': subscribe_to_discussion,
       'reply_to': reply_to and '<input type="hidden" name="comid" value="%s"/>' % reply_to or '',
       'simple_attach_file_interface': simple_attach_file_interface}
    form_link = "%(siteurl)s/%(CFG_SITE_RECORD)s/%(recID)s/comments/%(function)s?%(arguments)s" % link_dic
    form = self.create_write_comment_hiddenform(action=form_link, method="post", text=form, button='Add comment',
                                                enctype='multipart/form-data', form_id='cmtForm',
                                                form_name='cmtForm')
    form += '</div>'
    return warnings + form + self.tmpl_page_do_not_leave_comment_page_js(ln=ln)
def tmpl_add_comment_form_with_ranking(self, recID, uid, nickname, ln, msg, score, note,
                                       warnings, textual_msg=None, show_title_p=False,
                                       can_attach_files=False):
    """
    Add form for reviews
    @param recID: record id
    @param uid: user id
    @param nickname: nickname of the user, if already known
    @param ln: language
    @param msg: comment body contents for when refreshing due to warning
    @param textual_msg: the textual version of 'msg' when user cannot display Ckeditor
    @param score: review score
    @param note: review title
    @param warnings: list of warning tuples (warning_text, warning_color)
    @param show_title_p: if True, prefix the form with "Add Review" as title
    @param can_attach_files: if user can upload attach file to record or not
    @return: html add review form
    """
    _ = gettext_set_language(ln)
    link_dic = { 'siteurl'    : CFG_SITE_URL,
                 'CFG_SITE_RECORD' : CFG_SITE_RECORD,
                 'module'     : 'comments',
                 'function'   : 'add',
                 'arguments'  : 'ln=%s&action=%s' % (ln, 'SUBMIT'),
                 'recID'      : recID}
    warnings = self.tmpl_warnings(warnings, ln)
    if textual_msg is None:
        textual_msg = msg
    #from search_engine import print_record
    #record_details = print_record(recID=recID, format='hb', ln=ln)
    if nickname:
        note_label = _("Note: Your nickname, %s, will be displayed as the author of this review.")
        note_label %= ('<i>' + nickname + '</i>')
    else:
        (uid, nickname, display) = get_user_info(uid)
        link = '<a href="%s/youraccount/edit">' % CFG_SITE_SECURE_URL
        note_label = _("Note: you have not %(x_url_open)sdefined your nickname%(x_url_close)s. %(x_nickname)s will be displayed as the author of this comment.") % \
                     {'x_url_open': link,
                      'x_url_close': '</a>',
                      'x_nickname': ' <br /><i>' + display + '</i>'}
    # Pre-select the option matching the current review score.
    selected0 = ''
    selected1 = ''
    selected2 = ''
    selected3 = ''
    selected4 = ''
    selected5 = ''
    if score == 0:
        selected0 = ' selected="selected"'
    elif score == 1:
        selected1 = ' selected="selected"'
    elif score == 2:
        selected2 = ' selected="selected"'
    elif score == 3:
        selected3 = ' selected="selected"'
    elif score == 4:
        selected4 = ' selected="selected"'
    elif score == 5:
        selected5 = ' selected="selected"'
    ## file_upload_url = None
    ## if can_attach_files:
    ##     file_upload_url = '%s/%s/%i/comments/attachments/put' % \
    ##                       (CFG_SITE_URL, CFG_SITE_RECORD, recID)
    # BUGFIX: pass the computed textual_msg (not msg) as the textual
    # fallback content, mirroring tmpl_add_comment_form; previously the
    # textual_msg computed above was silently ignored.
    editor = get_html_text_editor(name='msg',
                                  content=msg,
                                  textual_content=textual_msg,
                                  width='90%',
                                  height='400px',
                                  enabled=CFG_WEBCOMMENT_USE_RICH_TEXT_EDITOR,
                                  # file_upload_url=file_upload_url,
                                  toolbar_set="WebComment",
                                  ln=ln)
    form = """%(add_review)s
<table style="width: 100%%">
<tr>
<td style="padding-bottom: 10px;">%(rate_label)s:
<select name=\"score\" size=\"1\">
<option value=\"0\"%(selected0)s>-%(select_label)s-</option>
<option value=\"5\"%(selected5)s>***** (best)</option>
<option value=\"4\"%(selected4)s>****</option>
<option value=\"3\"%(selected3)s>***</option>
<option value=\"2\"%(selected2)s>**</option>
<option value=\"1\"%(selected1)s>* (worst)</option>
</select>
</td>
</tr>
<tr>
<td>%(title_label)s:</td>
</tr>
<tr>
<td style="padding-bottom: 10px;">
<input type="text" name="note" maxlength="250" style="width:90%%" value="%(note)s" />
</td>
</tr>
<tr>
<td>%(write_label)s:</td>
</tr>
<tr>
<td>
%(editor)s
</td>
</tr>
<tr>
<td class="reportabuse">%(note_label)s</td></tr>
</table>
""" % {'article_label': _('Article'),
       'rate_label': _("Rate this article"),
       'select_label': _("Select a score"),
       'title_label': _("Give a title to your review"),
       'write_label': _("Write your review"),
       'note_label': note_label,
       'note' : note!='' and note or "",
       'msg' : msg!='' and msg or "",
       #'record' : record_details
       'add_review': show_title_p and ('<h2>'+_('Add review')+'</h2>') or '',
       'selected0': selected0,
       'selected1': selected1,
       'selected2': selected2,
       'selected3': selected3,
       'selected4': selected4,
       'selected5': selected5,
       'editor': editor,
       }
    form_link = "%(siteurl)s/%(CFG_SITE_RECORD)s/%(recID)s/reviews/%(function)s?%(arguments)s" % link_dic
    form = self.createhiddenform(action=form_link, method="post", text=form, button=_('Add Review'))
    return warnings + form
def tmpl_add_comment_successful(self, recID, ln, reviews, warnings, success):
    """
    Confirmation page shown after a comment/review has been added.
    @param recID: record id
    @param ln: language
    @param reviews: 1 when a review was added, otherwise a comment
    @param warnings: list of warning tuples (warning_text, warning_color)
    @param success: anchor of the newly added comment/review
    @return: html page of successfully added comment/review
    """
    _ = gettext_set_language(ln)
    # Warnings short-circuit the success message and back-link.
    if warnings:
        return self.tmpl_warnings(warnings, ln) + '<br /><br />'
    discussion = reviews == 1 and 'reviews' or 'comments'
    back_link = "%s/%s/%s/%s/display?ln=%s&do=od#%s" % \
                (CFG_SITE_URL, CFG_SITE_RECORD, recID, discussion, ln, success)
    if reviews:
        message = _("Your review was successfully added.")
    else:
        message = _("Your comment was successfully added.")
    return (message + '<br /><br />' +
            '<a href="%s">' % back_link +
            _('Back to record') + '</a>')
def tmpl_create_multiple_actions_form(self,
                                      form_name="",
                                      form_action="",
                                      method="get",
                                      action_display=None,
                                      action_field_name="",
                                      button_label="",
                                      button_name="",
                                      content="",
                                      **hidden):
    """ Creates an HTML form with a multiple choice of actions and a button to select it.
    @param form_action: link to the receiver of the formular
    @param form_name: name of the HTML formular
    @param method: either 'GET' or 'POST'
    @param action_display: dictionary of actions.
                           action is HTML name (name of action)
                           display is the string provided in the popup
    @param action_field_name: html name of action field
    @param button_label: what's written on the button
    @param button_name: html name of the button
    @param content: what's inside te formular
    @param **hidden: dictionary of name/value pairs of hidden fields.
    """
    # BUGFIX (idiom): avoid the mutable default argument {};
    # None behaves exactly like the old empty-dict default.
    if action_display is None:
        action_display = {}
    # NOTE(review): form_name is accepted but never emitted in the markup;
    # kept as-is for API compatibility — confirm whether a name attribute
    # was intended.
    output = """
<form action="%s" method="%s">""" % (form_action, method)
    output += """
<table>
<tr>
<td style="vertical-align: top" colspan="2">
"""
    output += content + '\n'
    for key in hidden.keys():
        # A list value expands into one hidden input per element.
        if type(hidden[key]) is list:
            for value in hidden[key]:
                output += """
<input type="hidden" name="%s" value="%s" />""" % (key, value)
        else:
            output += """
<input type="hidden" name="%s" value="%s" />""" % (key, hidden[key])
    output += """
</td>
</tr>
<tr>
<td style="text-align:right;">"""
    # The action popup is only rendered when there is at least one action.
    if type(action_display) is dict and len(action_display.keys()):
        output += """
<select name="%s">""" % action_field_name
        for (key, value) in action_display.items():
            output += """
<option value="%s">%s</option>""" % (key, value)
        output += """
</select>"""
    output += """
</td>
<td style="text-align:left;">
<input class="adminbutton" type="submit" value="%s" name="%s"/>""" % (button_label, button_name)
    output += """
</td>
</tr>
</table>
</form>"""
    return output
def tmpl_admin_index(self, ln):
    """
    Build the WebComment admin index (menu) page.
    @param ln: language
    @return: html menu box listing the available admin actions
    """
    # load the right message language
    _ = gettext_set_language(ln)
    out = '<ol>'
    # Only offer the per-type menus when that feature is enabled.
    if CFG_WEBCOMMENT_ALLOW_COMMENTS or CFG_WEBCOMMENT_ALLOW_REVIEWS:
        if CFG_WEBCOMMENT_ALLOW_COMMENTS:
            out += '<h3>Comments status</h3>'
            out += '<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/hot?ln=%(ln)s&comments=1">%(hot_cmt_label)s</a></li>' % \
                   {'siteurl': CFG_SITE_URL, 'ln': ln, 'hot_cmt_label': _("View most commented records")}
            out += '<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/latest?ln=%(ln)s&comments=1">%(latest_cmt_label)s</a></li>' % \
                   {'siteurl': CFG_SITE_URL, 'ln': ln, 'latest_cmt_label': _("View latest commented records")}
            out += '<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/comments?ln=%(ln)s&reviews=0">%(reported_cmt_label)s</a></li>' % \
                   {'siteurl': CFG_SITE_URL, 'ln': ln, 'reported_cmt_label': _("View all comments reported as abuse")}
        if CFG_WEBCOMMENT_ALLOW_REVIEWS:
            out += '<h3>Reviews status</h3>'
            out += '<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/hot?ln=%(ln)s&comments=0">%(hot_rev_label)s</a></li>' % \
                   {'siteurl': CFG_SITE_URL, 'ln': ln, 'hot_rev_label': _("View most reviewed records")}
            out += '<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/latest?ln=%(ln)s&comments=0">%(latest_rev_label)s</a></li>' % \
                   {'siteurl': CFG_SITE_URL, 'ln': ln, 'latest_rev_label': _("View latest reviewed records")}
            out += '<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/comments?ln=%(ln)s&reviews=1">%(reported_rev_label)s</a></li>' % \
                   {'siteurl': CFG_SITE_URL, 'ln': ln, 'reported_rev_label': _("View all reviews reported as abuse")}
        #<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/delete?ln=%(ln)s&comid=-1">%(delete_label)s</a></li>
        out +="""
<h3>General</h3>
<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/users?ln=%(ln)s">%(view_users)s</a></li>
<li><a href="%(siteurl)s/help/admin/webcomment-admin-guide">%(guide)s</a></li>
""" % {'siteurl'    : CFG_SITE_URL,
       #'delete_label': _("Delete/Undelete comment(s) or suppress abuse report(s)"),
       'view_users': _("View all users who have been reported"),
       'ln'         : ln,
       'guide'      : _("Guide")}
    else:
        out += _("Comments and reviews are disabled") + '<br />'
    out += '</ol>'
    # Local import: addadminbox is only needed here to wrap the menu box.
    from invenio.bibrankadminlib import addadminbox
    return addadminbox('<b>%s</b>'% _("Menu"), [out])
def tmpl_admin_delete_form(self, ln, warnings):
    """
    Display admin interface to fetch list of records to delete
    @param ln: language
    @param warnings: list of warning tuples (warning_text, warning_color)
                     see tmpl_warnings, warning_color is optional
    @return: html output
    """
    # load the right message language
    _ = gettext_set_language(ln)
    warnings = self.tmpl_warnings(warnings, ln)
    out = '''
<br />
%s<br />
<br />''' % _("Please enter the ID of the comment/review so that you can view it before deciding whether to delete it or not")
    # BUGFIX: the spacer rows were closed with a stray '<tr>' instead of
    # '</tr>', and the text inputs used an unquoted type=text attribute.
    form = '''
<table>
<tr>
<td>%s</td>
<td><input type="text" name="comid" size="10" maxlength="10" value="" /></td>
</tr>
<tr>
<td><br /></td>
</tr>
</table>
<br />
%s <br/>
<br />
<table>
<tr>
<td>%s</td>
<td><input type="text" name="recid" size="10" maxlength="10" value="" /></td>
</tr>
<tr>
<td><br /></td>
</tr>
</table>
<br />
''' % (_("Comment ID:"),
       _("Or enter a record ID to list all the associated comments/reviews:"),
       _("Record ID:"))
    form_link = "%s/admin/webcomment/webcommentadmin.py/delete?ln=%s" % (CFG_SITE_URL, ln)
    form = self.createhiddenform(action=form_link, method="get", text=form, button=_('View Comment'))
    return warnings + out + form
def tmpl_admin_users(self, ln, users_data):
    """
    Admin table of users who have had comments/reviews reported.
    @param ln: language
    @param users_data: tuple of ct, i.e. (ct, ct, ...)
        where ct is a tuple (total_number_reported, total_comments_reported, total_reviews_reported, total_nb_votes_yes_of_reported,
        total_nb_votes_total_of_reported, user_id, user_email, user_nickname)
        sorted by order of ct having highest total_number_reported
    @return: html output
    """
    _ = gettext_set_language(ln)
    # Indices into each ct tuple of users_data.
    u_reports = 0
    u_comment_reports = 1
    u_reviews_reports = 2
    u_nb_votes_yes = 3
    u_nb_votes_total = 4
    u_uid = 5
    u_email = 6
    u_nickname = 7
    if not users_data:
        return self.tmpl_warnings([(_("There have been no reports so far."), 'green')])
    user_rows = ""
    for utuple in users_data:
        com_label = _("View all %s reported comments") % utuple[u_comment_reports]
        com_link = '''<a href="%s/admin/webcomment/webcommentadmin.py/comments?ln=%s&uid=%s&reviews=0">%s</a><br />''' % \
                   (CFG_SITE_URL, ln, utuple[u_uid], com_label)
        rev_label = _("View all %s reported reviews") % utuple[u_reviews_reports]
        rev_link = '''<a href="%s/admin/webcomment/webcommentadmin.py/comments?ln=%s&uid=%s&reviews=1">%s</a>''' % \
                   (CFG_SITE_URL, ln, utuple[u_uid], rev_label)
        # Fall back to the stored display name when no nickname is set.
        if not utuple[u_nickname]:
            user_info = get_user_info(utuple[u_uid])
            nickname = user_info[2]
        else:
            nickname = utuple[u_nickname]
        # Vote columns are only rendered when reviews are enabled; this
        # must stay in sync with the <th> header cells built below.
        if CFG_WEBCOMMENT_ALLOW_REVIEWS:
            review_row = """
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>"""
            review_row %= (utuple[u_nb_votes_yes],
                           utuple[u_nb_votes_total] - utuple[u_nb_votes_yes],
                           utuple[u_nb_votes_total])
        else:
            review_row = ''
        user_rows += """
<tr>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%(nickname)s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%(email)s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%(uid)s</td>%(review_row)s
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray; font-weight: bold;">%(reports)s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%(com_link)s%(rev_link)s</td>
</tr>""" % { 'nickname'  : nickname,
             'email'     : utuple[u_email],
             'uid'       : utuple[u_uid],
             'reports'   : utuple[u_reports],
             'review_row': review_row,
             'siteurl'   : CFG_SITE_URL,
             'ln'        : ln,
             'com_link'  : CFG_WEBCOMMENT_ALLOW_COMMENTS and com_link or "",
             'rev_link'  : CFG_WEBCOMMENT_ALLOW_REVIEWS and rev_link or ""
           }
    out = "<br />"
    out += _("Here is a list, sorted by total number of reports, of all users who have had a comment reported at least once.")
    # NOTE(review): this string is not %-formatted, so '100%%' is emitted
    # literally in the style attribute — likely meant '100%'; verify.
    out += """
<br />
<br />
<table class="admin_wvar" style="width: 100%%;">
<thead>
<tr class="adminheaderleft">
<th>"""
    out += _("Nickname") + '</th>\n'
    out += '<th>' + _("Email") + '</th>\n'
    out += '<th>' + _("User ID") + '</th>\n'
    if CFG_WEBCOMMENT_ALLOW_REVIEWS > 0:
        out += '<th>' + _("Number positive votes") + '</th>\n'
        out += '<th>' + _("Number negative votes") + '</th>\n'
        out += '<th>' + _("Total number votes") + '</th>\n'
    out += '<th>' + _("Total number of reports") + '</th>\n'
    out += '<th>' + _("View all user's reported comments/reviews") + '</th>\n'
    out += """
</tr>
</thead>
<tbody>%s
</tbody>
</table>
""" % user_rows
    return out
def tmpl_admin_select_comment_checkbox(self, cmt_id):
    """Return a checkbox input named "comidXX" where XX is *cmt_id*."""
    checkbox_name = "comid%i" % int(cmt_id)
    return '<input type="checkbox" name="%s" />' % checkbox_name
def tmpl_admin_user_info(self, ln, nickname, uid, email):
    """Format nickname / uid / email details about a user for the admin UI."""
    _ = gettext_set_language(ln)
    template = """
%(nickname_label)s: %(messaging)s<br />
%(uid_label)s: %(uid)i<br />
%(email_label)s: <a href="mailto:%(email)s">%(email)s</a>"""
    return template % {'nickname_label': _("Nickname"),
                       'messaging': self.create_messaging_link(uid, nickname, ln),
                       'uid_label': _("User ID"),
                       'uid': int(uid),
                       'email_label': _("Email"),
                       'email': email}
def tmpl_admin_review_info(self, ln, reviews, nb_reports, cmt_id, rec_id, status):
    """Format report details about one comment/review for the admin UI."""
    _ = gettext_set_language(ln)
    if reviews:
        reported_label = _("This review has been reported %i times") % int(nb_reports)
    else:
        reported_label = _("This comment has been reported %i times") % int(nb_reports)
    info = """
%(reported_label)s<br />
<a href="%(siteurl)s/%(CFG_SITE_RECORD)s/%(rec_id)i?ln=%(ln)s">%(rec_id_label)s</a><br />
%(cmt_id_label)s""" % {'reported_label': reported_label,
                       'rec_id_label': _("Record") + ' #' + str(rec_id),
                       'siteurl': CFG_SITE_URL,
                       'CFG_SITE_RECORD': CFG_SITE_RECORD,
                       'rec_id': int(rec_id),
                       'cmt_id_label': _("Comment") + ' #' + str(cmt_id),
                       'ln': ln}
    # Status codes 'dm'/'da' are rendered as deleted entries.
    if status in ['dm', 'da']:
        info += '<br /><div style="color:red;">Marked as deleted</div>'
    return info
def tmpl_admin_latest(self, ln, comment_data, comments, error, user_collections, collection):
    """
    Display the latest comments or reviews, filtered by collection.
    @param ln: language
    @param comment_data: same type of tuple as that
        which is return by webcommentadminlib.py/query_get_latest i.e.
        tuple (nickname, uid, date_creation, body, id) if latest comments or
        tuple (nickname, uid, date_creation, body, star_score, id) if latest reviews
    @param comments: non-zero for comments, falsy for reviews
    @param error: 1 when the user may not view the collection,
                  2 when the collection has no comments/reviews
    @param user_collections: collection names offered in the filter popup
    @param collection: currently selected collection name
    @return: html output
    """
    _ = gettext_set_language(ln)
    out = """
<script type='text/javascript'>
function collectionChange()
{
document.collection_form.submit();
}
</script>
"""
    out += '<form method="get" name="collection_form" action="%s/admin/webcomment/webcommentadmin.py/latest?ln=%s&comments=%s">' % (CFG_SITE_URL, ln, comments)
    out += '<input type="hidden" name="ln" value=%s>' % ln
    out += '<input type="hidden" name="comments" value=%s>' % comments
    out += '<div> Filter by collection: <select name="collection" onchange="javascript:collectionChange();">'
    for collection_name in user_collections:
        if collection_name == collection:
            # BUGFIX: emit a valid 'selected' attribute instead of the
            # broken '<option "SELECTED" ...>' markup.
            out += '<option selected="selected" value="%(collection_name)s">%(collection_name)s</option>' % {'collection_name': cgi.escape(collection_name)}
        else:
            out += '<option value="%(collection_name)s">%(collection_name)s</option>' % {'collection_name': cgi.escape(collection_name)}
    out += '</select></div></form><br />'
    if error == 1:
        out += "<i>User is not authorized to view such collection.</i><br />"
        return out
    elif error == 2:
        out += "<i>There are no %s for this collection.</i><br />" % (comments and 'comments' or 'reviews')
        return out
    out += """
<ol>
"""
    for (cmt_tuple, meta_data) in comment_data:
        bibrec_id = meta_data[3]
        content = format_record(bibrec_id, "hs")
        # BUGFIX in both branches below: the href attribute value is now
        # quoted (it was emitted unquoted, which is invalid for URLs).
        if not comments:
            # Reviews additionally carry a star score at index 4.
            out += """
<li> %(content)s <br/> <span class="moreinfo"> <a class="moreinfo" href="%(comment_url)s"> reviewed by %(user)s</a>
(%(stars)s) \"%(body)s\" on <i> %(date)s </i></li> </span> <br/>
""" % {'content': content,
       'comment_url': CFG_SITE_URL + '/'+ CFG_SITE_RECORD +'/' + str(bibrec_id) + '/reviews',
       'user': cmt_tuple[0],
       'stars': '*' * int(cmt_tuple[4]),
       'body': cmt_tuple[3][:20] + '...',
       'date': cmt_tuple[2]}
        else:
            out += """
<li> %(content)s <br/> <span class="moreinfo"> <a class="moreinfo" href="%(comment_url)s"> commented by %(user)s</a>,
\"%(body)s\" on <i> %(date)s </i></li> </span> <br/>
""" % {'content': content,
       'comment_url': CFG_SITE_URL + '/'+ CFG_SITE_RECORD +'/' + str(bibrec_id) + '/comments',
       'user': cmt_tuple[0],
       'body': cmt_tuple[3][:20] + '...',
       'date': cmt_tuple[2]}
    out += """</ol>"""
    return out
def tmpl_admin_hot(self, ln, comment_data, comments, error, user_collections, collection):
    """
    Display the most commented/reviewed records, filtered by collection.
    @param ln: language
    @param comment_data: same type of tuple as that
        which is return by webcommentadminlib.py/query_get_hot i.e.
        tuple (id_bibrec, date_last_comment, users, count)
    @param comments: non-zero for comments, falsy for reviews
    @param error: 1 when the user may not view the collection,
                  2 when the collection has no comments/reviews
    @param user_collections: collection names offered in the filter popup
    @param collection: currently selected collection name
    @return: html output
    """
    _ = gettext_set_language(ln)
    out = """
<script type='text/javascript'>
function collectionChange()
{
document.collection_form.submit();
}
</script>
"""
    out += '<form method="get" name="collection_form" action="%s/admin/webcomment/webcommentadmin.py/hot?ln=%s&comments=%s">' % (CFG_SITE_URL, ln, comments)
    out += '<input type="hidden" name="ln" value=%s>' % ln
    out += '<input type="hidden" name="comments" value=%s>' % comments
    out += '<div> Filter by collection: <select name="collection" onchange="javascript:collectionChange();">'
    for collection_name in user_collections:
        if collection_name == collection:
            # BUGFIX: emit a valid 'selected' attribute instead of the
            # broken '<option "SELECTED" ...>' markup.
            out += '<option selected="selected" value="%(collection_name)s">%(collection_name)s</option>' % {'collection_name': cgi.escape(collection_name)}
        else:
            out += '<option value="%(collection_name)s">%(collection_name)s</option>' % {'collection_name': cgi.escape(collection_name)}
    out += '</select></div></form><br />'
    if error == 1:
        out += "<i>User is not authorized to view such collection.</i><br />"
        return out
    elif error == 2:
        out += "<i>There are no %s for this collection.</i><br />" % (comments and 'comments' or 'reviews')
        return out
    # BUGFIX: the list was closed with </ol> below but never opened
    # (compare tmpl_admin_latest, which opens it before the loop).
    out += """
<ol>
"""
    for cmt_tuple in comment_data:
        bibrec_id = cmt_tuple[0]
        content = format_record(bibrec_id, "hs")
        last_comment_date = cmt_tuple[1]
        total_users = cmt_tuple[2]
        total_comments = cmt_tuple[3]
        if comments:
            comment_url = CFG_SITE_URL + '/'+ CFG_SITE_RECORD +'/' + str(bibrec_id) + '/comments'
            str_comment = int(total_comments) > 1 and 'comments' or 'comment'
        else:
            comment_url = CFG_SITE_URL + '/'+ CFG_SITE_RECORD +'/' + str(bibrec_id) + '/reviews'
            str_comment = int(total_comments) > 1 and 'reviews' or 'review'
        # BUGFIX: the href attribute value is now quoted (it was emitted
        # unquoted, which is invalid for URLs).
        out += """
<li> %(content)s <br/> <span class="moreinfo"> <a class="moreinfo" href="%(comment_url)s"> %(total_comments)s
%(str_comment)s</a>
(%(total_users)s %(user)s), latest on <i> %(last_comment_date)s </i></li> </span> <br/>
""" % {'content': content,
       'comment_url': comment_url,
       'total_comments': total_comments,
       'str_comment': str_comment,
       'total_users': total_users,
       'user': int(total_users) > 1 and 'users' or 'user',
       'last_comment_date': last_comment_date}
    out += """</ol>"""
    return out
def tmpl_admin_comments(self, ln, uid, comID, recID, comment_data, reviews, error, user_collections, collection):
"""
@param comment_data: same type of tuple as that
which is returned by webcomment.py/query_retrieve_comments_or_remarks i.e.
tuple of comment where comment is
tuple (nickname,
date_creation,
body,
id) if ranking disabled or
tuple (nickname,
date_creation,
body,
nb_votes_yes,
nb_votes_total,
star_score,
title,
id)
"""
_ = gettext_set_language(ln)
coll_form = """
<script type='text/javascript'>
function collectionChange()
{
document.collection_form.submit();
}
</script>
"""
coll_form += '<form method="get" name="collection_form" action="%s/admin/webcomment/webcommentadmin.py/comments?ln=%s&reviews=%s">' % (CFG_SITE_URL, ln, reviews)
coll_form += '<input type="hidden" name="ln" value=%s>' % ln
coll_form += '<input type="hidden" name="reviews" value=%s>' % reviews
coll_form += '<div> Filter by collection: <select name="collection" onchange="javascript:collectionChange();">'
for collection_name in user_collections:
if collection_name == collection:
coll_form += '<option "SELECTED" value="%(collection_name)s">%(collection_name)s</option>' % {'collection_name': cgi.escape(collection_name)}
else:
coll_form += '<option value="%(collection_name)s">%(collection_name)s</option>' % {'collection_name': cgi.escape(collection_name)}
coll_form += '</select></div></form><br />'
if error == 1:
coll_form += "<i>User is not authorized to view such collection.</i><br />"
return coll_form
elif error == 2:
coll_form += "<i>There are no %s for this collection.</i><br />" % (reviews and 'reviews' or 'comments')
return coll_form
comments = []
comments_info = []
checkboxes = []
users = []
for (cmt_tuple, meta_data) in comment_data:
if reviews:
comments.append(self.tmpl_get_comment_with_ranking(None,#request object
ln,
cmt_tuple[0],#nickname
cmt_tuple[1],#userid
cmt_tuple[2],#date_creation
cmt_tuple[3],#body
cmt_tuple[9],#status
0,
cmt_tuple[5],#nb_votes_total
cmt_tuple[4],#nb_votes_yes
cmt_tuple[6],#star_score
cmt_tuple[7]))#title
else:
comments.append(self.tmpl_get_comment_without_ranking(None,#request object
ln,
cmt_tuple[0],#nickname
cmt_tuple[1],#userid
cmt_tuple[2],#date_creation
cmt_tuple[3],#body
cmt_tuple[5],#status
0,
None, #reply_link
None, #report_link
None, #undelete_link
None)) #delete_links
users.append(self.tmpl_admin_user_info(ln,
meta_data[0], #nickname
meta_data[1], #uid
meta_data[2]))#email
if reviews:
status = cmt_tuple[9]
else:
status = cmt_tuple[5]
comments_info.append(self.tmpl_admin_review_info(ln,
reviews,
meta_data[5], # nb abuse reports
meta_data[3], # cmt_id
meta_data[4], # rec_id
status)) # status
checkboxes.append(self.tmpl_admin_select_comment_checkbox(meta_data[3]))
form_link = "%s/admin/webcomment/webcommentadmin.py/del_com?ln=%s" % (CFG_SITE_URL, ln)
out = """
<table class="admin_wvar" style="width:100%%;">
<thead>
<tr class="adminheaderleft">
<th>%(review_label)s</th>
<th>%(written_by_label)s</th>
<th>%(review_info_label)s</th>
<th>%(select_label)s</th>
</tr>
</thead>
<tbody>""" % {'review_label': reviews and _("Review") or _("Comment"),
'written_by_label': _("Written by"),
'review_info_label': _("General informations"),
'select_label': _("Select")}
for i in range (0, len(comments)):
out += """
<tr>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintd" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
</tr>""" % (comments[i], users[i], comments_info[i], checkboxes[i])
out += """
</tbody>
</table>"""
if reviews:
action_display = {
'delete': _('Delete selected reviews'),
'unreport': _('Suppress selected abuse report'),
'undelete': _('Undelete selected reviews')
}
else:
action_display = {
'undelete': _('Undelete selected comments'),
'delete': _('Delete selected comments'),
'unreport': _('Suppress selected abuse report')
}
form = self.tmpl_create_multiple_actions_form(form_name="admin_comment",
form_action=form_link,
method="post",
action_display=action_display,
action_field_name='action',
button_label=_("OK"),
button_name="okbutton",
content=out)
if uid > 0:
header = '<br />'
if reviews:
header += _("Here are the reported reviews of user %s") % uid
else:
header += _("Here are the reported comments of user %s") % uid
header += '<br /><br />'
if comID > 0 and recID <= 0 and uid <= 0:
if reviews:
header = '<br />' +_("Here is review %s")% comID + '<br /><br />'
else:
header = '<br />' +_("Here is comment %s")% comID + '<br /><br />'
if uid > 0 and comID > 0 and recID <= 0:
if reviews:
header = '<br />' + _("Here is review %(x_cmtID)s written by user %(x_user)s") % {'x_cmtID': comID, 'x_user': uid}
else:
header = '<br />' + _("Here is comment %(x_cmtID)s written by user %(x_user)s") % {'x_cmtID': comID, 'x_user': uid}
header += '<br/ ><br />'
if comID <= 0 and recID <= 0 and uid <= 0:
header = '<br />'
if reviews:
header += _("Here are all reported reviews sorted by the most reported")
else:
header += _("Here are all reported comments sorted by the most reported")
header += "<br /><br />"
elif recID > 0:
header = '<br />'
if reviews:
header += _("Here are all reviews for record %i, sorted by the most reported" % recID)
header += '<br /><a href="%s/admin/webcomment/webcommentadmin.py/delete?comid=&recid=%s&reviews=0">%s</a>' % (CFG_SITE_URL, recID, _("Show comments"))
else:
header += _("Here are all comments for record %i, sorted by the most reported" % recID)
header += '<br /><a href="%s/admin/webcomment/webcommentadmin.py/delete?comid=&recid=%s&reviews=1">%s</a>' % (CFG_SITE_URL, recID, _("Show reviews"))
header += "<br /><br />"
return coll_form + header + form
def tmpl_admin_del_com(self, del_res, ln=CFG_SITE_LANG):
"""
@param del_res: list of the following tuple (comment_id, was_successfully_deleted),
was_successfully_deleted is boolean (0=false, >0=true
"""
_ = gettext_set_language(ln)
table_rows = ''
for deltuple in del_res:
table_rows += """
<tr>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
</tr>""" % (deltuple[0], deltuple[1]>0 and _("Yes") or "<span class=\"important\">" +_("No") + "</span>")
out = """
<table class="admin_wvar">
<tr class="adminheaderleft">
<td style="padding-right:10px;">%s</td>
<td>%s</td>
</tr>%s
<table>""" % (_("comment ID"), _("successfully deleted"), table_rows)
return out
def tmpl_admin_undel_com(self, del_res, ln=CFG_SITE_LANG):
"""
@param del_res: list of the following tuple (comment_id, was_successfully_undeleted),
was_successfully_undeleted is boolean (0=false, >0=true
"""
_ = gettext_set_language(ln)
table_rows = ''
for deltuple in del_res:
table_rows += """
<tr>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
</tr>""" % (deltuple[0], deltuple[1]>0 and _("Yes") or "<span class=\"important\">" +_("No") + "</span>")
out = """
<table class="admin_wvar">
<tr class="adminheaderleft">
<td style="padding-right:10px;">%s</td>
<td>%s</td>
</tr>%s
<table>""" % (_("comment ID"), _("successfully undeleted"), table_rows)
return out
def tmpl_admin_suppress_abuse_report(self, del_res, ln=CFG_SITE_LANG):
"""
@param del_res: list of the following tuple (comment_id, was_successfully_deleted),
was_successfully_deleted is boolean (0=false, >0=true
"""
_ = gettext_set_language(ln)
table_rows = ''
for deltuple in del_res:
table_rows += """
<tr>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
</tr>""" % (deltuple[0], deltuple[1]>0 and _("Yes") or "<span class=\"important\">" +_("No") + "</span>")
out = """
<table class="admin_wvar">
<tr class="adminheaderleft">
<td style ="padding-right: 10px;">%s</td>
<td>%s</td>
</tr>%s
<table>""" % (_("comment ID"), _("successfully suppressed abuse report"), table_rows)
return out
def tmpl_mini_review(self, recID, ln=CFG_SITE_LANG, action='SUBMIT',
avg_score=0, nb_comments_total=0):
"""Display the mini version of reviews (only the grading part)"""
_ = gettext_set_language(ln)
url = '%s/%s/%s/reviews/add?ln=%s&action=%s' % (CFG_SITE_URL, CFG_SITE_RECORD, recID, ln, action)
if avg_score > 0:
score = _("Average review score: %(x_nb_score)s based on %(x_nb_reviews)s reviews") % \
{'x_nb_score': '<b>%.1f</b>' % avg_score,
'x_nb_reviews': nb_comments_total}
else:
score = '(' +_("Not yet reviewed") + ')'
if avg_score == 5:
s1, s2, s3, s4, s5 = 'full', 'full', 'full', 'full', 'full'
elif avg_score >= 4.5:
s1, s2, s3, s4, s5 = 'full', 'full', 'full', 'full', 'half'
elif avg_score >= 4:
s1, s2, s3, s4, s5 = 'full', 'full', 'full', 'full', ''
elif avg_score >= 3.5:
s1, s2, s3, s4, s5 = 'full', 'full', 'full', 'half', ''
elif avg_score >= 3:
s1, s2, s3, s4, s5 = 'full', 'full', 'full', '', ''
elif avg_score >= 2.5:
s1, s2, s3, s4, s5 = 'full', 'full', 'half', '', ''
elif avg_score >= 2:
s1, s2, s3, s4, s5 = 'full', 'full', '', '', ''
elif avg_score >= 1.5:
s1, s2, s3, s4, s5 = 'full', 'half', '', '', ''
elif avg_score == 1:
s1, s2, s3, s4, s5 = 'full', '', '', '', ''
else:
s1, s2, s3, s4, s5 = '', '', '', '', ''
out = '''
<small class="detailedRecordActions">%(rate)s:</small><br /><br />
<div style="margin:auto;width:160px;">
<span style="display:none;">Rate this document:</span>
<div class="star %(s1)s" ><a href="%(url)s&score=1">1</a>
<div class="star %(s2)s" ><a href="%(url)s&score=2">2</a>
<div class="star %(s3)s" ><a href="%(url)s&score=3">3</a>
<div class="star %(s4)s" ><a href="%(url)s&score=4">4</a>
<div class="star %(s5)s" ><a href="%(url)s&score=5">5</a></div></div></div></div></div>
<div style="clear:both"> </div>
</div>
<small>%(score)s</small>
''' % {'url': url,
'score': score,
'rate': _("Rate this document"),
's1': s1,
's2': s2,
's3': s3,
's4': s4,
's5': s5
}
return out
    def tmpl_email_new_comment_header(self, recID, title, reviews,
                                      comID, report_numbers,
                                      can_unsubscribe=True,
                                      ln=CFG_SITE_LANG, uid=-1):
        """
        Prints the email header used to notify subscribers that a new
        comment/review was added.
        @param recid: the ID of the commented/reviewed record
        @param title: the title of the commented/reviewed record
        @param reviews: True if it is a review, else if a comment
        @param comID: the comment ID
        @param report_numbers: the report number(s) of the record
        @param can_unsubscribe: True if user can unsubscribe from alert
        @param ln: language
        @param uid: id of the posting user, used to look up the nickname
        @return: plain-text greeting plus a link to the record
        """
        # load the right message language
        _ = gettext_set_language(ln)
        user_info = collect_user_info(uid)
        # The parenthesized 'reviews and A or B' selects the template first,
        # then the chosen string is %-interpolated with the site/user values.
        out = _("Hello:") + '\n\n' + \
              (reviews and _("The following review was sent to %(CFG_SITE_NAME)s by %(user_nickname)s:") or \
               _("The following comment was sent to %(CFG_SITE_NAME)s by %(user_nickname)s:")) % \
              {'CFG_SITE_NAME': CFG_SITE_NAME,
               'user_nickname': user_info['nickname']}
        out += '\n(<%s>)' % (CFG_SITE_URL + '/'+ CFG_SITE_RECORD +'/' + str(recID))
        out += '\n\n\n'
        return out
    def tmpl_email_new_comment_footer(self, recID, title, reviews,
                                      comID, report_numbers,
                                      can_unsubscribe=True,
                                      ln=CFG_SITE_LANG):
        """
        Prints the email footer used to notify subscribers that a new
        comment/review was added.
        @param recid: the ID of the commented/reviewed record
        @param title: the title of the commented/reviewed record
        @param reviews: True if it is a review, else if a comment
        @param comID: the comment ID
        @param report_numbers: the report number(s) of the record
        @param can_unsubscribe: True if user can unsubscribe from alert
        @param ln: language
        @return: plain-text signature block with post/reply/unsubscribe links
        """
        # load the right message language
        _ = gettext_set_language(ln)
        out = '\n\n-- \n'
        out += _("This is an automatic message, please don't reply to it.")
        out += '\n'
        out += _("To post another comment, go to <%(x_url)s> instead.") % \
               {'x_url': CFG_SITE_URL + '/'+ CFG_SITE_RECORD +'/' + str(recID) + \
                (reviews and '/reviews' or '/comments') + '/add'}
        out += '\n'
        # Direct replies only exist for comments, not for reviews.
        if not reviews:
            out += _("To specifically reply to this comment, go to <%(x_url)s>") % \
                   {'x_url': CFG_SITE_URL + '/'+ CFG_SITE_RECORD +'/' + str(recID) + \
                    '/comments/add?action=REPLY&comid=' + str(comID)}
            out += '\n'
        if can_unsubscribe:
            out += _("To unsubscribe from this discussion, go to <%(x_url)s>") % \
                   {'x_url': CFG_SITE_URL + '/'+ CFG_SITE_RECORD +'/' + str(recID) + \
                    '/comments/unsubscribe'}
            out += '\n'
        out += _("For any question, please use <%(CFG_SITE_SUPPORT_EMAIL)s>") % \
               {'CFG_SITE_SUPPORT_EMAIL': CFG_SITE_SUPPORT_EMAIL}
        return out
def tmpl_email_new_comment_admin(self, recID):
"""
Prints the record information used in the email to notify the
system administrator that a new comment has been posted.
@param recID: the ID of the commented/reviewed record
"""
out = ""
title = get_fieldvalues(recID, "245__a")
authors = ', '.join(get_fieldvalues(recID, "100__a") + get_fieldvalues(recID, "700__a"))
#res_author = ""
#res_rep_num = ""
#for author in authors:
# res_author = res_author + ' ' + author
dates = get_fieldvalues(recID, "260__c")
report_nums = get_fieldvalues(recID, "037__a")
report_nums += get_fieldvalues(recID, "088__a")
report_nums = ', '.join(report_nums)
#for rep_num in report_nums:
# res_rep_num = res_rep_num + ', ' + rep_num
out += " Title = %s \n" % (title and title[0] or "No Title")
out += " Authors = %s \n" % authors
if dates:
out += " Date = %s \n" % dates[0]
out += " Report number = %s" % report_nums
return out
    def tmpl_page_do_not_leave_comment_page_js(self, ln):
        """
        Code to ask user confirmation when leaving the page, so that the
        comment is not lost if clicking by mistake on links.
        @param ln: the user language
        @return: a <script> snippet installing a window.onbeforeunload handler
        """
        # load the right message language
        _ = gettext_set_language(ln)
        # The handler compares the textarea content against its initial value;
        # when CKEditor is active it relies on the editor's checkDirty() flag.
        out = '''
<script language="JavaScript">
    var initial_comment_value = document.forms.cmtForm.msg.value;
    var user_must_confirm_before_leaving_page = true;
    window.onbeforeunload = confirmExit;
    function confirmExit() {
        var editor_type_field = document.getElementById('%(name)seditortype');
        if (editor_type_field && editor_type_field.value == 'ckeditor') {
            var oEditor = CKEDITOR.instances.%(name)s;
            if (user_must_confirm_before_leaving_page && oEditor.checkDirty()) {
                /* Might give false positives, when editor pre-loaded
                   with content. But is better than the opposite */
                return "%(message)s";
            }
        } else {
            if (user_must_confirm_before_leaving_page && document.forms.cmtForm.msg.value != initial_comment_value){
                return "%(message)s";
            }
        }
    }
</script>
''' % {'message': _('Your comment will be lost.').replace('"', '\\"'),
       'name': 'msg'}
        return out
| gpl-2.0 |
danieljaouen/ansible | test/runner/lib/changes.py | 18 | 5945 | """Detect changes in Ansible code."""
from __future__ import absolute_import, print_function
import re
import os
from lib.util import (
ApplicationError,
SubprocessError,
MissingEnvironmentVariable,
CommonConfig,
display,
)
from lib.http import (
HttpClient,
urlencode,
)
from lib.git import (
Git,
)
class InvalidBranch(ApplicationError):
    """Exception for invalid branch specification."""
    def __init__(self, branch, reason):
        """
        :type branch: str
        :type reason: str
        """
        super(InvalidBranch, self).__init__('Invalid branch: %s\n%s' % (branch, reason))
        self.branch = branch
class ChangeDetectionNotSupported(ApplicationError):
    """Exception for cases where change detection is not supported."""
class ShippableChanges(object):
    """Change information for Shippable build."""
    def __init__(self, args, git):
        """
        :type args: CommonConfig
        :type git: Git
        """
        self.args = args
        try:
            # All build context comes from Shippable-provided environment variables.
            self.branch = os.environ['BRANCH']
            self.is_pr = os.environ['IS_PULL_REQUEST'] == 'true'
            self.is_tag = os.environ['IS_GIT_TAG'] == 'true'
            self.commit = os.environ['COMMIT']
            self.project_id = os.environ['PROJECT_ID']
            self.commit_range = os.environ['SHIPPABLE_COMMIT_RANGE']
        except KeyError as ex:
            # Report the missing CI variable by name instead of a bare KeyError.
            raise MissingEnvironmentVariable(name=ex.args[0])
        if self.is_tag:
            raise ChangeDetectionNotSupported('Change detection is not supported for tags.')
        if self.is_pr:
            # Pull request: diff against the commit range provided by Shippable.
            self.paths = sorted(git.get_diff_names([self.commit_range]))
            self.diff = git.get_diff([self.commit_range])
        else:
            # Branch build: diff against the last commit with a successful run.
            merge_runs = self.get_merge_runs(self.project_id, self.branch)
            last_successful_commit = self.get_last_successful_commit(git, merge_runs)
            if last_successful_commit:
                self.paths = sorted(git.get_diff_names([last_successful_commit, self.commit]))
                self.diff = git.get_diff([last_successful_commit, self.commit])
            else:
                # first run for branch
                self.paths = None # act as though change detection not enabled, do not filter targets
                self.diff = []
    def get_merge_runs(self, project_id, branch):
        """
        Fetch the list of non-PR runs for the given project/branch from the
        Shippable REST API.

        :type project_id: str
        :type branch: str
        :rtype: list[dict]
        """
        params = dict(
            isPullRequest='false',
            projectIds=project_id,
            branch=branch,
        )
        client = HttpClient(self.args, always=True)
        response = client.get('https://api.shippable.com/runs?%s' % urlencode(params))
        return response.json()
    @staticmethod
    def get_last_successful_commit(git, merge_runs):
        """
        Return the most recent commit with a successful run, or None when no
        such commit can be determined (all tests are then executed).

        :type git: Git
        :type merge_runs: dict | list[dict]
        :rtype: str
        """
        # A dict (rather than list) response with id 4004 indicates the API
        # could not find the project -- presumably an API error code; TODO confirm.
        if 'id' in merge_runs and merge_runs['id'] == 4004:
            display.warning('Unable to find project. Cannot determine changes. All tests will be executed.')
            return None
        # Walk the runs oldest-to-newest so the newest success wins.
        merge_runs = sorted(merge_runs, key=lambda r: r['createdAt'])
        known_commits = set()
        last_successful_commit = None
        for merge_run in merge_runs:
            commit_sha = merge_run['commitSha']
            if commit_sha not in known_commits:
                known_commits.add(commit_sha)
                # statusCode 30 -- presumably 'success' in the Shippable API; TODO confirm
                if merge_run['statusCode'] == 30:
                    # Only use commits that still exist in the local repository.
                    if git.is_valid_ref(commit_sha):
                        last_successful_commit = commit_sha
        if last_successful_commit is None:
            display.warning('No successful commit found. All tests will be executed.')
        return last_successful_commit
class LocalChanges(object):
    """Change information for local work."""
    def __init__(self, args, git):
        """
        :type args: CommonConfig
        :type git: Git
        """
        self.args = args
        self.current_branch = git.get_branch()
        # Change detection only makes sense from a feature branch, never from
        # an official (devel / stable-X.Y) branch.
        if self.is_official_branch(self.current_branch):
            raise InvalidBranch(branch=self.current_branch,
                                reason='Current branch is not a feature branch.')
        self.fork_branch = None
        self.fork_point = None
        self.local_branches = sorted(git.get_branches())
        self.official_branches = sorted([b for b in self.local_branches if self.is_official_branch(b)])
        # Try each official branch in order until one yields a fork point.
        for self.fork_branch in self.official_branches:
            try:
                self.fork_point = git.get_branch_fork_point(self.fork_branch)
                break
            except SubprocessError:
                pass
        if self.fork_point is None:
            raise ApplicationError('Unable to auto-detect fork branch and fork point.')
        # tracked files (including unchanged)
        self.tracked = sorted(git.get_file_names(['--cached']))
        # untracked files (except ignored)
        self.untracked = sorted(git.get_file_names(['--others', '--exclude-standard']))
        # tracked changes (including deletions) committed since the branch was forked
        self.committed = sorted(git.get_diff_names([self.fork_point, 'HEAD']))
        # tracked changes (including deletions) which are staged
        self.staged = sorted(git.get_diff_names(['--cached']))
        # tracked changes (including deletions) which are not staged
        self.unstaged = sorted(git.get_diff_names([]))
        # diff of all tracked files from fork point to working copy
        self.diff = git.get_diff([self.fork_point])
    @staticmethod
    def is_official_branch(name):
        """
        Return True when the branch is upstream-maintained: 'devel' or a
        'stable-X.Y' release branch.

        :type name: str
        :rtype: bool
        """
        if name == 'devel':
            return True
        if re.match(r'^stable-[0-9]+\.[0-9]+$', name):
            return True
        return False
| gpl-3.0 |
xiangel/hue | desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/Util/asn1.py | 122 | 12727 | # -*- coding: ascii -*-
#
# Util/asn1.py : Minimal support for ASN.1 DER binary encoding.
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
from Crypto.Util.number import long_to_bytes, bytes_to_long
import sys
from Crypto.Util.py3compat import *
__all__ = [ 'DerObject', 'DerInteger', 'DerOctetString', 'DerNull', 'DerSequence', 'DerObjectId' ]
class DerObject:
    """Base class for defining a single DER object.
    Instantiate this class ONLY when you have to decode a DER element.
    """
    # Known TAG types (ASN.1 universal tag numbers for the supported types)
    typeTags = { 'SEQUENCE': 0x30, 'BIT STRING': 0x03, 'INTEGER': 0x02,
                 'OCTET STRING': 0x04, 'NULL': 0x05, 'OBJECT IDENTIFIER': 0x06 }
    def __init__(self, ASN1Type=None, payload=b('')):
        """Initialize the DER object according to a specific type.
        The ASN.1 type is either specified as the ASN.1 string (e.g.
        'SEQUENCE'), directly with its numerical tag or with no tag
        at all (None)."""
        if isInt(ASN1Type) or ASN1Type is None:
            self.typeTag = ASN1Type
        else:
            # A 1-character string is taken as the raw tag byte; otherwise the
            # symbolic name is looked up (unknown names yield typeTag == None).
            if len(ASN1Type)==1:
                self.typeTag = ord(ASN1Type)
            else:
                self.typeTag = self.typeTags.get(ASN1Type)
        self.payload = payload
    def isType(self, ASN1Type):
        # True when this object's tag matches the given symbolic type name.
        return self.typeTags[ASN1Type]==self.typeTag
    def _lengthOctets(self, payloadLen):
        """Return a byte string that encodes the given payload length (in
        bytes) in a format suitable for a DER length tag (L).
        """
        if payloadLen>127:
            # Long form: first octet is 0x80 | number of subsequent length octets.
            encoding = long_to_bytes(payloadLen)
            return bchr(len(encoding)+128) + encoding
        return bchr(payloadLen)
    def encode(self):
        """Return a complete DER element, fully encoded as a TLV."""
        return bchr(self.typeTag) + self._lengthOctets(len(self.payload)) + self.payload
    def _decodeLen(self, idx, der):
        """Given a (part of a) DER element, and an index to the first byte of
        a DER length tag (L), return a tuple with the payload size,
        and the index of the first byte of the such payload (V).
        Raises a ValueError exception if the DER length is invalid.
        Raises an IndexError exception if the DER element is too short.
        """
        length = bord(der[idx])
        if length<=127:
            return (length,idx+1)
        # Long form: the low 7 bits give how many octets hold the actual length.
        payloadLength = bytes_to_long(der[idx+1:idx+1+(length & 0x7F)])
        # DER requires minimal encoding: lengths <= 127 must use the short form.
        if payloadLength<=127:
            raise ValueError("Not a DER length tag.")
        return (payloadLength, idx+1+(length & 0x7F))
    def decode(self, derEle, noLeftOvers=0):
        """Decode a complete DER element, and re-initializes this
        object with it.
        @param derEle A complete DER element. It must start with a DER T
                tag.
        @param noLeftOvers Indicate whether it is acceptable to complete the
                parsing of the DER element and find that not all
                bytes in derEle have been used.
        @return Index of the first unused byte in the given DER element.
        Raises a ValueError exception in case of parsing errors.
        Raises an IndexError exception if the DER element is too short.
        """
        try:
            self.typeTag = bord(derEle[0])
            # Low 5 bits all set means a multi-byte ("high tag number") tag,
            # which this minimal parser does not support.
            if (self.typeTag & 0x1F)==0x1F:
                raise ValueError("Unsupported DER tag")
            (length,idx) = self._decodeLen(1, derEle)
            if noLeftOvers and len(derEle) != (idx+length):
                raise ValueError("Not a DER structure")
            self.payload = derEle[idx:idx+length]
        except IndexError:
            raise ValueError("Not a valid DER SEQUENCE.")
        return idx+length
class DerInteger(DerObject):
    def __init__(self, value = 0):
        """Class to model an INTEGER DER element.
        Limitation: only non-negative values are supported.
        """
        DerObject.__init__(self, 'INTEGER')
        self.value = value
    def encode(self):
        """Return a complete INTEGER DER element, fully encoded as a TLV."""
        self.payload = long_to_bytes(self.value)
        # DER INTEGERs are two's complement: prepend a zero octet when the
        # most significant bit is set, so the value stays non-negative.
        if bord(self.payload[0])>127:
            self.payload = bchr(0x00) + self.payload
        return DerObject.encode(self)
    def decode(self, derEle, noLeftOvers=0):
        """Decode a complete INTEGER DER element, and re-initializes this
        object with it.
        @param derEle A complete INTEGER DER element. It must start with a DER
                INTEGER tag.
        @param noLeftOvers Indicate whether it is acceptable to complete the
                parsing of the DER element and find that not all
                bytes in derEle have been used.
        @return Index of the first unused byte in the given DER element.
        Raises a ValueError exception if the DER element is not a
        valid non-negative INTEGER.
        Raises an IndexError exception if the DER element is too short.
        """
        tlvLength = DerObject.decode(self, derEle, noLeftOvers)
        if self.typeTag!=self.typeTags['INTEGER']:
            raise ValueError ("Not a DER INTEGER.")
        # A set MSB would mean a negative value, which is not supported.
        if bord(self.payload[0])>127:
            raise ValueError ("Negative INTEGER.")
        self.value = bytes_to_long(self.payload)
        return tlvLength
class DerSequence(DerObject):
    """Class to model a SEQUENCE DER element.
    This object behave like a dynamic Python sequence.
    Sub-elements that are INTEGERs, look like Python integers.
    Any other sub-element is a binary string encoded as the complete DER
    sub-element (TLV).
    """
    def __init__(self, startSeq=None):
        """Initialize the SEQUENCE DER object. Always empty
        initially (None is used instead of a mutable default argument)."""
        DerObject.__init__(self, 'SEQUENCE')
        if startSeq is None:
            self._seq = []
        else:
            self._seq = startSeq
    ## A few methods to make it behave like a python sequence
    def __delitem__(self, n):
        del self._seq[n]
    def __getitem__(self, n):
        return self._seq[n]
    def __setitem__(self, key, value):
        self._seq[key] = value
    def __setslice__(self,i,j,sequence):
        # Python 2 only; Python 3 routes slice assignment through __setitem__.
        self._seq[i:j] = sequence
    def __delslice__(self,i,j):
        # Python 2 only; Python 3 routes slice deletion through __delitem__.
        del self._seq[i:j]
    def __getslice__(self, i, j):
        # Python 2 only; Python 3 routes slicing through __getitem__.
        return self._seq[max(0, i):max(0, j)]
    def __len__(self):
        return len(self._seq)
    def append(self, item):
        return self._seq.append(item)
    def hasInts(self):
        """Return the number of items in this sequence that are numbers."""
        # Bug fix: len(filter(...)) raises TypeError on Python 3, where
        # filter() returns an iterator; list() is a no-op copy on Python 2.
        return len(list(filter(isInt, self._seq)))
    def hasOnlyInts(self):
        """Return True if all items in this sequence are numbers."""
        return self._seq and self.hasInts()==len(self._seq)
    def encode(self):
        """Return the DER encoding for the ASN.1 SEQUENCE, containing
        the non-negative integers and longs added to this object.
        Limitation: Raises a ValueError exception if it some elements
        in the sequence are neither Python integers nor complete DER INTEGERs.
        """
        self.payload = b('')
        for item in self._seq:
            try:
                # Raw byte strings (complete TLVs) are concatenated as-is.
                self.payload += item
            except:
                # Otherwise try to encode the item as a DER INTEGER.
                try:
                    self.payload += DerInteger(item).encode()
                except:
                    raise ValueError("Trying to DER encode an unknown object")
        return DerObject.encode(self)
    def decode(self, derEle, noLeftOvers=0):
        """Decode a complete SEQUENCE DER element, and re-initializes this
        object with it.
        @param derEle A complete SEQUENCE DER element. It must start with a DER
                SEQUENCE tag.
        @param noLeftOvers Indicate whether it is acceptable to complete the
                parsing of the DER element and find that not all
                bytes in derEle have been used.
        @return Index of the first unused byte in the given DER element.
        DER INTEGERs are decoded into Python integers. Any other DER
        element is not decoded. Its validity is not checked.
        Raises a ValueError exception if the DER element is not a
        valid DER SEQUENCE.
        Raises an IndexError exception if the DER element is too short.
        """
        self._seq = []
        try:
            tlvLength = DerObject.decode(self, derEle, noLeftOvers)
            if self.typeTag!=self.typeTags['SEQUENCE']:
                raise ValueError("Not a DER SEQUENCE.")
            # Scan one TLV at once
            idx = 0
            while idx<len(self.payload):
                typeTag = bord(self.payload[idx])
                if typeTag==self.typeTags['INTEGER']:
                    newInteger = DerInteger()
                    idx += newInteger.decode(self.payload[idx:])
                    self._seq.append(newInteger.value)
                else:
                    # Non-INTEGER elements are kept as their raw, undecoded TLV.
                    itemLen,itemIdx = self._decodeLen(idx+1,self.payload)
                    self._seq.append(self.payload[idx:itemIdx+itemLen])
                    idx = itemIdx + itemLen
        except IndexError:
            raise ValueError("Not a valid DER SEQUENCE.")
        return tlvLength
class DerOctetString(DerObject):
    """Class to model an OCTET STRING DER element."""
    def __init__(self, value = b('')):
        DerObject.__init__(self, 'OCTET STRING')
        self.payload = value
    def decode(self, derEle, noLeftOvers=0):
        """Decode a complete OCTET STRING DER element, and re-initialize
        this object with it.

        @return Index of the first unused byte in the given DER element.
        Raises a ValueError exception if the element is not an OCTET STRING.
        """
        # Bug fix: the original unbound call DerObject.decode(derEle, noLeftOvers)
        # omitted 'self', so decoding could never succeed.
        p = DerObject.decode(self, derEle, noLeftOvers)
        if not self.isType("OCTET STRING"):
            raise ValueError("Not a valid OCTET STRING.")
        return p
class DerNull(DerObject):
    # ASN.1 NULL element: tag 0x05 with an (implicitly) empty payload.
    def __init__(self):
        DerObject.__init__(self, 'NULL')
class DerObjectId(DerObject):
    """Class to model an OBJECT IDENTIFIER DER element."""
    def __init__(self):
        DerObject.__init__(self, 'OBJECT IDENTIFIER')
    def decode(self, derEle, noLeftOvers=0):
        """Decode a complete OBJECT IDENTIFIER DER element, and re-initialize
        this object with it.

        @return Index of the first unused byte in the given DER element.
        Raises a ValueError exception if the element is not an OBJECT IDENTIFIER.
        """
        # Bug fix: the original unbound call DerObject.decode(derEle, noLeftOvers)
        # omitted 'self', so decoding could never succeed.
        p = DerObject.decode(self, derEle, noLeftOvers)
        if not self.isType("OBJECT IDENTIFIER"):
            raise ValueError("Not a valid OBJECT IDENTIFIER.")
        return p
def isInt(x):
    """Return 1 when x supports addition with an int (number-like), else 0."""
    try:
        0 + x
    except TypeError:
        return 0
    return 1
| apache-2.0 |
xyx119/TeamTalk | win-client/3rdParty/src/json/scons-tools/globtool.py | 256 | 1667 | import fnmatch
import os
def generate( env ):
    """Attach the Glob() helper method to SCons Environment objects."""
    def Glob( env, includes = None, excludes = None, dir = '.' ):
        """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.')
        helper function to environment.
        Glob both the file-system files.
        includes: list of file name pattern included in the return list when matched.
        excludes: list of file name pattern exluced from the return list.
        Example:
        sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" )
        """
        def filterFilename(path):
            # Only regular files are candidates; directories are skipped.
            abs_path = os.path.join( dir, path )
            if not os.path.isfile(abs_path):
                return 0
            fn = os.path.basename(path)
            match = 0
            for include in includes:
                if fnmatch.fnmatchcase( fn, include ):
                    match = 1
                    break
            # An exclude pattern overrides any matching include.
            if match == 1 and not excludes is None:
                for exclude in excludes:
                    if fnmatch.fnmatchcase( fn, exclude ):
                        match = 0
                        break
            return match
        # Normalize: a single pattern string is wrapped into a 1-tuple.
        if includes is None:
            includes = ('*',)
        elif type(includes) in ( type(''), type(u'') ):
            includes = (includes,)
        if type(excludes) in ( type(''), type(u'') ):
            excludes = (excludes,)
        dir = env.Dir(dir).abspath
        paths = os.listdir( dir )
        def makeAbsFileNode( path ):
            return env.File( os.path.join( dir, path ) )
        # NOTE(review): relies on Python 2 filter()/map() returning lists; on
        # Python 3 these would return lazy iterators -- confirm target version.
        nodes = filter( filterFilename, paths )
        return map( makeAbsFileNode, nodes )
    from SCons.Script import Environment
    Environment.Glob = Glob
def exists(env):
    """The glob tool has no external requirements, so it always exists."""
    return True
| apache-2.0 |
tequa/ammisoft | ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Lib/urlparse.py | 60 | 15164 | """Parse (absolute and relative) URLs.
urlparse module is based upon the following RFC specifications.
RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding
and L. Masinter, January 2005.
RFC 2732 : "Format for Literal IPv6 Addresses in URL's by R.Hinden, B.Carpenter
and L.Masinter, December 1999.
RFC 2396: "Uniform Resource Identifiers (URI)": Generic Syntax by T.
Berners-Lee, R. Fielding, and L. Masinter, August 1998.
RFC 2368: "The mailto URL scheme", by P.Hoffman , L Masinter, J. Zwinski, July 1998.
RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June
1995.
RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M.
McCahill, December 1994
RFC 3986 is considered the current standard and any future changes to
urlparse module should conform with it. The urlparse module is
currently not entirely compliant with this RFC due to defacto
scenarios for parsing, and for backward compatibility purposes, some
parsing quirks from older RFCs are retained. The testcases in
test_urlparse.py provides a good indicator of parsing behavior.
"""
import re
__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
"urlsplit", "urlunsplit", "parse_qs", "parse_qsl"]
# A classification of schemes ('' means apply by default)
uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap',
'wais', 'file', 'https', 'shttp', 'mms',
'prospero', 'rtsp', 'rtspu', '', 'sftp',
'svn', 'svn+ssh']
uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet',
'imap', 'wais', 'file', 'mms', 'https', 'shttp',
'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '',
'svn', 'svn+ssh', 'sftp','nfs','git', 'git+ssh']
uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap',
'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips',
'mms', '', 'sftp', 'tel']
# These are not actually used anymore, but should stay for backwards
# compatibility. (They are undocumented, but have a public-looking name.)
non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
'telnet', 'wais', 'imap', 'snews', 'sip', 'sips']
uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms',
'gopher', 'rtsp', 'rtspu', 'sip', 'sips', '']
uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news',
'nntp', 'wais', 'https', 'shttp', 'snews',
'file', 'prospero', '']
# Characters valid in scheme names
scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'0123456789'
'+-.')
MAX_CACHE_SIZE = 20
_parse_cache = {}
def clear_cache():
    """Drop every memoized urlsplit() result from the parse cache."""
    _parse_cache.clear()
class ResultMixin(object):
    """Accessors shared by the parsed result tuples.

    Each property derives one userinfo/host/port component from the
    ``netloc`` attribute supplied by the concrete result class.
    """

    @property
    def username(self):
        """User name from ``user:password@host``, or None when absent."""
        netloc = self.netloc
        if "@" not in netloc:
            return None
        userinfo = netloc.rsplit("@", 1)[0]
        if ":" in userinfo:
            return userinfo.split(":", 1)[0]
        return userinfo

    @property
    def password(self):
        """Password from ``user:password@host``, or None when absent."""
        netloc = self.netloc
        if "@" not in netloc:
            return None
        userinfo = netloc.rsplit("@", 1)[0]
        if ":" not in userinfo:
            return None
        return userinfo.split(":", 1)[1]

    @property
    def hostname(self):
        """Lower-cased host name; IPv6 brackets are stripped."""
        host = self.netloc.split('@')[-1]
        if '[' in host and ']' in host:
            # Bracketed IPv6 literal: keep what is inside the brackets.
            return host.split(']')[0][1:].lower()
        if ':' in host:
            return host.split(':')[0].lower()
        return host.lower() if host else None

    @property
    def port(self):
        """Port as an int in [0, 65535], or None when absent/out of range."""
        hostport = self.netloc.split('@')[-1].split(']')[-1]
        if ':' not in hostport:
            return None
        port = hostport.split(':')[1]
        if port:
            port = int(port, 10)
            # verify legal port
            if 0 <= port <= 65535:
                return port
        return None
from collections import namedtuple
class SplitResult(namedtuple('SplitResult', 'scheme netloc path query fragment'), ResultMixin):
    """Immutable 5-tuple returned by urlsplit()."""
    __slots__ = ()

    def geturl(self):
        # Reassemble an equivalent URL string from the five components.
        return urlunsplit(self)
class ParseResult(namedtuple('ParseResult', 'scheme netloc path params query fragment'), ResultMixin):
    """Immutable 6-tuple returned by urlparse()."""
    __slots__ = ()

    def geturl(self):
        # Reassemble an equivalent URL string from the six components.
        return urlunparse(self)
def urlparse(url, scheme='', allow_fragments=True):
    """Parse a URL into 6 components:
    <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
    Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
    Note that we don't break the components up in smaller bits
    (e.g. netloc is a single string) and we don't expand % escapes."""
    # Renamed the local from `tuple` so the builtin is not shadowed.
    split_result = urlsplit(url, scheme, allow_fragments)
    scheme, netloc, url, query, fragment = split_result
    # ';params' is only meaningful for schemes listed in uses_params.
    if scheme in uses_params and ';' in url:
        url, params = _splitparams(url)
    else:
        params = ''
    return ParseResult(scheme, netloc, url, params, query, fragment)
def _splitparams(url):
if '/' in url:
i = url.find(';', url.rfind('/'))
if i < 0:
return url, ''
else:
i = url.find(';')
return url[:i], url[i+1:]
def _splitnetloc(url, start=0):
delim = len(url) # position of end of domain part of url, default is end
for c in '/?#': # look for delimiters; the order is NOT important
wdelim = url.find(c, start) # find first of this delim
if wdelim >= 0: # if found
delim = min(delim, wdelim) # use earliest delim position
return url[start:delim], url[delim:] # return (domain, rest)
def urlsplit(url, scheme='', allow_fragments=True):
    """Parse a URL into 5 components:
    <scheme>://<netloc>/<path>?<query>#<fragment>
    Return a 5-tuple: (scheme, netloc, path, query, fragment).
    Note that we don't break the components up in smaller bits
    (e.g. netloc is a single string) and we don't expand % escapes."""
    allow_fragments = bool(allow_fragments)
    # Memoize on everything that affects the result, including the input
    # types, so str and unicode inputs do not share cache entries.
    key = url, scheme, allow_fragments, type(url), type(scheme)
    cached = _parse_cache.get(key, None)
    if cached:
        return cached
    if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
        clear_cache()
    netloc = query = fragment = ''
    i = url.find(':')
    if i > 0:
        if url[:i] == 'http': # optimize the common case
            scheme = url[:i].lower()
            url = url[i+1:]
            if url[:2] == '//':
                netloc, url = _splitnetloc(url, 2)
                # A lone '[' or ']' means a malformed IPv6 literal.
                if (('[' in netloc and ']' not in netloc) or
                        (']' in netloc and '[' not in netloc)):
                    raise ValueError("Invalid IPv6 URL")
            if allow_fragments and '#' in url:
                url, fragment = url.split('#', 1)
            if '?' in url:
                url, query = url.split('?', 1)
            v = SplitResult(scheme, netloc, url, query, fragment)
            _parse_cache[key] = v
            return v
        for c in url[:i]:
            if c not in scheme_chars:
                break
        else:
            # make sure "url" is not actually a port number (in which case
            # "scheme" is really part of the path)
            rest = url[i+1:]
            if not rest or any(c not in '0123456789' for c in rest):
                # not a port number
                scheme, url = url[:i].lower(), rest
    # Generic handling of authority, fragment and query; reached when the
    # "http" fast path above did not apply (including the no-scheme case).
    if url[:2] == '//':
        netloc, url = _splitnetloc(url, 2)
        if (('[' in netloc and ']' not in netloc) or
                (']' in netloc and '[' not in netloc)):
            raise ValueError("Invalid IPv6 URL")
    if allow_fragments and '#' in url:
        url, fragment = url.split('#', 1)
    if '?' in url:
        url, query = url.split('?', 1)
    v = SplitResult(scheme, netloc, url, query, fragment)
    _parse_cache[key] = v
    return v
def urlunparse(data):
    """Reassemble a 6-tuple as produced by urlparse() into a URL string.

    The result may differ slightly from the original URL when that URL
    contained redundant delimiters (e.g. a '?' with an empty query); the
    draft states that such forms are equivalent.
    """
    scheme, netloc, url, params, query, fragment = data
    if params:
        # Re-attach path parameters before delegating to urlunsplit().
        url = ';'.join((url, params))
    return urlunsplit((scheme, netloc, url, query, fragment))
def urlunsplit(data):
    """Reassemble a 5-tuple as returned by urlsplit() into a URL string.

    The data argument can be any five-item iterable.  The result may
    differ slightly from the original URL when that URL contained
    unnecessary delimiters (for example, a '?' with an empty query); the
    RFC states that these are equivalent.
    """
    scheme, netloc, url, query, fragment = data
    wants_authority = netloc or (
        scheme and scheme in uses_netloc and url[:2] != '//')
    if wants_authority:
        if url and url[:1] != '/':
            url = '/' + url
        url = '//' + (netloc or '') + url
    if scheme:
        url = scheme + ':' + url
    if query:
        url = url + '?' + query
    if fragment:
        url = url + '#' + fragment
    return url
def urljoin(base, url, allow_fragments=True):
    """Join a base URL and a possibly relative URL to form an absolute
    interpretation of the latter."""
    if not base:
        return url
    if not url:
        return base
    bscheme, bnetloc, bpath, bparams, bquery, bfragment = \
            urlparse(base, '', allow_fragments)
    scheme, netloc, path, params, query, fragment = \
            urlparse(url, bscheme, allow_fragments)
    if scheme != bscheme or scheme not in uses_relative:
        # Different (or non-relative) scheme: url stands on its own.
        return url
    if scheme in uses_netloc:
        if netloc:
            return urlunparse((scheme, netloc, path,
                               params, query, fragment))
        netloc = bnetloc
    if path[:1] == '/':
        # Absolute path: keep everything from url, only inherit netloc.
        return urlunparse((scheme, netloc, path,
                           params, query, fragment))
    if not path and not params:
        # Relative reference with no path: inherit base path/params, and
        # the base query too when url supplied none.
        path = bpath
        params = bparams
        if not query:
            query = bquery
        return urlunparse((scheme, netloc, path,
                           params, query, fragment))
    # Merge the relative path onto the base path's directory.
    segments = bpath.split('/')[:-1] + path.split('/')
    # XXX The stuff below is bogus in various ways...
    if segments[-1] == '.':
        segments[-1] = ''
    while '.' in segments:
        segments.remove('.')
    # Collapse one resolvable "parent/.." pair per pass until none remain.
    while 1:
        i = 1
        n = len(segments) - 1
        while i < n:
            if (segments[i] == '..'
                    and segments[i-1] not in ('', '..')):
                del segments[i-1:i+1]
                break
            i = i+1
        else:
            break
    if segments == ['', '..']:
        segments[-1] = ''
    elif len(segments) >= 2 and segments[-1] == '..':
        segments[-2:] = ['']
    return urlunparse((scheme, netloc, '/'.join(segments),
                       params, query, fragment))
def urldefrag(url):
    """Removes any existing fragment from URL.

    Returns a tuple of the defragmented URL and the fragment.  If the
    URL contained no fragments, the second element is the empty string.
    """
    if '#' not in url:
        return url, ''
    s, n, p, a, q, frag = urlparse(url)
    return urlunparse((s, n, p, a, q, '')), frag
try:
    unicode
except NameError:
    # Python 3: the `unicode` builtin is gone, so nothing is "unicode".
    def _is_unicode(x):
        return 0
else:
    # Python 2: report whether x is a unicode (rather than str) object.
    def _is_unicode(x):
        return isinstance(x, unicode)
# unquote method for parse_qs and parse_qsl
# Cannot use directly from urllib as it would create a circular reference
# because urllib uses urlparse methods (urljoin). If you update this function,
# update it also in urllib. This code duplication does not exist in Python3.
_hexdig = '0123456789ABCDEFabcdef'
# Map every two-digit hex string (in any case mix) to its character.
_hextochr = dict((a+b, chr(int(a+b,16)))
                 for a in _hexdig for b in _hexdig)
# Matches maximal runs of pure-ASCII characters.
_asciire = re.compile('([\x00-\x7f]+)')
def unquote(s):
    """unquote('abc%20def') -> 'abc def'."""
    if _is_unicode(s):
        # Unicode input (Python 2): percent-escapes denote latin-1 bytes.
        # Unquote each pure-ASCII run via the str path, then re-decode.
        if '%' not in s:
            return s
        bits = _asciire.split(s)
        res = [bits[0]]
        append = res.append
        for i in range(1, len(bits), 2):
            append(unquote(str(bits[i])).decode('latin1'))
            append(bits[i + 1])
        return ''.join(res)
    bits = s.split('%')
    # fastpath
    if len(bits) == 1:
        return s
    res = [bits[0]]
    append = res.append
    for item in bits[1:]:
        try:
            append(_hextochr[item[:2]])
            append(item[2:])
        except KeyError:
            # Not a valid two-digit escape: keep the '%' literally.
            append('%')
            append(item)
    return ''.join(res)
def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
    """Parse a query given as a string argument.
    Arguments:
    qs: percent-encoded query string to be parsed
    keep_blank_values: flag indicating whether blank values in
        percent-encoded queries should be treated as blank strings.
        A true value indicates that blanks should be retained as
        blank strings.  The default false value indicates that
        blank values are to be ignored and treated as if they were
        not included.
    strict_parsing: flag indicating what to do with parsing errors.
        If false (the default), errors are silently ignored.
        If true, errors raise a ValueError exception.
    Returns a dict mapping each name to the list of its values.
    """
    # Renamed the accumulator from `dict` so the builtin is not shadowed.
    result = {}
    for name, value in parse_qsl(qs, keep_blank_values, strict_parsing):
        if name in result:
            result[name].append(value)
        else:
            result[name] = [value]
    return result
def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
    """Parse a query given as a string argument.
    Arguments:
    qs: percent-encoded query string to be parsed
    keep_blank_values: flag indicating whether blank values in
        percent-encoded queries should be treated as blank strings.  A
        true value indicates that blanks should be retained as blank
        strings.  The default false value indicates that blank values
        are to be ignored and treated as if they were not included.
    strict_parsing: flag indicating what to do with parsing errors.  If
        false (the default), errors are silently ignored.  If true,
        errors raise a ValueError exception.
    Returns a list of (name, value) pairs.
    """
    # Both '&' and ';' are accepted as pair separators.
    pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
    r = []
    for name_value in pairs:
        if not name_value and not strict_parsing:
            continue
        nv = name_value.split('=', 1)
        if len(nv) != 2:
            if strict_parsing:
                # Use the callable raise form: the old ``raise ValueError, msg``
                # statement is a syntax error on Python 3.
                raise ValueError("bad query field: %r" % (name_value,))
            # Handle case of a control-name with no equal sign
            if keep_blank_values:
                nv.append('')
            else:
                continue
        if len(nv[1]) or keep_blank_values:
            name = unquote(nv[0].replace('+', ' '))
            value = unquote(nv[1].replace('+', ' '))
            r.append((name, value))
    return r
| bsd-3-clause |
mahendra-r/home-assistant | homeassistant/components/notify/nma.py | 9 | 2684 | """
homeassistant.components.notify.nma
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
NMA (Notify My Android) notification service.
Configuration:
To use the NMA notifier you will need to add something like the following
to your configuration.yaml file.
notify:
platform: nma
api_key: YOUR_API_KEY
Variables:
api_key
*Required
Enter the API key for NMA. Go to https://www.notifymyandroid.com and create a
new API key to use with Home Assistant.
Details for the API : https://www.notifymyandroid.com/api.jsp
"""
import logging
import xml.etree.ElementTree as ET
from homeassistant.helpers import validate_config
from homeassistant.components.notify import (
DOMAIN, ATTR_TITLE, BaseNotificationService)
from homeassistant.const import CONF_API_KEY
_LOGGER = logging.getLogger(__name__)
_RESOURCE = 'https://www.notifymyandroid.com/publicapi/'
def get_service(hass, config):
    """ Get the NMA notification service. """
    # Bail out early when the required api_key is missing from the config.
    if not validate_config(config,
                           {DOMAIN: [CONF_API_KEY]},
                           _LOGGER):
        return None
    try:
        # pylint: disable=unused-variable
        from requests import Session
    except ImportError:
        _LOGGER.exception(
            "Unable to import requests. "
            "Did you maybe not install the 'Requests' package?")
        return None
    nma = Session()
    # Validate the supplied API key against the NMA "verify" endpoint
    # before constructing the service.
    response = nma.get(_RESOURCE + 'verify',
                       params={"apikey": config[DOMAIN][CONF_API_KEY]})
    tree = ET.fromstring(response.content)
    # NMA answers with an XML document whose first child is either
    # <success> or <error>.
    if tree[0].tag == 'error':
        _LOGGER.error("Wrong API key supplied. %s", tree[0].text)
    else:
        return NmaNotificationService(config[DOMAIN][CONF_API_KEY])
# pylint: disable=too-few-public-methods
class NmaNotificationService(BaseNotificationService):
    """ Implements notification service for NMA. """

    def __init__(self, api_key):
        # pylint: disable=no-name-in-module, unused-variable
        from requests import Session
        self._api_key = api_key
        # Base query parameters reused (and mutated) for every request.
        self._data = {"apikey": self._api_key}
        self.nma = Session()

    def send_message(self, message="", **kwargs):
        """ Send a message to a user. """
        title = kwargs.get(ATTR_TITLE)
        self._data['application'] = 'home-assistant'
        self._data['event'] = title
        self._data['description'] = message
        self._data['priority'] = 0
        response = self.nma.get(_RESOURCE + 'notify',
                                params=self._data)
        # NMA answers with an XML document; an <error> first child
        # signals failure.
        tree = ET.fromstring(response.content)
        if tree[0].tag == 'error':
            # NOTE(review): _LOGGER.exception here is outside an except
            # block, so no traceback is attached; _LOGGER.error may be
            # intended -- confirm.
            _LOGGER.exception(
                "Unable to perform request. Error: %s", tree[0].text)
| mit |
marcsans/cnn-physics-perception | phy/lib/python2.7/site-packages/theano/tensor/shared_randomstreams.py | 6 | 4882 | """
Define RandomStreams, providing random number variables for Theano
graphs.
"""
import copy
import numpy
from theano.compile.sharedvalue import (SharedVariable, shared_constructor,
shared)
from theano.tensor import raw_random
__docformat__ = "restructuredtext en"
class RandomStateSharedVariable(SharedVariable):
    # Marker subclass: lets RandomStreams distinguish its RNG state
    # variables from other shared variables.
    pass
@shared_constructor
def randomstate_constructor(value, name=None, strict=False,
                            allow_downcast=None, borrow=False):
    """
    SharedVariable Constructor for RandomState.

    Raises TypeError when `value` is not a numpy.random.RandomState.
    Unless `borrow` is True, the state is deep-copied so later mutation
    of the caller's RandomState does not affect the shared variable.
    """
    if not isinstance(value, numpy.random.RandomState):
        # Give callers an actionable message instead of a bare TypeError.
        raise TypeError('value must be a numpy.random.RandomState '
                        'instance, got %s' % type(value))
    if not borrow:
        value = copy.deepcopy(value)
    return RandomStateSharedVariable(
        type=raw_random.random_state_type,
        value=value,
        name=name,
        strict=strict,
        allow_downcast=allow_downcast)
class RandomStreams(raw_random.RandomStreamsBase):
    """
    Module component with similar interface to numpy.random
    (numpy.random.RandomState)

    Parameters
    ----------
    seed: None or int
        A default seed to initialize the RandomState
        instances after build. See `RandomStreamsInstance.__init__`
        for more details.
    """

    def updates(self):
        # Return a copy so callers cannot mutate the internal list.
        return list(self.state_updates)

    def __init__(self, seed=None):
        super(RandomStreams, self).__init__()
        # A list of pairs of the form (input_r, output_r). This will be
        # over-ridden by the module instance to contain stream generators.
        self.state_updates = []
        # Instance variable should take None or integer value. Used to seed the
        # random number generator that provides seeds for member streams.
        self.default_instance_seed = seed
        # numpy.RandomState instance that gen() uses to seed new streams.
        self.gen_seedgen = numpy.random.RandomState(seed)

    def seed(self, seed=None):
        """
        Re-initialize each random stream.

        Parameters
        ----------
        seed : None or integer in range 0 to 2**30
            Each random stream will be assigned a unique state that depends
            deterministically on this value.

        Returns
        -------
        None
        """
        if seed is None:
            seed = self.default_instance_seed
        # A single seed generator drives every member stream, so the full
        # set of streams is reproducible from one integer.
        seedgen = numpy.random.RandomState(seed)
        for old_r, new_r in self.state_updates:
            old_r_seed = seedgen.randint(2 ** 30)
            old_r.set_value(numpy.random.RandomState(int(old_r_seed)),
                            borrow=True)

    def __getitem__(self, item):
        """
        Retrieve the numpy RandomState instance associated with a particular
        stream.

        Parameters
        ----------
        item
            A variable of type RandomStateType, associated
            with this RandomStream.

        Returns
        -------
        numpy RandomState (or None, before initialize)

        Notes
        -----
        This is kept for compatibility with `tensor.randomstreams.RandomStreams`.
        The simpler syntax ``item.rng.get_value()`` is also valid.
        """
        return item.get_value(borrow=True)

    def __setitem__(self, item, val):
        """
        Set the numpy RandomState instance associated with a particular stream.

        Parameters
        ----------
        item
            A variable of type RandomStateType, associated with this
            RandomStream.
        val : numpy RandomState
            The new value.

        Returns
        -------
        None

        Notes
        -----
        This is kept for compatibility with `tensor.randomstreams.RandomStreams`.
        The simpler syntax ``item.rng.set_value(val)`` is also valid.
        """
        item.set_value(val, borrow=True)

    def gen(self, op, *args, **kwargs):
        """
        Create a new random stream in this container.

        Parameters
        ----------
        op
            A RandomFunction instance.
        args
            Interpreted by `op`.
        kwargs
            Interpreted by `op`.

        Returns
        -------
        Tensor Variable
            The symbolic random draw part of op()'s return value.
            This function stores the updated RandomStateType Variable
            for use at `build` time.
        """
        seed = int(self.gen_seedgen.randint(2 ** 30))
        random_state_variable = shared(numpy.random.RandomState(seed))
        # Add a reference to distinguish from other shared variables
        random_state_variable.tag.is_rng = True
        new_r, out = op(random_state_variable, *args, **kwargs)
        out.rng = random_state_variable
        out.update = (random_state_variable, new_r)
        self.state_updates.append(out.update)
        random_state_variable.default_update = new_r
        return out
| mit |
zouyapeng/horizon_change | openstack_dashboard/dashboards/project/data_processing/cluster_templates/tests.py | 7 | 3621 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
# URLs resolved once at import time; 'id' is a placeholder template id
# that the stubbed API calls accept.
INDEX_URL = reverse('horizon:project:data_processing.cluster_templates:index')
DETAILS_URL = reverse(
    'horizon:project:data_processing.cluster_templates:details', args=['id'])
class DataProcessingClusterTemplateTests(test.TestCase):
    """Mox-based view tests for the Sahara cluster-template panel."""

    @test.create_stubs({api.sahara: ('cluster_template_list',)})
    def test_index(self):
        # Stub the template listing, then render the index view.
        api.sahara.cluster_template_list(IsA(http.HttpRequest)) \
            .AndReturn(self.cluster_templates.list())
        self.mox.ReplayAll()
        res = self.client.get(INDEX_URL)
        self.assertTemplateUsed(res,
                                'project/data_processing.cluster_templates/'
                                'cluster_templates.html')
        self.assertContains(res, 'Cluster Templates')
        self.assertContains(res, 'Name')

    @test.create_stubs({api.sahara: ('cluster_template_get',),
                        api.nova: ('flavor_get',)})
    def test_details(self):
        flavor = self.flavors.first()
        ct = self.cluster_templates.first()
        # The details page may fetch the flavor/template repeatedly.
        api.nova.flavor_get(IsA(http.HttpRequest), flavor.id) \
            .MultipleTimes().AndReturn(flavor)
        api.sahara.cluster_template_get(IsA(http.HttpRequest),
                                        IsA(unicode)) \
            .MultipleTimes().AndReturn(ct)
        self.mox.ReplayAll()
        res = self.client.get(DETAILS_URL)
        self.assertTemplateUsed(res,
                                'project/data_processing.cluster_templates/'
                                'details.html')

    @test.create_stubs({api.sahara: ('cluster_template_get',
                                     'plugin_get_version_details',
                                     'nodegroup_template_find')})
    def test_copy(self):
        ct = self.cluster_templates.first()
        ngts = self.nodegroup_templates.list()
        configs = self.plugins_configs.first()
        api.sahara.cluster_template_get(IsA(http.HttpRequest),
                                        ct.id) \
            .AndReturn(ct)
        api.sahara.plugin_get_version_details(IsA(http.HttpRequest),
                                              ct.plugin_name,
                                              ct.hadoop_version) \
            .MultipleTimes().AndReturn(configs)
        api.sahara.nodegroup_template_find(IsA(http.HttpRequest),
                                           plugin_name=ct.plugin_name,
                                           hadoop_version=ct.hadoop_version) \
            .MultipleTimes().AndReturn(ngts)
        self.mox.ReplayAll()
        url = reverse('horizon:project:data_processing.cluster_templates:copy',
                      args=[ct.id])
        res = self.client.get(url)
        # The copy workflow pre-fills the name with a "-copy" suffix.
        workflow = res.context['workflow']
        step = workflow.get_step("generalconfigaction")
        self.assertEqual(step.action['cluster_template_name'].field.initial,
                         ct.name + "-copy")
matthappens/taskqueue | taskqueue/venv_tq/lib/python2.7/site-packages/boto/ec2/volume.py | 13 | 10262 | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents an EC2 Elastic Block Storage Volume
"""
from boto.resultset import ResultSet
from boto.ec2.tag import Tag
from boto.ec2.ec2object import TaggedEC2Object
class Volume(TaggedEC2Object):
    """
    Represents an EBS volume.

    :ivar id: The unique ID of the volume.
    :ivar create_time: The timestamp of when the volume was created.
    :ivar status: The status of the volume.
    :ivar size: The size (in GB) of the volume.
    :ivar snapshot_id: The ID of the snapshot this volume was created
        from, if applicable.
    :ivar attach_data: An AttachmentSet object.
    :ivar zone: The availability zone this volume is in.
    :ivar type: The type of volume (standard or consistent-iops)
    :ivar iops: If this volume is of type consistent-iops, this is
        the number of IOPS provisioned (10-300).
    """

    def __init__(self, connection=None):
        super(Volume, self).__init__(connection)
        self.id = None
        self.create_time = None
        self.status = None
        self.size = None
        self.snapshot_id = None
        self.attach_data = None
        self.zone = None
        self.type = None
        self.iops = None

    def __repr__(self):
        return 'Volume:%s' % self.id

    def startElement(self, name, attrs, connection):
        # Let the tagged-object base class claim elements (e.g. tags) first.
        retval = super(Volume, self).startElement(name, attrs, connection)
        if retval is not None:
            return retval
        if name == 'attachmentSet':
            self.attach_data = AttachmentSet()
            return self.attach_data
        elif name == 'tagSet':
            self.tags = ResultSet([('item', Tag)])
            return self.tags
        else:
            return None

    def endElement(self, name, value, connection):
        if name == 'volumeId':
            self.id = value
        elif name == 'createTime':
            self.create_time = value
        elif name == 'status':
            # An empty <status> element carries no information; keep the
            # previously parsed value.
            if value != '':
                self.status = value
        elif name == 'size':
            self.size = int(value)
        elif name == 'snapshotId':
            self.snapshot_id = value
        elif name == 'availabilityZone':
            self.zone = value
        elif name == 'volumeType':
            self.type = value
        elif name == 'iops':
            self.iops = int(value)
        else:
            # Unrecognized tags become plain attributes.
            setattr(self, name, value)

    def _update(self, updated):
        # Adopt every attribute from a freshly fetched Volume instance.
        self.__dict__.update(updated.__dict__)

    def update(self, validate=False, dry_run=False):
        """
        Update the data associated with this volume by querying EC2.

        :type validate: bool
        :param validate: By default, if EC2 returns no data about the
            volume the update method returns quietly.  If
            the validate param is True, however, it will
            raise a ValueError exception if no data is
            returned from EC2.
        """
        # Check the resultset since Eucalyptus ignores the volumeId param
        unfiltered_rs = self.connection.get_all_volumes(
            [self.id],
            dry_run=dry_run
        )
        rs = [x for x in unfiltered_rs if x.id == self.id]
        if len(rs) > 0:
            self._update(rs[0])
        elif validate:
            raise ValueError('%s is not a valid Volume ID' % self.id)
        return self.status

    def delete(self, dry_run=False):
        """
        Delete this EBS volume.

        :rtype: bool
        :return: True if successful
        """
        return self.connection.delete_volume(self.id, dry_run=dry_run)

    def attach(self, instance_id, device, dry_run=False):
        """
        Attach this EBS volume to an EC2 instance.

        :type instance_id: str
        :param instance_id: The ID of the EC2 instance to which it will
            be attached.

        :type device: str
        :param device: The device on the instance through which the
            volume will be exposed (e.g. /dev/sdh)

        :rtype: bool
        :return: True if successful
        """
        return self.connection.attach_volume(
            self.id,
            instance_id,
            device,
            dry_run=dry_run
        )

    def detach(self, force=False, dry_run=False):
        """
        Detach this EBS volume from an EC2 instance.

        :type force: bool
        :param force: Forces detachment if the previous detachment
            attempt did not occur cleanly.  This option can lead to
            data loss or a corrupted file system. Use this option only
            as a last resort to detach a volume from a failed
            instance. The instance will not have an opportunity to
            flush file system caches nor file system meta data. If you
            use this option, you must perform file system check and
            repair procedures.

        :rtype: bool
        :return: True if successful
        """
        # Use the attachment data (when present) so EC2 detaches the
        # correct instance/device pairing.
        instance_id = None
        if self.attach_data:
            instance_id = self.attach_data.instance_id
        device = None
        if self.attach_data:
            device = self.attach_data.device
        return self.connection.detach_volume(
            self.id,
            instance_id,
            device,
            force,
            dry_run=dry_run
        )

    def create_snapshot(self, description=None, dry_run=False):
        """
        Create a snapshot of this EBS Volume.

        :type description: str
        :param description: A description of the snapshot.
            Limited to 256 characters.

        :rtype: :class:`boto.ec2.snapshot.Snapshot`
        :return: The created Snapshot object
        """
        return self.connection.create_snapshot(
            self.id,
            description,
            dry_run=dry_run
        )

    def volume_state(self):
        """
        Returns the state of the volume.  Same value as the status attribute.
        """
        return self.status

    def attachment_state(self):
        """
        Get the attachment state.
        """
        state = None
        if self.attach_data:
            state = self.attach_data.status
        return state

    def snapshots(self, owner=None, restorable_by=None, dry_run=False):
        """
        Get all snapshots related to this volume.  Note that this requires
        that all available snapshots for the account be retrieved from EC2
        first and then the list is filtered client-side to contain only
        those for this volume.

        :type owner: str
        :param owner: If present, only the snapshots owned by the
            specified user will be returned.  Valid values are:

            * self
            * amazon
            * AWS Account ID

        :type restorable_by: str
        :param restorable_by: If present, only the snapshots that
            are restorable by the specified account id will be returned.

        :rtype: list of L{boto.ec2.snapshot.Snapshot}
        :return: The requested Snapshot objects
        """
        rs = self.connection.get_all_snapshots(
            owner=owner,
            restorable_by=restorable_by,
            dry_run=dry_run
        )
        # Client-side filter: the API call above returns snapshots for
        # every volume the account can see.
        mine = []
        for snap in rs:
            if snap.volume_id == self.id:
                mine.append(snap)
        return mine
class AttachmentSet(object):
    """
    Represents an EBS attachmentset.

    :ivar id: The unique ID of the volume.
    :ivar instance_id: The unique ID of the attached instance
    :ivar status: The status of the attachment
    :ivar attach_time: Attached since
    :ivar device: The device the instance has mapped
    """

    # XML tag -> attribute name translation used while parsing.
    _TAG_TO_ATTR = {
        'volumeId': 'id',
        'instanceId': 'instance_id',
        'status': 'status',
        'attachTime': 'attach_time',
        'device': 'device',
    }

    def __init__(self):
        for attr in ('id', 'instance_id', 'status', 'attach_time', 'device'):
            setattr(self, attr, None)

    def __repr__(self):
        return 'AttachmentSet:%s' % self.id

    def startElement(self, name, attrs, connection):
        # No nested elements to delegate to.
        pass

    def endElement(self, name, value, connection):
        # Known tags map onto the documented attributes; anything else
        # becomes a plain attribute named after the tag.
        setattr(self, self._TAG_TO_ATTR.get(name, name), value)
class VolumeAttribute(object):
    """Parses a DescribeVolumeAttribute response into a dict of flags."""

    def __init__(self, parent=None):
        self.id = None
        self._key_name = None
        self.attrs = {}

    def startElement(self, name, attrs, connection):
        if name == 'autoEnableIO':
            # Remember which attribute the upcoming <value> belongs to.
            self._key_name = name
        return None

    def endElement(self, name, value, connection):
        if name == 'value':
            # Attribute values arrive as "true"/"false" strings.
            self.attrs[self._key_name] = (value.lower() == 'true')
        elif name == 'volumeId':
            self.id = value
        else:
            setattr(self, name, value)
| mit |
ossdemura/django-miniblog | src/Lib/site-packages/django/contrib/gis/db/models/fields.py | 45 | 17598 | from collections import defaultdict
from django.contrib.gis import forms, gdal
from django.contrib.gis.db.models.lookups import (
RasterBandTransform, gis_lookups,
)
from django.contrib.gis.db.models.proxy import SpatialProxy
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.geometry.backend import Geometry, GeometryException
from django.core.exceptions import ImproperlyConfigured
from django.db.models.expressions import Expression
from django.db.models.fields import Field
from django.utils import six
from django.utils.translation import ugettext_lazy as _
# Local cache of the spatial_ref_sys table, which holds SRID data for each
# spatial database alias. This cache exists so that the database isn't queried
# for SRID info each time a distance query is constructed.
_srid_cache = defaultdict(dict)
def get_srid_info(srid, connection):
    """
    Returns the units, unit name, and spheroid WKT associated with the
    given SRID from the `spatial_ref_sys` (or equivalent) spatial database
    table for the given database connection.  These results are cached.
    """
    from django.contrib.gis.gdal import SpatialReference
    global _srid_cache
    try:
        # The SpatialRefSys model for the spatial backend.
        SpatialRefSys = connection.ops.spatial_ref_sys()
    except NotImplementedError:
        SpatialRefSys = None
    # Pick the lookup strategy: the backend's spatial_ref_sys table when
    # available, otherwise parse the SRID directly through GDAL.
    alias, get_srs = (
        (connection.alias, lambda srid: SpatialRefSys.objects.using(connection.alias).get(srid=srid).srs)
        if SpatialRefSys else
        (None, SpatialReference)
    )
    if srid not in _srid_cache[alias]:
        srs = get_srs(srid)
        units, units_name = srs.units
        sphere_name = srs['spheroid']
        spheroid = 'SPHEROID["%s",%s,%s]' % (sphere_name, srs.semi_major, srs.inverse_flattening)
        _srid_cache[alias][srid] = (units, units_name, spheroid)
    return _srid_cache[alias][srid]
class GeoSelectFormatMixin(object):
    def select_format(self, compiler, sql, params):
        """
        Returns the selection format string, depending on the requirements
        of the spatial backend.  For example, Oracle and MySQL require custom
        selection formats in order to retrieve geometries in OGC WKT. For all
        other fields a simple '%s' format string is returned.
        """
        connection = compiler.connection
        srid = compiler.query.get_context('transformed_srid')
        if srid:
            # A transform() call set a target SRID: wrap the column so the
            # database converts coordinates on the way out.
            sel_fmt = '%s(%%s, %s)' % (connection.ops.transform, srid)
        else:
            sel_fmt = '%s'
        if connection.ops.select:
            # This allows operations to be done on fields in the SELECT,
            # overriding their values -- used by the Oracle and MySQL
            # spatial backends to get database values as WKT, and by the
            # `transform` method.
            sel_fmt = connection.ops.select % sel_fmt
        return sel_fmt % sql, params
class BaseSpatialField(Field):
"""
The Base GIS Field.
It's used as a base class for GeometryField and RasterField. Defines
properties that are common to all GIS fields such as the characteristics
of the spatial reference system of the field.
"""
description = _("The base GIS field.")
empty_strings_allowed = False
# Geodetic units.
geodetic_units = ('decimal degree', 'degree')
    def __init__(self, verbose_name=None, srid=4326, spatial_index=True, **kwargs):
        """
        The initialization function for base spatial fields. Takes the following
        as keyword arguments:

        srid:
            The spatial reference system identifier, an OGC standard.
            Defaults to 4326 (WGS84).

        spatial_index:
            Indicates whether to create a spatial index.  Defaults to True.
            Set this instead of 'db_index' for geographic fields since index
            creation is different for geometry columns.
        """
        # Setting the index flag with the value of the `spatial_index` keyword.
        self.spatial_index = spatial_index
        # Setting the SRID and getting the units.  Unit information must be
        # easily available in the field instance for distance queries.
        self.srid = srid
        # Setting the verbose_name keyword argument with the positional
        # first parameter, so this works like normal fields.
        kwargs['verbose_name'] = verbose_name
        super(BaseSpatialField, self).__init__(**kwargs)
    def deconstruct(self):
        """Return the info needed to recreate this field (for migrations)."""
        name, path, args, kwargs = super(BaseSpatialField, self).deconstruct()
        # Always include SRID for less fragility; include spatial index if it's
        # not the default value.
        kwargs['srid'] = self.srid
        if self.spatial_index is not True:
            kwargs['spatial_index'] = self.spatial_index
        return name, path, args, kwargs
    def db_type(self, connection):
        # Delegate to the backend: the column type differs per spatial
        # database.
        return connection.ops.geo_db_type(self)

    # The following functions are used to get the units, their name, and
    # the spheroid corresponding to the SRID of the BaseSpatialField.
def _get_srid_info(self, connection):
# Get attributes from `get_srid_info`.
self._units, self._units_name, self._spheroid = get_srid_info(self.srid, connection)
def spheroid(self, connection):
if not hasattr(self, '_spheroid'):
self._get_srid_info(connection)
return self._spheroid
def units(self, connection):
if not hasattr(self, '_units'):
self._get_srid_info(connection)
return self._units
def units_name(self, connection):
if not hasattr(self, '_units_name'):
self._get_srid_info(connection)
return self._units_name
def geodetic(self, connection):
"""
Returns true if this field's SRID corresponds with a coordinate
system that uses non-projected units (e.g., latitude/longitude).
"""
units_name = self.units_name(connection)
return units_name.lower() in self.geodetic_units if units_name else self.srid == 4326
def get_placeholder(self, value, compiler, connection):
"""
Returns the placeholder for the spatial column for the
given value.
"""
return connection.ops.get_geom_placeholder(self, value, compiler)
def get_srid(self, obj):
"""
Return the default SRID for the given geometry or raster, taking into
account the SRID set for the field. For example, if the input geometry
or raster doesn't have an SRID, then the SRID of the field will be
returned.
"""
srid = obj.srid # SRID of given geometry.
if srid is None or self.srid == -1 or (srid == -1 and self.srid != -1):
return self.srid
else:
return srid
def get_db_prep_save(self, value, connection):
"""
Prepare the value for saving in the database.
"""
if isinstance(value, Geometry) or value:
return connection.ops.Adapter(self.get_prep_value(value))
else:
return None
def get_raster_prep_value(self, value, is_candidate):
"""
Return a GDALRaster if conversion is successful, otherwise return None.
"""
if isinstance(value, gdal.GDALRaster):
return value
elif is_candidate:
try:
return gdal.GDALRaster(value)
except GDALException:
pass
elif isinstance(value, dict):
try:
return gdal.GDALRaster(value)
except GDALException:
raise ValueError("Couldn't create spatial object from lookup value '%s'." % value)
def get_prep_value(self, value):
"""
Spatial lookup values are either a parameter that is (or may be
converted to) a geometry or raster, or a sequence of lookup values
that begins with a geometry or raster. This routine sets up the
geometry or raster value properly and preserves any other lookup
parameters.
"""
value = super(BaseSpatialField, self).get_prep_value(value)
# For IsValid lookups, boolean values are allowed.
if isinstance(value, (Expression, bool)):
return value
elif isinstance(value, (tuple, list)):
obj = value[0]
seq_value = True
else:
obj = value
seq_value = False
# When the input is not a geometry or raster, attempt to construct one
# from the given string input.
if isinstance(obj, Geometry):
pass
else:
# Check if input is a candidate for conversion to raster or geometry.
is_candidate = isinstance(obj, (bytes, six.string_types)) or hasattr(obj, '__geo_interface__')
# Try to convert the input to raster.
raster = self.get_raster_prep_value(obj, is_candidate)
if raster:
obj = raster
elif is_candidate:
try:
obj = Geometry(obj)
except (GeometryException, GDALException):
raise ValueError("Couldn't create spatial object from lookup value '%s'." % obj)
else:
raise ValueError('Cannot use object with type %s for a spatial lookup parameter.' % type(obj).__name__)
# Assigning the SRID value.
obj.srid = self.get_srid(obj)
if seq_value:
lookup_val = [obj]
lookup_val.extend(value[1:])
return tuple(lookup_val)
else:
return obj
# Make every GIS lookup (bbcontains, dwithin, etc.) available on all
# spatial fields by registering them on the common base class.
for lookup_class in gis_lookups.values():
    BaseSpatialField.register_lookup(lookup_class)
class GeometryField(GeoSelectFormatMixin, BaseSpatialField):
    """
    The base Geometry field -- maps to the OpenGIS Specification Geometry type.
    """
    description = _("The base Geometry field -- maps to the OpenGIS Specification Geometry type.")
    form_class = forms.GeometryField
    # The OpenGIS Geometry name.
    geom_type = 'GEOMETRY'
    def __init__(self, verbose_name=None, dim=2, geography=False, **kwargs):
        """
        The initialization function for geometry fields. In addition to the
        parameters from BaseSpatialField, it takes the following as keyword
        arguments:
        dim:
         The number of dimensions for this geometry. Defaults to 2.
        extent:
         Customize the extent, in a 4-tuple of WGS 84 coordinates, for the
         geometry field entry in the `USER_SDO_GEOM_METADATA` table. Defaults
         to (-180.0, -90.0, 180.0, 90.0).
        tolerance:
         Define the tolerance, in meters, to use for the geometry field
         entry in the `USER_SDO_GEOM_METADATA` table. Defaults to 0.05.
        """
        # Setting the dimension of the geometry field.
        self.dim = dim
        # Is this a geography rather than a geometry column?
        self.geography = geography
        # Oracle-specific private attributes for creating the entry in
        # `USER_SDO_GEOM_METADATA`
        self._extent = kwargs.pop('extent', (-180.0, -90.0, 180.0, 90.0))
        self._tolerance = kwargs.pop('tolerance', 0.05)
        super(GeometryField, self).__init__(verbose_name=verbose_name, **kwargs)
    def deconstruct(self):
        # Migration support: only serialize the non-default options.
        name, path, args, kwargs = super(GeometryField, self).deconstruct()
        # Include kwargs if they're not the default values.
        if self.dim != 2:
            kwargs['dim'] = self.dim
        if self.geography is not False:
            kwargs['geography'] = self.geography
        return name, path, args, kwargs
    # ### Routines specific to GeometryField ###
    def get_distance(self, value, lookup_type, connection):
        """
        Returns a distance number in units of the field. For example, if
        `D(km=1)` was passed in and the units of the field were in meters,
        then 1000 would be returned.
        """
        return connection.ops.get_distance(self, value, lookup_type)
    def get_db_prep_value(self, value, connection, *args, **kwargs):
        # Wrap the prepared value with the backend's adapter; geography
        # columns (PostGIS) need the extra flag so the adapter casts properly.
        return connection.ops.Adapter(
            super(GeometryField, self).get_db_prep_value(value, connection, *args, **kwargs),
            **({'geography': True} if self.geography else {})
        )
    def from_db_value(self, value, expression, connection, context):
        # Convert the raw database value into a Geometry, stamping the
        # field's SRID on it if the database didn't provide one.
        if value:
            if not isinstance(value, Geometry):
                value = Geometry(value)
            srid = value.srid
            if not srid and self.srid != -1:
                value.srid = self.srid
        return value
    # ### Routines overloaded from Field ###
    def contribute_to_class(self, cls, name, **kwargs):
        super(GeometryField, self).contribute_to_class(cls, name, **kwargs)
        # Setup for lazy-instantiated Geometry object.
        setattr(cls, self.attname, SpatialProxy(Geometry, self))
    def formfield(self, **kwargs):
        # Build form-field defaults from this model field's configuration.
        defaults = {'form_class': self.form_class,
                    'geom_type': self.geom_type,
                    'srid': self.srid,
                    }
        defaults.update(kwargs)
        # 3D geometries need a plain Textarea unless the widget advertises
        # 3D support (map widgets generally don't).
        if (self.dim > 2 and 'widget' not in kwargs and
                not getattr(defaults['form_class'].widget, 'supports_3d', False)):
            defaults['widget'] = forms.Textarea
        return super(GeometryField, self).formfield(**defaults)
    def _get_db_prep_lookup(self, lookup_type, value, connection):
        """
        Prepare for the database lookup, and return any spatial parameters
        necessary for the query. This includes wrapping any geometry
        parameters with a backend-specific adapter and formatting any distance
        parameters into the correct units for the coordinate system of the
        field.
        Only used by the deprecated GeoQuerySet and to be
        RemovedInDjango20Warning.
        """
        # Populating the parameters list, and wrapping the Geometry
        # with the Adapter of the spatial backend.
        if isinstance(value, (tuple, list)):
            params = [connection.ops.Adapter(value[0])]
            # Getting the distance parameter in the units of the field.
            params += self.get_distance(value[1:], lookup_type, connection)
        else:
            params = [connection.ops.Adapter(value)]
        return params
# The OpenGIS Geometry Type Fields
class PointField(GeometryField):
    """Geometry field restricted to OGC POINT geometries."""
    description = _("Point")
    form_class = forms.PointField
    geom_type = 'POINT'
class LineStringField(GeometryField):
    """Geometry field restricted to OGC LINESTRING geometries."""
    description = _("Line string")
    form_class = forms.LineStringField
    geom_type = 'LINESTRING'
class PolygonField(GeometryField):
    """Geometry field restricted to OGC POLYGON geometries."""
    description = _("Polygon")
    form_class = forms.PolygonField
    geom_type = 'POLYGON'
class MultiPointField(GeometryField):
    """Geometry field restricted to OGC MULTIPOINT geometries."""
    description = _("Multi-point")
    form_class = forms.MultiPointField
    geom_type = 'MULTIPOINT'
class MultiLineStringField(GeometryField):
    """Geometry field restricted to OGC MULTILINESTRING geometries."""
    description = _("Multi-line string")
    form_class = forms.MultiLineStringField
    geom_type = 'MULTILINESTRING'
class MultiPolygonField(GeometryField):
    """Geometry field restricted to OGC MULTIPOLYGON geometries."""
    description = _("Multi polygon")
    form_class = forms.MultiPolygonField
    geom_type = 'MULTIPOLYGON'
class GeometryCollectionField(GeometryField):
    """Geometry field restricted to OGC GEOMETRYCOLLECTION geometries."""
    description = _("Geometry collection")
    form_class = forms.GeometryCollectionField
    geom_type = 'GEOMETRYCOLLECTION'
class ExtentField(GeoSelectFormatMixin, Field):
    """Field type used for the return value of an extent aggregate."""
    description = _("Extent Aggregate Field")
    def get_internal_type(self):
        """Identify this field type for the backend's data-type mapping."""
        return "ExtentField"
class RasterField(BaseSpatialField):
    """
    Raster field for GeoDjango -- evaluates into GDALRaster objects.
    """
    description = _("Raster Field")
    geom_type = 'RASTER'
    geography = False
    def _check_connection(self, connection):
        # Make sure raster fields are used only on backends with raster support.
        if not connection.features.gis_enabled or not connection.features.supports_raster:
            raise ImproperlyConfigured('Raster fields require backends with raster support.')
    def db_type(self, connection):
        # Validate backend support before delegating to the spatial backend.
        self._check_connection(connection)
        return super(RasterField, self).db_type(connection)
    def from_db_value(self, value, expression, connection, context):
        # Let the backend turn its raw raster representation into a GDALRaster.
        return connection.ops.parse_raster(value)
    def get_db_prep_value(self, value, connection, prepared=False):
        self._check_connection(connection)
        # Prepare raster for writing to database.
        if not prepared:
            value = connection.ops.deconstruct_raster(value)
        return super(RasterField, self).get_db_prep_value(value, connection, prepared)
    def contribute_to_class(self, cls, name, **kwargs):
        super(RasterField, self).contribute_to_class(cls, name, **kwargs)
        # Setup for lazy-instantiated Raster object. For large querysets, the
        # instantiation of all GDALRasters can potentially be expensive. This
        # delays the instantiation of the objects to the moment of evaluation
        # of the raster attribute.
        setattr(cls, self.attname, SpatialProxy(gdal.GDALRaster, self))
    def get_transform(self, name):
        # Integer transform names select an individual raster band, e.g.
        # rast__1__... ; anything else falls back to the normal lookup path.
        try:
            band_index = int(name)
            return type(
                'SpecificRasterBandTransform',
                (RasterBandTransform, ),
                {'band_index': band_index}
            )
        except ValueError:
            pass
        return super(RasterField, self).get_transform(name)
| mit |
bobcyw/django | tests/admin_docs/tests.py | 114 | 20476 | import datetime
import sys
import unittest
from django.conf import settings
from django.contrib.admindocs import utils
from django.contrib.admindocs.views import get_return_data_type
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.test import TestCase, modify_settings, override_settings
from django.test.utils import captured_stderr
from .models import Company, Person
class TestDataMixin(object):
    """Mixin that creates the single superuser shared by the tests below."""
    @classmethod
    def setUpTestData(cls):
        # The stored hash corresponds to the plain-text password "secret".
        joined = datetime.datetime(2007, 5, 30, 13, 20, 10)
        superuser_fields = dict(
            pk=100,
            username='super',
            first_name='Super',
            last_name='User',
            email='super@example.com',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            is_active=True,
            is_superuser=True,
            is_staff=True,
            last_login=joined,
            date_joined=joined,
        )
        User.objects.create(**superuser_fields)
@override_settings(
    PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF='admin_docs.urls',
)
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.admindocs'})
class AdminDocsTestCase(TestCase):
    """Base test case that enables admindocs and its URLconf for each test."""
class MiscTests(AdminDocsTestCase):
    # Miscellaneous admindocs regression tests.
    def setUp(self):
        # Create and log in a superuser with a known plain-text password.
        User.objects.create_superuser('super', None, 'secret')
        self.client.login(username='super', password='secret')
    @modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'})
    @override_settings(SITE_ID=None)    # will restore SITE_ID after the test
    def test_no_sites_framework(self):
        """
        Without the sites framework, should not access SITE_ID or Site
        objects. Deleting settings is fine here as UserSettingsHolder is used.
        """
        Site.objects.all().delete()
        del settings.SITE_ID
        self.client.get('/admindocs/views/')  # should not raise
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewTests(TestDataMixin, AdminDocsTestCase):
    # Smoke tests covering each of the admindocs views.
    def setUp(self):
        self.client.login(username='super', password='secret')
    def test_index(self):
        # Anonymous users must be sent to the login screen first.
        self.client.logout()
        response = self.client.get(reverse('django-admindocs-docroot'), follow=True)
        # Should display the login screen
        self.assertContains(response,
            '<input type="hidden" name="next" value="/admindocs/" />', html=True)
        self.client.login(username='super', password='secret')
        response = self.client.get(reverse('django-admindocs-docroot'))
        self.assertContains(response, '<h1>Documentation</h1>', html=True)
        self.assertContains(response,
            '<h1 id="site-name"><a href="/admin/">Django '
            'administration</a></h1>')
    def test_bookmarklets(self):
        response = self.client.get(reverse('django-admindocs-bookmarklets'))
        self.assertContains(response, '/admindocs/views/')
    def test_templatetag_index(self):
        response = self.client.get(reverse('django-admindocs-tags'))
        self.assertContains(response, '<h3 id="built_in-extends">extends</h3>', html=True)
    def test_templatefilter_index(self):
        response = self.client.get(reverse('django-admindocs-filters'))
        self.assertContains(response, '<h3 id="built_in-first">first</h3>', html=True)
    def test_view_index(self):
        response = self.client.get(reverse('django-admindocs-views-index'))
        self.assertContains(response,
            '<h3><a href="/admindocs/views/django.contrib.admindocs.views.BaseAdminDocsView/">/admindocs/</a></h3>',
            html=True)
        # Namespaced views should be listed with their qualified URL name.
        self.assertContains(response, 'Views by namespace test')
        self.assertContains(response, 'Name: <code>test:func</code>.')
    def test_view_detail(self):
        response = self.client.get(
            reverse('django-admindocs-views-detail',
                    args=['django.contrib.admindocs.views.BaseAdminDocsView']))
        # View docstring
        self.assertContains(response, 'Base view for admindocs views.')
    def test_view_detail_illegal_import(self):
        """
        #23601 - Ensure the view exists in the URLconf.
        """
        response = self.client.get(
            reverse('django-admindocs-views-detail',
                    args=['urlpatterns_reverse.nonimported_module.view']))
        self.assertEqual(response.status_code, 404)
        # The module must not have been imported as a side effect of the 404.
        self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
    def test_model_index(self):
        response = self.client.get(reverse('django-admindocs-models-index'))
        self.assertContains(
            response,
            '<h2 id="app-auth">Authentication and Authorization (django.contrib.auth)</h2>',
            html=True
        )
    def test_template_detail(self):
        response = self.client.get(reverse('django-admindocs-templates',
            args=['admin_doc/template_detail.html']))
        self.assertContains(response,
            '<h1>Template: "admin_doc/template_detail.html"</h1>', html=True)
    def test_missing_docutils(self):
        # Temporarily pretend docutils isn't installed; the finally block
        # restores the module-level flag for subsequent tests.
        utils.docutils_is_available = False
        try:
            response = self.client.get(reverse('django-admindocs-docroot'))
            self.assertContains(response,
                '<h3>The admin documentation system requires Python\'s '
                '<a href="http://docutils.sf.net/">docutils</a> library.</h3>',
                html=True)
            self.assertContains(response,
                '<h1 id="site-name"><a href="/admin/">Django '
                'administration</a></h1>')
        finally:
            utils.docutils_is_available = True
@override_settings(TEMPLATES=[{
    'NAME': 'ONE',
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'APP_DIRS': True,
}, {
    'NAME': 'TWO',
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'APP_DIRS': True,
}])
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewWithMultipleEngines(AdminDocViewTests):
    # Re-runs all AdminDocViewTests under a two-engine TEMPLATES setting,
    # overriding only the tag/filter index tests whose detailed assertions
    # don't hold with multiple engines.
    def test_templatefilter_index(self):
        # Overridden because non-trivial TEMPLATES settings aren't supported
        # but the page shouldn't crash (#24125).
        response = self.client.get(reverse('django-admindocs-filters'))
        self.assertContains(response, '<title>Template filters</title>', html=True)
    def test_templatetag_index(self):
        # Overridden because non-trivial TEMPLATES settings aren't supported
        # but the page shouldn't crash (#24125).
        response = self.client.get(reverse('django-admindocs-tags'))
        self.assertContains(response, '<title>Template tags</title>', html=True)
class XViewMiddlewareTest(TestDataMixin, AdminDocsTestCase):
    """
    The XViewMiddleware should expose the dotted path of the view in an
    ``X-View`` response header, but only for logged-in, active staff users.
    """
    def _assert_xview_header(self, url, view_path):
        """
        Exercise the X-View header contract for *url*:
        absent when anonymous, present (with *view_path*) for active staff,
        and absent again for non-staff or inactive users.
        """
        user = User.objects.get(username='super')
        # Anonymous request: no header.
        response = self.client.head(url)
        self.assertNotIn('X-View', response)
        # Active staff user: header present with the view's dotted path.
        self.client.login(username='super', password='secret')
        response = self.client.head(url)
        self.assertIn('X-View', response)
        self.assertEqual(response['X-View'], view_path)
        # Logged-in but non-staff: no header.
        user.is_staff = False
        user.save()
        response = self.client.head(url)
        self.assertNotIn('X-View', response)
        # Staff but inactive: no header.
        user.is_staff = True
        user.is_active = False
        user.save()
        response = self.client.head(url)
        self.assertNotIn('X-View', response)
    def test_xview_func(self):
        # Function-based view.
        self._assert_xview_header('/xview/func/', 'admin_docs.views.xview')
    def test_xview_class(self):
        # Class-based view.
        self._assert_xview_header('/xview/class/', 'admin_docs.views.XViewClass')
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class DefaultRoleTest(AdminDocsTestCase):
    """Checks admindocs' handling of the default reStructuredText role."""
    def test_parse_rst(self):
        """
        ``django.contrib.admindocs.utils.parse_rst`` should use
        ``cmsreference`` as the default role.
        """
        markup = ('<p><a class="reference external" href="/admindocs/%s">'
                  'title</a></p>\n')
        # Each thing-being-parsed maps to the URL path cmsreference emits.
        expected_paths = [
            ('model', 'models/title/'),
            ('view', 'views/title/'),
            ('template', 'templates/title/'),
            ('filter', 'filters/#title'),
            ('tag', 'tags/#title'),
        ]
        for thing, url_path in expected_paths:
            self.assertEqual(utils.parse_rst('`title`', thing),
                             markup % url_path)
    def test_publish_parts(self):
        """
        Django shouldn't break the default role for interpreted text
        when ``publish_parts`` is used directly, by setting it to
        ``cmsreference``. See #6681.
        """
        import docutils
        # The global default role must be untouched by admindocs.
        self.assertNotEqual(docutils.parsers.rst.roles.DEFAULT_INTERPRETED_ROLE,
                            'cmsreference')
        rst_source = 'reST, `interpreted text`, default role.'
        expected_html = '<p>reST, <cite>interpreted text</cite>, default role.</p>\n'
        parts = docutils.core.publish_parts(source=rst_source, writer_name="html4css1")
        self.assertEqual(parts['fragment'], expected_html)
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class TestModelDetailView(TestDataMixin, AdminDocsTestCase):
    """
    Tests that various details render correctly
    """
    def setUp(self):
        self.client.login(username='super', password='secret')
        # Capture docutils warnings emitted while rendering so tests can
        # assert on them (see test_descriptions_render_correctly).
        with captured_stderr() as self.docutils_stderr:
            self.response = self.client.get(reverse('django-admindocs-models-detail', args=['admin_docs', 'Person']))
    def test_method_excludes(self):
        """
        Methods that begin with strings defined in
        ``django.contrib.admindocs.views.MODEL_METHODS_EXCLUDE``
        should not get displayed in the admin docs.
        """
        self.assertContains(self.response, "<td>get_full_name</td>")
        self.assertNotContains(self.response, "<td>_get_full_name</td>")
        self.assertNotContains(self.response, "<td>add_image</td>")
        self.assertNotContains(self.response, "<td>delete_image</td>")
        self.assertNotContains(self.response, "<td>set_status</td>")
        self.assertNotContains(self.response, "<td>save_changes</td>")
    def test_methods_with_arguments(self):
        """
        Methods that take arguments should also displayed.
        """
        self.assertContains(self.response, "<h3>Methods with arguments</h3>")
        self.assertContains(self.response, "<td>rename_company</td>")
        self.assertContains(self.response, "<td>dummy_function</td>")
        self.assertContains(self.response, "<td>suffix_company_name</td>")
    def test_methods_with_arguments_display_arguments(self):
        """
        Methods with arguments should have their arguments displayed.
        """
        self.assertContains(self.response, "<td>new_name</td>")
    def test_methods_with_arguments_display_arguments_default_value(self):
        """
        Methods with keyword arguments should have their arguments displayed.
        """
        self.assertContains(self.response, "<td>suffix='ltd'</td>")
    def test_methods_with_multiple_arguments_display_arguments(self):
        """
        Methods with multiple arguments should have all their arguments
        displayed, but omitting 'self'.
        """
        self.assertContains(self.response, "<td>baz, rox, *some_args, **some_kwargs</td>")
    def test_method_data_types(self):
        """
        We should be able to get a basic idea of the type returned
        by a method
        """
        company = Company.objects.create(name="Django")
        person = Person.objects.create(
            first_name="Human",
            last_name="User",
            company=company
        )
        # Return type is inferred from the method name prefix (get_*_count,
        # get_*_list, ...).
        self.assertEqual(
            get_return_data_type(person.get_status_count.__name__),
            'Integer'
        )
        self.assertEqual(
            get_return_data_type(person.get_groups_list.__name__),
            'List'
        )
    def test_descriptions_render_correctly(self):
        """
        The ``description`` field should render correctly for each type of field
        """
        # help text in fields
        self.assertContains(self.response, "<td>first name - The person's first name</td>")
        self.assertContains(self.response, "<td>last name - The person's last name</td>")
        # method docstrings
        self.assertContains(self.response, "<p>Get the full name of the person</p>")
        link = '<a class="reference external" href="/admindocs/models/%s/">%s</a>'
        markup = '<p>the related %s object</p>'
        company_markup = markup % (link % ("admin_docs.company", "admin_docs.Company"))
        # foreign keys
        self.assertContains(self.response, company_markup)
        # foreign keys with help text
        self.assertContains(self.response, "%s\n - place of work" % company_markup)
        # many to many fields
        self.assertContains(
            self.response,
            "number of related %s objects" % (link % ("admin_docs.group", "admin_docs.Group"))
        )
        self.assertContains(
            self.response,
            "all related %s objects" % (link % ("admin_docs.group", "admin_docs.Group"))
        )
        # "raw" and "include" directives are disabled
        self.assertContains(self.response, '<p>"raw" directive disabled.</p>',)
        self.assertContains(self.response, '.. raw:: html\n    :file: admin_docs/evilfile.txt')
        self.assertContains(self.response, '<p>"include" directive disabled.</p>',)
        self.assertContains(self.response, '.. include:: admin_docs/evilfile.txt')
        # The warnings for the disabled directives end up on stderr, which
        # was captured in setUp().
        out = self.docutils_stderr.getvalue()
        self.assertIn('"raw" directive disabled', out)
        self.assertIn('"include" directive disabled', out)
    def test_model_with_many_to_one(self):
        # Reverse FK accessors should be documented on the related model.
        link = '<a class="reference external" href="/admindocs/models/%s/">%s</a>'
        response = self.client.get(
            reverse('django-admindocs-models-detail', args=['admin_docs', 'company'])
        )
        self.assertContains(
            response,
            "number of related %s objects" % (link % ("admin_docs.person", "admin_docs.Person"))
        )
        self.assertContains(
            response,
            "all related %s objects" % (link % ("admin_docs.person", "admin_docs.Person"))
        )
    def test_model_with_no_backward_relations_render_only_relevant_fields(self):
        """
        A model with ``related_name`` of `+` should not show backward relationship
        links in admin docs
        """
        response = self.client.get(
            reverse('django-admindocs-models-detail',
                    args=['admin_docs', 'family']))
        fields = response.context_data.get('fields')
        self.assertEqual(len(fields), 2)
    def test_model_docstring_renders_correctly(self):
        # The model's docstring should be split into summary, notes, and
        # body, each rendered through docutils.
        summary = (
            '<h2 class="subhead"><p>Stores information about a person, related to <a class="reference external" '
            'href="/admindocs/models/myapp.company/">myapp.Company</a>.</p></h2>'
        )
        subheading = '<p><strong>Notes</strong></p>'
        body = '<p>Use <tt class="docutils literal">save_changes()</tt> when saving this object.</p>'
        model_body = (
            '<dl class="docutils"><dt><tt class="'
            'docutils literal">company</tt></dt><dd>Field storing <a class="'
            'reference external" href="/admindocs/models/myapp.company/">'
            'myapp.Company</a> where the person works.</dd></dl>'
        )
        self.assertContains(self.response, 'DESCRIPTION')
        self.assertContains(self.response, summary, html=True)
        self.assertContains(self.response, subheading, html=True)
        self.assertContains(self.response, body, html=True)
        self.assertContains(self.response, model_body, html=True)
    def test_model_detail_title(self):
        # Page title is "app_label.ModelName".
        self.assertContains(self.response, '<h1>admin_docs.Person</h1>', html=True)
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class TestUtils(AdminDocsTestCase):
    """
    This __doc__ output is required for testing. I copied this example from
    `admindocs` documentation. (TITLE)
    Display an individual :model:`myapp.MyModel`.
    **Context**
    ``RequestContext``
    ``mymodel``
        An instance of :model:`myapp.MyModel`.
    **Template:**
    :template:`myapp/my_template.html` (DESCRIPTION)
    some_metadata: some data
    """
    # NOTE: the class docstring above is itself the test fixture (accessed
    # through self.__doc__ in setUp) -- do not edit or reformat it, or the
    # expected values in the tests below will no longer match.
    def setUp(self):
        self.docstring = self.__doc__
    def test_trim_docstring(self):
        # trim_docstring should strip the common leading indentation.
        trim_docstring_output = utils.trim_docstring(self.docstring)
        trimmed_docstring = (
            'This __doc__ output is required for testing. I copied this '
            'example from\n`admindocs` documentation. (TITLE)\n\n'
            'Display an individual :model:`myapp.MyModel`.\n\n'
            '**Context**\n\n``RequestContext``\n\n``mymodel``\n'
            '    An instance of :model:`myapp.MyModel`.\n\n'
            '**Template:**\n\n:template:`myapp/my_template.html` '
            '(DESCRIPTION)\n\nsome_metadata: some data'
        )
        self.assertEqual(trim_docstring_output, trimmed_docstring)
    def test_parse_docstring(self):
        # parse_docstring splits a docstring into (title, description,
        # metadata) where metadata is the trailing "key: value" block.
        title, description, metadata = utils.parse_docstring(self.docstring)
        docstring_title = (
            'This __doc__ output is required for testing. I copied this example from\n'
            '`admindocs` documentation. (TITLE)'
        )
        docstring_description = (
            'Display an individual :model:`myapp.MyModel`.\n\n'
            '**Context**\n\n``RequestContext``\n\n``mymodel``\n'
            '    An instance of :model:`myapp.MyModel`.\n\n'
            '**Template:**\n\n:template:`myapp/my_template.html` '
            '(DESCRIPTION)'
        )
        self.assertEqual(title, docstring_title)
        self.assertEqual(description, docstring_description)
        self.assertEqual(metadata, {'some_metadata': 'some data'})
    def test_title_output(self):
        # The title should be rendered through parse_rst with :model:
        # references turned into admindocs links.
        title, description, metadata = utils.parse_docstring(self.docstring)
        title_output = utils.parse_rst(title, 'model', 'model:admindocs')
        self.assertIn('TITLE', title_output)
        title_rendered = (
            '<p>This __doc__ output is required for testing. I copied this '
            'example from\n<a class="reference external" '
            'href="/admindocs/models/admindocs/">admindocs</a> documentation. '
            '(TITLE)</p>\n'
        )
        self.assertHTMLEqual(title_output, title_rendered)
    def test_description_output(self):
        # Same as above, but for the description section of the docstring.
        title, description, metadata = utils.parse_docstring(self.docstring)
        description_output = utils.parse_rst(description, 'model', 'model:admindocs')
        description_rendered = (
            '<p>Display an individual <a class="reference external" '
            'href="/admindocs/models/myapp.mymodel/">myapp.MyModel</a>.</p>\n'
            '<p><strong>Context</strong></p>\n<p><tt class="docutils literal">'
            'RequestContext</tt></p>\n<dl class="docutils">\n<dt><tt class="'
            'docutils literal">mymodel</tt></dt>\n<dd>An instance of <a class="'
            'reference external" href="/admindocs/models/myapp.mymodel/">'
            'myapp.MyModel</a>.</dd>\n</dl>\n<p><strong>Template:</strong></p>'
            '\n<p><a class="reference external" href="/admindocs/templates/'
            'myapp/my_template.html/">myapp/my_template.html</a> (DESCRIPTION)'
            '</p>\n'
        )
        self.assertHTMLEqual(description_output, description_rendered)
| bsd-3-clause |
axilleas/ansible | v1/ansible/runner/connection_plugins/accelerate.py | 109 | 15527 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import json
import os
import base64
import socket
import struct
import time
from ansible.callbacks import vvv, vvvv
from ansible.errors import AnsibleError, AnsibleFileNotFound
from ansible.runner.connection_plugins.ssh import Connection as SSHConnection
from ansible.runner.connection_plugins.paramiko_ssh import Connection as ParamikoConnection
from ansible import utils
from ansible import constants
# the chunk size to read and send, assuming mtu 1500 and
# leaving room for base64 (+33%) encoding and header (8 bytes)
# ((1400-8)/4)*3) = 1044
# which leaves room for the TCP/IP header. We set this to a
# multiple of the value to speed up file reads.
# Used by the file transfer methods of the accelerate Connection.
CHUNK_SIZE=1044*20
class Connection(object):
''' raw socket accelerated connection '''
    def __init__(self, runner, host, port, user, password, private_key_file, *args, **kwargs):
        # Set up an accelerated connection. `port` is a 2-tuple: the SSH
        # port (used for the fallback transport) and the accelerate daemon
        # port (read via port[0] / port[1] below).
        self.runner = runner
        self.host = host
        self.context = None
        self.conn = None  # the raw socket, opened lazily in connect()
        self.user = user
        # Per-host AES key shared with the remote accelerate daemon.
        self.key = utils.key_for_hostname(host)
        self.port = port[0]
        self.accport = port[1]
        self.is_connected = False
        self.has_pipelining = False
        # Only sudo is supported for privilege escalation on this transport.
        self.become_methods_supported=['sudo']
        # Fall back to defaults and coerce string ports to integers.
        if not self.port:
            self.port = constants.DEFAULT_REMOTE_PORT
        elif not isinstance(self.port, int):
            self.port = int(self.port)
        if not self.accport:
            self.accport = constants.ACCELERATE_PORT
        elif not isinstance(self.accport, int):
            self.accport = int(self.accport)
        # The SSH connection here is only used to launch the accelerate
        # daemon remotely when a direct socket connection fails.
        if self.runner.original_transport == "paramiko":
            self.ssh = ParamikoConnection(
                runner=self.runner,
                host=self.host,
                port=self.port,
                user=self.user,
                password=password,
                private_key_file=private_key_file
            )
        else:
            self.ssh = SSHConnection(
                runner=self.runner,
                host=self.host,
                port=self.port,
                user=self.user,
                password=password,
                private_key_file=private_key_file
            )
        if not getattr(self.ssh, 'shell', None):
            self.ssh.shell = utils.plugins.shell_loader.get('sh')
        # attempt to work around shared-memory funness
        if getattr(self.runner, 'aes_keys', None):
            utils.AES_KEYS = self.runner.aes_keys
    def _execute_accelerate_module(self):
        # Launch the `accelerate` daemon on the remote host over plain SSH.
        # The AES key is base64-encoded so it survives shell quoting.
        args = "password=%s port=%s minutes=%d debug=%d ipv6=%s" % (
            base64.b64encode(self.key.__str__()),
            str(self.accport),
            constants.ACCELERATE_DAEMON_TIMEOUT,
            int(utils.VERBOSITY),
            self.runner.accelerate_ipv6,
        )
        if constants.ACCELERATE_MULTI_KEY:
            args += " multi_key=yes"
        # Inject host variables (optionally from a different inventory host)
        # so the module runs with the right context.
        inject = dict(password=self.key)
        if getattr(self.runner, 'accelerate_inventory_host', False):
            inject = utils.combine_vars(inject, self.runner.inventory.get_variables(self.runner.accelerate_inventory_host))
        else:
            inject = utils.combine_vars(inject, self.runner.inventory.get_variables(self.host))
        vvvv("attempting to start up the accelerate daemon...")
        self.ssh.connect()
        tmp_path = self.runner._make_tmp_path(self.ssh)
        return self.runner._execute_module(self.ssh, tmp_path, 'accelerate', args, inject=inject)
    def connect(self, allow_ssh=True):
        ''' activates the connection object '''
        # Try to reach the accelerate daemon directly; on failure (and when
        # allow_ssh is True) start the daemon over SSH and retry exactly once
        # with allow_ssh=False.
        try:
            if not self.is_connected:
                wrong_user = False
                tries = 3
                self.conn = socket.socket()
                self.conn.settimeout(constants.ACCELERATE_CONNECT_TIMEOUT)
                vvvv("attempting connection to %s via the accelerated port %d" % (self.host,self.accport))
                while tries > 0:
                    try:
                        self.conn.connect((self.host,self.accport))
                        break
                    except socket.error:
                        vvvv("connection to %s failed, retrying..." % self.host)
                        time.sleep(0.1)
                        tries -= 1
                if tries == 0:
                    vvv("Could not connect via the accelerated connection, exceeded # of tries")
                    raise AnsibleError("FAILED")
                elif wrong_user:
                    # NOTE(review): wrong_user is always False at this point
                    # (it's only set further below) -- this branch looks
                    # unreachable; confirm against upstream history before
                    # relying on it.
                    vvv("Restarting daemon with a different remote_user")
                    raise AnsibleError("WRONG_USER")
                self.conn.settimeout(constants.ACCELERATE_TIMEOUT)
                if not self.validate_user():
                    # the accelerated daemon was started with a
                    # different remote_user. The above command
                    # should have caused the accelerate daemon to
                    # shutdown, so we'll reconnect.
                    wrong_user = True
        # Python 2-only syntax; the error message is matched by substring
        # ("WRONG_USER"/"FAILED") to pick the recovery path.
        except AnsibleError, e:
            if allow_ssh:
                if "WRONG_USER" in e:
                    vvv("Switching users, waiting for the daemon on %s to shutdown completely..." % self.host)
                    time.sleep(5)
                vvv("Falling back to ssh to startup accelerated mode")
                res = self._execute_accelerate_module()
                if not res.is_successful():
                    raise AnsibleError("Failed to launch the accelerated daemon on %s (reason: %s)" % (self.host,res.result.get('msg')))
                return self.connect(allow_ssh=False)
            else:
                raise AnsibleError("Failed to connect to %s:%s" % (self.host,self.accport))
        self.is_connected = True
        return self
def send_data(self, data):
    """Send *data* over the accelerated socket, prefixed with an
    8-byte big-endian (network order) length header."""
    header = struct.pack('!Q', len(data))
    payload = header + data
    return self.conn.sendall(payload)
def recv_data(self):
    """Receive one length-prefixed message from the accelerated socket.

    Returns the payload, or None if the peer closed the connection
    mid-message; raises AnsibleError on socket timeout.
    """
    header_len = 8 # size of a packed unsigned long long
    data = b""
    try:
        vvvv("%s: in recv_data(), waiting for the header" % self.host)
        # First read exactly 8 bytes: the big-endian payload length
        # written by send_data() on the other end.
        while len(data) < header_len:
            d = self.conn.recv(header_len - len(data))
            if not d:
                vvvv("%s: received nothing, bailing out" % self.host)
                return None
            data += d
        vvvv("%s: got the header, unpacking" % self.host)
        data_len = struct.unpack('!Q',data[:header_len])[0]
        # Any bytes past the header already belong to the payload.
        data = data[header_len:]
        vvvv("%s: data received so far (expecting %d): %d" % (self.host,data_len,len(data)))
        # Keep reading until the full payload has arrived; recv() may
        # return fewer bytes than requested.
        while len(data) < data_len:
            d = self.conn.recv(data_len - len(data))
            if not d:
                vvvv("%s: received nothing, bailing out" % self.host)
                return None
            vvvv("%s: received %d bytes" % (self.host, len(d)))
            data += d
        vvvv("%s: received all of the data, returning" % self.host)
        return data
    except socket.timeout:
        raise AnsibleError("timed out while waiting to receive data")
def validate_user(self):
    '''
    Checks the remote uid of the accelerated daemon vs. the
    one specified for this play and will cause the accel
    daemon to exit if they don't match

    Returns True when the daemon accepted the user, False when it
    reported failure (daemon will shut down for a restart).
    '''
    vvvv("%s: sending request for validate_user" % self.host)
    data = dict(
        mode='validate_user',
        username=self.user,
    )
    data = utils.jsonify(data)
    data = utils.encrypt(self.key, data)
    # send_data() proxies sendall(), which returns None on success, so a
    # truthy return value indicates a send failure.
    if self.send_data(data):
        raise AnsibleError("Failed to send command to %s" % self.host)
    vvvv("%s: waiting for validate_user response" % self.host)
    while True:
        # we loop here while waiting for the response, because a
        # long running command may cause us to receive keepalive packets
        # ({"pong":"true"}) rather than the response we want.
        response = self.recv_data()
        if not response:
            raise AnsibleError("Failed to get a response from %s" % self.host)
        response = utils.decrypt(self.key, response)
        response = utils.parse_json(response)
        if "pong" in response:
            # it's a keepalive, go back to waiting
            vvvv("%s: received a keepalive packet" % self.host)
            continue
        else:
            vvvv("%s: received the validate_user response: %s" % (self.host, response))
            break
    if response.get('failed'):
        return False
    else:
        return response.get('rc') == 0
def exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
    ''' run a command on the remote host

    Sends an encrypted 'command' message to the accelerate daemon and
    returns (rc, '', stdout, stderr). Pipelining (in_data) is not
    supported by this transport.
    '''
    if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported:
        raise errors.AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method)
    if in_data:
        raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
    if executable == "":
        executable = constants.DEFAULT_EXECUTABLE
    # Wrap the command for privilege escalation when requested.
    if self.runner.become and sudoable:
        cmd, prompt, success_key = utils.make_become_cmd(cmd, become_user, executable, self.runner.become_method, '', self.runner.become_exe)
    vvv("EXEC COMMAND %s" % cmd)
    data = dict(
        mode='command',
        cmd=cmd,
        tmp_path=tmp_path,
        executable=executable,
    )
    data = utils.jsonify(data)
    data = utils.encrypt(self.key, data)
    # sendall() returns None on success; truthy means the send failed.
    if self.send_data(data):
        raise AnsibleError("Failed to send command to %s" % self.host)
    while True:
        # we loop here while waiting for the response, because a
        # long running command may cause us to receive keepalive packets
        # ({"pong":"true"}) rather than the response we want.
        response = self.recv_data()
        if not response:
            raise AnsibleError("Failed to get a response from %s" % self.host)
        response = utils.decrypt(self.key, response)
        response = utils.parse_json(response)
        if "pong" in response:
            # it's a keepalive, go back to waiting
            vvvv("%s: received a keepalive packet" % self.host)
            continue
        else:
            vvvv("%s: received the response" % self.host)
            break
    return (response.get('rc',None), '', response.get('stdout',''), response.get('stderr',''))
def put_file(self, in_path, out_path):
    ''' transfer a file from local to remote

    Streams the local file to the daemon in CHUNK_SIZE pieces; each
    chunk is a separate encrypted 'put' message, with last=True marking
    the final one. Each chunk is individually acknowledged.
    '''
    vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
    if not os.path.exists(in_path):
        raise AnsibleFileNotFound("file or module does not exist: %s" % in_path)
    fd = file(in_path, 'rb')  # Python 2 `file` builtin
    fstat = os.stat(in_path)
    try:
        vvv("PUT file is %d bytes" % fstat.st_size)
        last = False
        # <= (not <) so that an empty file still sends one (empty)
        # chunk carrying last=True.
        while fd.tell() <= fstat.st_size and not last:
            vvvv("file position currently %ld, file size is %ld" % (fd.tell(), fstat.st_size))
            data = fd.read(CHUNK_SIZE)
            if fd.tell() >= fstat.st_size:
                last = True
            data = dict(mode='put', data=base64.b64encode(data), out_path=out_path, last=last)
            if self.runner.become:
                # Ask the daemon to chown the file to the become user.
                data['user'] = self.runner.become_user
            data = utils.jsonify(data)
            data = utils.encrypt(self.key, data)
            if self.send_data(data):
                raise AnsibleError("failed to send the file to %s" % self.host)
            # Per-chunk acknowledgement from the daemon.
            response = self.recv_data()
            if not response:
                raise AnsibleError("Failed to get a response from %s" % self.host)
            response = utils.decrypt(self.key, response)
            response = utils.parse_json(response)
            if response.get('failed',False):
                raise AnsibleError("failed to put the file in the requested location")
    finally:
        fd.close()
        # Drain and check the daemon's final status message.
        vvvv("waiting for final response after PUT")
        response = self.recv_data()
        if not response:
            raise AnsibleError("Failed to get a response from %s" % self.host)
        response = utils.decrypt(self.key, response)
        response = utils.parse_json(response)
        if response.get('failed',False):
            raise AnsibleError("failed to put the file in the requested location")
def fetch_file(self, in_path, out_path):
    ''' save a remote file to the specified path

    Requests `in_path` from the accelerate daemon and writes the
    base64-decoded chunks to `out_path`, acking each chunk until the
    daemon marks one as last.
    '''
    vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)

    data = dict(mode='fetch', in_path=in_path)
    data = utils.jsonify(data)
    data = utils.encrypt(self.key, data)
    # send_data() returns None (falsy) on success.
    if self.send_data(data):
        raise AnsibleError("failed to initiate the file fetch with %s" % self.host)

    fh = open(out_path, "w")
    try:
        # Renamed from `bytes` so the builtin is no longer shadowed.
        bytes_written = 0
        while True:
            response = self.recv_data()
            if not response:
                raise AnsibleError("Failed to get a response from %s" % self.host)
            response = utils.decrypt(self.key, response)
            response = utils.parse_json(response)
            if response.get('failed', False):
                raise AnsibleError("Error during file fetch, aborting")
            out = base64.b64decode(response['data'])
            fh.write(out)
            bytes_written += len(out)
            # send an empty response back to signify we
            # received the last chunk without errors
            data = utils.jsonify(dict())
            data = utils.encrypt(self.key, data)
            if self.send_data(data):
                raise AnsibleError("failed to send ack during file fetch")
            if response.get('last', False):
                break
    finally:
        # we don't currently care about this final response,
        # we just receive it and drop it. It may be used at some
        # point in the future or we may just have the put/fetch
        # operations not send back a final response at all
        try:
            response = self.recv_data()
        finally:
            # Close the output file even if draining the final response
            # raises (e.g. socket timeout) -- previously it could leak.
            fh.close()

    vvv("FETCH wrote %d bytes to %s" % (bytes_written, out_path))
def close(self):
    ''' terminate the connection '''
    # Be a good citizen: close the socket, but never let a close-time
    # failure mask the caller's own control flow.
    try:
        self.conn.close()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate; socket-level errors are safely ignored.
        pass
| gpl-3.0 |
inspyration/odoo | addons/website_blog/__openerp__.py | 10 | 1768 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # Module identity shown in the Odoo/OpenERP apps list.
    'name': 'Blogs',
    'category': 'Website',
    'website': 'https://www.odoo.com/page/blog-engine',
    'summary': 'News, Blogs, Announces, Discussions',
    'version': '1.0',
    'description': """
OpenERP Blog
============
""",
    'author': 'OpenERP SA',
    # Modules that must be installed before this one.
    'depends': ['knowledge', 'website_mail', 'website_partner'],
    # Resources loaded at install/update time; order matters (data and
    # views before the ACLs/record rules that reference them).
    'data': [
        'data/website_blog_data.xml',
        'views/website_blog_views.xml',
        'views/website_blog_templates.xml',
        'security/ir.model.access.csv',
        'security/website_blog.xml',
    ],
    # Demo data, loaded only in databases created with demo content.
    'demo': [
        'data/website_blog_demo.xml'
    ],
    'test': [
        'tests/test_website_blog.yml'
    ],
    # Client-side QWeb templates bundled with the module.
    'qweb': [
        'static/src/xml/*.xml'
    ],
    'installable': True,
    # True makes this a standalone app (top-level entry), not a plugin.
    'application': True,
}
| agpl-3.0 |
apache/beam | sdks/python/apache_beam/io/gcp/datastore/v1new/datastore_write_it_test.py | 5 | 2578 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""An integration test for datastore_write_it_pipeline
This test creates entities and writes them to Cloud Datastore. Subsequently,
these entities are read from Cloud Datastore, compared to the expected value
for the entity, and deleted.
There is no output; instead, we use `assert_that` transform to verify the
results in the pipeline.
"""
# pytype: skip-file
import logging
import random
import unittest
from datetime import datetime
from hamcrest.core.core.allof import all_of
from nose.plugins.attrib import attr
from apache_beam.testing.pipeline_verifiers import PipelineStateMatcher
from apache_beam.testing.test_pipeline import TestPipeline
try:
from apache_beam.io.gcp.datastore.v1new import datastore_write_it_pipeline
except ImportError:
datastore_write_it_pipeline = None # type: ignore
class DatastoreWriteIT(unittest.TestCase):
  """Integration test that writes entities to Cloud Datastore, verifies
  them, and deletes them via datastore_write_it_pipeline."""

  NUM_ENTITIES = 1001
  LIMIT = 500

  def run_datastore_write(self, limit=None):
    """Run the write pipeline against a uniquely named Datastore kind."""
    pipeline = TestPipeline(is_integration_test=True)
    # A timestamp+random suffix keeps concurrent runs from colliding
    # on the same Datastore kind.
    timestamp = datetime.now().strftime("%m%d%H%M%S")
    suffix = random.randint(0, 100000)
    kind = 'testkind%s%d' % (timestamp, suffix)
    verifiers = [PipelineStateMatcher()]
    opts = {
        'kind': kind,
        'num_entities': self.NUM_ENTITIES,
        'on_success_matcher': all_of(*verifiers),
    }
    if limit is not None:
      opts['limit'] = limit
    datastore_write_it_pipeline.run(
        pipeline.get_full_options_as_args(**opts))

  @attr('IT')
  @unittest.skipIf(
      datastore_write_it_pipeline is None, 'GCP dependencies are not installed')
  def test_datastore_write_limit(self):
    self.run_datastore_write(limit=self.LIMIT)
if __name__ == '__main__':
  # Surface pipeline progress when the test module is run directly.
  logging.getLogger().setLevel(logging.INFO)
  unittest.main()
| apache-2.0 |
ict-felix/stack | vt_manager_kvm/src/python/vt_manager_kvm/controller/dispatchers/ui/GUIdispatcher.py | 1 | 17272 | from django.core.urlresolvers import reverse
from django.forms.models import modelformset_factory
from django.shortcuts import get_object_or_404
from django.http import HttpResponseRedirect, HttpResponse
from django.views.generic import simple
from django.views.generic import list_detail, simple
from django.views.generic.create_update import apply_extra_context
from vt_manager_kvm.models import *
from vt_manager_kvm.communication.utils.XmlHelper import XmlHelper
import uuid, time, logging
from django.template import loader, RequestContext
from django.core.xheaders import populate_xheaders
from django.contrib import messages
#News
from vt_manager_kvm.controller.drivers.VTDriver import VTDriver
from vt_manager_kvm.utils.HttpUtils import HttpUtils
from vt_manager_kvm.models.NetworkInterface import NetworkInterface
from vt_manager_kvm.models.MacRange import MacRange
from vt_manager_kvm.controller.dispatchers.xmlrpc.InformationDispatcher import InformationDispatcher
from vt_manager_kvm.controller.dispatchers.forms.NetworkInterfaceForm import MgmtBridgeForm
from vt_manager_kvm.controller.dispatchers.forms.ServerForm import ServerForm
from django.db import transaction
def userIsIslandManager(request):
    """Render the 'not admin' page when the requester is not a superuser.

    Returns None for superusers so callers can fall through to the
    protected view logic.
    """
    if request.user.is_superuser:
        return None
    return simple.direct_to_template(
        request,
        template='not_admin.html',
        extra_context={'user': request.user},
    )
@transaction.commit_on_success
def servers_crud(request, server_id=None):
    """Show a page for the user to add/edit an VTServer """
    # Superuser-only view: everyone else gets the "not admin" page.
    if (not request.user.is_superuser):
        return simple.direct_to_template(request,
            template = 'not_admin.html',
            extra_context = {'user':request.user},
        )
    # Map project/slice names -> ids for the VMs hosted on this server,
    # consumed by the template to render grouping links.
    vmProjects = {}
    vmSlices = {}
    try:
        for vm in VTDriver.getVMsInServer(VTDriver.getServerById(server_id)):
            if vm.projectName not in vmProjects:
                vmProjects[vm.projectName] = vm.projectId
            if vm.sliceName not in vmSlices:
                vmSlices[vm.sliceName] = vm.sliceId
    except Exception as e:
        # Best-effort: a new server (server_id None) or a driver error
        # simply leaves the maps empty.
        print e
        pass
    # NOTE(review): serverFormClass/ifaceFormClass are created but never
    # used below (ServerForm is used instead) -- candidates for removal.
    serverFormClass = HttpUtils.getFormFromModel(VTServer)
    ifaceFormClass = HttpUtils.getFormFromModel(NetworkInterface)
    IfaceFormSetClass = modelformset_factory(NetworkInterface)
    if server_id != None:
        server = get_object_or_404(VTServer, pk=server_id)
    else:
        server = None
    if request.method == "GET":
        #serverForm = serverFormClass(instance=server)
        serverForm = ServerForm(instance=server, prefix ="server")
        if server != None:
            # Pre-populate the management-bridge form from the existing
            # management interface, when one exists.
            mgmt = server.getNetworkInterfaces().filter(isMgmt = True)
            if mgmt:
                mgmt = mgmt.get()
                mgmtIfaceForm = MgmtBridgeForm({'mgmtBridge-name':mgmt.getName(), 'mgmtBridge-mac':mgmt.getMacStr()}, prefix ="mgmtBridge")
            else:
                mgmtIfaceForm = MgmtBridgeForm(prefix ="mgmtBridge")
            data = server.getNetworkInterfaces().filter(isMgmt = False)
            if data:
                IfaceFormSetClass = modelformset_factory(NetworkInterface,extra = 0)
                ifaceformset = IfaceFormSetClass(queryset= data)
            # NOTE(review): when an existing server has no data
            # interfaces, `ifaceformset` is never bound on this path and
            # the final context build would raise NameError -- confirm.
        else:
            mgmtIfaceForm = MgmtBridgeForm(prefix ="mgmtBridge")
            ifaceformset = IfaceFormSetClass(queryset= NetworkInterface.objects.none())
    elif request.method == "POST":
        #serverForm = serverFormClass(request.POST, instance=server)
        serverForm = ServerForm(request.POST, instance=server, prefix ="server")
        ifaceformset = IfaceFormSetClass(request.POST)
        mgmtIfaceForm = MgmtBridgeForm(request.POST, prefix ="mgmtBridge")
        if serverForm.is_valid() and ifaceformset.is_valid() and mgmtIfaceForm.is_valid():
            ifaces = ifaceformset.save(commit = False)
            if server == None:
                server = serverForm.save(commit = False)
            try:
                server = VTDriver.crudServerFromInstance(server)
                VTDriver.setMgmtBridge(request, server)
                VTDriver.crudDataBridgeFromInstance(server, ifaces,request.POST.getlist("DELETE"))
            except Exception as e:
                print e
                e = HttpUtils.processException(e)
                context = {"exception":e, "serverForm": serverForm, 'vmProjects': vmProjects, 'vmSlices': vmSlices,'ifaceformset' : ifaceformset, 'mgmtIfaceForm' : mgmtIfaceForm}
                if server_id != None: context["server"] = server
                return simple.direct_to_template(
                    request,
                    template="servers/servers_crud.html",
                    extra_context=context,
                )
            # Returns to server's admin page and rollback transactions
            return HttpResponseRedirect('/servers/admin/')
    else:
        # NOTE(review): HttpResponseNotAllowed expects a single iterable
        # of permitted methods; passing two positional args sends "POST"
        # as the response body instead -- confirm and fix.
        return HttpResponseNotAllowed("GET", "POST")
    # GET, or POST with invalid forms: render the CRUD page.
    context = {"serverForm": serverForm, 'vmProjects': vmProjects, 'vmSlices': vmSlices,'ifaceformset' : ifaceformset, 'mgmtIfaceForm' : mgmtIfaceForm}
    if server_id != None: context["server"] = server
    return simple.direct_to_template(
        request,
        template="servers/servers_crud.html",
        extra_context=context,
    )
def admin_servers(request):
    """List every virtualization server for island-manager (superuser) users."""
    if not request.user.is_superuser:
        return simple.direct_to_template(
            request,
            template='not_admin.html',
            extra_context={'user': request.user},
        )
    return simple.direct_to_template(
        request,
        template="servers/admin_servers.html",
        extra_context={"servers_ids": VTDriver.getAllServers()},
    )
def delete_server(request, server_id):
    """Confirm (GET) and perform (POST) deletion of a virtualization server."""
    if not request.user.is_superuser:
        return simple.direct_to_template(
            request,
            template='not_admin.html',
            extra_context={'user': request.user},
        )
    if request.method == 'POST':
        try:
            VTDriver.deleteServer(VTDriver.getServerById(server_id))
            return HttpResponseRedirect(reverse('dashboard'))
        except Exception as e:
            # Deletion failed: show the confirmation page again with the
            # processed error message.
            logging.error(e)
            return simple.direct_to_template(
                request,
                template='servers/delete_server.html',
                extra_context={
                    'user': request.user,
                    'exception': HttpUtils.processException(e),
                    'next': reverse("admin_servers"),
                },
            )
    elif request.method == 'GET':
        return simple.direct_to_template(
            request,
            template='servers/delete_server.html',
            extra_context={
                'user': request.user,
                'next': reverse("admin_servers"),
                'object': VTDriver.getServerById(server_id),
            },
        )
def action_vm(request, server_id, vm_id, action):
    """Dispatch a per-VM action.

    'list' and 'check_status' render templates; 'force_update_*' poke the
    information dispatcher; anything else is forwarded to the
    provisioning dispatcher as a VM lifecycle action.
    """
    if (not request.user.is_superuser):
        return simple.direct_to_template(request,
            template = 'not_admin.html',
            extra_context = {'user':request.user},
        )
    if(action == 'list'):
        return simple.direct_to_template(
            request, template="servers/server_vm_details.html",
            extra_context={"vm": VTDriver.getVMbyId(vm_id), "server_id":server_id}
        )
    elif(action == 'check_status'):
        #XXX: Do this function if needed
        return simple.direct_to_template(
            request, template="servers/list_vm.html",
            extra_context={"vm": VM.objects.get(id = vm_id)}
        )
    elif(action == 'force_update_server'):
        # Refresh the daemon's view of every active VM on this server.
        InformationDispatcher.forceListActiveVMs(serverID=server_id)
    elif(action == 'force_update_vm'):
        InformationDispatcher.forceListActiveVMs(vmID=vm_id)
    else:
        #XXX: serverUUID should be passed in a different way
        VTDriver.PropagateActionToProvisioningDispatcher(vm_id, VTServer.objects.get(id=server_id).uuid, action)
    #return HttpResponseRedirect(reverse('edit_server', args = [server_id]))
    # Non-template actions reply with an empty 200 (AJAX-style caller).
    return HttpResponse("")
def subscribeEthernetRanges(request, server_id):
    """Subscribe/unsubscribe a server to MAC (Ethernet) ranges.

    GET renders the subscription form; POST applies the selection and
    redirects back to the server edit page. Superuser-only.
    """
    if (not request.user.is_superuser):
        return simple.direct_to_template(request,
            template = 'not_admin.html',
            extra_context = {'user':request.user},
        )
    macRanges = MacRange.objects.all()
    if server_id != None:
        server = get_object_or_404(VTServer, pk=server_id)
    else:
        raise Exception ("NO SERVER")
    if request.method == "GET":
        return simple.direct_to_template(request,
            template = 'servers/servers_subscribeEthernetRanges.html',
            extra_context = {'server': server, 'macRanges':macRanges},
        )
    elif request.method=='POST':
        VTDriver.manageEthernetRanges(request,server,macRanges)
        return HttpResponseRedirect(reverse('edit_server', args = [server_id]))
    else:
        # Bug fix: HttpResponseNotAllowed takes ONE iterable of permitted
        # methods. Passing ("GET", "POST") as two positional arguments made
        # "POST" the response *body* and listed only GET in the Allow header.
        return HttpResponseNotAllowed(["GET", "POST"])
def subscribeIp4Ranges(request, server_id):
    """Subscribe/unsubscribe a server to IPv4 ranges.

    GET renders the subscription form; POST applies the selection and
    redirects back to the server edit page. Superuser-only.
    """
    if (not request.user.is_superuser):
        return simple.direct_to_template(request,
            template = 'not_admin.html',
            extra_context = {'user':request.user},
        )
    ipRanges = Ip4Range.objects.all()
    if server_id != None:
        server = get_object_or_404(VTServer, pk=server_id)
    else:
        raise Exception ("NO SERVER")
    if request.method == "GET":
        return simple.direct_to_template(request,
            template = 'servers/servers_subscribeIp4Ranges.html',
            extra_context = {'server': server, 'ipRanges':ipRanges},
        )
    elif request.method=='POST':
        VTDriver.manageIp4Ranges(request,server,ipRanges)
        return HttpResponseRedirect(reverse('edit_server', args = [server_id]))
    else:
        # Bug fix: HttpResponseNotAllowed takes ONE iterable of permitted
        # methods. Passing ("GET", "POST") as two positional arguments made
        # "POST" the response *body* and listed only GET in the Allow header.
        return HttpResponseNotAllowed(["GET", "POST"])
def list_vms(request, server_id):
if (not request.user.is_superuser):
return simple.direct_to_template(request,
template = 'not_admin.html',
extra_context = {'user':request.user},
)
vmProjects = {}
vmSlices = {}
try:
for vm in VTDriver.getVMsInServer(VTDriver.getServerById(server_id)):
if vm.projectName not in vmProjects:
vmProjects[vm.projectName] = vm.projectId
if vm.sliceName not in vmSlices:
vmSlices[vm.sliceName] = vm.sliceId
except Exception as e:
print e
pass
server = get_object_or_404(VTServer, pk=server_id)
context = { 'vmProjects': vmProjects, 'vmSlices': vmSlices,'server':server}
return simple.direct_to_template(
request,
template="servers/servers_list_vms.html",
extra_context=context,
)
'''
Networking point of entry
'''
from vt_manager_kvm.controller.networking.EthernetController import EthernetController
from vt_manager_kvm.controller.networking.Ip4Controller import Ip4Controller
from vt_manager_kvm.models.MacRange import MacRange
# Action tokens recognised by the manageIp4/manageEthernet dispatch views.
NETWORKING_ACTION_ADD="add"
NETWORKING_ACTION_EDIT="edit"
NETWORKING_ACTION_DELETE="delete"
NETWORKING_ACTION_SHOW="show"
NETWORKING_ACTION_ADDEXCLUDED="addExcluded"
NETWORKING_ACTION_REMOVEXCLUDED="removeExcluded"
# None is deliberately a valid entry: it selects the default listing/detail
# behaviour when no explicit action is present in the URL.
NETWORKING_POSSIBLE_ACTIONS=(NETWORKING_ACTION_ADD,NETWORKING_ACTION_DELETE,NETWORKING_ACTION_EDIT,NETWORKING_ACTION_SHOW,NETWORKING_ACTION_ADDEXCLUDED,NETWORKING_ACTION_REMOVEXCLUDED,None)
def networkingDashboard(request):#,rangeId):
    """Render the networking dashboard with MAC and IPv4 range listings."""
    context = {
        "section": "networking",
        "subsection": "None",
        "macRanges": EthernetController.listRanges(),
        "MacRange": MacRange,
        "ip4Ranges": Ip4Controller.listRanges(),
        "Ip4Range": Ip4Range,
    }
    return simple.direct_to_template(
        request,
        extra_context=context,
        template="networking/index.html",
    )
def manageIp4(request,rangeId=None,action=None,ip4Id=None):
    """CRUD dispatcher for IPv4 ranges.

    `action` selects the operation (see NETWORKING_POSSIBLE_ACTIONS);
    None/'show' render the listing or a single range's detail page.
    Falls through to the range listing when no branch returns.
    """
    if not action in NETWORKING_POSSIBLE_ACTIONS:
        raise Exception("Unknown action")
    #Define context
    extra_context = {"section": "networking","subsection":"ip4"+str(action),}
    #Add process
    if (action == NETWORKING_ACTION_ADD):
        if request.method == "GET":
            #Show form
            extra_context["form"] = HttpUtils.getFormFromModel(Ip4Range)
            return simple.direct_to_template(
                request,
                extra_context = extra_context,
                template="networking/ip4/rangeCrud.html",
            )
            # NOTE(review): unreachable -- the return above always fires.
            return
            # return HttpResponseRedirect("/networking/ip4/")
        elif request.method == "POST":
            try:
                instance = HttpUtils.getInstanceFromForm(request,Ip4Range)
                #Create Range
                Ip4Controller.createRange(instance)
                return HttpResponseRedirect("/networking/ip4/")
            except Exception as e:
                print e
                # Re-bind the form with the validation/processing errors.
                extra_context["form"] = HttpUtils.processExceptionForm(e,request,Ip4Range)
            #Process creation query
            #return HttpResponseRedirect("/networking/ip4/")
            # Only reached on creation failure: re-render the form.
            return simple.direct_to_template(
                request,
                extra_context = extra_context,
                template="networking/ip4/rangeCrud.html",
            )
    #Show
    if ((action == None) or (action==NETWORKING_ACTION_SHOW)) and (not rangeId==None):
        instance = Ip4Controller.getRange(rangeId)
        extra_context["range"] = instance
        #return HttpResponseRedirect("/networking/ip4/")
        return simple.direct_to_template(
            request,
            extra_context = extra_context,
            template="networking/ip4/rangeDetail.html",
        )
    #Edit
    #TODO
    #Add excluded Ip
    if (action == NETWORKING_ACTION_ADDEXCLUDED) and (request.method == "POST"):
        if not request.method == "POST":
            # NOTE(review): dead check -- the guard above already
            # requires POST.
            raise Exception("Invalid method")
        try:
            instance = Ip4Controller.getRange(rangeId)
            extra_context["range"] = instance
            #Create excluded
            Ip4Controller.addExcludedIp4(instance,request)
            return HttpResponseRedirect("/networking/ip4/"+rangeId)
        except Exception as e:
            print e
            extra_context["errors"] = HttpUtils.processException(e)
            pass
        # Only reached on failure: show the detail page with errors.
        return simple.direct_to_template(
            request,
            extra_context = extra_context,
            template="networking/ip4/rangeDetail.html",
        )
    #Release excluded Ip
    if (action == NETWORKING_ACTION_REMOVEXCLUDED) and (request.method == "POST"):
        try:
            instance = Ip4Controller.getRange(rangeId)
            #Create excluded
            Ip4Controller.removeExcludedIp4(instance,ip4Id)
            #FIXME: Why initial instance is not refreshed?
            instance = Ip4Controller.getRange(rangeId)
            extra_context["range"] = instance
            return HttpResponseRedirect("/networking/ip4/"+rangeId)
        except Exception as e:
            print e
            extra_context["errors"] = HttpUtils.processException(e)
            pass
        return simple.direct_to_template(
            request,
            extra_context = extra_context,
            template="networking/ip4/rangeDetail.html",
        )
    #Delete
    if (action == NETWORKING_ACTION_DELETE) and (request.method == "POST"):
        try:
            Ip4Controller.deleteRange(rangeId)
            return HttpResponseRedirect("/networking/ip4/")
        except Exception as e:
            print e
            # Fall through to the listing, which displays the errors.
            extra_context["errors"] = HttpUtils.processException(e)
            pass
    # Default: render the range listing.
    extra_context["ranges"] = Ip4Controller.listRanges()
    template = "networking/ip4/index.html"
    return simple.direct_to_template(
        request,
        extra_context = extra_context,
        template=template,
    )
def manageEthernet(request,rangeId=None,action=None,macId=None):
    """CRUD dispatcher for Ethernet (MAC) ranges.

    Mirrors manageIp4: `action` selects the operation (see
    NETWORKING_POSSIBLE_ACTIONS); None/'show' render the listing or a
    single range's detail page. Falls through to the listing.
    """
    if not action in NETWORKING_POSSIBLE_ACTIONS:
        raise Exception("Unknown action")
    #Define context
    extra_context = {"section": "networking","subsection":"ethernet",}
    #Add process
    if (action == NETWORKING_ACTION_ADD):
        if request.method == "GET":
            #Show form
            extra_context["form"] = HttpUtils.getFormFromModel(MacRange)
            return simple.direct_to_template(
                request,
                extra_context = extra_context,
                template="networking/ethernet/rangeCrud.html",
            )
            # NOTE(review): unreachable -- the return above always fires.
            return
        elif request.method == "POST":
            try:
                instance = HttpUtils.getInstanceFromForm(request,MacRange)
                #Create Range
                EthernetController.createRange(instance)
                return HttpResponseRedirect("/networking/ethernet/")
            except Exception as e:
                print e
                # Re-bind the form with the validation/processing errors.
                extra_context["form"] = HttpUtils.processExceptionForm(e,request,MacRange)
            #Process creation query
            # Only reached on creation failure: re-render the form.
            return simple.direct_to_template(
                request,
                extra_context = extra_context,
                template="networking/ethernet/rangeCrud.html",
            )
    #Show
    if ((action == None) or (action==NETWORKING_ACTION_SHOW)) and (not rangeId==None):
        instance = EthernetController.getRange(rangeId)
        extra_context["range"] = instance
        #return HttpResponseRedirect("/networking/ethernet/")
        return simple.direct_to_template(
            request,
            extra_context = extra_context,
            template="networking/ethernet/rangeDetail.html",
        )
    #Edit
    #TODO
    #Add excluded Mac
    if (action == NETWORKING_ACTION_ADDEXCLUDED) and (request.method == "POST"):
        if not request.method == "POST":
            # NOTE(review): dead check -- the guard above already
            # requires POST.
            raise Exception("Invalid method")
        try:
            instance = EthernetController.getRange(rangeId)
            extra_context["range"] = instance
            #Create excluded
            EthernetController.addExcludedMac(instance,request)
            return HttpResponseRedirect("/networking/ethernet/"+rangeId)
        except Exception as e:
            print e
            extra_context["errors"] = HttpUtils.processException(e)
            pass
        # Only reached on failure: show the detail page with errors.
        return simple.direct_to_template(
            request,
            extra_context = extra_context,
            template="networking/ethernet/rangeDetail.html",
        )
    #Release excluded Mac
    if (action == NETWORKING_ACTION_REMOVEXCLUDED) and (request.method == "POST"):
        try:
            instance = EthernetController.getRange(rangeId)
            #Create excluded
            #FIXME: Why initial instance is not refreshed?
            EthernetController.removeExcludedMac(instance,macId)
            instance = EthernetController.getRange(rangeId)
            extra_context["range"] = instance
            return HttpResponseRedirect("/networking/ethernet/"+rangeId)
        except Exception as e:
            print e
            extra_context["errors"] = HttpUtils.processException(e)
            pass
        return simple.direct_to_template(
            request,
            extra_context = extra_context,
            template="networking/ethernet/rangeDetail.html",
        )
    #Delete
    if (action == NETWORKING_ACTION_DELETE) and (request.method == "POST"):
        try:
            EthernetController.deleteRange(rangeId)
            return HttpResponseRedirect("/networking/ethernet/")
        except Exception as e:
            print e
            # Fall through to the listing, which displays the errors.
            extra_context["errors"] = HttpUtils.processException(e)
            pass
    #Listing ranges
    extra_context["ranges"] = EthernetController.listRanges()
    return simple.direct_to_template(
        request,
        extra_context = extra_context,
        template = "networking/ethernet/index.html",
    )
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.