| code<br>stringlengths 3-1.05M | repo_name<br>stringlengths 5-104 | path<br>stringlengths 4-251 | language<br>stringclasses 1 value | license<br>stringclasses 15 values | size<br>int64 3-1.05M |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin
from django.contrib.auth.signals import user_logged_in
from markup_deprecated.templatetags.markup import markdown
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from django.dispatch import receiver
from django.db import models
from django.db.models import Q
#from cms.models import CMSPlugin
from easy_thumbnails.fields import ThumbnailerImageField
from taggit.managers import TaggableManager
from . import validators
from .managers import UserManager
avatar_min_max_dimension = {}
if validators.AVATAR_MIN_DIMENSION:
avatar_min_max_dimension.update({
'min_w': validators.AVATAR_MIN_DIMENSION[0],
'min_h': validators.AVATAR_MIN_DIMENSION[1]
})
if validators.AVATAR_MAX_DIMENSION:
avatar_help_text = _('Please upload an image with a side length between %(min_w)dx%(min_h)d px and %(max_w)dx%(max_h)d px.')
avatar_min_max_dimension.update({
'max_w': validators.AVATAR_MAX_DIMENSION[0],
'max_h': validators.AVATAR_MAX_DIMENSION[1]
})
else:
avatar_help_text = _('Please upload an image with a side length of at least %(min_w)dx%(min_h)d px.')
else:
if validators.AVATAR_MAX_DIMENSION:
avatar_help_text = _('Please upload an image with a side length of at most %(max_w)dx%(max_h)d px.')
avatar_min_max_dimension.update({
'max_w': validators.AVATAR_MAX_DIMENSION[0],
'max_h': validators.AVATAR_MAX_DIMENSION[1]
})
else:
avatar_help_text = ''
avatar_help_text = avatar_help_text % avatar_min_max_dimension
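# For example, with hypothetical validator values AVATAR_MIN_DIMENSION == (120, 120)
# and AVATAR_MAX_DIMENSION == (1000, 1000), the resolved help text would read:
# "Please upload an image with a side length between 120x120 px and 1000x1000 px."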
class BadgeStatus(models.Model):
name = models.CharField(_('Name'), max_length=50)
slug = models.SlugField(_('slug'), max_length=50)
class Meta:
ordering = ('name',)
verbose_name = _('Status')
verbose_name_plural = _('Statuses')
def __unicode__(self):
return self.name
class User(AbstractBaseUser, PermissionsMixin):
username = models.CharField(_("Username"), max_length=256, unique=True)
email = models.EmailField(_("E-Mail"), unique=True)
first_name = models.CharField(_("First name"), max_length=30)
last_name = models.CharField(_("Last name"), max_length=30)
is_staff = models.BooleanField(_("Staff status"), default=False,
help_text=_("Designates whether the user can log into this admin site."))
is_active = models.BooleanField(_("Active"), default=True,
help_text=_("Designates whether this user should be treated as active."
" Unselect this instead of deleting accounts."))
date_joined = models.DateTimeField(_("Date joined"), default=timezone.now)
short_info = models.TextField(_('short info'), blank=True)
avatar = ThumbnailerImageField(
_('avatar'), upload_to='avatars', null=True, blank=True,
help_text=avatar_help_text,
validators=[validators.avatar_dimension, validators.avatar_format]
)
num_accompanying_children = models.PositiveIntegerField(
_('Number of accompanying children'), null=True, blank=True, default=0)
age_accompanying_children = models.CharField(_("Age of accompanying children"),
blank=True, max_length=20)
twitter = models.CharField(_("Twitter"), blank=True, max_length=20,
validators=[validators.twitter_username])
website = models.URLField(_("Website"), blank=True)
organisation = models.TextField(_('Organisation'), blank=True)
display_name = models.CharField(_("Display name"), max_length=255,
help_text=_('What name should be displayed to other people?'),
blank=True)
addressed_as = models.CharField(_("Address me as"), max_length=255,
        help_text=_('How should we address you in mails and dialogs throughout the website?'),
blank=True)
accept_pysv_conferences = models.BooleanField(_('Allow copying to PySV conferences'),
default=False, blank=True)
accept_ep_conferences = models.BooleanField(_('Allow copying to EPS conferences'),
default=False, blank=True)
accept_job_offers = models.BooleanField(_('Allow sponsors to send job offers'),
default=False, blank=True)
badge_status = models.ManyToManyField('accounts.BadgeStatus', blank=True,
verbose_name=_('Badge status'), related_name='users')
sessions_attending = models.ManyToManyField('schedule.Session', blank=True,
related_name='attendees', verbose_name=_('Trainings'),
limit_choices_to=Q(kind__slug__in=settings.SCHEDULE_ATTENDING_POSSIBLE))
tags = TaggableManager(blank=True)
objects = UserManager()
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = ['email']
class Meta:
verbose_name = _("User")
verbose_name_plural = _("Users")
ordering = ['last_name', 'first_name', 'email']
permissions = (
('send_user_mails', _('Allow sending mails to users through the website')),
('export_guidebook', _('Allow export of guidebook data')),
('see_checkin_info', _('Allow seeing check-in information')),
('perform_purchase', _('Allow performing purchases'))
)
@cached_property
def short_info_rendered(self):
return markdown(self.short_info, 'safe')
def __unicode__(self):
return self.get_full_name()
@property
def full_name(self):
"""Legacy support for older views."""
return self.get_full_name()
def get_full_name(self):
"""Return either the full name or email if no name has been set."""
full_name = self.email
if self.first_name and self.last_name:
full_name = u'{} {}'.format(self.first_name, self.last_name)
return full_name
def get_short_name(self):
"""Return either the shortened full name or email if no name has been set."""
short_name = self.email
if self.first_name and self.last_name:
short_name = u'{}. {}'.format(self.first_name[0], self.last_name)
return short_name
def get_display_name(self):
"""Return either the display_name for get_full_name of the user."""
return self.display_name or self.get_full_name()
def signup(self, first_name, last_name, commit=True):
"""Update the fields required for sign-up and accept the terms and conditions."""
self.first_name = first_name
self.last_name = last_name
if commit:
self.save()
#class UserListPlugin(CMSPlugin):
#
# badge_status = models.ManyToManyField('BadgeStatus', blank=True,
# verbose_name=_('Status'))
# additional_names = models.TextField(_('Additional names'), blank=True,
# default='', help_text=_('Users without account. One name per line.'))
#
# def copy_relations(self, oldinstance):
# self.badge_status = oldinstance.badge_status.all()
#
# @property
# def additional_names_list(self):
# return list(set(bs for bs in self.additional_names.split('\n') if bs))
#
@receiver(user_logged_in)
def show_logged_in_message(request, user, **kwargs):
messages.success(request, _("You've logged in successfully."),
fail_silently=True)
| pysv/djep | pyconde/accounts/models.py | Python | bsd-3-clause | 7,406 |
"""
@author: Maneesh D
@email: maneeshd77@gmail.com
"""
from multiprocessing import Process, Queue
def is_prime(num):
    if num < 2:
        # 0 and 1 are not prime; this guard keeps the helper safe for any start value
        return False
    limit = int(num ** 0.5) + 1
    for i in range(2, limit):
        if num % i == 0:
            return False
    return True
def sum_primes(start, end, q):
total = 0
for num in range(start, end+1):
if is_prime(num):
total += num
q.put(total)
def main():
q = Queue()
    p1 = Process(target=sum_primes, args=(2, 500000, q))
    p2 = Process(target=sum_primes, args=(500001, 1000000, q))
    p3 = Process(target=sum_primes, args=(1000001, 1500000, q))
    p4 = Process(target=sum_primes, args=(1500001, 2000000, q))
p1.start()
p2.start()
p3.start()
p4.start()
p1.join()
p2.join()
p3.join()
p4.join()
total = q.get() + q.get() + q.get() + q.get()
print("The Sum of prime numbers till 2 million is %d" % total)
if __name__ == '__main__':
from timeit import Timer
t = Timer(stmt="main()", setup="from primes_multi_processing import main")
print("Execution Time= %.3f Seconds" % t.timeit(number=1))
| maneeshd/PyTutorial | Advanced/Multi-Threading & Processing/multiprocessing/primes_multi_processing.py | Python | mit | 1,126 |
#!/usr/bin/env python
#
# Copyright 2005,2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
import audio_alsa
class qa_alsa (gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block ()
def tearDown (self):
self.tb = None
def test_000_nop (self):
"""Just see if we can import the module...
They may not have ALSA drivers, etc. Don't try to run anything"""
pass
if __name__ == '__main__':
gr_unittest.main ()
| GREO/GNU-Radio | gr-audio-alsa/src/qa_alsa.py | Python | gpl-3.0 | 1,240 |
import os, glob
from imp import find_module, load_module
EXTENSIONS_PATH = os.path.dirname(os.path.abspath(__file__))
EXTENSIONS_DIRECTORIES = glob.glob(os.path.join(EXTENSIONS_PATH, '*/'))
extensions_parameters = []
def import_extension(extension_directory):
extension_name = os.path.basename(os.path.normpath(extension_directory))
py_files = glob.glob(os.path.join(extension_directory, "*.py"))
module_names = [
os.path.basename(f)[:-3]
for f in py_files
if not os.path.basename(f).startswith('_')
]
for module_name in module_names:
module = find_module(module_name, [extension_directory])
load_module(module_name, *module)
param_file = os.path.join(extension_directory, 'parameters.xml')
if os.path.isfile(param_file):
extensions_parameters.append(param_file)
for extension_dir in EXTENSIONS_DIRECTORIES:
import_extension(extension_dir)
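# Note: the `imp` module used above is deprecated since Python 3.4 (and
# `find_module` leaves the module file open). A rough equivalent of the
# per-module loop using `importlib` would be:
#
#     import importlib.util
#
#     for module_name in module_names:
#         path = os.path.join(extension_directory, module_name + ".py")
#         spec = importlib.util.spec_from_file_location(module_name, path)
#         module = importlib.util.module_from_spec(spec)
#         spec.loader.exec_module(module)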
| SophieIPP/openfisca-france | openfisca_france/model/extensions/__init__.py | Python | agpl-3.0 | 865 |
from django.conf.urls import *
from django.http import HttpResponseRedirect
from anon_provider import AnonProvider, openid_page
urlpatterns = patterns('',
(r'^$', lambda r: HttpResponseRedirect('/openid/')),
(r'^server/$', AnonProvider()),
(r'^(\w+)/$', openid_page),
)
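# `patterns()` was removed in Django 1.10. A roughly equivalent sketch for
# modern Django (assuming the same views) would be:
#
#     from django.urls import path, re_path
#
#     urlpatterns = [
#         path('', lambda r: HttpResponseRedirect('/openid/')),
#         re_path(r'^server/$', AnonProvider()),
#         re_path(r'^(\w+)/$', openid_page),
#     ]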
| fgirault/smeuhsocial | apps/django_openid/demos/provider/urls.py | Python | mit | 283 |
import os
import pickle
import random
import re
from datetime import datetime
from data_processing import (InteractiveAnswer, _in_list, colorit, space_fill,
split_wrd)
BOARDER_LENGTH = 40
class Quest():
    def __init__(self, q, sel=None, ta=None, args=None):
'''
Class representing a Question.
Parameters
----------
basic arguments:
q : question. necessary. list.
sel : selections. list.
ta : true answer. list.
        extensible arguments:
        args : dict of {'name': 'value'} pairs.
'''
self.q = q
self.sel = sel
self.ta = ta
        self.args = args if args is not None else {}  # avoid a shared mutable default
def __str__(self):
'''Visualize the `Quest`.'''
return '{\n\tq: %s,\n\tsel: %s,\n\tta: %s,\n\targs: %s\n}' % \
(self.q, self.sel, self.ta, self.args)
def __eq__(self, value):
        '''Evaluate whether two `Quest`s are equal.'''
if type(value) != type(self): return False
for i in ['q', 'sel', 'ta', 'args']:
if self.__getattribute__(i) != value.__getattribute__(i):
return False
return True
    def __hash__(self):
        # `sel`/`ta` may be None and `args` is a dict; normalize so join() is safe
        parts = (self.q or [], self.sel or [], self.ta or [], sorted(self.args or {}))
        return sum(hash('\n'.join(p)) for p in parts) % int(1e+16)
class QuestForm(list):
def __init__(self, *args, **kwargs):
super(QuestForm, self).__init__(*args, **kwargs)
def __getitem__(self, ind):
if type(ind) == int:
return super(QuestForm, self).__getitem__(ind)
if type(ind) == slice:
return QuestForm(super(QuestForm, self).__getitem__(ind))
else:
returns = QuestForm()
for i in ind:
returns.append(self[i])
return returns
def append(self, *args, **kwargs):
super(QuestForm, self).append(*args, **kwargs)
return self
class QuestFormTextLoader():
'''QuestForm Loader for text files.'''
def __init__(self,
questpattern,
qpattern,
selpattern=None,
tapattern=None,
argpattern={}):
'''
Parameters
----------
questpattern : regex pattern for a question. necessary.
qpattern : regex pattern for question text in a question. necessary.
selpattern : regex pattern for selections.
a question can have several matching selections.
tapattern : regex pattern for true answer.
        argpattern : dict with {'arg_name' : 'arg_regex'} pairs.
'''
self.questpattern = questpattern
self.qpattern = qpattern
self.selpattern = selpattern
self.tapattern = tapattern
self.argpattern = dict(argpattern)
self.is_cached = False
def get_cached_qf(self, togo='Curdata.data'):
'''Load cached QuestForm.'''
if togo in os.listdir():
            if InteractiveAnswer(
                    'Cached data found. Continue?', yes_or_no=True).get():
with open(togo, 'rb') as f:
return pickle.load(f)
else:
datas = ["Create a new data"] + [
i for i in os.listdir() if re.findall(r'.*\.data$', i)
]
if not datas: return
print("Cached data not found, listing other datas")
for i in range(len(datas)):
print('\t%3s: \t%s' % (i, datas[i]))
no = InteractiveAnswer(
'Which one to choose?',
verify=range(len(datas)),
serializer=
lambda x: [int(i) for i in re.findall(r'[0-9]+', x)]).get()[0]
if no == 0:
return
else:
with open(datas[no], 'rb') as f:
return pickle.load(f)
def _load(self, queststr):
questform = QuestForm()
for quest in re.findall(self.questpattern, queststr):
qitem = re.findall(self.qpattern, quest)
selitem = re.findall(self.selpattern,
quest) if self.selpattern else None
taitem = re.findall(self.tapattern,
quest) if self.tapattern else None
            argitem = {patnam: re.findall(self.argpattern[patnam], quest)
                       for patnam in self.argpattern} if self.argpattern else {}
questform = questform.append(
Quest(q=qitem, sel=selitem, ta=taitem, args=argitem))
return questform
def load(self, queststr):
'''Search queststr, match arguments and returns a QuestForm.'''
qf = self.get_cached_qf()
if qf is not None:
self.is_cached = True
return qf
if 'MainData.data' in os.listdir():
with open('MainData.data', 'rb') as f:
qf = pickle.load(f)
else:
qf = self._load(queststr)
with open('MainData.data', 'wb') as f:
pickle.dump(qf, f)
return qf
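# A hypothetical usage sketch of the text loader (the patterns below are
# illustrative only, not taken from any real question file):
#
#     loader = QuestFormTextLoader(
#         questpattern=r'(?s)\d+\..*?(?=\n\d+\.|\Z)',  # one numbered question block
#         qpattern=r'\d+\.(.*)',                       # the question text itself
#         selpattern=r'[A-D]\..*',                     # selections A. through D.
#         tapattern=r'Answer:\s*([A-D])',              # the true answer
#     )
#     with open('questions.txt') as f:
#         qf = loader.load(f.read())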
class QuestFormExcelLoader(QuestFormTextLoader):
'''QuestForm Loader for excel files. Requires `pandas` module.'''
def __init__(self, qcol, selcol=None, tacol=None, argcol={}):
        '''
        Parameters
        ----------
        qcol : column name for the question text. necessary.
        selcol : column name for selections.
        tacol : column name for the true answer.
        argcol : dict with {'arg_name' : 'column_name'} pairs.
        '''
super(QuestFormExcelLoader, self).__init__(None, qcol, selcol, tacol,
argcol)
def _load(self, questdf):
import pandas as pd
if type(questdf) == str: questdf = pd.read_excel(questdf)
questform = QuestForm()
for q in range(len(questdf)):
            quest = questdf.iloc[q]  # .ix was removed in pandas; access here is positional
qitem = quest[self.qpattern]
selitem = quest[self.selpattern] if self.selpattern else None
taitem = quest[self.tapattern] if self.tapattern else None
argitem = {
pat: quest[self.argpattern[pat]]
for pat in self.argpattern
} if self.argpattern else {}
qitem = None if qitem is None else ([qitem] if isinstance(
qitem, str) else list(qitem))
selitem = None if selitem is None else ([selitem] if isinstance(
selitem, str) else list(selitem))
taitem = None if taitem is None else ([taitem] if isinstance(
taitem, str) else list(taitem))
questform = questform.append(
Quest(q=qitem, sel=selitem, ta=taitem, args=argitem))
return questform
class BeginQuestForm():
'''Class for rendering the exam.'''
def __init__(self,
qf,
arrange='qast',
no_score=False,
input_manner=None,
no_filter=False,
storage='l|w',
filenames=['Curdata.data', 'Wrongdata.data']):
        '''
        Parameters
        ----------
        qf : QuestForm. The QuestForm to test on.
        storage : str with several units separated by `|`.
            each unit contains one or more of `cwol`.
            `c` indicates Quests that were marked as correct.
            `w` indicates Quests that were marked as wrong.
            `o` indicates Quests that were marked as other.
            `l` indicates Quests that weren't marked.
        filenames : list in which each element gives the output filename
            for the corresponding unit of the `storage` option.
        arrange : iterable. each element should be one argument in a `Quest` object.
            `question` indicates the question text.
            `args` indicates all args.
            `selections` indicates the selection text.
            `trueanswer` indicates the true-answer text.
            `label` may indicate the `label` keyword in the `args` child of a `Quest`.
            If not ambiguous, you can use `q` or `que` to indicate `question`,
            or `t` to indicate `trueanswer`.
        no_score : determines whether to skip recording the correct/wrong/other score.
        input_manner : a class whose .get() method returns input text.
            designed for the `InteractiveAnswer` class.
        no_filter : determines whether to skip filtering the qf by `self.selchap`.
        '''
self.qf = qf
self.starttime = datetime.now()
self.correct = []
self.wrong = []
self.other = []
self.arrange = arrange
self.storage = storage
self.store_filenames = filenames
self.no_score = no_score
self.input_manner = input_manner
self.status = []
self.no_filter = no_filter
def selchap(self, qf):
'''
        Dummy function for selecting chapters (i.e. filtering the QuestForm).
        Override this function to make it work.
'''
return qf
def oninit(self):
'''Things done on initialize'''
if InteractiveAnswer('Randomize?', yes_or_no=True).get():
random.shuffle(self.arranged_index)
print('\n', '=' * BOARDER_LENGTH, '\n')
print(
space_fill(
self.starttime.strftime('%Y-%m-%d %H:%M:%S'), BOARDER_LENGTH))
print(space_fill('Find %d questions.' % (self.length), BOARDER_LENGTH))
print(space_fill('start test.', BOARDER_LENGTH))
print('\n', '=' * BOARDER_LENGTH, '\n')
def _report(self):
        ''' Print the report.'''
print('\n\n', '=' * BOARDER_LENGTH, '\n')
usedtime = (datetime.now() - self.starttime).seconds
(usedtime, s) = divmod(usedtime, 60)
(h, m) = divmod(usedtime, 60)
print(space_fill('Total Time: %d hours, %d minutes, %d seconds'\
%(h, m, s) ,BOARDER_LENGTH))
if self.no_score: pass
elif len(self.correct) + len(self.wrong) != 0:
c = len(self.correct)
w = len(self.wrong)
print('Correct: ', c)
print('Wrong: ', w)
print('Score: %.2f' % (c / (c + w) * 100))
print('\n', '-' * BOARDER_LENGTH, '\n')
self.show_status(h)
print('\n', '=' * BOARDER_LENGTH, '\n')
def onkill(self):
''' Things done on kill/interrupt.'''
print('\n\n', '=' * BOARDER_LENGTH, '\n')
print(space_fill('Interrupted', BOARDER_LENGTH))
self._report()
self.store_data(level=self.storage, filenames=self.store_filenames)
return
def onfinish(self):
''' Things done on finishing exam.'''
print('\n\n', '=' * BOARDER_LENGTH, '\n')
print(space_fill('Finished', BOARDER_LENGTH))
self._report()
self.store_data(level=self.storage, filenames=self.store_filenames)
return
def store_data(self,
filenames=['Curdata.data', 'Wrongdata.data'],
level='l|w'):
''' Stores data.'''
# get left quests
l = [
i for i in range(len(self.qf))
if not (_in_list(i, self.correct) | _in_list(i, self.wrong)
| _in_list(i, self.other))
]
_level = level.split('|')
for fn, lv in zip(filenames, range(len(_level))):
index = []
# add required quests to index
for i, j in zip('cwol', [self.correct, self.wrong, self.other, l]):
if i in _level[lv]: index += j
index.sort()
qf = self.qf[index]
# TODO: duplicated. add append/write method as an option
if fn == 'Curdata.data':
if len(qf) != 0:
with open(fn, 'wb') as f:
pickle.dump(qf, f)
else:
                try:
                    os.remove(fn)
                except OSError:
                    pass
else:
if fn not in os.listdir():
with open(fn, 'wb') as f:
pickle.dump(qf, f)
else:
with open(fn, 'rb') as f:
data = pickle.load(f)
data = QuestForm(data + qf)
with open(fn, 'wb') as f:
pickle.dump(data, f)
def raise_quest(self, quest, **kwargs):
'''Loop to raise a `Quest` according to `self.arrange`.'''
ans = None
for a in self.arrange:
if re.findall('^' + a, 'quest'):
self.raise_q(quest, **kwargs)
elif re.findall('^' + a, 'args'):
if not quest.args: continue
for k in quest.args:
print(k + ':', quest.args[k])
elif re.findall('^' + a, 'selection'):
self.raise_sel(quest, **kwargs)
elif re.findall('^' + a, 'true_answer'):
ans = self.get_input(self.input_manner)
ans = self.check_ans(ans, quest, **kwargs)
if ans is not True or self.no_score:
self.raise_ta(quest, **kwargs)
else:
for k in quest.args:
if re.findall('^' + a, k):
print(k + ':', quest.args[k])
print('\n', '-' * BOARDER_LENGTH, '\n')
return ans
def get_input(self, input_manner=None):
        '''Get user input, via `input_manner` if given.'''
if input_manner is None:
return input('Your Answer: ')
else:
try:
return input_manner.get()
except AttributeError:
raise TypeError('`input_manner` should have a `get()` method')
def start(self):
'''Starting point.'''
try:
if not self.no_filter: self.qf = self.selchap(self.qf)
self.length = len(self.qf)
self.arranged_index = list(range(self.length))
self.oninit()
for quest in self.arranged_index:
tof = self.raise_quest(self.qf[quest], qid=quest)
if tof is True:
self.correct.append(quest)
self.status.append(
((datetime.now() - self.starttime).seconds, 1))
elif tof is False:
self.wrong.append(quest)
self.status.append(
((datetime.now() - self.starttime).seconds, 0))
else:
self.other.append(quest)
self.status.append(
((datetime.now() - self.starttime).seconds, 2))
self.onfinish()
except (KeyboardInterrupt, EOFError):
self.onkill()
def raise_q(self, quest, **kwargs):
        '''Raises the question in a `Quest`. You may want to override it.'''
print(
'Question %d/%d: ' %
(len(self.other) + len(self.correct) + len(self.wrong) + 1,
self.length),
end='')
print('\n'.join(quest.q))
return
def raise_sel(self, quest, **kwargs):
        '''Raises the selections in a `Quest`. You may want to override it.'''
if quest.sel: print('\n'.join(quest.sel))
def raise_ta(self, quest, **kwargs):
        '''Raises the true answer in a `Quest`. You may want to override it.'''
if quest.ta: print('True Answer:', ' '.join(quest.ta))
def check_ans(self, ans, quest, **kwargs):
'''
        Check the answer. Returns True, False, or other, at your convenience.
        You may want to override it.
'''
if self.no_score: return True
if ans == ''.join(quest.ta):
print(colorit('Correct!', 'green'))
return True
else:
print(colorit('WRONG!', 'red'))
return False
def show_status(self, hduration):
''' Show statistics before exit. '''
result = []
tempres = [0, 0, 0]
status = self.status
        if hduration == 0:
            interval = 3 * 60
        else:
            interval = 5 * hduration * 60
        cursec = interval
        for i in status:
            while cursec - i[0] <= 0:
                result.append(tempres)
                tempres = [0, 0, 0]
                cursec += interval
            tempres[i[1]] += 1
        result.append(tempres)
        total = interval
        for i in result:
            print('%3dm:' % (total / 60),
                  colorit('+' * i[1], 'green') + colorit('-' * i[0], 'red'))
            total += interval
return result
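# A minimal end-to-end sketch (file name and patterns are hypothetical):
#
#     if __name__ == '__main__':
#         loader = QuestFormTextLoader(questpattern=r'(?s)\d+\..*?(?=\n\d+\.|\Z)',
#                                      qpattern=r'\d+\.(.*)')
#         with open('questions.txt') as f:
#             qf = loader.load(f.read())
#         BeginQuestForm(qf, arrange='qast').start()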
| heyrict/exam | exam.py | Python | apache-2.0 | 16,883 |
from collections import namedtuple
from functools import wraps
import json
import typing
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.decorators import login_required
from django.core.exceptions import (
PermissionDenied,
ValidationError,
)
from django.contrib import messages
import django.urls as url
from django.db import transaction, models
from django.http.response import (
HttpResponse,
HttpResponseBadRequest,
HttpResponseForbidden,
HttpResponseRedirect,
)
from django.http import Http404
from django.shortcuts import (
redirect,
render,
get_object_or_404,
)
from django.utils import timezone
from django.utils.formats import localize
from django.utils.translation import gettext as _
from django.views.csrf import csrf_failure as django_csrf_failure
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_http_methods
from django.views.generic import RedirectView
from ..checkout_api import clerk_logout_fn
from .. import ajax_util
from ..forms import ItemRemoveForm, VendorItemForm, VendorBoxForm, remove_item_from_receipt as _remove_item_from_receipt
from ..fields import ItemPriceField
from ..models import (
Box,
Clerk,
Event,
EventPermission,
Item,
ItemType,
Vendor,
UserAdapter,
UIText,
Receipt,
)
from ..stats import ItemCountData, ItemEurosData
from ..util import get_form
from ..utils import (
barcode_view,
is_vendor_open,
is_registration_closed_for_users,
require_vendor_open,
)
from ..templatetags.kirppu_tags import get_dataurl
from .vendors import get_multi_vendor_values
import pubcode
__all__ = [
"index",
"MobileRedirect",
"item_add",
"item_hide",
"item_to_not_printed",
"item_to_printed",
"item_update_price",
"item_update_name",
"item_update_type",
"all_to_print",
"box_add",
"box_hide",
"box_print",
"box_content",
"get_items",
"get_boxes",
"get_clerk_codes",
"get_counter_commands",
"get_boxes_codes",
"checkout_view",
"overseer_view",
"stats_view",
"type_stats_view",
"statistical_stats_view",
"vendor_view",
"accept_terms",
"remove_item_from_receipt",
"lost_and_found_list",
"kirppu_csrf_failure",
]
def index(request):
return redirect("kirppu:front_page")
class MobileRedirect(RedirectView):
permanent = False
pattern_name = "kirppu:mobile"
@login_required
@require_http_methods(["POST"])
@require_vendor_open
def item_add(request, event):
if not Vendor.has_accepted(request, event):
return HttpResponseBadRequest("Missing acceptance of terms.")
vendor = Vendor.get_or_create_vendor(request, event)
form = VendorItemForm(request.POST, event)
if not form.is_valid():
return HttpResponseBadRequest(form.get_any_error())
item_cnt = Item.objects.filter(vendor=vendor).count()
# Create the items and construct a response containing all the items that have been added.
response = []
max_items = settings.KIRPPU_MAX_ITEMS_PER_VENDOR
data = form.db_values()
name = data.pop("name")
for suffix in form.cleaned_data["suffixes"]:
if item_cnt >= max_items:
error_msg = _(u"You have %(max_items)s items, which is the maximum. No more items can be registered.")
return HttpResponseBadRequest(error_msg % {'max_items': max_items})
item_cnt += 1
suffixed_name = (name + u" " + suffix).strip() if suffix else name
try:
item = Item.new(
name=suffixed_name,
vendor=vendor,
**data
)
except ValidationError as e:
return HttpResponseBadRequest(" ".join(e.messages))
item_dict = item.as_public_dict()
item_dict['barcode_dataurl'] = get_dataurl(item.code, 'png')
response.append(item_dict)
return HttpResponse(json.dumps(response), 'application/json')
@login_required
@require_http_methods(["POST"])
def item_hide(request, event_slug, code):
event = get_object_or_404(Event, slug=event_slug)
with transaction.atomic():
vendor = Vendor.get_vendor(request, event)
item = get_object_or_404(Item.objects.select_for_update(), code=code, vendor=vendor)
item.hidden = True
item.save(update_fields=("hidden",))
return HttpResponse()
@login_required
@require_http_methods(['POST'])
def item_to_not_printed(request, event_slug, code):
event = get_object_or_404(Event, slug=event_slug)
with transaction.atomic():
vendor = Vendor.get_vendor(request, event)
item = get_object_or_404(Item.objects.select_for_update(), code=code, vendor=vendor, box__isnull=True)
if settings.KIRPPU_COPY_ITEM_WHEN_UNPRINTED:
# Create a duplicate of the item with a new code and hide the old item.
# This way, even if the user forgets to attach the new tags, the old
# printed tag is still in the system.
if not is_vendor_open(request, event):
return HttpResponseForbidden("Registration is closed")
new_item = Item.new(
name=item.name,
price=item.price,
vendor=item.vendor,
type=item.type,
state=Item.ADVERTISED,
itemtype=item.itemtype,
adult=item.adult
)
item.hidden = True
else:
item.printed = False
new_item = item
item.save(update_fields=("hidden", "printed"))
item_dict = {
'vendor_id': new_item.vendor_id,
'code': new_item.code,
'barcode_dataurl': get_dataurl(item.code, 'png'),
'name': new_item.name,
'price': str(new_item.price).replace('.', ','),
'type': new_item.type,
'adult': new_item.adult,
}
return HttpResponse(json.dumps(item_dict), 'application/json')
@login_required
@require_http_methods(["POST"])
def item_to_printed(request, event_slug, code):
event = get_object_or_404(Event, slug=event_slug)
with transaction.atomic():
vendor = Vendor.get_vendor(request, event)
item = get_object_or_404(Item.objects.select_for_update(), code=code, vendor=vendor, box__isnull=True)
item.printed = True
item.save(update_fields=("printed",))
return HttpResponse()
@login_required
@require_http_methods(["POST"])
@require_vendor_open
def item_update_price(request, event, code):
try:
price = ItemPriceField().clean(request.POST.get('value'))
except ValidationError as error:
return HttpResponseBadRequest(u' '.join(error.messages))
with transaction.atomic():
vendor = Vendor.get_vendor(request, event)
item = get_object_or_404(Item.objects.select_for_update(), code=code, vendor=vendor, box__isnull=True)
if item.is_locked():
return HttpResponseBadRequest("Item has been brought to event. Price can't be changed.")
item.price = str(price)
item.save(update_fields=("price",))
return HttpResponse(str(price).replace(".", ","))
@login_required
@require_http_methods(["POST"])
@require_vendor_open
def item_update_name(request, event, code):
name = request.POST.get("value", "no name")
name = name[:80]
with transaction.atomic():
vendor = Vendor.get_vendor(request, event)
item = get_object_or_404(Item.objects.select_for_update(), code=code, vendor=vendor)
if item.is_locked():
return HttpResponseBadRequest("Item has been brought to event. Name can't be changed.")
item.name = name
item.save(update_fields=("name",))
return HttpResponse(name)
@login_required
@require_http_methods(["POST"])
def item_update_type(request, event_slug, code):
event = get_object_or_404(Event, slug=event_slug)
tag_type = request.POST.get("tag_type", None)
with transaction.atomic():
vendor = Vendor.get_vendor(request, event)
item = get_object_or_404(Item.objects.select_for_update(), code=code, vendor=vendor)
item.type = tag_type
item.save(update_fields=("type",))
return HttpResponse()
@login_required
@require_http_methods(["POST"])
def all_to_print(request, event_slug):
event = get_object_or_404(Event, slug=event_slug)
vendor = Vendor.get_vendor(request, event)
items = Item.objects.filter(vendor=vendor).filter(printed=False).filter(box__isnull=True)
items.update(printed=True)
return HttpResponse()
@login_required
@require_http_methods(["POST"])
@require_vendor_open
def box_add(request, event):
if not event.use_boxes:
raise Http404()
if not Vendor.has_accepted(request, event):
return HttpResponseBadRequest("Missing acceptance of terms.")
vendor = Vendor.get_vendor(request, event)
form = VendorBoxForm(request.POST, event)
if not form.is_valid():
return HttpResponseBadRequest(form.get_any_error())
data = form.db_values()
    # Verify that the user doesn't exceed their item quota with the box.
max_items = settings.KIRPPU_MAX_ITEMS_PER_VENDOR
item_cnt = Item.objects.filter(vendor=vendor).count()
count = data["count"]
if item_cnt >= max_items:
error_msg = _(u"You have %(max_items)s items, which is the maximum. No more items can be registered.")
return HttpResponseBadRequest(error_msg % {'max_items': max_items})
elif max_items < count + item_cnt:
error_msg = _(u"You have %(item_cnt)s items. "
u"Creating this box would cause the items to exceed the maximum number of allowed items.")
return HttpResponseBadRequest(error_msg % {'item_cnt': item_cnt})
    # Create the box and items, and construct a response containing the box and all the items that have been added.
box = Box.new(
vendor=vendor,
**data
)
box_dict = box.as_public_dict()
box_dict["vendor_id"] = vendor.id
box_dict["event"] = event
return render(request, "kirppu/app_boxes_box.html", box_dict)
@login_required
@require_http_methods(["POST"])
def box_hide(request, event_slug, box_id):
event = get_object_or_404(Event, slug=event_slug)
if not event.use_boxes:
raise Http404()
with transaction.atomic():
vendor = Vendor.get_vendor(request, event)
box = get_object_or_404(Box.objects, id=box_id)
box_vendor = box.get_vendor()
if box_vendor.id != vendor.id:
raise Http404()
box.set_hidden(True)
return HttpResponse()
@login_required
@require_http_methods(["POST"])
def box_print(request, event_slug, box_id):
event = get_object_or_404(Event, slug=event_slug)
if not event.use_boxes:
raise Http404()
with transaction.atomic():
vendor = Vendor.get_vendor(request, event)
box = get_object_or_404(Box.objects, id=box_id)
box_vendor = box.get_vendor()
if box_vendor.id != vendor.id:
raise Http404()
box.set_printed(True)
return HttpResponse()
@login_required
@require_http_methods(["GET"])
@barcode_view
def box_content(request, event_slug, box_id, bar_type):
"""
Get a page containing the contents of one box for printing
:param request: HttpRequest object.
:type request: django.http.request.HttpRequest
:param bar_type: Image type of the generated bar. See `kirppu_tags.barcode_dataurl`.
:type bar_type: str
:return: HttpResponse or HttpResponseBadRequest
"""
event = get_object_or_404(Event, slug=event_slug)
if not event.use_boxes:
raise Http404()
vendor = Vendor.get_vendor(request, event)
boxes = Box.objects.filter(id=box_id, item__vendor=vendor, item__hidden=False).distinct()
if boxes.count() == 0:
raise Http404()
box = boxes[0]
item = box.get_representative_item()
render_params = {
'box': box,
'item': item,
'bar_type': bar_type,
'CURRENCY': settings.KIRPPU_CURRENCY,
}
return render(request, "kirppu/app_boxes_content.html", render_params)
class MenuItem(typing.NamedTuple):
name: typing.Optional[str]
url: typing.Optional[str]
active: typing.Optional[bool]
sub_items: typing.Optional[typing.List["MenuItem"]]
def _vendor_menu_contents(request, event: Event) -> typing.List[MenuItem]:
"""
Generate menu for Vendor views.
    The returned list contains entries for the menu, each entry containing a
    name, url, a flag indicating whether the entry is currently active, and
    optional sub-items.
:param event: The event.
:param request: Current request being processed.
:return: List of menu items containing name, url and active fields.
"""
active = request.resolver_match.view_name
def fill(name, func, sub=None, query=None, is_global=False):
if not is_global:
kwargs = {"event_slug": event.slug}
else:
kwargs = {}
link = url.reverse(func, kwargs=kwargs) if func else None
from urllib.parse import quote
if query:
link += "?" + "&".join(
quote(k, safe="") + (("=" + quote(v, safe="")) if v else "")
for k, v in query.items())
return MenuItem(name, link, func == active, sub)
items = [
fill(_(u"Home"), "kirppu:vendor_view"),
]
if not event.source_db:
items.append(fill(_(u"Item list"), "kirppu:page"))
if event.use_boxes:
items.append(fill(_(u"Box list"), "kirppu:vendor_boxes"))
if event.mobile_view_visible:
items.append(fill(_("Mobile"), "kirppu:mobile"))
manage_sub = []
permissions = EventPermission.get(event, request.user)
if not event.source_db and (request.user.is_staff or UserAdapter.is_clerk(request.user, event)):
manage_sub.append(fill(_(u"Checkout commands"), "kirppu:commands"))
if event.checkout_active:
manage_sub.append(fill(_(u"Checkout"), "kirppu:checkout_view"))
if event.use_boxes:
manage_sub.append(fill(_("Box codes"), "kirppu:box_codes"))
if not event.source_db and (request.user.is_staff or permissions.can_see_clerk_codes):
manage_sub.append(fill(_(u"Clerk codes"), "kirppu:clerks"))
if not event.source_db and (request.user.is_staff or permissions.can_see_accounting):
manage_sub.append(fill(_(u"Lost and Found"), "kirppu:lost_and_found"))
if request.user.is_staff\
or UserAdapter.is_clerk(request.user, event)\
or permissions.can_see_statistics:
manage_sub.append(fill(_(u"Statistics"), "kirppu:stats_view"))
if request.user.is_staff:
try:
manage_sub.append(fill(_(u"Site administration"), "admin:index", is_global=True))
        except url.NoReverseMatch:
            pass
if manage_sub:
items.append(fill(_(u"Management"), "", manage_sub))
if permissions.can_see_accounting:
accounting_sub = [
fill(_("View"), "kirppu:accounting"),
fill(_("Download"), "kirppu:accounting", query={"download": ""}),
MenuItem(None, None, None, None),
fill(_("View items"), "kirppu:item_dump", query={"txt": ""}),
fill(_("View items (CSV)"), "kirppu:item_dump"),
]
items.append(fill(_("Accounting"), "", accounting_sub))
return items
@login_required
@require_http_methods(["GET"])
@barcode_view
def get_items(request, event_slug, bar_type):
"""
Get a page containing all items for vendor.
:param request: HttpRequest object.
:type request: django.http.request.HttpRequest
:return: HttpResponse or HttpResponseBadRequest
"""
event = get_object_or_404(Event, slug=event_slug)
event.require_default_db()
user = request.user
if user.is_staff and "user" in request.GET:
user = get_object_or_404(get_user_model(), username=request.GET["user"])
vendor = Vendor.get_vendor(request, event)
vendor_data = get_multi_vendor_values(request, event)
if event.multiple_vendors_per_user and user.is_staff and "user" in request.GET:
raise NotImplementedError # FIXME: Decide how this should work.
vendor_items = Item.objects.filter(vendor=vendor, hidden=False, box__isnull=True)
items = vendor_items.filter(printed=False)
printed_items = vendor_items.filter(printed=True)
# Order from newest to oldest, because that way new items are added
# to the top and the user immediately sees them without scrolling
# down.
items = items.order_by('-id')
item_name_placeholder = UIText.get_text(event, "item_placeholder", _("Ranma ½ Vol."))
render_params = {
'event': event,
'source_event': event.get_real_event(),
'items': items,
'printed_items': printed_items,
'bar_type': bar_type,
'item_name_placeholder': item_name_placeholder,
'profile_url': settings.PROFILE_URL,
'terms_accepted': vendor.terms_accepted if vendor is not None else False,
'is_registration_open': is_vendor_open(request, event),
'is_registration_closed_for_users': is_registration_closed_for_users(event=event),
'menu': _vendor_menu_contents(request, event),
'itemTypes': ItemType.as_tuple(event),
'CURRENCY': settings.KIRPPU_CURRENCY,
'PRICE_MIN_MAX': settings.KIRPPU_MIN_MAX_PRICE,
}
render_params.update(vendor_data)
return render(request, "kirppu/app_items.html", render_params)
@login_required
@require_http_methods(["GET"])
def get_boxes(request, event_slug):
"""
Get a page containing all boxes for vendor.
:param request: HttpRequest object.
:type request: django.http.request.HttpRequest
:return: HttpResponse or HttpResponseBadRequest
"""
event = get_object_or_404(Event, slug=event_slug)
event.require_default_db()
if not event.use_boxes:
raise Http404()
user = request.user
if user.is_staff and "user" in request.GET:
user = get_object_or_404(get_user_model(), username=request.GET["user"])
vendor = Vendor.get_vendor(request, event)
vendor_data = get_multi_vendor_values(request, event)
boxes = Box.objects.filter(item__vendor=vendor, item__hidden=False).distinct()
boxes = boxes.select_related("representative_item__itemtype")
# Order from newest to oldest, because that way new boxes are added
# to the top and the user immediately sees them without scrolling
# down.
boxes = boxes.order_by('-id')
box_name_placeholder = UIText.get_text(event, "box_placeholder", _("Box full of Ranma"))
render_params = {
'event': event,
'source_event': event.get_real_event(),
'boxes': boxes,
'box_name_placeholder': box_name_placeholder,
'profile_url': settings.PROFILE_URL,
'terms_accepted': vendor.terms_accepted if vendor is not None else False,
'is_registration_open': is_vendor_open(request, event),
'is_registration_closed_for_users': is_registration_closed_for_users(event),
'menu': _vendor_menu_contents(request, event),
'itemTypes': ItemType.as_tuple(event),
'CURRENCY': settings.KIRPPU_CURRENCY,
'PRICE_MIN_MAX': settings.KIRPPU_MIN_MAX_PRICE,
}
render_params.update(vendor_data)
return render(request, "kirppu/app_boxes.html", render_params)
@login_required
@barcode_view
def get_clerk_codes(request, event_slug, bar_type):
event = get_object_or_404(Event, slug=event_slug)
if not (request.user.is_staff or EventPermission.get(event, request.user).can_see_clerk_codes):
return HttpResponseForbidden()
bound = []
unbound = []
code_item = namedtuple("CodeItem", "name code")
for c in Clerk.objects.filter(event=event, access_key__isnull=False):
if not c.is_valid_code:
continue
code = c.get_code()
if c.user is not None:
name = c.user.get_short_name()
if len(name) == 0:
name = c.user.get_username()
bound.append(code_item(name=name, code=code))
else:
unbound.append(code_item(name="", code=code))
items = None
if bound or unbound:
bound.sort(key=lambda a: a.name + a.code)
unbound.sort(key=lambda a: a.code)
items = bound + unbound
        # Generate a code to check its width.
name, code = items[0]
width = pubcode.Code128(code, charset='B').width(add_quiet_zone=True)
else:
width = None # Doesn't matter.
return render(request, "kirppu/app_clerks.html", {
'event': event,
'items': items,
'bar_type': bar_type,
'repeat': range(1),
'barcode_width': width,
})
@login_required
@barcode_view
def get_counter_commands(request, event_slug, bar_type):
event = get_object_or_404(Event, slug=event_slug)
if not (request.user.is_staff or UserAdapter.is_clerk(request.user, event)):
raise PermissionDenied()
return render(request, "kirppu/app_commands.html", {
'event_slug': event_slug,
'title': _(u"Counter commands"),
})
@login_required
@barcode_view
def get_boxes_codes(request, event_slug, bar_type):
event = get_object_or_404(Event, slug=event_slug)
if not event.use_boxes:
raise Http404()
if not (request.user.is_staff or UserAdapter.is_clerk(request.user, event)):
raise PermissionDenied()
boxes = Box.objects.filter(representative_item__vendor__event=event, box_number__isnull=False).order_by("box_number")
vm = []
for box in boxes:
code = "box%d" % box.box_number
img = pubcode.Code128(code, charset='B').data_url(image_format=bar_type, add_quiet_zone=True)
r = box.get_representative_item() # type: Item
vm.append({
"name": box.description,
"code": code,
"data_url": img,
"adult": r.adult,
"vendor_id": r.vendor_id,
"price": r.price_fmt,
"bundle_size": box.bundle_size,
"box_number": box.box_number,
})
return render(request, "kirppu/boxes_list.html", {
"bar_type": bar_type,
"boxes": vm
})
@ensure_csrf_cookie
def checkout_view(request, event_slug):
"""
Checkout view.
:param request: HttpRequest object
:type request: django.http.request.HttpRequest
:return: Response containing the view.
:rtype: HttpResponse
"""
event = get_object_or_404(Event, slug=event_slug)
if not event.checkout_active:
raise PermissionDenied()
clerk_logout_fn(request)
context = {
'CURRENCY': settings.KIRPPU_CURRENCY,
'PURCHASE_MAX': settings.KIRPPU_MAX_PURCHASE,
'event': event,
}
if settings.KIRPPU_AUTO_CLERK and settings.DEBUG:
if settings.KIRPPU_AUTO_CLERK != "*":
real_clerks = Clerk.objects.filter(event=event, user__username=settings.KIRPPU_AUTO_CLERK)
else:
real_clerks = Clerk.objects.filter(event=event, user__isnull=False)
for clerk in real_clerks:
if clerk.is_enabled:
context["auto_clerk"] = clerk.get_code()
break
return render(request, "kirppu/app_checkout.html", context)
@ensure_csrf_cookie
def overseer_view(request, event_slug):
"""Overseer view."""
event = get_object_or_404(Event, slug=event_slug)
if not event.checkout_active:
raise PermissionDenied()
try:
ajax_util.require_user_features(counter=True, clerk=True, overseer=True)(lambda _: None)(request)
except ajax_util.AjaxError:
return redirect('kirppu:checkout_view', event_slug=event.slug)
else:
context = {
'event': event,
'itemtypes': ItemType.as_tuple(event),
'itemstates': Item.STATE,
'CURRENCY': settings.KIRPPU_CURRENCY,
}
return render(request, 'kirppu/app_overseer.html', context)
def _statistics_access(fn):
@wraps(fn)
def inner(request, event_slug, *args, **kwargs):
event = get_object_or_404(Event, slug=event_slug)
try:
if not EventPermission.get(event, request.user).can_see_statistics:
ajax_util.require_user_features(counter=True, clerk=True, staff_override=True)(lambda _: None)(request)
# else: User has permissions, no further checks needed.
except ajax_util.AjaxError:
if event.checkout_active:
return redirect('kirppu:checkout_view', event_slug=event.slug)
else:
raise PermissionDenied()
return fn(request, event, *args, **kwargs)
return inner
@ensure_csrf_cookie
@_statistics_access
def stats_view(request, event: Event):
"""Stats view."""
original_event = event
event = event.get_real_event()
ic = ItemCountData(ItemCountData.GROUP_ITEM_TYPE, event=event)
ie = ItemEurosData(ItemEurosData.GROUP_ITEM_TYPE, event=event)
sum_name = _("Sum")
item_types = (ItemType.objects
.using(event.get_real_database_alias())
.filter(event=event)
.order_by("order")
.values_list("id", "title"))
number_of_items = [
ic.data_set(item_type, type_name)
for item_type, type_name in item_types
]
number_of_items.append(ic.data_set("sum", sum_name))
number_of_euros = [
ie.data_set(item_type, type_name)
for item_type, type_name in item_types
]
number_of_euros.append(ie.data_set("sum", sum_name))
vendor_item_data_counts = []
vendor_item_data_euros = []
vic = ItemCountData(ItemCountData.GROUP_VENDOR, event=event)
vie = ItemEurosData(ItemEurosData.GROUP_VENDOR, event=event)
vie.use_cents = True
vendor_item_data_row_size = 0
for vendor_id in vic.keys():
name = _("Vendor %i") % vendor_id
counts = vic.data_set(vendor_id, name)
euros = vie.data_set(vendor_id, name)
if vendor_item_data_row_size == 0:
vendor_item_data_row_size = len(list(counts.property_names))
vendor_item_data_counts.append(counts)
vendor_item_data_euros.append(euros)
context = {
'event': event,
'event_slug': original_event.slug,
'number_of_items': number_of_items,
'number_of_euros': number_of_euros,
'vendor_item_data_counts': vendor_item_data_counts,
'vendor_item_data_euros': vendor_item_data_euros,
'vendor_item_data_row_size': vendor_item_data_row_size,
'vendor_item_data_order': json.dumps(ItemCountData.columns()),
'CURRENCY': settings.KIRPPU_CURRENCY["raw"],
}
return render(request, 'kirppu/app_stats.html', context)
@ensure_csrf_cookie
@_statistics_access
def type_stats_view(request, event: Event, type_id):
original_event = event
event = event.get_real_event()
item_type = get_object_or_404(ItemType.objects.using(event.get_real_database_alias()), event=event, id=int(type_id))
return render(request, "kirppu/type_stats.html", {
"event": original_event,
"type_id": item_type.id,
"type_title": item_type.title,
})
def _float_array(array):
# noinspection PyPep8Naming
INFINITY = float('inf')
def _float(f):
if f != f:
return "NaN"
elif f == INFINITY:
return 'Infinity'
elif f == -INFINITY:
return '-Infinity'
return float.__repr__(f)
line_length = 20
o = [
", ".join(_float(e) for e in array[i:i + line_length])
for i in range(0, len(array), line_length)
]
return "[\n" + ",\n".join(o) + "]"
@ensure_csrf_cookie
@_statistics_access
def statistical_stats_view(request, event: Event):
original_event = event
event = event.get_real_event()
database = event.get_real_database_alias()
brought_states = (Item.BROUGHT, Item.STAGED, Item.SOLD, Item.COMPENSATED, Item.RETURNED)
_items = Item.objects.using(database).filter(vendor__event=event)
_vendors = Vendor.objects.using(database).filter(event=event)
_boxes = Box.objects.using(database).filter(representative_item__vendor__event=event)
registered = _items.count()
deleted = _items.filter(hidden=True).count()
brought = _items.filter(state__in=brought_states).count()
sold = _items.filter(state__in=(Item.STAGED, Item.SOLD, Item.COMPENSATED)).count()
printed_deleted = _items.filter(hidden=True, printed=True).count()
deleted_brought = _items.filter(hidden=True, state__in=brought_states).count()
printed_not_brought = _items.filter(printed=True, state=Item.ADVERTISED).count()
items_in_box = _items.filter(box__isnull=False).count()
items_not_in_box = _items.filter(box__isnull=True).count()
registered_boxes = _boxes.count()
deleted_boxes = _boxes.filter(representative_item__hidden=True).count()
items_in_deleted_boxes = _items.filter(box__representative_item__hidden=True).count()
general = {
"registered": registered,
"deleted": deleted,
"deletedOfRegistered": (deleted * 100.0 / registered) if registered > 0 else 0,
"brought": brought,
"broughtOfRegistered": (brought * 100.0 / registered) if registered > 0 else 0,
"broughtDeleted": deleted_brought,
"printedDeleted": printed_deleted,
"printedNotBrought": printed_not_brought,
"sold": sold,
"soldOfBrought": (sold * 100.0 / brought) if brought > 0 else 0,
"vendors": _vendors.filter(item__state__in=brought_states).distinct().count(),
"vendorsTotal": _vendors.annotate(items=models.Count("item__id")).filter(items__gt=0).count(),
"vendorsInMobileView": _vendors.filter(mobile_view_visited=True).count(),
"itemsInBox": items_in_box,
"itemsNotInBox": items_not_in_box,
"registeredBoxes": registered_boxes,
"deletedBoxes": deleted_boxes,
"deletedOfRegisteredBoxes": (deleted_boxes * 100.0 / registered_boxes) if registered_boxes > 0 else 0,
"itemsInDeletedBoxes": items_in_deleted_boxes,
"itemsInDeletedBoxesOfRegistered": (items_in_deleted_boxes * 100.0 / registered) if registered > 0 else 0,
}
compensations = _vendors.filter(item__state=Item.COMPENSATED) \
.annotate(v_sum=models.Sum("item__price")).order_by("v_sum").values_list("v_sum", flat=True)
compensations = [float(e) for e in compensations]
purchases = list(
Receipt.objects.using(database).filter(counter__event=event, status=Receipt.FINISHED, type=Receipt.TYPE_PURCHASE)
.order_by("total")
.values_list("total", flat=True)
)
purchases = [float(e) for e in purchases]
general["purchases"] = len(purchases)
return render(request, "kirppu/general_stats.html", {
"event": original_event,
"compensations": _float_array(compensations),
"purchases": _float_array(purchases),
"general": general,
"CURRENCY": settings.KIRPPU_CURRENCY["raw"],
})
def vendor_view(request, event_slug):
"""
Render main view for vendors.
:rtype: HttpResponse
"""
event = get_object_or_404(Event, slug=event_slug)
user = request.user
source_event = event.get_real_event()
vendor_data = get_multi_vendor_values(request, event)
if user.is_authenticated:
database = source_event.get_real_database_alias()
vendor = vendor_data["current_vendor"]
items = Item.objects.using(database).filter(vendor=vendor, hidden=False, box__isnull=True)
boxes = Box.objects.using(database).filter(item__vendor=vendor, item__hidden=False).distinct()
boxed_items = Item.objects.using(database).filter(vendor=vendor, hidden=False, box__isnull=False)
else:
vendor = None
items = []
boxes = []
boxed_items = Item.objects.none()
box_info = boxed_items.aggregate(sum=models.Sum("price"), count=models.Count("id"))
context = {
'event': event,
'source_event': source_event,
'user': user,
'items': items,
'total_price': sum(i.price for i in items),
'num_total': len(items),
'num_printed': len(list(filter(lambda i: i.printed, items))),
'boxes': boxes,
'boxes_count': len(boxes),
'boxes_total_price': box_info["sum"],
'boxes_item_count': box_info["count"],
'boxes_printed': len(list(filter(lambda i: i.is_printed(), boxes))),
'profile_url': settings.PROFILE_URL,
'menu': _vendor_menu_contents(request, event),
'CURRENCY': settings.KIRPPU_CURRENCY,
}
context.update(vendor_data)
return render(request, "kirppu/app_frontpage.html", context)
@login_required
@require_http_methods(["POST"])
def accept_terms(request, event_slug):
event = get_object_or_404(Event, slug=event_slug)
event.require_default_db()
vendor = Vendor.get_or_create_vendor(request, event)
if vendor.terms_accepted is None:
vendor.terms_accepted = timezone.now()
vendor.save(update_fields=("terms_accepted",))
result = timezone.template_localtime(vendor.terms_accepted)
result = localize(result)
return HttpResponse(json.dumps({
"result": "ok",
"time": result,
}), "application/json")
@login_required
def remove_item_from_receipt(request, event_slug):
event = get_object_or_404(Event, slug=event_slug)
if not request.user.is_staff:
raise PermissionDenied()
form = get_form(ItemRemoveForm, request, event=event)
if request.method == "POST" and form.is_valid():
try:
removal = _remove_item_from_receipt(request, form.cleaned_data["code"], form.cleaned_data["receipt"])
except (ValueError, AssertionError) as e:
form.add_error(None, e.args[0])
else:
messages.add_message(request, messages.INFO, "Item {0} removed from {1}".format(
form.cleaned_data["code"], removal.receipt
))
return HttpResponseRedirect(url.reverse('kirppu:remove_item_from_receipt',
kwargs={"event_slug": event.slug}))
return render(request, "kirppu/app_item_receipt_remove.html", {
'form': form,
})
@login_required
def lost_and_found_list(request, event_slug):
event = Event.objects.get(slug=event_slug)
event.require_default_db()
if not EventPermission.get(event, request.user).can_see_accounting:
raise PermissionDenied
items = Item.objects \
.select_related("vendor") \
.filter(vendor__event=event, lost_property=True, abandoned=False) \
.order_by("vendor", "name")
vendor_object = namedtuple("VendorItems", "vendor vendor_id items")
vendor_list = {}
for item in items:
vendor_id = item.vendor_id
if vendor_id not in vendor_list:
vendor_list[vendor_id] = vendor_object(item.vendor.user, item.vendor_id, [])
vendor_list[vendor_id].items.append(item)
return render(request, "kirppu/lost_and_found.html", {
'menu': _vendor_menu_contents(request, event),
'event': event,
'items': vendor_list,
})
def kirppu_csrf_failure(request, reason=""):
if request.META.get("HTTP_ACCEPT", "") in ("text/json", "application/json"):
# TODO: Unify the response to match requested content type.
return HttpResponseForbidden(
_("CSRF verification failed. Request aborted."),
content_type="text/plain; charset=utf-8",
)
else:
return django_csrf_failure(request, reason=reason)
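# kirppu_csrf_failure is presumably wired up via Django's CSRF_FAILURE_VIEW
# setting, e.g. (dotted path shown for illustration only):
#
#     CSRF_FAILURE_VIEW = "kirppu.views.kirppu_csrf_failure"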
| jlaunonen/kirppu | kirppu/views/monolithic.py | Python | mit | 35,876 |
import logging
from autotest.client.shared import error
from virttest import aexpect, utils_misc
@error.context_aware
def run_autotest_regression(test, params, env):
"""
Autotest regression test:
Use Virtual Machines to test autotest.
1) Clone the given guest OS (only Linux) image twice.
2) Boot 2 VMs (autotest_server_vm and autotest_client_vm)
    3) Install the autotest server in the server vm
    4) Run the unittests
    5) Run the pylint checker
    6) Run a simple client sleeptest
    7) Run a simple server sleeptest
    8) Register the client vm in the autotest server
    9) Schedule a simple job sleeptest in the client. Wait for client reboot.
    10) If any of these steps have failed, fail the test and report the error
@param test: virt test object
@param params: Dictionary with the test parameters
@param env: Dictionary with test environment.
"""
step_failures = []
autotest_repo = params['autotest_repo']
autotest_branch = params['autotest_branch']
autotest_commit = params['autotest_commit']
password = params['password']
autotest_install_timeout = int(params.get('autotest_install_timeout', 1800))
unittests_run_timeout = int(params.get('unittests_run_timeout', 1800))
pylint_run_timeout = int(params.get('pylint_run_timeout', 1800))
vm_names = params["vms"].split()
server_name = vm_names[0]
client_name = vm_names[1]
vm_server = env.get_vm(server_name)
vm_server.verify_alive()
vm_client = env.get_vm(client_name)
vm_client.verify_alive()
timeout = float(params.get("login_timeout", 240))
session_server = vm_server.wait_for_login(timeout=timeout)
session_client = vm_client.wait_for_login(timeout=timeout)
client_ip = vm_client.get_address()
server_ip = vm_server.get_address()
step1 = "autotest-server-install"
try:
installer_file = "install-autotest-server.sh"
installer_url = ("https://raw.github.com/autotest/autotest/master"
"/contrib/%s" % installer_file)
# Download the install script and execute it
download_cmd = ("python -c 'from urllib2 import urlopen; "
"r = urlopen(\"%s\"); "
"f = open(\"%s\", \"w\"); "
"f.write(r.read())'" % (installer_url,
installer_file))
session_server.cmd(download_cmd)
permission_cmd = ("chmod +x install-autotest-server.sh")
session_server.cmd(permission_cmd)
install_cmd = ("./install-autotest-server.sh -u Aut0t3st -d Aut0t3st "
"-g %s -b %s" % (autotest_repo, autotest_branch))
if autotest_commit:
install_cmd += " -c %s" % autotest_commit
session_server.cmd(install_cmd, timeout=autotest_install_timeout)
vm_server.copy_files_from(guest_path="/tmp/install-autotest-server*log",
host_path=test.resultsdir)
except aexpect.ShellCmdError, e:
for line in e.output.splitlines():
logging.error(line)
step_failures.append(step1)
top_commit = None
try:
session_server.cmd("test -d /usr/local/autotest/.git")
session_server.cmd("cd /usr/local/autotest")
top_commit = session_server.cmd("echo `git log -n 1 --pretty=format:%H`")
top_commit = top_commit.strip()
logging.info("Autotest top commit for repo %s, branch %s: %s",
autotest_repo, autotest_branch, top_commit)
except aexpect.ShellCmdError, e:
for line in e.output.splitlines():
logging.error(line)
if top_commit is not None:
session_server.close()
session_server = vm_server.wait_for_login(timeout=timeout,
username='autotest',
password='Aut0t3st')
step2 = "unittests"
try:
session_server.cmd("cd /usr/local/autotest")
session_server.cmd("utils/unittest_suite.py --full",
timeout=unittests_run_timeout)
except aexpect.ShellCmdError, e:
for line in e.output.splitlines():
logging.error(line)
step_failures.append(step2)
step3 = "pylint"
try:
session_server.cmd("cd /usr/local/autotest")
session_server.cmd("utils/check_patch.py --full --yes",
timeout=pylint_run_timeout)
except aexpect.ShellCmdError, e:
for line in e.output.splitlines():
logging.error(line)
step_failures.append(step3)
step4 = "client_run"
try:
session_server.cmd("cd /usr/local/autotest/client")
session_server.cmd("./autotest-local run sleeptest",
timeout=pylint_run_timeout)
session_server.cmd("rm -rf results/default")
except aexpect.ShellCmdError, e:
for line in e.output.splitlines():
logging.error(line)
step_failures.append(step4)
step5 = "server_run"
try:
session_client.cmd("iptables -F")
session_server.cmd("cd /usr/local/autotest")
session_server.cmd("server/autotest-remote -m %s --ssh-user root "
"--ssh-pass %s "
"-c client/tests/sleeptest/control" %
(client_ip, password),
timeout=pylint_run_timeout)
session_server.cmd("rm -rf results-*")
except aexpect.ShellCmdError, e:
for line in e.output.splitlines():
logging.error(line)
step_failures.append(step5)
step6 = "registering_client_cli"
try:
label_name = "label-%s" % utils_misc.generate_random_id()
create_label_cmd = ("/usr/local/autotest/cli/autotest-rpc-client "
"label create -t %s -w %s" %
(label_name, server_ip))
session_server.cmd(create_label_cmd)
list_labels_cmd = ("/usr/local/autotest/cli/autotest-rpc-client "
"label list -a -w %s" % server_ip)
list_labels_output = session_server.cmd(list_labels_cmd)
for line in list_labels_output.splitlines():
logging.debug(line)
        if label_name not in list_labels_output:
raise ValueError("No label %s in the output of %s" %
(label_name, list_labels_cmd))
create_host_cmd = ("/usr/local/autotest/cli/autotest-rpc-client "
"host create -t %s %s -w %s" %
(label_name, client_ip, server_ip))
session_server.cmd(create_host_cmd)
list_hosts_cmd = ("/usr/local/autotest/cli/autotest-rpc-client "
"host list -w %s" % server_ip)
list_hosts_output = session_server.cmd(list_hosts_cmd)
for line in list_hosts_output.splitlines():
logging.debug(line)
        if client_ip not in list_hosts_output:
            raise ValueError("No client %s in the output of %s" %
                             (client_ip, list_hosts_cmd))
        if label_name not in list_hosts_output:
            raise ValueError("No label %s in the output of %s" %
                             (label_name, list_hosts_cmd))
except (aexpect.ShellCmdError, ValueError), e:
if isinstance(e, aexpect.ShellCmdError):
for line in e.output.splitlines():
logging.error(line)
elif isinstance(e, ValueError):
logging.error(e)
step_failures.append(step6)
step7 = "running_job_cli"
try:
session_client.cmd("iptables -F")
job_name = "Sleeptest %s" % utils_misc.generate_random_id()
def job_is_status(status):
list_jobs_cmd = ("/usr/local/autotest/cli/autotest-rpc-client "
"job list -a -w %s" % server_ip)
list_jobs_output = session_server.cmd(list_jobs_cmd)
if job_name in list_jobs_output:
if status in list_jobs_output:
return True
elif "Aborted" in list_jobs_output:
raise ValueError("Job is in aborted state")
elif "Failed" in list_jobs_output:
raise ValueError("Job is in failed state")
else:
return False
else:
raise ValueError("Job %s does not show in the "
"output of %s" % list_jobs_cmd)
def job_is_completed():
return job_is_status("Completed")
def job_is_running():
return job_is_status("Running")
job_create_cmd = ("/usr/local/autotest/cli/autotest-rpc-client "
"job create --test sleeptest -m %s '%s' -w %s" %
(client_ip, job_name, server_ip))
session_server.cmd(job_create_cmd)
if not utils_misc.wait_for(job_is_running, 300, 0, 10,
"Waiting for job to start running"):
raise ValueError("Job did not start running")
# Wait for the session to become unresponsive
if not utils_misc.wait_for(lambda: not session_client.is_responsive(),
timeout=300):
            raise ValueError("Client machine did not reboot")
# Establish a new client session
session_client = vm_client.wait_for_login(timeout=timeout)
# Wait for the job to complete
if not utils_misc.wait_for(job_is_completed, 300, 0, 10,
"Waiting for job to complete"):
raise ValueError("Job did not complete")
# Copy logs back so we can analyze them
vm_server.copy_files_from(guest_path="/usr/local/autotest/results/*",
host_path=test.resultsdir)
except (aexpect.ShellCmdError, ValueError), e:
if isinstance(e, aexpect.ShellCmdError):
for line in e.output.splitlines():
logging.error(line)
elif isinstance(e, ValueError):
logging.error(e)
step_failures.append(step7)
def report_version():
if top_commit is not None:
logging.info("Autotest git repo: %s", autotest_repo)
logging.info("Autotest git branch: %s", autotest_repo)
logging.info("Autotest top commit: %s", top_commit)
if step_failures:
logging.error("The autotest regression testing failed")
report_version()
raise error.TestFail("The autotest regression testing had the "
"following steps failed: %s" % step_failures)
else:
logging.info("The autotest regression testing passed")
report_version()
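
# Editor's note: every step above repeats the same try / log / record-failure
# pattern. A helper along these lines could express one step; this is an
# illustrative refactoring sketch only, not part of the virt-test API.
def _run_step(step_name, step_failures, func, *args, **kwargs):
    try:
        func(*args, **kwargs)
    except aexpect.ShellCmdError, e:
        # Log the shell output line by line and record the failed step,
        # mirroring the inline pattern used throughout run_autotest_regression.
        for line in e.output.splitlines():
            logging.error(line)
        step_failures.append(step_name)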
|
sathnaga/virt-test
|
tests/autotest_regression.py
|
Python
|
gpl-2.0
| 11,381
|
# The Hazard Library
# Copyright (C) 2012-2014, GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openquake.hazardlib.mfd import TruncatedGRMFD
from openquake.hazardlib.tests.mfd.base_test import BaseMFDTestCase
class TruncatedGRMFDConstraintsTestCase(BaseMFDTestCase):
def test_negative_min_mag(self):
exc = self.assert_mfd_error(
TruncatedGRMFD,
min_mag=-1, max_mag=2, bin_width=0.4, a_val=1, b_val=2
)
self.assertEqual(exc.message, 'minimum magnitude must be non-negative')
def test_min_mag_higher_than_max_mag(self):
exc = self.assert_mfd_error(
TruncatedGRMFD,
min_mag=2.4, max_mag=2, bin_width=0.4, a_val=1, b_val=0.2
)
error = 'maximum magnitude must be higher than minimum magnitude ' \
'by bin width at least'
self.assertEqual(exc.message, error)
def test_negative_bin_width(self):
exc = self.assert_mfd_error(
TruncatedGRMFD,
min_mag=1, max_mag=2, bin_width=-0.4, a_val=1, b_val=0.2
)
self.assertEqual(exc.message, 'bin width must be positive')
def test_non_positive_b_val(self):
error = 'b value must be non-negative'
exc = self.assert_mfd_error(
TruncatedGRMFD,
min_mag=1, max_mag=2, bin_width=0.4, a_val=1, b_val=-2
)
self.assertEqual(exc.message, error)
exc = self.assert_mfd_error(
TruncatedGRMFD,
min_mag=1, max_mag=2, bin_width=0.4, a_val=1, b_val=0
)
self.assertEqual(exc.message, error)
def test_equal_min_mag_and_max_mag(self):
error = 'maximum magnitude must be higher than minimum magnitude ' \
'by bin width at least'
exc = self.assert_mfd_error(
TruncatedGRMFD,
min_mag=6.5, max_mag=6.5, bin_width=0.1, a_val=0.5, b_val=1.0
)
self.assertEqual(exc.message, error)
exc = self.assert_mfd_error(
TruncatedGRMFD,
min_mag=6.7, max_mag=7.3, bin_width=1.0, a_val=0.5, b_val=1.0
)
self.assertEqual(exc.message, error)
class TruncatedGRMFDMFDGetRatesTestCase(BaseMFDTestCase):
def _test(self, expected_rates, rate_tolerance, **kwargs):
mfd = TruncatedGRMFD(**kwargs)
actual_rates = mfd.get_annual_occurrence_rates()
self.assertEqual(len(actual_rates), len(expected_rates))
for i, (mag, rate) in enumerate(actual_rates):
expected_mag, expected_rate = expected_rates[i]
self.assertAlmostEqual(mag, expected_mag, delta=1e-14)
self.assertAlmostEqual(rate, expected_rate, delta=rate_tolerance)
if i == 0:
self.assertEqual((mag, mag + 2), mfd.get_min_max_mag())
def test_1_different_min_mag_and_max_mag(self):
expected_rates = [
(5.5, 2.846049894e-5),
(6.5, 2.846049894e-6),
(7.5, 2.846049894e-7),
]
self._test(expected_rates=expected_rates, rate_tolerance=1e-14,
min_mag=5.0, max_mag=8.0, bin_width=1.0,
a_val=0.5, b_val=1.0)
def test_2_different_min_mag_and_max_mag(self):
expected_rates = [
(5.5, 2.846049894e-5),
(6.5, 2.846049894e-6),
(7.5, 2.846049894e-7),
]
self._test(expected_rates=expected_rates, rate_tolerance=1e-14,
min_mag=5.1, max_mag=7.9, bin_width=1.0,
a_val=0.5, b_val=1.0)
class TruncatedGRMFDMFDRoundingTestCase(BaseMFDTestCase):
def test(self):
mfd = TruncatedGRMFD(min_mag=0.61, max_mag=0.94, bin_width=0.1,
a_val=1, b_val=0.2)
# mag values should be rounded to 0.6 and 0.9 and there
# should be three bins with the first having center at 0.65
min_mag, num_bins = mfd._get_min_mag_and_num_bins()
self.assertAlmostEqual(min_mag, 0.65)
self.assertEqual(mfd.get_min_max_mag(), (min_mag, min_mag + 0.2))
self.assertEqual(num_bins, 3)
class TruncatedGRMFDModificationsTestCase(BaseMFDTestCase):
def test_get_total_moment_rate(self):
mfd = TruncatedGRMFD(min_mag=6.0, max_mag=8.0, bin_width=0.1,
a_val=-17.2, b_val=0.4)
self.assertAlmostEqual(mfd._get_total_moment_rate(), 1.6140553)
def test_get_total_moment_rate_when_b_equal_to_1_5(self):
mfd = TruncatedGRMFD(min_mag=6.0, max_mag=8.0, bin_width=0.1,
a_val=-9.4, b_val=1.5)
self.assertAlmostEqual(mfd._get_total_moment_rate(), 1.3400508)
def test_set_a(self):
mfd = TruncatedGRMFD(min_mag=6.0, max_mag=8.0, bin_width=0.1,
a_val=1.5, b_val=0.5)
mfd._set_a(123.45)
self.assertAlmostEqual(mfd.a_val, -14.6531141)
def test_set_a_when_b_equal_to_1_5(self):
mfd = TruncatedGRMFD(min_mag=6.0, max_mag=8.0, bin_width=0.1,
a_val=1.5, b_val=1.5)
mfd._set_a(12.45)
self.assertAlmostEqual(mfd.a_val, -8.4319519)
def test_set_a_and_get_total_moment_rate(self):
mfd = TruncatedGRMFD(min_mag=3.0, max_mag=4.0, bin_width=0.1,
a_val=4.4, b_val=0.5)
tmr = mfd._get_total_moment_rate()
mfd._set_a(tmr)
self.assertAlmostEqual(mfd.a_val, 4.4)
self.assertEqual(mfd._get_total_moment_rate(), tmr)
def test_set_a_and_get_total_moment_rate_when_b_equal_to_1_5(self):
mfd = TruncatedGRMFD(min_mag=2.4, max_mag=5.6, bin_width=0.4,
a_val=-0.44, b_val=1.5)
tmr = mfd._get_total_moment_rate()
mfd._set_a(tmr)
self.assertAlmostEqual(mfd.a_val, -0.44)
self.assertEqual(mfd._get_total_moment_rate(), tmr)
def test_increment_max_mag(self):
mfd = TruncatedGRMFD(min_mag=6.0, max_mag=7.0, bin_width=0.1,
a_val=-18.2, b_val=0.41)
old_tmr = mfd._get_total_moment_rate()
mfd.modify('increment_max_mag', {'value': 1})
self.assertEqual(mfd.max_mag, 8.0)
self.assertEqual(mfd.b_val, 0.41)
self.assertEqual(mfd.min_mag, 6.0)
self.assertAlmostEqual(mfd._get_total_moment_rate(), old_tmr)
mfd.modify('increment_max_mag', {'value': -1})
self.assertAlmostEqual(mfd._get_total_moment_rate(), old_tmr)
self.assertEqual(mfd.max_mag, 7.0)
self.assertAlmostEqual(mfd.a_val, -18.2)
def test_increment_max_mag_check_constraints(self):
mfd = TruncatedGRMFD(min_mag=6.0, max_mag=7.0, bin_width=0.1,
a_val=1, b_val=1)
self.assert_mfd_error(mfd.modify, 'increment_max_mag', {'value': -1})
def test_set_max_mag(self):
mfd = TruncatedGRMFD(min_mag=3.5, max_mag=5.5, bin_width=0.5,
a_val=1, b_val=1.3)
mfd.modify('set_max_mag', {'value': 4.2})
self.assertEqual(mfd.max_mag, 4.2)
self.assertEqual(mfd.a_val, 1)
self.assertEqual(mfd.b_val, 1.3)
self.assertEqual(mfd.min_mag, 3.5)
def test_set_max_mag_check_constraints(self):
mfd = TruncatedGRMFD(min_mag=3.5, max_mag=5.5, bin_width=0.5,
a_val=1, b_val=1.3)
self.assert_mfd_error(mfd.modify, 'set_max_mag', {'value': 3.6})
def test_increment_b(self):
mfd = TruncatedGRMFD(min_mag=4.2, max_mag=6.6, bin_width=0.2,
a_val=-20.5, b_val=0.51)
old_tmr = mfd._get_total_moment_rate()
mfd.modify('increment_b', {'value': 1.46})
self.assertEqual(mfd.max_mag, 6.6)
self.assertEqual(mfd.b_val, 0.51 + 1.46)
self.assertEqual(mfd.min_mag, 4.2)
self.assertAlmostEqual(mfd._get_total_moment_rate(), old_tmr)
mfd.modify('increment_b', {'value': -1.46})
self.assertAlmostEqual(mfd._get_total_moment_rate(), old_tmr)
self.assertEqual(mfd.b_val, 0.51)
self.assertAlmostEqual(mfd.a_val, -20.5)
def test_increment_b_check_constraints(self):
mfd = TruncatedGRMFD(min_mag=6.0, max_mag=7.0, bin_width=0.1,
a_val=1, b_val=1)
self.assert_mfd_error(mfd.modify, 'increment_b', {'value': -1})
mfd = TruncatedGRMFD(min_mag=6.0, max_mag=7.0, bin_width=0.1,
a_val=1, b_val=1)
self.assert_mfd_error(mfd.modify, 'increment_b', {'value': -2})
def test_set_ab(self):
mfd = TruncatedGRMFD(min_mag=2.5, max_mag=3.5, bin_width=0.25,
a_val=1, b_val=1.3)
mfd.modify('set_ab', {'a_val': -4.2, 'b_val': 1.45})
self.assertEqual(mfd.max_mag, 3.5)
self.assertEqual(mfd.a_val, -4.2)
self.assertEqual(mfd.b_val, 1.45)
self.assertEqual(mfd.min_mag, 2.5)
def test_set_ab_check_constraints(self):
mfd = TruncatedGRMFD(min_mag=6.0, max_mag=7.0, bin_width=0.1,
a_val=1, b_val=1)
self.assert_mfd_error(mfd.modify, 'set_ab', {'a_val': 0, 'b_val': 0})
|
larsbutler/oq-hazardlib
|
openquake/hazardlib/tests/mfd/truncated_gr_test.py
|
Python
|
agpl-3.0
| 9,724
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Service configuration for remote API.
This module is shared by both the remote_api_stub and the handler.
"""
import sys
from google.appengine.api import api_base_pb
from google.appengine.api import mail_service_pb
from google.appengine.api import urlfetch_service_pb
from google.appengine.api import user_service_pb
from google.appengine.api.app_identity import app_identity_service_pb
from google.appengine.api.blobstore import blobstore_service_pb
from google.appengine.api.capabilities import capability_service_pb
from google.appengine.api.channel import channel_service_pb
from google.appengine.api.files import file_service_pb
from google.appengine.api.images import images_service_pb
from google.appengine.api.logservice import log_service_pb
from google.appengine.api.memcache import memcache_service_pb
from google.appengine.api.modules import modules_service_pb
from google.appengine.api.prospective_search import prospective_search_pb
from google.appengine.api.remote_socket import remote_socket_service_pb
from google.appengine.api.search import search_service_pb
from google.appengine.api.system import system_service_pb
from google.appengine.api.taskqueue import taskqueue_service_pb
from google.appengine.api.xmpp import xmpp_service_pb
from google.appengine.datastore import datastore_pb
from google.appengine.datastore import datastore_v4_pb
from google.appengine.ext.remote_api import remote_api_pb
SERVICE_PB_MAP = {
'app_identity_service': {
'SignForApp': (app_identity_service_pb.SignForAppRequest,
app_identity_service_pb.SignForAppResponse),
'GetPublicCertificatesForApp': (
app_identity_service_pb.GetPublicCertificateForAppRequest,
app_identity_service_pb.GetPublicCertificateForAppResponse),
'GetServiceAccountName': (
app_identity_service_pb.GetServiceAccountNameRequest,
app_identity_service_pb.GetServiceAccountNameResponse),
'GetDefaultGcsBucketName': (
app_identity_service_pb.GetDefaultGcsBucketNameRequest,
app_identity_service_pb.GetDefaultGcsBucketNameResponse),
'GetAccessToken': (app_identity_service_pb.GetAccessTokenRequest,
app_identity_service_pb.GetAccessTokenResponse),
},
'blobstore': {
'CreateUploadURL': (blobstore_service_pb.CreateUploadURLRequest,
blobstore_service_pb.CreateUploadURLResponse),
'DeleteBlob': (blobstore_service_pb.DeleteBlobRequest,
api_base_pb.VoidProto),
'FetchData': (blobstore_service_pb.FetchDataRequest,
blobstore_service_pb.FetchDataResponse),
'DecodeBlobKey': (blobstore_service_pb.DecodeBlobKeyRequest,
blobstore_service_pb.DecodeBlobKeyResponse),
'CreateEncodedGoogleStorageKey':
(blobstore_service_pb.CreateEncodedGoogleStorageKeyRequest,
blobstore_service_pb.CreateEncodedGoogleStorageKeyResponse),
},
'capability_service': {
'IsEnabled': (capability_service_pb.IsEnabledRequest,
capability_service_pb.IsEnabledResponse),
},
'channel': {
'CreateChannel': (channel_service_pb.CreateChannelRequest,
channel_service_pb.CreateChannelResponse),
'SendChannelMessage': (channel_service_pb.SendMessageRequest,
api_base_pb.VoidProto),
},
'datastore_v3': {
'Get': (datastore_pb.GetRequest, datastore_pb.GetResponse),
'Put': (datastore_pb.PutRequest, datastore_pb.PutResponse),
'Delete': (datastore_pb.DeleteRequest, datastore_pb.DeleteResponse),
        'AllocateIds': (datastore_pb.AllocateIdsRequest,
                        datastore_pb.AllocateIdsResponse),
'RunQuery': (datastore_pb.Query,
datastore_pb.QueryResult),
'Next': (datastore_pb.NextRequest, datastore_pb.QueryResult),
        'BeginTransaction': (datastore_pb.BeginTransactionRequest,
                             datastore_pb.Transaction),
'Commit': (datastore_pb.Transaction,
datastore_pb.CommitResponse),
'Rollback': (datastore_pb.Transaction,
api_base_pb.VoidProto),
'GetIndices': (api_base_pb.StringProto,
datastore_pb.CompositeIndices),
},
'datastore_v4': {
'AllocateIds': (datastore_v4_pb.AllocateIdsRequest,
datastore_v4_pb.AllocateIdsResponse),
},
'file': {
'Create': (file_service_pb.CreateRequest,
file_service_pb.CreateResponse),
'Open': (file_service_pb.OpenRequest,
file_service_pb.OpenResponse),
'Close': (file_service_pb.CloseRequest,
file_service_pb.CloseResponse),
'Append': (file_service_pb.AppendRequest,
file_service_pb.AppendResponse),
'Stat': (file_service_pb.StatRequest,
file_service_pb.StatResponse),
'Delete': (file_service_pb.DeleteRequest,
file_service_pb.DeleteResponse),
'Read': (file_service_pb.ReadRequest,
file_service_pb.ReadResponse),
'ReadKeyValue': (file_service_pb.ReadKeyValueRequest,
file_service_pb.ReadKeyValueResponse),
'Shuffle': (file_service_pb.ShuffleRequest,
file_service_pb.ShuffleResponse),
'GetShuffleStatus': (file_service_pb.GetShuffleStatusRequest,
file_service_pb.GetShuffleStatusResponse),
'GetCapabilities': (file_service_pb.GetCapabilitiesRequest,
file_service_pb.GetCapabilitiesResponse),
'GetDefaultGsBucketName':
(file_service_pb.GetDefaultGsBucketNameRequest,
file_service_pb.GetDefaultGsBucketNameResponse),
'ListDir': (file_service_pb.ListDirRequest,
file_service_pb.ListDirResponse),
},
'images': {
'Transform': (images_service_pb.ImagesTransformRequest,
images_service_pb.ImagesTransformResponse),
'Composite': (images_service_pb.ImagesCompositeRequest,
images_service_pb.ImagesCompositeResponse),
'Histogram': (images_service_pb.ImagesHistogramRequest,
images_service_pb.ImagesHistogramResponse),
'GetUrlBase': (images_service_pb.ImagesGetUrlBaseRequest,
images_service_pb.ImagesGetUrlBaseResponse),
'DeleteUrlBase': (images_service_pb.ImagesDeleteUrlBaseRequest,
images_service_pb.ImagesDeleteUrlBaseResponse),
},
'logservice': {
'Flush': (log_service_pb.FlushRequest, api_base_pb.VoidProto),
'SetStatus': (log_service_pb.SetStatusRequest, api_base_pb.VoidProto),
'Read': (log_service_pb.LogReadRequest, log_service_pb.LogReadResponse),
},
'mail': {
'Send': (mail_service_pb.MailMessage, api_base_pb.VoidProto),
'SendToAdmins': (mail_service_pb.MailMessage, api_base_pb.VoidProto),
},
'matcher': {
'Subscribe': (prospective_search_pb.SubscribeRequest,
prospective_search_pb.SubscribeResponse),
'Unsubscribe': (prospective_search_pb.UnsubscribeRequest,
prospective_search_pb.UnsubscribeResponse),
'ListSubscriptions': (prospective_search_pb.ListSubscriptionsRequest,
prospective_search_pb.ListSubscriptionsResponse),
'ListTopics': (prospective_search_pb.ListTopicsRequest,
prospective_search_pb.ListTopicsResponse),
'Match': (prospective_search_pb.MatchRequest,
prospective_search_pb.MatchResponse),
},
'memcache': {
'Get': (memcache_service_pb.MemcacheGetRequest,
memcache_service_pb.MemcacheGetResponse),
'Set': (memcache_service_pb.MemcacheSetRequest,
memcache_service_pb.MemcacheSetResponse),
'Delete': (memcache_service_pb.MemcacheDeleteRequest,
memcache_service_pb.MemcacheDeleteResponse),
'Increment': (memcache_service_pb.MemcacheIncrementRequest,
memcache_service_pb.MemcacheIncrementResponse),
'BatchIncrement': (memcache_service_pb.MemcacheBatchIncrementRequest,
memcache_service_pb.MemcacheBatchIncrementResponse),
'FlushAll': (memcache_service_pb.MemcacheFlushRequest,
memcache_service_pb.MemcacheFlushResponse),
'Stats': (memcache_service_pb.MemcacheStatsRequest,
memcache_service_pb.MemcacheStatsResponse),
},
'remote_datastore': {
'RunQuery': (datastore_pb.Query, datastore_pb.QueryResult),
'TransactionQuery': (datastore_pb.Query,
remote_api_pb.TransactionQueryResult),
'Transaction': (remote_api_pb.TransactionRequest,
datastore_pb.PutResponse),
'GetIDs': (datastore_pb.PutRequest, datastore_pb.PutResponse),
'GetIDsXG': (datastore_pb.PutRequest, datastore_pb.PutResponse),
},
'remote_socket': {
'CreateSocket': (remote_socket_service_pb.CreateSocketRequest,
remote_socket_service_pb.CreateSocketReply),
'Bind': (remote_socket_service_pb.BindRequest,
remote_socket_service_pb.BindReply),
'GetSocketName': (remote_socket_service_pb.GetSocketNameRequest,
remote_socket_service_pb.GetSocketNameReply),
'GetPeerName': (remote_socket_service_pb.GetPeerNameRequest,
remote_socket_service_pb.GetPeerNameReply),
'SetSocketOptions': (remote_socket_service_pb.SetSocketOptionsRequest,
remote_socket_service_pb.SetSocketOptionsReply),
'GetSocketOptions': (remote_socket_service_pb.GetSocketOptionsRequest,
remote_socket_service_pb.GetSocketOptionsReply),
'Connect': (remote_socket_service_pb.ConnectRequest,
remote_socket_service_pb.ConnectReply),
'Listen': (remote_socket_service_pb.ListenRequest,
remote_socket_service_pb.ListenReply),
'Accept': (remote_socket_service_pb.AcceptRequest,
remote_socket_service_pb.AcceptReply),
'ShutDown': (remote_socket_service_pb.ShutDownRequest,
remote_socket_service_pb.ShutDownReply),
'Close': (remote_socket_service_pb.CloseRequest,
remote_socket_service_pb.CloseReply),
'Send': (remote_socket_service_pb.SendRequest,
remote_socket_service_pb.SendReply),
'Receive': (remote_socket_service_pb.ReceiveRequest,
remote_socket_service_pb.ReceiveReply),
'Poll': (remote_socket_service_pb.PollRequest,
remote_socket_service_pb.PollReply),
'Resolve': (remote_socket_service_pb.ResolveRequest,
remote_socket_service_pb.ResolveReply),
},
'search': {
'IndexDocument': (search_service_pb.IndexDocumentRequest,
search_service_pb.IndexDocumentResponse),
'DeleteDocument': (search_service_pb.DeleteDocumentRequest,
search_service_pb.DeleteDocumentResponse),
'ListDocuments': (search_service_pb.ListDocumentsRequest,
search_service_pb.ListDocumentsResponse),
'ListIndexes': (search_service_pb.ListIndexesRequest,
search_service_pb.ListIndexesResponse),
'Search': (search_service_pb.SearchRequest,
search_service_pb.SearchResponse),
},
'modules': {
'GetModules': (modules_service_pb.GetModulesRequest,
modules_service_pb.GetModulesResponse),
'GetVersions': (modules_service_pb.GetVersionsRequest,
modules_service_pb.GetVersionsResponse),
'GetDefaultVersion': (modules_service_pb.GetDefaultVersionRequest,
modules_service_pb.GetDefaultVersionResponse),
'GetNumInstances': (modules_service_pb.GetNumInstancesRequest,
modules_service_pb.GetNumInstancesResponse),
'SetNumInstances': (modules_service_pb.SetNumInstancesRequest,
modules_service_pb.SetNumInstancesResponse),
'StartModule': (modules_service_pb.StartModuleRequest,
modules_service_pb.StartModuleResponse),
'StopModule': (modules_service_pb.StopModuleRequest,
modules_service_pb.StopModuleResponse),
'GetHostname': (modules_service_pb.GetHostnameRequest,
modules_service_pb.GetHostnameResponse),
},
'system': {
'GetSystemStats': (system_service_pb.GetSystemStatsRequest,
system_service_pb.GetSystemStatsResponse),
'StartBackgroundRequest': (
system_service_pb.StartBackgroundRequestRequest,
system_service_pb.StartBackgroundRequestResponse),
},
'taskqueue': {
'Add': (taskqueue_service_pb.TaskQueueAddRequest,
taskqueue_service_pb.TaskQueueAddResponse),
'BulkAdd': (taskqueue_service_pb.TaskQueueBulkAddRequest,
taskqueue_service_pb.TaskQueueBulkAddResponse),
'FetchQueues': (taskqueue_service_pb.TaskQueueFetchQueuesRequest,
taskqueue_service_pb.TaskQueueFetchQueuesResponse),
'FetchQueueStats': (
taskqueue_service_pb.TaskQueueFetchQueueStatsRequest,
taskqueue_service_pb.TaskQueueFetchQueueStatsResponse),
'Delete': (taskqueue_service_pb.TaskQueueDeleteRequest,
taskqueue_service_pb.TaskQueueDeleteResponse),
'ForceRun': (taskqueue_service_pb.TaskQueueForceRunRequest,
taskqueue_service_pb.TaskQueueForceRunResponse),
'UpdateQueue': (taskqueue_service_pb.TaskQueueUpdateQueueRequest,
taskqueue_service_pb.TaskQueueUpdateQueueResponse),
'PauseQueue': (taskqueue_service_pb.TaskQueuePauseQueueRequest,
taskqueue_service_pb.TaskQueuePauseQueueResponse),
'PurgeQueue': (taskqueue_service_pb.TaskQueuePurgeQueueRequest,
taskqueue_service_pb.TaskQueuePurgeQueueResponse),
'DeleteQueue': (taskqueue_service_pb.TaskQueueDeleteQueueRequest,
taskqueue_service_pb.TaskQueueDeleteQueueResponse),
'DeleteGroup': (taskqueue_service_pb.TaskQueueDeleteGroupRequest,
taskqueue_service_pb.TaskQueueDeleteGroupResponse),
'QueryTasks': (taskqueue_service_pb.TaskQueueQueryTasksRequest,
taskqueue_service_pb.TaskQueueQueryTasksResponse),
'FetchTask': (taskqueue_service_pb.TaskQueueFetchTaskRequest,
taskqueue_service_pb.TaskQueueFetchTaskResponse),
'QueryAndOwnTasks': (
taskqueue_service_pb.TaskQueueQueryAndOwnTasksRequest,
taskqueue_service_pb.TaskQueueQueryAndOwnTasksResponse),
'ModifyTaskLease': (
taskqueue_service_pb.TaskQueueModifyTaskLeaseRequest,
taskqueue_service_pb.TaskQueueModifyTaskLeaseResponse),
'UpdateStorageLimit': (
taskqueue_service_pb.TaskQueueUpdateStorageLimitRequest,
taskqueue_service_pb.TaskQueueUpdateStorageLimitResponse),
},
'urlfetch': {
'Fetch': (urlfetch_service_pb.URLFetchRequest,
urlfetch_service_pb.URLFetchResponse),
},
'user': {
'CreateLoginURL': (user_service_pb.CreateLoginURLRequest,
user_service_pb.CreateLoginURLResponse),
'CreateLogoutURL': (user_service_pb.CreateLogoutURLRequest,
user_service_pb.CreateLogoutURLResponse),
'GetOAuthUser': (user_service_pb.GetOAuthUserRequest,
user_service_pb.GetOAuthUserResponse),
'CheckOAuthSignature': (user_service_pb.CheckOAuthSignatureRequest,
user_service_pb.CheckOAuthSignatureResponse),
},
'xmpp': {
'GetPresence': (xmpp_service_pb.PresenceRequest,
xmpp_service_pb.PresenceResponse),
'BulkGetPresence': (xmpp_service_pb.BulkPresenceRequest,
xmpp_service_pb.BulkPresenceResponse),
'SendMessage': (xmpp_service_pb.XmppMessageRequest,
xmpp_service_pb.XmppMessageResponse),
'SendInvite': (xmpp_service_pb.XmppInviteRequest,
xmpp_service_pb.XmppInviteResponse),
'SendPresence': (xmpp_service_pb.XmppSendPresenceRequest,
xmpp_service_pb.XmppSendPresenceResponse),
'CreateChannel': (channel_service_pb.CreateChannelRequest,
channel_service_pb.CreateChannelResponse),
'SendChannelMessage': (channel_service_pb.SendMessageRequest,
api_base_pb.VoidProto),
},
}
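
# Editor's note: a hedged sketch of how SERVICE_PB_MAP is meant to be
# consumed; the helper below is illustrative, not App Engine SDK API.
def _decode_remote_call(service, method, raw_request_bytes):
  """Look up the protobuf classes for (service, method) and decode a request.

  Raises KeyError for unknown services or methods. ParseFromString is the
  standard ProtocolMessage API provided by these generated _pb modules.
  """
  request_class, response_class = SERVICE_PB_MAP[service][method]
  request = request_class()
  request.ParseFromString(raw_request_bytes)
  # Return the decoded request along with an empty response to fill in.
  return request, response_class()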
|
Kazade/NeHe-Website
|
google_appengine/google/appengine/ext/remote_api/remote_api_services.py
|
Python
|
bsd-3-clause
| 17,948
|
from . import unittest
from shapely.geometry import LineString
class ProductZTestCase(unittest.TestCase):
def test_line_intersection(self):
line1 = LineString([(0, 0, 0), (1, 1, 1)])
line2 = LineString([(0, 1, 1), (1, 0, 0)])
interxn = line1.intersection(line2)
self.assertTrue(interxn.has_z)
self.assertEqual(interxn._ndim, 3)
self.assertTrue(0.0 <= interxn.z <= 1.0)
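        # Editor's note: the planar intersection is at (0.5, 0.5); the z value
        # GEOS assigns there depends on its interpolation behavior, which is
        # presumably why the assertion only bounds z to the segments' z range.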
def test_suite():
return unittest.TestLoader().loadTestsFromTestCase(ProductZTestCase)
|
jdmcbr/Shapely
|
tests/test_products_z.py
|
Python
|
bsd-3-clause
| 517
|
__author__ = 'rafa'
import pygtk
pygtk.require('2.0')
import gtk
import MySQLdb
class Handler():
    def delete_event(self, widget, data=None):
        print("Call from Glade.")
        # TIP: returning 0 lets the window be destroyed; returning 1 stops
        # the event, so the program is not killed.
        # TODO: show a confirmation dialog here asking whether to exit.
class Principal:
"""
    Contains all the main functionality.
"""
def __init__(self, oLogin=None, glade=None ):
self.db = oLogin.db
self.cSql = None
self.glade = glade
self.database = oLogin.database
        # Create the gtk.Window object loaded from the Glade file
        self.window = self.glade.get_object('consultas')
        # Example of connecting a signal manually.
        self.window.connect("destroy", self.destroy)
        self.window.connect("key-press-event", self.on_key_press_event)
        # From Glade, the delete_event signal of the 'consultas' window can
        # also be routed to the Handler class:
        # self.glade.connect_signals(Handler())
        self.glade.connect_signals(self)
self.status_bar()
self.status_setText( "Database in use:" + self.database )
self.view_lista = self.glade.get_object('treeview_consulta')
self.textview_sql = self.glade.get_object('textview_ordenes')
self.textview_sql.grab_focus()
        self.view_tree = self.glade.get_object('treeview_tables')  # on row-activated: Activa(path, TreeViewColumn, oTextView, oServer, oBar, oTreeView)
self.mount_treeview()
self.window.show()
def mount_treeview(self):
pbd_bd = gtk.gdk.pixbuf_new_from_file("./images/bd.png")
pbd_table = gtk.gdk.pixbuf_new_from_file("./images/table.png")
pbd_field = gtk.gdk.pixbuf_new_from_file("./images/field.png")
        # Query for the databases
sql = "Select schema_name From `INFORMATION_SCHEMA`.`SCHEMATA`"
cursor = self.db.cursor()
try:
cursor.execute(sql)
result_db = cursor.fetchall()
except MySQLdb.Error, e:
self.status_setText( "Error %d: %s" % (e.args[0], e.args[1]) )
return
self.treestore = gtk.TreeStore(gtk.gdk.Pixbuf, str)
for bd in result_db:
iter = self.treestore.append(None,[pbd_bd, bd[0]] )
cursor1 = self.db.cursor()
try:
cursor1.execute( "show tables from " + bd[0])
result_table = cursor1.fetchall()
except MySQLdb.Error, e:
self.status_setText( "Error %d: %s" % (e.args[0], e.args[1]) )
return
for table in result_table:
iterchild = self.treestore.append(iter,[pbd_table, table[0] ] )
cursor2 = self.db.cursor()
try:
cursor2.execute( "show columns from " + bd[0] + "." + table[0])
result_field = cursor2.fetchall()
except MySQLdb.Error, e:
self.status_setText( "Error %d: %s" % (e.args[0], e.args[1]) )
return
for field in result_field:
self.treestore.append(iterchild,[pbd_field, field[0] ] )
                if cursor2 is not None:
                    cursor2.close()
            if cursor1 is not None:
                cursor1.close()
cursor.close()
        # Create columns from the field names
column = gtk.TreeViewColumn( "", gtk.CellRendererPixbuf(), pixbuf=0)
self.view_tree.append_column( column )
column = gtk.TreeViewColumn( "Database", gtk.CellRendererText(), text=1 )
self.view_tree.append_column( column )
self.view_tree.set_model( self.treestore )
def status_bar(self):
self.status_bar = self.glade.get_object( "statusbar")
self.context_id = self.status_bar.get_context_id("database")
def status_setText(self,cText):
self.status_bar.pop(self.context_id)
self.status_bar.push( self.context_id, cText)
def execute_sql(self, widget, data=None):
self.setQuery()
def setQuery(self):
self.status_setText( "Database in use:" + self.database )
textbuffer = self.textview_sql.get_buffer()
self.cSql = textbuffer.get_text(*textbuffer.get_bounds())
        # Remove the columns of the old view
nOld_Fields = self.getTotalColumns()
if nOld_Fields != 0:
for column in self.view_lista.get_columns():
self.view_lista.remove_column( column )
        # Clear the view's model data
oModel = self.view_lista.get_model()
        if oModel is not None:
oModel.clear()
self.view_lista.set_model()
        # Execute the SQL
cursor = self.db.cursor()
try:
cursor.execute(self.cSql)
result = cursor.fetchall()
except MySQLdb.Error, e:
self.status_setText( "Error %d: %s" % (e.args[0], e.args[1]) )
return
        if len(result) == 0:  # a statement like "USE <db>" returns no rows; clear the text
textbuffer.set_text("")
cur = self.db.cursor()
cur.execute("SELECT DATABASE()")
self.database = cur.fetchone()[0]
self.status_setText( "Database in use:" + self.database )
cur.close()
cursor.close()
return
num_fields = len(cursor.description)
field_names = [i[0] for i in cursor.description] # Name of fields
        # Create columns from the field names
i = 0
for nombre in field_names:
self.AddListColumn(nombre, i)
i = i + 1
        # Create a dynamic model of str columns for the view
ListStore = gtk.ListStore(*([str] * num_fields))
for value in result:
ListStore.append(value)
self.view_lista.set_model(ListStore)
cursor.close()
def AddListColumn(self, title, columnId):
column = gtk.TreeViewColumn(title, gtk.CellRendererText(), text=columnId)
column.set_resizable(True)
column.set_sort_column_id(columnId)
self.view_lista.append_column( column )
def AddColumnPixbuf(self, title, columnId):
column = gtk.TreeViewColumn(title, gtk.CellRendererPixbuf(), pixbuf=columnId)
self.view_lista.append_column( column )
def AddColumnPixbufTree(self, title, columnId):
column = gtk.TreeViewColumn(title, gtk.CellRendererPixbuf(), pixbuf=columnId)
self.treestore.append_column( column )
def getTotalColumns(self):
return len(self.view_lista.get_columns())
def on_key_press_event(self, widget, event):
keyname = gtk.gdk.keyval_name(event.keyval)
if keyname == "F5":
self.setQuery()
return 1
    def delete_event(self, widget, data=None):
        messagedialog = gtk.MessageDialog(self.window, 0, gtk.MESSAGE_QUESTION, gtk.BUTTONS_YES_NO)
        messagedialog.set_markup("<b>%s</b>" % "Hi pythoniso!")
        messagedialog.format_secondary_markup("Do you really want to exit the program?")
response = messagedialog.run()
messagedialog.destroy()
if response == gtk.RESPONSE_YES:
return 0
elif response == gtk.RESPONSE_NO:
return 1
    # Exit the application
def destroy(self, widget, data=None):
print( "Salgo de Aqui")
gtk.main_quit()
def main(self):
gtk.main()
return 0
|
rafathefull/tinnydb
|
principal.py
|
Python
|
gpl-3.0
| 7,430
|
#!/usr/bin/python3
"""
File: QuTech_VSM_Module.py
Author: Jeroen Bergmans, TNO/QuTech
Purpose: Instrument driver for Qutech Vector Switch Matrix
Usage:
Notes: # General
The VSM consists of 32 qubit tuners:
8 modules (numbered from left to right)
4 channels per module (numbered from bottom to top)
# Temperature
Temperature of the tuners is regulated, and can be monitored via
the _temperature_ parameters.
# Markers
Markers control switching the channel state on or off either by the
        external marker connector (source=external) or by software commands
(source=internal). Marker state has no effect when source is
external.
# Qubits
Qubit parameters characterise the channels. For a single channel
row (so all modules together) one can set the frequency, led color,
and displayed description on the VSM display.
The frequency also determines the attenuation and phase calibration
of the channels, see below.
Each qubit output (a module, channel pair) can also be switched on
or off. In the off state, the led is off, marker state is off,
and attenuation is maximal.
# Calibration
Each qubit output (a module, channel pair) has two inputs:
the _gaussian_ pulse and the _derivative_ pulse. For each pulse the
attenuation and phase can be controlled via a 16bit DAC.
Setting raw attenuation and phase DAC values does not
control the channel attenuation and phase linearly, so in addition
to the RAW parameters a calibration table has to be used to find
DAC values for given attenuation and phase.
Bugs: Probably.
"""
from .SCPI import SCPI
from qcodes import validators
class QuTechVSMModule(SCPI):
def __init__(self, name, address, port, **kwargs):
super().__init__(name, address, port, **kwargs)
self.modules = [1, 2, 3, 4, 5, 6, 7, 8]
self.channels = [1, 2, 3, 4]
self.add_parameters()
self.connect_message()
def add_parameters(self):
self.add_temperature_parameters()
self.add_marker_parameters()
self.add_qubit_parameters()
self.add_calibration_parameters()
def add_temperature_parameters(self):
self.add_parameter('temperature_avg',
docstring='Temperature (in ℃) averaged over '
'all VSM qubit tuner sensors.',
unit='℃',
get_cmd='TEMPERATURE?')
for mod in self.modules:
mod_name = 'mod{m}'.format(m=mod)
mod_scpi = 'MODULE{m}'.format(m=mod)
# Analog sensors
for channel in self.channels:
doc = ('Temperature (in ℃) of qubit tuner '
'on module {m}, channel {c}.'.format(m=mod, c=channel))
ch_name = '{m}_ch{c}'.format(m=mod_name, c=channel)
ch_scpi = '{m}:CHANNEL{c}'.format(m=mod_scpi, c=channel)
self.add_parameter('temperature_' + ch_name,
docstring=doc,
unit='℃',
get_cmd='TEMPERATURE:'+ch_scpi+'?',
get_parser=float)
# Digital sensor
self.add_parameter('temperature_' + mod_name + '_digital',
docstring='Temperature (in ℃) of the separate '
'digital temperature sensor on each '
'module.',
unit='℃',
get_cmd='TEMPERATURE:' + mod_scpi + ':DIGITAL?',
get_parser=float)
def add_marker_parameters(self):
# Each (module, channel) separately
for mod in self.modules:
mod_name = 'mod{m}'.format(m=mod)
mod_scpi = 'MODULE{m}'.format(m=mod)
doc_source = 'Marker source of module {m}.'.format(m=mod)
self.add_parameter('marker_' + mod_name + '_source',
docstring=doc_source,
get_cmd='MARKER:'+mod_scpi+':SOURCE?',
set_cmd='MARKER:'+mod_scpi+':SOURCE {}',
vals=validators.Enum('int', 'ext'))
for channel in self.channels:
mod_ch_name = 'mod{m}_ch{c}'.format(m=mod, c=channel)
mod_ch_scpi = 'MODULE{m}:CHANNEL{c}'.format(m=mod,
c=channel)
doc_state = 'Marker state of module {m}, ' \
'channel {c}.'.format(m=mod, c=channel)
self.add_parameter('marker_' + mod_ch_name + '_state',
docstring=doc_state,
get_cmd='MARKER:'+mod_ch_scpi+':STATE?',
set_cmd='MARKER:'+mod_ch_scpi+':STATE {}',
vals=validators.OnOff())
# Marker breakout board
self.add_parameter('mbbc_state',
docstring='Whether the _marker breakout board_ is '
'connected to the VSM.',
get_cmd='MBBC?',
vals=validators.Enum('connected', 'disconnected'))
def add_qubit_parameters(self):
# Qubit attributes are set per channel row (so all modules in one go)
for channel in self.channels:
ch_name = '_ch{c}'.format(c=channel)
ch_scpi = ':CHANNEL{c}'.format(c=channel)
doc_description = 'Qubit description on display ' \
'for row {c}'.format(c=channel)
self.add_parameter('qubit' + ch_name + '_description',
docstring=doc_description,
get_cmd='QUBIT'+ch_scpi+':DESCRIPTION?',
set_cmd='QUBIT'+ch_scpi+':DESCRIPTION {}',
vals=validators.Strings())
doc_frequency = 'Qubit frequency in Hz for row {c}. ' \
'Range 4.0E9--8.0E9 Hz.'.format(c=channel)
self.add_parameter('qubit' + ch_name + '_frequency',
docstring=doc_frequency,
unit='Hz',
get_cmd='QUBIT'+ch_scpi+':FREQUENCY?',
set_cmd='QUBIT'+ch_scpi+':FREQUENCY {}',
vals=validators.Numbers(),
get_parser=float)
colors = ', '.join(['black', 'blue', 'green', 'grey', 'orange',
'red', 'white', 'yellow', 'dcl_blue',
'dcl_green', 'dcl_red', 'dcl_violet'])
doc_color = 'Qubit led and display color for row {c}. ' \
'Can either be one of the predefined colors ({lst})' \
'or a RGB hex string like "#rrggbb".'.format(c=channel,
lst=colors)
self.add_parameter('qubit' + ch_name + '_led_color',
docstring=doc_color,
get_cmd='QUBIT'+ch_scpi+':LEDCOLOR?',
set_cmd='QUBIT'+ch_scpi+':LEDCOLOR {}',
vals=validators.Strings())
# Individual channels can be switched on or off
for mod in self.modules:
mod_ch_name = '_mod{m}_ch{c}'.format(m=mod, c=channel)
mod_ch_scpi = ':MODULE{m}:CHANNEL{c}'.format(m=mod, c=channel)
doc_on_off = 'On/off state for channel {c} of ' \
'module {m}'.format(m=mod, c=channel)
self.add_parameter('qubit' + mod_ch_name,
docstring=doc_on_off,
get_cmd='QUBIT'+mod_ch_scpi+'?',
set_cmd='QUBIT'+mod_ch_scpi+' {}',
vals=validators.OnOff())
def add_calibration_parameters(self):
# Raw Calibration
# Two input pulses
for pulse in ('gaussian', 'derivative'):
# Two DACs
for dac in ('attenuation', 'phase'):
var_name = '_{p}_{d}_raw'.format(p=pulse, d=dac)
var_scpi = ':{p}:{d}:RAW'.format(p=pulse.upper(), d=dac.upper())
# Individual outputs: per (module, channel) pair
for channel in self.channels:
for mod in self.modules:
# Raw DAC values
doc_dac = 'Raw {d} DAC value (0--65535) for the {p} ' \
'input of channel {c} ' \
'of module {m}.'.format(p=pulse, d=dac,
c=channel, m=mod)
ch_name = '_mod{m}_ch{c}'.format(m=mod, c=channel)
ch_scpi = ':MODULE{m}:CHANNEL{c}'.format(m=mod,
c=channel)
scpi_name = 'CALIBRATION' + ch_scpi + var_scpi
self.add_parameter(
'calibration' + ch_name + var_name,
docstring=doc_dac,
get_cmd=scpi_name + '?',
set_cmd=scpi_name + ' {}',
get_parser=int,
vals=validators.Ints(min_value=0, max_value=2**16-1)
)
# Attenuation and phase
# Two input pulses
for pulse in ('gaussian', 'derivative'):
for channel in self.channels:
for mod in self.modules:
ch_name = '_mod{m}_ch{c}'.format(m=mod, c=channel)
ch_scpi = ':MODULE{m}:CHANNEL{c}'.format(m=mod, c=channel)
doc_var = 'Attenuation value (in dB) for the {p} ' \
'input of channel {c} ' \
'of module {m}.'.format(p=pulse, c=channel, m=mod)
var_name = ch_name + '_{p}_att_db'.format(p=pulse)
var_scpi = ch_scpi + ':{p}:ATTENUATION:DB'.format(p=pulse.upper())
scpi_name = 'CALIBRATION' + var_scpi
self.add_parameter('calibration' + var_name,
docstring=doc_var,
get_cmd=scpi_name + '?',
set_cmd=scpi_name + ' {}',
unit='dB',
get_parser=float,
vals=validators.Numbers())
doc_var = 'Attenuation value (linear) for the {p} ' \
'input of channel {c} ' \
'of module {m}.'.format(p=pulse, c=channel, m=mod)
var_name = ch_name + '_{p}_att_lin'.format(p=pulse)
var_scpi = ch_scpi + ':{p}:ATTENUATION:LIN'.format(p=pulse.upper())
scpi_name = 'CALIBRATION' + var_scpi
self.add_parameter('calibration' + var_name,
docstring=doc_var,
get_cmd=scpi_name + '?',
set_cmd=scpi_name + ' {}',
get_parser=float,
vals=validators.Numbers())
doc_var = 'Phase value (in rad) for the {p} ' \
'input of channel {c} ' \
'of module {m}.'.format(p=pulse, c=channel, m=mod)
var_name = ch_name + '_{p}_phs_rad'.format(p=pulse)
var_scpi = ch_scpi + ':{p}:PHASE:RAD'.format(p=pulse.upper())
scpi_name = 'CALIBRATION' + var_scpi
self.add_parameter('calibration' + var_name,
docstring=doc_var,
get_cmd=scpi_name + '?',
set_cmd=scpi_name + ' {}',
unit='rad',
get_parser=float,
vals=validators.Numbers())
doc_var = 'Phase value (in deg) for the {p} ' \
'input of channel {c} ' \
'of module {m}.'.format(p=pulse, c=channel, m=mod)
var_name = ch_name + '_{p}_phs_deg'.format(p=pulse)
var_scpi = ch_scpi + ':{p}:PHASE:DEG'.format(p=pulse.upper())
scpi_name = 'CALIBRATION' + var_scpi
self.add_parameter('calibration' + var_name,
docstring=doc_var,
get_cmd=scpi_name + '?',
set_cmd=scpi_name + ' {}',
unit='deg',
get_parser=float,
vals=validators.Numbers())
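
# Editor's note: a hedged usage sketch, not part of the driver. The address
# and port below are made-up placeholders; the parameter names follow the
# add_*_parameters() methods above (QCoDeS parameters are called with no
# argument to get, and with a value to set).
def _example_usage():
    vsm = QuTechVSMModule('vsm', address='192.168.0.10', port=5025)
    print(vsm.temperature_avg())      # average tuner temperature
    vsm.marker_mod1_source('int')     # put markers under software control
    vsm.marker_mod1_ch1_state('on')   # switch module 1, channel 1 on
    vsm.qubit_ch1_frequency(6.2e9)    # row-1 frequency, drives calibration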
|
DiCarloLab-Delft/PycQED_py3
|
deprecated/pycqed/instrument_drivers/physical_instruments/attic/QuTech_VSM_Module_jeroen.py
|
Python
|
mit
| 13,698
|
switch_failure_rate = 0.0
switch_recovery_rate = 1.0
dataplane_drop_rate = 0.0
controlplane_block_rate = 0.0
controlplane_unblock_rate = 1.0
ofp_message_receipt_rate = 1.0
ofp_message_send_rate = 1.0
ofp_cmd_passthrough_rate = 1.0
ofp_flow_mod_failure_rate = 0.0
link_failure_rate = 0.0
link_recovery_rate = 1.0
controller_crash_rate = 0.0
controller_recovery_rate = 1.0
traffic_generation_rate = 0.05
host_migration_rate = 0.0
intracontroller_block_rate = 0.0
intracontroller_unblock_rate = 0.0
vip_ip_list = []
vip_traffic_percentage = 1.0
app_floodlight_circuitpusher_add_rate = 1
app_floodlight_circuitpusher_add_parallelism = 1
app_floodlight_circuitpusher_del_rate = 0.5
app_floodlight_circuitpusher_del_parallelism = 1
app_floodlight_firewall_allow_percentage = 0.8
app_floodlight_load_balancer_pool_size = 2
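
# Editor's note (assumption): judging by the names and values, each *_rate
# above is the per-fuzzing-round probability that the corresponding event is
# injected, roughly (illustrative pseudocode, not sts code):
#
#   if random.random() < switch_failure_rate:
#       crash_a_random_switch()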
|
jmiserez/sts
|
config/fuzzer_params.py
|
Python
|
apache-2.0
| 815
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import re
import mock
from twisted.trial import unittest
from twisted.internet import defer
from twisted.python import log
from buildbot.process import buildstep
from buildbot.process.buildstep import regex_log_evaluator
from buildbot.status.results import FAILURE, SUCCESS, WARNINGS, EXCEPTION
from buildbot.test.fake import fakebuild, remotecommand
from buildbot.test.util import config, steps, compat
from buildbot.util.eventual import eventually
class FakeLogFile:
def __init__(self, text):
self.text = text
def getText(self):
return self.text
class FakeStepStatus:
pass
class TestRegexLogEvaluator(unittest.TestCase):
def makeRemoteCommand(self, rc, stdout, stderr=''):
cmd = remotecommand.FakeRemoteCommand('cmd', {})
cmd.fakeLogData(self, 'stdio', stdout=stdout, stderr=stderr)
cmd.rc = rc
return cmd
def test_find_worse_status(self):
cmd = self.makeRemoteCommand(0, 'This is a big step')
step_status = FakeStepStatus()
r = [(re.compile("This is"), WARNINGS)]
new_status = regex_log_evaluator(cmd, step_status, r)
self.assertEqual(new_status, WARNINGS,
"regex_log_evaluator returned %d, expected %d"
% (new_status, WARNINGS))
def test_multiple_regexes(self):
cmd = self.makeRemoteCommand(0, "Normal stdout text\nan error")
step_status = FakeStepStatus()
r = [(re.compile("Normal stdout"), SUCCESS),
(re.compile("error"), FAILURE)]
new_status = regex_log_evaluator(cmd, step_status, r)
self.assertEqual(new_status, FAILURE,
"regex_log_evaluator returned %d, expected %d"
% (new_status, FAILURE))
def test_exception_not_in_stdout(self):
cmd = self.makeRemoteCommand(0,
"Completely normal output", "exception output")
step_status = FakeStepStatus()
r = [(re.compile("exception"), EXCEPTION)]
new_status = regex_log_evaluator(cmd, step_status, r)
self.assertEqual(new_status, EXCEPTION,
"regex_log_evaluator returned %d, expected %d"
% (new_status, EXCEPTION))
def test_pass_a_string(self):
cmd = self.makeRemoteCommand(0, "Output", "Some weird stuff on stderr")
step_status = FakeStepStatus()
r = [("weird stuff", WARNINGS)]
new_status = regex_log_evaluator(cmd, step_status, r)
self.assertEqual(new_status, WARNINGS,
"regex_log_evaluator returned %d, expected %d"
% (new_status, WARNINGS))
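
# Editor's note: the tests above exercise regex_log_evaluator's contract; the
# sketch below is an illustrative reimplementation under the assumption that
# buildbot status constants compare as integers with higher meaning worse
# (SUCCESS < WARNINGS < FAILURE < EXCEPTION). It is not the buildbot source.
def _regex_log_evaluator_sketch(log_text, regexes):
    worst = SUCCESS
    for regex, status in regexes:
        if isinstance(regex, str):
            regex = re.compile(regex)  # plain strings are accepted as well
        if regex.search(log_text) and status > worst:
            worst = status
    return worst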
class TestBuildStep(steps.BuildStepMixin, config.ConfigErrorsMixin, unittest.TestCase):
class FakeBuildStep(buildstep.BuildStep):
def start(self):
eventually(self.finished, 0)
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
# support
def _setupWaterfallTest(self, hideStepIf, expect, expectedResult=SUCCESS):
self.setupStep(TestBuildStep.FakeBuildStep(hideStepIf=hideStepIf))
self.expectOutcome(result=expectedResult, status_text=["generic"])
self.expectHidden(expect)
# tests
def test_nameIsntString(self):
"""
When BuildStep is passed a name that isn't a string, it reports
a config error.
"""
self.assertRaisesConfigError("BuildStep name must be a string",
lambda: buildstep.BuildStep(name=5))
def test_unexpectedKeywordArgument(self):
"""
When BuildStep is passed an unknown keyword argument, it reports
a config error.
"""
self.assertRaisesConfigError("__init__ got unexpected keyword argument(s) ['oogaBooga']",
lambda: buildstep.BuildStep(oogaBooga=5))
def test_getProperty(self):
bs = buildstep.BuildStep()
bs.build = fakebuild.FakeBuild()
props = bs.build.build_status.properties = mock.Mock()
bs.getProperty("xyz", 'b')
props.getProperty.assert_called_with("xyz", 'b')
bs.getProperty("xyz")
props.getProperty.assert_called_with("xyz", None)
def test_setProperty(self):
bs = buildstep.BuildStep()
bs.build = fakebuild.FakeBuild()
props = bs.build.build_status.properties = mock.Mock()
bs.setProperty("x", "y", "t")
props.setProperty.assert_called_with("x", "y", "t", runtime=True)
bs.setProperty("x", "abc", "test", runtime=True)
props.setProperty.assert_called_with("x", "abc", "test", runtime=True)
def test_hideStepIf_False(self):
self._setupWaterfallTest(False, False)
return self.runStep()
def test_hideStepIf_True(self):
self._setupWaterfallTest(True, True)
return self.runStep()
def test_hideStepIf_Callable_False(self):
called = [False]
def shouldHide(result, step):
called[0] = True
self.assertTrue(step is self.step)
self.assertEquals(result, SUCCESS)
return False
self._setupWaterfallTest(shouldHide, False)
d = self.runStep()
d.addCallback(lambda _ : self.assertTrue(called[0]))
return d
def test_hideStepIf_Callable_True(self):
called = [False]
def shouldHide(result, step):
called[0] = True
self.assertTrue(step is self.step)
self.assertEquals(result, SUCCESS)
return True
self._setupWaterfallTest(shouldHide, True)
d = self.runStep()
d.addCallback(lambda _ : self.assertTrue(called[0]))
return d
def test_hideStepIf_fails(self):
        # 0/0 raises ZeroDivisionError, which should be flagged as an exception
self._setupWaterfallTest(lambda : 0/0, False, expectedResult=EXCEPTION)
return self.runStep()
@compat.usesFlushLoggedErrors
def test_hideStepIf_Callable_Exception(self):
called = [False]
def shouldHide(result, step):
called[0] = True
self.assertTrue(step is self.step)
self.assertEquals(result, EXCEPTION)
return True
def createException(*args, **kwargs):
raise RuntimeError()
self.setupStep(self.FakeBuildStep(hideStepIf=shouldHide,
doStepIf=createException))
self.expectOutcome(result=EXCEPTION,
status_text=['generic', 'exception'])
self.expectHidden(True)
d = self.runStep()
d.addErrback(log.err)
d.addCallback(lambda _ :
self.assertEqual(len(self.flushLoggedErrors(defer.FirstError)), 1))
d.addCallback(lambda _:
self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1))
d.addCallback(lambda _ : self.assertTrue(called[0]))
return d
class TestLoggingBuildStep(unittest.TestCase):
def makeRemoteCommand(self, rc, stdout, stderr=''):
cmd = remotecommand.FakeRemoteCommand('cmd', {})
cmd.fakeLogData(self, 'stdio', stdout=stdout, stderr=stderr)
cmd.rc = rc
return cmd
def test_evaluateCommand_success(self):
cmd = self.makeRemoteCommand(0, "Log text", "Log text")
lbs = buildstep.LoggingBuildStep()
status = lbs.evaluateCommand(cmd)
self.assertEqual(status, SUCCESS, "evaluateCommand returned %d, should've returned %d" % (status, SUCCESS))
def test_evaluateCommand_failed(self):
cmd = self.makeRemoteCommand(23, "Log text", "")
lbs = buildstep.LoggingBuildStep()
status = lbs.evaluateCommand(cmd)
self.assertEqual(status, FAILURE, "evaluateCommand returned %d, should've returned %d" % (status, FAILURE))
def test_evaluateCommand_log_eval_func(self):
cmd = self.makeRemoteCommand(0, "Log text")
def eval(cmd, step_status):
return WARNINGS
lbs = buildstep.LoggingBuildStep(log_eval_func=eval)
status = lbs.evaluateCommand(cmd)
self.assertEqual(status, WARNINGS, "evaluateCommand didn't call log_eval_func or overrode its results")
class FailingCustomStep(buildstep.LoggingBuildStep):
def __init__(self, exception=buildstep.BuildStepFailed, *args, **kwargs):
buildstep.LoggingBuildStep.__init__(self, *args, **kwargs)
self.exception = exception
@defer.inlineCallbacks
def start(self):
yield defer.succeed(None)
raise self.exception()
class TestCustomStepExecution(steps.BuildStepMixin, unittest.TestCase):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
    def test_step_raising_buildstepfailed_in_start(self):
self.setupStep(FailingCustomStep())
self.expectOutcome(result=FAILURE, status_text=["generic"])
return self.runStep()
def test_step_raising_exception_in_start(self):
self.setupStep(FailingCustomStep(exception=ValueError))
self.expectOutcome(result=EXCEPTION, status_text=["generic", "exception"])
d = self.runStep()
@d.addCallback
def cb(_):
self.assertEqual(len(self.flushLoggedErrors(ValueError)), 1)
return d
|
denny820909/builder
|
lib/python2.7/site-packages/buildbot-0.8.8-py2.7.egg/buildbot/test/unit/test_process_buildstep.py
|
Python
|
mit
| 9,964
|
#!/usr/bin/env python
from __future__ import print_function
import optparse
import os
import pwd
import signal
import subprocess
import sys
import time
import traceback
from six.moves.urllib.parse import urlunparse
from tornado import httpclient
from tornado import httputil
from tornado import gen
from tornado import web
from tornado.ioloop import IOLoop
from tornado.websocket import WebSocketHandler, websocket_connect
if False:
    from typing import Any, Callable, Generator, List, Optional
if 'posix' in os.name and os.geteuid() == 0:
raise RuntimeError("run-dev.py should not be run as root.")
parser = optparse.OptionParser(r"""
Starts the app listening on localhost, for local development.
This script launches the Django and Tornado servers, then runs a reverse proxy
which serves to both of them. After it's all up and running, browse to
http://localhost:9991/
Note that, while runserver and runtornado have the usual auto-restarting
behavior, the reverse proxy itself does *not* automatically restart on changes
to this file.
""")
TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(TOOLS_DIR))
from tools.lib.test_script import (
get_provisioning_status,
)
parser.add_option('--test',
action='store_true', dest='test',
help='Use the testing database and ports')
parser.add_option('--interface',
action='store', dest='interface',
default=None, help='Set the IP or hostname for the proxy to listen on')
parser.add_option('--no-clear-memcached',
action='store_false', dest='clear_memcached',
default=True, help='Do not clear memcached')
parser.add_option('--force', dest='force',
action="store_true",
default=False, help='Run tests despite possible problems.')
parser.add_option('--enable-tornado-logging', dest='enable_tornado_logging',
action="store_true",
default=False, help='Enable access logs from tornado proxy server.')
(options, arguments) = parser.parse_args()
if not options.force:
ok, msg = get_provisioning_status()
if not ok:
print(msg)
print('If you really know what you are doing, use --force to run anyway.')
sys.exit(1)
if options.interface is None:
user_id = os.getuid()
user_name = pwd.getpwuid(user_id).pw_name
if user_name in ["vagrant", "zulipdev"]:
# In the Vagrant development environment, we need to listen on
# all ports, and it's safe to do so, because Vagrant is only
# exposing certain guest ports (by default just 9991) to the
# host. The same argument applies to the remote development
# servers using username "zulipdev".
options.interface = None
else:
# Otherwise, only listen to requests on localhost for security.
options.interface = "127.0.0.1"
elif options.interface == "":
options.interface = None
base_port = 9991
if options.test:
base_port = 9981
settings_module = "zproject.test_settings"
else:
settings_module = "zproject.settings"
manage_args = ['--settings=%s' % (settings_module,)]
os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from scripts.lib.zulip_tools import WARNING, ENDC
proxy_port = base_port
django_port = base_port + 1
tornado_port = base_port + 2
webpack_port = base_port + 3
os.chdir(os.path.join(os.path.dirname(__file__), '..'))
# Clean up stale .pyc files etc.
subprocess.check_call('./tools/clean-repo')
# HACK to fix up node_modules/.bin/handlebars deletion issue
if not os.path.exists("node_modules/.bin/handlebars") and os.path.exists("node_modules/handlebars"):
print("Handlebars binary missing due to rebase past .gitignore fixup; fixing...")
subprocess.check_call(["rm", "-rf", "node_modules/handlebars"])
subprocess.check_call(["npm", "install"])
if options.clear_memcached:
print("Clearing memcached ...")
subprocess.check_call('./scripts/setup/flush-memcached')
# Set up a new process group, so that we can later kill run{server,tornado}
# and all of the processes they spawn.
os.setpgrp()
# Pass --nostatic because we configure static serving ourselves in
# zulip/urls.py.
cmds = [['./tools/compile-handlebars-templates', 'forever'],
['./manage.py', 'rundjango'] +
manage_args + ['127.0.0.1:%d' % (django_port,)],
['env', 'PYTHONUNBUFFERED=1', './manage.py', 'runtornado'] +
manage_args + ['127.0.0.1:%d' % (tornado_port,)],
['./tools/run-dev-queue-processors'] + manage_args,
['env', 'PGHOST=127.0.0.1', # Force password authentication using .pgpass
'./puppet/zulip/files/postgresql/process_fts_updates']]
if options.test:
# Webpack doesn't support 2 copies running on the same system, so
# in order to support running the Casper tests while a Zulip
# development server is running, we use webpack in production mode
# for the Casper tests.
subprocess.check_call('./tools/webpack')
else:
cmds += [['./tools/webpack', '--watch', '--port', str(webpack_port)]]
for cmd in cmds:
subprocess.Popen(cmd)
def transform_url(protocol, path, query, target_port, target_host):
# type: (str, str, str, int, str) -> str
# generate url with target host
host = ":".join((target_host, str(target_port)))
newpath = urlunparse((protocol, host, path, '', query, ''))
return newpath
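# Illustrative example (not part of the original file): with this proxy's
# default ports, transform_url("http", "/json/events", "dont_block=true",
# 9993, "127.0.0.1") yields "http://127.0.0.1:9993/json/events?dont_block=true".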
@gen.engine
def fetch_request(url, callback, **kwargs):
# type: (str, Any, **Any) -> Generator[Callable[..., Any], Any, None]
# use large timeouts to handle polling requests
req = httpclient.HTTPRequest(url, connect_timeout=240.0, request_timeout=240.0, **kwargs)
client = httpclient.AsyncHTTPClient()
# wait for response
response = yield gen.Task(client.fetch, req)
callback(response)
class BaseWebsocketHandler(WebSocketHandler):
# target server ip
target_host = '127.0.0.1' # type: str
# target server port
target_port = None # type: int
def __init__(self, *args, **kwargs):
# type: (*Any, **Any) -> None
super(BaseWebsocketHandler, self).__init__(*args, **kwargs)
# define client for target websocket server
self.client = None # type: Any
def get(self, *args, **kwargs):
# type: (*Any, **Any) -> Callable
# use get method from WebsocketHandler
return super(BaseWebsocketHandler, self).get(*args, **kwargs)
def open(self):
# type: () -> None
# setup connection with target websocket server
websocket_url = "ws://{host}:{port}{uri}".format(
host=self.target_host,
port=self.target_port,
uri=self.request.uri
)
request = httpclient.HTTPRequest(websocket_url)
request.headers = self._add_request_headers(['sec-websocket-extensions'])
websocket_connect(request, callback=self.open_callback,
on_message_callback=self.on_client_message)
def open_callback(self, future):
# type: (Any) -> None
# callback on connect with target websocket server
self.client = future.result()
def on_client_message(self, message):
# type: (str) -> None
if not message:
            # an empty message means the target websocket server closed the connection
return self.close()
if self.ws_connection:
# send message to client if connection exists
self.write_message(message, False)
def on_message(self, message, binary=False):
# type: (str, bool) -> Optional[Callable]
if not self.client:
# close websocket proxy connection if no connection with target websocket server
return self.close()
self.client.write_message(message, binary)
def check_origin(self, origin):
# type: (str) -> bool
return True
def _add_request_headers(self, exclude_lower_headers_list=None):
# type: (Optional[List[str]]) -> httputil.HTTPHeaders
exclude_lower_headers_list = exclude_lower_headers_list or []
headers = httputil.HTTPHeaders()
for header, v in self.request.headers.get_all():
if header.lower() not in exclude_lower_headers_list:
headers.add(header, v)
return headers
class CombineHandler(BaseWebsocketHandler):
def get(self, *args, **kwargs):
# type: (*Any, **Any) -> Optional[Callable]
if self.request.headers.get("Upgrade", "").lower() == 'websocket':
return super(CombineHandler, self).get(*args, **kwargs)
def head(self):
# type: () -> None
pass
def post(self):
# type: () -> None
pass
def put(self):
# type: () -> None
pass
def patch(self):
# type: () -> None
pass
def options(self):
# type: () -> None
pass
def delete(self):
# type: () -> None
pass
def handle_response(self, response):
# type: (Any) -> None
if response.error and not isinstance(response.error, httpclient.HTTPError):
self.set_status(500)
self.write('Internal server error:\n' + str(response.error))
else:
self.set_status(response.code, response.reason)
            self._headers = httputil.HTTPHeaders()  # clear Tornado's default headers
for header, v in response.headers.get_all():
if header != 'Content-Length':
                    # some headers appear multiple times, e.g. 'Set-Cookie'
self.add_header(header, v)
if response.body:
                # rewrite the Content-Length header based on the actual response body
self.set_header('Content-Length', len(response.body))
self.write(response.body)
self.finish()
@web.asynchronous
def prepare(self):
# type: () -> None
if 'X-REAL-IP' not in self.request.headers:
self.request.headers['X-REAL-IP'] = self.request.remote_ip
if self.request.headers.get("Upgrade", "").lower() == 'websocket':
return super(CombineHandler, self).prepare()
url = transform_url(
self.request.protocol,
self.request.path,
self.request.query,
self.target_port,
self.target_host,
)
try:
fetch_request(
url=url,
callback=self.handle_response,
method=self.request.method,
headers=self._add_request_headers(["upgrade-insecure-requests"]),
follow_redirects=False,
body=getattr(self.request, 'body'),
allow_nonstandard_methods=True
)
except httpclient.HTTPError as e:
if hasattr(e, 'response') and e.response:
self.handle_response(e.response)
else:
self.set_status(500)
self.write('Internal server error:\n' + str(e))
self.finish()
class WebPackHandler(CombineHandler):
target_port = webpack_port
class DjangoHandler(CombineHandler):
target_port = django_port
class TornadoHandler(CombineHandler):
target_port = tornado_port
class Application(web.Application):
def __init__(self, enable_logging=False):
# type: (bool) -> None
handlers = [
(r"/json/events.*", TornadoHandler),
(r"/api/v1/events.*", TornadoHandler),
(r"/webpack.*", WebPackHandler),
(r"/sockjs.*", TornadoHandler),
(r"/socket.io.*", WebPackHandler),
(r"/.*", DjangoHandler)
]
super(Application, self).__init__(handlers, enable_logging=enable_logging)
def log_request(self, handler):
# type: (BaseWebsocketHandler) -> None
if self.settings['enable_logging']:
super(Application, self).log_request(handler)
def on_shutdown():
# type: () -> None
IOLoop.instance().stop()
def shutdown_handler(*args, **kwargs):
# type: (*Any, **Any) -> None
io_loop = IOLoop.instance()
if io_loop._callbacks:
io_loop.add_timeout(time.time() + 1, shutdown_handler)
else:
io_loop.stop()
# log which services/ports will be started
print("Starting Zulip services on ports: web proxy: {},".format(proxy_port),
"Django: {}, Tornado: {}".format(django_port, tornado_port), end='')
if options.test:
print("") # no webpack for --test
else:
print(", webpack: {}".format(webpack_port))
print("".join((WARNING,
"Note: only port {} is exposed to the host in a Vagrant environment.".format(
proxy_port), ENDC)))
try:
app = Application(enable_logging=options.enable_tornado_logging)
app.listen(proxy_port, address=options.interface)
ioloop = IOLoop.instance()
for s in (signal.SIGINT, signal.SIGTERM):
signal.signal(s, shutdown_handler)
ioloop.start()
except:
# Print the traceback before we get SIGTERM and die.
traceback.print_exc()
raise
finally:
# Kill everything in our process group.
os.killpg(0, signal.SIGTERM)
|
arpith/zulip
|
tools/run-dev.py
|
Python
|
apache-2.0
| 13,278
|
#!/usr/bin/env python
# encoding: utf-8
"""
release_utils.py
Python Utils for releasing the Checker Framework.
This contains no main method, only utility functions.
Created by Jonathan Burke 11/21/2012
Copyright (c) 2012 University of Washington
"""
import sys
import urllib2
import re
import subprocess
import os
import os.path
import shutil
import errno
from release_vars import *
#=========================================================================================
# Parse Args Utils # TODO: Perhaps use argparse module
def match_arg(arg):
"""Check if the given command-line argument matches one of the following
strings, and returns the matching project if it does:
langtools, annotation-file-utilities, checker-framework, lt, afu, cf"""
matched_project = None
for project in PROJECTS_TO_SHORTNAMES:
if arg == project[0] or arg == project[1]:
matched_project = project
return matched_project
def read_projects(argv, error_call_back):
"""Determine which of the jsr308-langtools, AFU and Checker Framework
projects to build based on the command-line arguments to release_build.
\"all\" indicates that all 3 projects are to be built. If the arguments
are incorrect, the error_call_back function is called and the script
execution is terminated."""
matched_projects = {
LT_OPT : False,
AFU_OPT : False,
CF_OPT : False
}
    arg_length = len(argv)
if arg_length < 2:
print "You must select at least one project!"
error_call_back()
sys.exit(1)
error = False
for index in range(1, arg_length):
arg = argv[index]
if arg == ALL_OPT:
for project in PROJECTS_TO_SHORTNAMES:
matched_projects[project[0]] = True
return matched_projects
matched_project = match_arg(argv[index])
if matched_project is None:
print "Unmatched project: " + argv[index]
error = True
else:
matched_projects[matched_project[0]] = True
if error:
error_call_back()
sys.exit(1)
return matched_projects
def add_project_dependencies(matched_projects):
"""Given the projects the user indicated need to be build, ensure that any
projects it depends on are also built. That is:
If building the Checker Framework release, ensure that the AFU and
jsr308-langtools are also built.
If building the AFU, ensure that jsr308-langtools is also built."""
if matched_projects[CF_OPT]:
matched_projects[AFU_OPT] = True
matched_projects[LT_OPT] = True
else:
if matched_projects[AFU_OPT]:
matched_projects[LT_OPT] = True
def print_projects(indent_level, indent_size):
"Print the projects that can be built by release_build."
indentation = duplicate(duplicate(" ", indent_size), indent_level)
project_to_short_cols = 27
print "projects: You must specify at least one of the following projects or \"all\""
print indentation + pad_to("project", " ", project_to_short_cols) + "short-name"
for project in PROJECTS_TO_SHORTNAMES:
print indentation + pad_to(project[0], " ", project_to_short_cols) + project[1]
print indentation + ALL_OPT
def duplicate(string, times):
"""Returns a string that is the concatenation of the given string repeated the
given number of times."""
result = ""
for dummy in range(0, times):
result += string
return result
def pad_to(original_str, filler, size):
"""Return a string of the given size that is the given string padded with 0 or
more repetitions of the given filler character."""
missing = size - len(original_str)
return original_str + duplicate(filler, missing)
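# Illustrative examples (not part of the original file):
#   duplicate("ab", 3)    -> "ababab"
#   pad_to("cf", " ", 5)  -> "cf   "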
def read_command_line_option(argv, argument):
"""Returns True if the given command line arguments contain the specified
argument, False otherwise."""
for index in range(1, len(argv)):
if argv[index] == argument:
return True
return False
#=========================================================================================
# Command utils
def execute(command_args, halt_if_fail=True, capture_output=False, working_dir=None):
"""Execute the given command.
If capture_output is true, then return the output (and ignore the halt_if_fail argument).
If capture_output is not true, return the return code of the subprocess call."""
print "Executing: %s" % (command_args)
import shlex
args = shlex.split(command_args) if isinstance(command_args, str) else command_args
if capture_output:
process = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=working_dir)
out = process.communicate()[0]
process.wait()
return out
else:
result = subprocess.call(args, cwd=working_dir)
if halt_if_fail and result:
raise Exception('Error %s while executing %s' % (result, command_args))
return result
def execute_write_to_file(command_args, output_file_path, halt_if_fail=True, working_dir=None):
"""Execute the given command, capturing the output to the given file."""
print "Executing: %s" % (command_args)
import shlex
args = shlex.split(command_args) if isinstance(command_args, str) else command_args
output_file = open(output_file_path, 'w+')
process = subprocess.Popen(args, stdout=output_file, stderr=output_file, cwd=working_dir)
process.communicate()
process.wait()
output_file.close()
if process.returncode != 0 and halt_if_fail:
raise Exception('Error %s while executing %s' % (process.returncode, command_args))
def check_command(command):
"""Executes the UNIX \"which\" command to determine whether the given command
is installed and on the PATH."""
p = execute(['which', command], False)
if p:
raise AssertionError('command not found: %s' % command)
print ''
def prompt_yes_no(msg, default=False):
"""Prints the given message and continually prompts the user until they
answer yes or no. Returns true if the answer was yes, false otherwise."""
default_str = "no"
if default:
default_str = "yes"
result = prompt_w_suggestion(msg, default_str, "^(Yes|yes|No|no)$")
if result == "yes" or result == "Yes":
return True
return False
def prompt_yn(msg):
"""Prints the given message and continually prompts the user until they
answer y or n. Returns true if the answer was y, false otherwise."""
y_or_n = 'z'
while y_or_n != 'y' and y_or_n != 'n':
print msg + " [y|n]"
y_or_n = raw_input().lower()
return y_or_n == 'y'
def prompt_until_yes():
"Prompts the user continually until they enter yes"
while not prompt_yes_no("Continue?"):
pass
def prompt_w_suggestion(msg, suggestion, valid_regex=None):
"Only accepts answers that match valid_regex."
answer = None
while answer is None:
answer = raw_input(msg + " (%s): " % suggestion)
if answer is None or answer == "":
answer = suggestion
else:
answer = answer.strip()
if valid_regex is not None:
m = re.match(valid_regex, answer)
if m is None:
answer = None
print "Invalid answer. Validating regex: " + valid_regex
else:
answer = suggestion
return answer
def check_tools(tools):
"""Given an array specifying a set of tools, verify that the tools are
installed and on the PATH."""
print "\nChecking to make sure the following programs are installed:"
print ', '.join(tools)
print('Note: If you are NOT working on buffalo.cs.washington.edu then you ' +
'likely need to change the variables that are set in release.py\n' +
'Search for "Set environment variables".')
map(check_command, tools)
print ''
#=========================================================================================
# Version Utils
# From http://stackoverflow.com/a/1714190/173852, but doesn't strip trailing zeroes
def version_number_to_array(version_num):
"""Given a version number, return an array of the elements, as integers."""
return [int(x) for x in version_num.split(".")]
def version_array_to_string(version_array):
"""Given an array of numbers representing a version, such as [1,2,3], returns
a string representation of the version, such as \"1.2.3\" """
return ".".join(str(x) for x in version_array)
# From http://stackoverflow.com/a/1714190/173852
def compare_version_numbers(version1, version2):
"""Given two versions in string form, returns a negative value if
version1 < version2, 0 if version1 == version2 and a strictly positive
value if version1 > version2."""
return cmp(version_number_to_array(version1), version_number_to_array(version2))
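# Illustrative example (not part of the original file): comparison is numeric
# per component, so compare_version_numbers("1.9.9", "1.10.0") is negative,
# even though "1.10.0" sorts before "1.9.9" lexicographically.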
def increment_version(version_num, single_digits=False):
"""
Returns the next incremental version after the argument.
If single_digits is true, do not permit any part to grow greater than 9.
"""
# Drop the fourth and subsequent parts if present
version_array = version_number_to_array(version_num)[:3]
version_array[-1] = version_array[-1] + 1
if single_digits and version_array[-1] > 9:
return increment_version(version_array_to_string(version_array[0:-1]), single_digits) + ".0"
return version_array_to_string(version_array)
def test_increment_version():
"""Run test cases to ensure that increment_version works correctly.
This is critical since running release_build.py with the --auto switch
will automatically increment the release versions without prompting the
user to verify the new versions."""
assert increment_version('1.0.3') == '1.0.4'
assert increment_version('1.0.9') == '1.0.10'
assert increment_version('1.1.9') == '1.1.10'
assert increment_version('1.3.0') == '1.3.1'
assert increment_version('1.3.1') == '1.3.2'
assert increment_version('1.9.9') == '1.9.10'
assert increment_version('3.6.22') == '3.6.23'
assert increment_version('3.22.6') == '3.22.7'
assert increment_version('1.0.3.1') == '1.0.4'
assert increment_version('1.0.9.1') == '1.0.10'
assert increment_version('1.1.9.1') == '1.1.10'
assert increment_version('1.3.0.1') == '1.3.1'
assert increment_version('1.3.1.1') == '1.3.2'
assert increment_version('1.9.9.1') == '1.9.10'
assert increment_version('3.6.22.1') == '3.6.23'
assert increment_version('3.22.6.1') == '3.22.7'
assert increment_version('1.0.3', True) == '1.0.4'
assert increment_version('1.0.9', True) == '1.1.0'
assert increment_version('1.1.9', True) == '1.2.0'
assert increment_version('1.3.0', True) == '1.3.1'
assert increment_version('1.3.1', True) == '1.3.2'
assert increment_version('1.9.9', True) == '2.0.0'
assert increment_version('3.6.22', True) == '3.7.0'
assert increment_version('3.22.6', True) == '3.22.7'
def current_distribution_by_website(site):
"""
Reads the checker framework version from the checker framework website and
returns the version of the current release
"""
print 'Looking up checker-framework-version from %s\n' % site
ver_re = re.compile(r"<!-- checker-framework-zip-version -->checker-framework-(.*)\.zip")
text = urllib2.urlopen(url=site).read()
result = ver_re.search(text)
return result.group(1)
def current_distribution(checker_framework_dir):
"""
Reads the checker framework version from build-common.properties
returns the version of the current release
"""
ver_re = re.compile(r"""build.version = (\d+\.\d+\.\d+(?:\.\d+){0,1})""")
build_props_location = os.path.join(checker_framework_dir, "build-common.properties")
build_props = open(build_props_location)
for line in build_props:
match = ver_re.search(line)
if match:
return match.group(1)
print "Couldn't find checker framework version in file: " + build_props_location
sys.exit(1)
def extract_from_site(site, open_tag, close_tag):
"""
Reads a string from between open and close tag at the given url
"""
regex_str = open_tag + "(.*)" + close_tag
ver_re = re.compile(regex_str)
text = urllib2.urlopen(url=site).read()
result = ver_re.search(text)
return result.group(1)
def get_afu_version_from_html(html_file_path):
"""Retrieve the AFU version from within the afu-version tags in the given HTML
file on the filesystem."""
version_regex = "<!-- afu-version -->(\\d+\\.\\d+\\.?\\d*),.*<!-- /afu-version -->"
version = find_first_instance(version_regex, html_file_path)
if version is None:
raise Exception("Could not detect Annotation File Utilities version in file " + html_file_path)
return version
#=========================================================================================
# Git/Mercurial Utils
def is_git(repo_root):
"""Returns true if a (bare or non-bare) git repo exists at the given
filesystem path, false if a Mercurial repo exists at that path. Throws an
    exception if neither a git nor a Mercurial repository exists at the given
path."""
return is_git_private(repo_root, True)
def is_git_not_bare(repo_root):
"""Returns true if a non-bare git repo exists at the given filesystem path,
false if a bare git or a Mercurial repo exists at that path. Throws an
    exception if neither a git nor a Mercurial repository exists at the given
path."""
return is_git_private(repo_root, False)
def is_git_private(repo_root, return_value_on_bare):
"""If a git bare repo exists at the given filesystem path, returns the value
specified in the return_value_on_bare parameter. Returns true if a non-bare
git repo exists at the given path, and false if a Mercurial repo exists at
    that path. Throws an exception if neither a git nor a Mercurial repository
exists at the given path.
Not meant to be called directly - use is_git or is_git_not_bare instead."""
if git_bare_repo_exists_at_path(repo_root):
return return_value_on_bare
if git_repo_exists_at_path(repo_root):
return True
if hg_repo_exists_at_path(repo_root):
return False
raise Exception(repo_root + " is not recognized as a git, git bare, or hg repository")
def git_bare_repo_exists_at_path(repo_root): # Bare git repos have no .git directory but they have a refs directory
"Returns whether a bare git repository exists at the given filesystem path."
if os.path.isdir(repo_root + "/refs"):
return True
return False
def git_repo_exists_at_path(repo_root):
"""Returns whether a (bare or non-bare) git repository exists at the given
filesystem path."""
return os.path.isdir(repo_root + "/.git") or git_bare_repo_exists_at_path(repo_root)
def hg_repo_exists_at_path(repo_root):
"""Returns whether a Mercurial repository exists at the given filesystem
path."""
return os.path.isdir(repo_root + "/.hg")
def push_changes_prompt_if_fail(repo_root):
"""Attempt to push changes, including tags, that were committed to the
repository at the given filesystem path. In case of failure, ask the user
if they would like to try again. Loop until pushing changes succeeds or the
user answers opts to not try again."""
while True:
if is_git(repo_root):
cmd = 'git -C %s push --tags' % repo_root
result = os.system(cmd)
if result == 0:
break
else:
print "Could not push tags from: " + repo_root + "; result=" + str(result) + " for command: `" + cmd + "` in " + os.getcwd()
if not prompt_yn("Try again (responding 'n' will skip this push command but will not exit the script) ?"):
break
if is_git(repo_root):
cmd = 'git -C %s push origin master' % repo_root
else:
cmd = 'hg -R %s push' % repo_root
result = os.system(cmd)
if result == 0:
break
else:
print "Could not push from: " + repo_root + "; result=" + str(result) + " for command: " + cmd + "` in " + os.getcwd()
if not prompt_yn("Try again (responding 'n' will skip this push command but will not exit the script) ?"):
break
def push_changes(repo_root):
"""Pushes changes, including tags, that were committed to the repository at
the given filesystem path."""
if is_git(repo_root):
execute('git -C %s push --tags' % repo_root)
execute('git -C %s push origin master' % repo_root)
else:
execute('hg -R %s push' % repo_root)
def update_repo(path, bareflag):
"""Pull the latest changes to the given repo and update. The bareflag
parameter indicates whether the updated repo must be a bare git repo."""
if is_git(path):
if bareflag:
execute('git -C %s fetch origin master:master' % path)
else:
execute('git -C %s pull' % path)
else:
execute('hg -R %s pull -u' % path)
def commit_tag_and_push(version, path, tag_prefix):
"""Commit the changes made for this release, add a tag for this release, and
push these changes."""
if is_git(path):
execute('git -C %s commit -a -m "new release %s"' % (path, version))
execute('git -C %s tag %s%s' % (path, tag_prefix, version))
else:
execute('hg -R %s commit -m "new release %s"' % (path, version))
execute('hg -R %s tag %s%s' % (path, tag_prefix, version))
push_changes(path)
def clone_from_scratch_or_update(src_repo, dst_repo, clone_from_scratch, bareflag):
"""If the clone_from_scratch flag is True, clone the given git or
Mercurial repo from scratch into the filesystem path specified by dst_repo,
deleting it first if the repo is present on the filesystem.
Otherwise, if a repo exists at the filesystem path given by dst_repo, pull
the latest changes to it and update it. If the repo does not exist, clone it
from scratch. The bareflag parameter indicates whether the cloned/updated
repo must be a bare git repo."""
if clone_from_scratch:
delete_and_clone(src_repo, dst_repo, bareflag)
else:
if os.path.exists(dst_repo):
update_repo(dst_repo, bareflag)
else:
clone(src_repo, dst_repo, bareflag)
def delete_and_clone(src_repo, dst_repo, bareflag):
"""Clone the given git or Mercurial repo from scratch into the filesystem
path specified by dst_repo. If a repo exists at the filesystem path given
by dst_repo, delete it first. The bareflag parameter indicates whether
the cloned repo must be a bare git repo."""
delete_path_if_exists(dst_repo)
clone(src_repo, dst_repo, bareflag)
def clone(src_repo, dst_repo, bareflag):
"""Clone the given git or Mercurial repo from scratch into the filesystem
path specified by dst_repo. The bareflag parameter indicates whether the
cloned repo must be a bare git repo."""
isGitRepo = False
if "http" in src_repo:
if "git" in src_repo:
isGitRepo = True
elif "git@github.com:" in src_repo:
isGitRepo = True
elif is_git(src_repo):
isGitRepo = True
if isGitRepo:
flags = ""
if bareflag:
flags = "--bare"
execute('git clone --quiet %s %s %s' % (flags, src_repo, dst_repo))
else:
execute('hg clone --quiet %s %s' % (src_repo, dst_repo))
def is_repo_cleaned_and_updated(repo):
"""IMPORTANT: this function is not known to be fully reliable in ensuring
that a repo is fully clean of all changes, such as committed tags. To be
certain of success throughout the release_build and release_push process,
the best option is to clone repositories from scratch.
Returns whether the repository at the given filesystem path is clean (i.e.
there are no committed changes and no untracked files in the working tree)
and up-to-date with respect to the repository it was cloned from."""
if is_git(repo):
# The idiom "not execute(..., capture_output=True)" evaluates to True when the captured output is empty.
if git_bare_repo_exists_at_path(repo):
execute("git -C %s fetch origin" % (repo))
is_updated = not execute("git -C %s diff master..FETCH_HEAD" % (repo), capture_output=True)
return is_updated
else:
# Could add "--untracked-files=no" to this command
is_clean = not execute("git -C %s status --porcelain" % (repo), capture_output=True)
execute("git -C %s fetch origin" % (repo))
is_updated = not execute("git -C %s diff origin/master..master" % (repo), capture_output=True)
return is_clean and is_updated
else:
summary = execute('hg -R %s summary --remote' % (repo), capture_output=True)
if "commit: (clean)" not in summary:
return False
if "update: (current)" not in summary:
return False
if "remote: (synced)" not in summary:
return False
return True
def repo_exists(repo_root):
"""Returns whether a (bare or non-bare) git repository or a Mercurial
repository exists at the given filesystem path."""
return git_repo_exists_at_path(repo_root) or hg_repo_exists_at_path(repo_root)
def check_repos(repos, fail_on_error, is_intermediate_repo_list):
"""Fail if the repository is not clean and up to date."""
for repo in repos:
if repo_exists(repo):
if not is_repo_cleaned_and_updated(repo):
if is_intermediate_repo_list:
print("\nWARNING: Intermediate repository " + repo + " is not up to date with respect to the live repository.\n" +
"A separate warning will not be issued for a build repository that is cloned off of the intermediate repository.")
if fail_on_error:
raise Exception('repo %s is not cleaned and updated!' % repo)
else:
if not prompt_yn('%s is not clean and up to date! Continue (answering \'n\' will exit the script)?' % repo):
raise Exception('%s is not clean and up to date! Halting!' % repo)
def get_tag_line(lines, revision, tag_prefixes):
"""Get the revision hash for the tag matching the given project revision in
the given lines containing revision hashes. Uses the given array of tag
prefix strings if provided. For example, given an array of tag prefixes
[\"checker-framework-\", \"checkers-\"] and project revision \"2.0.0\", the
tags named \"checker-framework-2.0.0\" and \"checkers-2.0.0\" are sought."""
for line in lines:
for prefix in tag_prefixes:
full_tag = prefix + revision
if line.startswith(full_tag):
return line
return None
def get_commit_for_tag(revision, repo_file_path, tag_prefixes):
"""Get the commit hash for the tag matching the given project revision of
the Git repository at the given filesystem path. Uses the given array of
tag prefix strings if provided. For example, given an array of tag prefixes
[\"checker-framework-\", \"checkers-\"] and project revision \"2.0.0\", the
tags named \"checker-framework-2.0.0\" and \"checkers-2.0.0\" are sought."""
if not is_git(repo_file_path):
raise Exception("get_commit_for_tag is only defined for git repositories")
# assume the first is the most recent
tags = execute("git -C " + repo_file_path + " rev-list " + tag_prefixes[0] + revision, True, True)
    lines = tags.splitlines()
    if not lines:
        msg = "Could not find revision %s in repo %s using tags %s " % (revision, repo_file_path, ",".join(tag_prefixes))
        raise Exception(msg)
    commit = lines[0]
    return commit
def get_hash_for_tag(revision, repo_file_path, tag_prefixes):
"""Get the revision hash for the tag matching the given project revision of
the Mercurial repository at the given filesystem path. Uses the given array
of tag prefix strings if provided. For example, given an array of tag
prefixes [\"checker-framework-\", \"checkers-\"] and project revision
\"2.0.0\", the tags named \"checker-framework-2.0.0\" and \"checkers-2.0.0\"
are sought."""
if is_git(repo_file_path):
raise Exception("get_hash_for_tag is not defined for git repositories")
tags = execute("hg tags -R " + repo_file_path, True, True)
lines = tags.split("\n")
target = get_tag_line(lines, revision, tag_prefixes)
if target is None:
msg = "Could not find revision %s in repo %s using tags %s " % (revision, repo_file_path, ",".join(tag_prefixes))
raise Exception(msg)
tokens = target.split()
result = tokens[1].split(":")[1]
return result
def get_tip_hash(repository):
"""Get the revision hash for the \"tip\" tag of the Mercurial repository at
the given filesystem path."""
return get_hash_for_tag("tip", repository, [""])
def write_diff_to_file(old_version, repository, tag_prefixes, dir_path, outfile):
"""Retrieves the changes under the given directory/path for the project
at the given repository path since the given old version. The changes are
saved to the given output file.
Uses the given array of tag prefix strings if provided. For example, given
an array of tag prefixes [\"checker-framework-\", \"checkers-\"] and old
version of the project \"2.0.0\", the tags named \"checker-framework-2.0.0\"
and \"checkers-2.0.0\" are sought, and changes made since the located tag
was pushed are retrieved."""
if is_git(repository):
old_tag = get_commit_for_tag(old_version, repository, tag_prefixes)
cmd = "git -C %s diff -w %s.. %s" % (repository, old_tag, dir_path)
else:
old_tag = get_hash_for_tag(old_version, repository, tag_prefixes)
tip_tag = get_tip_hash(repository)
cmd = "hg -R %s diff -w -r%s:%s %s" % (repository, old_tag, tip_tag, dir_path)
execute_write_to_file(cmd, outfile)
def propose_documentation_change_review(dir_title, old_version, repository_path, tag_prefixes,
dir_path, diff_output_file):
"""Asks the user if they would like to review the documentation changes for
the project at the given repository path that have been pushed since the
given old version of that project. Also takes as parameters: the title of
the project to display to the user when asking whether to proceed, the tag
prefixes for the project (see the docstring for get_tag_line for a
description of tag prefixes), the path to the manual/documentation for the
project, and the file to output the documentation changes to."""
if prompt_yes_no("Review %s?" %dir_title, True):
write_diff_to_file(old_version, repository_path, tag_prefixes, dir_path, diff_output_file)
# A side effect here is that the user will see updated version numbers in this diff that won't
# be reflected in their local repository. I think that is OK. The version numbers will be updated
# when the actual release is made anyway.
# Alternatively the process could be modified such that the version number updates are not made
# before the user sees the diff. However that would mean the user never gets a chance to review
# those updates.
print "Please review " + dir_title + " and make any edits you deem necessary in your local clone of the repository."
print "Diff file: " + diff_output_file
prompt_until_yes()
#=========================================================================================
# File Utils
def wget_file(source_url, destination_dir):
"""Download a file from the source URL to the given destination directory.
Useful since download_binary does not seem to work on source files."""
print "DEST DIR: " + destination_dir
execute("wget %s" % source_url, True, False, destination_dir)
def download_binary(source_url, destination, max_size):
"""Download a file from the given URL and save its contents to the
destination filename. Raise an exception if the source file is larger than
max_size."""
http_response = urllib2.urlopen(url=source_url)
content_length = http_response.headers['content-length']
if content_length is None:
raise Exception("No content-length when downloading: " + source_url)
if int(content_length) > max_size:
raise Exception("Content-length (" + content_length + ") greater than max_size (" + max_size + ") ")
dest_file = open(destination, 'wb')
dest_file.write(http_response.read())
dest_file.close()
def read_first_line(file_path):
"Return the first line in the given file. Assumes the file exists."
infile = open(file_path, 'r')
first_line = infile.readline()
infile.close()
return first_line
def ensure_group_access(path):
"Give group access to all files and directories under the specified path"
# Errs for any file not owned by this user.
# But, the point is to set group writeability of any *new* files.
execute('chmod -f -R g+rw %s' % path, halt_if_fail=False)
def ensure_user_access(path):
"Give the user access to all files and directories under the specified path"
execute('chmod -f -R u+rwx %s' % path, halt_if_fail=True)
def set_umask():
"Equivalent to executing \"umask g+rw\" from the command line."
os.umask(os.umask(0) & 0b001111)
def find_first_instance(regex, infile):
"""Find the first line in the given file that matches the given regular
expression and return the string concatenation of the result strings
corresponding to the matching groups."""
with open(infile, 'r') as f:
pattern = re.compile(regex)
for line in f:
m = pattern.match(line)
if m is not None:
if pattern.groups > 0:
res = ""
for g in m.groups():
res = res + g
return res
else:
return m.group(0)
return None
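# Illustrative example (not part of the original file): calling
# find_first_instance(r"version=(\d+)\.(\d+)", path) on a file whose first
# matching line is "version=3.22" returns "322" (the groups concatenated).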
def delete(file_to_delete):
"Delete the specified file."
os.remove(file_to_delete)
def delete_if_exists(file_to_delete):
"Check if the specified file exists, and if so, delete it."
if os.path.exists(file_to_delete):
delete(file_to_delete)
def delete_path(path):
"Delete all files and directories under the specified path."
ensure_group_access(path)
shutil.rmtree(path)
def delete_path_if_exists(path):
"""Check if the specified path exists, and if so, delete all files and
directories under it."""
if os.path.exists(path):
delete_path(path)
def prompt_or_auto_delete(path, auto):
"""If auto is false, delete the given file/directory if it exists.
Otherwise, ask the user if they wish the file/directory to be deleted, and
if they answer yes, delete it."""
if not auto:
prompt_to_delete(path)
else:
print
delete_path_if_exists(path)
def prompt_to_delete(path):
"""Ask the user if the specified file/directory should be deleted, and if
they answer yes, delete it."""
if os.path.exists(path):
result = prompt_w_suggestion("Delete the following file/directory:\n %s [Yes|No]" % path, "yes", "^(Yes|yes|No|no)$")
if result == "Yes" or result == "yes":
delete_path(path)
def force_symlink(target_of_link, path_to_symlink):
"""Forces the creation of a symlink to the given path at the given target
location. That is, if a file or symlink exists at the target location, it
is deleted and the symlink is then created."""
try:
os.symlink(target_of_link, path_to_symlink)
except OSError, e:
if e.errno == errno.EEXIST:
os.remove(path_to_symlink)
os.symlink(target_of_link, path_to_symlink)
def are_in_file(file_path, strs_to_find):
"""Returns true if every string in the given strs_to_find array is found in
at least one line in the given file. In particular, returns true if
strs_to_find is empty. Note that the strs_to_find parameter is mutated."""
infile = open(file_path)
for line in infile:
if len(strs_to_find) == 0:
return True
index = 0
while index < len(strs_to_find):
if strs_to_find[index] in line:
del strs_to_find[index]
else:
index = index + 1
return len(strs_to_find) == 0
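# Illustrative example (not part of the original file):
#   are_in_file(path, ["foo", "bar"]) is True only if some line contains "foo"
#   and some (possibly different) line contains "bar"; matched strings are
#   removed from the passed-in list as a side effect.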
def insert_before_line(to_insert, file_path, line):
"""Insert the given line to the given file before the given 0-indexed line
number."""
mid_line = line - 1
with open(file_path) as infile:
content = infile.readlines()
output = open(file_path, "w")
for i in range(0, mid_line):
output.write(content[i])
output.write(to_insert)
for i in range(mid_line, len(content)):
output.write(content[i])
output.close()
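# Illustrative example (not part of the original file): on a file containing
# "a\nb\nc\n", insert_before_line("NEW\n", path, 2) leaves "a\nNEW\nb\nc\n".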
def create_empty_file(file_path):
"Creates an empty file with the given filename."
dest_file = open(file_path, 'wb')
dest_file.close()
#=========================================================================================
# Maven Utils
def mvn_deploy(binary, pom, url):
    "Deploy the given binary with the given POM file to the given Maven repository URL."
command = """
mvn deploy:deploy-file
-Dfile=%s
-DpomFile=%s
-DgeneratePom=false
-Durl=%s
""" % (binary, pom, url)
return execute(command)
def mvn_install(pluginDir):
pom = pluginDirToPom(pluginDir)
execute("mvn -f %s clean package" % pom)
execute("mvn -f %s install" % pom)
def pluginDirToPom(pluginDir):
return os.path.join(pluginDir, 'pom.xml')
def find_mvn_plugin_jar(pluginDir, version, suffix=None):
if suffix is None:
name = "%s/target/checkerframework-maven-plugin-%s.jar" % (pluginDir, version)
else:
name = "%s/target/checkerframework-maven-plugin-%s-%s.jar" % (pluginDir, version, suffix)
return name
def mvn_deploy_mvn_plugin(pluginDir, pom, version, mavenRepo):
jarFile = find_mvn_plugin_jar(pluginDir, version)
return mvn_deploy(jarFile, pom, mavenRepo)
def mvn_sign_and_deploy(url, repo_id, pom_file, file_property, classifier, pgp_user, pgp_passphrase):
cmd = "mvn gpg:sign-and-deploy-file -Durl=%s -DrepositoryId=%s -DpomFile=%s -Dfile=%s" % (url, repo_id, pom_file, file_property)
if classifier is not None:
cmd += " -Dclassifier=" + classifier
cmd += (" -Dgpg.keyname=%s '-Dgpg.passphrase=%s'" % (pgp_user, pgp_passphrase))
execute(cmd)
def mvn_sign_and_deploy_all(url, repo_id, pom_file, artifact_jar, source_jar, javadoc_jar, pgp_user, pgp_passphrase):
    """GPG-sign and deploy the artifact, sources, and javadoc jars to the given Maven repository."""
mvn_sign_and_deploy(url, repo_id, pom_file, artifact_jar, None, pgp_user, pgp_passphrase)
mvn_sign_and_deploy(url, repo_id, pom_file, source_jar, "sources", pgp_user, pgp_passphrase)
mvn_sign_and_deploy(url, repo_id, pom_file, javadoc_jar, "javadoc", pgp_user, pgp_passphrase)
#=========================================================================================
# Misc. Utils
def print_step(step):
"Print a step in the release_build or release_push script."
print "\n"
print step
dashStr = ""
for dummy in range(0, len(step)):
dashStr += "-"
print dashStr
def get_announcement_email(version):
"""Return the template for the e-mail announcing a new release of the
Checker Framework."""
return """
To: checker-framework-discuss@googlegroups.com
Subject: Release %s of the Checker Framework
We have released a new version of the Checker Framework and the Eclipse plugin for the Checker Framework.
* The Checker Framework lets you create and/or run pluggable type checkers, in order to detect and prevent bugs in your code.
* The Eclipse plugin makes it more convenient to run the Checker Framework.
You can find documentation and download links for these projects at:
http://CheckerFramework.org/
Changes for the Checker Framework
<<Insert latest Checker Framework changelog entry, omitting the first line with the release version and date, and with hard line breaks removed>>
""" % (version)
#=========================================================================================
# Testing
def test_release_utils():
"Test that critical methods in this file work as expected."
test_increment_version()
# Tests run every time this file is loaded
test_release_utils()
|
Jianchu/checker-framework
|
release/release_utils.py
|
Python
|
gpl-2.0
| 36,734
|
"""
$url mitele.es
$type live
$region Spain
"""
import logging
import re
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
from streamlink.utils.parse import parse_qsd
from streamlink.utils.url import update_qsd
log = logging.getLogger(__name__)
@pluginmatcher(re.compile(
r"https?://(?:www\.)?mitele\.es/directo/(?P<channel>[\w-]+)"
))
class Mitele(Plugin):
caronte_url = "https://caronte.mediaset.es/delivery/channel/mmc/{channel}/mtweb"
gbx_url = "https://mab.mediaset.es/1.0.0/get?oid=mtmw&eid=%2Fapi%2Fmtmw%2Fv2%2Fgbx%2Fmtweb%2Flive%2Fmmc%2F{channel}"
error_schema = validate.Schema({"code": int})
caronte_schema = validate.Schema(validate.parse_json(), validate.any(
{
"cerbero": validate.url(),
"bbx": str,
"dls": [{
"lid": validate.all(int, validate.transform(str)),
"format": validate.any("hls", "dash", "smooth"),
"stream": validate.url(),
validate.optional("assetKey"): str,
"drm": bool,
}],
},
error_schema,
))
gbx_schema = validate.Schema(
validate.parse_json(),
{"gbx": str},
validate.get("gbx")
)
cerbero_schema = validate.Schema(
validate.parse_json(),
validate.any(
validate.all(
{"tokens": {str: {"cdn": str}}},
validate.get("tokens")
),
error_schema,
)
)
token_errors = {
4038: "User has no privileges"
}
def _get_streams(self):
channel = self.match.group("channel")
pdata = self.session.http.get(self.caronte_url.format(channel=channel),
acceptable_status=(200, 403, 404),
schema=self.caronte_schema)
gbx = self.session.http.get(self.gbx_url.format(channel=channel),
schema=self.gbx_schema)
if "code" in pdata:
log.error("error getting pdata: {}".format(pdata["code"]))
return
tokens = self.session.http.post(pdata["cerbero"],
acceptable_status=(200, 403, 404),
json={"bbx": pdata["bbx"], "gbx": gbx},
headers={"origin": "https://www.mitele.es"},
schema=self.cerbero_schema)
if "code" in tokens:
log.error("Could not get stream tokens: {} ({})".format(tokens["code"],
self.token_errors.get(tokens["code"], "unknown error")))
return
list_urls = []
for stream in pdata["dls"]:
if stream["drm"]:
log.warning("Stream may be protected by DRM")
else:
sformat = stream.get("format")
log.debug("Stream: {} ({})".format(stream["stream"], sformat or "n/a"))
cdn_token = tokens.get(stream["lid"], {}).get("cdn", "")
qsd = parse_qsd(cdn_token)
if sformat == "hls":
list_urls.append(update_qsd(stream["stream"], qsd))
if not list_urls:
return
for url in list(set(list_urls)):
yield from HLSStream.parse_variant_playlist(self.session, url, name_fmt="{pixels}_{bitrate}").items()
__plugin__ = Mitele
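# Illustrative usage (not part of the original file; the channel name is a
# hypothetical example matching the pluginmatcher regex above):
#   streamlink https://www.mitele.es/directo/telecinco best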
|
chhe/streamlink
|
src/streamlink/plugins/mitele.py
|
Python
|
bsd-2-clause
| 3,570
|
import database_manager
while True:
cmd = input()
# print(cmd)
if cmd == 'drop':
database_manager.drop_tables()
elif cmd == 'init':
database_manager.init()
elif cmd.lower().startswith('addst'):
params = cmd[5:].replace(' ', '').split(',')
if database_manager.add_student(params[0], params[1], params[2]) == 0:
print('student added.')
else:
            print('student with id ' + params[0] + ' already exists.')
elif cmd.lower().startswith('addct'):
params = cmd[5:].replace(' ', '').split(',')
if database_manager.add_course(params[0].upper(), params[1], params[2]) == 0:
print('course added.')
else:
            print('course with id ' + params[0].upper() + ' already exists.')
elif cmd.lower().startswith('addpr'):
params = cmd[5:].replace(' ', '').split(',')
if database_manager.add_prof(params[0].upper(), params[1], params[2]) == 0:
print('professor added.')
else:
            print('professor with id ' + params[0].upper() + ' already exists.')
elif cmd.lower() == 'getst':
database_manager.get_students()
elif cmd.lower() == 'getct':
database_manager.get_courses()
elif cmd.lower() == 'getpr':
database_manager.get_profs()
elif cmd.lower().startswith('searchst'):
name = cmd[8:].replace(' ', '').lower()
database_manager.search_student(name)
else:
print('unknown command')
|
mirtaba/BayatBot
|
Exam.py
|
Python
|
mit
| 1,507
|
# Imports added so the snippet is self-contained; the Qt binding is an
# assumption (the Python 2 print statements suggest a Qt4-era PySide/PyQt4).
from PySide import QtGui, QtCore

# ColoredButton and getRandomQtColor are not defined in this file; they are
# assumed to come from a sibling module in this learning repo.


class ColoredButtonGroup(QtGui.QGroupBox):
    # Signal added so that ColoredButtonDialog's currentColorChosen.connect()
    # call below has a signal to bind to.
    currentColorChosen = QtCore.Signal(object)

    def __init__(self, numColBut, *args, **kwargs):
        super(ColoredButtonGroup, self).__init__(*args, **kwargs)
self.numColBut = numColBut
self.buttonGroup = QtGui.QButtonGroup()
layout = QtGui.QHBoxLayout()
layout.setSpacing(1)
layout.setContentsMargins(0, 0, 0, 0)
        layout.setSizeConstraint(QtGui.QLayout.SetFixedSize)
self.setLayout(layout)
for _ in range(self.numColBut):
randomColor = getRandomQtColor()
button = ColoredButton(randomColor)
self.buttonGroup.addButton(button)
layout.addWidget(button)
self.buttonGroup.buttonClicked.connect(self._OnCurrentColorChosen)
button.click()
    def _OnCurrentColorChosen(self, button):
        print button
        # Re-emit so external listeners (e.g. ColoredButtonDialog) are notified.
        self.currentColorChosen.emit(button)
class ColoredButtonDialog(QtGui.QDialog):
def __init__(self, callback, *args, **kwargs):
super(ColoredButtonDialog, self).__init__(*args, **kwargs)
layout = QtGui.QHBoxLayout()
self.setLayout(layout)
self.chooser = ColoredButtonGroup(8)
self.chooser.currentColorChosen.connect(callback)
layout.addWidget(self.chooser)
class Target(object):
def colorize(self, color):
print color
# A QApplication must exist before any widgets are created or shown.
app = QtGui.QApplication([])
target = Target()
chooserdialog = ColoredButtonDialog(target.colorize)
chooserdialog.exec_()
|
satishgoda/learningqt
|
basics/color/color_buttongroup.py
|
Python
|
mit
| 1,407
|
import claripy
import ana
import nose
import pickle
import tempfile
import logging
l = logging.getLogger('claripy.test.serial')
def test_pickle():
bz = claripy.backend_z3
a = claripy.BitVecVal(0, 32)
b = claripy.BitVec('x', 32, explicit_name=True)
c = a+b
nose.tools.assert_equal(c.resolved_with(bz).__module__, 'z3')
nose.tools.assert_equal(str(c.resolved_with(bz)), '0 + x')
c_copy = pickle.loads(pickle.dumps(c, -1))
nose.tools.assert_equal(c_copy.resolved_with(bz).__module__, 'z3')
nose.tools.assert_equal(str(c_copy.resolved_with(bz)), '0 + x')
def test_datalayer():
l.info("Running test_datalayer")
pickle_dir = tempfile.mkdtemp()
ana.set_dl(pickle_dir=pickle_dir)
l.debug("Pickling to %s",pickle_dir)
a = claripy.BitVecVal(0, 32)
b = claripy.BitVec("x", 32)
c = a + b
d = a+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b
l.debug("Storing!")
a.ana_store()
c_info = c.ana_store()
d_info = d.ana_store()
l.debug("Loading!")
ana.set_dl(pickle_dir=pickle_dir)
#nose.tools.assert_equal(len(claripy.dl._cache), 0)
cc = claripy.ast.BV.ana_load(c_info)
nose.tools.assert_equal(str(cc), str(c))
cd = claripy.ast.BV.ana_load(d_info)
nose.tools.assert_equal(str(cd), str(d))
l.debug("Time to test some solvers!")
s = claripy.FullFrontend(claripy.backend_z3)
x = claripy.BitVec("x", 32)
s.add(x == 3)
s.finalize()
ss = claripy.FullFrontend.ana_load(s.ana_store())
nose.tools.assert_equal(str(s.constraints), str(ss.constraints))
nose.tools.assert_equal(str(s.variables), str(ss.variables))
s = claripy.CompositeFrontend(claripy.backend_z3)
x = claripy.BitVec("x", 32)
s.add(x == 3)
s.finalize()
ss = claripy.CompositeFrontend.ana_load(s.ana_store())
old_constraint_sets = [[hash(j) for j in k.constraints] for k in s._solver_list]
new_constraint_sets = [[hash(j) for j in k.constraints] for k in ss._solver_list]
nose.tools.assert_items_equal(old_constraint_sets, new_constraint_sets)
nose.tools.assert_equal(str(s.variables), str(ss.variables))
if __name__ == '__main__':
test_pickle()
test_datalayer()
|
zhuyue1314/claripy
|
tests/test_serial.py
|
Python
|
bsd-2-clause
| 2,258
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-09-12 08:44
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('project', '0013_remove_eoi_selected_partners'),
]
operations = [
migrations.AlterField(
model_name='eoi',
name='invited_partners',
field=models.ManyToManyField(blank=True, related_name='expressions_of_interest', to='partner.Partner'),
),
migrations.AlterField(
model_name='eoi',
name='reviewers',
field=models.ManyToManyField(blank=True, related_name='eoi_as_reviewer', to=settings.AUTH_USER_MODEL),
),
]
|
unicef/un-partner-portal
|
backend/unpp_api/apps/project/migrations/0014_auto_20170912_0844.py
|
Python
|
apache-2.0
| 783
|
import networkx as nx
from bokeh.io import output_file, show
from bokeh.models import (BoxSelectTool, Circle, EdgesAndLinkedNodes, HoverTool,
MultiLine, NodesAndLinkedEdges, Plot, Range1d, TapTool)
from bokeh.palettes import Spectral4
from bokeh.plotting import from_networkx
G = nx.karate_club_graph()
plot = Plot(width=400, height=400,
x_range=Range1d(-1.1,1.1), y_range=Range1d(-1.1,1.1))
plot.title.text = "Graph Interaction Demonstration"
plot.add_tools(HoverTool(tooltips=None), TapTool(), BoxSelectTool())
graph_renderer = from_networkx(G, nx.circular_layout, scale=1, center=(0,0))
graph_renderer.node_renderer.glyph = Circle(size=15, fill_color=Spectral4[0])
graph_renderer.node_renderer.selection_glyph = Circle(size=15, fill_color=Spectral4[2])
graph_renderer.node_renderer.hover_glyph = Circle(size=15, fill_color=Spectral4[1])
graph_renderer.edge_renderer.glyph = MultiLine(line_color="#CCCCCC", line_alpha=0.8, line_width=5)
graph_renderer.edge_renderer.selection_glyph = MultiLine(line_color=Spectral4[2], line_width=5)
graph_renderer.edge_renderer.hover_glyph = MultiLine(line_color=Spectral4[1], line_width=5)
graph_renderer.selection_policy = NodesAndLinkedEdges()
graph_renderer.inspection_policy = EdgesAndLinkedNodes()
plot.renderers.append(graph_renderer)
output_file("interactive_graphs.html")
show(plot)
|
bokeh/bokeh
|
sphinx/source/docs/user_guide/examples/graph_interaction.py
|
Python
|
bsd-3-clause
| 1,374
|
from os import path
import argparse
__all__ = ('Config',)
def percentage(string):
errstr = "must be a float between 0 and 1, not %r" % string
try:
value = float(string)
except ValueError:
raise argparse.ArgumentTypeError(errstr)
if value < 0 or value > 1:
raise argparse.ArgumentTypeError(errstr)
return value
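# Illustrative example (not part of the original file): percentage("0.8")
# returns 0.8, while percentage("1.5") and percentage("foo") both raise
# argparse.ArgumentTypeError.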
class Config(object):
"""Defines all the options supported by our configuration system.
"""
OPTIONS = (
{
'name': 'android',
'help': 'Android resource directory ($PROJECT/res by default)',
'dest': 'resource_dir',
'kwargs': {'metavar': 'DIR'}
# No default, and will not actually be stored on the config object.
},
{
'name': 'gettext',
'help': 'directory containing the .po files ($PROJECT/locale by default)',
'dest': 'gettext_dir',
'kwargs': {'metavar': 'DIR'}
# No default, and will not actually be stored on the config object.
},
{
'name': 'groups',
'help': 'process the given default XML files (for example ' +
'"strings arrays"); by default all files which contain ' +
'string resources will be used',
'dest': 'groups',
'default': [],
'kwargs': {'nargs': '+', 'metavar': 'GROUP'}
},
{
'name': 'no-template',
'help': 'do not generate a .pot template file on export',
'dest': 'no_template',
'default': False,
'kwargs': {'action': 'store_true'}
},
{
'name': 'template',
'help': 'filename to use for the .pot file(s); may contain the ' +
'%%(domain)s and %%(group)s variables',
'dest': 'template_name',
'default': '',
'kwargs': {'metavar': 'NAME'}
},
{
'name': 'ignore',
'help': 'ignore the given message; can be given multiple times; ' +
'regular expressions can be used if putting the value ' +
'inside slashes (/match/)',
'dest': 'ignores',
'default': [],
'kwargs': {'metavar': 'MATCH', 'action': 'append', 'nargs': '+'}
},
{
'name': 'ignore-fuzzy',
'help': 'during import, ignore messages marked as fuzzy in .po files',
'dest': 'ignore_fuzzy',
'default': False,
'kwargs': {'action': 'store_true'}
},
{
'name': 'require-min-complete',
'help': 'ignore a language\'s .po file(s) completely if there ' +
'aren\'t at least the given percentage of translations',
'dest': 'min_completion',
'default': 0,
'kwargs': {'metavar': 'FLOAT', 'type': percentage}
},
{
'name': 'domain',
'help': 'gettext po domain to use, affects the .po filenames',
'dest': 'domain',
'default': None,
},
{
'name': 'layout',
'help': 'how and where .po files are stored; may be "default", ' +
'"gnu", or a custom path using the variables %%(locale)s ' +
'%%(domain)s and optionally %%(group)s. E.g., ' +
'"%%(group)s-%%(locale)s.po" will write to "strings-es.po" ' +
'for Spanish in strings.xml.',
'dest': 'layout',
'default': 'default',
},
{
'name': 'enable-fuzzy-matching',
'help': 'enable fuzzy matching during export command. When it is enabled ' +
'android2po will automatically add translations for new strings. ' +
'by default this behaviour is turned off',
'dest': 'enable_fuzzy_matching',
'default': False,
'kwargs': {'action': 'store_true'}
},
{
'name': 'clear-obsolete',
'help': 'during export do not add obsolete strings to the generated .po files',
'dest': 'clear_obsolete',
'default': True,
'kwargs': {'action': 'store_true'}
}
)
def __init__(self):
"""Initialize all configuration values with a default.
It is important that we do this here manually, rather than relying
on the "default" mechanism of argparse, because we have multiple
        potential configuration sources (command line, config file), and
we don't want defaults to override actual values.
The attributes we define here are also used to determine
which command line options passed should be assigned to this
object, and which should be exposed via a separate ``options``
namespace.
"""
for optdef in self.OPTIONS:
if 'default' in optdef:
setattr(self, optdef['dest'], optdef['default'])
@classmethod
def setup_arguments(cls, parser):
"""Setup our configuration values as arguments in the ``argparse``
object in ``parser``.
"""
for optdef in cls.OPTIONS:
names = ('--%s' % optdef.get('name'),)
kwargs = {
'help': optdef.get('help', None),
'dest': optdef.get('dest', None),
# We handle defaults ourselves. This is actually important,
# or defaults from one config source may override valid
# values from another.
'default': argparse.SUPPRESS,
}
kwargs.update(optdef.get('kwargs', {}))
parser.add_argument(*names, **kwargs)
@classmethod
def rebase_paths(cls, config, base_path):
"""Make those config values that are paths relative to
``base_path``, because by default, paths are relative to
the current working directory.
"""
for name in ('gettext_dir', 'resource_dir'):
value = getattr(config, name, None)
if value is not None:
setattr(config, name, path.normpath(path.join(base_path, value)))
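# Illustrative example (not part of the original file): with
# base_path="/home/me/project" and config.resource_dir="res", rebase_paths()
# rewrites resource_dir to "/home/me/project/res".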
|
ekager/focus-android
|
tools/l10n/android2po/config.py
|
Python
|
mpl-2.0
| 6,251
|
from celery import Task
# Task that rebuilds the django-haystack search index automatically.
from haystack.management.commands import update_index
class UpdateIndexTask(Task):
def run(self):
update_index.Command().handle()
|
deadlylaid/book_connect
|
wef/items/tasks/haystack_indexing.py
|
Python
|
mit
| 211
|
from instabot import User, api, Sender, Getter
import logging.config
import unittest
from instabot.user.user_controller import UserController
class TestUser(unittest.TestCase):
def test_getter(self):
get = Getter()
print ("USERS AVAILABLE: %d" % get.controller.queue.qsize())
resp = list(get.user_followers("4456846295"))
self.assertTrue(len(resp) > 0)
resp = list(get.user_following("4456846295", total=5))
self.assertEqual(len(resp), 5)
resp = list(get.user_feed("4456846295", total=10))
self.assertEqual(len(resp), 10)
resp = get.user_info("4456846295")
self.assertEqual(resp["pk"], 4456846295)
resp = get.user_info("ohld")
self.assertEqual(resp["pk"], 352300017)
resp = list(get.liked_media(total=0))
self.assertEqual(len(resp), 0)
def test_sender(self):
send = Sender("instabotproject")
self.assertTrue(send.can_follow("ohld"))
self.assertFalse(send.follow_followers("ohld", total=1))
def add_users():
pass
if __name__ == '__main__':
logging.config.fileConfig('instabot/log.conf')
log = logging.getLogger('main')
# add_users()
unittest.main()
|
instagrambot/instapro
|
test.py
|
Python
|
apache-2.0
| 1,220
|
import subprocess
import os
class CommandRunner:
HOST_LIST_TO_RUN_LOCAL = ["localhost", "127.0.0.1"]
def __init__(self, local_hostname, logger):
logger.debug("Creating CommandRunner with Args - local_hostname: {local_hostname}, logger: {logger}".format(**locals()))
self.local_hostname = local_hostname
self.logger = logger
# returns: is_successful, output
def run_command(self, host, base_command):
self.logger.debug("Running Command: " + str(base_command))
if host == self.local_hostname or host in self.HOST_LIST_TO_RUN_LOCAL:
return self._run_local_command(base_command)
else:
return self._run_ssh_command(host, base_command)
# This will start the process up as a child process. Meaning if the scheduler_failover_controller fails the child process will fail as well. (unless you're running the systemctl command)
def _run_local_command(self, base_command):
self.logger.debug("Running command as Local command")
output = os.popen(base_command).read()
if output:
output = output.split("\n")
self.logger.debug("Run Command output: " + str(output))
return True, output
def _run_ssh_command(self, host, base_command):
self.logger.debug("Running command as SSH command")
if base_command.startswith("sudo"):
command_split = ["ssh", "-tt", host, base_command]
else:
command_split = ["ssh", host, base_command]
return self._run_split_command(
command_split=command_split
)
def _run_split_command(self, command_split):
self.logger.debug("Running command_split: " + str(command_split))
is_successful = True
output = []
try:
process = subprocess.Popen(command_split, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
process.wait()
if process.stderr is not None:
stderr_output = process.stderr.readlines()
                if stderr_output:
output += [err.decode() if isinstance(err, bytes) else err for err in stderr_output]
self.logger.debug("Run Command stderr output: " + str(stderr_output))
if process.stdout is not None:
output += [out.decode() if isinstance(out, bytes) else out for out in process.stdout.readlines()]
if process.returncode != 0:
self.logger.warn("Process returned code '" + str(process.returncode) + "'")
is_successful = False
except Exception as e:
is_successful = False
output = str(e)
self.logger.debug("Run Command output: " + str(output))
return is_successful, output
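# --- Editor's usage sketch (not part of the original module); the remote
# hostname and the commands below are placeholders.
if __name__ == "__main__":
    import logging
    import socket

    logging.basicConfig(level=logging.DEBUG)
    runner = CommandRunner(socket.gethostname(), logging.getLogger("cmd"))
    ok, out = runner.run_command("localhost", "echo hello")             # local branch
    ok, out = runner.run_command("scheduler-2.example.com", "uptime")   # ssh branch
    print(ok, out)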
|
teamclairvoyant/airflow-scheduler-failover-controller
|
scheduler_failover_controller/command_runner/command_runner.py
|
Python
|
apache-2.0
| 2,797
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import inspect
import sys
import json
import logging
from collections import defaultdict
from urllib import unquote
from datetime import datetime
from datetime import timedelta
from decorator import decorator
from paste.deploy.converters import asint
from tg.decorators import before_validate
from tg import request, redirect, session, config
from tg.render import render
from webob import exc
from pylons import tmpl_context as c
from pylons import response
from webob.exc import HTTPFound, WSGIHTTPException
from allura.lib import helpers as h
from allura.lib import utils
log = logging.getLogger(__name__)
def task(*args, **kw):
"""Decorator that adds a ``.post()`` function to the decorated callable.
Calling ``<original_callable>.post(*args, **kw)`` queues the callable for
execution by a background worker process. All parameters must be
BSON-serializable.
Example usage::
@task
def myfunc():
pass
@task(notifications_disabled=True)
def myotherfunc():
# No email notifications will be sent for c.project during this task
pass
"""
def task_(func):
def post(*args, **kwargs):
delay = kwargs.pop('delay', 0)
project = getattr(c, 'project', None)
cm = (h.notifications_disabled if project and
kw.get('notifications_disabled') else h.null_contextmanager)
with cm(project):
from allura import model as M
return M.MonQTask.post(func, args, kwargs, delay=delay)
# if decorating a class, have to make it a staticmethod
# or it gets a spurious cls argument
func.post = staticmethod(post) if inspect.isclass(func) else post
return func
if len(args) == 1 and callable(args[0]):
return task_(args[0])
return task_
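# --- Editor's usage sketch: queuing the decorated callables from the
# docstring above. `delay` is consumed by post(); all other arguments must be
# BSON-serializable because they are stored in a MonQTask document.
#
#     myfunc.post()               # picked up by the next available worker
#     myotherfunc.post(delay=30)  # run no earlier than 30 seconds from now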
class event_handler(object):
'''Decorator to register event handlers'''
listeners = defaultdict(set)
def __init__(self, *topics):
self.topics = topics
def __call__(self, func):
for t in self.topics:
self.listeners[t].add(func)
return func
class require_post(object):
'''
    A decorator to require that a controller be accessed via POST only. Use it whenever a
    controller modifies data, since that is what POST is for. We have CSRF protection middleware on POSTs, too.
'''
def __init__(self, redir=None):
self.redir = redir
def __call__(self, func):
def check_method(remainder, params):
if request.method != 'POST':
if self.redir is not None:
redirect(self.redir)
raise exc.HTTPMethodNotAllowed(headers={'Allow': 'POST'})
before_validate(check_method)(func)
return func
@decorator
def reconfirm_auth(func, *args, **kwargs):
'''
A decorator to require the user to reconfirm their login. Useful for sensitive pages.
'''
from allura.lib.plugin import AuthenticationProvider
if request.POST.get('password'):
if AuthenticationProvider.get(request).validate_password(c.user, request.POST['password']):
session['auth-reconfirmed'] = datetime.utcnow()
session.save()
kwargs.pop('password', None)
else:
c.form_errors['password'] = 'Invalid password.'
allowed_timedelta = timedelta(seconds=asint(config.get('auth.reconfirm.seconds', 60)))
last_reconfirm = session.get('auth-reconfirmed', datetime.min)
if datetime.utcnow() - last_reconfirm <= allowed_timedelta:
return func(*args, **kwargs)
else:
return render({}, 'jinja', "allura:templates/reconfirm_auth.html")
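# --- Editor's usage sketch (controller and action names are hypothetical).
# The reconfirmation window is read from the `auth.reconfirm.seconds` config
# key (60 seconds by default, per the code above):
#
#     class PreferencesController(BaseController):
#         @expose()
#         @reconfirm_auth
#         def update_email(self, **kw):
#             ...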
class log_action(object):  # pragma: no cover
def __init__(self,
logger=None,
level=logging.INFO,
msg=None,
*args, **kwargs):
if logger is None:
logger = logging
self._logger = logger
self._level = level
self._msg = msg
self._args = args
self._kwargs = kwargs
self._extra_proto = dict(
user=None,
user_id=None,
source=None,
project_name=None,
group_id=None)
def __call__(self, func):
self._func = func
self._extra_proto.update(action=func.__name__)
if self._msg is None:
self._msg = func.__name__
result = lambda *args, **kwargs: self._wrapper(*args, **kwargs)
# assert not hasattr(func, 'decoration')
if hasattr(func, 'decoration'):
result.decoration = func.decoration
return result
def _wrapper(self, *args, **kwargs):
result = None
try:
try:
result = self._func(*args, **kwargs)
except exc.HTTPServerError:
raise
except exc.HTTPException, e:
result = e
args = self._args
kwargs = self._kwargs
extra = kwargs.setdefault('extra', {})
extra.update(self._make_extra(result))
self._logger.log(self._level, self._msg,
*self._args, **self._kwargs)
return result
except:
args = self._args
kwargs = self._kwargs
extra = kwargs.setdefault('extra', {})
extra.update(self._make_extra(result))
kwargs['exc_info'] = sys.exc_info()
self._logger.log(logging.ERROR, self._msg,
*self._args, **self._kwargs)
raise
def _make_extra(self, result=None):
'''Create a dict of extra items to be added to a log record
'''
extra = self._extra_proto.copy()
# Save the client IP address
extra.update(client_ip=utils.ip_address(request))
# Save the user info
user = getattr(request, 'user', None)
if user:
extra.update(user=user.username,
user_id=user.id)
# Save the project info
if (result
and isinstance(result, dict)
and 'p' in result
and result['p'] is not None):
extra.update(
source=result['p']['source'],
project_name=result['p']['shortname'],
group_id=result['p'].get('sf_id'))
# Log the referer cookie if it exists
referer_link = request.cookies.get('referer_link')
if referer_link:
referer_link = unquote(referer_link)
try:
referer_link = json.loads(referer_link)
except ValueError:
pass
extra['referer_link'] = referer_link
return extra
def getattr_(obj, name, default_thunk):
"Similar to .setdefault in dictionaries."
try:
return getattr(obj, name)
except AttributeError:
default = default_thunk()
setattr(obj, name, default)
return default
@decorator
def memoize(func, *args):
"""
Cache the method's result, for the given args
"""
dic = getattr_(func, "memoize_dic", dict)
# memoize_dic is created at the first call
if args in dic:
return dic[args]
else:
result = func(*args)
dic[args] = result
return result
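# --- Editor's usage sketch: the recursive call resolves through the decorated
# module-level name, so each fib(n) is computed once and then served from the
# memoize_dic cache attached to the function.
if __name__ == '__main__':
    @memoize
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    assert fib(30) == 832040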
def memorable_forget():
"""
Decorator to mark a controller action as needing to "forget" remembered input values on the next
page render, if we detect that the form post was processed successfully
"""
def _ok_to_forget(response, controller_result, raised):
"""
Look for signals that say it's probably ok to forget remembered inputs for the current form.
Checks here will need to be expanded for controller actions that behave differently
than others upon successful processing of their particular request
"""
# if there is a flash message with type "ok", then we can forget.
if response.headers:
set_cookie = response.headers.get('Set-Cookie', '')
if 'status%22%3A%20%22ok' in set_cookie:
return True
# if the controller raised a 302, we can assume the value will be remembered by the app
# if needed, and forget.
if raised and isinstance(raised, HTTPFound):
return True
return False
def forget(controller_result, raised=None):
"""
Check if the form's inputs can be forgotten, and set the cookie to forget if so.
        :param controller_result: the result of the controller action
:param raised: any error (redirect or exception) raised by the controller action
"""
if _ok_to_forget(response, controller_result, raised):
response.set_cookie('memorable_forget', request.path)
@decorator
def _inner(func, *args, **kwargs):
res, raised = (None, None)
try:
res = func(*args, **kwargs)
forget(res)
return res
except WSGIHTTPException as ex:
forget(None, ex)
raise ex
return _inner
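# --- Editor's usage sketch (controller/action names are hypothetical). The
# factory takes no arguments, so it is applied with a call:
#
#     class TicketController(BaseController):
#         @memorable_forget()
#         @expose()
#         def save(self, **kw):
#             ...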
|
heiths/allura
|
Allura/allura/lib/decorators.py
|
Python
|
apache-2.0
| 10,039
|
"""Top-level import for all CLI-related functionality in apitools.
Note that importing this file will ultimately have side-effects, and
may require imports not available in all environments (such as App
Engine). In particular, picking up some readline-related imports can
cause pain.
"""
# pylint:disable=wildcard-import
from googlecloudapis.apitools.base.py.app2 import *
from googlecloudapis.apitools.base.py.base_cli import *
|
harshilasu/LinkurApp
|
y/google-cloud-sdk/lib/googlecloudapis/apitools/base/py/cli.py
|
Python
|
gpl-3.0
| 432
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
'''Django template filters to be used to display builds.
'''
from __future__ import absolute_import, division
from __future__ import unicode_literals
from django import template
from django.utils.safestring import mark_safe
from django.utils.html import conditional_escape
from django.urls import reverse
from mbdb.models import Change, Build
import tinder.views
register = template.Library()
@register.filter(needs_autoescape=True)
def showbuild(build_or_step, autoescape=None):
def esc(input):
if autoescape:
return conditional_escape(input)
return input
if build_or_step is None:
return mark_safe(" ")
if isinstance(build_or_step, Change):
# blame column
change = build_or_step
fmt = ('<a href="' +
reverse(tinder.views.builds_for_change) +
'?change=%d" title="%s">%s</a>')
return mark_safe(
fmt % (change.number, change.when.isoformat(), esc(change.who))
)
if isinstance(build_or_step, Build):
fmt = '<a href="%s" title="%s">Build %d</a><br/>%s %s'
build = build_or_step
b_url = reverse('tinder-showbuild',
args=[build.builder.name, build.buildnumber])
rv = fmt % (b_url, build.starttime.isoformat(), build.buildnumber,
build.getProperty('tree'), build.getProperty('locale'))
rv += '<br/>%s' % build.slave.name
if build.sourcestamp.changes.count():
fmt = ('<a href="' +
reverse(tinder.views.builds_for_change) +
'?change=%d">%d</a>')
links = [
fmt % (c.number, c.number)
for c in build.sourcestamp.changes.order_by('pk')
]
rv += '<br/>Changes ' + ', '.join(links)
if build.endtime is not None:
# We're a finished build, just show the build
return mark_safe(rv)
outer = '''<table class="builddetails" border="1" cellspacing="0">
%s
</table>
'''
rowfmt = '''<tr><td class="%s">%s</td></tr>
'''
rows = [
rowfmt % (res2class(s), showstep(s))
for s in build.steps.order_by('-pk')
]
body = ''.join(rows) + rowfmt % ('running', rv)
return mark_safe(outer % body)
return build_or_step.name
@register.filter(needs_autoescape=True)
def showstep(step, autoescape=None):
def esc(input):
if autoescape:
return conditional_escape(input)
return input
if step.starttime and step.endtime:
step_t = step.endtime - step.starttime
if step_t.days:
# something funky, but wth
step_t = "%d day(s)" % step_t.days
else:
step_t = step_t.seconds
if step_t > 5 * 60:
# we're longer than 5 mins, ignore seconds
step_t = "%d minutes" % (step_t // 60)
elif step_t <= 90:
step_t = "%d seconds" % step_t
else:
step_t = "%d minutes %d seconds" % (step_t // 60, step_t % 60)
else:
step_t = '-'
fmt = '<span class="step_text">%s</span> <span class="step_time">%s</span>'
result = fmt % (esc(' '.join(step.text)), step_t)
return mark_safe(result)
@register.filter
def res2class(build_or_step):
resultclasses = ['success', 'warning', 'failure', 'skip', 'except']
try:
class_ = resultclasses[build_or_step.result]
except (TypeError, IndexError):
if build_or_step.starttime:
class_ = 'running'
else:
class_ = ''
return mark_safe(class_)
@register.filter
def timedelta(start, end):
if start is None or end is None:
return mark_safe('-')
td = end - start
rv = []
if td.days:
rv.append('%d day(s)' % td.days)
minutes, seconds = divmod(td.seconds, 60)
if minutes:
rv.append('%d minute(s)' % minutes)
if seconds:
rv.append('%d second(s)' % seconds)
return ' '.join(rv)
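# --- Editor's usage sketch: in a Django template (variable names hypothetical):
#
#     {{ build.starttime|timedelta:build.endtime }}
#
# renders e.g. "2 minute(s) 5 second(s)", or "-" when either bound is None.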
|
mozilla/elmo
|
apps/tinder/templatetags/build_extras.py
|
Python
|
mpl-2.0
| 4,246
|
# #####
# This file is part of the RobotDesigner of the Neurorobotics subproject (SP10)
# in the Human Brain Project (HBP).
# It has been forked from the RobotEditor (https://gitlab.com/h2t/roboteditor)
# developed at the Karlsruhe Institute of Technology in the
# High Performance Humanoid Technologies Laboratory (H2T).
# #####
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# #####
#
# Copyright (c) 2016, TU Munich
#
# Changes:
# 2016-12-08: Guang Chen (TUM), Initial version of SDF support
# 2016-1-16: Guang Chen, v1.0, support exporting kinematic chain&mesh&dynamic
# ######
"""
:term:`Operators <operator>` (and functions) exporting to SDF. It supports the creation of ROS packages, zipped
ROS packages and 'plain' SDF files with absolute or relative file paths.
Note: modify :meth:`pyxb.binding.basis._TypeBinding_mixin.toxml` to call :meth:`xml.dom.minidom.Node.toprettyxml`
"""
# System imports
import os
from math import radians
import tempfile
from pathlib import Path
# Blender imports
import bpy
from bpy.props import StringProperty, BoolProperty
from mathutils import Vector
# RobotDesigner imports
from .generic import sdf_tree
from .generic.helpers import list_to_string, string_to_list, localpose2globalpose
from ...core import config, PluginManager, RDOperator
from ...core.logfile import export_logger
from ...operators.helpers import ModelSelected, ObjectMode
from ...operators.model import SelectModel
from ..osim.osim_export import create_osim, get_muscles
from ..generic_tools import (
create_thumbnail,
export_rqtez_publisher_muscle,
export_rqtez_publisher_controller,
export_rqt_multiplot_muscles,
export_rqt_multiplot_jointcontroller,
)
from ...properties.globals import global_properties
from .generic import config_model_dom
from .generic import sdf_model_dom
from pyxb.namespace import XMLSchema_instance as xsi
import pyxb
def _uri_for_meshes_and_muscles(
in_ros_package: bool, abs_file_paths, toplevel_dir: str, file_path: str
):
"""
Generate proper URI's for included geometry files and muscle definitions (.osim).
:param in_ros_package: Whether to export into a ros package or plain files
:param abs_file_paths: If not installed into a ros package decides whether to use absolute file paths.
:param toplevel_dir: The directory in which to export
:param file_path: The absolute path of the file for which to generate the URI.
:return:
"""
if in_ros_package:
uri = os.path.relpath(file_path, str(Path(toplevel_dir).parent))
return "package://" + uri.replace(os.path.sep, "/")
elif not abs_file_paths:
uri = os.path.relpath(file_path, str(Path(toplevel_dir).parent))
return "model://" + uri.replace(os.path.sep, "/")
else:
return "model://" + file_path.replace(os.path.sep, "/")
def indent(elem, level=0):
i = "\n" + level * " "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent(elem, level + 1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
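# --- Editor's usage sketch: indent() pretty-prints an ElementTree in place,
# two spaces per nesting level.
#
#     import xml.etree.ElementTree as ET
#     root = ET.fromstring("<sdf><model><link/></model></sdf>")
#     indent(root)
#     ET.dump(root)  # now printed with line breaks and indentation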
def export_mesh(
operator: RDOperator,
context,
name: str,
directory: str,
toplevel_dir: str,
in_ros_package: bool,
abs_file_paths=False,
export_collision=False,
):
"""
Exports a mesh to a separate file.
:param operator: The calling operator
:param context: The current context
:param name: the name of the mesh (not the file name).
:type name: basestring.
:param directory: The directory in which to install the meshes
:param toplevel_dir: The directory in which to export
:param in_ros_package: Whether to export into a ros package or plain files
    :param abs_file_paths: If not installed into a ros package, decides whether to use absolute file paths.
:param export_collision: Exporting a collision mesh or visualization mesh.
:return: name of the file the mesh is stored in.
"""
if not export_collision:
meshes = [
obj.name
for obj in context.scene.objects
if obj.type == "MESH"
and obj.name == name
and obj.RobotDesigner.tag == "DEFAULT"
]
directory = os.path.join(directory, "meshes", "visual")
else:
meshes = [
obj.name
for obj in context.scene.objects
if obj.type == "MESH"
and name == obj.name
and obj.RobotDesigner.tag == "COLLISION"
]
directory = os.path.join(directory, "meshes", "collisions")
if not os.path.exists(directory):
os.makedirs(directory)
# There is max. 1 object in the list
assert len(meshes) <= 1
for mesh in meshes:
export_logger.debug("Processing mesh: {}".format(mesh))
model_name = bpy.context.active_object.name
bpy.ops.object.select_all(action="DESELECT")
bpy.data.objects[mesh].select_set(True)
bpy.context.view_layer.objects.active = bpy.data.objects[mesh]
# bpy.context.active_object.select = True
# get the mesh vertices number
bm = bpy.context.view_layer.objects.active.data
# export_logger.debug("# of vertices={}".format(len(bm.vertices)))
if len(bm.vertices) > 1:
if "." in mesh:
file_path = os.path.join(
directory,
bpy.data.objects[mesh].RobotDesigner.fileName.replace(".", "_")
+ ".dae",
)
else:
file_path = os.path.join(
directory, bpy.data.objects[mesh].RobotDesigner.fileName + ".dae"
)
hide_flag_backup = bpy.context.view_layer.objects.active.hide_get()
bpy.context.view_layer.objects.active.hide_set(
False
) # Blender does not want to export hidden objects.
# disconnect mesh from armature
parent_bone = bpy.data.objects[mesh].parent_bone
bpy.data.objects[mesh].parent = None
bpy.ops.wm.collada_export(
filepath=file_path,
apply_modifiers=True,
selected=True,
use_texture_copies=True,
)
# reconnect mesh to armature
bpy.data.objects[mesh].parent = bpy.data.objects[model_name]
bpy.data.objects[mesh].parent_bone = parent_bone
## collada importer does not import library_visual_scene in blender 2.8
# tree = ET.parse(source=file_path)
# collada = tree.getroot()
# if collada.tag[0] == '{':
# uri, ignore, tag = collada.tag[1:].partition("}")
# xmlns = '{' + uri + '}'
# ET.register_namespace('', uri)
# # ET.register_namespace('xsi', 'http://www.w3.org/2001/XMLSchema-instance')
# else:
# xmlns = ''
#
# lib_geometries = collada.find(xmlns + 'library_geometries')
# geometry_id = lib_geometries[0].get('id')
# mesh_url = '#' + geometry_id
#
# node_attr = {'id': mesh, 'name': mesh, 'type': 'NODE'}
# inst_geo_attr = {'url': mesh_url, 'name': mesh}
#
# lib_visual = collada.find(xmlns + 'library_visual_scenes')
# visual_scene = lib_visual.find(xmlns + 'visual_scene')
# if len(visual_scene) == 0:
# node = visual_scene.makeelement('node', node_attr)
# visual_scene.append(node)
# instance = node.makeelement('instance_geometry', inst_geo_attr)
# node.append(instance)
#
# collada.set('xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance')
#
# indent(collada)
# ET.ElementTree(collada).write(file_path, encoding="utf-8", xml_declaration=True)
## collada importer does not import library_visual_scene in blender 2.8
# for elem in collada:
# scene = elem.find(xmlns + 'visual_scene')
# try:
# node = scene.makeelement('node', node_attr)
# scene.append(node)
# instance = node.makeelement('instance_geometry', inst_geo_attr)
# instance.text = None
# node.append(instance)
# except:
# pass
# collada.set('xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance')
# ET.ElementTree(collada).write(file_path, encoding="utf-8", xml_declaration=True)
bpy.context.view_layer.objects.active.hide_set(hide_flag_backup)
# quick fix for dispersed meshes
# todo: find appropriate solution
with open(file_path, "r") as file:
lines = file.readlines()
with open(file_path, "w") as file:
for line in lines:
if "matrix" not in line:
file.write(line)
else:
if "." in mesh:
file_path = os.path.join(
directory,
bpy.data.objects[mesh].RobotDesigner.fileName.replace(".", "_")
+ "_vertices"
+ str(len(bm.vertices))
+ ".dae",
)
else:
file_path = os.path.join(
directory,
bpy.data.objects[mesh].RobotDesigner.fileName
+ "_vertices"
+ str(len(bm.vertices))
+ ".dae",
)
SelectModel.run(model_name=model_name)
return _uri_for_meshes_and_muscles(
in_ros_package, abs_file_paths, toplevel_dir, file_path
)
# return ("model://" + os.path.join(model_folder_name, "meshes",
# mesh + ".dae"))
def create_sdf(
operator: RDOperator,
context,
filepath: str,
meshpath: str,
toplevel_directory: str,
in_ros_package: bool,
abs_filepaths=False,
):
"""
Creates the SDF XML file and exports the meshes
:param operator: The calling operator
:param context: The current context
:param filepath: path to the SDF file
:param meshpath: Path to the mesh directory
:param toplevel_directory: The directory in which to export
:param in_ros_package: Whether to export into a ros package or plain files
:param abs_filepaths: If not installed into a ros package decides whether to use absolute file paths.
:return:
"""
def walk_segments(segment, tree, ref_pose):
"""
Recursively builds a SDF tree object hierarchy for export
:param segment: Reference to a blender bone object
:param tree: Reference to a SDF Tree object. (Defined in sdf_tree.py)
"""
export_logger.info("walk_segments: {}".format(str(segment)))
child = tree.add()
trafo, dummy = segment.RobotDesigner.getTransform()
# trafo, _ = getTransformFromBlender(segment)
# child.joint.origin.rpy = list_to_string(trafo.to_euler())
# child.joint.origin.xyz = list_to_string([i * j for i, j in zip(trafo.translation, blender_scale_factor)])
# Link Sdf properties export
child.link.gravity.append(segment.RobotDesigner.linkInfo.gravity)
child.link.self_collide.append(segment.RobotDesigner.linkInfo.link_self_collide)
# joint ode properties
child.joint.physics = [pyxb.BIND()]
child.joint.physics[0].ode = [pyxb.BIND()]
child.joint.physics[0].ode[0].cfm_damping.append(
segment.RobotDesigner.ode.cfm_damping
)
child.joint.physics[0].ode[0].implicit_spring_damper.append(
segment.RobotDesigner.ode.i_s_damper
)
child.joint.physics[0].ode[0].cfm.append(segment.RobotDesigner.ode.cfm)
child.joint.physics[0].ode[0].erp.append(segment.RobotDesigner.ode.erp)
child.joint.axis[0].dynamics = [pyxb.BIND()]
child.joint.axis[0].dynamics[0].damping.append(
segment.RobotDesigner.joint_dynamics.damping
)
child.joint.axis[0].dynamics[0].friction.append(
segment.RobotDesigner.joint_dynamics.friction
)
child.joint.axis[0].dynamics[0].spring_reference.append(
segment.RobotDesigner.joint_dynamics.spring_reference
)
child.joint.axis[0].dynamics[0].spring_stiffness.append(
segment.RobotDesigner.joint_dynamics.spring_stiffness
)
pose_xyz = list_to_string(
[i * j for i, j in zip(trafo.translation, blender_scale_factor)]
)
pose_rpy = list_to_string(trafo.to_euler())
pose_xyz, pose_rpy = localpose2globalpose(ref_pose, pose_rpy, pose_xyz)
export_logger.info(" child link pose'{}'".format(" ".join([pose_xyz, pose_rpy])))
child.link.pose.append(" ".join([pose_xyz, pose_rpy]))
# child.link.pos[0] = ' '.join([pose_xyz, pose_rpy])
# if '_joint' in segment.name:
# segment.name = segment.name.replace("_joint", "")
if "." in segment.name:
segment.name = segment.name.replace(".", "_")
        # SDF note: "child" here refers to the tree node being built, not to the <child> element of the joint.
child.joint.name = segment.RobotDesigner.joint_name
child.link.name = segment.name.replace("_joint", "_link")
if segment.parent:
parent_link = [
l
for j, l in tree.connectedLinks.items()
if segment.parent.name == l.name
]
if parent_link[0] in tree.connectedJoints:
tree.connectedJoints[parent_link[0]].append(child.joint)
else:
tree.connectedJoints[parent_link[0]] = [child.joint]
# If root segment is connected to world
if segment.RobotDesigner.world is True and segment.parent is None:
tree.connectedJoints[tree.link] = [child.joint]
if segment.parent:
export_logger.info(" segment parent name'{}'".format(segment.parent.name))
export_logger.info(" segment joint name'{}'".format(child.joint.name))
export_logger.info(" segment link name'{}'".format(child.link.name))
export_logger.info(
"connected links (joint->link): {}".format(
{j.name: l.name for j, l in tree.connectedLinks.items()})
)
export_logger.info(
"connected joints (link->joint): {}".format(
{l.name: j[0].name for l, j in tree.connectedJoints.items()})
)
if segment.RobotDesigner.axis_revert:
revert = -1
else:
revert = 1
if segment.RobotDesigner.axis == "X":
joint_axis_xyz = list_to_string(Vector((1, 0, 0)) * revert)
elif segment.RobotDesigner.axis == "Y":
joint_axis_xyz = list_to_string(Vector((0, 1, 0)) * revert)
elif segment.RobotDesigner.axis == "Z":
joint_axis_xyz = list_to_string(Vector((0, 0, 1)) * revert)
child.joint.axis[0].xyz.append(joint_axis_xyz)
        # Setting the following flag is probably wrong. Why? Because RD derives the pose of the
        # child bone from the joint angle and axis w.r.t. the child edit pose. Hence Blender's/RD's
        # behaviour is consistent with Gazebo when this option is turned off.
# child.joint.axis[0].use_parent_model_frame.append(True)
export_logger.info(" joint axis xyz'{}'".format(joint_axis_xyz))
export_logger.info("Parent link: {}".format(child.joint.parent))
export_logger.info("Child link {}:".format(child.joint.child))
export_logger.info("Joint type: {}".format(child.joint.type))
export_logger.info("Axis: {}".format(child.joint.axis))
export_logger.info("Axis limit: {}".format(child.joint.axis[0].limit))
export_logger.info("Axis xyz: {}".format(child.joint.axis[0].xyz))
# Export individual limits only if set as active in GUI
seg = segment.RobotDesigner
if seg.jointMode == "REVOLUTE":
if seg.theta.isActive or seg.dynamic_limits.isActive:
# child.joint.axis[0].limit.append(sdf_model_dom.CTD_ANON_59())
child.joint.axis[0].limit = [pyxb.BIND()]
if seg.theta.isActive:
child.joint.axis[0].limit[0].lower.append((radians(seg.theta.min)))
child.joint.axis[0].limit[0].upper.append((radians(seg.theta.max)))
if seg.dynamic_limits.isActive is True:
child.joint.axis[0].limit[0].effort.append(seg.dynamic_limits.maxTorque)
child.joint.axis[0].limit[0].velocity.append(
seg.dynamic_limits.maxVelocity
)
child.joint.type = "revolute"
if seg.jointMode == "PRISMATIC":
if seg.d.isActive or seg.dynamic_limits.isActive:
# child.joint.axis[0].limit.append(sdf_model_dom.CTD_ANON_59())
child.joint.axis[0].limit = [pyxb.BIND()]
if seg.d.isActive:
child.joint.axis[0].limit[0].lower.append(seg.d.min)
child.joint.axis[0].limit[0].upper.append(seg.d.max)
if seg.dynamic_limits.isActive:
child.joint.axis[0].limit[0].effort.append(seg.dynamic_limits.maxTorque)
child.joint.axis[0].limit[0].velocity.append(
seg.dynamic_limits.maxVelocity
)
child.joint.type = "prismatic"
if seg.jointMode == "REVOLUTE2":
child.joint.type = "revolute2"
if seg.jointMode == "UNIVERSAL":
child.joint.type = "universal"
if seg.jointMode == "BALL":
child.joint.type = "ball"
if seg.jointMode == "FIXED":
child.joint.type = "fixed"
export_logger.info(" joint type'{}'".format(child.joint.type))
### Add Meshes
armature = context.active_object
connected_meshes = [
mesh.name
for mesh in context.scene.objects
if mesh.type == "MESH"
and mesh.parent_bone == segment.name
and mesh.parent == armature
]
# if len(connected_meshes) > 0:
# child.link.name = connected_meshes[0]
# else:
# child.link.name = child.joint.name + '_link'
# # todo: the RobotDesigner does not have the concept of
# # links further it is possible to have
# # todo: several meshes assigned to the same bone
# # todo: solutions add another property to a bone or
# # chose the name from the list of connected meshes
for mesh in connected_meshes:
export_logger.info("Connected mesh name: {}".format(mesh))
pose_bone = context.active_object.pose.bones[segment.name]
pose = (
pose_bone.matrix.inverted()
@ context.active_object.matrix_world.inverted()
@ bpy.data.objects[mesh].matrix_world
)
# bpy.context.active_object.matrix_world = segment_world * trafo_sdf * bpy.context.active_object.matrix_world # * inverse_matrix(bpy.context.active_object.matrix_world)#* \
# bpy.context.active_object.matrix_world
visual_path = export_mesh(
operator,
context,
mesh,
meshpath,
toplevel_directory,
in_ros_package,
abs_filepaths,
export_collision=False,
)
export_logger.info("visual mesh path: {}".format(visual_path))
if visual_path and "_vertices1.dae" not in visual_path:
visual = child.add_mesh(
visual_path,
[
i * j
for i, j in zip(
bpy.data.objects[mesh].scale, blender_scale_factor
)
],
)
visual_pose_xyz = list_to_string(
[i * j for i, j in zip(pose.translation, blender_scale_factor)]
)
visual_pose_rpy = list_to_string(pose.to_euler())
visual.pose.append(" ".join([visual_pose_xyz, visual_pose_rpy]))
visual.name = bpy.data.objects[mesh].name # child.link.name
else:
export_logger.info("No visual model for: {}".format(mesh))
collision_path = export_mesh(
operator,
context,
mesh,
meshpath,
toplevel_directory,
in_ros_package,
abs_filepaths,
export_collision=True,
)
export_logger.info("collision mesh path: {}".format(collision_path))
# this does not include basic collision objects
if collision_path and "_vertices1.dae" not in collision_path:
collision = child.add_collision(
collision_path,
[
i * j
for i, j in zip(
bpy.data.objects[mesh].scale, blender_scale_factor
)
],
)
export_logger.info(
" collision mesh pose translation wo scale'{}'".format(pose.translation)
)
export_logger.info(
" collision mesh pose scale factor'{}'".format(blender_scale_factor)
)
export_logger.info(
" collision mesh pose translation wi scale'{}'".format(
[i * j for i, j in zip(pose.translation, blender_scale_factor)])
)
collision_pose_xyz = list_to_string(
[i * j for i, j in zip(pose.translation, blender_scale_factor)]
)
collision_pose_rpy = list_to_string(pose.to_euler())
collision.pose.append(
" ".join([collision_pose_xyz, collision_pose_rpy])
)
collision.name = bpy.data.objects[
mesh
].name # child.link.name + '_collision'
export_logger.info(
" collision mesh pose'%s'" % collision.pose[0].value()
)
else:
export_logger.info("No collision model for: {}".format(mesh))
# add basic collision objects
if "BASIC_COLLISION_" in bpy.data.objects[mesh].RobotDesigner.tag:
collision = child.add_basic(
bpy.data.objects[mesh].RobotDesigner.tag,
[
i * j
for i, j in zip(
bpy.data.objects[mesh].scale, blender_scale_factor
)
],
)
export_logger.info(
" basic collision mesh pose translation wo scale'{}'".format(
pose.translation)
)
export_logger.info(
" basic collision mesh pose scale factor'{}'".format(blender_scale_factor)
)
export_logger.info(
" basic collision mesh pose translation wi scale'{}'".format(
[i * j for i, j in zip(pose.translation, blender_scale_factor)])
)
collision_pose_xyz = list_to_string(
[i * j for i, j in zip(pose.translation, blender_scale_factor)]
)
collision_pose_rpy = list_to_string(pose.to_euler())
collision.pose.append(
" ".join([collision_pose_xyz, collision_pose_rpy])
)
collision.name = bpy.data.objects[
mesh
].name # child.link.name + '_collision'
export_logger.info(
" basic collision mesh pose'{}'".format(collision.pose[0].value())
)
else:
export_logger.info("No basic collision model for: {}".format(mesh))
# export surface properties for both collision and basic collision objects
if "COLLISION" in bpy.data.objects[mesh].RobotDesigner.tag:
# add surface properties
collision.surface.append(sdf_model_dom.surface())
surface_property = bpy.data.objects[
mesh
].RobotDesigner.sdfCollisionProps
# add bounce properties
collision.surface[0].bounce = [pyxb.BIND()]
bounce = collision.surface[0].bounce[0]
bounce.restitution_coefficient.append(
surface_property.restitution_coeff
)
bounce.threshold.append(surface_property.threshold)
# add friction properties
collision.surface[0].friction = [pyxb.BIND()]
friction = collision.surface[0].friction[0]
friction.torsional = [pyxb.BIND()]
torsional = friction.torsional[0]
torsional.coefficient.append(surface_property.coefficient)
torsional.use_patch_radius.append(surface_property.use_patch_radius)
torsional.patch_radius.append(surface_property.patch_radius)
torsional.surface_radius.append(surface_property.surface_radius)
torsional.ode = [pyxb.BIND()]
torsional.ode[0].slip.append(surface_property.slip)
friction.ode = [pyxb.BIND()]
friction_ode = collision.surface[0].friction[0].ode[0]
friction_ode.mu.append(surface_property.mu)
friction_ode.mu2.append(surface_property.mu2)
fdir1 = "%f %f %f" % (
surface_property.fdir1[0],
surface_property.fdir1[1],
surface_property.fdir1[2],
)
friction_ode.fdir1.append(fdir1)
friction_ode.slip1.append(surface_property.slip1)
friction_ode.slip2.append(surface_property.slip2)
# add contact properties
collision.surface[0].contact = [pyxb.BIND()]
contact = collision.surface[0].contact[0]
contact.collide_without_contact.append(
surface_property.collide_wo_contact
)
contact.collide_without_contact_bitmask.append(
surface_property.collide_wo_contact_bitmask
)
contact.collide_bitmask.append(surface_property.collide_bitmask)
contact.category_bitmask.append(surface_property.category_bitmask)
contact.poissons_ratio.append(surface_property.poissons_ratio)
contact.elastic_modulus.append(surface_property.elastic_modulus)
if (
bpy.data.objects[
global_properties.model_name.get(bpy.context.scene)
].RobotDesigner.physics_engine
== "OPENSIM"
):
contact.opensim = [pyxb.BIND()]
contact_opensim = contact.opensim[0]
contact_opensim.stiffness.append(surface_property.osim_stiffness)
contact_opensim.dissipation.append(
surface_property.osim_dissipation
)
contact.ode = [pyxb.BIND()]
contact_ode = contact.ode[0]
contact_ode.soft_cfm.append(surface_property.soft_cfm)
contact_ode.soft_erp.append(surface_property.soft_erp)
contact_ode.kp.append(surface_property.kp)
contact_ode.kd.append(surface_property.kd)
contact_ode.max_vel.append(surface_property.max_vel)
contact_ode.min_depth.append(surface_property.min_depth)
# add soft contact properties
# todo: not yet implemented dart properties.
# if bpy.data.objects[
# global_properties.model_name.get(bpy.context.scene)].RobotDesigner.physics_engine == 'DART':
### Add Physics
frame_names = [
frame.name
for frame in context.scene.objects
if frame.RobotDesigner.tag == "PHYSICS_FRAME"
and frame.parent_bone == segment.name
]
# If no frame is connected create a default one. This is required for Gazebo!
export_logger.info("frame names: {}".format(frame_names))
# if not frame_names:
# child.add_inertial()
for frame in frame_names:
# Add inertial definitions (for Gazebo)
inertial = child.link.inertial[0]
export_logger.debug(inertial, inertial.__dict__)
if bpy.data.objects[frame].parent_bone == segment.name:
pose_bone = context.active_object.pose.bones[segment.name]
# set mass
inertial.mass[0] = bpy.data.objects[frame].RobotDesigner.dynamics.mass
if inertial.mass[0] <= 0.0:
raise ValueError(
"Mass of "
+ frame
+ " is not positive, but "
+ str(inertial.mass[0])
)
                # Ugly to throw an exception here, but appending to info_list did not surface the message in the GUI.
# set inertia
inertial.inertia[0].ixx[0] = bpy.data.objects[
frame
].RobotDesigner.dynamics.inertiaXX
inertial.inertia[0].ixy[0] = bpy.data.objects[
frame
].RobotDesigner.dynamics.inertiaXY
inertial.inertia[0].ixz[0] = bpy.data.objects[
frame
].RobotDesigner.dynamics.inertiaXZ
inertial.inertia[0].iyy[0] = bpy.data.objects[
frame
].RobotDesigner.dynamics.inertiaYY
inertial.inertia[0].iyz[0] = bpy.data.objects[
frame
].RobotDesigner.dynamics.inertiaYZ
inertial.inertia[0].izz[0] = bpy.data.objects[
frame
].RobotDesigner.dynamics.inertiaZZ
# set inertial pose
pose = (
pose_bone.matrix.inverted()
@ context.active_object.matrix_world.inverted()
@ bpy.data.objects[frame].matrix_world
)
frame_pose_xyz = list_to_string(
[i * j for i, j in zip(pose.translation, blender_scale_factor)]
)
frame_pose_rpy = list_to_string(pose.to_euler())
inertial.pose[0] = " ".join([frame_pose_xyz, frame_pose_rpy])
# add joint controllers
if operator.gazebo and segment.RobotDesigner.jointController.isActive is True:
if segment.parent is None and segment.RobotDesigner.world is False:
pass
else:
controller_pid = list_to_string(
[
segment.RobotDesigner.jointController.P,
segment.RobotDesigner.jointController.I,
segment.RobotDesigner.jointController.D,
]
)
controller = pyxb.BIND(
joint_name=child.joint.name,
type=segment.RobotDesigner.jointController.controllerType,
pid=controller_pid,
)
root.control_plugin.controller.append(controller)
### add link sensors
sensor_names = [
sensor.name
for sensor in context.scene.objects
if sensor.RobotDesigner.tag == "SENSOR"
and sensor.parent_bone == segment.name
]
export_logger.info(" sensor name'{}".format(sensor_names))
for sensor in sensor_names:
active_sensor = bpy.data.objects[sensor]
type = active_sensor.RobotDesigner.sensor_type
if type == "CAMERA_SENSOR":
sensor_sdf = child.add_camera_sensor()
sensor_sdf.name = sensor
# camera
sensor_sdf.type = "camera"
                sensor_sdf.camera.name = "left eye"
# todo sensor_sdf.horizontal_fov = bpy.data.cameras[sensor].angle_x
# todo sensor_sdf.camera.image.append('imagename')
# todo sensor_sdf.camera.image.width.append(active_sensor.RobotDesigner.cameraSensor.width)
# todo sensor_sdf.camera.image.height = active_sensor.RobotDesigner.cameraSensor.height
# todo sensor_sdf.camera.image.format = active_sensor.RobotDesigner.cameraSensor.format
            else:
                export_logger.info("sensor type '{}' not supported for export".format(type))
# elif type == 'CAMERA': todo other sensor types
# sensor_sdf.type = 'camera'
# export_logger.info(" sensor name'{}'".format(child.link.sensor.name))
"""
A quick word on poses in sdf 1.6
The way it works hasn't changed from 1.5
You first have to append before you can freely set a value in it.
Difference is when calling the value.
In 1.6, you have to use: ...pose[0].value() in order to call the value of the pose.
"""
# Add geometry
for child_segments in segment.children:
export_logger.info("Next Segment'{}'".format(child_segments.name))
ref_pose = string_to_list(child.link.pose[0].value())
walk_segments(child_segments, child, ref_pose)
robot_name = context.active_object.name
blender_scale_factor = context.active_object.scale
blender_scale_factor = [
blender_scale_factor[0],
blender_scale_factor[2],
blender_scale_factor[1],
]
root = sdf_tree.SDFTree.create_empty(robot_name)
# add model pose
root.sdf.model[0].pose.append(
" ".join(
[
list_to_string(context.active_object.location),
list_to_string(context.active_object.rotation_euler),
]
)
)
# A link for world. Used for export of root links connected to world
root.link.name = "world"
if (
bpy.data.objects[
global_properties.model_name.get(bpy.context.scene)
].RobotDesigner.physics_engine
== "OPENSIM"
):
# add root geometries to root.link
muscles = get_muscles(robot_name, context)
if muscles:
# add muscles path tag
muscle_uri = _uri_for_meshes_and_muscles(
in_ros_package,
abs_filepaths,
toplevel_directory,
os.path.join(toplevel_directory, "muscles.osim"),
)
root.sdf.model[0].muscles.append(muscle_uri)
# add OpenSim muscle plugin
root.sdf.model[0].plugin.append(sdf_model_dom.plugin())
length = len(root.sdf.model[0].plugin)
root.sdf.model[0].plugin[length - 1].name = "muscle_interface_plugin"
root.sdf.model[0].plugin[
length - 1
].filename = "libgazebo_ros_muscle_interface.so"
# build control plugin element
# if there is a segment which has a controller attached to it: then create controller plugin
for segment in bpy.context.active_object.data.bones:
# The joint controller might still be active even if the root segment is not connected to world.
# An if clause to ignore the controller in such a scenario
if segment.parent is None and segment.RobotDesigner.world is False:
pass
elif segment.RobotDesigner.jointController.isActive is True:
if operator.gazebo:
root.sdf.model[0].plugin.append(sdf_model_dom.plugin())
root.control_plugin = root.sdf.model[0].plugin[
len(root.sdf.model[0].plugin) - 1
]
root.control_plugin.name = robot_name + "_controller"
root.control_plugin.filename = "libgeneric_controller_plugin.so"
root.control_plugin.controller = []
break
root_segments = [b for b in context.active_object.data.bones if b.parent is None]
for segments in root_segments:
export_logger.info("Root Segment'{}'".format(segments.name))
ref_pose = [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
] # transform to gazebo coordinate frame
walk_segments(segments, root, ref_pose)
export_logger.info("Writing to '{}'".format(filepath))
root.write(filepath)
def create_config(
operator: RDOperator,
context,
filepath: str,
meshpath: str,
toplevel_directory: str,
in_ros_package: bool,
abs_filepaths=False,
):
"""
Creates the model.config file and exports it
:param operator: The calling operator
:param context: The current context
    # :param filepath: path to the SDF file (unused here)
    # :param meshpath: Path to the mesh directory (unused here)
:param toplevel_directory: The directory in which to export
:param in_ros_package: Whether to export into a ros package or plain files
:param abs_filepaths: If not installed into a ros package decides whether to use absolute file paths.
:return:
"""
# create model config element
modelI = config_model_dom.model()
# get model data
modelI.name = bpy.context.active_object.RobotDesigner.modelMeta.model_config
modelI.version = bpy.context.active_object.RobotDesigner.modelMeta.model_version
# get thumbnail data
modelI.thumbnail = "thumbnail.png"
# set sdf fixed name
sdf = config_model_dom.sdf_versioned()
sdf._setValue("model.sdf")
sdf.version = 1.6
modelI.sdf = sdf
# get author data
author = config_model_dom.author_type(
bpy.context.active_object.RobotDesigner.author.authorName,
bpy.context.active_object.RobotDesigner.author.authorEmail,
)
# modelI.author = author
modelI.author = author
modelI.description = (
bpy.context.active_object.RobotDesigner.modelMeta.model_description
)
# export model.config file
with open(toplevel_directory + "/model.config", "w") as f:
output = modelI.toDOM()
output.documentElement.setAttributeNS(
xsi.uri(),
"xsi:schemaLocation",
"http://schemas.humanbrainproject.eu/SP10/2017/robot_model_config ../robot_model_configuration.xsd",
)
output.documentElement.setAttributeNS(
xsi.uri(), "xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance"
)
output = output.toprettyxml()
f.write(output.replace("ns1:", ""))
@RDOperator.Preconditions(ModelSelected, ObjectMode)
@PluginManager.register_class
class ExportPlain(RDOperator):
"""
:ref:`operator` for exporting the selected robot to a SDF File and mesh directory in the same directory.
"""
bl_idname = config.OPERATOR_PREFIX + "export_to_sdf_plain"
bl_label = "Export SDF - plain"
filter_glob: StringProperty(
default="*.sdf",
options={"HIDDEN"},
)
abs_file_paths: BoolProperty(name="Absolute Filepaths", default=False)
package_url = False
gazebo: BoolProperty(name="Export Gazebo tags", default=True)
filepath: StringProperty(name="Filename", subtype="FILE_PATH")
@classmethod
def run(cls, abs_file_paths, gazebo, filepath):
"""
Run this operator
"""
return super().run(**cls.pass_keywords())
@RDOperator.OperatorLogger
@RDOperator.Postconditions(ModelSelected, ObjectMode)
def execute(self, context):
toplevel_dir = self.filepath
self.filepath = os.path.join(self.filepath, "model.sdf")
create_sdf(
self,
context,
filepath=self.filepath,
meshpath=toplevel_dir,
toplevel_directory=toplevel_dir,
in_ros_package=False,
abs_filepaths=self.abs_file_paths,
)
create_config(
self,
context,
filepath=self.filepath,
meshpath=toplevel_dir,
toplevel_directory=toplevel_dir,
in_ros_package=False,
abs_filepaths=self.abs_file_paths,
)
create_osim(
self,
context,
filepath=self.filepath,
meshpath=toplevel_dir,
toplevel_directory=toplevel_dir,
in_ros_package=False,
abs_filepaths=self.abs_file_paths,
)
# thumbnail export
create_thumbnail(toplevel_directory=toplevel_dir)
# rqt_ez_publisher exports
if (
global_properties.export_rqt_ez_publisher_muscles.get(bpy.context.scene)
== True
):
export_rqtez_publisher_muscle(toplevel_directory=toplevel_dir)
if (
global_properties.export_rqt_ez_publisher_jointcontroller.get(
bpy.context.scene
)
== True
):
export_rqtez_publisher_controller(toplevel_directory=toplevel_dir)
# rqt_multiplot exports
if (
global_properties.export_rqt_multiplot_muscles.get(bpy.context.scene)
== True
):
export_rqt_multiplot_muscles(toplevel_directory=toplevel_dir)
if (
global_properties.export_rqt_multiplot_jointcontroller.get(
bpy.context.scene
)
== True
):
export_rqt_multiplot_jointcontroller(toplevel_directory=toplevel_dir)
return {"FINISHED"}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {"RUNNING_MODAL"}
@RDOperator.Preconditions(ModelSelected, ObjectMode)
@PluginManager.register_class
class ExportPackage(RDOperator):
"""
:ref:`operator` for exporting the selected robot to an SDF File into a ROS package including model.config file.
"""
bl_idname = config.OPERATOR_PREFIX + "export_to_sdf_package"
bl_label = "Export SDF"
filter_glob: StringProperty(
default="*.sdf",
options={"HIDDEN"},
)
abs_file_paths: BoolProperty(name="Absolute Filepaths", default=False)
package_url = False
gazebo: BoolProperty(name="Export Gazebo tags", default=True)
filepath: StringProperty(name="Filename", subtype="FILE_PATH")
@RDOperator.OperatorLogger
@RDOperator.Postconditions(ModelSelected, ObjectMode)
def execute(self, context):
toplevel_dir = self.filepath
self.filepath = os.path.join(self.filepath, "model.sdf")
create_sdf(
self,
context,
filepath=self.filepath,
meshpath=toplevel_dir,
toplevel_directory=toplevel_dir,
in_ros_package=False,
abs_filepaths=self.abs_file_paths,
)
create_config(
self,
context,
filepath=self.filepath,
meshpath=toplevel_dir,
toplevel_directory=toplevel_dir,
in_ros_package=False,
abs_filepaths=self.abs_file_paths,
)
create_osim(
self,
context,
filepath=self.filepath,
meshpath=toplevel_dir,
toplevel_directory=toplevel_dir,
in_ros_package=False,
abs_filepaths=self.abs_file_paths,
)
# thumbnail export
create_thumbnail(toplevel_directory=toplevel_dir)
# rqt_ez_publisher exports
if (
global_properties.export_rqt_ez_publisher_muscles.get(bpy.context.scene)
== True
):
export_rqtez_publisher_muscle(toplevel_directory=toplevel_dir)
if (
global_properties.export_rqt_ez_publisher_jointcontroller.get(
bpy.context.scene
)
== True
):
export_rqtez_publisher_controller(toplevel_directory=toplevel_dir)
# rqt_multiplot exports
if (
global_properties.export_rqt_multiplot_muscles.get(bpy.context.scene)
== True
):
export_rqt_multiplot_muscles(toplevel_directory=toplevel_dir)
if (
global_properties.export_rqt_multiplot_jointcontroller.get(
bpy.context.scene
)
== True
):
export_rqt_multiplot_jointcontroller(toplevel_directory=toplevel_dir)
return {"FINISHED"}
def invoke(self, context, event):
self.filepath = (
context.active_object.RobotDesigner.modelMeta.model_folder.replace(" ", "_")
)
if self.filepath == "":
self.filepath = global_properties.model_name.get(bpy.context.scene).replace(
" ", "_"
)
context.window_manager.fileselect_add(self)
return {"RUNNING_MODAL"}
@RDOperator.Preconditions(ModelSelected, ObjectMode)
@PluginManager.register_class
class ExportZippedPackage(RDOperator):
"""
:ref:`operator` for exporting the selected robot to an SDF File into a zipped ROS package.
"""
bl_idname = config.OPERATOR_PREFIX + "export_to_sdf_package_zipped"
bl_label = "Export SDF as zipped folder"
filter_glob: StringProperty(
default="*.zip",
options={"HIDDEN"},
)
abs_file_paths: BoolProperty(name="Absolute Filepaths", default=False)
package_url = False
gazebo: BoolProperty(name="Export Gazebo tags", default=True)
filepath: StringProperty(name="Filename", subtype="FILE_PATH")
@RDOperator.OperatorLogger
@RDOperator.Postconditions(ModelSelected, ObjectMode)
def execute(self, context):
"""
        Code snippet from `<http://stackoverflow.com/questions/1855095/how-to-create-a-zip-archive-of-a-directory>`_
"""
import zipfile
if os.path.isdir(self.filepath):
self.logger.debug(self.filepath)
self.report({"ERROR"}, "No File selected!")
return {"FINISHED"}
def zipdir(path, ziph):
# ziph is zipfile handle
for root, dirs, files in os.walk(path):
self.logger.debug("{}, {}, {},".format(root, dirs, files))
for file in files:
file_path = os.path.join(root, file)
ziph.write(file_path, os.path.relpath(file_path, path))
with tempfile.TemporaryDirectory() as target:
dir_name = os.path.splitext(os.path.basename(self.filepath))[0]
temp_dir = os.path.join(target, dir_name)
temp_file = os.path.join(temp_dir, "model.sdf")
if not os.path.exists(temp_dir):
os.makedirs(temp_dir)
create_sdf(
self,
context,
filepath=temp_file,
meshpath=temp_dir,
toplevel_directory=temp_dir,
in_ros_package=False,
abs_filepaths=self.abs_file_paths,
)
create_config(
self,
context,
filepath=self.filepath,
meshpath=temp_dir,
toplevel_directory=temp_dir,
in_ros_package=False,
abs_filepaths=self.abs_file_paths,
)
create_osim(
self,
context,
filepath=self.filepath,
meshpath=temp_dir,
toplevel_directory=temp_dir,
in_ros_package=False,
abs_filepaths=self.abs_file_paths,
)
# thumbnail export
            create_thumbnail(toplevel_directory=temp_dir)
# rqt_ez_publisher exports
if (
global_properties.export_rqt_ez_publisher_muscles.get(bpy.context.scene)
== True
):
                export_rqtez_publisher_muscle(toplevel_directory=temp_dir)
if (
global_properties.export_rqt_ez_publisher_jointcontroller.get(
bpy.context.scene
)
== True
):
                export_rqtez_publisher_controller(toplevel_directory=temp_dir)
# rqt_multiplot exports
if (
global_properties.export_rqt_multiplot_muscles.get(bpy.context.scene)
== True
):
                export_rqt_multiplot_muscles(toplevel_directory=temp_dir)
if (
global_properties.export_rqt_multiplot_jointcontroller.get(
bpy.context.scene
)
== True
):
                export_rqt_multiplot_jointcontroller(toplevel_directory=temp_dir)
self.logger.debug(temp_file)
with zipfile.ZipFile(self.filepath, "w") as zipf:
zipdir(target, zipf)
return {"FINISHED"}
def invoke(self, context, event):
self.filepath = (
context.active_object.RobotDesigner.modelMeta.model_folder.replace(" ", "_")
)
if self.filepath == "":
self.filepath = global_properties.model_name.get(bpy.context.scene).replace(
" ", "_"
)
context.window_manager.fileselect_add(self)
return {"RUNNING_MODAL"}
|
HBPNeurorobotics/BlenderRobotDesigner
|
robot_designer_plugin/export/sdf/sdf_export.py
|
Python
|
gpl-2.0
| 50,899
|
#!/usr/bin/python2.4
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
#
# $Id$
#
# Copyright (C) 1999-2006 Keith Dart <keith@kdart.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
"""
Module for managing the Linux firewall feature from Python, using sudo to run
the ipfw program.
NOTE: this only works on Linux with firewall option enabled in the kernel.
"""
import sudo
#import socketlib
def port_forward(srcport, destport, rule=None):
"""Use firewall rule to forward a TCP port to a different port. Useful for
redirecting privileged ports to non-privileged ports. """
return NotImplemented
def add(rule, action):
return NotImplemented
def delete(rule):
return NotImplemented
def flush():
return NotImplemented
# XXX some day make this complete... :-)
class Firewall(object):
def read(self):
"""Read current rule set."""
return NotImplemented
class IPChains(object):
pass
if __name__ == "__main__":
pass
|
xiangke/pycopia
|
core/pycopia/OS/Linux/firewall.py
|
Python
|
lgpl-2.1
| 1,458
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2018-02-27 13:22
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('calculator', '0004_collection_to_show'),
]
operations = [
migrations.AlterField(
model_name='collection',
name='category',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='calculator.CollectionCategory'),
),
migrations.AlterField(
model_name='comparison',
name='key',
field=models.CharField(choices=[('', 'All'), ('', 'All'), ('', 'All'), ('C', 'C'), ('C#', 'C#'), ('Db', 'Db'), ('D', 'D'), ('D#', 'D#'), ('Eb', 'Eb'), ('E', 'E'), ('F', 'F'), ('F#', 'F#'), ('Gb', 'Gb'), ('G', 'G'), ('G#', 'G#'), ('Ab', 'Ab'), ('A', 'A'), ('A#', 'A#'), ('Bb', 'Bb'), ('B', 'B')], max_length=2, null=True),
),
migrations.AlterField(
model_name='comparison',
name='mode',
field=models.CharField(choices=[('', 'All'), ('', 'All'), ('', 'All'), ('M', 'Major'), ('m', 'Minor')], max_length=1, null=True),
),
migrations.AlterField(
model_name='comparison',
name='time_signature',
field=models.CharField(choices=[('', 'All'), ('', 'All'), ('', 'All'), ('2/2', '2/2'), ('4/2', '4/2'), ('2/4', '2/4'), ('3/4', '3/4'), ('4/4', '4/4'), ('5/4', '5/4'), ('6/4', '6/4'), ('3/8', '3/8'), ('6/8', '6/8'), ('9/8', '9/8'), ('12/8', '12/8')], max_length=4, null=True),
),
migrations.AlterField(
model_name='piece',
name='key',
field=models.CharField(blank=True, choices=[('', 'All'), ('', 'All'), ('', 'All'), ('C', 'C'), ('C#', 'C#'), ('Db', 'Db'), ('D', 'D'), ('D#', 'D#'), ('Eb', 'Eb'), ('E', 'E'), ('F', 'F'), ('F#', 'F#'), ('Gb', 'Gb'), ('G', 'G'), ('G#', 'G#'), ('Ab', 'Ab'), ('A', 'A'), ('A#', 'A#'), ('Bb', 'Bb'), ('B', 'B')], max_length=2),
),
migrations.AlterField(
model_name='piece',
name='mode',
field=models.CharField(blank=True, choices=[('', 'All'), ('', 'All'), ('', 'All'), ('M', 'Major'), ('m', 'Minor')], max_length=1),
),
migrations.AlterField(
model_name='piece',
name='time_signature',
field=models.CharField(blank=True, choices=[('', 'All'), ('', 'All'), ('', 'All'), ('2/2', '2/2'), ('4/2', '4/2'), ('2/4', '2/4'), ('3/4', '3/4'), ('4/4', '4/4'), ('5/4', '5/4'), ('6/4', '6/4'), ('3/8', '3/8'), ('6/8', '6/8'), ('9/8', '9/8'), ('12/8', '12/8')], max_length=4),
),
]
|
GenosResearchGroup/ContourMetrics
|
apps/calculator/migrations/0005_auto_20180227_1322.py
|
Python
|
mit
| 2,718
|
"""Helpers to deal with Cast devices."""
from __future__ import annotations
from typing import Optional
import attr
from pychromecast import dial
from pychromecast.const import CAST_MANUFACTURERS
@attr.s(slots=True, frozen=True)
class ChromecastInfo:
"""Class to hold all data about a chromecast for creating connections.
This also has the same attributes as the mDNS fields by zeroconf.
"""
services: set | None = attr.ib()
uuid: str | None = attr.ib(
converter=attr.converters.optional(str), default=None
) # always convert UUID to string if not None
_manufacturer = attr.ib(type=Optional[str], default=None)
model_name: str = attr.ib(default="")
friendly_name: str | None = attr.ib(default=None)
is_audio_group = attr.ib(type=Optional[bool], default=False)
is_dynamic_group = attr.ib(type=Optional[bool], default=None)
@property
def is_information_complete(self) -> bool:
"""Return if all information is filled out."""
want_dynamic_group = self.is_audio_group
have_dynamic_group = self.is_dynamic_group is not None
have_all_except_dynamic_group = all(
attr.astuple(
self,
filter=attr.filters.exclude(
attr.fields(ChromecastInfo).is_dynamic_group
),
)
)
return have_all_except_dynamic_group and (
not want_dynamic_group or have_dynamic_group
)
@property
    def manufacturer(self) -> str | None:
"""Return the manufacturer."""
if self._manufacturer:
return self._manufacturer
if not self.model_name:
return None
return CAST_MANUFACTURERS.get(self.model_name.lower(), "Google Inc.")
def fill_out_missing_chromecast_info(self) -> ChromecastInfo:
"""Return a new ChromecastInfo object with missing attributes filled in.
Uses blocking HTTP / HTTPS.
"""
if self.is_information_complete:
# We have all information, no need to check HTTP API.
return self
# Fill out missing group information via HTTP API.
if self.is_audio_group:
is_dynamic_group = False
http_group_status = None
if self.uuid:
http_group_status = dial.get_multizone_status(
None,
services=self.services,
zconf=ChromeCastZeroconf.get_zeroconf(),
)
if http_group_status is not None:
is_dynamic_group = any(
str(g.uuid) == self.uuid
for g in http_group_status.dynamic_groups
)
return ChromecastInfo(
services=self.services,
uuid=self.uuid,
friendly_name=self.friendly_name,
model_name=self.model_name,
is_audio_group=True,
is_dynamic_group=is_dynamic_group,
)
# Fill out some missing information (friendly_name, uuid) via HTTP dial.
http_device_status = dial.get_device_status(
None, services=self.services, zconf=ChromeCastZeroconf.get_zeroconf()
)
if http_device_status is None:
# HTTP dial didn't give us any new information.
return self
return ChromecastInfo(
services=self.services,
uuid=(self.uuid or http_device_status.uuid),
friendly_name=(self.friendly_name or http_device_status.friendly_name),
manufacturer=(self.manufacturer or http_device_status.manufacturer),
model_name=(self.model_name or http_device_status.model_name),
)
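# A minimal usage sketch (the helper name and values below are made up for
# illustration): an audio group whose every attribute, including
# is_dynamic_group, is already known counts as "complete", so
# fill_out_missing_chromecast_info() returns it unchanged and never touches
# the blocking HTTP API.
def _example_complete_group_info() -> ChromecastInfo:
"""Build a fully populated ChromecastInfo (illustrative only)."""
info = ChromecastInfo(
services={"fake-mdns-service"},
uuid="9c3a0f0e-0000-0000-0000-000000000000",
manufacturer="Google Inc.",
model_name="Google Cast Group",
friendly_name="Kitchen speakers",
is_audio_group=True,
is_dynamic_group=False,
)
assert info.is_information_complete
assert info.fill_out_missing_chromecast_info() is info
return info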
class ChromeCastZeroconf:
"""Class to hold a zeroconf instance."""
__zconf = None
@classmethod
def set_zeroconf(cls, zconf):
"""Set zeroconf."""
cls.__zconf = zconf
@classmethod
def get_zeroconf(cls):
"""Get zeroconf."""
return cls.__zconf
class CastStatusListener:
"""Helper class to handle pychromecast status callbacks.
Necessary because a CastDevice entity can create a new socket client
and therefore callbacks from multiple chromecast connections can
potentially arrive. This class allows invalidating past chromecast objects.
"""
def __init__(self, cast_device, chromecast, mz_mgr, mz_only=False):
"""Initialize the status listener."""
self._cast_device = cast_device
self._uuid = chromecast.uuid
self._valid = True
self._mz_mgr = mz_mgr
if cast_device._cast_info.is_audio_group:
self._mz_mgr.add_multizone(chromecast)
if mz_only:
return
chromecast.register_status_listener(self)
chromecast.socket_client.media_controller.register_status_listener(self)
chromecast.register_connection_listener(self)
if not cast_device._cast_info.is_audio_group:
self._mz_mgr.register_listener(chromecast.uuid, self)
def new_cast_status(self, cast_status):
"""Handle reception of a new CastStatus."""
if self._valid:
self._cast_device.new_cast_status(cast_status)
def new_media_status(self, media_status):
"""Handle reception of a new MediaStatus."""
if self._valid:
self._cast_device.new_media_status(media_status)
def new_connection_status(self, connection_status):
"""Handle reception of a new ConnectionStatus."""
if self._valid:
self._cast_device.new_connection_status(connection_status)
@staticmethod
def added_to_multizone(group_uuid):
"""Handle the cast added to a group."""
def removed_from_multizone(self, group_uuid):
"""Handle the cast removed from a group."""
if self._valid:
self._cast_device.multizone_new_media_status(group_uuid, None)
def multizone_new_cast_status(self, group_uuid, cast_status):
"""Handle reception of a new CastStatus for a group."""
def multizone_new_media_status(self, group_uuid, media_status):
"""Handle reception of a new MediaStatus for a group."""
if self._valid:
self._cast_device.multizone_new_media_status(group_uuid, media_status)
def invalidate(self):
"""Invalidate this status listener.
All following callbacks won't be forwarded.
"""
# pylint: disable=protected-access
if self._cast_device._cast_info.is_audio_group:
self._mz_mgr.remove_multizone(self._uuid)
else:
self._mz_mgr.deregister_listener(self._uuid, self)
self._valid = False
|
w1ll1am23/home-assistant
|
homeassistant/components/cast/helpers.py
|
Python
|
apache-2.0
| 6,729
|
# countline.py
# Count the code lines of a Java file, or of all Java files under a directory.
import os
def process_file(path):
# Count the non-blank, non-comment lines of a single .java file.
total = 0
if path.endswith('.java'):
with open(path, 'r') as handle:
for line in handle:
line = line.lstrip()
# skip blank/whitespace-only lines and '//' line comments
if len(line) > 1 and not line.startswith("//"):
total += 1
print path, ',', total, 'lines.'
return total
def process_dir(path):
# Recursively count lines in every .java file below path.
total = 0
for filename in os.listdir(path):
filepath = os.path.join(path, filename)
if os.path.isdir(filepath):
# exclude hidden dirs
if not filename.startswith('.'):
total += process_dir(filepath)
elif os.path.isfile(filepath):
total += process_file(filepath)
return total
def process(path):
# Dispatch on file vs. directory and report the grand total.
total = 0
if os.path.isdir(path):
total = process_dir(path)
elif os.path.isfile(path):
total = process_file(path)
print '>>> total lines :', total, '.'
return total
#process('/home/kesalin/test/test/AccountPreference.java')
process('/home/kesalin/test/Settings')
|
kesalin/PythonSnippet
|
countline.py
|
Python
|
gpl-2.0
| 1,288
|
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
test_cloud_endpoints
----------------------------------
Tests Keystone endpoints commands.
"""
import uuid
from openstack.cloud.exc import OpenStackCloudException
from openstack.cloud.exc import OpenStackCloudUnavailableFeature
from openstack.tests.unit import base
from testtools import matchers
class TestCloudEndpoints(base.TestCase):
def get_mock_url(self, service_type='identity', interface='admin',
resource='endpoints', append=None, base_url_append='v3'):
return super(TestCloudEndpoints, self).get_mock_url(
service_type, interface, resource, append, base_url_append)
def _dummy_url(self):
return 'https://%s.example.com/' % uuid.uuid4().hex
def test_create_endpoint_v2(self):
self.use_keystone_v2()
service_data = self._get_service_data()
endpoint_data = self._get_endpoint_v2_data(
service_data.service_id, public_url=self._dummy_url(),
internal_url=self._dummy_url(), admin_url=self._dummy_url())
other_endpoint_data = self._get_endpoint_v2_data(
service_data.service_id, region=endpoint_data.region,
public_url=endpoint_data.public_url)
# correct the keys
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
resource='services', base_url_append='OS-KSADM'),
status_code=200,
json={'OS-KSADM:services': [
service_data.json_response_v2['OS-KSADM:service']]}),
dict(method='POST',
uri=self.get_mock_url(base_url_append=None),
status_code=200,
json=endpoint_data.json_response,
validate=dict(json=endpoint_data.json_request)),
dict(method='GET',
uri=self.get_mock_url(
resource='services', base_url_append='OS-KSADM'),
status_code=200,
json={'OS-KSADM:services': [
service_data.json_response_v2['OS-KSADM:service']]}),
# NOTE(notmorgan): Something odd happens here: we end up doing two
# GETs on the services, because keystoneclient issues the same
# lookup twice.
dict(method='GET',
uri=self.get_mock_url(
resource='services', base_url_append='OS-KSADM'),
status_code=200,
json={'OS-KSADM:services': [
service_data.json_response_v2['OS-KSADM:service']]}),
dict(method='POST',
uri=self.get_mock_url(base_url_append=None),
status_code=200,
json=other_endpoint_data.json_response,
validate=dict(json=other_endpoint_data.json_request))
])
endpoints = self.cloud.create_endpoint(
service_name_or_id=service_data.service_id,
region=endpoint_data.region,
public_url=endpoint_data.public_url,
internal_url=endpoint_data.internal_url,
admin_url=endpoint_data.admin_url
)
self.assertThat(endpoints[0].id,
matchers.Equals(endpoint_data.endpoint_id))
self.assertThat(endpoints[0].region,
matchers.Equals(endpoint_data.region))
self.assertThat(endpoints[0].publicURL,
matchers.Equals(endpoint_data.public_url))
self.assertThat(endpoints[0].internalURL,
matchers.Equals(endpoint_data.internal_url))
self.assertThat(endpoints[0].adminURL,
matchers.Equals(endpoint_data.admin_url))
# test v3 semantics on v2.0 endpoint
self.assertRaises(OpenStackCloudException,
self.cloud.create_endpoint,
service_name_or_id='service1',
interface='mock_admin_url',
url='admin')
endpoints_3on2 = self.cloud.create_endpoint(
service_name_or_id=service_data.service_id,
region=endpoint_data.region,
interface='public',
url=endpoint_data.public_url
)
# test keys and values are correct
self.assertThat(
endpoints_3on2[0].region,
matchers.Equals(other_endpoint_data.region))
self.assertThat(
endpoints_3on2[0].publicURL,
matchers.Equals(other_endpoint_data.public_url))
self.assertThat(endpoints_3on2[0].get('internalURL'),
matchers.Equals(None))
self.assertThat(endpoints_3on2[0].get('adminURL'),
matchers.Equals(None))
self.assert_calls()
def test_create_endpoint_v3(self):
service_data = self._get_service_data()
public_endpoint_data = self._get_endpoint_v3_data(
service_id=service_data.service_id, interface='public',
url=self._dummy_url())
public_endpoint_data_disabled = self._get_endpoint_v3_data(
service_id=service_data.service_id, interface='public',
url=self._dummy_url(), enabled=False)
admin_endpoint_data = self._get_endpoint_v3_data(
service_id=service_data.service_id, interface='admin',
url=self._dummy_url(), region=public_endpoint_data.region)
internal_endpoint_data = self._get_endpoint_v3_data(
service_id=service_data.service_id, interface='internal',
url=self._dummy_url(), region=public_endpoint_data.region)
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(resource='services'),
status_code=200,
json={'services': [
service_data.json_response_v3['service']]}),
dict(method='POST',
uri=self.get_mock_url(),
status_code=200,
json=public_endpoint_data_disabled.json_response,
validate=dict(
json=public_endpoint_data_disabled.json_request)),
dict(method='GET',
uri=self.get_mock_url(resource='services'),
status_code=200,
json={'services': [
service_data.json_response_v3['service']]}),
dict(method='POST',
uri=self.get_mock_url(),
status_code=200,
json=public_endpoint_data.json_response,
validate=dict(json=public_endpoint_data.json_request)),
dict(method='POST',
uri=self.get_mock_url(),
status_code=200,
json=internal_endpoint_data.json_response,
validate=dict(json=internal_endpoint_data.json_request)),
dict(method='POST',
uri=self.get_mock_url(),
status_code=200,
json=admin_endpoint_data.json_response,
validate=dict(json=admin_endpoint_data.json_request)),
])
endpoints = self.cloud.create_endpoint(
service_name_or_id=service_data.service_id,
region=public_endpoint_data_disabled.region,
url=public_endpoint_data_disabled.url,
interface=public_endpoint_data_disabled.interface,
enabled=False)
# Test endpoint values
self.assertThat(
endpoints[0].id,
matchers.Equals(public_endpoint_data_disabled.endpoint_id))
self.assertThat(endpoints[0].url,
matchers.Equals(public_endpoint_data_disabled.url))
self.assertThat(
endpoints[0].interface,
matchers.Equals(public_endpoint_data_disabled.interface))
self.assertThat(
endpoints[0].region,
matchers.Equals(public_endpoint_data_disabled.region))
self.assertThat(
endpoints[0].region_id,
matchers.Equals(public_endpoint_data_disabled.region))
self.assertThat(endpoints[0].enabled,
matchers.Equals(public_endpoint_data_disabled.enabled))
endpoints_2on3 = self.cloud.create_endpoint(
service_name_or_id=service_data.service_id,
region=public_endpoint_data.region,
public_url=public_endpoint_data.url,
internal_url=internal_endpoint_data.url,
admin_url=admin_endpoint_data.url)
# Three endpoints should be returned, public, internal, and admin
self.assertThat(len(endpoints_2on3), matchers.Equals(3))
# test keys and values are correct for each endpoint created
for result, reference in zip(
endpoints_2on3, [public_endpoint_data,
internal_endpoint_data,
admin_endpoint_data]
):
self.assertThat(result.id, matchers.Equals(reference.endpoint_id))
self.assertThat(result.url, matchers.Equals(reference.url))
self.assertThat(result.interface,
matchers.Equals(reference.interface))
self.assertThat(result.region,
matchers.Equals(reference.region))
self.assertThat(result.enabled, matchers.Equals(reference.enabled))
self.assert_calls()
def test_update_endpoint_v2(self):
self.use_keystone_v2()
self.assertRaises(OpenStackCloudUnavailableFeature,
self.cloud.update_endpoint, 'endpoint_id')
def test_update_endpoint_v3(self):
service_data = self._get_service_data()
dummy_url = self._dummy_url()
endpoint_data = self._get_endpoint_v3_data(
service_id=service_data.service_id, interface='admin',
enabled=False)
reference_request = endpoint_data.json_request.copy()
reference_request['endpoint']['url'] = dummy_url
self.register_uris([
dict(method='PATCH',
uri=self.get_mock_url(append=[endpoint_data.endpoint_id]),
status_code=200,
json=endpoint_data.json_response,
validate=dict(json=reference_request))
])
endpoint = self.cloud.update_endpoint(
endpoint_data.endpoint_id,
service_name_or_id=service_data.service_id,
region=endpoint_data.region,
url=dummy_url,
interface=endpoint_data.interface,
enabled=False
)
# test keys and values are correct
self.assertThat(endpoint.id,
matchers.Equals(endpoint_data.endpoint_id))
self.assertThat(endpoint.service_id,
matchers.Equals(service_data.service_id))
self.assertThat(endpoint.url,
matchers.Equals(endpoint_data.url))
self.assertThat(endpoint.interface,
matchers.Equals(endpoint_data.interface))
self.assert_calls()
def test_list_endpoints(self):
endpoints_data = [self._get_endpoint_v3_data() for e in range(1, 10)]
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(),
status_code=200,
json={'endpoints': [e.json_response['endpoint']
for e in endpoints_data]})
])
endpoints = self.cloud.list_endpoints()
# test we are getting exactly len(endpoints_data) elements
self.assertThat(len(endpoints), matchers.Equals(len(endpoints_data)))
# test keys and values are correct
for i, ep in enumerate(endpoints_data):
self.assertThat(endpoints[i].id,
matchers.Equals(ep.endpoint_id))
self.assertThat(endpoints[i].service_id,
matchers.Equals(ep.service_id))
self.assertThat(endpoints[i].url,
matchers.Equals(ep.url))
self.assertThat(endpoints[i].interface,
matchers.Equals(ep.interface))
self.assert_calls()
def test_search_endpoints(self):
endpoints_data = [self._get_endpoint_v3_data(region='region1')
for e in range(0, 2)]
endpoints_data.extend([self._get_endpoint_v3_data()
for e in range(1, 8)])
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(),
status_code=200,
json={'endpoints': [e.json_response['endpoint']
for e in endpoints_data]}),
dict(method='GET',
uri=self.get_mock_url(),
status_code=200,
json={'endpoints': [e.json_response['endpoint']
for e in endpoints_data]}),
dict(method='GET',
uri=self.get_mock_url(),
status_code=200,
json={'endpoints': [e.json_response['endpoint']
for e in endpoints_data]}),
dict(method='GET',
uri=self.get_mock_url(),
status_code=200,
json={'endpoints': [e.json_response['endpoint']
for e in endpoints_data]})
])
# Search by id
endpoints = self.cloud.search_endpoints(
id=endpoints_data[-1].endpoint_id)
# test we are getting exactly 1 element
self.assertEqual(1, len(endpoints))
self.assertThat(endpoints[0].id,
matchers.Equals(endpoints_data[-1].endpoint_id))
self.assertThat(endpoints[0].service_id,
matchers.Equals(endpoints_data[-1].service_id))
self.assertThat(endpoints[0].url,
matchers.Equals(endpoints_data[-1].url))
self.assertThat(endpoints[0].interface,
matchers.Equals(endpoints_data[-1].interface))
# Not found
endpoints = self.cloud.search_endpoints(id='!invalid!')
self.assertEqual(0, len(endpoints))
# Multiple matches
endpoints = self.cloud.search_endpoints(
filters={'region_id': 'region1'})
# test we are getting exactly 2 elements
self.assertEqual(2, len(endpoints))
# test we are getting the correct response for region/region_id compat
endpoints = self.cloud.search_endpoints(
filters={'region': 'region1'})
# test we are getting exactly 2 elements, this is v3
self.assertEqual(2, len(endpoints))
self.assert_calls()
def test_delete_endpoint(self):
endpoint_data = self._get_endpoint_v3_data()
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(),
status_code=200,
json={'endpoints': [
endpoint_data.json_response['endpoint']]}),
dict(method='DELETE',
uri=self.get_mock_url(append=[endpoint_data.endpoint_id]),
status_code=204)
])
# Delete by id
self.cloud.delete_endpoint(id=endpoint_data.endpoint_id)
self.assert_calls()
|
ctrlaltdel/neutrinator
|
vendor/openstack/tests/unit/cloud/test_endpoints.py
|
Python
|
gpl-3.0
| 16,034
|
"""A key-value[] store that implements reservoir sampling on the values."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import random
import threading
class Reservoir(object):
"""A map-to-arrays container, with deterministic Reservoir Sampling.
Items are added with an associated key. Items may be retrieved by key, and
a list of keys can also be retrieved. If size is not zero, then it dictates
the maximum number of items that will be stored with each key. Once there are
more items for a given key, they are replaced via reservoir sampling, such
that each item has an equal probability of being included in the sample.
Deterministic means that for any given seed and bucket size, the sequence of
values that are kept for any given tag will always be the same, and that this
is independent of any insertions on other tags. That is:
>>> separate_reservoir = reservoir.Reservoir(10)
>>> interleaved_reservoir = reservoir.Reservoir(10)
>>> for i in range(100):
>>> separate_reservoir.AddItem('key1', i)
>>> for i in range(100):
>>> separate_reservoir.AddItem('key2', i)
>>> for i in range(100):
>>> interleaved_reservoir.AddItem('key1', i)
>>> interleaved_reservoir.AddItem('key2', i)
separate_reservoir and interleaved_reservoir will be in identical states.
See: https://en.wikipedia.org/wiki/Reservoir_sampling
Adding items has amortized O(1) runtime.
"""
def __init__(self, size, seed=0):
"""Creates a new reservoir.
Args:
size: The number of values to keep in the reservoir for each tag. If 0,
all values will be kept.
seed: The seed of the random number generator to use when sampling.
Different values for |seed| will produce different samples from the same
input items.
Raises:
ValueError: If size is negative or not an integer.
"""
if size < 0 or size != round(size):
raise ValueError('size must be a nonnegative integer, was %s' % size)
self._buckets = collections.defaultdict(
lambda: _ReservoirBucket(size, random.Random(seed)))
# _mutex guards the keys - creating new keys, retrieving by key, etc
# the internal items are guarded by the ReservoirBuckets' internal mutexes
self._mutex = threading.Lock()
def Keys(self):
"""Return all the keys in the reservoir.
Returns:
['list', 'of', 'keys'] in the Reservoir.
"""
with self._mutex:
return list(self._buckets.keys())
def Items(self, key):
"""Return items associated with given key.
Args:
key: The key for which we are finding associated items.
Raises:
KeyError: If the key is not found in the reservoir.
Returns:
[list, of, items] associated with that key.
"""
with self._mutex:
if key not in self._buckets:
raise KeyError('Key %s was not found in Reservoir' % key)
bucket = self._buckets[key]
return bucket.Items()
def AddItem(self, key, item):
"""Add a new item to the Reservoir with the given tag.
The new item is guaranteed to be kept in the Reservoir. One other item might
be replaced.
Args:
key: The key to store the item under.
item: The item to add to the reservoir.
"""
with self._mutex:
bucket = self._buckets[key]
bucket.AddItem(item)
def FilterItems(self, filterFn):
"""Filter items within a Reservoir, using a filtering function.
Args:
filterFn: A function that returns True for the items to be kept.
Returns:
The number of items removed.
"""
with self._mutex:
return sum(bucket.FilterItems(filterFn)
for bucket in self._buckets.values())
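# A small runnable illustration of the determinism documented in the class
# docstring (the helper name is ours, for illustration): separate and
# interleaved insertion orders leave identical samples per key, because every
# key gets its own random.Random(seed).
def _demo_deterministic_sampling():
separate_reservoir = Reservoir(10)
interleaved_reservoir = Reservoir(10)
for i in range(100):
separate_reservoir.AddItem('key1', i)
for i in range(100):
separate_reservoir.AddItem('key2', i)
for i in range(100):
interleaved_reservoir.AddItem('key1', i)
interleaved_reservoir.AddItem('key2', i)
for key in ('key1', 'key2'):
assert separate_reservoir.Items(key) == interleaved_reservoir.Items(key)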
class _ReservoirBucket(object):
"""A container for items from a stream, that implements reservoir sampling.
It always stores the most recent item as its final item.
"""
def __init__(self, _max_size, _random=None):
"""Create the _ReservoirBucket.
Args:
_max_size: The maximum size the reservoir bucket may grow to. If size is
zero, the bucket has unbounded size.
_random: The random number generator to use. If not specified, defaults to
random.Random(0).
Raises:
ValueError: if the size is not a nonnegative integer.
"""
if _max_size < 0 or _max_size != round(_max_size):
raise ValueError('_max_size must be a nonnegative int, was %s' % _max_size)
self.items = []
# This mutex protects the internal items, ensuring that calls to Items and
# AddItem are thread-safe
self._mutex = threading.Lock()
self._max_size = _max_size
self._num_items_seen = 0
if _random is not None:
self._random = _random
else:
self._random = random.Random(0)
def AddItem(self, item):
"""Add an item to the ReservoirBucket, replacing an old item if necessary.
The new item is guaranteed to be added to the bucket, and to be the last
element in the bucket. If the bucket has reached capacity, then an old item
will be replaced. With probability (_max_size/_num_items_seen) a random item
in the bucket will be popped out and the new item will be appended
to the end. With probability (1 - _max_size/_num_items_seen)
the last item in the bucket will be replaced.
Since the O(n) replacements occur with O(1/_num_items_seen) likelihood,
the amortized runtime is O(1).
Args:
item: The item to add to the bucket.
"""
with self._mutex:
if len(self.items) < self._max_size or self._max_size == 0:
self.items.append(item)
else:
r = self._random.randint(0, self._num_items_seen)
if r < self._max_size:
self.items.pop(r)
self.items.append(item)
else:
self.items[-1] = item
self._num_items_seen += 1
def FilterItems(self, filterFn):
"""Filter items in a ReservoirBucket, using a filtering function.
Filtering items from the reservoir bucket must update the
internal state variable self._num_items_seen, which is used for determining
the rate of replacement in reservoir sampling. Ideally, self._num_items_seen
would contain the exact number of items that have ever been seen by the
ReservoirBucket and satisfy filterFn. However, the ReservoirBucket does not
have access to all items seen -- it only has access to the subset of items
that have survived sampling (self.items). Therefore, we estimate
self._num_items_seen by scaling it by the same ratio as the ratio of items
not removed from self.items.
Args:
filterFn: A function that returns True for items to be kept.
Returns:
The number of items removed from the bucket.
"""
with self._mutex:
size_before = len(self.items)
self.items = list(filter(filterFn, self.items))
size_diff = size_before - len(self.items)
# Estimate a correction to the number of items seen
prop_remaining = len(self.items) / float(
size_before) if size_before > 0 else 0
self._num_items_seen = int(round(self._num_items_seen * prop_remaining))
return size_diff
def Items(self):
"""Get all the items in the bucket."""
with self._mutex:
return self.items
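# Worked example of the FilterItems scaling rule above (the helper name and
# numbers are illustrative): with _max_size=10 and _num_items_seen=100, a
# filter keeping 5 of the 10 sampled items gives prop_remaining = 5/10 = 0.5,
# so the new estimate is round(100 * 0.5) = 50 items seen.
def _demo_filter_scaling():
bucket = _ReservoirBucket(10)
for i in range(100):
bucket.AddItem(i)
assert bucket._num_items_seen == 100
removed = bucket.FilterItems(lambda x: x % 2 == 0)
# the estimate shrinks in proportion to the fraction of items removed
assert bucket._num_items_seen == int(round(100 * (10 - removed) / 10.0))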
|
kcartier/tensorflow-toe-in-the-water
|
tensorflow/python/summary/impl/reservoir.py
|
Python
|
apache-2.0
| 7,300
|
import os
import dbus.service # Ignore PyImportSortBear
from coalib.misc.Exceptions import get_exitcode
from coalib.output.Interactions import fail_acquire_settings
from coalib.output.printers.ListLogPrinter import ListLogPrinter
from coalib.parsing.Globbing import fnmatch
from coalib.processes.Processing import execute_section
from coalib.results.HiddenResult import HiddenResult
from coalib.settings.ConfigurationGathering import (
find_user_config, gather_configuration)
from coalib.settings.Setting import glob_list
class DbusDocument(dbus.service.Object):
interface = "org.coala_analyzer.v1"
def __init__(self, doc_id, path=""):
"""
Creates a new dbus object-path for every document that a
DbusApplication wants coala to analyze. It stores the information
(path) of the document and the config file to use when analyzing the
given document.
:param doc_id: An id for the document.
:param path: The path to the document.
"""
dbus.service.Object.__init__(self)
self.config_file = ""
self.path = path
self.doc_id = doc_id
@dbus.service.method(interface,
in_signature="",
out_signature="s")
def FindConfigFile(self):
"""
This method uses the path of the document to identify a user config
file for it.
:return: The config file path
"""
if self.path == "":
return ""
self.config_file = find_user_config(self.path)
return self.config_file
@dbus.service.method(interface,
in_signature="s",
out_signature="s")
def SetConfigFile(self, config_file):
"""
This method sets the config file to use. It has to be an absolute path,
as otherwise it is difficult to find it.
:param config_file: The path of the config file to use. This has to be
an absolute path
:return: The config path which has been used
"""
self.config_file = config_file
return self.config_file
@dbus.service.method(interface,
in_signature="",
out_signature="s")
def GetConfigFile(self):
"""
This method gets the config file which is being used
:return: The config path which is being used
"""
return self.config_file
# Signature explanation:
# s -> string
# b -> boolean
# i -> integer (32bit)
# a -> array (list of tuple in python)
# () -> structure (or tuple in python)
# a{ss} -> dictionary with string keys and string values
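# Putting those pieces together, the out_signature of Analyze() below,
# (iaa{ss}a(sbaa{ss})), is one struct holding: an integer exitcode, an
# array of string->string dictionaries (the logs), and an array of
# (section name, success flag, array of result dictionaries) structures.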
@dbus.service.method(interface,
in_signature="",
out_signature="(iaa{ss}a(sbaa{ss}))")
def Analyze(self):
"""
This method analyzes the document and sends back the result
:return: The output is structure which has 3 items:
- The exitcode from the analysis.
- List of logs from the analysis.
- List of information about each section that contains:
- The name of the section.
- Boolean which is true if all bears in the section
executed successfully.
- List of results where each result is a string
dictionary which contains:
id, origin, message, file, line_nr, severity
"""
retval = []
if self.path == "" or self.config_file == "":
return retval
args = ["--config=" + self.config_file]
log_printer = ListLogPrinter()
exitcode = 0
try:
yielded_results = False
(sections,
local_bears,
global_bears,
targets) = gather_configuration(fail_acquire_settings,
log_printer,
arg_list=args)
for section_name in sections:
section = sections[section_name]
if not section.is_enabled(targets):
continue
if any([fnmatch(self.path, file_pattern)
for file_pattern in glob_list(section["files"])]):
section["files"].value = self.path
section_result = execute_section(
section=section,
global_bear_list=global_bears[section_name],
local_bear_list=local_bears[section_name],
print_results=lambda *args: True,
log_printer=log_printer)
yielded_results = yielded_results or section_result[0]
retval.append(
DbusDocument.results_to_dbus_struct(section_result,
section_name))
if yielded_results:
exitcode = 1
except BaseException as exception: # pylint: disable=broad-except
exitcode = exitcode or get_exitcode(exception, log_printer)
logs = [log.to_string_dict() for log in log_printer.logs]
return (exitcode, logs, retval)
@staticmethod
def results_to_dbus_struct(section_result, section_name):
"""
Converts the result tuple given by execute_section() - which has
dictionaries and classes inside it - into a purely array based format
as dbus protocol only allows arrays.
:param section_result: The result tuple given by execute_section()
for a section
:param section_name: The name of the section
:return: The result for a section in the form of an
array which is sendable through dbus.
"""
results_for_section = []
for i in range(1, 3): # Loop over bear types - local, global
# Loop over every file affected for local bears
# and every bear for global bears
for key, value in section_result[i].items():
# Loop over every result for a file
results_for_section += [result.to_string_dict()
for result in filter(
lambda x: not isinstance(x, HiddenResult),
value)]
return [section_name, section_result[0], results_for_section]
@property
def path(self):
return self._path
@path.setter
def path(self, new_path):
if new_path:
new_path = os.path.abspath(os.path.expanduser(new_path))
self._path = new_path
|
svsn2117/coala
|
coalib/output/dbus/DbusDocument.py
|
Python
|
agpl-3.0
| 6,851
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
import warnings
try:
# Python 2.6
import unittest2 as unittest
except ImportError:
import unittest
try:
from mock import Mock, patch
except ImportError:
# python3
from unittest.mock import Mock, patch # noqa
from contextlib import contextmanager
from flask import Flask
from nose.tools import assert_equal, assert_raises # noqa
try:
from nose.tools import assert_equal, assert_raises, assert_in, assert_not_in, assert_is_none # noqa
except ImportError:
# Extract unittest2.TestCase methods using the same method than nose (only for Python 2.6)
class Dummy(unittest.TestCase):
def nop():
pass
dummy = Dummy('nop')
assertions = {
'assert_equal': 'assertEqual',
'assert_raises': 'assertRaises',
'assert_in': 'assertIn',
'assert_not_in': 'assertNotIn',
'assert_is_none': 'assertIsNone',
}
for local_name, unittest_name in assertions.items():
vars()[local_name] = getattr(dummy, unittest_name)
del Dummy
del dummy
def assert_data_equal(tested, expected):
'''Compare data without caring about order and type (dict vs. OrderedDict)'''
tested = json.loads(json.dumps(tested))
expected = json.loads(json.dumps(expected))
assert_equal(tested, expected)
class TestCase(unittest.TestCase):
'''A helper mixin for common operations'''
def setUp(self):
'''Initialize a Flask application'''
self.app = Flask(__name__)
@contextmanager
def context(self, **kwargs):
with self.app.test_request_context('/', **kwargs):
yield
@contextmanager
def settings(self, **settings):
'''
A context manager to alter app settings during a test and restore them afterwards.
'''
original = {}
# backup
for key, value in settings.items():
original[key] = self.app.config.get(key)
self.app.config[key] = value
yield
# restore
for key, value in original.items():
self.app.config[key] = value
@contextmanager
def assert_warning(self, category=Warning):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
# Cause all warnings to always be triggered.
yield
self.assertGreaterEqual(len(w), 1, 'It should raise a warning')
warning = w[0]
self.assertEqual(warning.category, category, 'It should raise {0}'.format(category.__name__))
def get(self, url, **kwargs):
with self.app.test_client() as client:
return client.get(url, **kwargs)
def post(self, url, **kwargs):
with self.app.test_client() as client:
return client.post(url, **kwargs)
def get_json(self, url, status=200, **kwargs):
response = self.get(url, **kwargs)
self.assertEqual(response.status_code, status)
self.assertEqual(response.content_type, 'application/json')
return json.loads(response.data.decode('utf8'))
def get_specs(self, prefix='', status=200, **kwargs):
'''Get a Swagger specification for a RestPlus API'''
return self.get_json('{0}/swagger.json'.format(prefix), status=status, **kwargs)
def assertDataEqual(self, tested, expected):
'''Compare data without caring about order and type (dict vs. OrderedDict)'''
assert_data_equal(tested, expected)
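# A minimal usage sketch (the test class and flag below are made up for
# illustration): subclass TestCase and use the settings() context manager to
# flip a config value for one block, relying on its automatic restore.
class _ExampleSettingsUsage(TestCase):
def test_debug_flag_is_restored(self):
with self.settings(DEBUG=True):
self.assertTrue(self.app.config['DEBUG'])
self.assertFalse(self.app.config['DEBUG'])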
|
awiddersheim/flask-restplus
|
tests/__init__.py
|
Python
|
mit
| 3,507
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Upgrade Search models."""
from invenio.ext.sqlalchemy import db
from invenio.legacy.dbquery import run_sql
from invenio.modules.upgrader.api import op
depends_on = [u'search_2014_07_21_facets_per_collection']
def info():
"""Info."""
return "Add a autoincrement id in collection_field_fieldvalue table."""
def do_upgrade():
"""Implement your upgrades here."""
# drop primary keys
# (not necessary, because the trick in invenio/base/scripts/database.py
# already removes the primary key)
# op.drop_constraint(None, 'collection_field_fieldvalue',
# type_='primary')
# add column "id" in the table
op.add_column('collection_field_fieldvalue',
db.Column('id', db.MediumInteger(9, unsigned=True),
nullable=False))
# set all new ids
records = run_sql("""SELECT id_collection, id_field, type, score,
score_fieldvalue
FROM collection_field_fieldvalue AS cff
ORDER BY cff.id_collection, id_field, type, score,
score_fieldvalue""")
for index, rec in enumerate(records):
run_sql("""UPDATE collection_field_fieldvalue
SET id = %s WHERE id_collection = %s AND id_field = %s
AND type = %s AND score = %s AND score_fieldvalue = %s """,
(index + 1, rec[0], rec[1], rec[2], rec[3], rec[4]))
# create new primary key with id
op.create_primary_key('pk_collection_field_fieldvalue_id',
'collection_field_fieldvalue', ['id'])
# set id as autoincrement
op.alter_column('collection_field_fieldvalue', 'id',
existing_type=db.MediumInteger(9, unsigned=True),
existing_nullable=False, autoincrement=True)
def estimate():
"""Estimate running time of upgrade in seconds (optional)."""
total = run_sql("SELECT count(*) FROM collection_field_fieldvalue")
return int(float(total[0][0]) / 1000) + 1
def pre_upgrade():
"""Run pre-upgrade checks (optional)."""
# Example of raising errors:
# raise RuntimeError("Description of error 1", "Description of error 2")
def post_upgrade():
"""Run post-upgrade checks (optional)."""
# Example of issuing warnings:
# warnings.warn("A continuable error occurred")
|
Lilykos/invenio
|
invenio/modules/search/upgrades/search_2015_03_03_fix_models.py
|
Python
|
gpl-2.0
| 3,136
|
import _plotly_utils.basevalidators
class FontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name="font", parent_name="heatmap.colorbar.title", **kwargs
):
super(FontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Font"),
data_docs=kwargs.pop(
"data_docs",
"""
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
""",
),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/heatmap/colorbar/title/_font.py
|
Python
|
mit
| 1,535
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests For miscellaneous util methods used with compute."""
import copy
import string
import mock
from oslo.config import cfg
from oslo.serialization import jsonutils
from oslo.utils import importutils
import six
import testtools
from nova.compute import flavors
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova import context
from nova import db
from nova import exception
from nova.image import glance
from nova.network import api as network_api
from nova import objects
from nova.objects import block_device as block_device_obj
from nova.objects import instance as instance_obj
from nova.openstack.common import periodic_task
from nova import rpc
from nova import test
from nova.tests import fake_block_device
from nova.tests import fake_instance
from nova.tests import fake_network
from nova.tests import fake_notifier
from nova.tests import fake_server_actions
import nova.tests.image.fake
from nova.tests import matchers
from nova import utils
from nova.virt import driver
CONF = cfg.CONF
CONF.import_opt('compute_manager', 'nova.service')
CONF.import_opt('compute_driver', 'nova.virt.driver')
class ComputeValidateDeviceTestCase(test.TestCase):
def setUp(self):
super(ComputeValidateDeviceTestCase, self).setUp()
self.context = context.RequestContext('fake', 'fake')
# check if test name includes "xen"
if 'xen' in self.id():
self.flags(compute_driver='xenapi.XenAPIDriver')
self.instance = {
'uuid': 'fake',
'root_device_name': None,
'instance_type_id': 'fake',
}
else:
self.instance = {
'uuid': 'fake',
'root_device_name': '/dev/vda',
'default_ephemeral_device': '/dev/vdb',
'instance_type_id': 'fake',
}
self.data = []
self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
lambda context, instance, use_slave=False: self.data)
def _update_flavor(self, flavor_info):
self.flavor = {
'id': 1,
'name': 'foo',
'memory_mb': 128,
'vcpus': 1,
'root_gb': 10,
'ephemeral_gb': 10,
'flavorid': 1,
'swap': 0,
'rxtx_factor': 1.0,
'vcpu_weight': 1,
}
self.flavor.update(flavor_info)
self.instance['system_metadata'] = [{'key': 'instance_type_%s' % key,
'value': value}
for key, value in
self.flavor.items()]
def _validate_device(self, device=None):
bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, self.instance['uuid'])
return compute_utils.get_device_name_for_instance(
self.context, self.instance, bdms, device)
@staticmethod
def _fake_bdm(device):
return fake_block_device.FakeDbBlockDeviceDict({
'source_type': 'volume',
'destination_type': 'volume',
'device_name': device,
'no_device': None,
'volume_id': 'fake',
'snapshot_id': None,
'guest_format': None
})
def test_wrap(self):
self.data = []
for letter in string.ascii_lowercase[2:]:
self.data.append(self._fake_bdm('/dev/vd' + letter))
device = self._validate_device()
self.assertEqual(device, '/dev/vdaa')
def test_wrap_plus_one(self):
self.data = []
for letter in string.ascii_lowercase[2:]:
self.data.append(self._fake_bdm('/dev/vd' + letter))
self.data.append(self._fake_bdm('/dev/vdaa'))
device = self._validate_device()
self.assertEqual(device, '/dev/vdab')
def test_later(self):
self.data = [
self._fake_bdm('/dev/vdc'),
self._fake_bdm('/dev/vdd'),
self._fake_bdm('/dev/vde'),
]
device = self._validate_device()
self.assertEqual(device, '/dev/vdf')
def test_gap(self):
self.data = [
self._fake_bdm('/dev/vdc'),
self._fake_bdm('/dev/vde'),
]
device = self._validate_device()
self.assertEqual(device, '/dev/vdd')
def test_no_bdms(self):
self.data = []
device = self._validate_device()
self.assertEqual(device, '/dev/vdc')
def test_lxc_names_work(self):
self.instance['root_device_name'] = '/dev/a'
self.instance['ephemeral_device_name'] = '/dev/b'
self.data = []
device = self._validate_device()
self.assertEqual(device, '/dev/c')
def test_name_conversion(self):
self.data = []
device = self._validate_device('/dev/c')
self.assertEqual(device, '/dev/vdc')
device = self._validate_device('/dev/sdc')
self.assertEqual(device, '/dev/vdc')
device = self._validate_device('/dev/xvdc')
self.assertEqual(device, '/dev/vdc')
def test_invalid_device_prefix(self):
self.assertRaises(exception.InvalidDevicePath,
self._validate_device, '/baddata/vdc')
def test_device_in_use(self):
exc = self.assertRaises(exception.DevicePathInUse,
self._validate_device, '/dev/vda')
self.assertIn('/dev/vda', six.text_type(exc))
def test_swap(self):
self.instance['default_swap_device'] = "/dev/vdc"
device = self._validate_device()
self.assertEqual(device, '/dev/vdd')
def test_swap_no_ephemeral(self):
del self.instance['default_ephemeral_device']
self.instance['default_swap_device'] = "/dev/vdb"
device = self._validate_device()
self.assertEqual(device, '/dev/vdc')
def test_ephemeral_xenapi(self):
self._update_flavor({
'ephemeral_gb': 10,
'swap': 0,
})
self.stubs.Set(flavors, 'get_flavor',
lambda instance_type_id, ctxt=None: self.flavor)
device = self._validate_device()
self.assertEqual(device, '/dev/xvdc')
def test_swap_xenapi(self):
self._update_flavor({
'ephemeral_gb': 0,
'swap': 10,
})
self.stubs.Set(flavors, 'get_flavor',
lambda instance_type_id, ctxt=None: self.flavor)
device = self._validate_device()
self.assertEqual(device, '/dev/xvdb')
def test_swap_and_ephemeral_xenapi(self):
self._update_flavor({
'ephemeral_gb': 10,
'swap': 10,
})
self.stubs.Set(flavors, 'get_flavor',
lambda instance_type_id, ctxt=None: self.flavor)
device = self._validate_device()
self.assertEqual(device, '/dev/xvdd')
def test_swap_and_one_attachment_xenapi(self):
self._update_flavor({
'ephemeral_gb': 0,
'swap': 10,
})
self.stubs.Set(flavors, 'get_flavor',
lambda instance_type_id, ctxt=None: self.flavor)
device = self._validate_device()
self.assertEqual(device, '/dev/xvdb')
self.data.append(self._fake_bdm(device))
device = self._validate_device()
self.assertEqual(device, '/dev/xvdd')
def test_no_dev_root_device_name_get_next_name(self):
self.instance['root_device_name'] = 'vda'
device = self._validate_device()
self.assertEqual('/dev/vdc', device)
class DefaultDeviceNamesForInstanceTestCase(test.NoDBTestCase):
def setUp(self):
super(DefaultDeviceNamesForInstanceTestCase, self).setUp()
self.context = context.RequestContext('fake', 'fake')
self.ephemerals = block_device_obj.block_device_make_list(
self.context,
[fake_block_device.FakeDbBlockDeviceDict(
{'id': 1, 'instance_uuid': 'fake-instance',
'device_name': '/dev/vdb',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'guest_format': None,
'boot_index': -1})])
self.swap = block_device_obj.block_device_make_list(
self.context,
[fake_block_device.FakeDbBlockDeviceDict(
{'id': 2, 'instance_uuid': 'fake-instance',
'device_name': '/dev/vdc',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'guest_format': 'swap',
'boot_index': -1})])
self.block_device_mapping = block_device_obj.block_device_make_list(
self.context,
[fake_block_device.FakeDbBlockDeviceDict(
{'id': 3, 'instance_uuid': 'fake-instance',
'device_name': '/dev/vda',
'source_type': 'volume',
'destination_type': 'volume',
'volume_id': 'fake-volume-id-1',
'boot_index': 0}),
fake_block_device.FakeDbBlockDeviceDict(
{'id': 4, 'instance_uuid': 'fake-instance',
'device_name': '/dev/vdd',
'source_type': 'snapshot',
'destination_type': 'volume',
'snapshot_id': 'fake-snapshot-id-1',
'boot_index': -1}),
fake_block_device.FakeDbBlockDeviceDict(
{'id': 5, 'instance_uuid': 'fake-instance',
'device_name': '/dev/vde',
'source_type': 'blank',
'destination_type': 'volume',
'boot_index': -1})])
self.flavor = {'swap': 4}
self.instance = {'uuid': 'fake_instance', 'ephemeral_gb': 2}
self.is_libvirt = False
self.root_device_name = '/dev/vda'
self.update_called = False
def fake_extract_flavor(instance):
return self.flavor
def fake_driver_matches(driver_string):
if driver_string == 'libvirt.LibvirtDriver':
return self.is_libvirt
return False
self.patchers = []
self.patchers.append(
mock.patch.object(objects.BlockDeviceMapping, 'save'))
self.patchers.append(
mock.patch.object(
flavors, 'extract_flavor',
new=mock.Mock(side_effect=fake_extract_flavor)))
self.patchers.append(
mock.patch.object(driver,
'compute_driver_matches',
new=mock.Mock(
side_effect=fake_driver_matches)))
for patcher in self.patchers:
patcher.start()
def tearDown(self):
super(DefaultDeviceNamesForInstanceTestCase, self).tearDown()
for patcher in self.patchers:
patcher.stop()
def _test_default_device_names(self, *block_device_lists):
compute_utils.default_device_names_for_instance(self.instance,
self.root_device_name,
*block_device_lists)
def test_only_block_device_mapping(self):
# Test no-op
original_bdm = copy.deepcopy(self.block_device_mapping)
self._test_default_device_names([], [], self.block_device_mapping)
for original, new in zip(original_bdm, self.block_device_mapping):
self.assertEqual(original.device_name, new.device_name)
# Assert it defaults the missing one as expected
self.block_device_mapping[1]['device_name'] = None
self.block_device_mapping[2]['device_name'] = None
self._test_default_device_names([], [], self.block_device_mapping)
self.assertEqual('/dev/vdb',
self.block_device_mapping[1]['device_name'])
self.assertEqual('/dev/vdc',
self.block_device_mapping[2]['device_name'])
def test_with_ephemerals(self):
# Test ephemeral gets assigned
self.ephemerals[0]['device_name'] = None
self._test_default_device_names(self.ephemerals, [],
self.block_device_mapping)
self.assertEqual(self.ephemerals[0]['device_name'], '/dev/vdb')
self.block_device_mapping[1]['device_name'] = None
self.block_device_mapping[2]['device_name'] = None
self._test_default_device_names(self.ephemerals, [],
self.block_device_mapping)
self.assertEqual('/dev/vdc',
self.block_device_mapping[1]['device_name'])
self.assertEqual('/dev/vdd',
self.block_device_mapping[2]['device_name'])
def test_with_swap(self):
# Test swap only
self.swap[0]['device_name'] = None
self._test_default_device_names([], self.swap, [])
self.assertEqual(self.swap[0]['device_name'], '/dev/vdb')
# Test swap and block_device_mapping
self.swap[0]['device_name'] = None
self.block_device_mapping[1]['device_name'] = None
self.block_device_mapping[2]['device_name'] = None
self._test_default_device_names([], self.swap,
self.block_device_mapping)
self.assertEqual(self.swap[0]['device_name'], '/dev/vdb')
self.assertEqual('/dev/vdc',
self.block_device_mapping[1]['device_name'])
self.assertEqual('/dev/vdd',
self.block_device_mapping[2]['device_name'])
def test_all_together(self):
# Test swap missing
self.swap[0]['device_name'] = None
self._test_default_device_names(self.ephemerals,
self.swap, self.block_device_mapping)
self.assertEqual(self.swap[0]['device_name'], '/dev/vdc')
# Test swap and eph missing
self.swap[0]['device_name'] = None
self.ephemerals[0]['device_name'] = None
self._test_default_device_names(self.ephemerals,
self.swap, self.block_device_mapping)
self.assertEqual(self.ephemerals[0]['device_name'], '/dev/vdb')
self.assertEqual(self.swap[0]['device_name'], '/dev/vdc')
# Test all missing
self.swap[0]['device_name'] = None
self.ephemerals[0]['device_name'] = None
self.block_device_mapping[1]['device_name'] = None
self.block_device_mapping[2]['device_name'] = None
self._test_default_device_names(self.ephemerals,
self.swap, self.block_device_mapping)
self.assertEqual(self.ephemerals[0]['device_name'], '/dev/vdb')
self.assertEqual(self.swap[0]['device_name'], '/dev/vdc')
self.assertEqual('/dev/vdd',
self.block_device_mapping[1]['device_name'])
self.assertEqual('/dev/vde',
self.block_device_mapping[2]['device_name'])
class UsageInfoTestCase(test.TestCase):
def setUp(self):
def fake_get_nw_info(cls, ctxt, instance):
self.assertTrue(ctxt.is_admin)
return fake_network.fake_get_instance_nw_info(self.stubs, 1, 1)
super(UsageInfoTestCase, self).setUp()
self.stubs.Set(network_api.API, 'get_instance_nw_info',
fake_get_nw_info)
fake_notifier.stub_notifier(self.stubs)
self.addCleanup(fake_notifier.reset)
self.flags(use_local=True, group='conductor')
self.flags(compute_driver='nova.virt.fake.FakeDriver',
network_manager='nova.network.manager.FlatManager')
self.compute = importutils.import_object(CONF.compute_manager)
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
def fake_show(meh, context, id, **kwargs):
return {'id': 1, 'properties': {'kernel_id': 1, 'ramdisk_id': 1}}
self.stubs.Set(nova.tests.image.fake._FakeImageService,
'show', fake_show)
fake_network.set_stub_network_methods(self.stubs)
fake_server_actions.stub_out_action_events(self.stubs)
def _create_instance(self, params=None):
"""Create a test instance."""
params = params or {}
flavor = flavors.get_flavor_by_name('m1.tiny')
sys_meta = flavors.save_flavor_info({}, flavor)
inst = {}
inst['image_ref'] = 1
inst['reservation_id'] = 'r-fakeres'
inst['user_id'] = self.user_id
inst['project_id'] = self.project_id
inst['instance_type_id'] = flavor['id']
inst['system_metadata'] = sys_meta
inst['ami_launch_index'] = 0
inst['root_gb'] = 0
inst['ephemeral_gb'] = 0
inst['info_cache'] = {'network_info': '[]'}
inst.update(params)
return db.instance_create(self.context, inst)['id']
def test_notify_usage_exists(self):
# Ensure 'exists' notification generates appropriate usage data.
instance_id = self._create_instance()
instance = objects.Instance.get_by_id(self.context, instance_id)
# Set some system metadata
sys_metadata = {'image_md_key1': 'val1',
'image_md_key2': 'val2',
'other_data': 'meow'}
instance.system_metadata.update(sys_metadata)
instance.save()
compute_utils.notify_usage_exists(
rpc.get_notifier('compute'), self.context, instance)
self.assertEqual(len(fake_notifier.NOTIFICATIONS), 1)
msg = fake_notifier.NOTIFICATIONS[0]
self.assertEqual(msg.priority, 'INFO')
self.assertEqual(msg.event_type, 'compute.instance.exists')
payload = msg.payload
self.assertEqual(payload['tenant_id'], self.project_id)
self.assertEqual(payload['user_id'], self.user_id)
self.assertEqual(payload['instance_id'], instance['uuid'])
self.assertEqual(payload['instance_type'], 'm1.tiny')
type_id = flavors.get_flavor_by_name('m1.tiny')['id']
self.assertEqual(str(payload['instance_type_id']), str(type_id))
flavor_id = flavors.get_flavor_by_name('m1.tiny')['flavorid']
self.assertEqual(str(payload['instance_flavor_id']), str(flavor_id))
for attr in ('display_name', 'created_at', 'launched_at',
'state', 'state_description',
'bandwidth', 'audit_period_beginning',
'audit_period_ending', 'image_meta'):
self.assertTrue(attr in payload,
msg="Key %s not in payload" % attr)
self.assertEqual(payload['image_meta'],
{'md_key1': 'val1', 'md_key2': 'val2'})
image_ref_url = "%s/images/1" % glance.generate_glance_url()
self.assertEqual(payload['image_ref_url'], image_ref_url)
self.compute.terminate_instance(self.context, instance, [], [])
def test_notify_usage_exists_deleted_instance(self):
# Ensure 'exists' notification generates appropriate usage data.
instance_id = self._create_instance()
instance = objects.Instance.get_by_id(self.context, instance_id,
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
# Set some system metadata
sys_metadata = {'image_md_key1': 'val1',
'image_md_key2': 'val2',
'other_data': 'meow'}
instance.system_metadata.update(sys_metadata)
instance.save()
self.compute.terminate_instance(self.context, instance, [], [])
instance = objects.Instance.get_by_id(
self.context.elevated(read_deleted='yes'), instance_id,
expected_attrs=['system_metadata'])
compute_utils.notify_usage_exists(
rpc.get_notifier('compute'), self.context, instance)
msg = fake_notifier.NOTIFICATIONS[-1]
self.assertEqual(msg.priority, 'INFO')
self.assertEqual(msg.event_type, 'compute.instance.exists')
payload = msg.payload
self.assertEqual(payload['tenant_id'], self.project_id)
self.assertEqual(payload['user_id'], self.user_id)
self.assertEqual(payload['instance_id'], instance['uuid'])
self.assertEqual(payload['instance_type'], 'm1.tiny')
type_id = flavors.get_flavor_by_name('m1.tiny')['id']
self.assertEqual(str(payload['instance_type_id']), str(type_id))
flavor_id = flavors.get_flavor_by_name('m1.tiny')['flavorid']
self.assertEqual(str(payload['instance_flavor_id']), str(flavor_id))
for attr in ('display_name', 'created_at', 'launched_at',
'state', 'state_description',
'bandwidth', 'audit_period_beginning',
'audit_period_ending', 'image_meta'):
self.assertTrue(attr in payload,
msg="Key %s not in payload" % attr)
self.assertEqual(payload['image_meta'],
{'md_key1': 'val1', 'md_key2': 'val2'})
image_ref_url = "%s/images/1" % glance.generate_glance_url()
self.assertEqual(payload['image_ref_url'], image_ref_url)
def test_notify_usage_exists_instance_not_found(self):
# Ensure 'exists' notification generates appropriate usage data.
instance_id = self._create_instance()
instance = objects.Instance.get_by_id(self.context, instance_id,
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
self.compute.terminate_instance(self.context, instance, [], [])
compute_utils.notify_usage_exists(
rpc.get_notifier('compute'), self.context, instance)
msg = fake_notifier.NOTIFICATIONS[-1]
self.assertEqual(msg.priority, 'INFO')
self.assertEqual(msg.event_type, 'compute.instance.exists')
payload = msg.payload
self.assertEqual(payload['tenant_id'], self.project_id)
self.assertEqual(payload['user_id'], self.user_id)
self.assertEqual(payload['instance_id'], instance['uuid'])
self.assertEqual(payload['instance_type'], 'm1.tiny')
type_id = flavors.get_flavor_by_name('m1.tiny')['id']
self.assertEqual(str(payload['instance_type_id']), str(type_id))
flavor_id = flavors.get_flavor_by_name('m1.tiny')['flavorid']
self.assertEqual(str(payload['instance_flavor_id']), str(flavor_id))
for attr in ('display_name', 'created_at', 'launched_at',
'state', 'state_description',
'bandwidth', 'audit_period_beginning',
'audit_period_ending', 'image_meta'):
self.assertTrue(attr in payload,
msg="Key %s not in payload" % attr)
self.assertEqual(payload['image_meta'], {})
image_ref_url = "%s/images/1" % glance.generate_glance_url()
self.assertEqual(payload['image_ref_url'], image_ref_url)
def test_notify_about_instance_usage(self):
instance_id = self._create_instance()
instance = objects.Instance.get_by_id(self.context, instance_id,
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
# Set some system metadata
sys_metadata = {'image_md_key1': 'val1',
'image_md_key2': 'val2',
'other_data': 'meow'}
instance.system_metadata.update(sys_metadata)
instance.save()
extra_usage_info = {'image_name': 'fake_name'}
compute_utils.notify_about_instance_usage(
rpc.get_notifier('compute'),
self.context, instance, 'create.start',
extra_usage_info=extra_usage_info)
self.assertEqual(len(fake_notifier.NOTIFICATIONS), 1)
msg = fake_notifier.NOTIFICATIONS[0]
self.assertEqual(msg.priority, 'INFO')
self.assertEqual(msg.event_type, 'compute.instance.create.start')
payload = msg.payload
self.assertEqual(payload['tenant_id'], self.project_id)
self.assertEqual(payload['user_id'], self.user_id)
self.assertEqual(payload['instance_id'], instance['uuid'])
self.assertEqual(payload['instance_type'], 'm1.tiny')
type_id = flavors.get_flavor_by_name('m1.tiny')['id']
self.assertEqual(str(payload['instance_type_id']), str(type_id))
flavor_id = flavors.get_flavor_by_name('m1.tiny')['flavorid']
self.assertEqual(str(payload['instance_flavor_id']), str(flavor_id))
for attr in ('display_name', 'created_at', 'launched_at',
'state', 'state_description', 'image_meta'):
self.assertTrue(attr in payload,
msg="Key %s not in payload" % attr)
self.assertEqual(payload['image_meta'],
{'md_key1': 'val1', 'md_key2': 'val2'})
self.assertEqual(payload['image_name'], 'fake_name')
image_ref_url = "%s/images/1" % glance.generate_glance_url()
self.assertEqual(payload['image_ref_url'], image_ref_url)
self.compute.terminate_instance(self.context, instance, [], [])
def test_notify_about_aggregate_update_with_id(self):
# Set aggregate payload
aggregate_payload = {'aggregate_id': 1}
compute_utils.notify_about_aggregate_update(self.context,
"create.end",
aggregate_payload)
self.assertEqual(len(fake_notifier.NOTIFICATIONS), 1)
msg = fake_notifier.NOTIFICATIONS[0]
self.assertEqual(msg.priority, 'INFO')
self.assertEqual(msg.event_type, 'aggregate.create.end')
payload = msg.payload
self.assertEqual(payload['aggregate_id'], 1)
def test_notify_about_aggregate_update_with_name(self):
# Set aggregate payload
aggregate_payload = {'name': 'fakegroup'}
compute_utils.notify_about_aggregate_update(self.context,
"create.start",
aggregate_payload)
self.assertEqual(len(fake_notifier.NOTIFICATIONS), 1)
msg = fake_notifier.NOTIFICATIONS[0]
self.assertEqual(msg.priority, 'INFO')
self.assertEqual(msg.event_type, 'aggregate.create.start')
payload = msg.payload
self.assertEqual(payload['name'], 'fakegroup')
def test_notify_about_aggregate_update_without_name_id(self):
# Set empty aggregate payload
aggregate_payload = {}
compute_utils.notify_about_aggregate_update(self.context,
"create.start",
aggregate_payload)
self.assertEqual(len(fake_notifier.NOTIFICATIONS), 0)
class ComputeGetImageMetadataTestCase(test.TestCase):
def setUp(self):
super(ComputeGetImageMetadataTestCase, self).setUp()
self.context = context.RequestContext('fake', 'fake')
self.image = {
"min_ram": 10,
"min_disk": 1,
"disk_format": "raw",
"container_format": "bare",
"properties": {},
}
self.mock_image_api = mock.Mock()
self.mock_image_api.get.return_value = self.image
self.ctx = context.RequestContext('fake', 'fake')
sys_meta = {
'image_min_ram': 10,
'image_min_disk': 1,
'image_disk_format': 'raw',
'image_container_format': 'bare',
'instance_type_id': 0,
'instance_type_name': 'm1.fake',
'instance_type_memory_mb': 10,
'instance_type_vcpus': 1,
'instance_type_root_gb': 1,
'instance_type_ephemeral_gb': 1,
'instance_type_flavorid': '0',
'instance_type_swap': 1,
'instance_type_rxtx_factor': 0.0,
'instance_type_vcpu_weight': None,
}
self.instance = fake_instance.fake_db_instance(
memory_mb=0, root_gb=0,
system_metadata=sys_meta)
@property
def instance_obj(self):
return objects.Instance._from_db_object(
self.ctx, objects.Instance(), self.instance,
expected_attrs=instance_obj.INSTANCE_DEFAULT_FIELDS)
def test_get_image_meta(self):
image_meta = compute_utils.get_image_metadata(
self.ctx, self.mock_image_api, 'fake-image', self.instance_obj)
self.image['properties'] = 'DONTCARE'
self.assertThat(self.image, matchers.DictMatches(image_meta))
def test_get_image_meta_with_image_id_none(self):
self.image['properties'] = {'fake_property': 'fake_value'}
with mock.patch.object(flavors,
"extract_flavor") as mock_extract_flavor:
with mock.patch.object(utils, "get_system_metadata_from_image"
) as mock_get_sys_metadata:
image_meta = compute_utils.get_image_metadata(
self.ctx, self.mock_image_api, None, self.instance_obj)
self.assertEqual(0, self.mock_image_api.get.call_count)
self.assertEqual(0, mock_extract_flavor.call_count)
self.assertEqual(0, mock_get_sys_metadata.call_count)
self.assertNotIn('fake_property', image_meta['properties'])
        # Check that mock_image_api.get IS called when image_id is 0,
        # since 0 is a valid image ID
image_meta = compute_utils.get_image_metadata(self.ctx,
self.mock_image_api,
0, self.instance_obj)
self.assertEqual(1, self.mock_image_api.get.call_count)
self.assertIn('fake_property', image_meta['properties'])
def _test_get_image_meta_exception(self, error):
self.mock_image_api.get.side_effect = error
image_meta = compute_utils.get_image_metadata(
self.ctx, self.mock_image_api, 'fake-image', self.instance_obj)
self.image['properties'] = 'DONTCARE'
# NOTE(danms): The trip through system_metadata will stringify things
for key in self.image:
self.image[key] = str(self.image[key])
self.assertThat(self.image, matchers.DictMatches(image_meta))
def test_get_image_meta_no_image(self):
error = exception.ImageNotFound(image_id='fake-image')
self._test_get_image_meta_exception(error)
def test_get_image_meta_not_authorized(self):
error = exception.ImageNotAuthorized(image_id='fake-image')
self._test_get_image_meta_exception(error)
def test_get_image_meta_bad_request(self):
error = exception.Invalid()
self._test_get_image_meta_exception(error)
def test_get_image_meta_unexpected_exception(self):
error = test.TestingException()
with testtools.ExpectedException(test.TestingException):
self._test_get_image_meta_exception(error)
def test_get_image_meta_no_image_system_meta(self):
        for k in list(self.instance['system_metadata'].keys()):
            if k.startswith('image_'):
                del self.instance['system_metadata'][k]
image_meta = compute_utils.get_image_metadata(
self.ctx, self.mock_image_api, 'fake-image', self.instance_obj)
self.image['properties'] = 'DONTCARE'
self.assertThat(self.image, matchers.DictMatches(image_meta))
def test_get_image_meta_no_image_no_image_system_meta(self):
e = exception.ImageNotFound(image_id='fake-image')
self.mock_image_api.get.side_effect = e
        for k in list(self.instance['system_metadata'].keys()):
            if k.startswith('image_'):
                del self.instance['system_metadata'][k]
image_meta = compute_utils.get_image_metadata(
self.ctx, self.mock_image_api, 'fake-image', self.instance_obj)
expected = {'properties': 'DONTCARE'}
self.assertThat(expected, matchers.DictMatches(image_meta))
class ComputeUtilsGetValFromSysMetadata(test.TestCase):
def test_get_value_from_system_metadata(self):
instance = fake_instance.fake_instance_obj('fake-context')
system_meta = {'int_val': 1,
'int_string': '2',
'not_int': 'Nope'}
instance.system_metadata = system_meta
result = compute_utils.get_value_from_system_metadata(
instance, 'int_val', int, 0)
self.assertEqual(1, result)
result = compute_utils.get_value_from_system_metadata(
instance, 'int_string', int, 0)
self.assertEqual(2, result)
result = compute_utils.get_value_from_system_metadata(
instance, 'not_int', int, 0)
self.assertEqual(0, result)
class ComputeUtilsGetNWInfo(test.TestCase):
def test_instance_object_none_info_cache(self):
inst = fake_instance.fake_instance_obj('fake-context',
expected_attrs=['info_cache'])
self.assertIsNone(inst.info_cache)
result = compute_utils.get_nw_info_for_instance(inst)
self.assertEqual(jsonutils.dumps([]), result.json())
def test_instance_dict_none_info_cache(self):
inst = fake_instance.fake_db_instance(info_cache=None)
self.assertIsNone(inst['info_cache'])
result = compute_utils.get_nw_info_for_instance(inst)
self.assertEqual(jsonutils.dumps([]), result.json())
class ComputeUtilsGetRebootTypes(test.TestCase):
def setUp(self):
super(ComputeUtilsGetRebootTypes, self).setUp()
self.context = context.RequestContext('fake', 'fake')
def test_get_reboot_type_started_soft(self):
reboot_type = compute_utils.get_reboot_type(task_states.REBOOT_STARTED,
power_state.RUNNING)
self.assertEqual(reboot_type, 'SOFT')
def test_get_reboot_type_pending_soft(self):
reboot_type = compute_utils.get_reboot_type(task_states.REBOOT_PENDING,
power_state.RUNNING)
self.assertEqual(reboot_type, 'SOFT')
def test_get_reboot_type_hard(self):
reboot_type = compute_utils.get_reboot_type('foo', power_state.RUNNING)
self.assertEqual(reboot_type, 'HARD')
def test_get_reboot_not_running_hard(self):
reboot_type = compute_utils.get_reboot_type('foo', 'bar')
self.assertEqual(reboot_type, 'HARD')
class ComputeUtilsPeriodicTaskSpacingWarning(test.NoDBTestCase):
@mock.patch.object(compute_utils, 'LOG')
def test_periodic_task_spacing_warning_no_op(self, mock_log):
@compute_utils.periodic_task_spacing_warn("config_value")
def not_a_periodic_task():
return "something"
self.assertEqual("something", not_a_periodic_task())
self.assertFalse(mock_log.warning.called)
self.assertFalse(mock_log.warn.called)
@mock.patch.object(compute_utils, 'LOG')
def test_periodic_task_spacing_warning_nonzero_spacing(self, mock_log):
@compute_utils.periodic_task_spacing_warn("config_value")
@periodic_task.periodic_task(spacing=10)
def a_periodic_task():
return "something"
self.assertEqual("something", a_periodic_task())
self.assertFalse(mock_log.warning.called)
self.assertFalse(mock_log.warn.called)
@mock.patch.object(compute_utils, 'LOG')
def test_periodic_task_spacing_warning_zero_spacing(self, mock_log):
@compute_utils.periodic_task_spacing_warn("config_value")
@periodic_task.periodic_task(spacing=0)
def zero_spacing_periodic_task():
return "something"
self.assertEqual("something", zero_spacing_periodic_task())
mock_log.warning.assert_called_with(mock.ANY, "config_value")
@mock.patch.object(compute_utils, 'LOG')
def test_periodic_task_spacing_warning_none_spacing(self, mock_log):
@compute_utils.periodic_task_spacing_warn("config_value")
@periodic_task.periodic_task(spacing=None)
def none_spacing_periodic_task():
return "something"
self.assertEqual("something", none_spacing_periodic_task())
mock_log.warning.assert_called_with(mock.ANY, "config_value")
@mock.patch.object(compute_utils, 'LOG')
def test_periodic_task_spacing_warning_default_spacing(self, mock_log):
@compute_utils.periodic_task_spacing_warn("config_value")
@periodic_task.periodic_task
def default_spacing_periodic_task():
return "something"
self.assertEqual("something", default_spacing_periodic_task())
mock_log.warning.assert_called_with(mock.ANY, "config_value")
|
angdraug/nova
|
nova/tests/compute/test_compute_utils.py
|
Python
|
apache-2.0
| 37,999
|
# coding: utf-8
from django.db import models
class TipoDeGestion(models.Model):
nombre = models.CharField(max_length=255)
def __unicode__(self):
return u"TipoDeGestion: " + self.nombre
class Meta:
db_table = 'tiposDeGestion'
verbose_name_plural = 'tiposDeGestion'
class JSONAPIMeta:
resource_name = "tipos-de-gestion"
|
Dte-ba/suite-backend
|
escuelas/models/tipo_de_gestion.py
|
Python
|
gpl-3.0
| 370
|
from flask import render_template, request, redirect, url_for, abort, flash,\
make_response
from flask_login import login_required, current_user
from bson.objectid import ObjectId
from . import main
from .forms import EditProfileForm, EditProfileAdminForm, PostForm
from .. import db
from ..paginations import PaginationPosts, PaginationUserPosts,\
PaginationFollowers, PaginationFollowing
from ..general import clean_tags, gravatar
from ..models import UserUtl, Permission, Post
from ..decorators import admin_required, permission_required
@main.route('/', methods=['GET', 'POST'])
def index():
"""
under pagination, a suffix "?page=<page_num>" is added when directing to
other pages
the choice of showing all or followed users' posts is stored in cookie
'show_followed_posts_cookie'. Cookies are stored in the request object as a
'request.cookies' dictionary.
cookies are set in two new routes:
main.show_followed_posts()
main.show_all_posts()
these two new routes are added to index.html template, and when clicked
they will set the cookie 'show_followed_posts_cookie' to a proper value and
redirect back to home page to display relating content based on the cookie:
('show_followed_posts_cookie' is False by default and index.html is
rendered, by displaying all posts)
1. user click on a tab and invoke a certain route, for example the
'main.show_followed_posts'
2. the route invoked route 'main.show_followed_posts' set the cookie
'show_followed_posts_cookie' to True, then redirect back to template
3. the index.html is redirected with cookie of True, and shows only
followers' posts
"""
form = PostForm()
if current_user.can(Permission.WRITE_ARTICLES) and form.validate_on_submit():
Post(body=form.body.data, user_id=current_user.id,
author_avatar=gravatar(current_user.avatar_hash, size=30)).\
add_new_post()
return redirect(url_for('main.index'))
current_page = request.args.get('page', 1, type=int)
show_followed_posts_cookie = False
if current_user.is_authenticated:
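        # cookie values are strings: missing/'' -> False, '1' -> True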
show_followed_posts_cookie = \
bool(request.cookies.get('show_followed_posts_cookie', ''))
if show_followed_posts_cookie:
pagination = PaginationPosts(current_page, show_followed_posts_cookie=1)
else:
pagination = PaginationPosts(current_page, show_followed_posts_cookie=0)
posts = pagination.items
return render_template('index.html', form=form, posts=posts,
Permission=Permission, pagination=pagination,
show_followed_posts_cookie=show_followed_posts_cookie)
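# A minimal standalone sketch of the cookie-toggle pattern documented in the
# index() docstring above. Illustrative only: the `demo`/`_demo_*` names are
# hypothetical and not part of this app.
#
#     from flask import Flask, make_response, redirect, request, url_for
#
#     demo = Flask(__name__)
#
#     @demo.route('/')
#     def _demo_index():
#         # cookie values are strings: missing/'' -> False, '1' -> True
#         flag = bool(request.cookies.get('demo_flag', ''))
#         return 'flag is %s' % flag
#
#     @demo.route('/toggle')
#     def _demo_toggle():
#         resp = make_response(redirect(url_for('_demo_index')))
#         resp.set_cookie('demo_flag', '1', max_age=30 * 24 * 60 * 60)
#         return resp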
@main.route('/show_all_posts')
@login_required
def show_all_posts():
"""
cookies can be set ONLY on a response object, and route itself need create
a response object via 'make_response' + a redirection, then use set_cookie
to config <cookie_name>, <cookie_value> and <max_age>
"""
resp = make_response(redirect(url_for('main.index')))
resp.set_cookie('show_followed_posts_cookie', '', max_age=30*24*60*60)
return resp
@main.route('/show_followed_posts')
@login_required
def show_followed_posts():
resp = make_response(redirect(url_for('main.index')))
resp.set_cookie('show_followed_posts_cookie', '1', max_age=30*24*60*60)
return resp
@main.route('/post/<id>')
def post(id):
post = db.Post.find_one({'_id': ObjectId(id)})
if not post:
abort(404)
return render_template('post.html', posts=[post])
@main.route('/create_post', methods=['GET', 'POST'])
@login_required
def create_post():
    # Minimal implementation (the original body was the placeholder `a = 1`):
    # mirrors the inline post handling in index() and reuses the edit_post
    # form template as a simple standalone form page.
    form = PostForm()
    if current_user.can(Permission.WRITE_ARTICLES) and form.validate_on_submit():
        Post(body=form.body.data, user_id=current_user.id,
             author_avatar=gravatar(current_user.avatar_hash, size=30)).\
            add_new_post()
        return redirect(url_for('main.index'))
    return render_template('/main/edit_post.html', form=form)
@main.route('/edit_post/<id>', methods=['GET', 'POST'])
@login_required
def edit_post(id):
post = db.Post.find_one({'_id': ObjectId(id)})
if not post:
abort(404)
if current_user.id != post.get('author_id')\
and not current_user.can(Permission.ADMIN):
abort(403)
form = PostForm()
if form.validate_on_submit():
db.Post.update({'_id': ObjectId(id)}, {'$set':
{'body': form.body.data,
'body_html': clean_tags(form.body.data)}})
flash('The post has been updated.')
return redirect(url_for('main.post', id=str(post.get('_id'))))
form.body.data = post.get('body')
return render_template('/main/edit_post.html', form=form)
@main.route('/delete_post/<id>', methods=['GET', 'POST'])
@login_required
def delete_post(id):
post = db.Post.find_one({'_id': ObjectId(id)})
if not post:
abort(404)
if current_user.id != post.get('author_id')\
and not current_user.can(Permission.ADMIN):
abort(403)
db.Post.delete_one({'_id': ObjectId(id)})
flash('The post has been deleted.')
return redirect(url_for('main.index'))
@main.route('/user_profile/<id>')
def user(id):
user_dict = db.User.find_one({'_id': ObjectId(id)})
if not user_dict:
return render_template('/errors/missing_user.html')
user_utl = UserUtl(user_dict)
current_page = request.args.get('page', 1, type=int)
show_posts_cookie = True
show_followers_cookie = False
show_following_cookie = False
if current_user.is_authenticated:
show_posts_cookie = bool(request.cookies.get('show_posts_cookie', ''))
show_followers_cookie = \
bool(request.cookies.get('show_followers_cookie', ''))
show_following_cookie = \
bool(request.cookies.get('show_following_cookie', ''))
if (not show_posts_cookie and not show_followers_cookie and not
show_following_cookie) or not current_user.is_authenticated:
show_posts_cookie = True
show_followers_cookie = False
show_following_cookie = False
if show_posts_cookie:
pagination = PaginationUserPosts(id, current_page)
posts = pagination.items
followers = user_dict.get('followers')
following = user_dict.get('following')
total_count = pagination.total_count
return render_template('/main/user_profile.html', user_utl=user_utl,
posts=posts, pagination=pagination,
total_count=total_count,
Permission=Permission, followers=followers,
following=following,
show_posts_cookie=show_posts_cookie,
show_followers_cookie=show_followers_cookie,
show_following_cookie=show_following_cookie)
if show_followers_cookie:
pagination = PaginationFollowers(id, current_page)
followers = pagination.items
following = user_dict.get('following')
total_count = pagination.total_count
return render_template('/main/user_profile.html', user_utl=user_utl,
pagination=pagination,
total_count=total_count,
Permission=Permission, followers=followers,
following=following,
show_posts_cookie=show_posts_cookie,
show_followers_cookie=show_followers_cookie,
show_following_cookie=show_following_cookie)
if show_following_cookie:
pagination = PaginationFollowing(id, current_page)
followers = user_dict.get('followers')
following = pagination.items
total_count = pagination.total_count
return render_template('/main/user_profile.html', user_utl=user_utl,
pagination=pagination,
total_count=total_count,
Permission=Permission, followers=followers,
following=following,
show_posts_cookie=show_posts_cookie,
show_followers_cookie=show_followers_cookie,
show_following_cookie=show_following_cookie)
@main.route('/show_user_posts/<id>')
@login_required
def show_user_posts(id):
resp = make_response(redirect(url_for('main.user', id=id)))
resp.set_cookie('show_posts_cookie', '1', max_age=30*24*60*60)
resp.set_cookie('show_followers_cookie', '', max_age=30*24*60*60)
resp.set_cookie('show_following_cookie', '', max_age=30*24*60*60)
return resp
@main.route('/show_followers/<id>')
@login_required
def show_followers(id):
resp = make_response(redirect(url_for('main.user', id=id)))
resp.set_cookie('show_posts_cookie', '', max_age=30*24*60*60)
resp.set_cookie('show_followers_cookie', '1', max_age=30*24*60*60)
resp.set_cookie('show_following_cookie', '', max_age=30*24*60*60)
return resp
@main.route('/show_following/<id>')
@login_required
def show_following(id):
resp = make_response(redirect(url_for('main.user', id=id)))
resp.set_cookie('show_posts_cookie', '', max_age=30*24*60*60)
resp.set_cookie('show_followers_cookie', '', max_age=30*24*60*60)
resp.set_cookie('show_following_cookie', '1', max_age=30*24*60*60)
return resp
@main.route('/edit_profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm()
if form.validate_on_submit():
db.User.update({'email': current_user.email},
{'$set': {'name': form.name.data,
'location': form.location.data,
'about_me': form.about_me.data}})
flash('Your profile has been updated.')
return redirect(url_for('main.user', id=current_user.id))
form.name.data = current_user.name
form.location.data = current_user.location
form.about_me.data = current_user.about_me
return render_template('/main/edit_profile.html', form=form)
@main.route('/admin_edit_profile/<id>', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_profile_admin(id):
user_dict = db.User.find_one({'_id': ObjectId(id)})
if not user_dict:
abort(404)
user_utl = UserUtl(user_dict)
form = EditProfileAdminForm(user=user_utl)
if form.validate_on_submit():
db.User.update({'email': user_dict.get('email')},
{'$set': {'email': form.email.data,
'username': form.username.data,
'is_confirmed': form.is_confirmed.data,
'role': form.role.data,
'name': form.name.data,
'location': form.location.data,
'about_me': form.about_me.data}})
flash('The profile has been updated.')
return redirect(url_for('main.user', id=user_utl.id))
form.email.data = user_utl.email
form.username.data = user_utl.username
form.is_confirmed.data = user_utl.is_confirmed
form.role.data = user_utl.role.type
form.name.data = user_utl.name
form.location.data = user_utl.location
form.about_me.data = user_utl.about_me
return render_template('/main/edit_profile.html', form=form)
@main.route('/follow/<id>')
@login_required
@permission_required(Permission.FOLLOW)
def follow(id):
"""
follow the target user <id>
"""
target_user = db.User.find_one({'_id': ObjectId(id)})
if not target_user:
abort(404)
if current_user.following and id in current_user.following:
flash('You are already following this user.')
return redirect(url_for('main.user', id=id))
current_user.follow(id)
flash('You are now following this user.')
return redirect(url_for('main.user', id=id))
@main.route('/unfollow/<id>')
@login_required
@permission_required(Permission.FOLLOW)
def unfollow(id):
"""
unfollow the target user <id>
"""
user = db.User.find_one({'_id': ObjectId(id)})
if not user:
abort(404)
if id not in current_user.following:
flash('You are not following this user.')
return redirect(url_for('main.user', id=id))
current_user.unfollow(id)
flash('You unfollow this user successfully.')
return redirect(url_for('main.user', id=id))
@main.route('/followers/<id>')
@login_required
def followers(id):
"""
list target user's followers, <id> can be a user or current_user
"""
user_dict = db.User.find_one({'_id': ObjectId(id)})
if not user_dict:
abort(404)
current_page = request.args.get('page', 1, type=int)
pagination = PaginationFollowers(id, current_page)
followers = pagination.items
return render_template('/main/followers.html', user_dict=user_dict,
pagination=pagination, followers=followers,
current_user_id=str(user_dict.get('_id')))
@main.route('/following/<id>')
@login_required
def following(id):
"""
list target user's followings, <id> can be a user or current_user
"""
user_dict = db.User.find_one({'_id': ObjectId(id)})
if not user_dict:
abort(404)
current_page = request.args.get('page', 1, type=int)
pagination = PaginationFollowing(id, current_page)
following = pagination.items
return render_template('/main/following.html', user_dict=user_dict,
pagination=pagination, following=following,
current_user_id=str(user_dict.get('_id')))
|
nonemaw/Flask_nonemaw
|
app/main/views.py
|
Python
|
mit
| 13,717
|
import numpy as np
import pywt
from scipy.misc import imresize
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
X_L = 10
L = 14
N_BATCH = 50
OBS_SIZE = 30
# ---------------------------- helpers
def vectorize(coords):
retX, retY = np.zeros([L]), np.zeros([L])
retX[coords[0]] = 1.0
retY[coords[1]] = 1.0
return retX, retY
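# e.g. (illustrative): vectorize((2, 5)) returns two length-L one-hot vectors
# with a 1.0 at index 2 (x) and at index 5 (y).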
# show the dimensions of a data object (a nested list or a tensor)
def show_dim(lst1):
if hasattr(lst1, '__len__') and len(lst1) > 0:
return [len(lst1), show_dim(lst1[0])]
else:
try:
return lst1.get_shape()
except:
try:
return lst1.shape
except:
return type(lst1)
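# e.g. (illustrative, Python 2 repr):
#   show_dim([[1, 2, 3], [4, 5, 6]]) -> [2, [3, <type 'int'>]]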
# -------------------------------------- making the datas
# assume X is already a 2D matrix
def mk_query(X):
avg = np.median(X)
X = X + avg
def query(O):
Ox, Oy = O
if X[Ox][Oy] > 0.0:
return [1.0, 0.0]
else:
return [0.0, 1.0]
return query
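# e.g. (illustrative): q = mk_query(img); q((0, 0)) is [1.0, 0.0] when the
# median-shifted pixel at (0, 0) is positive, and [0.0, 1.0] otherwise.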
def sample_coord():
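    # NOTE: np.random.randint(0, L) already returns values in [0, L), so the
    # bounds check below always passes and the recursive retry is unreachable.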
Ox, Oy = np.random.randint(0,L), np.random.randint(0,L)
if 0 <= Ox < L:
if 0 <= Oy < L:
return Ox, Oy
return sample_coord()
def gen_O(X):
query = mk_query(X)
Ox, Oy = sample_coord()
O = (Ox, Oy)
return O, query(O)
def get_img_class():
img, _x = mnist.train.next_batch(1)
img = np.reshape(img[0], [28, 28])
img = imresize(img, (L,L)) / 255.0
A,(B,C,D) = pywt.dwt2(img, 'haar')
img = np.reshape(np.array([A,B,C,D]), [L, L])
return img, _x
def gen_data():
x = []
obs_x = [[] for i in range(OBS_SIZE)]
obs_y = [[] for i in range(OBS_SIZE)]
obs_tfs = [[] for i in range(OBS_SIZE)]
new_ob_x = []
new_ob_y = []
new_ob_tf = []
imgs = []
for bb in range(N_BATCH):
# generate a hidden variable X
# get a single thing out
img, _x = get_img_class()
imgs.append(img)
# add to x
x.append(_x[0])
# generate new observation
_new_ob_coord, _new_ob_lab = gen_O(img)
_new_ob_x, _new_ob_y = vectorize(_new_ob_coord)
new_ob_x.append(_new_ob_x)
new_ob_y.append(_new_ob_y)
new_ob_tf.append(_new_ob_lab)
# generate observations for this hidden variable x
for ob_idx in range(OBS_SIZE):
_ob_coord, _ob_lab = gen_O(img)
_ob_x, _ob_y = vectorize(_ob_coord)
obs_x[ob_idx].append(_ob_x)
obs_y[ob_idx].append(_ob_y)
obs_tfs[ob_idx].append(_ob_lab)
return np.array(x, np.float32),\
np.array(obs_x, np.float32),\
np.array(obs_y, np.float32),\
np.array(obs_tfs, np.float32),\
np.array(new_ob_x, np.float32),\
np.array(new_ob_y, np.float32),\
np.array(new_ob_tf, np.float32), imgs
|
evanthebouncy/nnhmm
|
mnist_haar/data.py
|
Python
|
mit
| 2,672
|
from datetime import date
import pytest
from django.core.urlresolvers import reverse
from django.utils import translation
from ..views import Index, ByTag
from ..models import Content
from .factories import ContentFactory
pytestmark = pytest.mark.django_db
def test_anonymous_should_access_index_page(app):
assert app.get(reverse('blog:index'), status=200)
def test_only_published_should_be_in_index(app, published, draft, deleted,
published_in_the_future):
response = app.get(reverse('blog:index'))
assert published.title in response.content
assert draft.title not in response.content
assert deleted.title not in response.content
assert published_in_the_future.title not in response.content
def test_index_page_is_paginated(app, monkeypatch):
monkeypatch.setattr(Index, 'paginate_by', 2)
ContentFactory.create_batch(size=4, status=Content.PUBLISHED)
response = app.get(reverse('blog:index'))
assert response.pyquery.find('.pagination')
assert response.pyquery.find('.next')
assert not response.pyquery.find('.previous')
response = app.get(reverse('blog:index') + '?page=2')
assert response.pyquery.find('.pagination')
assert not response.pyquery.find('.next')
assert response.pyquery.find('.previous')
    app.get(reverse('blog:index') + '?page=3', status=404)
def test_everyone_should_access_published_content(app, published):
assert app.get(reverse('blog:content_detail',
kwargs={'pk': published.pk}), status=200)
def test_anonymous_should_not_access_draft_content(app, draft):
assert app.get(reverse('blog:content_detail', kwargs={'pk': draft.pk}),
status=404)
def test_non_staff_should_not_access_draft_content(loggedapp, draft):
assert loggedapp.get(reverse('blog:content_detail',
kwargs={'pk': draft.pk}), status=404)
def test_staff_should_access_draft_content(staffapp, draft):
assert staffapp.get(reverse('blog:content_detail',
kwargs={'pk': draft.pk}), status=200)
def test_anonymous_should_not_access_published_in_the_future_content(
app,
published_in_the_future):
assert app.get(
reverse('blog:content_detail',
kwargs={'pk': published_in_the_future.pk}), status=404)
def test_non_staff_should_not_access_published_in_the_future_content(
loggedapp,
published_in_the_future):
assert loggedapp.get(
reverse('blog:content_detail',
kwargs={'pk': published_in_the_future.pk}), status=404)
def test_staff_should_access_published_in_the_future_content(
staffapp,
published_in_the_future):
assert staffapp.get(
reverse('blog:content_detail',
kwargs={'pk': published_in_the_future.pk}), status=200)
def test_anonymous_should_not_access_deleted_page(app, deleted):
assert app.get(reverse('blog:content_detail', kwargs={'pk': deleted.pk}),
status=404)
def test_non_staff_should_not_access_delete_page(loggedapp, deleted):
assert loggedapp.get(reverse('blog:content_detail',
kwargs={'pk': deleted.pk}), status=404)
def test_staff_should_access_deleted_content(staffapp, deleted):
assert staffapp.get(reverse('blog:content_detail',
kwargs={'pk': deleted.pk}), status=200)
def test_anonymous_should_not_access_edit_page(app, published):
assert app.get(reverse('blog:content_update',
kwargs={'pk': published.pk}), status=302)
def test_non_staff_should_not_access_edit_page(loggedapp, published):
assert loggedapp.get(reverse('blog:content_update',
kwargs={'pk': published.pk}), status=302)
def test_staff_should_access_published_edit_page(staffapp, published):
assert staffapp.get(reverse('blog:content_update',
kwargs={'pk': published.pk}), status=200)
def test_staff_should_access_draft_edit_page(staffapp, draft):
assert staffapp.get(reverse('blog:content_update',
kwargs={'pk': draft.pk}), status=200)
def test_staff_should_access_deleted_edit_page(staffapp, deleted):
assert staffapp.get(reverse('blog:content_update',
kwargs={'pk': deleted.pk}), status=200)
def test_staff_can_edit_published_content(staffapp, published):
url = reverse('blog:content_update', kwargs={'pk': published.pk})
form = staffapp.get(url).forms['model_form']
title = "New title"
assert Content.objects.get(pk=published.pk).title != title
form['title'] = title
form.submit().follow()
assert Content.objects.get(pk=published.pk).title == title
def test_staff_can_edit_draft_content(staffapp, draft):
url = reverse('blog:content_update', kwargs={'pk': draft.pk})
form = staffapp.get(url).forms['model_form']
title = "New title"
assert Content.objects.get(pk=draft.pk).title != title
form['title'] = title
form.submit().follow()
assert Content.objects.get(pk=draft.pk).title == title
def test_staff_can_edit_deleted_content(staffapp, deleted):
url = reverse('blog:content_update', kwargs={'pk': deleted.pk})
form = staffapp.get(url).forms['model_form']
title = "New title"
assert Content.objects.get(pk=deleted.pk).title != title
form['title'] = title
form.submit().follow()
assert Content.objects.get(pk=deleted.pk).title == title
def test_published_at_is_YMD_formatted_even_in_other_locale(staffapp,
published):
published.published_at = date(2015, 1, 7)
published.save()
translation.activate('fr')
url = reverse('blog:content_update', kwargs={'pk': published.pk})
form = staffapp.get(url).forms['model_form']
assert form['published_at'].value == '2015-01-07'
translation.deactivate()
def test_published_at_can_be_still_set_the_French_way(staffapp, published):
# We force the input at load time, but we should still accept other format
# at save.
translation.activate('fr')
url = reverse('blog:content_update', kwargs={'pk': published.pk})
form = staffapp.get(url).forms['model_form']
form['published_at'] = '11/01/2015'
form.submit().follow()
assert Content.objects.count()
assert Content.objects.first().published_at.date() == date(2015, 1, 11)
translation.deactivate()
def test_can_create_content_without_image(staffapp):
assert not Content.objects.count()
form = staffapp.get(reverse('blog:content_create')).forms['model_form']
form['title'] = 'my content title'
form['summary'] = 'my content summary'
form['text'] = 'my content text'
form['author'] = staffapp.user.pk
form['published_at'] = '2014-12-10'
form.submit().follow()
assert Content.objects.count()
def test_by_tag_page_should_be_filtered_by_tag(app):
plane = ContentFactory(status=Content.PUBLISHED, tags=['plane'])
boat = ContentFactory(status=Content.PUBLISHED, tags=['boat'])
response = app.get(reverse('blog:by_tag', kwargs={'tag': 'plane'}))
assert plane.title in response.content
assert boat.title not in response.content
def test_by_tag_page_is_paginated(app, monkeypatch):
monkeypatch.setattr(ByTag, 'paginate_by', 2)
ContentFactory.create_batch(size=4, status=Content.PUBLISHED,
tags=['plane'])
url = reverse('blog:by_tag', kwargs={'tag': 'plane'})
response = app.get(url)
assert response.pyquery.find('.pagination')
assert response.pyquery.find('.next')
assert not response.pyquery.find('.previous')
response = app.get(url + '?page=2')
assert response.pyquery.find('.pagination')
assert not response.pyquery.find('.next')
assert response.pyquery.find('.previous')
    app.get(url + '?page=3', status=404)
def test_can_create_content_with_tags(staffapp):
url = reverse('blog:content_create')
form = staffapp.get(url).forms['model_form']
form['title'] = 'my content title'
form['summary'] = 'my content summary'
form['text'] = 'my content text'
form['author'] = staffapp.user.pk
form['published_at'] = '2014-12-10'
form['tags'] = 'tag1, tag2'
form.submit().follow()
content = Content.objects.last()
assert content.tags.count() == 2
assert content.tags.first().name == 'tag1'
def test_can_update_content_tags(staffapp, published):
assert published.tags.count() == 0
url = reverse('blog:content_update', kwargs={'pk': published.pk})
form = staffapp.get(url).forms['model_form']
form['tags'] = 'tag1, tag2'
form.submit().follow()
content = Content.objects.get(pk=published.pk)
assert content.tags.count() == 2
assert content.tags.first().name == 'tag1'
|
Lcaracol/ideasbox.lan
|
ideasbox/blog/tests/test_views.py
|
Python
|
mit
| 8,940
|
# -*- coding: utf-8 -*-
##############################################################################
#
# This module copyright :
# (c) 2016 VMCloud Solution (http://vmcloudsolution.pe)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'POS lock screen',
'version': '0.1',
'category': 'Point Of Sale',
'description': """
""",
'author': 'VMCloud Solution',
'website': 'http://vmcloudsolution.pe',
'depends': ['point_of_sale'],
'data': [
'security/security.xml',
'security/ir.model.access.csv',
'pos_lock_screen_view.xml',
'views/templates.xml',
],
'qweb':[
'static/src/xml/pos.xml',
],
'init_xml': [],
'update_xml': [],
'demo_xml': [],
'test': [],
'installable': True,
}
|
vmcloudsolution/odoo-generico
|
pos_lock_screen/__openerp__.py
|
Python
|
agpl-3.0
| 1,520
|
import unittest
import sys
from base_test_class import BaseTestCase
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import Select, WebDriverWait
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException
from group_test import GroupTest
class ProductTypeGroupTest(BaseTestCase):
def test_group_add_product_type_group(self):
driver = self.navigate_to_group_view()
# Open the menu to add product type groups and click the 'Add' button
driver.find_element_by_id("dropdownMenuAddProductTypeGroup").click()
driver.find_element_by_id("addProductTypeGroup").click()
# Select the product type 'Research and Development'
try:
WebDriverWait(driver, 5).until(EC.presence_of_element_located((By.ID, 'id_product_types_chosen')))
except TimeoutException:
self.fail('Timed out waiting for product types dropdown to initialize ')
driver.execute_script("document.getElementsByName('product_types')[0].style.display = 'inline'")
element = driver.find_element_by_xpath("//select[@name='product_types']")
product_type_option = element.find_elements_by_tag_name('option')[0]
Select(element).select_by_value(product_type_option.get_attribute("value"))
# Select the role 'Reader'
Select(driver.find_element_by_id("id_role")).select_by_visible_text("Reader")
# "Click" the submit button to complete the transaction
driver.find_element_by_css_selector("input.btn.btn-primary").click()
# Assert the message to determine success status
self.assertTrue(self.is_success_message_present(text='Product type groups added successfully.'))
# Query the site to determine if the member has been added
self.assertEqual(driver.find_elements_by_name("member_product_type")[0].text, "Research and Development")
self.assertEqual(driver.find_elements_by_name("member_product_type_role")[0].text, "Reader")
def test_group_edit_product_type_group(self):
driver = self.navigate_to_group_view()
# Open the menu to manage members and click the 'Edit' button
driver.find_elements_by_name("dropdownManageProductTypeGroup")[0].click()
driver.find_elements_by_name("editProductTypeGroup")[0].click()
# Select the role 'Owner'
Select(driver.find_element_by_id("id_role")).select_by_visible_text("Owner")
# "Click" the submit button to complete the transaction
driver.find_element_by_css_selector("input.btn.btn-primary").click()
# Assert the message to determine success status
self.assertTrue(self.is_success_message_present(text='Product type group updated successfully.'))
# Query the site to determine if the member has been edited
self.assertEqual(driver.find_elements_by_name("member_product_type")[0].text, "Research and Development")
self.assertEqual(driver.find_elements_by_name("member_product_type_role")[0].text, "Owner")
def test_group_delete_product_type_group(self):
driver = self.navigate_to_group_view()
# Open the menu to manage members and click the 'Delete' button
driver.find_elements_by_name("dropdownManageProductTypeGroup")[0].click()
driver.find_elements_by_name("deleteProductTypeGroup")[0].click()
# "Click" the submit button to complete the transaction
driver.find_element_by_css_selector("input.btn.btn-danger").click()
# Assert the message to determine success status
self.assertTrue(self.is_success_message_present(text='Product type group deleted successfully.'))
# Query the site to determine if the member has been deleted
self.assertFalse(driver.find_elements_by_name("member_product_type"))
def test_product_type_add_product_type_group(self):
# Login to the site. Password will have to be modified
# to match an admin password in your own container
driver = self.driver
# Navigate to the product type page
driver.get(self.base_url + "product/type")
driver.find_element_by_id("dropdownMenuProductType").click()
driver.find_element_by_partial_link_text("View").click()
# Open the menu to add groups and click the 'Add' button
driver.find_element_by_id("dropdownMenuAddProductTypeGroup").click()
driver.find_element_by_id("addProductTypeGroup").click()
# Select the group 'Group Name'
try:
WebDriverWait(driver, 5).until(EC.presence_of_element_located((By.ID, 'id_groups_chosen')))
except TimeoutException:
self.fail('Timed out waiting for groups dropdown to initialize ')
driver.execute_script("document.getElementsByName('groups')[0].style.display = 'inline'")
element = driver.find_element_by_xpath("//select[@name='groups']")
group_option = element.find_elements_by_tag_name('option')[0]
Select(element).select_by_value(group_option.get_attribute("value"))
# Select the role 'Reader'
Select(driver.find_element_by_id("id_role")).select_by_visible_text("Reader")
# "Click" the submit button to complete the transaction
driver.find_element_by_css_selector("input.btn.btn-primary").click()
# Assert the message to determine success status
self.assertTrue(self.is_success_message_present(text='Product type groups added successfully.'))
# Query the site to determine if the member has been added
self.assertEqual(driver.find_elements_by_name("product_type_group_group")[0].text, "Group Name")
self.assertEqual(driver.find_elements_by_name("product_type_group_role")[0].text, "Reader")
def test_product_type_edit_product_type_group(self):
# Login to the site. Password will have to be modified
# to match an admin password in your own container
driver = self.driver
# Navigate to the product type page
driver.get(self.base_url + "product/type")
driver.find_element_by_id("dropdownMenuProductType").click()
driver.find_element_by_partial_link_text("View").click()
# Open the menu to manage groups and click the 'Edit' button
# The first group is the group we are looking for
driver.find_elements_by_name("dropdownManageProductTypeGroup")[0].click()
driver.find_elements_by_name("editProductTypeGroup")[0].click()
# Select the role 'Maintainer'
Select(driver.find_element_by_id("id_role")).select_by_visible_text("Maintainer")
# "Click" the submit button to complete the transaction
driver.find_element_by_css_selector("input.btn.btn-primary").click()
# Assert the message to determine success status
self.assertTrue(self.is_success_message_present(text='Product type group updated successfully.'))
# Query the site to determine if the member has been edited
self.assertEqual(driver.find_elements_by_name("product_type_group_group")[0].text, "Group Name")
self.assertEqual(driver.find_elements_by_name("product_type_group_role")[0].text, "Maintainer")
def test_product_type_delete_product_type_group(self):
# Login to the site. Password will have to be modified
# to match an admin password in your own container
driver = self.driver
# Navigate to the product type page
driver.get(self.base_url + "product/type")
driver.find_element_by_id("dropdownMenuProductType").click()
driver.find_element_by_partial_link_text("View").click()
# Open the menu to manage members and click the 'Delete' button
# The second group is the group we are looking for
driver.find_elements_by_name("dropdownManageProductTypeGroup")[0].click()
driver.find_elements_by_name("deleteProductTypeGroup")[0].click()
# "Click" the submit button to complete the transaction
driver.find_element_by_css_selector("input.btn.btn-danger").click()
# Assert the message to determine success status
self.assertTrue(self.is_success_message_present(text='Product type group deleted successfully.'))
# Query the site to determine if the member has been deleted
self.assertFalse(driver.find_elements_by_name("product_type_group_group"))
def navigate_to_group_view(self):
# Login to the site. Password will have to be modified
# to match an admin password in your own container
driver = self.driver
# Navigate to group management page
driver.get(self.base_url + "group")
        # Select the previously created group to edit.
        # The group name itself is not clickable,
        # so we select the specific group by filtering the list of groups.
driver.find_element_by_id("show-filters").click()
# Insert name to filter by into name box
driver.find_element_by_id("id_name").clear()
driver.find_element_by_id("id_name").send_keys("Group Name")
# click on 'apply filter' button
driver.find_element_by_css_selector("button.btn.btn-sm.btn-primary").click()
# only the needed group is now available, proceed with opening the context menu and clicking 'Edit' button
driver.find_element_by_id("dropdownMenuGroup").click()
driver.find_element_by_id("viewGroup").click()
return driver
def suite():
suite = unittest.TestSuite()
    # Add each test to the suite to be run;
    # success and failure are reported per test
suite.addTest(BaseTestCase('test_login'))
suite.addTest(GroupTest('test_create_group'))
suite.addTest(ProductTypeGroupTest('test_group_add_product_type_group'))
suite.addTest(ProductTypeGroupTest('test_group_edit_product_type_group'))
suite.addTest(ProductTypeGroupTest('test_group_delete_product_type_group'))
suite.addTest(ProductTypeGroupTest('test_product_type_add_product_type_group'))
suite.addTest(ProductTypeGroupTest('test_product_type_edit_product_type_group'))
suite.addTest(ProductTypeGroupTest('test_product_type_delete_product_type_group'))
suite.addTest(GroupTest('test_group_edit_name_and_global_role'))
suite.addTest(GroupTest('test_group_delete'))
return suite
if __name__ == "__main__":
runner = unittest.TextTestRunner(descriptions=True, failfast=True, verbosity=2)
ret = not runner.run(suite()).wasSuccessful()
BaseTestCase.tearDownDriver()
sys.exit(ret)
|
rackerlabs/django-DefectDojo
|
tests/product_type_group_test.py
|
Python
|
bsd-3-clause
| 10,530
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# imgbase
#
# Copyright (C) 2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): Fabian Deutsch <fabiand@redhat.com>
#
import logging
log = logging.getLogger(__package__)
def init(app):
app.hooks.connect("pre-arg-parse", add_argparse)
app.hooks.connect("post-arg-parse", check_argparse)
def add_argparse(app, parser, subparsers):
layer_parser = subparsers.add_parser("layer",
help="Runtime layer handling")
layer_parser.add_argument("--add", action="store_true",
default=False, help="Add a new layer")
layer_parser.add_argument("--latest", action="store_true",
help="Get the latest layer")
layer_parser.add_argument("--current", action="store_true",
help="Get the current layer used to boot this")
layer_parser.add_argument("IMAGE", nargs="?",
help="Optional to be used with --add")
subparsers.add_parser("w",
help="Check on what layer you are")
def check_argparse(app, args):
log.debug("Operating on: %s" % app.imgbase)
if args.command == "w":
msg = "You are on %s" % app.imgbase.current_layer()
log.info(msg)
elif args.command == "layer":
if args.add:
                # FIXME we could optionally allow latest/current/specific
if args.latest:
app.imgbase.add_layer_on_latest()
elif args.IMAGE:
app.imgbase.add_layer(args.IMAGE)
else:
# current is default
app.imgbase.add_layer_on_latest()
elif args.current:
print(app.imgbase.current_layer())
elif args.latest:
print(app.imgbase.latest_layer())
# vim: sw=4 et sts=4
|
fabiand/imgbased
|
src/imgbased/plugins/layer.py
|
Python
|
gpl-2.0
| 2,481
|
#!/usr/bin/env python3
import os, os.path
import sys, shutil, subprocess
import urllib.request
import hashlib
import re
configure_args = sys.argv[1:]
host_arch = 'i686-w64-mingw32'
if configure_args and configure_args[0] == '--64':
    configure_args = configure_args[1:]
    host_arch = 'x86_64-w64-mingw32'
# the path to the MPD sources
mpd_path = os.path.dirname(os.path.dirname(sys.argv[0])) or '.'
# output directories
lib_path = os.path.abspath('lib')
tarball_path = lib_path
src_path = os.path.join(lib_path, 'src')
arch_path = os.path.join(lib_path, host_arch)
build_path = os.path.join(arch_path, 'build')
root_path = os.path.join(arch_path, 'root')
# redirect pkg-config to use our root directory instead of the default
# one on the build host
os.environ['PKG_CONFIG_LIBDIR'] = os.path.join(root_path, 'lib/pkgconfig')
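# e.g. (illustrative) a later configure run can then resolve only the
# cross-built libraries under root_path:
#   PKG_CONFIG_LIBDIR=$root_path/lib/pkgconfig pkg-config --cflags libavcodec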
gcc_toolchain = '/usr'
def select_toolchain():
global cc, cxx, ar, nm, strip, cflags, cxxflags, cppflags, ldflags, libs
target_arch = ''
cc = os.path.join(gcc_toolchain, 'bin', host_arch + '-gcc')
cxx = os.path.join(gcc_toolchain, 'bin', host_arch + '-g++')
ar = os.path.join(gcc_toolchain, 'bin', host_arch + '-ar')
nm = os.path.join(gcc_toolchain, 'bin', host_arch + '-nm')
strip = os.path.join(gcc_toolchain, 'bin', host_arch + '-strip')
cflags = '-O2 -g ' + target_arch
cxxflags = '-O2 -g ' + target_arch
cppflags = '-I' + root_path + '/include'
ldflags = '-L' + root_path + '/lib'
libs = ''
def file_md5(path):
"""Calculate the MD5 checksum of a file and return it in hexadecimal notation."""
with open(path, 'rb') as f:
m = hashlib.md5()
while True:
data = f.read(65536)
if len(data) == 0:
# end of file
return m.hexdigest()
m.update(data)
def download_tarball(url, md5):
"""Download a tarball, verify its MD5 checksum and return the local path."""
global tarball_path
os.makedirs(tarball_path, exist_ok=True)
path = os.path.join(tarball_path, os.path.basename(url))
try:
calculated_md5 = file_md5(path)
if md5 == calculated_md5: return path
os.unlink(path)
except FileNotFoundError:
pass
tmp_path = path + '.tmp'
print("download", url)
urllib.request.urlretrieve(url, tmp_path)
calculated_md5 = file_md5(tmp_path)
if calculated_md5 != md5:
os.unlink(tmp_path)
raise "MD5 mismatch"
os.rename(tmp_path, path)
return path
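# e.g. (illustrative): download_tarball(url, md5) downloads into tarball_path
# on the first call and is a cheap no-op on later calls once the local file's
# checksum matches.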
class Project:
def __init__(self, url, md5, installed, name=None, version=None,
base=None):
if base is None:
basename = os.path.basename(url)
m = re.match(r'^(.+)\.(tar(\.(gz|bz2|xz|lzma))?|zip)$', basename)
            if not m: raise ValueError("unexpected tarball name: " + basename)
self.base = m.group(1)
else:
self.base = base
if name is None or version is None:
m = re.match(r'^([-\w]+)-(\d[\d.]*[a-z]?)$', self.base)
if name is None: name = m.group(1)
if version is None: version = m.group(2)
self.name = name
self.version = version
self.url = url
self.md5 = md5
self.installed = installed
def download(self):
return download_tarball(self.url, self.md5)
def is_installed(self):
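        # Heuristic: the project counts as installed when its marker file
        # under root_path is at least as new as the downloaded tarball.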
global root_path
tarball = self.download()
installed = os.path.join(root_path, self.installed)
tarball_mtime = os.path.getmtime(tarball)
try:
return os.path.getmtime(installed) >= tarball_mtime
except FileNotFoundError:
return False
def unpack(self):
global src_path
tarball = self.download()
path = os.path.join(src_path, self.base)
try:
shutil.rmtree(path)
except FileNotFoundError:
pass
os.makedirs(src_path, exist_ok=True)
subprocess.check_call(['/bin/tar', 'xfC', tarball, src_path])
return path
def make_build_path(self):
path = os.path.join(build_path, self.base)
try:
shutil.rmtree(path)
except FileNotFoundError:
pass
os.makedirs(path, exist_ok=True)
return path
class AutotoolsProject(Project):
def __init__(self, url, md5, installed, configure_args=[],
autogen=False,
cppflags='',
**kwargs):
Project.__init__(self, url, md5, installed, **kwargs)
self.configure_args = configure_args
self.autogen = autogen
self.cppflags = cppflags
def build(self):
src = self.unpack()
if self.autogen:
subprocess.check_call(['/usr/bin/aclocal'], cwd=src)
subprocess.check_call(['/usr/bin/automake', '--add-missing', '--force-missing', '--foreign'], cwd=src)
subprocess.check_call(['/usr/bin/autoconf'], cwd=src)
subprocess.check_call(['/usr/bin/libtoolize', '--force'], cwd=src)
build = self.make_build_path()
select_toolchain()
configure = [
os.path.join(src, 'configure'),
'CC=' + cc,
'CXX=' + cxx,
'CFLAGS=' + cflags,
'CXXFLAGS=' + cxxflags,
'CPPFLAGS=' + cppflags + ' ' + self.cppflags,
'LDFLAGS=' + ldflags,
'LIBS=' + libs,
'AR=' + ar,
'STRIP=' + strip,
'--host=' + host_arch,
'--prefix=' + root_path,
'--enable-silent-rules',
] + self.configure_args
subprocess.check_call(configure, cwd=build)
subprocess.check_call(['/usr/bin/make', '--quiet', '-j12'], cwd=build)
subprocess.check_call(['/usr/bin/make', '--quiet', 'install'], cwd=build)
class ZlibProject(Project):
def __init__(self, url, md5, installed,
**kwargs):
Project.__init__(self, url, md5, installed, **kwargs)
def build(self):
src = self.unpack()
build = self.make_build_path()
select_toolchain()
subprocess.check_call(['/usr/bin/make', '--quiet',
'-f', 'win32/Makefile.gcc',
'PREFIX=' + host_arch + '-',
'-j12',
'install',
'DESTDIR=' + root_path + '/',
'INCLUDE_PATH=include',
'LIBRARY_PATH=lib',
'BINARY_PATH=bin', 'SHARED_MODE=1'],
cwd=src)
class FfmpegProject(Project):
def __init__(self, url, md5, installed, configure_args=[],
cppflags='',
**kwargs):
Project.__init__(self, url, md5, installed, **kwargs)
self.configure_args = configure_args
self.cppflags = cppflags
def build(self):
src = self.unpack()
build = self.make_build_path()
select_toolchain()
configure = [
os.path.join(src, 'configure'),
'--cc=' + cc,
'--cxx=' + cxx,
'--nm=' + nm,
'--extra-cflags=' + cflags + ' ' + cppflags + ' ' + self.cppflags,
'--extra-cxxflags=' + cxxflags + ' ' + cppflags + ' ' + self.cppflags,
'--extra-ldflags=' + ldflags,
'--extra-libs=' + libs,
'--ar=' + ar,
'--enable-cross-compile',
'--arch=x86',
'--target-os=mingw32',
'--cross-prefix=' + host_arch + '-',
'--prefix=' + root_path,
] + self.configure_args
subprocess.check_call(configure, cwd=build)
subprocess.check_call(['/usr/bin/make', '--quiet', '-j12'], cwd=build)
subprocess.check_call(['/usr/bin/make', '--quiet', 'install'], cwd=build)
class BoostProject(Project):
def __init__(self, url, md5, installed,
**kwargs):
m = re.match(r'.*/boost_(\d+)_(\d+)_(\d+)\.tar\.bz2$', url)
version = "%s.%s.%s" % (m.group(1), m.group(2), m.group(3))
Project.__init__(self, url, md5, installed,
name='boost', version=version,
**kwargs)
def build(self):
src = self.unpack()
# install the headers manually; don't build any library
# (because right now, we only use header-only libraries)
includedir = os.path.join(root_path, 'include')
for dirpath, dirnames, filenames in os.walk(os.path.join(src, 'boost')):
relpath = dirpath[len(src)+1:]
destdir = os.path.join(includedir, relpath)
            os.makedirs(destdir, exist_ok=True)
for name in filenames:
if name[-4:] == '.hpp':
shutil.copyfile(os.path.join(dirpath, name),
os.path.join(destdir, name))
# a list of third-party libraries to be used by MPD on Windows
thirdparty_libs = [
AutotoolsProject(
'http://downloads.xiph.org/releases/ogg/libogg-1.3.2.tar.xz',
'5c3a34309d8b98640827e5d0991a4015',
'lib/libogg.a',
['--disable-shared', '--enable-static'],
),
AutotoolsProject(
'http://downloads.xiph.org/releases/vorbis/libvorbis-1.3.4.tar.xz',
'55f2288055e44754275a17c9a2497391',
'lib/libvorbis.a',
['--disable-shared', '--enable-static'],
),
AutotoolsProject(
'http://downloads.xiph.org/releases/opus/opus-1.1.tar.gz',
'c5a8cf7c0b066759542bc4ca46817ac6',
'lib/libopus.a',
['--disable-shared', '--enable-static'],
),
AutotoolsProject(
'http://downloads.xiph.org/releases/flac/flac-1.3.1.tar.xz',
'b9922c9a0378c88d3e901b234f852698',
'lib/libFLAC.a',
[
'--disable-shared', '--enable-static',
'--disable-xmms-plugin', '--disable-cpplibs',
],
),
ZlibProject(
'http://zlib.net/zlib-1.2.8.tar.xz',
'28f1205d8dd2001f26fec1e8c2cebe37',
'lib/libz.a',
),
AutotoolsProject(
'ftp://ftp.mars.org/pub/mpeg/libid3tag-0.15.1b.tar.gz',
'e5808ad997ba32c498803822078748c3',
'lib/libid3tag.a',
['--disable-shared', '--enable-static'],
autogen=True,
),
FfmpegProject(
'http://ffmpeg.org/releases/ffmpeg-2.5.tar.bz2',
'4346fe710cc6bdd981f6534d2420d1ab',
'lib/libavcodec.a',
[
'--disable-shared', '--enable-static',
'--enable-gpl',
'--enable-small',
'--disable-pthreads',
'--disable-programs',
'--disable-doc',
'--disable-avdevice',
'--disable-swresample',
'--disable-swscale',
'--disable-postproc',
'--disable-avfilter',
'--disable-network',
'--disable-encoders',
'--disable-protocols',
'--disable-outdevs',
'--disable-filters',
],
),
AutotoolsProject(
'http://curl.haxx.se/download/curl-7.39.0.tar.lzma',
'e9aa6dec29920eba8ef706ea5823bad7',
'lib/libcurl.a',
[
'--disable-shared', '--enable-static',
'--disable-debug',
'--enable-http',
'--enable-ipv6',
'--disable-ftp', '--disable-file',
'--disable-ldap', '--disable-ldaps',
'--disable-rtsp', '--disable-proxy', '--disable-dict', '--disable-telnet',
'--disable-tftp', '--disable-pop3', '--disable-imap', '--disable-smtp',
'--disable-gopher',
'--disable-manual',
'--disable-threaded-resolver', '--disable-verbose', '--disable-sspi',
'--disable-crypto-auth', '--disable-ntlm-wb', '--disable-tls-srp', '--disable-cookies',
'--without-ssl', '--without-gnutls', '--without-nss', '--without-libssh2',
],
),
BoostProject(
'http://netcologne.dl.sourceforge.net/project/boost/boost/1.55.0/boost_1_55_0.tar.bz2',
'd6eef4b4cacb2183f2bf265a5a03a354',
'include/boost/version.hpp',
),
]
# build the third-party libraries
for x in thirdparty_libs:
if not x.is_installed():
x.build()
# configure and build MPD
select_toolchain()
configure = [
os.path.join(mpd_path, 'configure'),
'CC=' + cc,
'CXX=' + cxx,
'CFLAGS=' + cflags,
'CXXFLAGS=' + cxxflags,
'CPPFLAGS=' + cppflags,
'LDFLAGS=' + ldflags + ' -static',
'LIBS=' + libs,
'AR=' + ar,
'STRIP=' + strip,
'--host=' + host_arch,
'--prefix=' + root_path,
'--enable-silent-rules',
'--disable-glib',
'--disable-icu',
] + configure_args
subprocess.check_call(configure)
subprocess.check_call(['/usr/bin/make', '--quiet', '-j12'])
|
jwise/musicpd
|
win32/build.py
|
Python
|
gpl-2.0
| 12,680
|
import vtk
"""
attempt to get images mapped to the same space, but it'ts not working.
"""
reader_path = "/home/ksansom/caseFiles/mri/VWI_proj/case2/vmtk/7_AX_3D_MRA_COW.mha"
reader = vtk.vtkMetaImageReader()
reader.SetFileName(reader_path)
reader.Update()
reader_path2 = "/home/ksansom/caseFiles/mri/VWI_proj/case2/vmtk/mra_crop.mha"
reader2 = vtk.vtkMetaImageReader()
reader2.SetFileName(reader_path2)
reader2.Update()
image = vtk.vtkImageData()
image = reader.GetOutput()
origin = image.GetOrigin()
image2 = vtk.vtkImageData()
image2 = reader2.GetOutput()
origin2 = image2.GetOrigin()
"""
pre = vtk.vtkTransform()
pre.RotateZ(180)
#Reslice does all of the work
reslice = vtk.vtkImageReslice()
reslice.SetInputConnection(reader.GetOutputPort())
reslice.SetResliceTransform(pre)
reslice.SetInterpolationModeToCubic()
reslice.SetOutputSpacing(
image.GetSpacing()[0],
image.GetSpacing()[1],
image.GetSpacing()[2])
reslice.SetOutputOrigin(
image.GetOrigin()[0],
image.GetOrigin()[1],
image.GetOrigin()[2])
reslice.SetOutputExtent(image.GetExtent())
"""
image.SetOrigin(
origin[0] - origin2[0],
origin[1] - origin2[1],
origin[2] - origin2[2])
writer = vtk.vtkMetaImageWriter()
writer.SetFileName("/home/ksansom/caseFiles/mri/VWI_proj/case2/vmtk/7_AX_3D_MRA_COW_rot.mha")
writer.SetInputData(image)
writer.Write()
|
kayarre/Tools
|
vtk/get_image_transform.py
|
Python
|
bsd-2-clause
| 1,514
|
import argparse
import sys,os
#custom libraries
from mosealib import controller
from mosealib import enrichment
#Things to do:
#1. Include pfm matrix creator
def main():
#create a top-level parser
parser = argparse.ArgumentParser(description = "Runs motif scan & enrichment")
subparsers = parser.add_subparsers(help="commands help")
#create parser for 'Get Fasta'
#parser_fasta = argparse.ArgumentParser(description = "Extract Fasta file from Bed file")
    #parser_fasta = subparsers.add_parser(help="commands")
fasta_parser = subparsers.add_parser("getfasta",
help="Extract Fasta sequences from input bedfile and genome.fa")
fasta_parser.add_argument("--bedfile", "-b",
help="Coordinates to extract in bed format")
fasta_parser.add_argument("--genome", "-g", metavar="GENOME_FASTA_FILE",
help="Single Genome fasta file")
fasta_parser.add_argument("--output", "-o",
default=sys.stdout, help="output fasta file name")
fasta_parser.add_argument("--bedtoolspath", "-path", default="bedtools",
help="Bedtools location (if its not in default path)")
fasta_parser.set_defaults(func=getfasta)
# Scan Sequence by PFM & Kmers
scan_parser = subparsers.add_parser("scan",
help="Perform motif scanning on fasta sequences")
scan_parser.add_argument("--pfm", default=False,
action="store_true", help="Scan Motif PFMs file")
scan_parser.add_argument("--pfm_path",
help="Path to PFMs folder")
scan_parser.add_argument("--fmo_path","-fmopath", default="fimo",
help="Path to FIMO binary folder (if not in default path)")
scan_parser.add_argument("--kmer", default=False,
action="store_true", help="Scan Motif Kmers file")
scan_parser.add_argument("--kmer_path",
help="Path to Kmers folder")
scan_parser.add_argument("--count", default=False,
action="store_true", help="create count-Motif file")
scan_parser.add_argument("--fasta", "-fa",
help="Input Fasta file name to scan motifs")
scan_parser.add_argument("--out_dir","-odir", default="fmo",
help="Output Folder")
scan_parser.set_defaults(func=scan)
#enrichment
enrich_parser = subparsers.add_parser("enrich",
help="Perform enrichment analysis between regulated and control\
(background) set")
enrich_parser.add_argument("--reg_fa_file", default=False,
help="input file for regulated sequences in\
fasta format")
enrich_parser.add_argument("--reg_count_file", default=False,
help="input file for regulated motif counts\
file in table format(header=Motif_ids)")
enrich_parser.add_argument("--bg_fa_file", default=False,
help="input file for background(control) \
sequences in fasta format")
enrich_parser.add_argument("--bg_count_file", default=False,
help="input file for background(control) \
motif counts file in table format(header=Motif_ids)")
enrich_parser.add_argument("--out_file", default="output_MoSEA_enrichment.txt",
help="output filename for enrichment analysis")
enrich_parser.add_argument("--rand_count", metavar='int', default=100,
type=int, help="Number of times randomization performed \
for each sequence. Default:100")
enrich_parser.set_defaults(func=enrich)
# Overlap
overlap_parser = subparsers.add_parser(
"overlap", help="Compares motif lists.")
overlap_parser.add_argument("input_file")
overlap_parser.add_argument(
"--strict", "-s", default=False, action="store_true")
overlap_parser.set_defaults(func=overlap)
try:
args = parser.parse_args()
args.func(args)
    except Exception as err:
print("\nError in parsing: {}".format(err))
print(parser.format_help())
sys.exit(1)
def getfasta(args):
controller.get_fasta_sequence(args.bedtoolspath, args.bedfile, args.genome,
args.output)
print("Extracted Fasta sequence saved in: {}".format(args.output))
def scan(args):
if args.pfm:
controller.scan_motif_pfm(args.fasta, args.out_dir, args.pfm_path, args.fmo_path, args.count)
if args.kmer:
controller.scan_motif_kmer(args.fasta, args.out_dir, args.kmer_path, args.count)
#if args.count:
# controller.count_motif(args.fasta, args.pfm_path, args.out_dir)
def enrich(args):
enrichment.calculate_enrichment(args.reg_fa_file, args.reg_count_file,
args.bg_fa_file, args.bg_count_file,
args.out_file, args.rand_count)
def overlap(args):
print("Looking for overlaps!")
#print(args.strict)
if __name__ == "__main__":
main()
#pwd: /home/babita/Documents/scan_motifs
#command 1: python scripts/find_motif_overlaps.py getfasta -b mock_files/tmp_bed_file -g mock_files/hg19_genome.fa -o output/tmp_bed_file.fa
#command 2: python scripts/find_motif_overlaps.py scan -fa output/tmp_bed_file.fa -odir output/fmo/ --pfm --pfm_path mock_files/pfm/
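#command 3 (illustrative sketch only; the file names are hypothetical, flags as defined by the enrich subparser above):
#  python scripts/find_motif_overlaps.py enrich --reg_fa_file output/reg.fa --reg_count_file output/reg_counts.txt --bg_fa_file output/bg.fa --bg_count_file output/bg_counts.txt --out_file output/enrichment.txt --rand_count 100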
|
comprna/MoSEA
|
mosea.py
|
Python
|
isc
| 5,249
|
# -*- coding:utf-8 -*-
from common import http
def get_conf(request):
builder = http.ResponseBuilder()
home = [{"nodeName":"服务列表","href":"#service","nodestyle":["level1"],"children":[]},
{"nodeName":"集群信息","href":"#cluster","nodestyle":["level1"],"children":[
{"nodeName":"集群状态","href":"#cluster/status","subpage":"status","nodestyle":["level2"],"children":[]},
{"nodeName":"集群标签","href":"#cluster/tag","subpage":"tag","nodestyle":["level2"],"children":[]}
]},
{"nodeName":"配置集群","href":"#setup","nodestyle":["level1"],"children":[]}]
service = [{ "nodeName" : "Tera", "nodestyle":["level1"], "children" : [ { "nodestyle":["level2"],"subpage":"status","nodeName" : "服务状态", "children" : []}] }]
return builder.ok(data = {'home':home,'service':service}).build_json()
|
linyvxiang/galaxy
|
console/backend/src/console/conf/views.py
|
Python
|
bsd-3-clause
| 863
|
from wex import regex as r
def test_regroup():
    f = r.re_group(r'(\d+)') | list
    assert f('a1 b23') == ['1', '23']
def test_regroup_iterable():
    f = r.re_group(r'(\d+)') | list
    assert f(['a1 b23']) == ['1', '23']
def test_regroup_nested_iterable():
    f = r.re_group(r'(\d+)') | list
    assert f([['a1 b23'], 'c3']) == ['1', '23', '3']
def test_re_groupdict():
    f = r.re_groupdict(r'(?P<num>\d+)') | list
    assert f('a1 b23') == [{'num': '1'}, {'num': '23'}]
def test_re_groupdict_iterable():
    f = r.re_groupdict(r'(?P<num>\d+)') | list
    assert f(['a1 b23']) == [{'num': '1'}, {'num': '23'}]
def test_re_groupdict_nested_iterable():
    f = r.re_groupdict(r'(?P<num>\d+)') | list
    assert f([['a1 b23'], 'c3']) == [{'num': '1'}, {'num': '23'}, {'num': '3'}]
|
gilessbrown/wextracto
|
tests/test_regex.py
|
Python
|
bsd-3-clause
| 793
|
# https://graphics.stanford.edu/~seander/bithacks.html#NextBitPermutation
def selector(values, setBits):
maxBits = len(values)
def select(v):
out = []
for i in range(maxBits):
if (v & (1 << i)):
out.append(values[i])
return out
    # Gosper's hack (see the bithacks link above): enumerate, in increasing
    # order, every maxBits-wide integer with exactly setBits bits set.
    v = (2 ** setBits) - 1  # smallest such value: the lowest setBits bits set
    endState = v << (maxBits - setBits)  # largest: the highest setBits bits set
    yield select(v)
    while v != endState:
        t = (v | (v - 1)) + 1
        # "-x % (1 << maxBits)" emulates two's-complement negation at a fixed
        # width, so "x & (-x % ...)" isolates the lowest set bit as in C.
        v = t | ((((t & (-t % (1 << maxBits))) // (v & (-v % (1 << maxBits)))) >> 1) - 1)
        yield select(v)
def normalize(perm):
    # Reduce a sequence to its order pattern: each value is replaced by its
    # rank in sorted order, e.g. [14, 3, 7] -> [2, 0, 1].
    ref = sorted(perm)
    return [ref.index(x) for x in perm]
def contains_pattern(perm, patt):
if len(patt) > len(perm):
return False
for p in selector(perm, len(patt)):
if normalize(p) == patt:
return True
return False
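# Example: normalize([14, 12, 6]) == [2, 1, 0], so contains_pattern(perm,
# [2, 1, 0]) asks whether perm contains a length-3 decreasing subsequence.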
if __name__ == '__main__':
print(contains_pattern(
[14, 12, 6, 10, 0, 9, 1, 11, 13, 16, 17, 3, 7, 5, 15, 2, 4, 8],
[3, 0, 1, 2]))
    print(True)  # expected output for the pattern query above
|
asgeir/old-school-projects
|
python/verkefni2/cpattern.py
|
Python
|
mit
| 1,006
|
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Middleware that will provide Static Large Object (SLO) support.
This feature is very similar to Dynamic Large Object (DLO) support in that
it allows the user to upload many objects concurrently and afterwards
download them as a single object. It is different in that it does not rely
on eventually consistent container listings to do so. Instead, a user
defined manifest of the object segments is used.
----------------------
Uploading the Manifest
----------------------
After the user has uploaded the objects to be concatenated, a manifest is
uploaded. The request must be a PUT with the query parameter::
?multipart-manifest=put
The body of this request will be an ordered list of files in
json data format. The data to be supplied for each segment is::
path: the path to the segment (not including account)
/container/object_name
etag: the etag given back when the segment was PUT, or null
size_bytes: the size of the segment in bytes, or null
The format of the list will be::
json:
[{"path": "/cont/object",
"etag": "etagoftheobjectsegment",
"size_bytes": 1048576}, ...]
The number of object segments is limited to a configurable amount, default
1000. Each segment, except for the final one, must be at least 1 megabyte
(configurable). On upload, the middleware will head every segment passed in to
verify:
1. the segment exists (i.e. the HEAD was successful);
2. the segment meets minimum size requirements (if not the last segment);
3. if the user provided a non-null etag, the etag matches; and
4. if the user provided a non-null size_bytes, the size_bytes matches.
Note that the etag and size_bytes keys are still required; this acts as a guard
against user errors such as typos. If any of the objects fail to verify (not
found, size/etag mismatch, below minimum size) then the user will receive a 4xx
error response. If everything does match, the user will receive a 2xx response
and the SLO object is ready for downloading.
Behind the scenes, on success, a json manifest generated from the user input is
sent to object servers with an extra "X-Static-Large-Object: True" header
and a modified Content-Type. The items in this manifest will include the etag
and size_bytes for each segment, regardless of whether the client specified
them for verification. The parameter: swift_bytes=$total_size will be
appended to the existing Content-Type, where total_size is the sum of all
the included segments' size_bytes. This extra parameter will be hidden from
the user.
Manifest files can reference objects in separate containers, which will improve
concurrent upload speed. Objects can be referenced by multiple manifests. The
segments of a SLO manifest can even be other SLO manifests. Treat them as any
other object, i.e., use the Etag and Content-Length given on the PUT of the
sub-SLO in the manifest to the parent SLO.
-------------------------
Retrieving a Large Object
-------------------------
A GET request to the manifest object will return the concatenation of the
objects from the manifest much like DLO. If any of the segments from the
manifest are not found or their Etag/Content Length have changed since upload,
the connection will drop. In this case a 409 Conflict will be logged in the
proxy logs and the user will receive incomplete results. Note that this will be
enforced regardless of whether the user performed per-segment validation during
upload.
The headers from this GET or HEAD request will return the metadata attached
to the manifest object itself with some exceptions::
Content-Length: the total size of the SLO (the sum of the sizes of
the segments in the manifest)
X-Static-Large-Object: True
Etag: the etag of the SLO (generated the same way as DLO)
A GET request with the query parameter::
?multipart-manifest=get
Will return the actual manifest file itself. This is generated json and does
not match the data sent from the original multipart-manifest=put. This call's
main purpose is for debugging.
When the manifest object is uploaded you are more or less guaranteed that
every segment in the manifest exists and matches the specifications.
However, there is nothing that prevents the user from breaking the
SLO download by deleting/replacing a segment referenced in the manifest. It is
left to the user to use caution in handling the segments.
-----------------------
Deleting a Large Object
-----------------------
A DELETE request will just delete the manifest object itself.
A DELETE with a query parameter::
?multipart-manifest=delete
will delete all the segments referenced in the manifest and then the manifest
itself. The failure response will be similar to the bulk delete middleware.
------------------------
Modifying a Large Object
------------------------
PUTs / POSTs will work as expected, PUTs will just overwrite the manifest
object for example.
------------------
Container Listings
------------------
In a container listing the size listed for SLO manifest objects will be the
total_size of the concatenated segments in the manifest. The overall
X-Container-Bytes-Used for the container (and subsequently for the account)
will not reflect total_size of the manifest but the actual size of the json
data stored. The reason for this somewhat confusing discrepancy is that we want
the container listing to reflect the size of the manifest object when it is
downloaded. We do not, however, want to count the bytes-used twice (for both
the manifest and the segments it's referring to) in the container and account
metadata which can be used for stats purposes.
"""
from six.moves import range
from datetime import datetime
import mimetypes
import re
import six
from six import BytesIO
from hashlib import md5
from swift.common.exceptions import ListingIterError, SegmentError
from swift.common.swob import Request, HTTPBadRequest, HTTPServerError, \
HTTPMethodNotAllowed, HTTPRequestEntityTooLarge, HTTPLengthRequired, \
HTTPOk, HTTPPreconditionFailed, HTTPException, HTTPNotFound, \
HTTPUnauthorized, HTTPConflict, HTTPRequestedRangeNotSatisfiable,\
Response
from swift.common.utils import json, get_logger, config_true_value, \
get_valid_utf8_str, override_bytes_from_content_type, split_path, \
register_swift_info, RateLimitedIterator, quote, close_if_possible, \
closing_if_possible
from swift.common.request_helpers import SegmentedIterable
from swift.common.constraints import check_utf8, MAX_BUFFERED_SLO_SEGMENTS
from swift.common.http import HTTP_NOT_FOUND, HTTP_UNAUTHORIZED, is_success
from swift.common.wsgi import WSGIContext, make_subrequest
from swift.common.middleware.bulk import get_response_body, \
ACCEPTABLE_FORMATS, Bulk
DEFAULT_MIN_SEGMENT_SIZE = 1024 * 1024 # 1 MiB
DEFAULT_MAX_MANIFEST_SEGMENTS = 1000
DEFAULT_MAX_MANIFEST_SIZE = 1024 * 1024 * 2 # 2 MiB
def parse_input(raw_data):
"""
Given a request will parse the body and return a list of dictionaries
:raises: HTTPException on parse errors
:returns: a list of dictionaries on success
"""
try:
parsed_data = json.loads(raw_data)
except ValueError:
raise HTTPBadRequest("Manifest must be valid json.")
req_keys = set(['path', 'etag', 'size_bytes'])
try:
for seg_dict in parsed_data:
if (set(seg_dict) != req_keys or
'/' not in seg_dict['path'].lstrip('/')):
raise HTTPBadRequest('Invalid SLO Manifest File')
except (AttributeError, TypeError):
raise HTTPBadRequest('Invalid SLO Manifest File')
return parsed_data
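# Example of a body parse_input() accepts (etag and size_bytes may be null):
#
#     [{"path": "/cont/object", "etag": null, "size_bytes": null}]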
class SloPutContext(WSGIContext):
def __init__(self, slo, slo_etag):
super(SloPutContext, self).__init__(slo.app)
self.slo_etag = '"' + slo_etag.hexdigest() + '"'
def handle_slo_put(self, req, start_response):
app_resp = self._app_call(req.environ)
for i in range(len(self._response_headers)):
if self._response_headers[i][0].lower() == 'etag':
self._response_headers[i] = ('Etag', self.slo_etag)
break
start_response(self._response_status,
self._response_headers,
self._response_exc_info)
return app_resp
class SloGetContext(WSGIContext):
max_slo_recursion_depth = 10
def __init__(self, slo):
self.slo = slo
self.first_byte = None
self.last_byte = None
super(SloGetContext, self).__init__(slo.app)
def _fetch_sub_slo_segments(self, req, version, acc, con, obj):
"""
Fetch the submanifest, parse it, and return it.
Raise exception on failures.
"""
sub_req = make_subrequest(
req.environ, path='/'.join(['', version, acc, con, obj]),
method='GET',
headers={'x-auth-token': req.headers.get('x-auth-token')},
agent=('%(orig)s ' + 'SLO MultipartGET'), swift_source='SLO')
sub_resp = sub_req.get_response(self.slo.app)
if not is_success(sub_resp.status_int):
close_if_possible(sub_resp.app_iter)
raise ListingIterError(
'ERROR: while fetching %s, GET of submanifest %s '
'failed with status %d' % (req.path, sub_req.path,
sub_resp.status_int))
try:
with closing_if_possible(sub_resp.app_iter):
return json.loads(''.join(sub_resp.app_iter))
except ValueError as err:
raise ListingIterError(
'ERROR: while fetching %s, JSON-decoding of submanifest %s '
'failed with %s' % (req.path, sub_req.path, err))
def _segment_listing_iterator(self, req, version, account, segments,
recursion_depth=1):
for seg_dict in segments:
if config_true_value(seg_dict.get('sub_slo')):
override_bytes_from_content_type(seg_dict,
logger=self.slo.logger)
# We handle the range stuff here so that we can be smart about
# skipping unused submanifests. For example, if our first segment is a
# submanifest referencing 50 MiB total, but self.first_byte falls in
# the 51st MiB, then we can avoid fetching the first submanifest.
#
# If we were to make SegmentedIterable handle all the range
# calculations, we would be unable to make this optimization.
total_length = sum(int(seg['bytes']) for seg in segments)
if self.first_byte is None:
self.first_byte = 0
if self.last_byte is None:
self.last_byte = total_length - 1
for seg_dict in segments:
seg_length = int(seg_dict['bytes'])
if self.first_byte >= seg_length:
# don't need any bytes from this segment
self.first_byte = max(self.first_byte - seg_length, -1)
self.last_byte = max(self.last_byte - seg_length, -1)
continue
if self.last_byte < 0:
# no bytes are needed from this or any future segment
break
if config_true_value(seg_dict.get('sub_slo')):
# do this check here so that we can avoid fetching this last
# manifest before raising the exception
if recursion_depth >= self.max_slo_recursion_depth:
raise ListingIterError("Max recursion depth exceeded")
sub_path = get_valid_utf8_str(seg_dict['name'])
sub_cont, sub_obj = split_path(sub_path, 2, 2, True)
sub_segments = self._fetch_sub_slo_segments(
req, version, account, sub_cont, sub_obj)
for sub_seg_dict, sb, eb in self._segment_listing_iterator(
req, version, account, sub_segments,
recursion_depth=recursion_depth + 1):
yield sub_seg_dict, sb, eb
else:
                if isinstance(seg_dict['name'], six.text_type):
                    seg_dict['name'] = seg_dict['name'].encode("utf-8")
seg_length = int(seg_dict['bytes'])
yield (seg_dict,
(None if self.first_byte <= 0 else self.first_byte),
(None if self.last_byte >=
seg_length - 1 else self.last_byte))
self.first_byte = max(self.first_byte - seg_length, -1)
self.last_byte = max(self.last_byte - seg_length, -1)
def _need_to_refetch_manifest(self, req):
"""
Just because a response shows that an object is a SLO manifest does not
mean that response's body contains the entire SLO manifest. If it
doesn't, we need to make a second request to actually get the whole
thing.
Note: this assumes that X-Static-Large-Object has already been found.
"""
if req.method == 'HEAD':
return True
response_status = int(self._response_status[:3])
# These are based on etag, and the SLO's etag is almost certainly not
# the manifest object's etag. Still, it's highly likely that the
# submitted If-None-Match won't match the manifest object's etag, so
# we can avoid re-fetching the manifest if we got a successful
# response.
if ((req.if_match or req.if_none_match) and
not is_success(response_status)):
return True
if req.range and response_status in (206, 416):
content_range = ''
for header, value in self._response_headers:
if header.lower() == 'content-range':
content_range = value
break
# e.g. Content-Range: bytes 0-14289/14290
            match = re.match(r'bytes (\d+)-(\d+)/(\d+)$', content_range)
if not match:
# Malformed or missing, so we don't know what we got.
return True
first_byte, last_byte, length = [int(x) for x in match.groups()]
# If and only if we actually got back the full manifest body, then
# we can avoid re-fetching the object.
got_everything = (first_byte == 0 and last_byte == length - 1)
return not got_everything
return False
def handle_slo_get_or_head(self, req, start_response):
"""
Takes a request and a start_response callable and does the normal WSGI
thing with them. Returns an iterator suitable for sending up the WSGI
chain.
:param req: swob.Request object; is a GET or HEAD request aimed at
what may be a static large object manifest (or may not).
:param start_response: WSGI start_response callable
"""
resp_iter = self._app_call(req.environ)
# make sure this response is for a static large object manifest
for header, value in self._response_headers:
if (header.lower() == 'x-static-large-object' and
config_true_value(value)):
break
else:
# Not a static large object manifest. Just pass it through.
start_response(self._response_status,
self._response_headers,
self._response_exc_info)
return resp_iter
# Handle pass-through request for the manifest itself
if req.params.get('multipart-manifest') == 'get':
new_headers = []
for header, value in self._response_headers:
if header.lower() == 'content-type':
new_headers.append(('Content-Type',
'application/json; charset=utf-8'))
else:
new_headers.append((header, value))
self._response_headers = new_headers
start_response(self._response_status,
self._response_headers,
self._response_exc_info)
return resp_iter
if self._need_to_refetch_manifest(req):
req.environ['swift.non_client_disconnect'] = True
close_if_possible(resp_iter)
del req.environ['swift.non_client_disconnect']
get_req = make_subrequest(
req.environ, method='GET',
headers={'x-auth-token': req.headers.get('x-auth-token')},
agent=('%(orig)s ' + 'SLO MultipartGET'), swift_source='SLO')
resp_iter = self._app_call(get_req.environ)
# Any Content-Range from a manifest is almost certainly wrong for the
# full large object.
resp_headers = [(h, v) for h, v in self._response_headers
if not h.lower() == 'content-range']
response = self.get_or_head_response(
req, resp_headers, resp_iter)
return response(req.environ, start_response)
def get_or_head_response(self, req, resp_headers, resp_iter):
with closing_if_possible(resp_iter):
resp_body = ''.join(resp_iter)
try:
segments = json.loads(resp_body)
except ValueError:
segments = []
etag = md5()
content_length = 0
for seg_dict in segments:
etag.update(seg_dict['hash'])
if config_true_value(seg_dict.get('sub_slo')):
override_bytes_from_content_type(
seg_dict, logger=self.slo.logger)
content_length += int(seg_dict['bytes'])
response_headers = [(h, v) for h, v in resp_headers
if h.lower() not in ('etag', 'content-length')]
response_headers.append(('Content-Length', str(content_length)))
response_headers.append(('Etag', '"%s"' % etag.hexdigest()))
if req.method == 'HEAD':
return self._manifest_head_response(req, response_headers)
else:
return self._manifest_get_response(
req, content_length, response_headers, segments)
def _manifest_head_response(self, req, response_headers):
return HTTPOk(request=req, headers=response_headers, body='',
conditional_response=True)
def _manifest_get_response(self, req, content_length, response_headers,
segments):
self.first_byte, self.last_byte = None, None
if req.range:
byteranges = req.range.ranges_for_length(content_length)
if len(byteranges) == 0:
return HTTPRequestedRangeNotSatisfiable(request=req)
elif len(byteranges) == 1:
self.first_byte, self.last_byte = byteranges[0]
                # swob.Range.ranges_for_length returns an exclusive end
                # offset (one past the last byte), so convert it to the
                # inclusive last-byte offset used by the iterators here.
                self.last_byte -= 1
else:
req.range = None
ver, account, _junk = req.split_path(3, 3, rest_with_last=True)
plain_listing_iter = self._segment_listing_iterator(
req, ver, account, segments)
ratelimited_listing_iter = RateLimitedIterator(
plain_listing_iter,
self.slo.rate_limit_segments_per_sec,
limit_after=self.slo.rate_limit_after_segment)
# self._segment_listing_iterator gives us 3-tuples of (segment dict,
# start byte, end byte), but SegmentedIterable wants (obj path, etag,
# size, start byte, end byte), so we clean that up here
segment_listing_iter = (
("/{ver}/{acc}/{conobj}".format(
ver=ver, acc=account, conobj=seg_dict['name'].lstrip('/')),
seg_dict['hash'], int(seg_dict['bytes']),
start_byte, end_byte)
for seg_dict, start_byte, end_byte in ratelimited_listing_iter)
segmented_iter = SegmentedIterable(
req, self.slo.app, segment_listing_iter,
name=req.path, logger=self.slo.logger,
ua_suffix="SLO MultipartGET",
swift_source="SLO",
max_get_time=self.slo.max_get_time)
try:
segmented_iter.validate_first_segment()
except (ListingIterError, SegmentError):
# Copy from the SLO explanation in top of this file.
# If any of the segments from the manifest are not found or
# their Etag/Content Length no longer match the connection
# will drop. In this case a 409 Conflict will be logged in
# the proxy logs and the user will receive incomplete results.
return HTTPConflict(request=req)
response = Response(request=req, content_length=content_length,
headers=response_headers,
conditional_response=True,
app_iter=segmented_iter)
if req.range:
response.headers.pop('Etag')
return response
class StaticLargeObject(object):
"""
StaticLargeObject Middleware
See above for a full description.
The proxy logs created for any subrequests made will have swift.source set
to "SLO".
:param app: The next WSGI filter or app in the paste.deploy chain.
:param conf: The configuration dict for the middleware.
"""
def __init__(self, app, conf, min_segment_size=DEFAULT_MIN_SEGMENT_SIZE,
max_manifest_segments=DEFAULT_MAX_MANIFEST_SEGMENTS,
max_manifest_size=DEFAULT_MAX_MANIFEST_SIZE):
self.conf = conf
self.app = app
self.logger = get_logger(conf, log_route='slo')
self.max_manifest_segments = max_manifest_segments
self.max_manifest_size = max_manifest_size
self.min_segment_size = min_segment_size
self.max_get_time = int(self.conf.get('max_get_time', 86400))
self.rate_limit_after_segment = int(self.conf.get(
'rate_limit_after_segment', '10'))
self.rate_limit_segments_per_sec = int(self.conf.get(
'rate_limit_segments_per_sec', '0'))
self.bulk_deleter = Bulk(app, {}, logger=self.logger)
def handle_multipart_get_or_head(self, req, start_response):
"""
Handles the GET or HEAD of a SLO manifest.
The response body (only on GET, of course) will consist of the
concatenation of the segments.
:params req: a swob.Request with a path referencing an object
:raises: HttpException on errors
"""
return SloGetContext(self).handle_slo_get_or_head(req, start_response)
def copy_hook(self, inner_hook):
def slo_hook(source_req, source_resp, sink_req):
x_slo = source_resp.headers.get('X-Static-Large-Object')
if (config_true_value(x_slo)
and source_req.params.get('multipart-manifest') != 'get'
and 'swift.post_as_copy' not in source_req.environ):
source_resp = SloGetContext(self).get_or_head_response(
source_req, source_resp.headers.items(),
source_resp.app_iter)
return inner_hook(source_req, source_resp, sink_req)
return slo_hook
def handle_multipart_put(self, req, start_response):
"""
Will handle the PUT of a SLO manifest.
Heads every object in manifest to check if is valid and if so will
save a manifest generated from the user input. Uses WSGIContext to
call self and start_response and returns a WSGI iterator.
:params req: a swob.Request with an obj in path
:raises: HttpException on errors
"""
try:
vrs, account, container, obj = req.split_path(1, 4, True)
except ValueError:
return self.app(req.environ, start_response)
if req.content_length > self.max_manifest_size:
raise HTTPRequestEntityTooLarge(
"Manifest File > %d bytes" % self.max_manifest_size)
if req.headers.get('X-Copy-From'):
raise HTTPMethodNotAllowed(
'Multipart Manifest PUTs cannot be COPY requests')
if req.content_length is None and \
req.headers.get('transfer-encoding', '').lower() != 'chunked':
raise HTTPLengthRequired(request=req)
parsed_data = parse_input(req.body_file.read(self.max_manifest_size))
problem_segments = []
if len(parsed_data) > self.max_manifest_segments:
raise HTTPRequestEntityTooLarge(
'Number of segments must be <= %d' %
self.max_manifest_segments)
total_size = 0
out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
if not out_content_type:
out_content_type = 'text/plain'
data_for_storage = []
slo_etag = md5()
for index, seg_dict in enumerate(parsed_data):
obj_name = seg_dict['path']
            if isinstance(obj_name, six.text_type):
                obj_name = obj_name.encode('utf-8')
obj_path = '/'.join(['', vrs, account, obj_name.lstrip('/')])
if req.path == quote(obj_path):
raise HTTPConflict(
'Manifest object name "%s" '
'cannot be included in the manifest'
% obj_name)
try:
seg_size = int(seg_dict['size_bytes'])
except (ValueError, TypeError):
if seg_dict['size_bytes'] is None:
seg_size = None
else:
raise HTTPBadRequest('Invalid Manifest File')
if seg_size is not None and seg_size < self.min_segment_size and \
index < len(parsed_data) - 1:
raise HTTPBadRequest(
'Each segment, except the last, must be at least '
'%d bytes.' % self.min_segment_size)
new_env = req.environ.copy()
new_env['PATH_INFO'] = obj_path
new_env['REQUEST_METHOD'] = 'HEAD'
new_env['swift.source'] = 'SLO'
del(new_env['wsgi.input'])
del(new_env['QUERY_STRING'])
new_env['CONTENT_LENGTH'] = 0
new_env['HTTP_USER_AGENT'] = \
'%s MultipartPUT' % req.environ.get('HTTP_USER_AGENT')
head_seg_resp = \
Request.blank(obj_path, new_env).get_response(self)
if head_seg_resp.is_success:
if head_seg_resp.content_length < self.min_segment_size and \
index < len(parsed_data) - 1:
raise HTTPBadRequest(
'Each segment, except the last, must be at least '
'%d bytes.' % self.min_segment_size)
total_size += head_seg_resp.content_length
if seg_size is not None and \
seg_size != head_seg_resp.content_length:
problem_segments.append([quote(obj_name), 'Size Mismatch'])
if seg_dict['etag'] is None or \
seg_dict['etag'] == head_seg_resp.etag:
slo_etag.update(head_seg_resp.etag)
else:
problem_segments.append([quote(obj_name), 'Etag Mismatch'])
if head_seg_resp.last_modified:
last_modified = head_seg_resp.last_modified
else:
# shouldn't happen
last_modified = datetime.now()
last_modified_formatted = \
last_modified.strftime('%Y-%m-%dT%H:%M:%S.%f')
seg_data = {'name': '/' + seg_dict['path'].lstrip('/'),
'bytes': head_seg_resp.content_length,
'hash': head_seg_resp.etag,
'content_type': head_seg_resp.content_type,
'last_modified': last_modified_formatted}
if config_true_value(
head_seg_resp.headers.get('X-Static-Large-Object')):
seg_data['sub_slo'] = True
data_for_storage.append(seg_data)
else:
problem_segments.append([quote(obj_name),
head_seg_resp.status])
if problem_segments:
resp_body = get_response_body(
out_content_type, {}, problem_segments)
raise HTTPBadRequest(resp_body, content_type=out_content_type)
env = req.environ
if not env.get('CONTENT_TYPE'):
guessed_type, _junk = mimetypes.guess_type(req.path_info)
env['CONTENT_TYPE'] = guessed_type or 'application/octet-stream'
env['swift.content_type_overridden'] = True
env['CONTENT_TYPE'] += ";swift_bytes=%d" % total_size
env['HTTP_X_STATIC_LARGE_OBJECT'] = 'True'
json_data = json.dumps(data_for_storage)
if six.PY3:
json_data = json_data.encode('utf-8')
env['CONTENT_LENGTH'] = str(len(json_data))
env['wsgi.input'] = BytesIO(json_data)
slo_put_context = SloPutContext(self, slo_etag)
return slo_put_context.handle_slo_put(req, start_response)
def get_segments_to_delete_iter(self, req):
"""
A generator function to be used to delete all the segments and
sub-segments referenced in a manifest.
:params req: a swob.Request with an SLO manifest in path
:raises HTTPPreconditionFailed: on invalid UTF8 in request path
:raises HTTPBadRequest: on too many buffered sub segments and
on invalid SLO manifest path
"""
if not check_utf8(req.path_info):
raise HTTPPreconditionFailed(
request=req, body='Invalid UTF8 or contains NULL')
vrs, account, container, obj = req.split_path(4, 4, True)
segments = [{
'sub_slo': True,
'name': ('/%s/%s' % (container, obj)).decode('utf-8')}]
while segments:
if len(segments) > MAX_BUFFERED_SLO_SEGMENTS:
raise HTTPBadRequest(
'Too many buffered slo segments to delete.')
seg_data = segments.pop(0)
if seg_data.get('sub_slo'):
try:
segments.extend(
self.get_slo_segments(seg_data['name'], req))
except HTTPException as err:
# allow bulk delete response to report errors
seg_data['error'] = {'code': err.status_int,
'message': err.body}
# add manifest back to be deleted after segments
seg_data['sub_slo'] = False
segments.append(seg_data)
else:
seg_data['name'] = seg_data['name'].encode('utf-8')
yield seg_data
def get_slo_segments(self, obj_name, req):
"""
Performs a swob.Request and returns the SLO manifest's segments.
:raises HTTPServerError: on unable to load obj_name or
on unable to load the SLO manifest data.
:raises HTTPBadRequest: on not an SLO manifest
:raises HTTPNotFound: on SLO manifest not found
:returns: SLO manifest's segments
"""
vrs, account, _junk = req.split_path(2, 3, True)
new_env = req.environ.copy()
new_env['REQUEST_METHOD'] = 'GET'
del(new_env['wsgi.input'])
new_env['QUERY_STRING'] = 'multipart-manifest=get'
new_env['CONTENT_LENGTH'] = 0
new_env['HTTP_USER_AGENT'] = \
'%s MultipartDELETE' % new_env.get('HTTP_USER_AGENT')
new_env['swift.source'] = 'SLO'
new_env['PATH_INFO'] = (
'/%s/%s/%s' % (vrs, account, obj_name.lstrip('/'))
).encode('utf-8')
resp = Request.blank('', new_env).get_response(self.app)
if resp.is_success:
if config_true_value(resp.headers.get('X-Static-Large-Object')):
try:
return json.loads(resp.body)
except ValueError:
raise HTTPServerError('Unable to load SLO manifest')
else:
raise HTTPBadRequest('Not an SLO manifest')
elif resp.status_int == HTTP_NOT_FOUND:
raise HTTPNotFound('SLO manifest not found')
elif resp.status_int == HTTP_UNAUTHORIZED:
raise HTTPUnauthorized('401 Unauthorized')
else:
raise HTTPServerError('Unable to load SLO manifest or segment.')
def handle_multipart_delete(self, req):
"""
Will delete all the segments in the SLO manifest and then, if
successful, will delete the manifest file.
:params req: a swob.Request with an obj in path
:returns: swob.Response whose app_iter set to Bulk.handle_delete_iter
"""
resp = HTTPOk(request=req)
out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
if out_content_type:
resp.content_type = out_content_type
resp.app_iter = self.bulk_deleter.handle_delete_iter(
req, objs_to_delete=self.get_segments_to_delete_iter(req),
user_agent='MultipartDELETE', swift_source='SLO',
out_content_type=out_content_type)
return resp
def __call__(self, env, start_response):
"""
WSGI entry point
"""
req = Request(env)
try:
vrs, account, container, obj = req.split_path(4, 4, True)
except ValueError:
return self.app(env, start_response)
# install our COPY-callback hook
env['swift.copy_hook'] = self.copy_hook(
env.get('swift.copy_hook',
lambda src_req, src_resp, sink_req: src_resp))
try:
if req.method == 'PUT' and \
req.params.get('multipart-manifest') == 'put':
return self.handle_multipart_put(req, start_response)
if req.method == 'DELETE' and \
req.params.get('multipart-manifest') == 'delete':
return self.handle_multipart_delete(req)(env, start_response)
if req.method == 'GET' or req.method == 'HEAD':
return self.handle_multipart_get_or_head(req, start_response)
if 'X-Static-Large-Object' in req.headers:
raise HTTPBadRequest(
request=req,
body='X-Static-Large-Object is a reserved header. '
'To create a static large object add query param '
'multipart-manifest=put.')
except HTTPException as err_resp:
return err_resp(env, start_response)
return self.app(env, start_response)
def filter_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
max_manifest_segments = int(conf.get('max_manifest_segments',
DEFAULT_MAX_MANIFEST_SEGMENTS))
max_manifest_size = int(conf.get('max_manifest_size',
DEFAULT_MAX_MANIFEST_SIZE))
min_segment_size = int(conf.get('min_segment_size',
DEFAULT_MIN_SEGMENT_SIZE))
register_swift_info('slo',
max_manifest_segments=max_manifest_segments,
max_manifest_size=max_manifest_size,
min_segment_size=min_segment_size)
def slo_filter(app):
return StaticLargeObject(
app, conf,
max_manifest_segments=max_manifest_segments,
max_manifest_size=max_manifest_size,
min_segment_size=min_segment_size)
return slo_filter
|
bouncestorage/swift
|
swift/common/middleware/slo.py
|
Python
|
apache-2.0
| 36,514
|
from openstatesapi.jurisdiction import make_jurisdiction
J = make_jurisdiction('la')
J.url = 'http://louisiana.gov'
|
openstates/billy
|
billy2pupa/la.py
|
Python
|
bsd-3-clause
| 117
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015-16, Ritesh Khadgaray <khadgaray () gmail.com>
# Copyright: (c) 2018, Ansible Project
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_vm_shell
short_description: Run commands in a VMware guest operating system
description:
    - Module allows a user to run common system administration commands in the guest operating system.
version_added: "2.1"
author:
- Ritesh Khadgaray (@ritzk)
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on vSphere 5.5, 6.0 and 6.5.
- Only the first match against vm_id is used, even if there are multiple matches.
requirements:
- "python >= 2.6"
- PyVmomi
options:
datacenter:
description:
- The datacenter hosting the virtual machine.
- If set, it will help to speed up virtual machine search.
cluster:
description:
- The cluster hosting the virtual machine.
- If set, it will help to speed up virtual machine search.
folder:
description:
- Destination folder, absolute or relative path to find an existing guest or create the new guest.
- The folder should include the datacenter. ESX's datacenter is ha-datacenter.
- 'Examples:'
- ' folder: /ha-datacenter/vm'
- ' folder: ha-datacenter/vm'
- ' folder: /datacenter1/vm'
- ' folder: datacenter1/vm'
- ' folder: /datacenter1/vm/folder1'
- ' folder: datacenter1/vm/folder1'
- ' folder: /folder1/datacenter1/vm'
- ' folder: folder1/datacenter1/vm'
- ' folder: /folder1/datacenter1/vm/folder2'
version_added: "2.4"
vm_id:
description:
- Name of the virtual machine to work with.
required: True
vm_id_type:
description:
- The VMware identification method by which the virtual machine will be identified.
default: vm_name
choices: ['uuid', 'instance_uuid', 'dns_name', 'inventory_path', 'vm_name']
vm_username:
description:
    - The user to log in to the virtual machine.
required: True
vm_password:
description:
    - The password used to log in to the virtual machine.
required: True
vm_shell:
description:
- The absolute path to the program to start.
- On Linux, shell is executed via bash.
required: True
vm_shell_args:
description:
- The argument to the program.
    - The characters which must be escaped to the shell must also be escaped on the command line provided.
default: " "
vm_shell_env:
description:
    - Comma separated list of environment variables, specified in the guest OS notation.
vm_shell_cwd:
description:
- The current working directory of the application from which it will be run.
wait_for_process:
description:
    - If set to C(True), the module will wait for the process to complete in the given virtual machine.
default: False
type: bool
version_added: 2.7
timeout:
description:
- Timeout in seconds.
    - If set to a positive integer, C(wait_for_process) will honor this parameter and will exit after this timeout.
default: 3600
version_added: 2.7
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Run command inside a virtual machine
vmware_vm_shell:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
datacenter: "{{ datacenter }}"
folder: "/{{datacenter}}/vm"
vm_id: "{{ vm_name }}"
vm_username: root
vm_password: superSecret
vm_shell: /bin/echo
vm_shell_args: " $var >> myFile "
vm_shell_env:
- "PATH=/bin"
- "VAR=test"
vm_shell_cwd: "/tmp"
delegate_to: localhost
register: shell_command_output
- name: Run command inside a virtual machine with wait and timeout
vmware_vm_shell:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
datacenter: "{{ datacenter }}"
folder: "/{{datacenter}}/vm"
vm_id: NameOfVM
vm_username: root
vm_password: superSecret
vm_shell: /bin/sleep
vm_shell_args: 100
wait_for_process: True
timeout: 2000
delegate_to: localhost
register: shell_command_with_wait_timeout
- name: Change user password in the guest machine
vmware_vm_shell:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
datacenter: "{{ datacenter }}"
folder: "/{{datacenter}}/vm"
vm_id: "{{ vm_name }}"
vm_username: sample
vm_password: old_password
vm_shell: "/bin/echo"
vm_shell_args: "-e 'old_password\nnew_password\nnew_password' | passwd sample > /tmp/$$.txt 2>&1"
delegate_to: localhost
- name: Change hostname of guest machine
vmware_vm_shell:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
validate_certs: no
datacenter: "{{ datacenter }}"
folder: "/{{datacenter}}/vm"
vm_id: "{{ vm_name }}"
vm_username: testUser
vm_password: SuperSecretPassword
vm_shell: "/usr/bin/hostnamectl"
vm_shell_args: "set-hostname new_hostname > /tmp/$$.txt 2>&1"
delegate_to: localhost
'''
RETURN = r'''
results:
description: metadata about the new process after completion with wait_for_process
returned: on success
type: dict
sample:
{
"cmd_line": "\"/bin/sleep\" 1",
"end_time": "2018-04-26T05:03:21+00:00",
"exit_code": 0,
"name": "sleep",
"owner": "dev1",
"start_time": "2018-04-26T05:03:19+00:00",
"uuid": "564db1e2-a3ff-3b0e-8b77-49c25570bb66",
}
'''
import time
try:
from pyVmomi import vim, vmodl
except ImportError:
pass
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import (PyVmomi, find_cluster_by_name,
find_datacenter_by_name, find_vm_by_id,
vmware_argument_spec)
class VMwareShellManager(PyVmomi):
def __init__(self, module):
super(VMwareShellManager, self).__init__(module)
datacenter_name = module.params['datacenter']
cluster_name = module.params['cluster']
folder = module.params['folder']
self.pm = self.content.guestOperationsManager.processManager
self.timeout = self.params.get('timeout', 3600)
self.wait_for_pid = self.params.get('wait_for_process', False)
datacenter = None
if datacenter_name:
datacenter = find_datacenter_by_name(self.content, datacenter_name)
if not datacenter:
module.fail_json(changed=False, msg="Unable to find %(datacenter)s datacenter" % module.params)
cluster = None
if cluster_name:
cluster = find_cluster_by_name(self.content, cluster_name, datacenter)
if not cluster:
module.fail_json(changed=False, msg="Unable to find %(cluster)s cluster" % module.params)
if module.params['vm_id_type'] == 'inventory_path':
vm = find_vm_by_id(self.content,
vm_id=module.params['vm_id'],
vm_id_type="inventory_path",
folder=folder)
else:
vm = find_vm_by_id(self.content,
vm_id=module.params['vm_id'],
vm_id_type=module.params['vm_id_type'],
datacenter=datacenter,
cluster=cluster)
if not vm:
module.fail_json(msg='Unable to find virtual machine.')
tools_status = vm.guest.toolsStatus
if tools_status in ['toolsNotInstalled', 'toolsNotRunning']:
self.module.fail_json(msg="VMwareTools is not installed or is not running in the guest."
" VMware Tools are necessary to run this module.")
try:
self.execute_command(vm, module.params)
except vmodl.RuntimeFault as runtime_fault:
module.fail_json(changed=False, msg=to_native(runtime_fault.msg))
except vmodl.MethodFault as method_fault:
module.fail_json(changed=False, msg=to_native(method_fault.msg))
except Exception as e:
module.fail_json(changed=False, msg=to_native(e))
def execute_command(self, vm, params):
# https://github.com/vmware/pyvmomi-community-samples/blob/master/samples/execute_program_in_vm.py
vm_username = params['vm_username']
vm_password = params['vm_password']
program_path = params['vm_shell']
args = params['vm_shell_args']
env = params['vm_shell_env']
cwd = params['vm_shell_cwd']
credentials = vim.vm.guest.NamePasswordAuthentication(username=vm_username,
password=vm_password)
cmd_spec = vim.vm.guest.ProcessManager.ProgramSpec(arguments=args,
envVariables=env,
programPath=program_path,
workingDirectory=cwd)
res = self.pm.StartProgramInGuest(vm=vm, auth=credentials, spec=cmd_spec)
if self.wait_for_pid:
res_data = self.wait_for_process(vm, res, credentials)
results = dict(uuid=vm.summary.config.uuid,
owner=res_data.owner,
start_time=res_data.startTime.isoformat(),
end_time=res_data.endTime.isoformat(),
exit_code=res_data.exitCode,
name=res_data.name,
cmd_line=res_data.cmdLine)
if res_data.exitCode != 0:
results['msg'] = "Failed to execute command"
results['changed'] = False
results['failed'] = True
self.module.fail_json(**results)
else:
results['changed'] = True
results['failed'] = False
self.module.exit_json(**results)
else:
self.module.exit_json(changed=True, uuid=vm.summary.config.uuid, msg=res)
    def process_exists_in_guest(self, vm, pid, creds):
        res = self.pm.ListProcessesInGuest(vm, creds, pids=[pid])
        if not res:
            self.module.fail_json(
                changed=False, msg='ListProcessesInGuest: None (unexpected)')
        res = res[0]
        # While the process is still running its exitCode is unset; once it
        # has finished, the process info carries the exit code.
        if res.exitCode is None:
            return True, None
        else:
            return False, res
def wait_for_process(self, vm, pid, creds):
start_time = time.time()
while True:
current_time = time.time()
process_status, res_data = self.process_exists_in_guest(vm, pid, creds)
if not process_status:
return res_data
elif current_time - start_time >= self.timeout:
self.module.fail_json(
msg="Timeout waiting for process to complete.",
vm=vm._moId,
pid=pid,
start_time=start_time,
current_time=current_time,
timeout=self.timeout)
else:
time.sleep(5)
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(
dict(
datacenter=dict(type='str'),
cluster=dict(type='str'),
folder=dict(type='str'),
vm_id=dict(type='str', required=True),
vm_id_type=dict(default='vm_name', type='str',
choices=['inventory_path',
'uuid',
'instance_uuid',
'dns_name',
'vm_name']),
vm_username=dict(type='str', required=True),
vm_password=dict(type='str', no_log=True, required=True),
vm_shell=dict(type='str', required=True),
vm_shell_args=dict(default=" ", type='str'),
vm_shell_env=dict(type='list'),
vm_shell_cwd=dict(type='str'),
wait_for_process=dict(type='bool', default=False),
timeout=dict(type='int', default=3600),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=False,
required_if=[
['vm_id_type', 'inventory_path', ['folder']]
],
)
vm_shell_mgr = VMwareShellManager(module)
if __name__ == '__main__':
main()
|
pgmillon/ansible
|
lib/ansible/modules/cloud/vmware/vmware_vm_shell.py
|
Python
|
gpl-3.0
| 13,224
|
# xorn.geda - Python library for manipulating gEDA files
# Copyright (C) 1998-2010 Ales Hvezda
# Copyright (C) 1998-2010 gEDA Contributors (see ChangeLog for details)
# Copyright (C) 2013-2017 Roland Lutz
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
## \namespace xorn.geda.plainread
## Reading gEDA schematic/symbol files.
#
# The gEDA file format is a space separated list of characters and
# numbers in plain ASCII. The meaning of each item is described in
# the file format documentation which can be found in \c
# doc/geda/file_format_spec or
# [here](http://wiki.geda-project.org/geda:file_format_spec).
import codecs, os, sys
from gettext import gettext as _
import xorn.base64
import xorn.proxy
import xorn.storage
import xorn.geda.attrib
import xorn.geda.read
import xorn.geda.ref
from xorn.geda.plainformat import *
## Describes the properties of a gEDA schematic/symbol file format version.
#
# The file format version of a gEDA file is described by a line of the form
#
# \verbatim
# v release_version fileformat_version
# \endverbatim
#
# where \a release_version is an eight-digit number representing a
# date and \c fileformat_version is a low integer (usually \c 1 or \c
# 2). These represent the file format features that are used:
#
# Feature | Release version | File format version |
# -------------------------------------------|------------------------|------|
# Text alignment attribute | >= February 20th, 2000 | |
# Line and fill attributes | > September 4th, 2000 | |
# Bus pins, whichend and ripperdir attribute | > August 25th, 2002 | |
# Text objects with multiple lines | (October 2003) | >= 1 |
# Path objects | (November 2008) | >= 2 |
#
# Depending on the version of the file format, the file is parsed
# differently. The unspecified parameters in the older file formats
# are set to default values.
#
# In older libgeda file format versions there was no information about
# the active end of pins.
class FileFormat:
## Create a new instance from a version number pair and derive its
## properties.
def __init__(self, release_ver, fileformat_ver):
## libgeda release version number
self.release_ver = release_ver
## libgeda file format version number
self.fileformat_ver = fileformat_ver
## Is the text alignment attribute supported?
self.supports_text_alignment = release_ver >= VERSION_20000220
        # Yes, greater-or-equal (not just greater) is correct here;
        # the format change occurred in release 20000220.
## Are line and fill attributes supported?
self.supports_linefill_attributes = release_ver > VERSION_20000704
## Are bus pins, whichend and ripperdir attributes supported?
self.enhanced_pinbus_format = release_ver > VERSION_20020825
## Can text objects have multiple lines?
self.supports_multiline_text = fileformat_ver >= 1
## Helper function for \ref sscanf.
def parse_token(s, fmt):
if fmt == '%c':
if len(s) != 1:
raise ValueError
return s
if fmt == '%d':
return int(s)
if fmt == '%u':
val = int(s)
if val < 0:
raise ValueError
return val
if fmt == '%s':
return s
raise ValueError, "Invalid format token: '%s'" % fmt
## Parse a string of space-separated values.
#
# This is a mock-up version of the standard \c sscanf(3). The format
# string must consist of zero or more tokens separated by a space,
# optionally followed by a newline character. The format string must
# exactly match this pattern. Only the tokens \c %%c, \c %%d, \c %%s,
# and \c %%u are allowed.
#
# \throw ValueError if the string does not match the format
# \throw ValueError if an invalid format token is passed
#
# \return a tuple containing the parsed values
def sscanf(s, fmt):
while fmt.endswith('\n'):
if not s.endswith('\n'):
raise ValueError
fmt = fmt[:-1]
s = s[:-1]
if s.endswith('\n'):
raise ValueError
# gEDA/gaf ignores trailing spaces and, in some older versions,
# wrote them for text objects
s = s.rstrip(' ')
stok = s.split(' ')
fmttok = fmt.split(' ')
if len(stok) != len(fmttok):
raise ValueError
return [parse_token(st, ft) for (st, ft) in zip(stok, fmttok)]
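# Example (a gEDA version line, as parsed by read_file below):
#
#     sscanf("v 20110115 2\n", "%c %u %u\n")  =>  ['v', 20110115, 2]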
## Replace "\r\n" line endings with "\n" line endings.
def strip_carriage_return(f):
for line in f:
if line.endswith('\r\n'):
yield line[:-2] + '\n'
else:
yield line
## Read a symbol or schematic file in libgeda format from a file object.
#
# \param [in] f A file-like object from which to read
# \param [in] name The file name displayed in warning and
# error messages
# \param [in] log An object to which errors are logged
# \param [in] load_symbol Function for loading referenced symbol files
# \param [in] load_pixmap Function for loading referenced pixmap files
# \param [in] force_boundingbox <i>currently unused</i>
#
# \returns a transient xorn.proxy.RevisionProxy instance containing
# the file's contents
#
# \throws xorn.geda.read.ParseError if the file is not a valid gEDA
# schematic/symbol file
def read_file(f, name, log, load_symbol, load_pixmap,
force_boundingbox = False):
f = codecs.iterdecode(f, 'utf-8')
f = strip_carriage_return(f)
def lineno_incrementor(f):
for line in f:
yield line
log.lineno += 1
f = lineno_incrementor(f)
# "Stack" of outer contexts for embedded components
object_lists_save = []
# Last read object. Attributes and embedded components attach to this.
ob = None
# This is where read objects end up. Will be swapped for embedded comps.
rev = xorn.storage.Revision()
format = FileFormat(0, 0) # no file format definition at all
for line in f:
if not line:
continue
objtype = line[0]
if objtype == OBJ_LINE:
data = read_line(line, format, log)
if data is not None:
ob = rev.add_object(data)
elif objtype == OBJ_NET:
data = read_net(line, format, log)
if data is not None:
ob = rev.add_object(data)
elif objtype == OBJ_BUS:
data = read_bus(line, format, log)
if data is not None:
ob = rev.add_object(data)
elif objtype == OBJ_BOX:
data = read_box(line, format, log)
if data is not None:
ob = rev.add_object(data)
elif objtype == OBJ_PICTURE:
data = read_picture(line, f, format, log, load_pixmap)
if data is not None:
ob = rev.add_object(data)
elif objtype == OBJ_CIRCLE:
data = read_circle(line, format, log)
if data is not None:
ob = rev.add_object(data)
elif objtype == OBJ_COMPLEX:
data = read_complex(line, format, log, load_symbol)
if data is not None:
ob = rev.add_object(data)
elif objtype == OBJ_TEXT:
data = read_text(line, f, format, log)
if data is not None:
ob = rev.add_object(data)
elif objtype == OBJ_PATH:
data = read_path(line, f, format, log)
if data is not None:
ob = rev.add_object(data)
elif objtype == OBJ_PIN:
data = read_pin(line, format, log)
if data is not None:
ob = rev.add_object(data)
elif objtype == OBJ_ARC:
data = read_arc(line, format, log)
if data is not None:
ob = rev.add_object(data)
elif objtype == STARTATTACH_ATTR:
if ob is None:
log.error(_("unexpected attribute list start marker"))
continue
if not isinstance(rev.get_object_data(ob), xorn.storage.Net) and \
not isinstance(rev.get_object_data(ob), xorn.storage.Component):
log.error(_("can't attach attributes to this object type"))
continue
while True:
try:
line = f.next()
except StopIteration:
log.error(_("unterminated attribute list"))
break
if not line:
continue
if line[0] == ENDATTACH_ATTR:
break
if line[0] != OBJ_TEXT:
log.error(
_("tried to attach a non-text item as an attribute"))
continue
attrib = read_text(line, f, format, log)
if attrib is not None:
rev.relocate_object(rev.add_object(attrib), ob, None)
ob = None
elif objtype == START_EMBEDDED:
if ob is None:
log.error(_("unexpected embedded symbol start marker"))
continue
component_data = rev.get_object_data(ob)
if type(component_data) != xorn.storage.Component:
log.error(_("embedded symbol start marker following "
"non-component object"))
continue
if not component_data.symbol.embedded:
log.error(_("embedded symbol start marker following "
"component with non-embedded symbol"))
continue
if component_data.symbol.prim_objs is not None:
log.error(_("embedded symbol start marker following "
"embedded symbol"))
continue
object_lists_save.append((rev, ob))
rev = xorn.storage.Revision()
component_data.symbol.prim_objs = rev
elif objtype == END_EMBEDDED:
if not object_lists_save:
log.error(_("unexpected embedded symbol end marker"))
continue
rev, ob = object_lists_save.pop()
elif objtype == ENDATTACH_ATTR:
log.error(_("unexpected attribute list end marker"))
elif objtype == INFO_FONT:
# NOP
pass
elif objtype == COMMENT:
# do nothing
pass
elif objtype == VERSION_CHAR:
try:
objtype, release_ver, fileformat_ver = \
sscanf(line, "%c %u %u\n")
except ValueError:
try:
objtype, release_ver = sscanf(line, "%c %u\n")
except ValueError:
log.error(_("failed to parse version string"))
continue
fileformat_ver = 0
assert objtype == VERSION_CHAR
# 20030921 was the last version which did not have a fileformat
# version.
if release_ver <= VERSION_20030921:
fileformat_ver = 0
if fileformat_ver == 0:
log.warn(_("Read an old format sym/sch file! "
"Please run g[sym|sch]update on this file"))
format = FileFormat(release_ver, fileformat_ver)
else:
log.error(_("read garbage"))
for ob in rev.get_objects():
data = rev.get_object_data(ob)
if not isinstance(data, xorn.storage.Component) \
or not data.symbol.embedded:
continue
if data.symbol.prim_objs is None:
log.error(_("embedded symbol is missing"))
continue
# rotate/translate objects back to normal
xorn.geda.plainformat.untransform(
data.symbol.prim_objs, data.x, data.y, data.angle, data.mirror)
# un-hide overwritten attributes in embedded symbol
ob = xorn.proxy.ObjectProxy(rev, ob)
visibility = {}
for attached in xorn.geda.attrib.find_attached_attribs(ob):
attached_name, attached_value = \
xorn.geda.attrib.parse_string(attached.text)
visibility[attached_name] = attached.visibility
for inherited in xorn.geda.attrib.find_inherited_attribs(ob):
inherited_name, inherited_value = \
xorn.geda.attrib.parse_string(inherited.text)
if inherited_name in visibility:
inherited.visibility = visibility[inherited_name]
if not format.enhanced_pinbus_format:
pin_update_whichend(rev, force_boundingbox, log)
return xorn.proxy.RevisionProxy(rev)
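# Usage sketch (illustrative only; assumes a log object with a lineno
# attribute and error/warn methods, plus the loader callbacks described
# in the parameter list above):
#
#     with open('example.sch', 'rb') as f:
#         rev = read_file(f, 'example.sch', log, load_symbol, load_pixmap)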
## Guess the orientation of pins.
#
# Calculates the bounding box of all pins in the revision. The end of
# a pin that is closer to the boundary of the box is set as the active
# end.
#
# \return \c None.
#
# \warning This function is not implemented. See Xorn bug #148.
def pin_update_whichend(rev, force_boundingbox, log):
log.error(_("file is lacking pin orientation information"))
## Read a circle object from a string in gEDA format.
#
# \throw xorn.geda.read.ParseError if the string could not be parsed
# \throw ValueError if \a buf doesn't describe a circle object
def read_circle(buf, format, log):
try:
if not format.supports_linefill_attributes:
type, x1, y1, radius, color = sscanf(buf, "%c %d %d %d %d\n")
circle_width = 0
circle_end = 0
circle_type = 0
circle_length = -1
circle_space = -1
circle_fill = 0
fill_width = 0
angle1 = -1
pitch1 = -1
angle2 = -1
pitch2 = -1
else:
type, x1, y1, radius, color, circle_width, \
circle_end, circle_type, circle_length, circle_space, \
circle_fill, fill_width, angle1, pitch1, angle2, pitch2 = sscanf(
buf, "%c %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d\n")
except ValueError:
log.error(_("failed to parse circle object"))
return None
if type != OBJ_CIRCLE:
raise ValueError
if radius == 0:
log.warn(_("circle has radius zero"))
elif radius < 0:
log.warn(_("circle has negative radius (%d), setting to 0") % radius)
radius = 0
if color < 0 or color > MAX_COLORS:
log.warn(_("circle has invalid color (%d), setting to %d")
% (color, DEFAULT_COLOR))
color = DEFAULT_COLOR
return xorn.storage.Circle(
x = x1,
y = y1,
radius = radius,
color = color,
line = xorn.storage.LineAttr(
width = circle_width,
cap_style = circle_end,
dash_style = circle_type,
dash_length = circle_length,
dash_space = circle_space),
fill = xorn.storage.FillAttr(
type = circle_fill,
width = fill_width,
angle0 = angle1,
pitch0 = pitch1,
angle1 = angle2,
pitch1 = pitch2))
## Read an arc object from a string in gEDA format.
#
# A negative or null radius is not allowed.
#
# \throw xorn.geda.read.ParseError if the string could not be parsed
# \throw ValueError if \a buf doesn't describe an arc object
def read_arc(buf, format, log):
try:
if not format.supports_linefill_attributes:
type, x1, y1, radius, start_angle, sweep_angle, color = sscanf(
buf, "%c %d %d %d %d %d %d\n")
arc_width = 0
arc_end = 0
arc_type = 0
arc_space = -1
arc_length = -1
else:
type, x1, y1, radius, start_angle, sweep_angle, color, \
arc_width, arc_end, arc_type, arc_length, arc_space = sscanf(
buf, "%c %d %d %d %d %d %d %d %d %d %d %d\n")
except ValueError:
log.error(_("failed to parse arc object"))
return None
if type != OBJ_ARC:
raise ValueError
if radius == 0:
log.warn(_("arc has radius zero"))
elif radius < 0:
log.warn(_("arc has negative radius (%d), setting to 0") % radius)
radius = 0
if color < 0 or color > MAX_COLORS:
log.warn(_("arc has invalid color (%d), setting to %d")
% (color, DEFAULT_COLOR))
color = DEFAULT_COLOR
return xorn.storage.Arc(
x = x1,
y = y1,
radius = radius,
startangle = start_angle,
sweepangle = sweep_angle,
color = color,
line = xorn.storage.LineAttr(
width = arc_width,
cap_style = arc_end,
dash_style = arc_type,
dash_length = arc_length,
dash_space = arc_space))
## Read a box object from a string in gEDA format.
#
# \throw xorn.geda.read.ParseError if the string could not be parsed
# \throw ValueError if \a buf doesn't describe a box object
def read_box(buf, format, log):
try:
if not format.supports_linefill_attributes:
type, x1, y1, width, height, color = sscanf(
buf, "%c %d %d %d %d %d\n")
box_width = 0
box_end = 0
box_type = 0
box_length = -1
box_space = -1
box_filling = 0
fill_width = 0
angle1 = -1
pitch1 = -1
angle2 = -1
pitch2 = -1
else:
type, x1, y1, width, height, color, \
box_width, box_end, box_type, box_length, box_space, \
box_filling, fill_width, angle1, pitch1, angle2, pitch2 = sscanf(
buf, "%c %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d\n")
except ValueError:
log.error(_("failed to parse box object"))
return None
if type != OBJ_BOX:
raise ValueError
if width == 0 or height == 0:
log.warn(_("box has width/height zero"))
if color < 0 or color > MAX_COLORS:
log.warn(_("box has invalid color (%d), setting to %d")
% (color, DEFAULT_COLOR))
color = DEFAULT_COLOR
# In libgeda, a box is internally represented by its lower right
# and upper left corner, whereas in the file format, it is
# described as its lower left corner and its width and height.
#
# We don't care and just use the file format representation.
return xorn.storage.Box(
x = x1,
y = y1,
width = width,
height = height,
color = color,
line = xorn.storage.LineAttr(
width = box_width,
cap_style = box_end,
dash_style = box_type,
dash_length = box_length,
dash_space = box_space),
fill = xorn.storage.FillAttr(
type = box_filling,
width = fill_width,
angle0 = angle1,
pitch0 = pitch1,
angle1 = angle2,
pitch1 = pitch2))
## Read a bus object from a string in gEDA format.
#
# \throw xorn.geda.read.ParseError if the string could not be parsed
# \throw ValueError if \a buf doesn't describe a bus object
def read_bus(buf, format, log):
try:
if not format.enhanced_pinbus_format:
type, x1, y1, x2, y2, color = sscanf(
buf, "%c %d %d %d %d %d\n")
ripper_dir = 0
else:
type, x1, y1, x2, y2, color, ripper_dir = sscanf(
buf, "%c %d %d %d %d %d %d\n")
except ValueError:
log.error(_("failed to parse bus object"))
return None
if type != OBJ_BUS:
raise ValueError
if x1 == x2 and y1 == y2:
log.warn(_("bus has length zero"))
if color < 0 or color > MAX_COLORS:
log.warn(_("bus has invalid color (%d), setting to %d")
% (color, DEFAULT_COLOR))
color = DEFAULT_COLOR
if ripper_dir < -1 or ripper_dir > 1:
log.warn(_("bus has invalid ripper direction (%d)") % ripper_dir)
ripper_dir = 0 # isn't used
return xorn.storage.Net(
x = x1,
y = y1,
width = x2 - x1,
height = y2 - y1,
color = color,
is_bus = True,
is_pin = False,
is_inverted = False)
## Read a component object from a string in gEDA format.
#
# If the symbol is not embedded, use the function \a load_symbol to
# load it from the component library.
#
# \throw xorn.geda.read.ParseError if the string could not be parsed
# \throw ValueError if \a buf doesn't describe a
# component object
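#
# Example (illustrative values): the line
#
#   C 18600 19900 1 0 0 EMBEDDEDresistor-1.sym
#
# describes the embedded symbol "resistor-1.sym" placed at
# (18600, 19900), selectable, with angle 0 and no mirroring.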
def read_complex(buf, format, log, load_symbol):
try:
type, x1, y1, selectable, angle, mirror, basename = sscanf(
buf, "%c %d %d %d %d %d %s\n")
except ValueError:
log.error(_("failed to parse complex object"))
return None
if type != OBJ_COMPLEX:
raise ValueError
if angle not in [0, 90, 180, 270]:
log.warn(_("component has invalid angle (%d), setting to 0") % angle)
angle = 0
if mirror != 0 and mirror != 1:
log.warn(_("component has invalid mirror flag (%d), "
"setting to 0") % mirror)
mirror = 0
# color = DEFAULT_COLOR
if basename.startswith('EMBEDDED'):
symbol = xorn.geda.ref.Symbol(basename[8:], None, True)
else:
symbol = load_symbol(basename, False)
assert not symbol.embedded
return xorn.storage.Component(
x = x1,
y = y1,
selectable = selectable,
angle = angle,
mirror = mirror,
symbol = symbol)
## Read a line object from a string in gEDA format.
#
# \throw xorn.geda.read.ParseError if the string could not be parsed
# \throw ValueError if \a buf doesn't describe a line object
def read_line(buf, format, log):
try:
if not format.supports_linefill_attributes:
type, x1, y1, x2, y2, color = sscanf(buf, "%c %d %d %d %d %d\n")
line_width = 0
line_end = 0
line_type = 0
line_length = -1
line_space = -1
else:
type, x1, y1, x2, y2, color, \
line_width, line_end, line_type, line_length, line_space = sscanf(
buf, "%c %d %d %d %d %d %d %d %d %d %d\n")
except ValueError:
log.error(_("failed to parse line object"))
return None
if type != OBJ_LINE:
raise ValueError
    # Zero-length lines are not allowed. If such a line is detected, a
    # warning is issued.
if x1 == x2 and y1 == y2:
log.warn(_("line has length zero"))
if color < 0 or color > MAX_COLORS:
log.warn(_("line has invalid color (%d), setting to %d")
% (color, DEFAULT_COLOR))
color = DEFAULT_COLOR
return xorn.storage.Line(
x = x1,
y = y1,
width = x2 - x1,
height = y2 - y1,
color = color,
line = xorn.storage.LineAttr(
width = line_width,
cap_style = line_end,
dash_style = line_type,
dash_length = line_length,
dash_space = line_space))
## Read a net object from a string in gEDA format.
#
# \throw xorn.geda.read.ParseError if the string could not be parsed
# \throw ValueError if \a buf doesn't describe a net object
def read_net(buf, format, log):
try:
type, x1, y1, x2, y2, color = sscanf(buf, "%c %d %d %d %d %d\n")
except ValueError:
log.error(_("failed to parse net object"))
return None
if type != OBJ_NET:
raise ValueError
if x1 == x2 and y1 == y2:
log.warn(_("net has length zero"))
if color < 0 or color > MAX_COLORS:
log.warn(_("net has invalid color (%d), setting to %d")
% (color, DEFAULT_COLOR))
color = DEFAULT_COLOR
return xorn.storage.Net(
x = x1,
y = y1,
width = x2 - x1,
height = y2 - y1,
color = color,
is_bus = False,
is_pin = False,
is_inverted = False)
## Read a path object from a string and a file in gEDA format.
#
# Creates a path object from the string \a first_line and reads as
# many lines describing the path as specified there from \a f.
#
# \throw xorn.geda.read.ParseError if the string could not be parsed
# \throw xorn.geda.read.ParseError if not enough lines could be read
# from the file
# \throw ValueError if \a first_line doesn't describe a
# path object
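#
# Example (illustrative values): the fourteen fields of the first line
# are the type character, the color, five line-style fields, six
# fill-style fields and the number of path data lines that follow:
#
#   H 3 10 0 0 -1 -1 0 -1 -1 -1 -1 -1 2
#   M 410,240
#   L 501,200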
def read_path(first_line, f, format, log):
try:
type, color, \
line_width, line_end, line_type, line_length, line_space, \
fill_type, fill_width, angle1, pitch1, angle2, pitch2, \
num_lines = sscanf(
first_line, "%c %d %d %d %d %d %d %d %d %d %d %d %d %d\n")
except ValueError:
log.error(_("failed to parse path object"))
return None
if type != OBJ_PATH:
raise ValueError
# Checks if the required color is valid.
if color < 0 or color > MAX_COLORS:
log.warn(_("path has invalid color (%d), setting to %d")
% (color, DEFAULT_COLOR))
color = DEFAULT_COLOR
pathstr = ''
for i in xrange(0, num_lines):
try:
line = f.next()
except StopIteration:
log.error(_("unexpected end of file after %d lines "
"while reading path") % i)
break
pathstr += line
if pathstr.endswith('\n'):
pathstr = pathstr[:-1]
return xorn.storage.Path(
pathdata = pathstr.encode('utf-8'),
color = color,
line = xorn.storage.LineAttr(
width = line_width,
cap_style = line_end,
dash_style = line_type,
dash_length = line_length,
dash_space = line_space),
fill = xorn.storage.FillAttr(
type = fill_type,
width = fill_width,
angle0 = angle1,
pitch0 = pitch1,
angle1 = angle2,
pitch1 = pitch2))
## Read a picture object from a string and a file in gEDA format.
#
# Creates a picture object from the string \a first_line. If the
# pixmap is not embedded, uses the function \a load_pixmap to load it
# from an external file. If the pixmap is embedded, reads its data in
# base64 encoding from \a f.
#
# \throw xorn.geda.read.ParseError if the string could not be parsed
# \throw xorn.geda.read.ParseError if the picture data could not be read
# from the file
# \throw ValueError if \a first_line doesn't describe a
# picture object
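#
# Example of an embedded picture (illustrative values): the definition
# line is followed by the file name, the base64-encoded pixmap data and
# a terminating "." line:
#
#   G 16900 35800 1400 1050 0 0 1
#   image.png
#   (base64-encoded data lines)
#   .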
def read_picture(first_line, f, format, log, load_pixmap):
try:
type, x1, y1, width, height, angle, mirrored, embedded = sscanf(
first_line, "%c %d %d %d %d %d %d %d\n")
except ValueError:
log.error(_("failed to parse picture definition"))
return None
if type != OBJ_PICTURE:
raise ValueError
if width == 0 or height == 0:
log.warn(_("picture has width/height zero"))
if mirrored != 0 and mirrored != 1:
log.warn(_("picture has wrong 'mirrored' parameter (%d), "
"setting to 0") % mirrored)
mirrored = 0
if angle not in [0, 90, 180, 270]:
log.warn(_("picture has unsupported angle (%d), setting to 0") % angle)
angle = 0
try:
filename = f.next()
except StopIteration:
log.error(_("unexpected end of file while reading picture file name"))
filename = ''
else:
if filename.endswith('\n'):
filename = filename[:-1]
# Handle empty filenames
if not filename:
log.warn(_("image has no filename"))
filename = None
if embedded != 1:
if embedded != 0:
log.warn(_("picture has wrong 'embedded' parameter (%d), "
"setting to not embedded") % embedded)
pixmap = load_pixmap(filename, False)
assert not pixmap.embedded
else:
pixmap = xorn.geda.ref.Pixmap(filename, None, True)
# Read the encoded picture
try:
pixmap.data = xorn.base64.decode(f, delim = '.')
except xorn.base64.DecodingError as e:
log.error(_("failed to load image from embedded data: %s")
% e.message)
pixmap.data = ''
return xorn.storage.Picture(
x = x1,
y = y1,
width = width,
height = height,
angle = angle,
mirror = mirrored,
pixmap = pixmap)
## Read a pin object from a string in gEDA format.
#
# \throw xorn.geda.read.ParseError if the string could not be parsed
# \throw ValueError if \a buf doesn't describe a pin object
def read_pin(buf, format, log):
try:
if not format.enhanced_pinbus_format:
type, x1, y1, x2, y2, color = sscanf(buf, "%c %d %d %d %d %d\n")
pin_type = 0
whichend = -1
else:
type, x1, y1, x2, y2, color, pin_type, whichend = sscanf(
buf, "%c %d %d %d %d %d %d %d\n")
except ValueError:
log.error(_("failed to parse pin object"))
return None
if type != OBJ_PIN:
raise ValueError
if whichend == -1:
log.warn(_("pin does not have the whichone field set--"
"verify and correct manually!"))
elif whichend < -1 or whichend > 1:
log.warn(_("pin has invalid whichend (%d), "
"setting to first end") % whichend)
whichend = 0
if color < 0 or color > MAX_COLORS:
log.warn(_("pin has invalid color (%d), setting to %d")
% (color, DEFAULT_COLOR))
color = DEFAULT_COLOR
if pin_type == 0:
is_bus = False
elif pin_type == 1:
is_bus = True
else:
log.warn(_("pin has invalid type (%d), setting to 0") % pin_type)
is_bus = False
if whichend != 1:
is_inverted = False
else:
x1, y1, x2, y2 = x2, y2, x1, y1
is_inverted = True
return xorn.storage.Net(
x = x1,
y = y1,
width = x2 - x1,
height = y2 - y1,
color = color,
is_bus = is_bus,
is_pin = True,
is_inverted = is_inverted)
## Read a text object from a string and a file in gEDA format.
#
# Creates a text object from the string \a first_line and reads as
# many lines of text as specified there from \a f.
#
# \throw xorn.geda.read.ParseError if the string could not be parsed
# \throw xorn.geda.read.ParseError if not enough lines could be read
# from the file
# \throw ValueError if \a first_line doesn't describe a
# text object
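#
# Example (illustrative values): a two-line text object in the
# multi-line format; the ten fields are type, x, y, color, size,
# visibility, show_name_value, angle, alignment and the number of
# text lines that follow:
#
#   T 16900 35800 3 10 1 0 0 0 2
#   refdes=R1
#   footprint=0805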
def read_text(first_line, f, format, log):
try:
if format.supports_multiline_text:
type, x, y, color, size, visibility, show_name_value, angle, \
alignment, num_lines = sscanf(
first_line, "%c %d %d %d %d %d %d %d %d %d\n")
elif not format.supports_text_alignment:
type, x, y, color, size, visibility, show_name_value, angle = \
sscanf(first_line, "%c %d %d %d %d %d %d %d\n")
alignment = LOWER_LEFT # older versions didn't have this
num_lines = 1 # only support a single line
else:
type, x, y, color, size, visibility, show_name_value, angle, \
alignment = sscanf(
first_line, "%c %d %d %d %d %d %d %d %d\n")
num_lines = 1 # only support a single line
except ValueError:
log.error(_("failed to parse text object"))
return None
if type != OBJ_TEXT:
raise ValueError
if size == 0:
log.warn(_("text has size zero"))
if angle not in [0, 90, 180, 270]:
log.warn(_("text has unsupported angle (%d), setting to 0") % angle)
angle = 0
if alignment not in [LOWER_LEFT, MIDDLE_LEFT, UPPER_LEFT,
LOWER_MIDDLE, MIDDLE_MIDDLE, UPPER_MIDDLE,
LOWER_RIGHT, MIDDLE_RIGHT, UPPER_RIGHT]:
log.warn(_("text has unsupported alignment (%d), "
"setting to LOWER_LEFT") % alignment)
alignment = LOWER_LEFT
if color < 0 or color > MAX_COLORS:
log.warn(_("text has invalid color (%d), setting to %d")
% (color, DEFAULT_COLOR))
color = DEFAULT_COLOR
if num_lines <= 0:
log.error(_("text has invalid number of lines (%d)") % num_lines)
text = ''
for i in xrange(0, num_lines):
try:
line = f.next()
except StopIteration:
log.error(_("unexpected end of file after %d lines of text") % i)
break
text += line
if text.endswith('\n'):
text = text[:-1]
    # A pair of "\_" markers toggles an overbar, and "\\" denotes a
    # literal backslash; strip escaped backslashes first so they don't
    # confuse the marker checks below.
    tmp = text.replace('\\\\', '')
if tmp.count('\\_') % 2:
log.warn(_("mismatched overbar markers"))
if '\\' in tmp.replace('\\_', ''):
log.warn(_("stray backslash character(s)"))
return xorn.storage.Text(
x = x,
y = y,
color = color,
text_size = size,
visibility = visibility,
show_name_value = show_name_value,
angle = angle,
alignment = alignment,
text = text.encode('utf-8'))
|
bert/geda-gaf
|
xorn/src/python/geda/plainread.py
|
Python
|
gpl-2.0
| 34,369
|
__author__ = 'alforro'
from django.conf.urls import patterns, url
from proyectos import views
from django.contrib.auth.decorators import login_required
urlpatterns = patterns('',
url(r'^$', login_required(views.IndexView.as_view()), name='lista_proyecto'),
url(r'^crear$', login_required(views.CreateProyecto.as_view()), name='crear_proyecto'), #new line
url(r'^configurar/(?P<pk>\d+)$', login_required(views.ConfigurarProyecto.as_view()), name='configurar'),
url(r'^modificar/(?P<pk>\d+)$', login_required(views.UpdateProyecto.as_view()), name='modificar_proyecto'),
url(r'^buscar/$', views.search, name='buscar_proyecto'),
url(r'^kanban/(?P<pk>\d+)$', login_required(views.Kanban.as_view()), name='kanban'),
url(r'^iniciar/(?P<pk>\d+)$', login_required(views.IniciarProyecto.as_view()), name='iniciar_proyecto'),
url(r'^reporte_pdf/(?P<pk>\d+)$', views.reporte_pdf, name='generar_pdf'),
url(r'^finalizar/(?P<pk>\d+)$', login_required(views.FinalizarProyecto.as_view()), name='finalizar_proyecto'),
)
|
alforro/sgpa2015
|
proyectos/urls.py
|
Python
|
gpl-2.0
| 1,040
|
from django.shortcuts import render, render_to_response
from django.contrib import auth
from django.core.context_processors import csrf
from django.contrib.auth.decorators import login_required
from .models import Ticket, UserProfile, Order, TicketType, OrderType
from django.core.mail import send_mail
from django.template import RequestContext
from validation_utils import *
from entities_utils import *
# Home Page
@login_required
def index(request):
return render(request, "index.html")
# Login view
def login(request):
c = {}
c.update(csrf(request))
return render_to_response("login.html", c)
# Login authview
def login_auth(request):
username = request.POST.get('username', '')
password = request.POST.get('password', '')
user = auth.authenticate(username=username, password=password)
if user is not None:
auth.login(request, user)
return render(request, "index.html")
else:
return render(request, "login.html", {'change_succeded':False})
# Logout view
def logout(request):
auth.logout(request)
return render(request, "login.html", {'change_succeded':True})
# Account Page
@login_required
def myaccount(request):
# User Auth object
user = request.user
# Retrieving User
user_profile = get_profile_for_user(user)
# Retrieving Tickets
tickets = get_tickets_for_user_profile(user_profile)
# Retrieving orders
orders = get_orders_for_user_profile(user_profile)
return render(request, "myaccount.html", {'user':user, 'user_profile':user_profile, 'tickets':tickets, 'orders':orders})
# Contact Page
def contact(request):
if request.method == "POST":
name = request.POST.get("name")
subject = request.POST.get("subject")
email = request.POST.get("email")
phone = request.POST.get("phone", "-")
company_name = request.POST.get("company_name", "-")
message = request.POST.get("message")
send_message = "Message received from: " + name + "\n" \
+ "Phone Number: " + phone + "\n" + "Company: " + company_name + "\n" \
+ "Message:\n" + message
send_result = send_mail(
subject,
send_message,
email,
['abordioaie.george@yahoo.com'],
fail_silently=False,
)
if send_result == 1:
return render(request, "contact.html", {'sent':True})
else:
return render(request, "contact.html", {'sent':False})
else:
return render(request, "contact.html")
# 404 Handler
def handler404(request):
response = render_to_response('404.html', {}, context_instance=RequestContext(request))
response.status_code = 404
return response
# Services Page
@login_required
def services(request):
# Retrieving User Profile
user = request.user
user_profile = user.userprofile
user_role = get_user_type(user_profile)
subalterns = get_subalterns_number(user_profile)
return render(request, "services.html", {'subalterns':subalterns, 'user_role':user_role})
# Create a Ticket Page
@login_required
def create_ticket(request):
if request.method == "POST":
# Validation Flag
valid = True
# Collecting Form Data
title = request.POST.get("title")
valid = validateMinCharLength(title)
description = request.POST.get("description", "-")
priority = request.POST.get("priority")
type = request.POST.get("type")
# Retrieving Ticket Type
try:
ticket_type = TicketType.objects.get(pk=int(type))
except TicketType.DoesNotExist:
valid = False
# Retrieving User Profile
user = request.user
try:
user_profile = user.userprofile
except UserProfile.DoesNotExist:
valid = False
if valid is True:
ticket = Ticket.objects.create(
title = title,
description = description,
comments = "-",
priority = int(priority),
status = 0,
ticket_type = ticket_type,
user_type = user_profile
)
return render(request, "ticketcreate.html", {'sent':True})
else:
fail_message = "Invalid data provided, please try again! The title of the ticket must be at least 5 characters long."
return render(request, "ticketcreate.html", {'sent':False, 'fail_message':fail_message})
else:
return render(request, "ticketcreate.html")
# Active Tickets
@login_required
def active_tickets(request):
# Validation Flag
valid = True
# Retrieving User Profile
user = request.user
user_profile = user.userprofile
# Retrieving Tickets
tickets = Ticket.objects.filter(user_type = user_profile).exclude(status = 3)
if len(tickets) == 0:
tickets = False
return render(request, "ticketsactive.html", {'user':user, 'user_profile':user_profile, 'tickets':tickets})
# Closed Tickets
@login_required
def closed_tickets(request):
# Validation Flag
valid = True
# Retrieving User Profile
user = request.user
user_profile = user.userprofile
# Retrieving Tickets
tickets = Ticket.objects.filter(user_type = user_profile).filter(status = 3)
if len(tickets) == 0:
tickets = False
if request.method == "POST":
selected = request.POST.getlist("checks")
if len(selected) == 0:
fail_message = "You must select at least one value from the table!"
return render(request, "ticketsclosed.html", {'user':user, 'user_profile':user_profile, 'tickets':tickets, 'sent':False, 'fail_message':fail_message})
else:
# Updating selected tickets
for sel_ticket in selected:
ticket = Ticket.objects.get(pk=int(sel_ticket))
ticket.status = 4
ticket.save(update_fields=['status'])
# Re-Retrieving Tickets to refresh list
tickets = Ticket.objects.filter(user_type = user_profile).filter(status = 3)
if len(tickets) == 0:
tickets = False
return render(request, "ticketsclosed.html", {'user':user, 'user_profile':user_profile, 'tickets':tickets, 'sent':True})
else:
return render(request, "ticketsclosed.html", {'user':user, 'user_profile':user_profile, 'tickets':tickets})
# Create an Order
@login_required
def create_order(request):
if request.method == "POST":
        # Validation flag; set to False if any validation step fails
valid = True
# Collecting Form Data
title = request.POST.get("title")
valid = validateMinCharLength(title)
if valid == False:
fail_message = "Invalid data provided, please try again! The title of the order must be at least 5 " \
"characters long."
return render(request, "ordercreate.html", {'sent':False, 'fail_message':fail_message})
description = request.POST.get("description", "-")
# Converting and validating value
value_str = request.POST.get("value")
value = 0.0
try:
value = float(value_str)
except ValueError:
valid = False
        # preserve an earlier conversion failure instead of overwriting it
        valid = valid and validateValue(value)
if valid == False:
fail_message = "Invalid data provided, please try again! The price must be a numerical value greater than 0.0."
return render(request, "ordercreate.html", {'sent':False, 'fail_message':fail_message})
# Converting and validating units
units_str = request.POST.get("units")
units = 0
try:
units = int(units_str)
except ValueError:
valid = False
        valid = valid and validateUnits(units)
delivery_office = request.POST.get("delivery_office")
priority = request.POST.get("priority")
type = request.POST.get("type")
        # Retrieving Order Type
try:
order_type = OrderType.objects.get(pk=int(type))
except OrderType.DoesNotExist:
valid = False
# Retrieving User Profile
user = request.user
try:
user_profile = user.userprofile
except UserProfile.DoesNotExist:
valid = False
if valid is True:
order = Order.objects.create(
title = title,
description = description,
comments = "-",
value_per_unit = value,
units = units,
delivery_office = delivery_office,
status = 0,
priority = int(priority),
order_type = order_type,
user_type = user_profile
)
return render(request, "ordercreate.html", {'sent':True})
else:
fail_message = "Invalid data provided, please try again! The title of the order must be at least 5 " \
"characters long and the price must be a numerical value greater than 0.0."
return render(request, "ordercreate.html", {'sent':False, 'fail_message':fail_message})
else:
return render(request, "ordercreate.html")
# Active Orders
@login_required
def active_orders(request):
# Validation Flag
valid = True
# Retrieving User Profile
user = request.user
user_profile = user.userprofile
# Retrieving Orders
orders = Order.objects.filter(user_type = user_profile).exclude(status = 3)
if len(orders) == 0:
orders = False
return render(request, "ordersactive.html", {'user':user, 'user_profile':user_profile, 'orders':orders})
# Closed Orders
@login_required
def closed_orders(request):
# Validation Flag
valid = True
# Retrieving User Profile
user = request.user
user_profile = user.userprofile
# Retrieving Orders
orders = Order.objects.filter(user_type = user_profile).filter(status = 3)
if len(orders) == 0:
orders = False
if request.method == "POST":
selected = request.POST.getlist("checks")
if len(selected) == 0:
fail_message = "You must select at least one value from the table!"
return render(request, "ordersclosed.html", {'user':user, 'user_profile':user_profile, 'orders':orders, 'sent':False, 'fail_message':fail_message})
else:
# Updating selected orders
for sel_order in selected:
order = Order.objects.get(pk=int(sel_order))
order.status = 4
order.save(update_fields=['status'])
            # Re-Retrieving Orders to refresh list
orders = Order.objects.filter(user_type = user_profile).filter(status = 3)
if len(orders) == 0:
orders = False
return render(request, "ordersclosed.html", {'user':user, 'user_profile':user_profile, 'orders':orders, 'sent':True})
else:
return render(request, "ordersclosed.html", {'user':user, 'user_profile':user_profile, 'orders':orders})
# Subalterns Page
@login_required
def subalterns(request):
# Retrieving User Profile
user = request.user
user_profile = user.userprofile
# Retrieving subalterns
subalterns = get_subalterns(user_profile)
if len(subalterns) == 0:
subalterns = False
return render(request, "subalterns.html", {'subalterns':subalterns, 'user_profile':user_profile})
# Subalterns Tickets
@login_required
def subalterns_tickets(request):
# Retrieving User Profile
user = request.user
user_profile = user.userprofile
# Retrieving subalterns
subalterns = get_subalterns(user_profile)
# Retrieving open tickets for subalterns
tickets = []
for subaltern in subalterns:
tickets_sub = Ticket.objects.filter(user_type = subaltern).filter(status = 0)
for ticket_sub in tickets_sub:
tickets.append(ticket_sub)
# If the view is accessed as a POST request:
if request.method == "POST":
# Loop to identify the selected ticket
for ticket in tickets:
if str(ticket.id) in request.POST:
# Add ticket to session request
request.session['selected_ticket'] = ticket.id
# Load change status page if the ticket is identified
return render(request, "subalterns_tickets_cs.html", {'ticket':ticket})
if len(tickets) == 0:
tickets = False
# Load the opened tickets page
return render(request, "subalterns_tickets.html", {'tickets':tickets, 'user_profile':user_profile})
else:
if len(tickets) == 0:
tickets = False
return render(request, "subalterns_tickets.html", {'tickets':tickets, 'user_profile':user_profile})
# Subalterns approve tickets page
@login_required
def subalterns_ticket_cs(request):
# Retrieving ticket
ticket_id = request.session.get('selected_ticket')
ticket = Ticket.objects.get(pk=ticket_id)
status = request.POST.get("status")
comments = request.POST.get("comments")
ticket.status = status
ticket.comments = comments
ticket.save()
ticket = Ticket.objects.get(pk=ticket_id)
return render(request, "subalterns_tickets_cs.html", {'ticket':ticket, 'change_succeded':True})
# Subalterns Orders
@login_required
def subalterns_orders(request):
# Retrieving User Profile
user = request.user
user_profile = user.userprofile
# Retrieving subalterns
subalterns = get_subalterns(user_profile)
# Retrieving open orders for subalterns
orders = []
for subaltern in subalterns:
orders_sub = Order.objects.filter(user_type = subaltern).filter(status = 0)
for order_sub in orders_sub:
orders.append(order_sub)
# If the view is accessed as a POST request:
if request.method == "POST":
        # Loop to identify the selected order
for order in orders:
if str(order.id) in request.POST:
# Add order to session request
request.session['selected_order'] = order.id
# Load change status page if the order is identified
return render(request, "subalterns_orders_cs.html", {'order':order})
if len(orders) == 0:
orders = False
# Load the opened orders page
return render(request, "subalterns_orders.html", {'orders':orders, 'user_profile':user_profile})
else:
if len(orders) == 0:
orders = False
return render(request, "subalterns_orders.html", {'orders':orders, 'user_profile':user_profile})
# Subalterns approve orders page
@login_required
def subalterns_order_cs(request):
# Retrieving order
order_id = request.session.get('selected_order')
order = Order.objects.get(pk=order_id)
status = request.POST.get("status")
comments = request.POST.get("comments")
order.status = status
order.comments = comments
order.save()
order = Order.objects.get(pk=order_id)
return render(request, "subalterns_orders_cs.html", {'order':order, 'change_succeded':True})
|
abogeorge/simpleTicket
|
simpleTicket/siteEngine/views.py
|
Python
|
mit
| 15,240
|
# LexGen.py - implemented 2002 by Neil Hodgson neilh@scintilla.org
# Released to the public domain.
# Regenerate the Scintilla and SciTE source files that list
# all the lexers and all the properties files.
# Should be run whenever a new lexer is added or removed.
# Requires Python 2.4 or later
# Most files are regenerated in place with templates stored in comments.
# The VS .NET project file is generated into a different file as the
# VS .NET environment will not retain comments when modifying the file.
# The files are copied to a string apart from sections between a
# ++Autogenerated comment and a --Autogenerated comment which is
# generated by the CopyWithInsertion function. After the whole
# string is instantiated, it is compared with the target file and
# if different the file is rewritten.
# Does not regenerate the Visual C++ 6 project files, but does regenerate
# the VS .NET project file.
import string
import sys
import os
import glob
# EOL constants
CR = "\r"
LF = "\n"
CRLF = "\r\n"
if sys.platform == "win32":
NATIVE = CRLF
else:
# Yes, LF is the native EOL even on Mac OS X. CR is just for
# Mac OS <=9 (a.k.a. "Mac Classic")
NATIVE = LF
# Automatically generated sections contain start and end comments,
# a definition line and the results.
# The results are replaced by regenerating based on the definition line.
# The definition line is a comment prefix followed by "**".
# If there is a digit after the ** then this indicates which list to use
# and the digit and next character are not part of the definition
# Backslash is used as an escape within the definition line.
# The part between \( and \) is repeated for each item in the list.
# \* is replaced by each list item. \t, and \n are tab and newline.
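# For example (an illustrative definition, not one of the real targets):
# regenerated against the list ["LexAda", "LexBash"], the makefile
# definition line
#   #**LEXOBJS=\(\*.obj \)
# produces
#   LEXOBJS=LexAda.obj LexBash.obj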
def CopyWithInsertion(input, commentPrefix, retainDefs, eolType, *lists):
copying = 1
listid = 0
output = []
for line in input.splitlines(0):
isStartGenerated = line.startswith(commentPrefix + "++Autogenerated")
if copying and not isStartGenerated:
output.append(line)
if isStartGenerated:
if retainDefs:
output.append(line)
copying = 0
definition = ""
elif not copying and line.startswith(commentPrefix + "**"):
if retainDefs:
output.append(line)
definition = line[len(commentPrefix + "**"):]
listid = 0
if definition[0] in string.digits:
listid = int(definition[:1])
definition = definition[2:]
# Hide double slashes as a control character
definition = definition.replace("\\\\", "\001")
# Do some normal C style transforms
definition = definition.replace("\\n", "\n")
definition = definition.replace("\\t", "\t")
# Get the doubled backslashes back as single backslashes
definition = definition.replace("\001", "\\")
startRepeat = definition.find("\\(")
endRepeat = definition.find("\\)")
intro = definition[:startRepeat]
out = ""
if intro.endswith("\n"):
pos = 0
else:
pos = len(intro)
out += intro
middle = definition[startRepeat+2:endRepeat]
for i in lists[listid]:
item = middle.replace("\\*", i)
if pos and (pos + len(item) >= 80):
out += "\\\n"
pos = 0
out += item
pos += len(item)
if item.endswith("\n"):
pos = 0
outro = definition[endRepeat+2:]
out += outro
out = out.replace("\n", eolType) # correct EOLs in generated content
output.append(out)
elif line.startswith(commentPrefix + "--Autogenerated"):
copying = 1
if retainDefs:
output.append(line)
output = [line.rstrip(" \t") for line in output] # trim trailing whitespace
return eolType.join(output) + eolType
def UpdateFile(filename, updated):
""" If the file is different to updated then copy updated
into the file else leave alone so CVS and make don't treat
it as modified. """
try:
infile = open(filename, "rb")
except IOError: # File is not there yet
out = open(filename, "wb")
out.write(updated)
out.close()
print "New", filename
return
original = infile.read()
infile.close()
if updated != original:
os.unlink(filename)
out = open(filename, "wb")
out.write(updated)
out.close()
print "Changed", filename
#~ else:
#~ print "Unchanged", filename
def Generate(inpath, outpath, commentPrefix, eolType, *lists):
"""Generate 'outpath' from 'inpath'.
"eolType" indicates the type of EOLs to use in the generated
file. It should be one of following constants: LF, CRLF,
CR, or NATIVE.
"""
#print "generate '%s' -> '%s' (comment prefix: %r, eols: %r)"\
# % (inpath, outpath, commentPrefix, eolType)
try:
infile = open(inpath, "r")
except IOError:
print "Can not open", inpath
return
original = infile.read()
infile.close()
updated = CopyWithInsertion(original, commentPrefix,
inpath == outpath, eolType, *lists)
UpdateFile(outpath, updated)
def Regenerate(filename, commentPrefix, eolType, *lists):
"""Regenerate the given file.
"eolType" indicates the type of EOLs to use in the generated
file. It should be one of following constants: LF, CRLF,
CR, or NATIVE.
"""
Generate(filename, filename, commentPrefix, eolType, *lists)
def FindModules(lexFile):
modules = []
f = open(lexFile)
for l in f.readlines():
if l.startswith("LexerModule"):
l = l.replace("(", " ")
modules.append(l.split()[1])
return modules
knownIrregularProperties = [
"fold",
"styling.within.preprocessor",
"tab.timmy.whinge.level",
"asp.default.language",
"html.tags.case.sensitive",
"ps.level",
"ps.tokenize",
"sql.backslash.escapes",
"nsis.uservars",
"nsis.ignorecase"
]
def FindProperties(lexFile):
properties = set()
f = open(lexFile)
for l in f.readlines():
if "GetProperty" in l:
l = l.strip()
if not l.startswith("//"): # Drop comments
propertyName = l.split("\"")[1]
if propertyName.lower() == propertyName:
# Only allow lower case property names
if propertyName in knownIrregularProperties or \
propertyName.startswith("fold.") or \
propertyName.startswith("lexer."):
properties.add(propertyName)
return properties
def ciCompare(a,b):
return cmp(a.lower(), b.lower())
def RegenerateAll():
root="../../"
# Find all the lexer source code files
lexFilePaths = glob.glob(root + "scintilla/src/Lex*.cxx")
lexFiles = [os.path.basename(f)[:-4] for f in lexFilePaths]
print lexFiles
lexerModules = []
lexerProperties = set()
for lexFile in lexFilePaths:
lexerModules.extend(FindModules(lexFile))
lexerProperties.update(FindProperties(lexFile))
lexerModules.sort(ciCompare)
lexerProperties.remove("fold.comment.python")
lexerProperties = list(lexerProperties)
lexerProperties.sort(ciCompare)
# Find all the SciTE properties files
otherProps = ["abbrev.properties", "Embedded.properties", "SciTEGlobal.properties", "SciTE.properties"]
if os.path.exists(root + "scite"):
propFilePaths = glob.glob(root + "scite/src/*.properties")
propFiles = [os.path.basename(f) for f in propFilePaths if os.path.basename(f) not in otherProps]
propFiles.sort(ciCompare)
print propFiles
Regenerate(root + "scintilla/src/KeyWords.cxx", "//", NATIVE, lexerModules)
Regenerate(root + "scintilla/win32/makefile", "#", NATIVE, lexFiles)
Regenerate(root + "scintilla/win32/scintilla.mak", "#", NATIVE, lexFiles)
Regenerate(root + "scintilla/win32/scintilla_vc6.mak", "#", NATIVE, lexFiles)
# Use Unix EOLs for gtk Makefiles so they work for Linux users when
# extracted from the Scintilla source ZIP (typically created on
# Windows).
Regenerate(root + "scintilla/gtk/makefile", "#", LF, lexFiles)
Regenerate(root + "scintilla/gtk/scintilla.mak", "#", NATIVE, lexFiles)
Regenerate(root + "scintilla/macosx/makefile", "#", LF, lexFiles)
if os.path.exists(root + "scite"):
Regenerate(root + "scite/win32/makefile", "#", NATIVE, lexFiles, propFiles)
Regenerate(root + "scite/win32/scite.mak", "#", NATIVE, lexFiles, propFiles)
Generate(root + "scite/boundscheck/vcproj.gen",
root + "scite/boundscheck/SciTE.vcproj", "#", NATIVE, lexFiles)
RegenerateAll()
|
downpoured/lnzscript
|
lnzscript/lnzeditor/app/scintilla/src/LexGen.py
|
Python
|
gpl-3.0
| 9,047
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.extensions import vnfm
from tacker.vnfm.lcm_user_data.abstract_user_data import AbstractUserData
class SampleUserData(AbstractUserData):
@staticmethod
def instantiate(base_hot_dict=None,
vnfd_dict=None,
inst_req_info=None,
grant_info=None):
error_reason = _(
"invalid user data script.")
raise vnfm.LCMUserDataFailed(reason=error_reason)
|
stackforge/tacker
|
tacker/tests/etc/samples/etsi/nfv/user_data_sample_userdata_invalid_script/UserData/lcm_user_data_invalid_script.py
|
Python
|
apache-2.0
| 993
|
#!/usr/bin/env python2
import argparse
import os
from fedora_elections import APP
parser = argparse.ArgumentParser(description="Run the Fedora election app")
parser.add_argument(
"--config", "-c", dest="config", help="Configuration file to use for packages."
)
parser.add_argument(
"--debug",
dest="debug",
action="store_true",
default=False,
help="Expand the level of data returned.",
)
parser.add_argument(
"--profile",
dest="profile",
action="store_true",
default=False,
help="Profile the application.",
)
parser.add_argument(
"--port", "-p", default=5005, help="Port for the flask application."
)
parser.add_argument(
"--cert", "-s", default=None, help="Filename of SSL cert for the flask application."
)
parser.add_argument(
"--key",
"-k",
default=None,
help="Filename of the SSL key for the flask application.",
)
parser.add_argument(
"--host",
default="127.0.0.1",
help="Hostname to listen on. When set to 0.0.0.0 the server is available \
    externally. Defaults to 127.0.0.1, making it only visible on localhost.",
)
args = parser.parse_args()
if args.profile:
from werkzeug.contrib.profiler import ProfilerMiddleware
APP.config["PROFILE"] = True
APP.wsgi_app = ProfilerMiddleware(APP.wsgi_app, restrictions=[30])
if args.config:
config = args.config
if not config.startswith("/"):
here = os.path.join(os.path.dirname(os.path.abspath(__file__)))
config = os.path.join(here, config)
os.environ["FEDORA_ELECTIONS_CONFIG"] = config
APP.debug = True
if args.cert and args.key:
APP.run(host=args.host, port=int(args.port), ssl_context=(args.cert, args.key))
else:
APP.run(host=args.host, port=int(args.port))
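# Example invocation (illustrative config path; flags are defined above):
#   python2 runserver.py --config /etc/elections/config.py --port 5005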
|
fedora-infra/elections
|
runserver.py
|
Python
|
gpl-2.0
| 1,753
|
# Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
_basedir = os.path.abspath(os.path.dirname(__file__))
DEBUG = True
# Database setup
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/sketchy-tests.db'
# Broker configuration information, currently only supporting Redis
CELERY_BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
# Set scheme and hostname:port of your server.
# Alternatively, you can export the 'host' variable on your system to set the
# host and port.
# If you are using Nginx with SSL, change the scheme to https.
BASE_URL = 'http://%s' % os.getenv('host', '127.0.0.1:8000')
# Local Screenshot storage
LOCAL_STORAGE_FOLDER = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files')
# Maximum number of Celery job retries on failure
MAX_RETRIES = 2
# Seconds to sleep before retrying the task
COOLDOWN = 5
# Path to PhantomJS
PHANTOMJS = '/usr/local/bin/phantomjs'
# S3 Specific configurations
USE_S3 = False
S3_BUCKET_PREFIX = 'your_bucket.s3.here.test'
S3_LINK_EXPIRATION = 6000000
S3_BUCKET_REGION_NAME = 'us-east-1'
# Token Auth Setup
REQUIRE_AUTH = False
AUTH_TOKEN = os.getenv('auth_token', 'test')
# Log file configuration (currently only logs errors)
SKETCHY_LOG_FILE = "sketchy.log"
|
gorcz/sketchy
|
config-test.py
|
Python
|
apache-2.0
| 1,838
|
"""
WSGI config for test_server_django_logging project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_server_django_logging.settings")
application = get_wsgi_application()
|
dlparker/test_project_django_logging_dlp
|
test_server_django_logging/wsgi.py
|
Python
|
mit
| 429
|
# Copyright (C) 2010-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# This case corresponds to: /visu/StreamLines/A3 case
# Create Stream Lines for all fields of the given MED file
import sys
from paravistest import datadir, pictureext, get_picture_dir
from presentations import CreatePrsForFile, PrsTypeEnum
import pvserver as paravis
# Directory for saving snapshots
picturedir = get_picture_dir("StreamLines/A3")
# Create presentations
myParavis = paravis.myParavis
file = datadir + "Fields_group3D.med"
print " --------------------------------- "
print "file ", file
print " --------------------------------- "
print "\nCreatePrsForFile..."
CreatePrsForFile(myParavis, file, [PrsTypeEnum.STREAMLINES], picturedir, pictureext)
|
FedoraScientific/salome-paravis
|
test/VisuPrs/StreamLines/A3.py
|
Python
|
lgpl-2.1
| 1,528
|
import numpy as np
from timeit import default_timer as timer
from skimage.io import imsave
from shapely.geometry import Polygon
from PIL.Image import fromarray
from PIL.ImageDraw import ImageDraw
from sldc import Image, Segmenter
from sldc.builder import SSLWorkflowBuilder
class NumpyImage(Image):
def __init__(self, np_image):
"""An image represented as a numpy ndarray"""
self._np_image = np_image
@property
def np_image(self):
return self._np_image
@property
def channels(self):
shape = self._np_image.shape
return shape[2] if len(shape) == 3 else 1
@property
def width(self):
return self._np_image.shape[1]
@property
def height(self):
return self._np_image.shape[0]
def draw_poly(image, polygon, color=255):
"""Draw a polygon in the given color at the given location"""
pil_image = fromarray(image)
validated_color = color
draw = ImageDraw(pil_image)
if len(image.shape) > 2 and image.shape[2] > 1:
validated_color = tuple(color)
draw.polygon(polygon.boundary.coords, fill=validated_color, outline=validated_color)
return np.asarray(pil_image)
def draw_square_by_corner(image, side, top_left, color):
top_left = (top_left[1], top_left[0])
top_right = (top_left[0] + side, top_left[1])
bottom_left = (top_left[0], top_left[1] + side)
bottom_right = (top_left[0] + side, top_left[1] + side)
p = Polygon([top_left, top_right, bottom_right, bottom_left, top_left])
return draw_poly(image, p, color)
def draw_all_poly(image, dim=10, vgap=2):
h, w = image.shape
i = 0
shifted = False
print("drawing", end="", flush=True)
pcount = 0
while i < h - dim:
j = 0
while j < w - dim:
image = draw_square_by_corner(image, dim, (i, j + (dim if shifted else 0)), color=255)
pcount += 1
j += 2 * dim
i += dim + vgap
        shifted = not shifted
print("\rdrawing: {:3.2f}%".format(100 * i / h), end="", flush=True)
print()
print(pcount)
return image, pcount
class MySegmenter(Segmenter):
def segment(self, mask):
return (mask > 0).astype(np.uint8)
def benchmark(img, tile_div=10):
image = NumpyImage(img)
builder = SSLWorkflowBuilder()
builder.set_distance_tolerance(1)
builder.set_overlap(0)
builder.set_tile_size(img.shape[0] // tile_div, img.shape[1] // tile_div)
builder.set_background_class(0)
builder.set_n_jobs(1)
builder.set_segmenter(MySegmenter())
workflow = builder.get()
times = list()
n_tests = 10
for _ in range(n_tests):
start = timer()
results = workflow.process(image)
times.append(timer() - start)
print("processed in {}s".format(sum(times) / n_tests))
def many_small():
h, w = 2000, 2000
np_image = np.zeros([h, w], dtype=np.uint8)
np_image, _ = draw_all_poly(np_image)
print("-------------------------------------")
print("img 500x500, tiles 100x100")
benchmark(np_image[:500, :500], tile_div=5)
print("--")
print("img 1000x1000, tiles 100x100")
benchmark(np_image[:1000, :1000], tile_div=10)
print("--")
print("img 2000x2000, tiles 100x100")
benchmark(np_image, tile_div=20)
print("--")
print("img 2000x2000, tiles 200x200")
benchmark(np_image, tile_div=10)
print("--")
def few_large():
h, w = 2000, 2000
np_image = np.zeros([h, w], dtype=np.uint8)
np_image, _ = draw_all_poly(np_image, dim=h // 20)
print("-------------------------------------")
print("img 500x500, tiles 100x100")
benchmark(np_image[:500, :500], tile_div=5)
print("--")
print("img 1000x1000, tiles 100x100")
benchmark(np_image[:1000, :1000], tile_div=10)
print("--")
print("img 2000x2000, tiles 100x100")
benchmark(np_image, tile_div=20)
print("--")
print("img 2000x2000, tiles 200x200")
benchmark(np_image, tile_div=10)
print("--")
if __name__ == "__main__":
few_large()
many_small()
"""
   | SLDC | Poly |   1.3   |   1.4   |
---|------|------|---------|---------|
 M | (a)  | ~1k  |  1.324s |  0.254s |
 A | (b)  | ~4k  |  5.576s |  1.062s |
 N | (c)  | ~17k | 25.430s |  4.494s |
 Y | (d)  | ~17k | 59.562s |  3.110s |
---|------|------|---------|---------|
 F | (a)  | ~12  |  0.051s |  0.055s |
 E | (b)  | ~50  |  0.182s |  0.223s |
 W | (c)  | ~200 |  0.791s |  1.001s |
   | (d)  | ~200 |  0.371s |  0.402s |

(a) 500 x 500 pxls, 100 x 100 tile size
(b) 1000 x 1000 pxls, 100 x 100 tile size
(c) 2000 x 2000 pxls, 200 x 200 tile size
(d) 2000 x 2000 pxls, 100 x 100 tile size
"""
|
waliens/sldc
|
benchmark/tile_size_poly_count.py
|
Python
|
mit
| 4,493
|
# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
#
# Author: Christian Schwede <christian.schwede@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from swift.common.swob import wsgify
from swift.common.utils import split_path
from swift.proxy.controllers.base import get_container_info, get_object_info
class MetadataHeadersMiddleware(object):
def __init__(self, app, conf, *args, **kwargs):
self.app = app
header_container_metadata = conf.get('header_container_metadata', '')
self.header_container_metadata = [
name.strip()
for name in header_container_metadata.split(',')
if name.strip()]
header_object_metadata = conf.get('header_object_metadata', '')
self.header_object_metadata = [
name.strip()
for name in header_object_metadata.split(',')
if name.strip()]
@wsgify
def __call__(self, request):
try:
(version, account, container, objname) = split_path(
request.path_info, 1, 4, True)
except ValueError:
return self.app
if container and self.header_container_metadata:
container_info = get_container_info(request.environ, self.app)
for key in self.header_container_metadata:
value = container_info.get('meta', {}).get(key)
if value:
keyname = 'X-CONTAINER-METADATA-%s' % key.upper()
request.headers[keyname] = value
if objname and self.header_object_metadata:
object_info = get_object_info(request.environ, self.app)
for key in self.header_object_metadata:
value = object_info.get('meta', {}).get(key)
if value:
keyname = 'X-OBJECT-METADATA-%s' % key
request.headers[keyname] = value
return self.app
def filter_factory(global_conf, **local_conf):
"""Returns a WSGI filter app for use with paste.deploy."""
conf = global_conf.copy()
conf.update(local_conf)
def metadata_header_filter(app):
return MetadataHeadersMiddleware(app, conf)
return metadata_header_filter
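# A minimal paste.deploy configuration sketch (the section name, egg name
# and metadata keys below are assumptions, not part of this module):
#
#   [filter:metadataheaders]
#   use = egg:metadataheaders#metadata_header_filter
#   header_container_metadata = owner, department
#   header_object_metadata = checksum
#
# With the filter in the proxy pipeline, a container metadata key "owner"
# would surface on matching requests as the X-CONTAINER-METADATA-OWNER
# header.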
|
cschwede/swift-metadataheaders
|
metadataheaders/middleware.py
|
Python
|
apache-2.0
| 2,723
|
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
# Copyright (C) 2004 Robert Kaye
# Copyright (C) 2006 Lukáš Lalinský
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from heapq import (
heappop,
heappush,
)
import ntpath
from operator import itemgetter
import re
import sys
from PyQt5 import QtCore
from picard import config
from picard.const import QUERY_LIMIT
from picard.metadata import Metadata
from picard.similarity import similarity
from picard.util import (
album_artist_from_path,
format_time,
)
from picard.util.imagelist import (
add_metadata_images,
remove_metadata_images,
update_metadata_images,
)
from picard.ui.item import Item
class Cluster(QtCore.QObject, Item):
# Weights for different elements when comparing a cluster to a release
comparison_weights = {
'album': 17,
'albumartist': 6,
'totaltracks': 5,
'releasecountry': 2,
'format': 2,
}
def __init__(self, name, artist="", special=False, related_album=None, hide_if_empty=False):
QtCore.QObject.__init__(self)
self.item = None
self.metadata = Metadata()
self.metadata['album'] = name
self.metadata['albumartist'] = artist
self.metadata['totaltracks'] = 0
self.special = special
self.hide_if_empty = hide_if_empty
self.related_album = related_album
self.files = []
self.lookup_task = None
def __repr__(self):
if self.related_album:
return '<Cluster %s %r>' % (
self.related_album.id,
self.related_album.metadata[u"album"] + '/' + self.metadata['album']
)
return '<Cluster %r>' % self.metadata['album']
def __len__(self):
return len(self.files)
def _update_related_album(self, added_files=None, removed_files=None):
if self.related_album:
if added_files:
add_metadata_images(self.related_album, added_files)
if removed_files:
remove_metadata_images(self.related_album, removed_files)
self.related_album.update()
def add_files(self, files):
for file in files:
self.metadata.length += file.metadata.length
file._move(self)
file.update(signal=False)
if self.can_show_coverart:
file.metadata_images_changed.connect(self.update_metadata_images)
self.files.extend(files)
self.metadata['totaltracks'] = len(self.files)
self.item.add_files(files)
if self.can_show_coverart:
add_metadata_images(self, files)
self._update_related_album(added_files=files)
def add_file(self, file):
self.metadata.length += file.metadata.length
self.files.append(file)
self.metadata['totaltracks'] = len(self.files)
file._move(self)
file.update(signal=False)
if self.can_show_coverart:
file.metadata_images_changed.connect(self.update_metadata_images)
add_metadata_images(self, [file])
self.item.add_file(file)
self._update_related_album(added_files=[file])
def remove_file(self, file):
self.metadata.length -= file.metadata.length
self.files.remove(file)
self.metadata['totaltracks'] = len(self.files)
self.item.remove_file(file)
if not self.special and self.get_num_files() == 0:
self.tagger.remove_cluster(self)
if self.can_show_coverart:
file.metadata_images_changed.disconnect(self.update_metadata_images)
remove_metadata_images(self, [file])
self._update_related_album(removed_files=[file])
def update(self):
if self.item:
self.item.update()
def get_num_files(self):
return len(self.files)
def iterfiles(self, save=False):
for file in self.files:
yield file
def can_save(self):
"""Return if this object can be saved."""
if self.files:
return True
else:
return False
def can_remove(self):
"""Return if this object can be removed."""
return not self.special
def can_edit_tags(self):
"""Return if this object supports tag editing."""
return True
def can_analyze(self):
"""Return if this object can be fingerprinted."""
return any([_file.can_analyze() for _file in self.files])
def can_autotag(self):
return True
def can_refresh(self):
return False
def can_browser_lookup(self):
return not self.special
def can_view_info(self):
if self.files:
return True
else:
return False
def is_album_like(self):
return True
def column(self, column):
if column == 'title':
return '%s (%d)' % (self.metadata['album'], len(self.files))
elif (column == '~length' and self.special) or column == 'album':
return ''
elif column == '~length':
return format_time(self.metadata.length)
elif column == 'artist':
return self.metadata['albumartist']
return self.metadata[column]
def _lookup_finished(self, document, http, error):
self.lookup_task = None
try:
releases = document['releases']
except (KeyError, TypeError):
releases = None
mparms = {
'album': self.metadata['album']
}
# no matches
if not releases:
self.tagger.window.set_statusbar_message(
N_("No matching releases for cluster %(album)s"),
mparms,
timeout=3000
)
return
# multiple matches -- calculate similarities to each of them
match = sorted((self.metadata.compare_to_release(
release, Cluster.comparison_weights) for release in releases),
reverse=True, key=itemgetter(0))[0]
if match[0] < config.setting['cluster_lookup_threshold']:
self.tagger.window.set_statusbar_message(
N_("No matching releases for cluster %(album)s"),
mparms,
timeout=3000
)
return
self.tagger.window.set_statusbar_message(
N_("Cluster %(album)s identified!"),
mparms,
timeout=3000
)
self.tagger.move_files_to_album(self.files, match[1]['id'])
def lookup_metadata(self):
"""Try to identify the cluster using the existing metadata."""
if self.lookup_task:
return
self.tagger.window.set_statusbar_message(
N_("Looking up the metadata for cluster %(album)s..."),
{'album': self.metadata['album']}
)
self.lookup_task = self.tagger.mb_api.find_releases(self._lookup_finished,
artist=self.metadata['albumartist'],
release=self.metadata['album'],
tracks=str(len(self.files)),
limit=QUERY_LIMIT)
def clear_lookup_task(self):
if self.lookup_task:
self.tagger.webservice.remove_task(self.lookup_task)
self.lookup_task = None
@staticmethod
def cluster(files, threshold):
artistDict = ClusterDict()
albumDict = ClusterDict()
tracks = []
for file in files:
artist = file.metadata["albumartist"] or file.metadata["artist"]
album = file.metadata["album"]
# Improve clustering from directory structure if no existing tags
# Only used for grouping and to provide cluster title / artist - not added to file tags.
filename = file.filename
if config.setting["windows_compatibility"] or sys.platform == "win32":
filename = ntpath.splitdrive(filename)[1]
album, artist = album_artist_from_path(filename, album, artist)
# For each track, record the index of the artist and album within the clusters
tracks.append((artistDict.add(artist),
albumDict.add(album)))
artist_cluster_engine = ClusterEngine(artistDict)
artist_cluster_engine.cluster(threshold)
album_cluster_engine = ClusterEngine(albumDict)
album_cluster_engine.cluster(threshold)
# Arrange tracks into albums
albums = {}
for i, track in enumerate(tracks):
cluster = album_cluster_engine.getClusterFromId(track[1])
if cluster is not None:
albums.setdefault(cluster, []).append(i)
# Now determine the most prominent names in the cluster and build the
# final cluster list
for album_id, album in albums.items():
album_name = album_cluster_engine.getClusterTitle(album_id)
artist_max = 0
artist_id = None
artist_hist = {}
for track_id in album:
cluster = artist_cluster_engine.getClusterFromId(
tracks[track_id][0])
if cluster is not None:
cnt = artist_hist.get(cluster, 0) + 1
if cnt > artist_max:
artist_max = cnt
artist_id = cluster
artist_hist[cluster] = cnt
if artist_id is None:
artist_name = "Various Artists"
else:
artist_name = artist_cluster_engine.getClusterTitle(artist_id)
yield album_name, artist_name, (files[i] for i in album)
def update_metadata_images(self):
if self.can_show_coverart:
update_metadata_images(self)
class UnclusteredFiles(Cluster):
"""Special cluster for 'Unmatched Files' which have no PUID and have not been clustered."""
def __init__(self):
super().__init__(_("Unclustered Files"), special=True)
def add_files(self, files):
Cluster.add_files(self, files)
self.tagger.window.enable_cluster(self.get_num_files() > 0)
def add_file(self, file):
Cluster.add_file(self, file)
self.tagger.window.enable_cluster(self.get_num_files() > 0)
def remove_file(self, file):
Cluster.remove_file(self, file)
self.tagger.window.enable_cluster(self.get_num_files() > 0)
def lookup_metadata(self):
self.tagger.autotag(self.files)
def can_edit_tags(self):
return False
def can_autotag(self):
return len(self.files) > 0
def can_view_info(self):
return False
def can_remove(self):
return len(self.files) > 0
@property
def can_show_coverart(self):
return False
class ClusterList(list, Item):
"""A list of clusters."""
def __init__(self):
super().__init__()
def __hash__(self):
return id(self)
def iterfiles(self, save=False):
for cluster in self:
for file in cluster.iterfiles(save):
yield file
def can_save(self):
return len(self) > 0
def can_analyze(self):
        return any(cluster.can_analyze() for cluster in self)
def can_autotag(self):
return len(self) > 0
def can_browser_lookup(self):
return False
def lookup_metadata(self):
for cluster in self:
cluster.lookup_metadata()
class ClusterDict(object):
def __init__(self):
# word -> id index
self.words = {}
# id -> word, token index
self.ids = {}
# counter for new id generation
self.id = 0
self.regexp = re.compile(r'\W', re.UNICODE)
self.spaces = re.compile(r'\s', re.UNICODE)
def getSize(self):
return self.id
def tokenize(self, word):
token = self.regexp.sub('', word.lower())
return token if token else self.spaces.sub('', word.lower())
def add(self, word):
"""
Add a new entry to the cluster if it does not exist. If it
does exist, increment the count. Return the index of the word
        in the dictionary or -1 if the word is empty.
"""
if word == '':
return -1
token = self.tokenize(word)
if token == '':
return -1
try:
index, count = self.words[word]
self.words[word] = (index, count + 1)
except KeyError:
index = self.id
self.words[word] = (self.id, 1)
self.ids[index] = (word, token)
self.id = self.id + 1
return index
def getWord(self, index):
word = None
try:
word, token = self.ids[index]
except KeyError:
pass
return word
def getToken(self, index):
token = None
try:
word, token = self.ids[index]
except KeyError:
pass
return token
def getWordAndCount(self, index):
word = None
count = 0
try:
word, token = self.ids[index]
index, count = self.words[word]
except KeyError:
pass
return word, count
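# Illustrative sketch (not part of Picard proper): how ClusterDict.add() and
# tokenize() behave. Counts accumulate per exact word, while matching uses the
# lowercased token with non-word characters stripped.
def _clusterdict_demo():
    d = ClusterDict()
    assert d.add('') == -1                  # empty words are rejected
    i = d.add('Abbey Road')
    d.add('Abbey Road')                     # same word -> same id, count bumped
    assert d.getWordAndCount(i) == ('Abbey Road', 2)
    assert d.getToken(i) == 'abbeyroad'     # token used for similarity matching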
class ClusterEngine(object):
def __init__(self, clusterDict):
# the cluster dictionary we're using
self.clusterDict = clusterDict
# keeps track of unique cluster index
self.clusterCount = 0
# Keeps track of the clusters we've created
self.clusterBins = {}
# Index the word ids -> clusters
self.idClusterIndex = {}
def getClusterFromId(self, clusterid):
return self.idClusterIndex.get(clusterid)
def printCluster(self, cluster):
if cluster < 0:
print("[no such cluster]")
return
cluster_bin = self.clusterBins[cluster]
print(cluster, " -> ", ", ".join([("'" + self.clusterDict.getWord(i) + "'") for i in cluster_bin]))
def getClusterTitle(self, cluster):
if cluster < 0:
return ""
cluster_max = 0
maxWord = ''
for cluster_bin in self.clusterBins[cluster]:
word, count = self.clusterDict.getWordAndCount(cluster_bin)
if count >= cluster_max:
maxWord = word
cluster_max = count
return maxWord
def cluster(self, threshold):
# Keep the matches sorted in a heap
heap = []
for y in range(self.clusterDict.getSize()):
            for x in range(y):
                # x < y always holds here, so no self-comparison can occur
                c = similarity(self.clusterDict.getToken(x).lower(),
                               self.clusterDict.getToken(y).lower())
                if c >= threshold:
                    heappush(heap, ((1.0 - c), [x, y]))
QtCore.QCoreApplication.processEvents()
for i in range(self.clusterDict.getSize()):
word, count = self.clusterDict.getWordAndCount(i)
if word and count > 1:
self.clusterBins[self.clusterCount] = [i]
self.idClusterIndex[i] = self.clusterCount
self.clusterCount = self.clusterCount + 1
for i in range(len(heap)):
c, pair = heappop(heap)
c = 1.0 - c
            match0 = self.idClusterIndex.get(pair[0], -1)
            match1 = self.idClusterIndex.get(pair[1], -1)
# if neither item is in a cluster, make a new cluster
if match0 == -1 and match1 == -1:
self.clusterBins[self.clusterCount] = [pair[0], pair[1]]
self.idClusterIndex[pair[0]] = self.clusterCount
self.idClusterIndex[pair[1]] = self.clusterCount
self.clusterCount = self.clusterCount + 1
continue
# If cluster0 is in a bin, stick the other match into that bin
if match0 >= 0 and match1 < 0:
self.clusterBins[match0].append(pair[1])
self.idClusterIndex[pair[1]] = match0
continue
# If cluster1 is in a bin, stick the other match into that bin
if match1 >= 0 and match0 < 0:
self.clusterBins[match1].append(pair[0])
self.idClusterIndex[pair[0]] = match1
continue
# If both matches are already in two different clusters, merge the clusters
if match1 != match0:
self.clusterBins[match0].extend(self.clusterBins[match1])
for match in self.clusterBins[match1]:
self.idClusterIndex[match] = match0
del self.clusterBins[match1]
def can_refresh(self):
return False
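# Standalone sketch of the greedy merge scheme ClusterEngine.cluster() uses,
# reduced to its core (illustration only; difflib stands in for Picard's
# similarity() function). Pairs go onto a min-heap keyed by (1 - similarity),
# so the most similar pair is always merged first.
def _greedy_cluster_sketch(tokens, threshold=0.6):
    from difflib import SequenceMatcher
    heap, owner, bins = [], {}, {}
    for y in range(len(tokens)):
        for x in range(y):
            c = SequenceMatcher(None, tokens[x], tokens[y]).ratio()
            if c >= threshold:
                heappush(heap, (1.0 - c, (x, y)))
    next_id = 0
    while heap:
        _, (x, y) = heappop(heap)
        cx, cy = owner.get(x, -1), owner.get(y, -1)
        if cx == -1 and cy == -1:        # neither clustered: open a new bin
            bins[next_id] = [x, y]
            owner[x] = owner[y] = next_id
            next_id += 1
        elif cx >= 0 and cy == -1:       # attach y to x's bin
            bins[cx].append(y)
            owner[y] = cx
        elif cy >= 0 and cx == -1:       # attach x to y's bin
            bins[cy].append(x)
            owner[x] = cy
        elif cx != cy:                   # merge two existing bins
            bins[cx].extend(bins[cy])
            for m in bins[cy]:
                owner[m] = cx
            del bins[cy]
    return [[tokens[i] for i in b] for b in bins.values()]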
|
mineo/picard
|
picard/cluster.py
|
Python
|
gpl-2.0
| 17,531
|
from django.shortcuts import render
def home_page(request):
return render(request, 'launchers/launcher_home_page.html')
|
yurychu/business_manager
|
business_manager/launchers/views.py
|
Python
|
mit
| 126
|
# Standard Library Imports
import collections
from datetime import datetime, timedelta
import logging
import time
import json
import traceback
# 3rd Party Imports
import requests
from requests.packages.urllib3.util.retry import Retry
from gevent.lock import Semaphore
# Local Imports
from PokeAlarm import Unknown
from PokeAlarm.Utilities.GenUtils import synchronize_with
log = logging.getLogger('Gmaps')
class GMaps(object):
# Available travel modes for Distance Matrix calls
TRAVEL_MODES = frozenset(['walking', 'biking', 'driving', 'transit'])
# Maximum number of requests per second
_queries_per_second = 50
# How often to warn about going over query limit
_warning_window = timedelta(minutes=1)
def __init__(self, api_key):
self._key = api_key
        self._lock = Semaphore()  # instantiate the lock; the class itself was being assigned
# Create a session to handle connections
self._session = self._create_session()
# Sliding window for rate limiting
self._window = collections.deque(maxlen=self._queries_per_second)
self._time_limit = datetime.utcnow()
# Memoization dicts
self._geocode_hist = {}
self._reverse_geocode_hist = {}
self._dm_hist = {key: dict() for key in self.TRAVEL_MODES}
# TODO: Move into utilities
@staticmethod
def _create_session(retry_count=3, pool_size=3, backoff=.25):
""" Create a session to use connection pooling. """
        # Create a session for connection pooling and retry handling
        session = requests.Session()
# Reattempt connection on these statuses
status_forcelist = [500, 502, 503, 504]
# Define a Retry object to handle failures
retry_policy = Retry(
total=retry_count,
backoff_factor=backoff,
status_forcelist=status_forcelist
)
# Define an Adapter, to limit pool and implement retry policy
adapter = requests.adapters.HTTPAdapter(
max_retries=retry_policy,
pool_connections=pool_size,
pool_maxsize=pool_size
)
# Apply Adapter for all HTTPS (no HTTP for you!)
session.mount('https://', adapter)
return session
def _make_request(self, service, params=None):
""" Make a request to the GMAPs API. """
# Rate Limit - All APIs use the same quota
if len(self._window) == self._queries_per_second:
# Calculate elapsed time since start of window
elapsed_time = time.time() - self._window[0]
if elapsed_time < 1:
# Sleep off the difference
time.sleep(1 - elapsed_time)
# Create the correct url
url = u'https://maps.googleapis.com/maps/api/{}/json'.format(service)
# Add in the API key
if params is None:
params = {}
params['key'] = self._key
# Use the session to send the request
log.debug(u'{} request sending.'.format(service))
self._window.append(time.time())
request = self._session.get(url, params=params, timeout=3)
if not request.ok:
log.debug(u'Response body: {}'.format(
json.dumps(request.json(), indent=4, sort_keys=True)))
# Raise HTTPError
request.raise_for_status()
log.debug(u'{} request completed successfully with response {}.'
u''.format(service, request.status_code))
body = request.json()
if body['status'] == "OK" or body['status'] == "ZERO_RESULTS":
return body
elif body['status'] == "OVER_QUERY_LIMIT":
# self._time_limit = datetime.utcnow() + _warning_window
raise UserWarning(u'API Quota exceeded.')
else:
raise ValueError(u'Unexpected response status:\n {}'.format(body))
@synchronize_with()
def geocode(self, address, language='en'):
# type: (str, str) -> tuple
""" Returns 'lat,lng' associated with the name of the place. """
# Check for memoized results
address = address.lower()
if address in self._geocode_hist:
return self._geocode_hist[address]
# Set default in case something happens
latlng = None
try:
# Set parameters and make the request
params = {'address': address, 'language': language}
response = self._make_request('geocode', params)
# Extract the results and format into a dict
response = response.get('results', [])
response = response[0] if len(response) > 0 else {}
response = response.get('geometry', {})
response = response.get('location', {})
if 'lat' in response and 'lng' in response:
latlng = float(response['lat']), float(response['lng'])
# Memoize the results
self._geocode_hist[address] = latlng
except requests.exceptions.HTTPError as e:
log.error(u"Geocode failed with "
u"HTTPError: {}".format(e.message))
except requests.exceptions.Timeout as e:
log.error(u"Geocode failed with "
u"connection issues: {}".format(e.message))
except UserWarning:
log.error(u"Geocode failed because of exceeded quota.")
except Exception as e:
log.error(u"Geocode failed because "
u"unexpected error has occurred: "
u"{} - {}".format(type(e).__name__, e.message))
log.error(u"Stack trace: \n {}".format(traceback.format_exc()))
# Send back tuple
return latlng
_reverse_geocode_defaults = {
'street_num': Unknown.SMALL,
'street': Unknown.REGULAR,
'address': Unknown.REGULAR,
'address_eu': Unknown.REGULAR,
'postal': Unknown.REGULAR,
'neighborhood': Unknown.REGULAR,
'sublocality': Unknown.REGULAR,
'city': Unknown.REGULAR,
'county': Unknown.REGULAR,
'state': Unknown.REGULAR,
'country': Unknown.REGULAR
}
@synchronize_with()
def reverse_geocode(self, latlng, language='en'):
# type: (tuple) -> dict
""" Returns the reverse geocode DTS associated with 'lat,lng'. """
latlng = u'{:.5f},{:.5f}'.format(latlng[0], latlng[1])
# Check for memoized results
if latlng in self._reverse_geocode_hist:
return self._reverse_geocode_hist[latlng]
# Get defaults in case something happens
dts = self._reverse_geocode_defaults.copy()
try:
# Set parameters and make the request
params = {'latlng': latlng, 'language': language}
response = self._make_request('geocode', params)
# Extract the results and format into a dict
response = response.get('results', [])
response = response[0] if len(response) > 0 else {}
details = {}
            for item in response.get('address_components', []):
for category in item['types']:
details[category] = item['short_name']
# Note: for addresses on unnamed roads, EMPTY is preferred for
# 'street_num' and 'street' to avoid DTS looking weird
dts['street_num'] = details.get('street_number', Unknown.EMPTY)
dts['street'] = details.get('route', Unknown.EMPTY)
dts['address'] = u"{} {}".format(dts['street_num'], dts['street'])
dts['address_eu'] = u"{} {}".format(
dts['street'], dts['street_num']) # Europeans are backwards
dts['postal'] = details.get('postal_code', Unknown.REGULAR)
dts['neighborhood'] = details.get('neighborhood', Unknown.REGULAR)
dts['sublocality'] = details.get('sublocality', Unknown.REGULAR)
dts['city'] = details.get(
'locality', details.get('postal_town', Unknown.REGULAR))
dts['county'] = details.get(
'administrative_area_level_2', Unknown.REGULAR)
dts['state'] = details.get(
'administrative_area_level_1', Unknown.REGULAR)
dts['country'] = details.get('country', Unknown.REGULAR)
# Memoize the results
self._reverse_geocode_hist[latlng] = dts
except requests.exceptions.HTTPError as e:
log.error(u"Reverse Geocode failed with "
u"HTTPError: {}".format(e.message))
except requests.exceptions.Timeout as e:
log.error(u"Reverse Geocode failed with "
u"connection issues: {}".format(e.message))
except UserWarning:
log.error(u"Reverse Geocode failed because of exceeded quota.")
except Exception as e:
log.error(u"Reverse Geocode failed because "
u"unexpected error has occurred: "
u"{} - {}".format(type(e).__name__, e.message))
log.error(u"Stack trace: \n {}".format(traceback.format_exc()))
# Send back dts
return dts
@synchronize_with()
def distance_matrix(self, mode, origin, dest, lang, units):
# Check for valid mode
if mode not in self.TRAVEL_MODES:
raise ValueError(u"DM doesn't support mode '{}'.".format(mode))
# Estimate to about ~1 meter of accuracy
origin = u'{:.5f},{:.5f}'.format(origin[0], origin[1])
dest = u'{:.5f},{:.5f}'.format(dest[0], dest[1])
        # Check for memoized results (stored per travel mode)
        key = origin + u':' + dest
        if key in self._dm_hist[mode]:
            return self._dm_hist[mode][key]
# Set defaults in case something happens
dist_key = '{}_distance'.format(mode)
dur_key = '{}_duration'.format(mode)
dts = {dist_key: Unknown.REGULAR, dur_key: Unknown.REGULAR}
try:
# Set parameters and make the request
params = {
'mode': mode, 'origins': origin, 'destinations': dest,
'language': lang, 'units': units
}
# Extract the results and format into a dict
response = self._make_request('distancematrix', params)
response = response.get('rows', [])
response = response[0] if len(response) > 0 else {}
response = response.get('elements', [])
response = response[0] if len(response) > 0 else {}
            # Set the DTS
            dts[dist_key] = response.get(
                'distance', {}).get('text', Unknown.REGULAR)
            dts[dur_key] = response.get(
                'duration', {}).get('text', Unknown.REGULAR)
            # Memoize the results per travel mode (mirrors geocode above)
            self._dm_hist[mode][key] = dts
except requests.exceptions.HTTPError as e:
log.error(u"Distance Matrix failed with "
u"HTTPError: {}".format(e.message))
except requests.exceptions.Timeout as e:
log.error(u"Distance Matrix failed with "
u"connection issues: {}".format(e.message))
except UserWarning:
log.error(u"Distance Matrix failed because of exceeded quota.")
except Exception as e:
log.error(u"Distance Matrix failed because "
u"unexpected error has occurred: "
u"{} - {}".format(type(e).__name__, e.message))
log.error(u"Stack trace: \n {}".format(traceback.format_exc()))
# Send back DTS
return dts
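# Standalone sketch (illustration, not part of PokeAlarm): the sliding-window
# rate limit inside GMaps._make_request(), isolated. A deque holds the last N
# request timestamps; once full, sleep until the oldest one leaves the
# one-second window before recording a new request.
class RateLimiterSketch(object):
    def __init__(self, per_second=50):
        self._window = collections.deque(maxlen=per_second)

    def wait(self):
        if len(self._window) == self._window.maxlen:
            elapsed = time.time() - self._window[0]
            if elapsed < 1:
                time.sleep(1 - elapsed)
        self._window.append(time.time())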
|
blsmit5728/PokeAlarm
|
PokeAlarm/LocationServices/GMaps.py
|
Python
|
agpl-3.0
| 11,432
|
import numpy as np
import json
with open('db/cpt.json', 'rb') as infile:  # opened for reading; 'outfile' was a misnomer
    procHier = json.load(infile)
with open('db/icd.json', 'rb') as infile:
    icdHier = json.load(infile)
icdMap = dict([(icdHier[x]['level2'], {'desc': icdHier[x]['desc'], 'code': x}) for x in icdHier.keys()])
procMap = dict([(procHier[x]['level2'], {'desc': procHier[x]['desc'], 'code': x}) for x in procHier.keys()])

def printEntries(ind, comp):
    # Indices below 132 are ICD9 diagnoses; the rest are procedures.
    for id, magnitude in [(x, comp[x]) for x in ind]:
        if id < 132:
            print "  ICD9", icdMap[id]['desc'].ljust(70), magnitude
        else:
            print "  Proc", procMap[id - 132]['desc'].ljust(70), magnitude

def printComponents(pcs, l=5):
    # Print the top-l loadings of each pole for every component.
    p, k = pcs.shape
    print
    print
    for c in range(k):
        print
        print "[Component", c + 1, "]"
        comp = pcs[:, c]
        if c > 0:
            print "Positive Pole"
        ind = comp.argsort()[-l:].tolist()
        ind.reverse()
        printEntries(ind, comp)
        if c > 0:
            print "Negative Pole"
        ind = comp.argsort()[:l].tolist()
        printEntries(ind, comp)

# Get the top loadings for each component of the Q-ordered results
printComponents(np.loadtxt('results/cmsQOrder2.txt'))
# As in the original script, the next load is immediately overwritten, so the
# second pass also reports on cmsQOrder2 (cmsCompOrder3 is never printed).
pcs = np.loadtxt('results/cmsCompOrder3.txt')
pcs = np.loadtxt('results/cmsQOrder2.txt')
printComponents(pcs)
|
mitliagkas/pyliakmon
|
getTopics.py
|
Python
|
mit
| 2,508
|
""" Theano wrapper for your machine learning needs.
Available demos:
Classification:
1: Epoch-based Logistic Regression on the iris dataset.
        2: Logistic Regression with Stochastic Gradient Descent
Regression:
1: Epoch-based Linear Regression on the boston housing dataset.
        2: Linear Regression with Stochastic Gradient Descent
"""
# Names like X,y, X_train, y_train etc. are common in machine learning
# tasks. For better readability and comprehension, disable pylint on
# invalid names.
# pylint: disable=invalid-name
import time
import sys
import numpy as np
from sklearn.datasets import (fetch_mldata, load_boston, load_iris,
load_linnerud)
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import classification_report, mean_squared_error
import theano
from theano_wrapper.layers import (LogisticRegression, LinearRegression,
MultiLayerPerceptron, MultiLayerRegression)
from theano_wrapper.trainers import EpochTrainer, SGDTrainer
RANDOM_STATE = 42
EQ_BAR = '=' * 50
HASH_BAR = '#' * 50
# HELPER UTILITIES ###########################################################
def load_mnist_data():
""" Load the mnist handwritten digits using scikit-learn """
mnist = fetch_mldata('MNIST Original')
return train_test_split(
MinMaxScaler().fit_transform(mnist.data.astype(theano.config.floatX)),
mnist.target.astype(np.int32), test_size=0.25,
random_state=RANDOM_STATE)
def load_boston_data():
""" Load the boston house prices dataset using scikit-learn """
boston = load_boston()
return train_test_split(
MinMaxScaler().fit_transform(boston.data.astype(theano.config.floatX)),
boston.target.astype(theano.config.floatX), test_size=0.25,
random_state=RANDOM_STATE)
def load_iris_data():
""" Load the iris dataset using scikit-learn """
iris = load_iris()
return train_test_split(
MinMaxScaler().fit_transform(iris.data.astype(theano.config.floatX)),
iris.target.astype(np.int32), test_size=0.25,
random_state=RANDOM_STATE), iris.target_names
def load_linnerud_data():
""" Load the linnerud dataset using scikit-learn """
linnerud = load_linnerud()
return train_test_split(
MinMaxScaler().fit_transform(
linnerud.data.astype(theano.config.floatX)),
linnerud.target.astype(theano.config.floatX), test_size=0.25,
random_state=RANDOM_STATE)
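def minimal_usage_sketch():
    """ Added sketch, not one of the numbered demos: the fit/predict
    pattern every demo below follows. The trainer arguments here lean on
    library defaults rather than the tuned values used in the demos. """
    data, _ = load_iris_data()
    X_train, X_test, y_train, y_test = data
    clf = LogisticRegression(X_train.shape[1], len(np.unique(y_train)))
    trainer = EpochTrainer(clf, random=RANDOM_STATE)
    trainer.fit(X_train, y_train)
    return trainer.predict(X_test)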
# INTERACTIVE MODE ###########################################################
def demo(choice=None, __test=False):
""" Interactive Demo.
Usage:
demo(), and follow on-screen instructions
OR
demo(arg), where arg is the desired demo in the form "<c,r><0-9>"
and the first letter representing the task
(r for regression, c for classification) and the last
integer the n-th example.
ex. demo('c2')
"""
if not choice:
print("Hello, please make a demo choice:")
while True:
print("\t\t\t[r] for Regression, [c] for Classificaton, "
"[q] for quit")
choice = input().lower()
if choice == 'q':
break
elif choice in ('c', 'r'):
run_demo(choice, __test=__test)
elif choice == '':
print("\rPlease make a choice.")
else:
print("\rInvalid choice.")
else:
if isinstance(choice, str):
if len(choice) == 2 and choice[0] in ('c', 'r'):
try:
int(choice[1])
run_demo(choice[0], choice[1], __test)
except ValueError:
pass
def run_demo(task, example=None, __test=False):
""" Run a given demo interactive session or a specific example.
    Arguments:
task: (str) 'c' or 'r' for Classification or Regression
example: (str) number of the example to run
"""
if task == 'r':
regression_demos(example, __test)
elif task == 'c':
classification_demos(example, __test)
else:
return 1
return 0
# HELPER FUNCTIONS ###########################################################
def classification_demos(example=None, __test=False):
""" Run a classification demos interactive session or a specific example.
Arguements:
example: (str) number of example to run. if None, run the interactive
session
"""
def run_example(ex, __test):
""" Run an examples. Any addition examples should be added here """
examples = {
'1': epoch_logreg,
'2': sgd_logreg,
'3': mnist_epoch_logreg,
'4': mnist_sgd_logreg,
'5': mnist_mlp}
examples[ex](__test)
if example:
run_example(example, __test)
else:
while True:
print("\t\t\tEnter a choice, [p] for a list of available "
"demos, or [b] "
"to go back.")
choice = input().lower()
if choice == 'b':
return
elif choice == 'p':
print_available('clf')
elif choice in ['1', '2', '3', '4', '5']:
run_example(choice, False)
else:
print("Invalid choice.")
def regression_demos(example=None, __test=False):
""" Run a classification demos interactive session or a specific example.
Arguements:
example: (str) number of example to run. if None, run the interactive
session
"""
def run_example(ex, __test):
""" Run an example. Any additional examples should be added here. """
examples = {
'1': epoch_linear,
'2': sgd_linear,
'3': linnerud_linear_sgd,
'4': linnerud_mlr}
examples[ex](__test)
if example:
run_example(example, __test)
else:
while True:
print("\t\t\tEnter a choice, [p] for a list of available demos, "
"or [b] to go back.")
choice = input().lower()
if choice == 'b':
return
elif choice == 'p':
print_available('reg')
elif choice in ['1', '2', '3', '4']:
run_example(choice, False)
else:
print("Invalid choice.")
def print_available(wat):
""" Print available classification or regression demos """
if wat == 'clf':
# Print available classification demos
print("\nAvailable Classification demos:")
print(HASH_BAR)
print("==> 1:")
print("\tEpoch-based Logistic Regression on the Iris Dataset.")
print("==> 2:")
print("\tLogistic Regression with Stohastic Gradient Descent "
"on the Iris dataset.")
print("==> 3:")
print("\tEpoch-based Logistic Regression on the MNIST Dataset.")
print("\t\t*It has many more samples than the Iris dataset, so it ")
print("\t\t is a good example of why we need Stohastic "
"Gradient Descent")
print("==> 4:")
print("\tLogistic Regression with Stohastic Gradient Descent "
"on the MNIST dataset.")
print("==> 5:")
print("\tSingle layer multilayer perceptron on the MNIST dataset.")
print("")
elif wat == 'reg':
# Print available regression demos
print("\nAvailable Regression demos:")
print(HASH_BAR)
print("==> 1:")
print("\tEpoch-based Linear Regression on the Boston")
print("\thousing dataset.")
print("==> 2:")
print("\tLinear Regression with Stohastic Gradient Descent on the "
"Boston housing dataset.")
print("==> 3:")
print("\tLinear Regression with Stohastic Gradient Descent on the "
"linnerud multivariate dataset.")
print("==> 4:")
print("\tSingle layer multilayer regresson on the "
"linnerud multivariate dataset.")
print("")
else:
print_available('clf')
print_available('reg')
def epoch_logreg(__test):
""" Epoch-based Logistic Regression on the iris dataset """
print(EQ_BAR)
print("Classification demo using Logistic Regression and an "
"epoch-based trainer on the Iris dataset.")
print(EQ_BAR)
max_iter = 1 if __test else 200000
data, target_names = load_iris_data()
X_train, X_test, y_train, y_test = data
n_in = X_test.shape[1]
n_out = len(np.unique(y_train))
clf = LogisticRegression(n_in, n_out)
trainer = EpochTrainer(clf, alpha=0.004, patience=12000, max_iter=max_iter,
imp_thresh=0.986, random=RANDOM_STATE, verbose=10)
begin = time.time()
trainer.fit(X_train, y_train)
y_pred = trainer.predict(X_test)
print("\n"+classification_report(y_test, y_pred,
target_names=target_names))
print("Took {:.1f} seconds\n".format(time.time()-begin))
def sgd_logreg(__test):
""" Stohastic Gradient Descent Logistic Regression on the iris dataset """
print(EQ_BAR)
print("Classification demo using Logistic Regression and Stohastic "
"Gradient Descent on the Iris dataset.")
print(EQ_BAR)
max_iter = 1 if __test else 200000
data, target_names = load_iris_data()
X_train, X_test, y_train, y_test = data
n_in = X_test.shape[1]
n_out = len(np.unique(y_train))
clf = LogisticRegression(n_in, n_out)
trainer = SGDTrainer(clf, batch_size=2, alpha=0.03, patience=10000,
max_iter=max_iter, imp_thresh=0.999,
random=RANDOM_STATE, verbose=3)
begin = time.time()
trainer.fit(X_train, y_train)
y_pred = trainer.predict(X_test)
print("\n"+classification_report(y_test, y_pred,
target_names=target_names))
print("Took {:.1f} seconds\n".format(time.time()-begin))
def mnist_epoch_logreg(__test):
""" Epoch-based Logistic Regression on the MNIST dataset """
print(EQ_BAR)
print("Classification demo using Logistic Regression and an "
"epoch-based trainer on the MNIST dataset.")
print(EQ_BAR)
max_iter = 1 if __test else 200000
X_train, X_test, y_train, y_test = load_mnist_data()
n_in = X_test.shape[1]
n_out = len(np.unique(y_train))
clf = LogisticRegression(n_in, n_out)
trainer = EpochTrainer(clf, alpha=0.9, patience=50, max_iter=max_iter,
imp_thresh=0.95, random=RANDOM_STATE, verbose=1)
begin = time.time()
trainer.fit(X_train, y_train)
y_pred = trainer.predict(X_test)
print("\n"+classification_report(y_test, y_pred))
print("Took {:.1f} seconds\n".format(time.time()-begin))
def mnist_sgd_logreg(__test):
""" Stohastic Gradient Descent Logistic Regression on the MNIST digits
database
"""
print(EQ_BAR)
print("Classification demo using Logistic Regression and Stohastic "
"Gradient Descent on the MNIST digits dataset.")
print(EQ_BAR)
max_iter = 1 if __test else 100000
X_train, X_test, y_train, y_test = load_mnist_data()
n_in = X_test.shape[1]
n_out = len(np.unique(y_train))
clf = LogisticRegression(n_in, n_out)
trainer = SGDTrainer(clf, batch_size=100, alpha=0.5, patience=1000,
max_iter=max_iter,
imp_thresh=0.99, random=RANDOM_STATE, verbose=3)
begin = time.time()
trainer.fit(X_train, y_train)
y_pred = trainer.predict(X_test)
print("\n"+classification_report(y_test, y_pred))
print("Took {:.1f} seconds\n".format(time.time()-begin))
def mnist_mlp(__test):
""" Stohastic Gradient Descent Multilayer Perceptron on the MNIST digits
database
"""
print(EQ_BAR)
print("Classification demo using a Multilayer Perceptron and Stohastic "
"Gradient Descent on the MNIST digits dataset.")
print(EQ_BAR)
max_iter = 1 if __test else 100000
X_train, X_test, y_train, y_test = load_mnist_data()
n_in = X_test.shape[1]
n_out = len(np.unique(y_train))
clf = MultiLayerPerceptron(n_in, int(n_in/8), n_out)
trainer = SGDTrainer(clf, batch_size=100, alpha=0.7, patience=500,
max_iter=max_iter,
imp_thresh=0.9, random=RANDOM_STATE, verbose=3)
begin = time.time()
trainer.fit(X_train, y_train)
y_pred = trainer.predict(X_test)
print("\n"+classification_report(y_test, y_pred))
print("Took {:.1f} seconds\n".format(time.time()-begin))
def epoch_linear(__test):
""" Epoch-based Linear Regression on the Boston housing dataset. """
print(EQ_BAR)
print("Regression demo using Linear Regression and an "
"epoch-based trainer on the Boston Housing dataset.")
print(EQ_BAR)
max_iter = 1 if __test else 100000
X_train, X_test, y_train, y_test = load_boston_data()
n_in = X_test.shape[1]
clf = LinearRegression(n_in, 1)
trainer = EpochTrainer(clf, alpha=0.1, patience=500, max_iter=max_iter,
imp_thresh=1, random=RANDOM_STATE, verbose=10)
begin = time.time()
trainer.fit(X_train, y_train)
y_pred = trainer.predict(X_test)
print("\nRMSE: {}".format(np.sqrt(mean_squared_error(y_test, y_pred))))
print("Took {:.1f} seconds\n".format(time.time()-begin))
def sgd_linear(__test):
""" Stohastic Gradient Descent Linear Regression on the
Boston housing dataset.
"""
print(EQ_BAR)
print("Regression demo using Linear Regression and a "
"Stohastic Gradient Descent trainer on the Boston Housing dataset.")
print(EQ_BAR)
max_iter = 1 if __test else 100000
X_train, X_test, y_train, y_test = load_boston_data()
n_in = X_test.shape[1]
clf = LinearRegression(n_in, 1)
trainer = SGDTrainer(clf, batch_size=10, alpha=0.08, patience=200,
imp_thresh=0.999999, max_iter=max_iter,
random=RANDOM_STATE, verbose=10)
begin = time.time()
trainer.fit(X_train, y_train)
y_pred = trainer.predict(X_test)
print("\nRMSE: {}".format(np.sqrt(mean_squared_error(y_test, y_pred))))
print("Took {:.1f} seconds\n".format(time.time()-begin))
def linnerud_linear_sgd(__test):
""" Stohastic Gradient Descent Linear Regression on the
linnerud multivariate dataset.
"""
print(EQ_BAR)
print("Regression demo using Linear Regression and a "
"Stohastic Gradient Descent trainer on the linnerud "
"multivariate dataset.")
print(EQ_BAR)
max_iter = 1 if __test else 100000
X_train, X_test, y_train, y_test = load_linnerud_data()
n_in = X_test.shape[1]
n_out = y_test.shape[1]
clf = LinearRegression(n_in, n_out)
trainer = SGDTrainer(clf, batch_size=3, alpha=0.0005, patience=100,
imp_thresh=1, max_iter=max_iter,
random=RANDOM_STATE, verbose=3)
begin = time.time()
trainer.fit(X_train, y_train)
y_pred = trainer.predict(X_test)
print("\nRMSE: {}".format(np.sqrt(mean_squared_error(y_test, y_pred))))
print("Took {:.1f} seconds\n".format(time.time()-begin))
def linnerud_mlr(__test):
""" Stohastic Gradient Descent Multilayer Regression on the
linnerud multivariate dataset.
"""
print(EQ_BAR)
print("Regression demo using Multilayer Regression and a "
"Stohastic Gradient Descent trainer on the linnerud "
"multivariate dataset.")
print(EQ_BAR)
max_iter = 1 if __test else 100000
X_train, X_test, y_train, y_test = load_linnerud_data()
n_in = X_test.shape[1]
n_out = y_test.shape[1]
clf = MultiLayerRegression(n_in, 10, n_out)
trainer = SGDTrainer(clf, batch_size=1, alpha=0.01, patience=1000,
imp_thresh=0.99999, max_iter=max_iter,
random=RANDOM_STATE, verbose=1)
begin = time.time()
trainer.fit(X_train, y_train)
y_pred = trainer.predict(X_test)
print("\nRMSE: {}".format(np.sqrt(mean_squared_error(y_test, y_pred))))
print("Took {:.1f} seconds\n".format(time.time()-begin))
if __name__ == "__main__":
demo()
sys.exit(0)
|
sotlampr/theano-wrapper
|
theano_wrapper/demo.py
|
Python
|
mit
| 16,526
|
"""
Stack based upon linked list
基于链表实现的栈
Author: Wenru
"""
from typing import Optional
class Node:
def __init__(self, data: int, next=None):
self._data = data
self._next = next
class LinkedStack:
"""A stack based upon singly-linked list.
"""
def __init__(self):
self._top: Node = None
def push(self, value: int):
new_top = Node(value)
new_top._next = self._top
self._top = new_top
def pop(self) -> Optional[int]:
if self._top:
value = self._top._data
self._top = self._top._next
return value
def __repr__(self) -> str:
current = self._top
nums = []
while current:
nums.append(current._data)
current = current._next
return " ".join(f"{num}]" for num in nums)
if __name__ == "__main__":
stack = LinkedStack()
for i in range(9):
stack.push(i)
print(stack)
for _ in range(3):
stack.pop()
print(stack)
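    # Added illustration: pop() on an empty stack just returns None, so the
    # stack can be drained safely (a sketch matching the class above).
    while stack.pop() is not None:
        pass
    print(stack)  # now prints an empty line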
|
wangzheng0822/algo
|
python/08_stack/linked_stack.py
|
Python
|
apache-2.0
| 1,071
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio-Query-Parser.
# Copyright (C) 2014, 2015, 2016 CERN.
#
# Invenio-Query-Parser is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio-Query-Parser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Version information for *Invenio-Query-Parser* package.
This file is imported by ``invenio_query_parser.__init__``, and parsed by
``setup.py`` as well as ``docs/conf.py``.
"""
# Do not change the format of this next line. Doing so risks breaking
# setup.py and docs/conf.py
__version__ = "0.6.1.dev20160418"
|
tiborsimko/invenio-query-parser
|
invenio_query_parser/version.py
|
Python
|
gpl-2.0
| 1,335
|
# -*- coding: utf-8 -*-
"""
Photo Catalog v 1.0 (photocat)
Copyright (c) Karol Będkowski, 2004-2007
This file is part of Photo Catalog
"""
__author__ = 'Karol Będkowski'
__copyright__ = 'Copyright (C) Karol Będkowski 2006'
__revision__ = '$Id$'
import weakref
def create_weakref_proxy(obj):
if not obj or isinstance(obj, weakref.ProxyType):
return obj
return weakref.proxy(obj)
# vim: encoding=utf8: ff=unix:
|
KarolBedkowski/photocat
|
photocat/lib/hlpweakref.py
|
Python
|
gpl-2.0
| 426
|
from __future__ import absolute_import
import unittest
import time
from testutils import ADMIN_CLIENT, suppress_urllib3_warning
from testutils import TEARDOWN
from testutils import harbor_server
from library.user import User
from library.project import Project
from library.repository import Repository
from library.base import _assert_status_code
from library.repository import push_special_image_to_project
from library.artifact import Artifact
from library.gc import GC
class TestProjects(unittest.TestCase):
@suppress_urllib3_warning
def setUp(self):
self.gc = GC()
self.project = Project()
self.user = User()
self.repo = Repository()
self.artifact = Artifact()
self.repo_name = "test_repo"
self.repo_name_untag = "test_untag"
self.tag = "v1.0"
@unittest.skipIf(TEARDOWN == False, "Test data won't be erased.")
def tearDown(self):
#2. Delete project(PA);
self.project.delete_project(TestProjects.project_gc_id, **TestProjects.USER_GC_CLIENT)
#3. Delete user(UA);
self.user.delete_user(TestProjects.user_gc_id, **ADMIN_CLIENT)
def testGarbageCollection(self):
"""
Test case:
Garbage Collection
Test step and expected result:
1. Create a new user(UA);
2. Create project(PA) and project(PB) by user(UA);
            3. Push an image to project(PA) and then delete the repository by admin;
            4. Get repositories by user(UA); it should return nothing;
            5. Trigger a garbage collection operation;
            6. Check that the garbage collection job finished;
            7. Get the garbage collection log and check that a number of files were deleted;
            8. Push an image to project(PB) by admin and delete its only tag;
            9. Trigger a garbage collection operation;
            10. Check that the garbage collection job finished;
            11. The repository with the untagged image should still be there;
            12. But no artifacts should remain in the repository.
Tear down:
1. Delete project(PA);
2. Delete user(UA).
"""
url = ADMIN_CLIENT["endpoint"]
admin_name = ADMIN_CLIENT["username"]
admin_password = ADMIN_CLIENT["password"]
user_gc_password = "Aa123456"
#1. Create a new user(UA);
TestProjects.user_gc_id, user_gc_name = self.user.create_user(user_password = user_gc_password, **ADMIN_CLIENT)
TestProjects.USER_GC_CLIENT=dict(endpoint = url, username = user_gc_name, password = user_gc_password)
#2. Create project(PA) and project(PB) by user(UA);
TestProjects.project_gc_id, TestProjects.project_gc_name = self.project.create_project(metadata = {"public": "false"}, **TestProjects.USER_GC_CLIENT)
TestProjects.project_gc_untag_id, TestProjects.project_gc_untag_name = self.project.create_project(metadata = {"public": "false"}, **TestProjects.USER_GC_CLIENT)
        #3. Push an image to project(PA) and then delete the repository by admin;
push_special_image_to_project(TestProjects.project_gc_name, harbor_server, admin_name, admin_password, self.repo_name, ["latest", "v1.2.3"])
self.repo.delete_repository(TestProjects.project_gc_name, self.repo_name, **TestProjects.USER_GC_CLIENT)
        #4. Get repositories by user(UA); it should return nothing;
repo_data = self.repo.list_repositories(TestProjects.project_gc_name, **TestProjects.USER_GC_CLIENT)
_assert_status_code(len(repo_data), 0)
        #8. Push an image to project(PB) by admin and delete its only tag;
push_special_image_to_project(TestProjects.project_gc_untag_name, harbor_server, admin_name, admin_password, self.repo_name_untag, [self.tag])
self.artifact.delete_tag(TestProjects.project_gc_untag_name, self.repo_name_untag, self.tag, self.tag, **ADMIN_CLIENT)
        #5. Trigger a garbage collection operation;
gc_id = self.gc.gc_now(**ADMIN_CLIENT)
        #6. Check that the garbage collection job finished;
self.gc.validate_gc_job_status(gc_id, "Success", **ADMIN_CLIENT)
        #7. Get the garbage collection log and check that a number of files were deleted;
self.gc.validate_deletion_success(gc_id, **ADMIN_CLIENT)
artifacts = self.artifact.list_artifacts(TestProjects.project_gc_untag_name, self.repo_name_untag, **TestProjects.USER_GC_CLIENT)
_assert_status_code(len(artifacts), 1)
time.sleep(5)
        #9. Trigger a garbage collection operation;
gc_id = self.gc.gc_now(is_delete_untagged=True, **ADMIN_CLIENT)
        #10. Check that the garbage collection job finished;
self.gc.validate_gc_job_status(gc_id, "Success", **ADMIN_CLIENT)
        #7. Get the garbage collection log again and check that files were deleted;
self.gc.validate_deletion_success(gc_id, **ADMIN_CLIENT)
        #11. The repository with the untagged image should still be there;
repo_data_untag = self.repo.list_repositories(TestProjects.project_gc_untag_name, **TestProjects.USER_GC_CLIENT)
_assert_status_code(len(repo_data_untag), 1)
self.assertEqual(TestProjects.project_gc_untag_name + "/" + self.repo_name_untag , repo_data_untag[0].name)
        #12. But no artifacts should remain in the repository.
artifacts = self.artifact.list_artifacts(TestProjects.project_gc_untag_name, self.repo_name_untag, **TestProjects.USER_GC_CLIENT)
self.assertEqual(artifacts,[])
if __name__ == '__main__':
unittest.main()
|
wy65701436/harbor
|
tests/apitests/python/test_garbage_collection.py
|
Python
|
apache-2.0
| 5,516
|
from google.appengine.ext import db
class Comment(db.Model):
content = db.TextProperty(required=True)
created = db.DateTimeProperty(auto_now_add=True)
last_modified = db.DateTimeProperty(auto_now=True)
user_id = db.IntegerProperty(required=True)
user_name = db.TextProperty(required=True)
|
mr-karan/Udacity-FullStack-ND004
|
Project3/udacityblog-159515/models/comment.py
|
Python
|
mit
| 309
|
#!/usr/bin/env python3
import os
import sys
import xml.dom
from xml.dom import minidom
# STRINGTABLE DIAG TOOL
# Author: KoffeinFlummi
# ---------------------
# Counts duplicates stringtable entries
def check_module(projectpath, module):
""" Checks the given module for all the different languages. """
localized = []
stringtablepath = os.path.join(projectpath, module, "stringtable.xml")
try:
xmldoc = minidom.parse(stringtablepath)
except IOError:
return 0
keys = xmldoc.getElementsByTagName("Key")
duplicates = 0
for key in keys:
children = key.childNodes
entries = []
for c in range(children.length):
entries.append(children.item(c))
entries = list(filter(lambda x: x.nodeType == x.ELEMENT_NODE, entries))
entries = list(map(lambda x: str(x.nodeName).lower(), entries))
diff = len(entries) - len(list(set(entries)))
duplicates += diff
if diff > 0:
print(key.getAttribute("ID"))
return duplicates
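# Illustrative sketch (not part of the tool itself): the same duplicate test
# applied to an inline stringtable snippet; the element layout below is an
# assumed minimal example, where <German> appearing twice under one Key
# counts as one duplicate.
def _duplicates_demo():
    doc = minidom.parseString(
        '<Project><Package><Container>'
        '<Key ID="STR_demo"><English>a</English>'
        '<German>b</German><German>c</German></Key>'
        '</Container></Package></Project>')
    key = doc.getElementsByTagName("Key")[0]
    entries = [n.nodeName.lower() for n in key.childNodes
               if n.nodeType == n.ELEMENT_NODE]
    return len(entries) - len(set(entries))   # -> 1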
def main():
scriptpath = os.path.realpath(__file__)
projectpath = os.path.dirname(os.path.dirname(scriptpath))
projectpath = os.path.join(projectpath, "addons")
print("###############################")
print("# Stringtable Duplicates Tool #")
print("###############################\n")
duplicates = 0
for module in os.listdir(projectpath):
d = check_module(projectpath, module)
print("# {} {}".format(module.ljust(20), d))
duplicates += d
print("\nTotal number of duplicates: {}".format(duplicates))
if __name__ == "__main__":
main()
|
MikeMatrix/ACE3
|
tools/stringtableduplicates.py
|
Python
|
gpl-2.0
| 1,652
|
#
# Copyright (c) 2021 Project CHIP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import threading
import time
import typing
from dataclasses import dataclass
from datetime import datetime
from enum import Enum, auto
from random import randrange
TEST_NODE_ID = '0x12344321'
class App:
def __init__(self, runner, command):
self.process = None
self.outpipe = None
self.runner = runner
self.command = command
self.cv_stopped = threading.Condition()
self.stopped = False
self.lastLogIndex = 0
def start(self, discriminator):
if not self.process:
self.process = None
process, outpipe, errpipe = self.__startServer(
self.runner, self.command, discriminator)
self.waitForAnyAdvertisement(process, outpipe)
self.__updateSetUpCode(outpipe)
self.process = process
self.outpipe = outpipe
with self.cv_stopped:
self.stopped = False
self.cv_stopped.notify()
return True
return False
def stop(self):
if self.process:
with self.cv_stopped:
self.stopped = True
self.cv_stopped.notify()
self.process.kill()
self.process.wait(10)
self.process = None
self.outpipe = None
return True
return False
def reboot(self, discriminator):
if self.process:
self.stop()
self.start(discriminator)
return True
return False
def factoryReset(self):
storage = '/tmp/chip_kvs'
if os.path.exists(storage):
os.unlink(storage)
return True
def waitForAnyAdvertisement(self, process, outpipe):
self.__waitFor("mDNS service published:", process, outpipe)
def waitForCommissionableAdvertisement(self):
self.__waitFor("mDNS service published: _matterc._udp",
self.process, self.outpipe)
return True
def waitForOperationalAdvertisement(self):
self.__waitFor("mDNS service published: _matter._tcp",
self.process, self.outpipe)
return True
def kill(self):
if self.process:
self.process.kill()
def wait(self, timeout=None):
while True:
code = self.process.wait(timeout)
with self.cv_stopped:
if not self.stopped:
return code
                # When the server is manually stopped, process waiting is
                # overridden so that other processes that depend on the
                # accessory being alive do not stop.
while self.stopped:
self.cv_stopped.wait()
def __startServer(self, runner, command, discriminator):
logging.debug(
'Executing application under test with discriminator %s.' %
discriminator)
app_cmd = command + ['--discriminator', str(discriminator)]
app_cmd = app_cmd + ['--interface-id', str(-1)]
return runner.RunSubprocess(app_cmd, name='APP ', wait=False)
def __waitFor(self, waitForString, server_process, outpipe):
logging.debug('Waiting for %s' % waitForString)
start_time = time.time()
ready, self.lastLogIndex = outpipe.CapturedLogContains(
waitForString, self.lastLogIndex)
while not ready:
if server_process.poll() is not None:
died_str = ('Server died while waiting for %s, returncode %d' %
(waitForString, server_process.returncode))
logging.error(died_str)
raise Exception(died_str)
if time.time() - start_time > 10:
raise Exception('Timeout while waiting for %s' % waitForString)
time.sleep(0.1)
ready, self.lastLogIndex = outpipe.CapturedLogContains(
waitForString, self.lastLogIndex)
logging.debug('Success waiting for: %s' % waitForString)
def __updateSetUpCode(self, outpipe):
qrLine = outpipe.FindLastMatchingLine('.*SetupQRCode: *\\[(.*)]')
if not qrLine:
raise Exception("Unable to find QR code")
self.setupCode = qrLine.group(1)
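# Generic sketch of the polling pattern in App.__waitFor() above (added for
# illustration, not used by the harness): poll a predicate until it holds,
# failing fast if the watched process dies or the deadline passes.
def wait_until(predicate, process, timeout=10.0, interval=0.1):
    deadline = time.time() + timeout
    while not predicate():
        if process.poll() is not None:
            raise Exception('Process died while waiting')
        if time.time() > deadline:
            raise Exception('Timeout while waiting')
        time.sleep(interval)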
class TestTarget(Enum):
ALL_CLUSTERS = auto()
TV = auto()
DOOR_LOCK = auto()
@dataclass
class ApplicationPaths:
chip_tool: typing.List[str]
all_clusters_app: typing.List[str]
door_lock_app: typing.List[str]
tv_app: typing.List[str]
@dataclass
class CaptureLine:
when: datetime
source: str
line: str
class ExecutionCapture:
"""
Keeps track of output lines in a process, to help debug failures.
"""
def __init__(self):
self.lock = threading.Lock()
self.captures = []
def Log(self, source, line):
with self.lock:
self.captures.append(CaptureLine(
when=datetime.now(),
source=source,
line=line.strip('\n')
))
def LogContents(self):
logging.error('================ CAPTURED LOG START ==================')
with self.lock:
for entry in self.captures:
logging.error('%02d:%02d:%02d.%03d - %-10s: %s',
entry.when.hour,
entry.when.minute,
entry.when.second,
entry.when.microsecond/1000,
entry.source,
entry.line
)
logging.error('================ CAPTURED LOG END ====================')
@dataclass
class TestDefinition:
name: str
run_name: str
target: TestTarget
def Run(self, runner, apps_register, paths: ApplicationPaths):
"""
Executes the given test case using the provided runner for execution.
"""
runner.capture_delegate = ExecutionCapture()
try:
if self.target == TestTarget.ALL_CLUSTERS:
app_cmd = paths.all_clusters_app
elif self.target == TestTarget.TV:
app_cmd = paths.tv_app
elif self.target == TestTarget.DOOR_LOCK:
app_cmd = paths.door_lock_app
else:
raise Exception("Unknown test target - "
"don't know which application to run")
tool_cmd = paths.chip_tool
files_to_unlink = [
'/tmp/chip_tool_config.ini',
'/tmp/chip_tool_config.alpha.ini',
'/tmp/chip_tool_config.beta.ini',
'/tmp/chip_tool_config.gamma.ini',
]
for f in files_to_unlink:
if os.path.exists(f):
os.unlink(f)
app = App(runner, app_cmd)
# Remove server application storage (factory reset),
# so it will be commissionable again.
app.factoryReset()
app.start(str(randrange(1, 4096)))
apps_register.add("default", app)
runner.RunSubprocess(
tool_cmd + ['pairing', 'qrcode', TEST_NODE_ID, app.setupCode],
name='PAIR', dependencies=[apps_register])
runner.RunSubprocess(
tool_cmd + ['tests', self.run_name],
name='TEST', dependencies=[apps_register])
except Exception:
logging.error("!!!!!!!!!!!!!!!!!!!! ERROR !!!!!!!!!!!!!!!!!!!!!!")
runner.capture_delegate.LogContents()
raise
finally:
apps_register.killAll()
apps_register.factoryResetAll()
apps_register.removeAll()
|
project-chip/connectedhomeip
|
scripts/tests/chiptest/test_definition.py
|
Python
|
apache-2.0
| 8,373
|
# _*_ coding:utf-8 _*_
#-------------------------------------------------------------------------------
# Name: albonazionalegestoriambientali_it
# Purpose:     Fetch vehicle (mezzi) records for each impresa and store them in SQLite
#
# Author: Ramakrishna
#
# Created: 21/Feb/2016
# Copyright: (c) Ramakrishna 2016
# Licence: <your licence>
#-------------------------------------------------------------------------------
import requests, json, time, sqlite3, random
import socks, socket
socks.setdefaultproxy(proxy_type=socks.PROXY_TYPE_SOCKS5, addr="127.0.0.1", port=9150)
socket.socket = socks.socksocket
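# Optional sanity check (added sketch): verify traffic really leaves via the
# SOCKS proxy before scraping; httpbin.org/ip is used here purely as an
# example echo service.
def check_proxy():
    r = requests.get('https://httpbin.org/ip', timeout=10)
    print(r.json())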
MIN_WAIT = 1
MAX_WAIT = 2
URL = 'http://www.albonazionalegestoriambientali.it/Services/GetRicerche.asmx/GetImpresaMezzi'
def main():
conn = None
try:
conn = sqlite3.connect("comp.db3")
cur = conn.cursor()
results = cur.execute("select impresa from impresas where impresa not in (select impresa from media_list) order by impresa")
impresa_list = results.fetchall()
for impresa_id in impresa_list:
try:
headers = {'User-Agent': 'Mozilla/5.0', 'Content-Type':'application/json'}
data = "{'lang':'it', 'idImpresa':" + str(impresa_id[0]) + "}"
r = requests.post(URL, headers=headers, data=data)
time.sleep(random.randint(MIN_WAIT, MAX_WAIT))
if r.status_code == 200:
records = r.json()
rows = []
record_count = len(records['d']['itemsList'])
for i in range(0, record_count):
impresa = targa = tipo_mezzo = catg_attive = ''
try:
                            impresa = str(impresa_id[0])
                            if impresa_id[0] == 0:  # was `impresa == 0`; a str can never equal 0
                                continue
except Exception as e:
pass
try:
targa = records['d']['itemsList'][i]['Targa']
except Exception as e:
pass
try:
catg_attive = records['d']['itemsList'][i]['CategorieAttive']
except Exception as e:
pass
try:
tipo_mezzo = records['d']['itemsList'][i]['TipoMezzo']
except Exception as e:
pass
rows.append((impresa, targa, tipo_mezzo, catg_attive,))
try:
if len(rows) > 0:
sql = "insert into media_list(impresa, targa, tipo_mezzo, catg_attive) values (?,?,?,?)"
cur.executemany(sql, rows)
else:
sql = "insert into media_list(impresa) values (?)"
cur.execute(sql, (impresa_id[0],))
except Exception as e:
print(e.__doc__)
print(e.args)
pass
conn.commit()
except Exception as e:
print(e.__doc__)
print(e.args)
except Exception as e:
print(e.__doc__)
print(e.args)
finally:
if conn != None:
conn.commit()
conn = None
if __name__ == '__main__':
main()
|
brkrishna/freelance
|
albonaziona/media_list.py
|
Python
|
gpl-2.0
| 3,510
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-26 17:06
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('splitbill', '0009_transaction_raw'),
]
operations = [
migrations.CreateModel(
name='Statement',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_added', models.DateTimeField()),
('date_from', models.DateField()),
('date_to', models.DateField()),
],
),
migrations.AddField(
model_name='rawtransaction',
name='statement',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='splitbill.Statement'),
preserve_default=False,
),
migrations.AddField(
model_name='transaction',
name='statement',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='splitbill.Statement'),
preserve_default=False,
),
]
|
Breakthru/splitbills
|
splitbill/migrations/0010_auto_20161226_1706.py
|
Python
|
gpl-3.0
| 1,239
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Profile.age_accompanying_children'
db.add_column('accounts_profile', 'age_accompanying_children',
self.gf('django.db.models.fields.CharField')(default='', max_length=20, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Profile.age_accompanying_children'
db.delete_column('accounts_profile', 'age_accompanying_children')
models = {
'accounts.profile': {
'Meta': {'object_name': 'Profile'},
'age_accompanying_children': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'avatar': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num_accompanying_children': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'short_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['accounts']
|
EuroPython/djep
|
pyconde/accounts/migrations/0006_auto__add_field_profile_age_accompanying_children.py
|
Python
|
bsd-3-clause
| 4,777
|
import os
from docutils import nodes, utils
def file_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""Role for files.
:file:`./path/to/moo` ->
text: ./path/to/moo (italicized + file icon)
:file:`./path/to/moo/` ->
text: ./path/to/moo/ (italicized + directory icon)
"""
name = name.lower()
title = utils.unescape(text)
# 'file' would collide with bulma, so we use 'filepath'
# https://github.com/jgthms/bulma/blob/c2fae71/sass/elements/form.sass#L218
# https://github.com/jgthms/bulma/issues/1442
classes = []
# add .fa class since this isn't a link
classes.append('far')
if title.endswith('/'):
classes.append('fa-folder')
else:
classes.append('fa-file-alt')
extension = os.path.splitext(title)[1]
if extension:
classes.append(extension.lstrip('.'))
sn = nodes.emphasis(title, title)
# insert <span class="fa ..."> inside the <em>
sn.insert(0, nodes.inline('', '', classes=classes))
return [sn], []
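# Added sketch: how a role like this is typically wired up with docutils (the
# 'filepath' name sidesteps the bulma collision noted above).
def register_filepath_role():
    from docutils.parsers.rst import roles
    roles.register_local_role('filepath', file_role)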
# TODO: Let font-awesome classes be configured via settings
def manifest_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""Role for manifests (package.json, file outputs)
:manifest:`package.json` ->
text: package.json (italicized + file icon)
"""
name = name.lower()
title = utils.unescape(text)
classes = ['manifest']
# add .fa class since this isn't a link
classes.append('fa-file-alt far')
sn = nodes.emphasis(title, title)
# insert <span class="fa ..."> inside the <em>
sn.insert(0, nodes.inline('', '', classes=classes))
return [sn], []
def exe_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""Role for executables.
:exe:`./path/to/webpack` ->
text: ./path/to/webpack (italicized + file icon)
"""
name = name.lower()
title = utils.unescape(text)
classes = ['exe', 'fa']
sn = nodes.emphasis(title, title)
# insert <span class="fa ..."> inside the <em>
sn.insert(0, nodes.inline('', '', classes=classes))
return [sn], []
|
tony/django-docutils
|
django_docutils/lib/roles/file.py
|
Python
|
mit
| 2,146
|
#Starts with a BOM
|
github/codeql
|
python/ql/test/library-tests/encoding/utf8_bom.py
|
Python
|
mit
| 22
|
#bolttools - a framework for creation of part libraries
#Copyright (C) 2013 Johannes Reinhardt <jreinhardt@ist-dein-freund.de>
#
#This library is free software; you can redistribute it and/or
#modify it under the terms of the GNU Lesser General Public
#License as published by the Free Software Foundation; either
#version 2.1 of the License, or any later version.
#
#This library is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
#Lesser General Public License for more details.
#
#You should have received a copy of the GNU Lesser General Public
#License along with this library; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from errors import MissingFreeCADError
FREECADPATH = '/usr/lib/freecad/lib/' # path to your FreeCAD.so or FreeCAD.dll file
import sys
sys.path.append(FREECADPATH)
try:
    import FreeCAD
    import Part
    import Sketcher
except ImportError:
    raise MissingFreeCADError()
from os.path import join, exists
from os import makedirs, remove
from datetime import datetime
import importlib
from common import Backend
def add_part(base,params,doc):
module = importlib.import_module(base.module_name)
module.__dict__[base.name](params,doc)
class IGESBackend(Backend):
def __init__(self,repo,databases):
Backend.__init__(self,repo,"iges",databases,["freecad"])
def write_output(self,out_path,version,stable=False):
self.clear_output_dir(out_path)
ver_root = join(out_path,version)
makedirs(ver_root)
#generate version file
date = datetime.now()
        with open(join(ver_root, "VERSION"), "w") as version_file:
            version_file.write("%s\n%d-%d-%d\n" %
                (version, date.year, date.month, date.day))
#Disable writing bytecode to avoid littering the freecad database with pyc files
write_bytecode = sys.dont_write_bytecode
sys.dont_write_bytecode = True
for coll in self.repo.itercollections():
if not exists(join(ver_root,coll.id)):
makedirs(join(ver_root,coll.id))
sys.path.append(join(self.repo.path,"freecad",coll.id))
for cl,base in self.dbs["freecad"].iterclasses(["class","base"],filter_collection=coll):
if cl.parameters.common is None:
continue
for free in cl.parameters.common:
                    try:
                        params = cl.parameters.collect(dict(zip(cl.parameters.free, free)))
                    except Exception:
                        print("A problem occurred when parameters for %s were collected for %s" % (free, cl.id))
                        raise
for std, in self.dbs["freecad"].iterstandards(filter_class=cl):
params['name'] = std.labeling.get_nice(params)
filename = std.labeling.get_safe(params) + ".igs"
doc = FreeCAD.newDocument()
add_part(base,params,doc)
shape = doc.ActiveObject.Shape
shape.exportIges(join(ver_root,coll.id,filename))
FreeCAD.closeDocument(doc.Name)
for name, in self.dbs["freecad"].iternames(filter_class=cl):
params['name'] = name.labeling.get_nice(params)
filename = name.labeling.get_safe(params) + ".igs"
doc = FreeCAD.newDocument()
add_part(base,params,doc)
shape = doc.ActiveObject.Shape
shape.exportIges(join(ver_root,coll.id,filename))
FreeCAD.closeDocument(doc.Name)
sys.path.pop()
#restore byte code writing
sys.dont_write_bytecode = write_bytecode
|
jreinhardt/BOLTS
|
backends/exchange.py
|
Python
|
gpl-3.0
| 3,384
|
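add_part() above imports base.module_name and calls the function named base.name with (params, doc); here is a hypothetical sketch of such a part module (the function name and the 'l', 'w', 'h' parameters are invented for illustration; params['name'] is set by the backend before the call):
# freecad/<collection>/plainbox.py -- hypothetical module resolved by add_part()
def plainbox(params, doc):
    # "Part::Box" is a standard FreeCAD document object type.
    box = doc.addObject("Part::Box", "Box")
    box.Label = params['name']
    box.Length = params['l']
    box.Width = params['w']
    box.Height = params['h']
    doc.recompute()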
from nose.tools import assert_equal, assert_greater
from unittest.mock import patch
from ... import db
from ..sync import sync
from ..notice import send_notice
from ..backends.postgresql import PostgreSQL
from . import g
def send_notice_email(conf, target, notice):
assert_equal(target, 'answeror+foo@gmail.com')
@patch('torabot.core.notice.send_notice_email', send_notice_email)
def test_send_notice():
with g.connection.begin_nested() as trans:
query_id = db.add_query(g.connection, kind='tora', text='东方')
user_id = db.add_user(
g.connection,
name='answeror',
email='answerro@gmail.com',
openid='foo'
)
email_id = db.add_email_bi_user_id(
g.connection,
id=user_id,
email='answeror+foo@gmail.com',
label=''
)
db.activate_email_bi_id(g.connection, email_id)
db.watch(
g.connection,
user_id=user_id,
query_id=query_id,
email_id=email_id
)
sync(
kind='tora',
text='东方',
timeout=60,
sync_interval=300,
backend=PostgreSQL(conn=g.connection)
)
notices = db.get_pending_notices(g.connection)
assert_greater(len(notices), 0)
for notice in notices:
assert send_notice(
conf={},
notice=notice,
conn=g.connection,
)
trans.rollback()
|
Answeror/torabot
|
torabot/core/test/test_notice.py
|
Python
|
mit
| 1,532
|
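The begin_nested()/rollback() pattern in test_send_notice keeps every row the test writes inside a savepoint; the same idea in a standalone SQLAlchemy sketch (engine URL and table are placeholders, not taken from torabot):
from sqlalchemy import create_engine, text

engine = create_engine('postgresql:///torabot_test')  # placeholder URL
with engine.connect() as connection:
    trans = connection.begin_nested()  # SAVEPOINT, as in the test above
    connection.execute(text("INSERT INTO email (address) VALUES ('x@example.com')"))
    # ... run assertions against the uncommitted state here ...
    trans.rollback()  # the savepoint is discarded; nothing persists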
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for Google Drive service"""
from io import TextIOWrapper
from typing import Any, Optional, Sequence, Union
from googleapiclient.discovery import Resource, build
from googleapiclient.http import HttpRequest, MediaFileUpload
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
class GoogleDriveHook(GoogleBaseHook):
"""
Hook for the Google Drive APIs.
:param api_version: API version used (for example v3).
:type api_version: str
:param gcp_conn_id: The connection ID to use when fetching connection info.
:type gcp_conn_id: str
:param delegate_to: The account to impersonate using domain-wide delegation of authority,
if any. For this to work, the service account making the request must have
domain-wide delegation enabled.
:type delegate_to: str
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account.
:type impersonation_chain: Union[str, Sequence[str]]
"""
_conn = None # type: Optional[Resource]
def __init__(
self,
api_version: str = "v3",
gcp_conn_id: str = "google_cloud_default",
delegate_to: Optional[str] = None,
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
) -> None:
super().__init__(
gcp_conn_id=gcp_conn_id,
delegate_to=delegate_to,
impersonation_chain=impersonation_chain,
)
self.api_version = api_version
def get_conn(self) -> Any:
"""
Retrieves the connection to Google Drive.
:return: Google Drive services object.
"""
if not self._conn:
http_authorized = self._authorize()
self._conn = build("drive", self.api_version, http=http_authorized, cache_discovery=False)
return self._conn
def _ensure_folders_exists(self, path: str) -> str:
service = self.get_conn()
current_parent = "root"
folders = path.split("/")
depth = 0
# First tries to enter directories
for current_folder in folders:
self.log.debug("Looking for %s directory with %s parent", current_folder, current_parent)
conditions = [
"mimeType = 'application/vnd.google-apps.folder'",
f"name='{current_folder}'",
f"'{current_parent}' in parents",
]
result = (
service.files() # pylint: disable=no-member
.list(q=" and ".join(conditions), spaces="drive", fields="files(id, name)")
.execute(num_retries=self.num_retries)
)
files = result.get("files", [])
if not files:
self.log.info("Not found %s directory", current_folder)
# If the directory does not exist, break loops
break
depth += 1
current_parent = files[0].get("id")
# Check if there are directories to process
if depth != len(folders):
# Create missing directories
for current_folder in folders[depth:]:
file_metadata = {
"name": current_folder,
"mimeType": "application/vnd.google-apps.folder",
"parents": [current_parent],
}
file = (
service.files() # pylint: disable=no-member
.create(body=file_metadata, fields="id")
.execute(num_retries=self.num_retries)
)
self.log.info("Created %s directory", current_folder)
current_parent = file.get("id")
# Return the ID of the last directory
return current_parent
def get_media_request(self, file_id: str) -> HttpRequest:
"""
Returns a get_media http request to a Google Drive object.
:param file_id: The Google Drive file id
:type file_id: str
:return: request
:rtype: HttpRequest
"""
service = self.get_conn()
request = service.files().get_media(fileId=file_id) # pylint: disable=no-member
return request
def exists(self, folder_id: str, file_name: str, drive_id: Optional[str] = None):
"""
Checks to see if a file exists within a Google Drive folder
:param folder_id: The id of the Google Drive folder in which the file resides
:type folder_id: str
:param file_name: The name of a file in Google Drive
:type file_name: str
:param drive_id: Optional. The id of the shared Google Drive in which the file resides.
:type drive_id: str
:return: True if the file exists, False otherwise
:rtype: bool
"""
return bool(self.get_file_id(folder_id=folder_id, file_name=file_name, drive_id=drive_id))
def get_file_id(self, folder_id: str, file_name: str, drive_id: Optional[str] = None):
"""
Returns the file id of a Google Drive file
:param folder_id: The id of the Google Drive folder in which the file resides
:type folder_id: str
:param file_name: The name of a file in Google Drive
:type file_name: str
:param drive_id: Optional. The id of the shared Google Drive in which the file resides.
:type drive_id: str
:return: Google Drive file id if the file exists, otherwise None
:rtype: str if file exists else None
"""
query = f"name = '{file_name}'"
if folder_id:
query += f" and parents in '{folder_id}'"
service = self.get_conn()
if drive_id:
files = (
service.files() # pylint: disable=no-member
.list(
q=query,
spaces="drive",
fields="files(id, mimeType)",
orderBy="modifiedTime desc",
driveId=drive_id,
includeItemsFromAllDrives=True,
supportsAllDrives=True,
corpora="drive",
)
.execute(num_retries=self.num_retries)
)
else:
files = (
service.files() # pylint: disable=no-member
.list(q=query, spaces="drive", fields="files(id, mimeType)", orderBy="modifiedTime desc")
.execute(num_retries=self.num_retries)
)
file_metadata = {}
if files['files']:
file_metadata = {"id": files['files'][0]['id'], "mime_type": files['files'][0]['mimeType']}
return file_metadata
def upload_file(self, local_location: str, remote_location: str) -> str:
"""
Uploads a file that is available locally to a Google Drive service.
:param local_location: The path where the file is available.
:type local_location: str
        :param remote_location: The path where the file will be sent
:type remote_location: str
:return: File ID
:rtype: str
"""
service = self.get_conn()
directory_path, _, file_name = remote_location.rpartition("/")
if directory_path:
parent = self._ensure_folders_exists(directory_path)
else:
parent = "root"
file_metadata = {"name": file_name, "parents": [parent]}
media = MediaFileUpload(local_location)
file = (
service.files() # pylint: disable=no-member
.create(body=file_metadata, media_body=media, fields="id")
.execute(num_retries=self.num_retries)
)
self.log.info("File %s uploaded to gdrive://%s.", local_location, remote_location)
return file.get("id")
def download_file(self, file_id: str, file_handle: TextIOWrapper, chunk_size: int = 104857600):
"""
Download a file from Google Drive.
:param file_id: the id of the file
:type file_id: str
:param file_handle: file handle used to write the content to
:type file_handle: io.TextIOWrapper
"""
request = self.get_media_request(file_id=file_id)
self.download_content_from_request(file_handle=file_handle, request=request, chunk_size=chunk_size)
|
nathanielvarona/airflow
|
airflow/providers/google/suite/hooks/drive.py
|
Python
|
apache-2.0
| 9,602
|
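A hedged usage sketch of the hook above, mirroring only the public methods shown (connection id and paths are placeholders):
# Hypothetical task code using GoogleDriveHook.
hook = GoogleDriveHook(api_version="v3", gcp_conn_id="google_cloud_default")

# upload_file() creates any missing folders via _ensure_folders_exists()
# and returns the id of the newly created file.
file_id = hook.upload_file(
    local_location="/tmp/report.csv",
    remote_location="reports/2021/report.csv",
)

# download_file() streams the content into an open handle in chunks.
with open("/tmp/report_copy.csv", "wb") as handle:
    hook.download_file(file_id=file_id, file_handle=handle)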
from __future__ import print_function
import json
import base64
def elicit_intent(message, image_url, title):
return {
'dialogAction' : {
'type' : 'ElicitIntent',
'message' : message,
'responseCard' : {
'version' : 1,
'contentType' : 'application/vnd.amazonaws.card.generic',
'genericAttachments' : [
{
'title' : title,
'imageUrl' : image_url,
'buttons' : [
{
'text' : 'More options',
'value' : 'view more'
},
{
'text' : 'Reviews',
'value' : 'reviews?'
},
{
'text' : 'Pages',
'value' : 'pages??'
}
]
}
]
}
}
}
def elicit(message):
return {
'dialogAction' : {
'type' : 'ElicitIntent',
'message' : message,
'responseCard' : {
'version' : 1,
'contentType' : 'application/vnd.amazonaws.card.generic',
'genericAttachments' : [
{
'title' : 'These are the trending technologies (as of TIOBE index)',
'buttons' : [
{
'text' : 'Java',
'value' : 'show java book'
},
{
'text' : 'C',
'value' : 'show C book'
},
{
'text' : 'Python',
'value' : 'show python book'
}
]
}
]
}
}
}
def lambda_handler(event, context):
    print(event)
try:
session_attributes = event['sessionAttributes']['data']
decoded = base64.b64decode(session_attributes)
        print('decoded', decoded)
data = json.loads(decoded)
message = {'contentType': 'PlainText', 'content': """{}""".format(data[0]['publisher'])}
return elicit_intent(message, data[0]['image_url'], data[0]['title'])
    except Exception:
message = {'contentType': 'PlainText', 'content': """:-O Oops I forgot what we were talking about. How about these trending ones?"""}
return elicit(message)
|
CodeOpsTechnologies/BookBot
|
get_publisher.py
|
Python
|
mit
| 2,409
|
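To exercise lambda_handler locally one can hand it a fabricated Lex event; the field names mirror what the handler reads, while the book data itself is invented:
# Hypothetical local invocation with a fake Lex event.
payload = [{'publisher': 'Example Press',
            'image_url': 'https://example.com/cover.png',
            'title': 'Some Book'}]
event = {
    'sessionAttributes': {
        'data': base64.b64encode(json.dumps(payload).encode('utf-8'))
    }
}
response = lambda_handler(event, context=None)
assert response['dialogAction']['type'] == 'ElicitIntent'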
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""HierarchicalController Class.
The HierarchicalController encompasses the entire lifecycle of training the
device placement policy, including generating op embeddings, getting groups for
each op, placing those groups and running the predicted placements.
Different assignment models can inherit from this class.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
import six
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops as tf_ops
from tensorflow.python.grappler.controller import Controller
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.summary import summary
from tensorflow.python.training import adam
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import learning_rate_decay
from tensorflow.python.training import training_util
class PlacerParams(object):
"""Class to hold a set of placement parameters as name-value pairs.
A typical usage is as follows:
```python
# Create a PlacerParams object specifying names and values of the model
# parameters:
params = PlacerParams(hidden_size=128, decay_steps=50)
# The parameters are available as attributes of the PlacerParams object:
  params.hidden_size ==> 128
  params.decay_steps ==> 50
```
"""
def __init__(self, **kwargs):
"""Create an instance of `PlacerParams` from keyword arguments.
The keyword arguments specify name-values pairs for the parameters.
The parameter types are inferred from the type of the values passed.
The parameter names are added as attributes of `PlacerParams` object,
    and they can be accessed directly with the dot notation `params.<name>`.
Example:
```python
# Define 1 parameter: 'hidden_size'
params = PlacerParams(hidden_size=128)
params.hidden_size ==> 128
```
Args:
**kwargs: Key-value pairs where the key is the parameter name and
the value is the value for the parameter.
"""
for name, value in six.iteritems(kwargs):
self.add_param(name, value)
def add_param(self, name, value):
"""Adds {name, value} pair to hyperparameters.
Args:
name: Name of the hyperparameter.
value: Value of the hyperparameter. Can be one of the following types:
int, float, string, int list, float list, or string list.
Raises:
ValueError: if one of the arguments is invalid.
"""
# Keys in kwargs are unique, but 'name' could be the name of a pre-existing
# attribute of this object. In that case we refuse to use it as a
# parameter name.
if getattr(self, name, None) is not None:
raise ValueError("Parameter name is reserved: %s" % name)
setattr(self, name, value)
def hierarchical_controller_hparams():
"""Hyperparameters for hierarchical planner."""
return PlacerParams(
hidden_size=512,
forget_bias_init=1.0,
temperature=1.0,
logits_std_noise=0.5,
stop_noise_step=750,
decay_steps=50,
max_num_outputs=5,
max_output_size=5,
tanh_constant=1.0,
adj_embed_dim=20,
grouping_hidden_size=64,
num_groups=None,
bi_lstm=True,
failing_signal=100,
stop_sampling=500,
start_with_failing_signal=True,
always_update_baseline=False,
bl_dec=0.9,
grad_bound=1.0,
lr=0.1,
lr_dec=0.95,
start_decay_step=400,
optimizer_type="adam",
stop_updating_after_steps=1000,
name="hierarchical_controller",
keep_prob=1.0,
reward_function="sqrt",
seed=1234,
# distributed training params
num_children=1)
class HierarchicalController(Controller):
"""HierarchicalController class."""
def __init__(self, hparams, item, cluster, controller_id=0):
"""HierarchicalController class initializer.
Args:
hparams: All hyper-parameters.
item: The metagraph to place.
cluster: The cluster of hardware devices to optimize for.
controller_id: the id of the controller in a multi-controller setup.
"""
super(HierarchicalController, self).__init__(item, cluster)
self.ctrl_id = controller_id
self.hparams = hparams
if self.hparams.num_groups is None:
self.num_groups = min(256, 20 * self.num_devices)
else:
self.num_groups = self.hparams.num_groups
# creates self.op_embeddings and self.type_dict
self.create_op_embeddings(verbose=False)
# TODO(azalia) clean up embedding/group_embedding_size names
self.group_emb_size = (
2 * self.num_groups + len(self.type_dict) +
self.hparams.max_num_outputs * self.hparams.max_output_size)
self.embedding_size = self.group_emb_size
self.initializer = init_ops.glorot_uniform_initializer(
seed=self.hparams.seed)
with variable_scope.variable_scope(
self.hparams.name,
initializer=self.initializer,
reuse=variable_scope.AUTO_REUSE):
# define parameters of feedforward
variable_scope.get_variable("w_grouping_ff", [
1 + self.hparams.max_num_outputs * self.hparams.max_output_size +
self.hparams.adj_embed_dim, self.hparams.grouping_hidden_size
])
variable_scope.get_variable(
"w_grouping_softmax",
[self.hparams.grouping_hidden_size, self.num_groups])
if self.hparams.bi_lstm:
variable_scope.get_variable("encoder_lstm_forward", [
self.embedding_size + self.hparams.hidden_size / 2,
2 * self.hparams.hidden_size
])
variable_scope.get_variable("encoder_lstm_backward", [
self.embedding_size + self.hparams.hidden_size / 2,
2 * self.hparams.hidden_size
])
variable_scope.get_variable(
"device_embeddings", [self.num_devices, self.hparams.hidden_size])
variable_scope.get_variable(
"decoder_lstm",
[2 * self.hparams.hidden_size, 4 * self.hparams.hidden_size])
variable_scope.get_variable(
"device_softmax", [2 * self.hparams.hidden_size, self.num_devices])
variable_scope.get_variable("device_go_embedding",
[1, self.hparams.hidden_size])
variable_scope.get_variable(
"encoder_forget_bias",
shape=1,
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
self.hparams.forget_bias_init))
variable_scope.get_variable(
"decoder_forget_bias",
shape=1,
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
self.hparams.forget_bias_init))
variable_scope.get_variable(
"attn_w_1", [self.hparams.hidden_size, self.hparams.hidden_size])
variable_scope.get_variable(
"attn_w_2", [self.hparams.hidden_size, self.hparams.hidden_size])
variable_scope.get_variable("attn_v", [self.hparams.hidden_size, 1])
else:
variable_scope.get_variable("encoder_lstm", [
self.embedding_size + self.hparams.hidden_size,
4 * self.hparams.hidden_size
])
variable_scope.get_variable(
"device_embeddings", [self.num_devices, self.hparams.hidden_size])
variable_scope.get_variable(
"decoder_lstm",
[2 * self.hparams.hidden_size, 4 * self.hparams.hidden_size])
variable_scope.get_variable(
"device_softmax", [2 * self.hparams.hidden_size, self.num_devices])
variable_scope.get_variable("device_go_embedding",
[1, self.hparams.hidden_size])
variable_scope.get_variable(
"encoder_forget_bias",
shape=1,
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
self.hparams.forget_bias_init))
variable_scope.get_variable(
"decoder_forget_bias",
shape=1,
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
self.hparams.forget_bias_init))
variable_scope.get_variable(
"attn_w_1", [self.hparams.hidden_size, self.hparams.hidden_size])
variable_scope.get_variable(
"attn_w_2", [self.hparams.hidden_size, self.hparams.hidden_size])
variable_scope.get_variable("attn_v", [self.hparams.hidden_size, 1])
seq2seq_input_layer = array_ops.placeholder_with_default(
array_ops.zeros([self.hparams.num_children,
self.num_groups,
self.group_emb_size],
dtypes.float32),
shape=(self.hparams.num_children, self.num_groups, self.group_emb_size))
self.seq2seq_input_layer = seq2seq_input_layer
def compute_reward(self, run_time):
if self.hparams.reward_function == "id":
reward = run_time
elif self.hparams.reward_function == "sqrt":
reward = math.sqrt(run_time)
elif self.hparams.reward_function == "log":
reward = math.log1p(run_time)
else:
raise NotImplementedError(
"Unrecognized reward function '%s', consider your "
"--reward_function flag value." % self.hparams.reward_function)
return reward
def build_controller(self):
"""RL optimization interface.
Returns:
ops: A dictionary holding handles of the model used for training.
"""
self._global_step = training_util.get_or_create_global_step()
ops = {}
ops["loss"] = 0
failing_signal = self.compute_reward(self.hparams.failing_signal)
ctr = {}
with tf_ops.name_scope("controller_{}".format(self.ctrl_id)):
with variable_scope.variable_scope("controller_{}".format(self.ctrl_id)):
ctr["reward"] = {"value": [], "ph": [], "update": []}
ctr["ready"] = {"value": [], "ph": [], "update": []}
ctr["best_reward"] = {"value": [], "update": []}
for i in range(self.hparams.num_children):
reward_value = variable_scope.get_local_variable(
"reward_{}".format(i),
initializer=0.0,
dtype=dtypes.float32,
trainable=False)
reward_ph = array_ops.placeholder(
dtypes.float32, shape=(), name="reward_ph_{}".format(i))
reward_update = state_ops.assign(
reward_value, reward_ph, use_locking=True)
ctr["reward"]["value"].append(reward_value)
ctr["reward"]["ph"].append(reward_ph)
ctr["reward"]["update"].append(reward_update)
best_reward = variable_scope.get_local_variable(
"best_reward_{}".format(i),
initializer=failing_signal,
dtype=dtypes.float32,
trainable=False)
ctr["best_reward"]["value"].append(best_reward)
ctr["best_reward"]["update"].append(
state_ops.assign(best_reward,
math_ops.minimum(best_reward, reward_update)))
ready_value = variable_scope.get_local_variable(
"ready_{}".format(i),
initializer=True,
dtype=dtypes.bool,
trainable=False)
ready_ph = array_ops.placeholder(
dtypes.bool, shape=(), name="ready_ph_{}".format(i))
ready_update = state_ops.assign(
ready_value, ready_ph, use_locking=True)
ctr["ready"]["value"].append(ready_value)
ctr["ready"]["ph"].append(ready_ph)
ctr["ready"]["update"].append(ready_update)
ctr["grouping_y_preds"], ctr["grouping_log_probs"] = self.get_groupings()
summary.histogram(
"grouping_actions",
array_ops.slice(ctr["grouping_y_preds"]["sample"], [0, 0],
[1, array_ops.shape(self.op_embeddings)[0]]))
with variable_scope.variable_scope("controller_{}".format(self.ctrl_id)):
ctr["baseline"] = variable_scope.get_local_variable(
"baseline",
initializer=failing_signal
if self.hparams.start_with_failing_signal else 0.0,
dtype=dtypes.float32,
trainable=False)
new_baseline = self.hparams.bl_dec * ctr["baseline"] + (
1 - self.hparams.bl_dec) * math_ops.reduce_mean(
ctr["reward"]["value"])
if not self.hparams.always_update_baseline:
baseline_mask = math_ops.less(ctr["reward"]["value"], failing_signal)
selected_reward = array_ops.boolean_mask(ctr["reward"]["value"],
baseline_mask)
selected_baseline = control_flow_ops.cond(
math_ops.reduce_any(baseline_mask),
lambda: math_ops.reduce_mean(selected_reward),
lambda: constant_op.constant(0, dtype=dtypes.float32))
ctr["pos_reward"] = selected_baseline
pos_ = math_ops.less(
constant_op.constant(0, dtype=dtypes.float32), selected_baseline)
selected_baseline = self.hparams.bl_dec * ctr["baseline"] + (
1 - self.hparams.bl_dec) * selected_baseline
selected_baseline = control_flow_ops.cond(
pos_, lambda: selected_baseline, lambda: ctr["baseline"])
new_baseline = control_flow_ops.cond(
math_ops.less(self.global_step,
self.hparams.stop_updating_after_steps),
lambda: new_baseline, lambda: selected_baseline)
ctr["baseline_update"] = state_ops.assign(
ctr["baseline"], new_baseline, use_locking=True)
ctr["y_preds"], ctr["log_probs"] = self.get_placements()
summary.histogram("actions", ctr["y_preds"]["sample"])
mask = math_ops.less(ctr["reward"]["value"], failing_signal)
ctr["loss"] = ctr["reward"]["value"] - ctr["baseline"]
ctr["loss"] *= (
ctr["log_probs"]["sample"] + ctr["grouping_log_probs"]["sample"])
selected_loss = array_ops.boolean_mask(ctr["loss"], mask)
selected_loss = control_flow_ops.cond(
math_ops.reduce_any(mask),
lambda: math_ops.reduce_mean(-selected_loss),
lambda: constant_op.constant(0, dtype=dtypes.float32))
ctr["loss"] = control_flow_ops.cond(
math_ops.less(self.global_step,
self.hparams.stop_updating_after_steps),
lambda: math_ops.reduce_mean(-ctr["loss"]), lambda: selected_loss)
ctr["reward_s"] = math_ops.reduce_mean(ctr["reward"]["value"])
summary.scalar("loss", ctr["loss"])
summary.scalar("avg_reward", ctr["reward_s"])
summary.scalar("best_reward_so_far", best_reward)
summary.scalar(
"advantage",
math_ops.reduce_mean(ctr["reward"]["value"] - ctr["baseline"]))
with variable_scope.variable_scope(
"optimizer", reuse=variable_scope.AUTO_REUSE):
(ctr["train_op"], ctr["lr"], ctr["grad_norm"],
ctr["grad_norms"]) = self._get_train_ops(
ctr["loss"],
tf_ops.get_collection(tf_ops.GraphKeys.TRAINABLE_VARIABLES),
self.global_step,
grad_bound=self.hparams.grad_bound,
lr_init=self.hparams.lr,
lr_dec=self.hparams.lr_dec,
start_decay_step=self.hparams.start_decay_step,
decay_steps=self.hparams.decay_steps,
optimizer_type=self.hparams.optimizer_type)
summary.scalar("gradnorm", ctr["grad_norm"])
summary.scalar("lr", ctr["lr"])
ctr["summary"] = summary.merge_all()
ops["controller"] = ctr
self.ops = ops
return ops
@property
def global_step(self):
return self._global_step
def create_op_embeddings(self, verbose=False):
if verbose:
print("process input graph for op embeddings")
self.num_ops = len(self.important_ops)
# topological sort of important nodes
topo_order = [op.name for op in self.important_ops]
# create index to name for topologicaly sorted important nodes
name_to_topo_order_index = {}
for idx, x in enumerate(topo_order):
name_to_topo_order_index[x] = idx
self.name_to_topo_order_index = name_to_topo_order_index
# create adj matrix
adj_dict = {}
for idx, op in enumerate(self.important_ops):
for output_op in self.get_node_fanout(op):
output_op_name = output_op.name
if output_op_name in self.important_op_names:
if name_to_topo_order_index[op.name] not in adj_dict:
adj_dict[name_to_topo_order_index[op.name]] = []
adj_dict[name_to_topo_order_index[op.name]].extend(
[name_to_topo_order_index[output_op_name], 1])
if output_op_name not in adj_dict:
adj_dict[name_to_topo_order_index[output_op_name]] = []
adj_dict[name_to_topo_order_index[output_op_name]].extend(
[name_to_topo_order_index[op.name], -1])
# get op_type op_output_shape, and adj info
output_embed_dim = (self.hparams.max_num_outputs *
self.hparams.max_output_size)
# TODO(bsteiner): don't filter based on used ops so that we can generalize
# to models that use other types of ops.
used_ops = set()
for node in self.important_ops:
op_type = str(node.op)
used_ops.add(op_type)
self.type_dict = {}
for op_type in self.cluster.ListAvailableOps():
if op_type in used_ops:
self.type_dict[op_type] = len(self.type_dict)
op_types = np.zeros([self.num_ops], dtype=np.int32)
op_output_shapes = np.full(
[self.num_ops, output_embed_dim], -1.0, dtype=np.float32)
for idx, node in enumerate(self.important_ops):
op_types[idx] = self.type_dict[node.op]
# output shape
op_name = node.name
for i, output_prop in enumerate(self.node_properties[op_name]):
if output_prop.shape.__str__() == "<unknown>":
continue
shape = output_prop.shape
for j, dim in enumerate(shape.dim):
if dim.size >= 0:
if i * self.hparams.max_output_size + j >= output_embed_dim:
break
op_output_shapes[idx,
i * self.hparams.max_output_size + j] = dim.size
# adj for padding
op_adj = np.full(
[self.num_ops, self.hparams.adj_embed_dim], 0, dtype=np.float32)
for idx in adj_dict:
neighbors = adj_dict[int(idx)]
min_dim = min(self.hparams.adj_embed_dim, len(neighbors))
padding_size = self.hparams.adj_embed_dim - min_dim
neighbors = neighbors[:min_dim] + [0] * padding_size
op_adj[int(idx)] = neighbors
# op_embedding starts here
op_embeddings = np.zeros(
[
self.num_ops,
1 + self.hparams.max_num_outputs * self.hparams.max_output_size +
self.hparams.adj_embed_dim
],
dtype=np.float32)
for idx, op_name in enumerate(topo_order):
op_embeddings[idx] = np.concatenate(
(np.array([op_types[idx]]), op_output_shapes[idx], op_adj[int(idx)]))
self.op_embeddings = constant_op.constant(
op_embeddings, dtype=dtypes.float32)
if verbose:
print("num_ops = {}".format(self.num_ops))
print("num_types = {}".format(len(self.type_dict)))
def get_groupings(self, *args, **kwargs):
num_children = self.hparams.num_children
with variable_scope.variable_scope("controller_{}".format(self.ctrl_id)):
grouping_actions_cache = variable_scope.get_local_variable(
"grouping_actions_cache",
initializer=init_ops.zeros_initializer,
dtype=dtypes.int32,
shape=[num_children, self.num_ops],
trainable=False)
input_layer = self.op_embeddings
input_layer = array_ops.expand_dims(input_layer, 0)
feed_ff_input_layer = array_ops.tile(input_layer, [num_children, 1, 1])
grouping_actions, grouping_log_probs = {}, {}
grouping_actions["sample"], grouping_log_probs[
"sample"] = self.make_grouping_predictions(feed_ff_input_layer)
grouping_actions["sample"] = state_ops.assign(grouping_actions_cache,
grouping_actions["sample"])
self.grouping_actions_cache = grouping_actions_cache
return grouping_actions, grouping_log_probs
def make_grouping_predictions(self, input_layer, reuse=None):
"""model that predicts grouping (grouping_actions).
Args:
input_layer: group_input_layer
reuse: reuse
Returns:
grouping_actions: actions
grouping_log_probs: log probabilities corresponding to actions
"""
with variable_scope.variable_scope(self.hparams.name, reuse=True):
# input_layer: tensor of size [1, num_ops, hidden_size]
w_grouping_ff = variable_scope.get_variable("w_grouping_ff")
w_grouping_softmax = variable_scope.get_variable("w_grouping_softmax")
batch_size = array_ops.shape(input_layer)[0]
embedding_dim = array_ops.shape(input_layer)[2]
reshaped = array_ops.reshape(input_layer,
[batch_size * self.num_ops, embedding_dim])
ff_output = math_ops.matmul(reshaped, w_grouping_ff)
logits = math_ops.matmul(ff_output, w_grouping_softmax)
if self.hparams.logits_std_noise > 0:
num_in_logits = math_ops.cast(
array_ops.size(logits), dtype=dtypes.float32)
avg_norm = math_ops.divide(
linalg_ops.norm(logits), math_ops.sqrt(num_in_logits))
logits_noise = random_ops.random_normal(
array_ops.shape(logits),
stddev=self.hparams.logits_std_noise * avg_norm)
logits = control_flow_ops.cond(
self.global_step > self.hparams.stop_noise_step, lambda: logits,
lambda: logits + logits_noise)
logits = array_ops.reshape(logits,
[batch_size * self.num_ops, self.num_groups])
actions = random_ops.multinomial(logits, 1, seed=self.hparams.seed)
actions = math_ops.to_int32(actions)
actions = array_ops.reshape(actions, [batch_size, self.num_ops])
action_label = array_ops.reshape(actions, [-1])
log_probs = nn_ops.sparse_softmax_cross_entropy_with_logits(
logits=logits, labels=action_label)
log_probs = array_ops.reshape(log_probs, [batch_size, -1])
log_probs = math_ops.reduce_sum(log_probs, 1)
grouping_actions = actions
grouping_log_probs = log_probs
return grouping_actions, grouping_log_probs
def create_group_embeddings(self, grouping_actions, verbose=False):
"""Approximating the blocks of a TF graph from a graph_def.
Args:
grouping_actions: grouping predictions.
      verbose: whether to print debug information.
Returns:
groups: list of groups.
"""
groups = [
self._create_group_embeddings(grouping_actions, i, verbose) for
i in range(self.hparams.num_children)
]
return np.stack(groups, axis=0)
def _create_group_embeddings(self, grouping_actions, child_id, verbose=False):
"""Approximating the blocks of a TF graph from a graph_def for each child.
Args:
grouping_actions: grouping predictions.
child_id: child_id for the group.
      verbose: whether to print debug information.
Returns:
groups: group embedding for the child_id.
"""
if verbose:
print("Processing input_graph")
# TODO(azalia): Build inter-adjacencies dag matrix.
# record dag_matrix
dag_matrix = np.zeros([self.num_groups, self.num_groups], dtype=np.float32)
for op in self.important_ops:
topo_op_index = self.name_to_topo_order_index[op.name]
group_index = grouping_actions[child_id][topo_op_index]
for output_op in self.get_node_fanout(op):
if output_op.name not in self.important_op_names:
continue
output_group_index = (
grouping_actions[child_id][self.name_to_topo_order_index[
output_op.name]])
dag_matrix[group_index, output_group_index] += 1.0
num_connections = np.sum(dag_matrix)
num_intra_group_connections = dag_matrix.trace()
num_inter_group_connections = num_connections - num_intra_group_connections
if verbose:
print("grouping evaluation metric")
print(("num_connections={} num_intra_group_connections={} "
"num_inter_group_connections={}").format(
num_connections, num_intra_group_connections,
num_inter_group_connections))
self.dag_matrix = dag_matrix
# output_shape
op_output_shapes = np.zeros(
[
len(self.important_ops),
self.hparams.max_num_outputs * self.hparams.max_output_size
],
dtype=np.float32)
for idx, op in enumerate(self.important_ops):
for i, output_properties in enumerate(self.node_properties[op.name]):
if output_properties.shape.__str__() == "<unknown>":
continue
if i > self.hparams.max_num_outputs:
break
shape = output_properties.shape
for j, dim in enumerate(shape.dim):
if dim.size > 0:
k = i * self.hparams.max_output_size + j
if k >= self.hparams.max_num_outputs * self.hparams.max_output_size:
break
op_output_shapes[idx, k] = dim.size
# group_embedding
group_embedding = np.zeros(
[
self.num_groups, len(self.type_dict) +
self.hparams.max_num_outputs * self.hparams.max_output_size
],
dtype=np.float32)
for op_index, op in enumerate(self.important_ops):
group_index = grouping_actions[child_id][
self.name_to_topo_order_index[op.name]]
type_name = str(op.op)
type_index = self.type_dict[type_name]
group_embedding[group_index, type_index] += 1
group_embedding[group_index, :self.hparams.max_num_outputs * self.hparams.
max_output_size] += (
op_output_shapes[op_index])
grouping_adjacencies = np.concatenate(
[dag_matrix, np.transpose(dag_matrix)], axis=1)
group_embedding = np.concatenate(
[grouping_adjacencies, group_embedding], axis=1)
group_normalizer = np.amax(group_embedding, axis=1, keepdims=True)
group_embedding /= (group_normalizer + 1.0)
if verbose:
print("Finished Processing Input Graph")
return group_embedding
def get_placements(self, *args, **kwargs):
num_children = self.hparams.num_children
with variable_scope.variable_scope("controller_{}".format(self.ctrl_id)):
actions_cache = variable_scope.get_local_variable(
"actions_cache",
initializer=init_ops.zeros_initializer,
dtype=dtypes.int32,
shape=[num_children, self.num_groups],
trainable=False)
x = self.seq2seq_input_layer
last_c, last_h, attn_mem = self.encode(x)
actions, log_probs = {}, {}
actions["sample"], log_probs["sample"] = (
self.decode(
x, last_c, last_h, attn_mem, mode="sample"))
actions["target"], log_probs["target"] = (
self.decode(
x,
last_c,
last_h,
attn_mem,
mode="target",
y=actions_cache))
actions["greedy"], log_probs["greedy"] = (
self.decode(
x, last_c, last_h, attn_mem, mode="greedy"))
actions["sample"] = control_flow_ops.cond(
self.global_step < self.hparams.stop_sampling,
lambda: state_ops.assign(actions_cache, actions["sample"]),
lambda: state_ops.assign(actions_cache, actions["target"]))
self.actions_cache = actions_cache
return actions, log_probs
def encode(self, x):
"""Encoder using LSTM.
Args:
x: tensor of size [num_children, num_groups, embedding_size]
Returns:
last_c, last_h: tensors of size [num_children, hidden_size], the final
LSTM states
      attn_mem: tensor of size [num_children, num_groups, hidden_size], the
        attention memory, i.e. the concatenation of all hidden states, linearly
        transformed by the attention matrix attn_w_1.
"""
if self.hparams.bi_lstm:
with variable_scope.variable_scope(self.hparams.name, reuse=True):
w_lstm_forward = variable_scope.get_variable("encoder_lstm_forward")
w_lstm_backward = variable_scope.get_variable("encoder_lstm_backward")
forget_bias = variable_scope.get_variable("encoder_forget_bias")
attn_w_1 = variable_scope.get_variable("attn_w_1")
else:
with variable_scope.variable_scope(self.hparams.name, reuse=True):
w_lstm = variable_scope.get_variable("encoder_lstm")
forget_bias = variable_scope.get_variable("encoder_forget_bias")
attn_w_1 = variable_scope.get_variable("attn_w_1")
embedding_size = array_ops.shape(x)[2]
signals = array_ops.split(x, self.num_groups, axis=1)
for i in range(len(signals)):
signals[i] = array_ops.reshape(
signals[i], [self.hparams.num_children, embedding_size])
if self.hparams.bi_lstm:
def body(i, prev_c_forward, prev_h_forward, prev_c_backward,
prev_h_backward):
"""while loop for LSTM."""
signal_forward = signals[i]
next_c_forward, next_h_forward = lstm(signal_forward, prev_c_forward,
prev_h_forward, w_lstm_forward,
forget_bias)
signal_backward = signals[self.num_groups - 1 - i]
next_c_backward, next_h_backward = lstm(
signal_backward, prev_c_backward, prev_h_backward, w_lstm_backward,
forget_bias)
next_h = array_ops.concat([next_h_forward, next_h_backward], axis=1)
all_h.append(next_h)
return (next_c_forward, next_h_forward, next_c_backward,
next_h_backward)
c_forward = array_ops.zeros(
[self.hparams.num_children, self.hparams.hidden_size / 2],
dtype=dtypes.float32)
h_forward = array_ops.zeros(
[self.hparams.num_children, self.hparams.hidden_size / 2],
dtype=dtypes.float32)
c_backward = array_ops.zeros(
[self.hparams.num_children, self.hparams.hidden_size / 2],
dtype=dtypes.float32)
h_backward = array_ops.zeros(
[self.hparams.num_children, self.hparams.hidden_size / 2],
dtype=dtypes.float32)
all_h = []
for i in range(0, self.num_groups):
c_forward, h_forward, c_backward, h_backward = body(
i, c_forward, h_forward, c_backward, h_backward)
last_c = array_ops.concat([c_forward, c_backward], axis=1)
last_h = array_ops.concat([h_forward, h_backward], axis=1)
attn_mem = array_ops.stack(all_h)
else:
def body(i, prev_c, prev_h):
signal = signals[i]
next_c, next_h = lstm(signal, prev_c, prev_h, w_lstm, forget_bias)
all_h.append(next_h)
return next_c, next_h
c = array_ops.zeros(
[self.hparams.num_children, self.hparams.hidden_size],
dtype=dtypes.float32)
h = array_ops.zeros(
[self.hparams.num_children, self.hparams.hidden_size],
dtype=dtypes.float32)
all_h = []
for i in range(0, self.num_groups):
c, h = body(i, c, h)
last_c = c
last_h = h
attn_mem = array_ops.stack(all_h)
attn_mem = array_ops.transpose(attn_mem, [1, 0, 2])
attn_mem = array_ops.reshape(
attn_mem,
[self.hparams.num_children * self.num_groups, self.hparams.hidden_size])
attn_mem = math_ops.matmul(attn_mem, attn_w_1)
attn_mem = array_ops.reshape(
attn_mem,
[self.hparams.num_children, self.num_groups, self.hparams.hidden_size])
return last_c, last_h, attn_mem
def decode(self,
x,
last_c,
last_h,
attn_mem,
mode="target",
y=None):
"""Decoder using LSTM.
Args:
x: tensor of size [num_children, num_groups, embedding_size].
last_c: tensor of size [num_children, hidden_size], the final LSTM states
computed by self.encoder.
last_h: same as last_c.
attn_mem: tensor of size [num_children, num_groups, hidden_size].
mode: "target" or "sample".
y: tensor of size [num_children, num_groups], the device placements.
Returns:
      actions: tensor of size [num_children, num_groups], the placements of
        devices.
      log_probs: tensor of size [num_children], the log probabilities of the
        chosen placements.
"""
with variable_scope.variable_scope(self.hparams.name, reuse=True):
w_lstm = variable_scope.get_variable("decoder_lstm")
forget_bias = variable_scope.get_variable("decoder_forget_bias")
device_embeddings = variable_scope.get_variable("device_embeddings")
device_softmax = variable_scope.get_variable("device_softmax")
device_go_embedding = variable_scope.get_variable("device_go_embedding")
attn_w_2 = variable_scope.get_variable("attn_w_2")
attn_v = variable_scope.get_variable("attn_v")
actions = tensor_array_ops.TensorArray(
dtypes.int32,
size=self.num_groups,
infer_shape=False,
clear_after_read=False)
# pylint: disable=unused-argument
def condition(i, *args):
return math_ops.less(i, self.num_groups)
# pylint: disable=missing-docstring
def body(i, prev_c, prev_h, actions, log_probs):
# pylint: disable=g-long-lambda
signal = control_flow_ops.cond(
math_ops.equal(i, 0),
lambda: array_ops.tile(device_go_embedding,
[self.hparams.num_children, 1]),
lambda: embedding_ops.embedding_lookup(device_embeddings,
actions.read(i - 1))
)
if self.hparams.keep_prob is not None:
signal = nn_ops.dropout(signal, self.hparams.keep_prob)
next_c, next_h = lstm(signal, prev_c, prev_h, w_lstm, forget_bias)
query = math_ops.matmul(next_h, attn_w_2)
query = array_ops.reshape(
query, [self.hparams.num_children, 1, self.hparams.hidden_size])
query = math_ops.tanh(query + attn_mem)
query = array_ops.reshape(query, [
self.hparams.num_children * self.num_groups, self.hparams.hidden_size
])
query = math_ops.matmul(query, attn_v)
query = array_ops.reshape(query,
[self.hparams.num_children, self.num_groups])
query = nn_ops.softmax(query)
query = array_ops.reshape(query,
[self.hparams.num_children, self.num_groups, 1])
query = math_ops.reduce_sum(attn_mem * query, axis=1)
query = array_ops.concat([next_h, query], axis=1)
logits = math_ops.matmul(query, device_softmax)
logits /= self.hparams.temperature
if self.hparams.tanh_constant > 0:
logits = math_ops.tanh(logits) * self.hparams.tanh_constant
if self.hparams.logits_std_noise > 0:
num_in_logits = math_ops.cast(
array_ops.size(logits), dtype=dtypes.float32)
avg_norm = math_ops.divide(
linalg_ops.norm(logits), math_ops.sqrt(num_in_logits))
logits_noise = random_ops.random_normal(
array_ops.shape(logits),
stddev=self.hparams.logits_std_noise * avg_norm)
logits = control_flow_ops.cond(
self.global_step > self.hparams.stop_noise_step, lambda: logits,
lambda: logits + logits_noise)
if mode == "sample":
next_y = random_ops.multinomial(logits, 1, seed=self.hparams.seed)
elif mode == "greedy":
next_y = math_ops.argmax(logits, 1)
elif mode == "target":
next_y = array_ops.slice(y, [0, i], [-1, 1])
else:
raise NotImplementedError
next_y = math_ops.to_int32(next_y)
next_y = array_ops.reshape(next_y, [self.hparams.num_children])
actions = actions.write(i, next_y)
log_probs += nn_ops.sparse_softmax_cross_entropy_with_logits(
logits=logits, labels=next_y)
return i + 1, next_c, next_h, actions, log_probs
loop_vars = [
constant_op.constant(0, dtype=dtypes.int32), last_c, last_h, actions,
array_ops.zeros([self.hparams.num_children], dtype=dtypes.float32)
]
loop_outputs = control_flow_ops.while_loop(condition, body, loop_vars)
last_c = loop_outputs[-4]
last_h = loop_outputs[-3]
actions = loop_outputs[-2].stack()
actions = array_ops.transpose(actions, [1, 0])
log_probs = loop_outputs[-1]
return actions, log_probs
def eval_placement(self,
sess,
child_id=0,
verbose=False):
grouping_actions, actions = sess.run([
self.grouping_actions_cache,
self.actions_cache
])
grouping_actions = grouping_actions[child_id]
actions = actions[child_id]
if verbose:
global_step = sess.run(self.global_step)
if global_step % 100 == 0:
log_string = "op group assignments: "
for a in grouping_actions:
log_string += "{} ".format(a)
print(log_string[:-1])
log_string = "group device assignments: "
for a in actions:
log_string += "{} ".format(a)
print(log_string[:-1])
for op in self.important_ops:
topo_order_index = self.name_to_topo_order_index[op.name]
group_index = grouping_actions[topo_order_index]
op.device = self.devices[actions[group_index]].name
try:
_, run_time, _ = self.cluster.MeasureCosts(self.item)
except errors.ResourceExhaustedError:
run_time = self.hparams.failing_signal
return run_time
def update_reward(self,
sess,
run_time,
child_id=0,
verbose=False):
reward = self.compute_reward(run_time)
controller_ops = self.ops["controller"]
_, best_reward = sess.run(
[
controller_ops["reward"]["update"][child_id],
controller_ops["best_reward"]["update"][child_id]
],
feed_dict={
controller_ops["reward"]["ph"][child_id]: reward,
})
if verbose:
print(("run_time={:<.5f} reward={:<.5f} "
"best_reward={:<.5f}").format(run_time, reward, best_reward))
# Reward is a double, best_reward a float: allow for some slack in the
# comparison.
updated = abs(best_reward - reward) < 1e-6
return updated
def generate_grouping(self, sess):
controller_ops = self.ops["controller"]
grouping_actions = sess.run(controller_ops["grouping_y_preds"]["sample"])
return grouping_actions
def generate_placement(self, grouping, sess):
controller_ops = self.ops["controller"]
feed_seq2seq_input_dict = {}
feed_seq2seq_input_dict[self.seq2seq_input_layer] = grouping
sess.run(
controller_ops["y_preds"]["sample"], feed_dict=feed_seq2seq_input_dict)
def process_reward(self, sess):
controller_ops = self.ops["controller"]
run_ops = [
controller_ops["loss"], controller_ops["lr"],
controller_ops["grad_norm"], controller_ops["grad_norms"],
controller_ops["train_op"]
]
sess.run(run_ops)
sess.run(controller_ops["baseline_update"])
def _get_train_ops(self,
loss,
tf_variables,
global_step,
grad_bound=1.25,
lr_init=1e-3,
lr_dec=0.9,
start_decay_step=10000,
decay_steps=100,
optimizer_type="adam"):
"""Loss optimizer.
Args:
loss: scalar tf tensor
tf_variables: list of training variables, typically
tf.trainable_variables()
global_step: global_step
grad_bound: max gradient norm
lr_init: initial learning rate
lr_dec: leaning rate decay coefficient
start_decay_step: start decaying learning rate after this many steps
decay_steps: apply decay rate factor at this step intervals
optimizer_type: optimizer type should be either adam or sgd
Returns:
train_op: training op
learning_rate: scalar learning rate tensor
grad_norm: l2 norm of the gradient vector
all_grad_norms: l2 norm of each component
"""
lr_gstep = global_step - start_decay_step
def f1():
return constant_op.constant(lr_init)
def f2():
return learning_rate_decay.exponential_decay(lr_init, lr_gstep,
decay_steps, lr_dec, True)
learning_rate = control_flow_ops.cond(
math_ops.less(global_step, start_decay_step),
f1,
f2,
name="learning_rate")
if optimizer_type == "adam":
opt = adam.AdamOptimizer(learning_rate)
elif optimizer_type == "sgd":
opt = gradient_descent.GradientDescentOptimizer(learning_rate)
grads_and_vars = opt.compute_gradients(loss, tf_variables)
grad_norm = clip_ops.global_norm([g for g, v in grads_and_vars])
all_grad_norms = {}
clipped_grads = []
clipped_rate = math_ops.maximum(grad_norm / grad_bound, 1.0)
for g, v in grads_and_vars:
if g is not None:
if isinstance(g, tf_ops.IndexedSlices):
clipped = g.values / clipped_rate
norm_square = math_ops.reduce_sum(clipped * clipped)
clipped = tf_ops.IndexedSlices(clipped, g.indices)
else:
clipped = g / clipped_rate
norm_square = math_ops.reduce_sum(clipped * clipped)
all_grad_norms[v.name] = math_ops.sqrt(norm_square)
clipped_grads.append((clipped, v))
train_op = opt.apply_gradients(clipped_grads, global_step)
return train_op, learning_rate, grad_norm, all_grad_norms
def lstm(x, prev_c, prev_h, w_lstm, forget_bias):
"""LSTM cell.
Args:
x: tensors of size [num_children, hidden_size].
prev_c: tensors of size [num_children, hidden_size].
prev_h: same as prev_c.
    w_lstm: weight matrix of size [2 * hidden_size, 4 * hidden_size] holding
      the fused input/forget/output/candidate gate parameters.
    forget_bias: scalar bias added to the forget-gate pre-activation.
  Returns:
    next_c: updated cell state, same shape as prev_c.
    next_h: updated hidden state, same shape as prev_h.
"""
ifog = math_ops.matmul(array_ops.concat([x, prev_h], axis=1), w_lstm)
i, f, o, g = array_ops.split(ifog, 4, axis=1)
i = math_ops.sigmoid(i)
f = math_ops.sigmoid(f + forget_bias)
o = math_ops.sigmoid(o)
g = math_ops.tanh(g)
next_c = i * g + f * prev_c
next_h = o * math_ops.tanh(next_c)
return next_c, next_h
|
nburn42/tensorflow
|
tensorflow/python/grappler/hierarchical_controller.py
|
Python
|
apache-2.0
| 43,598
|
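The lstm() helper at the bottom of the file above is one fused-matmul LSTM step; here is the same arithmetic in a plain NumPy mirror (shapes follow the docstring; this is an illustrative re-implementation, not TensorFlow code):
import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def lstm_step(x, prev_c, prev_h, w_lstm, forget_bias):
    # One matmul yields all four gate pre-activations, exactly as in lstm().
    ifog = np.concatenate([x, prev_h], axis=1) @ w_lstm
    i, f, o, g = np.split(ifog, 4, axis=1)
    next_c = sigmoid(i) * np.tanh(g) + sigmoid(f + forget_bias) * prev_c
    next_h = sigmoid(o) * np.tanh(next_c)
    return next_c, next_h

# num_children=2, hidden_size=4: w_lstm is [2*hidden, 4*hidden] as documented.
x = np.zeros((2, 4)); c = np.zeros((2, 4)); h = np.zeros((2, 4))
c, h = lstm_step(x, c, h, np.random.randn(8, 16) * 0.1, forget_bias=1.0)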
# (c) 2014, James Tanner <tanner.jc@gmail.com>
# (c) 2014, James Cammarata, <jcammarata@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import tempfile
from io import BytesIO, StringIO
import pytest
from units.compat import unittest
from units.compat.mock import patch
from ansible import errors
from ansible.parsing import vault
from ansible.parsing.vault import VaultLib, VaultEditor, match_encrypt_secret
from ansible.module_utils.six import PY3
from ansible.module_utils._text import to_bytes, to_text
from units.mock.vault_helper import TextVaultSecret
v11_data = """$ANSIBLE_VAULT;1.1;AES256
62303130653266653331306264616235333735323636616539316433666463323964623162386137
3961616263373033353631316333623566303532663065310a393036623466376263393961326530
64336561613965383835646464623865663966323464653236343638373165343863623638316664
3631633031323837340a396530313963373030343933616133393566366137363761373930663833
3739"""
@pytest.mark.skipif(not vault.HAS_CRYPTOGRAPHY,
reason="Skipping cryptography tests because cryptography is not installed")
class TestVaultEditor(unittest.TestCase):
def setUp(self):
self._test_dir = None
self.vault_password = "test-vault-password"
vault_secret = TextVaultSecret(self.vault_password)
self.vault_secrets = [('vault_secret', vault_secret),
('default', vault_secret)]
@property
def vault_secret(self):
return match_encrypt_secret(self.vault_secrets)[1]
def tearDown(self):
if self._test_dir:
pass
# shutil.rmtree(self._test_dir)
self._test_dir = None
def _secrets(self, password):
vault_secret = TextVaultSecret(password)
vault_secrets = [('default', vault_secret)]
return vault_secrets
def test_methods_exist(self):
v = vault.VaultEditor(None)
slots = ['create_file',
'decrypt_file',
'edit_file',
'encrypt_file',
'rekey_file',
'read_data',
'write_data']
for slot in slots:
assert hasattr(v, slot), "VaultLib is missing the %s method" % slot
def _create_test_dir(self):
suffix = '_ansible_unit_test_%s_' % (self.__class__.__name__)
return tempfile.mkdtemp(suffix=suffix)
def _create_file(self, test_dir, name, content=None, symlink=False):
file_path = os.path.join(test_dir, name)
opened_file = open(file_path, 'wb')
if content:
opened_file.write(content)
opened_file.close()
return file_path
def _vault_editor(self, vault_secrets=None):
if vault_secrets is None:
vault_secrets = self._secrets(self.vault_password)
return VaultEditor(VaultLib(vault_secrets))
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_empty_target(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
mock_sp_call.side_effect = self._faux_command
ve = self._vault_editor()
b_ciphertext = ve._edit_file_helper(src_file_path, self.vault_secret)
self.assertNotEqual(src_contents, b_ciphertext)
def test_stdin_binary(self):
stdin_data = '\0'
if PY3:
fake_stream = StringIO(stdin_data)
fake_stream.buffer = BytesIO(to_bytes(stdin_data))
else:
fake_stream = BytesIO(to_bytes(stdin_data))
with patch('sys.stdin', fake_stream):
ve = self._vault_editor()
data = ve.read_data('-')
self.assertEqual(data, b'\0')
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_call_exception(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
error_txt = 'calling editor raised an exception'
mock_sp_call.side_effect = errors.AnsibleError(error_txt)
ve = self._vault_editor()
self.assertRaisesRegexp(errors.AnsibleError,
error_txt,
ve._edit_file_helper,
src_file_path,
self.vault_secret)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_symlink_target(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
src_file_link_path = os.path.join(self._test_dir, 'a_link_to_dest_file')
os.symlink(src_file_path, src_file_link_path)
mock_sp_call.side_effect = self._faux_command
ve = self._vault_editor()
b_ciphertext = ve._edit_file_helper(src_file_link_path, self.vault_secret)
self.assertNotEqual(src_file_contents, b_ciphertext,
'b_ciphertext should be encrypted and not equal to src_contents')
def _faux_editor(self, editor_args, new_src_contents=None):
if editor_args[0] == 'shred':
return
tmp_path = editor_args[-1]
        # simulate the tmp file being edited
tmp_file = open(tmp_path, 'wb')
if new_src_contents:
tmp_file.write(new_src_contents)
tmp_file.close()
def _faux_command(self, tmp_path):
pass
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_no_change(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
# editor invocation doesn't change anything
def faux_editor(editor_args):
self._faux_editor(editor_args, src_file_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
ve._edit_file_helper(src_file_path, self.vault_secret, existing_data=src_file_contents)
new_target_file = open(src_file_path, 'rb')
new_target_file_contents = new_target_file.read()
self.assertEqual(src_file_contents, new_target_file_contents)
def _assert_file_is_encrypted(self, vault_editor, src_file_path, src_contents):
new_src_file = open(src_file_path, 'rb')
new_src_file_contents = new_src_file.read()
# TODO: assert that it is encrypted
self.assertTrue(vault.is_encrypted(new_src_file_contents))
src_file_plaintext = vault_editor.vault.decrypt(new_src_file_contents)
# the plaintext should not be encrypted
self.assertFalse(vault.is_encrypted(src_file_plaintext))
# and the new plaintext should match the original
self.assertEqual(src_file_plaintext, src_contents)
def _assert_file_is_link(self, src_file_link_path, src_file_path):
self.assertTrue(os.path.islink(src_file_link_path),
'The dest path (%s) should be a symlink to (%s) but is not' % (src_file_link_path, src_file_path))
def test_rekey_file(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
# FIXME: update to just set self._secrets or just a new vault secret id
new_password = 'password2:electricbugaloo'
new_vault_secret = TextVaultSecret(new_password)
new_vault_secrets = [('default', new_vault_secret)]
ve.rekey_file(src_file_path, vault.match_encrypt_secret(new_vault_secrets)[1])
# FIXME: can just update self._secrets here
new_ve = vault.VaultEditor(VaultLib(new_vault_secrets))
self._assert_file_is_encrypted(new_ve, src_file_path, src_file_contents)
def test_rekey_file_no_new_password(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
self.assertRaisesRegexp(errors.AnsibleError,
'The value for the new_password to rekey',
ve.rekey_file,
src_file_path,
None)
def test_rekey_file_not_encrypted(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
new_password = 'password2:electricbugaloo'
self.assertRaisesRegexp(errors.AnsibleError,
'input is not vault encrypted data',
ve.rekey_file,
src_file_path, new_password)
def test_plaintext(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
res = ve.plaintext(src_file_path)
self.assertEqual(src_file_contents, res)
def test_plaintext_not_encrypted(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
self.assertRaisesRegexp(errors.AnsibleError,
'input is not vault encrypted data',
ve.plaintext,
src_file_path)
def test_encrypt_file(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
self._assert_file_is_encrypted(ve, src_file_path, src_file_contents)
def test_encrypt_file_symlink(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
src_file_link_path = os.path.join(self._test_dir, 'a_link_to_dest_file')
os.symlink(src_file_path, src_file_link_path)
ve = self._vault_editor()
ve.encrypt_file(src_file_link_path, self.vault_secret)
self._assert_file_is_encrypted(ve, src_file_path, src_file_contents)
self._assert_file_is_encrypted(ve, src_file_link_path, src_file_contents)
self._assert_file_is_link(src_file_link_path, src_file_path)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_no_vault_id(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
new_src_contents = to_bytes("The info is different now.")
def faux_editor(editor_args):
self._faux_editor(editor_args, new_src_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
ve.edit_file(src_file_path)
new_src_file = open(src_file_path, 'rb')
new_src_file_contents = new_src_file.read()
self.assertTrue(b'$ANSIBLE_VAULT;1.1;AES256' in new_src_file_contents)
src_file_plaintext = ve.vault.decrypt(new_src_file_contents)
self.assertEqual(src_file_plaintext, new_src_contents)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_with_vault_id(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
new_src_contents = to_bytes("The info is different now.")
def faux_editor(editor_args):
self._faux_editor(editor_args, new_src_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret,
vault_id='vault_secrets')
ve.edit_file(src_file_path)
new_src_file = open(src_file_path, 'rb')
new_src_file_contents = new_src_file.read()
self.assertTrue(b'$ANSIBLE_VAULT;1.2;AES256;vault_secrets' in new_src_file_contents)
src_file_plaintext = ve.vault.decrypt(new_src_file_contents)
self.assertEqual(src_file_plaintext, new_src_contents)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_symlink(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
new_src_contents = to_bytes("The info is different now.")
def faux_editor(editor_args):
self._faux_editor(editor_args, new_src_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
src_file_link_path = os.path.join(self._test_dir, 'a_link_to_dest_file')
os.symlink(src_file_path, src_file_link_path)
ve.edit_file(src_file_link_path)
new_src_file = open(src_file_path, 'rb')
new_src_file_contents = new_src_file.read()
src_file_plaintext = ve.vault.decrypt(new_src_file_contents)
self._assert_file_is_link(src_file_link_path, src_file_path)
self.assertEqual(src_file_plaintext, new_src_contents)
# self.assertEqual(src_file_plaintext, new_src_contents,
#                  'The decrypted plaintext of the edited file is not the expected contents.')
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_not_encrypted(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
new_src_contents = to_bytes("The info is different now.")
def faux_editor(editor_args):
self._faux_editor(editor_args, new_src_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
self.assertRaisesRegexp(errors.AnsibleError,
'input is not vault encrypted data',
ve.edit_file,
src_file_path)
def test_create_file_exists(self):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
ve = self._vault_editor()
self.assertRaisesRegexp(errors.AnsibleError,
'please use .edit. instead',
ve.create_file,
src_file_path,
self.vault_secret)
def test_decrypt_file_exception(self):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
ve = self._vault_editor()
self.assertRaisesRegexp(errors.AnsibleError,
'input is not vault encrypted data',
ve.decrypt_file,
src_file_path)
@patch.object(vault.VaultEditor, '_editor_shell_command')
def test_create_file(self, mock_editor_shell_command):
def sc_side_effect(filename):
return ['touch', filename]
mock_editor_shell_command.side_effect = sc_side_effect
tmp_file = tempfile.NamedTemporaryFile()
os.unlink(tmp_file.name)
_secrets = self._secrets('ansible')
ve = self._vault_editor(_secrets)
ve.create_file(tmp_file.name, vault.match_encrypt_secret(_secrets)[1])
self.assertTrue(os.path.exists(tmp_file.name))
def test_decrypt_1_1(self):
v11_file = tempfile.NamedTemporaryFile(delete=False)
with v11_file as f:
f.write(to_bytes(v11_data))
ve = self._vault_editor(self._secrets("ansible"))
# make sure the password works for the cipher
error_hit = False
try:
ve.decrypt_file(v11_file.name)
except errors.AnsibleError:
error_hit = True
# verify decrypted content
f = open(v11_file.name, "rb")
fdata = to_text(f.read())
f.close()
os.unlink(v11_file.name)
assert error_hit is False, "error decrypting 1.1 file"
assert fdata.strip() == "foo", "incorrect decryption of 1.1 file: %s" % fdata.strip()
def test_real_path_dash(self):
filename = '-'
ve = self._vault_editor()
res = ve._real_path(filename)
self.assertEqual(res, '-')
def test_real_path_dev_null(self):
filename = '/dev/null'
ve = self._vault_editor()
res = ve._real_path(filename)
self.assertEqual(res, '/dev/null')
def test_real_path_symlink(self):
self._test_dir = os.path.realpath(self._create_test_dir())
file_path = self._create_file(self._test_dir, 'test_file', content=b'this is a test file')
file_link_path = os.path.join(self._test_dir, 'a_link_to_test_file')
os.symlink(file_path, file_link_path)
ve = self._vault_editor()
res = ve._real_path(file_link_path)
self.assertEqual(res, file_path)
|
nitzmahone/ansible
|
test/units/parsing/vault/test_vault_editor.py
|
Python
|
gpl-3.0
| 19,535
|
# -*- coding: utf8 -*-
from __future__ import print_function
import ast
import re
import warnings
import yaml  # use yaml instead of json to get non-unicode strings (works with ASCII-only data)
from rlp.utils import decode_hex, encode_hex
from ethereum import utils
from ethereum.utils import (
big_endian_to_int, ceil32, int_to_big_endian, encode_int, is_numeric, isnumeric, is_string,
rzpad, TT255, TT256, zpad,
)
# The number of bytes is encoded as a uint256
# Type used to encode a string/bytes length
INT256 = 'uint', '256', []
lentyp = INT256 # pylint: disable=invalid-name
class EncodingError(Exception):
pass
class ValueOutOfBounds(EncodingError):
pass
def json_decode(data):
return yaml.safe_load(data)
def split32(data):
""" Split data into pieces of 32 bytes. """
all_pieces = []
for position in range(0, len(data), 32):
piece = data[position:position + 32]
all_pieces.append(piece)
return all_pieces
def _canonical_type(name): # pylint: disable=too-many-return-statements
""" Replace aliases to the corresponding type to compute the ids. """
if name == 'int':
return 'int256'
if name == 'uint':
return 'uint256'
if name == 'fixed':
return 'fixed128x128'
if name == 'ufixed':
return 'ufixed128x128'
if name.startswith('int['):
return 'int256' + name[3:]
if name.startswith('uint['):
return 'uint256' + name[4:]
if name.startswith('fixed['):
return 'fixed128x128' + name[5:]
if name.startswith('ufixed['):
return 'ufixed128x128' + name[6:]
return name
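# A short illustration (not part of the original module) of the alias
# expansion above:
#   _canonical_type('uint')      -> 'uint256'
#   _canonical_type('fixed[2]')  -> 'fixed128x128[2]'
#   _canonical_type('bytes32')   -> 'bytes32' (already canonical)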
def normalize_name(name):
""" Return normalized event/function name. """
if '(' in name:
return name[:name.find('(')]
return name
def method_id(name, encode_types):
""" Return the unique method id.
The signature is defined as the canonical expression of the basic
prototype, i.e. the function name with the parenthesised list of parameter
types. Parameter types are split by a single comma - no spaces are used.
The method id is defined as the first four bytes (left, high-order in
big-endian) of the Keccak (SHA-3) hash of the signature of the function.
"""
function_types = [
_canonical_type(type_)
for type_ in encode_types
]
function_signature = '{function_name}({canonical_types})'.format(
function_name=name,
canonical_types=','.join(function_types),
)
function_keccak = utils.sha3(function_signature)
first_bytes = function_keccak[:4]
return big_endian_to_int(first_bytes)
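# Example (a sketch, not original code): the well-known ERC-20 transfer
# selector falls out of this directly, since utils.sha3 is keccak-256:
#   method_id('transfer', ['address', 'uint256'])
#   == big_endian_to_int(keccak('transfer(address,uint256)')[:4])
#   == 0xa9059cbb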
def event_id(name, encode_types):
""" Return the event id.
Defined as:
`keccak(EVENT_NAME+"("+EVENT_ARGS.map(canonical_type_of).join(",")+")")`
Where `canonical_type_of` is a function that simply returns the canonical
type of a given argument (e.g. for uint indexed foo, it would return
uint256). Note the lack of spaces.
"""
event_types = [
_canonical_type(type_)
for type_ in encode_types
]
event_signature = '{event_name}({canonical_types})'.format(
event_name=name,
canonical_types=','.join(event_types),
)
return big_endian_to_int(utils.sha3(event_signature))
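# Example (a sketch, not original code): the canonical ERC-20 Transfer topic:
#   hex(event_id('Transfer', ['address', 'address', 'uint256']))
#   == '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef'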
def decint(n, signed=False): # pylint: disable=invalid-name,too-many-branches
''' Decode an unsigned/signed integer. '''
if isinstance(n, str):
n = utils.to_string(n)
if n is True:
return 1
if n is False:
return 0
if n is None:
return 0
if is_numeric(n):
if signed:
if not -TT255 <= n <= TT255 - 1:
raise EncodingError('Number out of range: %r' % n)
else:
if not 0 <= n <= TT256 - 1:
raise EncodingError('Number out of range: %r' % n)
return n
if is_string(n):
# a 40-character string is assumed to be hex encoded (e.g. an address),
# so it must be decoded before the length check
if len(n) == 40:
int_bigendian = decode_hex(n)
else:
int_bigendian = n  # pylint: disable=redefined-variable-type
if len(int_bigendian) > 32:
raise EncodingError('String too long: %r' % n)
result = big_endian_to_int(int_bigendian)
if signed:
if result >= TT255:
result -= TT256
if not -TT255 <= result <= TT255 - 1:
raise EncodingError('Number out of range: %r' % n)
else:
if not 0 <= result <= TT256 - 1:
raise EncodingError('Number out of range: %r' % n)
return result
raise EncodingError('Cannot decode integer: %r' % n)
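# A few illustrative cases (not part of the original module):
#   decint(True) == 1 and decint(None) == 0
#   decint(255) == 255 (validated against the 256-bit range)
#   a 40-character string is taken as a hex-encoded 20-byte value and is
#   hex-decoded before the big-endian conversion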
def encode_single(typ, arg): # pylint: disable=too-many-return-statements,too-many-branches,too-many-statements,too-many-locals
''' Encode `arg` as `typ`.
`arg` will be encoded in a best-effort manner; where necessary the function
will try to correctly infer the underlying binary representation (i.e.
decoding a hex-encoded address/hash).
Args:
typ (Tuple[(str, int, list)]): A 3-tuple defining the `arg` type.
The first element defines the type name.
The second element defines the type length in bits.
The third element defines if it's an array type.
Together the first and second defines the elementary type, the third
element must be present but is ignored.
Valid type names are:
- uint
- int
- bool
- ufixed
- fixed
- string
- bytes
- hash
- address
arg (object): The object to be encoded, it must be a python object
compatible with the `typ`.
Raises:
ValueError: when an invalid `typ` is supplied.
ValueOutOfBounds: when `arg` cannot be encoded as `typ` because of the
binary constraints.
Note:
This function doesn't work with array types; for those, use the `enc`
function.
'''
base, sub, _ = typ
if base == 'uint':
sub = int(sub)
if not (0 < sub <= 256 and sub % 8 == 0):
raise ValueError('invalid unsigned integer bit length {}'.format(sub))
try:
i = decint(arg, signed=False)
except EncodingError:
# arg is larger than 2**256
raise ValueOutOfBounds(repr(arg))
if not 0 <= i < 2 ** sub:
raise ValueOutOfBounds(repr(arg))
value_encoded = int_to_big_endian(i)
return zpad(value_encoded, 32)
if base == 'int':
sub = int(sub)
bits = sub - 1
if not (0 < sub <= 256 and sub % 8 == 0):
raise ValueError('invalid integer bit length {}'.format(sub))
try:
i = decint(arg, signed=True)
except EncodingError:
# arg is larger than 2**255
raise ValueOutOfBounds(repr(arg))
if not -2 ** bits <= i < 2 ** bits:
raise ValueOutOfBounds(repr(arg))
value = i % 2 ** sub # convert negative to "equivalent" positive
value_encoded = int_to_big_endian(value)
return zpad(value_encoded, 32)
if base == 'bool':
if arg is True:
value_encoded = int_to_big_endian(1)
elif arg is False:
value_encoded = int_to_big_endian(0)
else:
raise ValueError('%r is not bool' % arg)
return zpad(value_encoded, 32)
if base == 'ufixed':
sub = str(sub) # pylint: disable=redefined-variable-type
high_str, low_str = sub.split('x')
high = int(high_str)
low = int(low_str)
if not (0 < high + low <= 256 and high % 8 == 0 and low % 8 == 0):
raise ValueError('invalid unsigned fixed length {}'.format(sub))
if not 0 <= arg < 2 ** high:
raise ValueOutOfBounds(repr(arg))
float_point = arg * 2 ** low
fixed_point = int(float_point)
return zpad(int_to_big_endian(fixed_point), 32)
if base == 'fixed':
sub = str(sub) # pylint: disable=redefined-variable-type
high_str, low_str = sub.split('x')
high = int(high_str)
low = int(low_str)
bits = high - 1
if not (0 < high + low <= 256 and high % 8 == 0 and low % 8 == 0):
raise ValueError('invalid unsigned fixed length {}'.format(sub))
if not -2 ** bits <= arg < 2 ** bits:
raise ValueOutOfBounds(repr(arg))
float_point = arg * 2 ** low
fixed_point = int(float_point)
value = fixed_point % 2 ** 256
return zpad(int_to_big_endian(value), 32)
if base == 'string':
if isinstance(arg, utils.unicode):
arg = arg.encode('utf8')
else:
try:
arg.decode('utf8')
except UnicodeDecodeError:
raise ValueError('string must be utf8 encoded')
if len(sub): # fixed length
if not 0 <= len(arg) <= int(sub):
raise ValueError('invalid string length {}'.format(sub))
if not 0 <= int(sub) <= 32:
raise ValueError('invalid string length {}'.format(sub))
return rzpad(arg, 32)
if not 0 <= len(arg) < TT256:
raise Exception('Integer invalid or out of range: %r' % arg)
length_encoded = zpad(int_to_big_endian(len(arg)), 32)
value_encoded = rzpad(arg, utils.ceil32(len(arg)))
return length_encoded + value_encoded
if base == 'bytes':
if not is_string(arg):
raise EncodingError('Expecting string: %r' % arg)
arg = utils.to_string(arg) # py2: force unicode into str
if len(sub): # fixed length
if not 0 <= len(arg) <= int(sub):
raise ValueError('invalid bytes length {}'.format(sub))
if not 0 <= int(sub) <= 32:
raise ValueError('invalid bytes length {}'.format(sub))
return rzpad(arg, 32)
if not 0 <= len(arg) < TT256:
raise Exception('Integer invalid or out of range: %r' % arg)
length_encoded = zpad(int_to_big_endian(len(arg)), 32)
value_encoded = rzpad(arg, utils.ceil32(len(arg)))
return length_encoded + value_encoded
if base == 'hash':
if not (int(sub) and int(sub) <= 32):
raise EncodingError('too long: %r' % arg)
if isnumeric(arg):
return zpad(encode_int(arg), 32)
if len(arg) == int(sub):
return zpad(arg, 32)
if len(arg) == int(sub) * 2:
return zpad(decode_hex(arg), 32)
raise EncodingError('Could not parse hash: %r' % arg)
if base == 'address':
assert sub == ''
if isnumeric(arg):
return zpad(encode_int(arg), 32)
if len(arg) == 20:
return zpad(arg, 32)
if len(arg) == 40:
return zpad(decode_hex(arg), 32)
if len(arg) == 42 and arg[:2] == '0x':
return zpad(decode_hex(arg[2:]), 32)
raise EncodingError('Could not parse address: %r' % arg)
raise EncodingError('Unhandled type: %r %r' % (base, sub))
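# Two worked examples (a sketch, not original code):
#   encode_single(('uint', '256', []), 42)
#   -> 31 zero bytes followed by b'\x2a' (left-padded to 32 bytes)
#   encode_single(('bool', '', []), True)
#   -> 31 zero bytes followed by b'\x01'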
class ContractTranslator(object):
def __init__(self, contract_interface):
if is_string(contract_interface):
contract_interface = json_decode(contract_interface)
self.fallback_data = None
self.constructor_data = None
self.function_data = {}
self.event_data = {}
for description in contract_interface:
entry_type = description.get('type', 'function')
encode_types = []
signature = []
# If it's a function/constructor/event
if entry_type != 'fallback' and 'inputs' in description:
encode_types = [
element['type']
for element in description.get('inputs')
]
signature = [
(element['type'], element['name'])
for element in description.get('inputs')
]
if entry_type == 'function':
normalized_name = normalize_name(description['name'])
decode_types = [
element['type']
for element in description['outputs']
]
self.function_data[normalized_name] = {
'prefix': method_id(normalized_name, encode_types),
'encode_types': encode_types,
'decode_types': decode_types,
'is_constant': description.get('constant', False),
'signature': signature,
'payable': description.get('payable', False),
}
elif entry_type == 'event':
normalized_name = normalize_name(description['name'])
indexed = [
element['indexed']
for element in description['inputs']
]
names = [
element['name']
for element in description['inputs']
]
# event_id == topics[0]
self.event_data[event_id(normalized_name, encode_types)] = {
'types': encode_types,
'name': normalized_name,
'names': names,
'indexed': indexed,
'anonymous': description.get('anonymous', False),
}
elif entry_type == 'constructor':
if self.constructor_data is not None:
raise ValueError('Only one constructor is supported.')
self.constructor_data = {
'encode_types': encode_types,
'signature': signature,
}
elif entry_type == 'fallback':
if self.fallback_data is not None:
raise ValueError('Only one fallback function is supported.')
self.fallback_data = {'payable': description['payable']}
else:
raise ValueError('Unknown type {}'.format(description['type']))
def encode(self, function_name, args):
warnings.warn('encode is deprecated, please use encode_function_call', DeprecationWarning)
return self.encode_function_call(function_name, args)
def decode(self, function_name, data):
warnings.warn('decode is deprecated, please use decode_function_result', DeprecationWarning)
return self.decode_function_result(function_name, data)
def encode_function_call(self, function_name, args):
""" Return the encoded function call.
Args:
function_name (str): One of the existing functions described in the
contract interface.
args (List[object]): The function arguments that will be encoded and
used in the contract execution in the vm.
Return:
bin: The encoded function name and arguments so that it can be used
with the EVM to execute a function call; the binary string follows
the Ethereum Contract ABI.
"""
if function_name not in self.function_data:
raise ValueError('Unknown function {}'.format(function_name))
description = self.function_data[function_name]
function_selector = zpad(encode_int(description['prefix']), 4)
arguments = encode_abi(description['encode_types'], args)
return function_selector + arguments
def decode_function_result(self, function_name, data):
""" Return the function call result decoded.
Args:
function_name (str): One of the existing functions described in the
contract interface.
data (bin): The encoded result from calling `function_name`.
Return:
List[object]: The values returned by the call to `function_name`.
"""
description = self.function_data[function_name]
arguments = decode_abi(description['decode_types'], data)
return arguments
def encode_constructor_arguments(self, args):
""" Return the encoded constructor call. """
if self.constructor_data is None:
raise ValueError("The contract interface didn't have a constructor")
return encode_abi(self.constructor_data['encode_types'], args)
def decode_event(self, log_topics, log_data):
""" Return a dictionary representation the log.
Note:
This function won't work with anonymous events.
Args:
log_topics (List[bin]): The log's indexed arguments.
log_data (bin): The encoded non-indexed arguments.
"""
# https://github.com/ethereum/wiki/wiki/Ethereum-Contract-ABI#function-selector-and-argument-encoding
# topics[0]: keccak(EVENT_NAME+"("+EVENT_ARGS.map(canonical_type_of).join(",")+")")
# If the event is declared as anonymous the topics[0] is not generated;
if not len(log_topics) or log_topics[0] not in self.event_data:
raise ValueError('Unknown log type')
event_id_ = log_topics[0]
event = self.event_data[event_id_]
# data: abi_serialise(EVENT_NON_INDEXED_ARGS)
# EVENT_NON_INDEXED_ARGS is the series of EVENT_ARGS that are not
# indexed, abi_serialise is the ABI serialisation function used for
# returning a series of typed values from a function.
unindexed_types = [
type_
for type_, indexed in zip(event['types'], event['indexed'])
if not indexed
]
unindexed_args = decode_abi(unindexed_types, log_data)
# topics[n]: EVENT_INDEXED_ARGS[n - 1]
# EVENT_INDEXED_ARGS is the series of EVENT_ARGS that are indexed
indexed_count = 1 # skip topics[0]
result = {}
for name, type_, indexed in zip(event['names'], event['types'], event['indexed']):
if indexed:
topic_bytes = utils.zpad(
utils.encode_int(log_topics[indexed_count]),
32,
)
indexed_count += 1
value = decode_single(process_type(type_), topic_bytes)
else:
value = unindexed_args.pop(0)
result[name] = value
result['_event_type'] = utils.to_string(event['name'])
return result
def listen(self, log, noprint=True):
"""
Return a dictionary representation of the Log instance.
Note:
This function won't work with anonymous events.
Args:
log (processblock.Log): The Log instance that needs to be parsed.
noprint (bool): Flag to turn off printing of the decoded log instance.
"""
try:
result = self.decode_event(log.topics, log.data)
except ValueError:
return # api compatibility
if not noprint:
print(result)
return result
def process_type(typ):
# Crazy reg expression to separate out base type component (eg. uint),
# size (eg. 256, 128x128, none), array component (eg. [], [45], none)
regexp = r'([a-z]*)([0-9]*x?[0-9]*)((\[[0-9]*\])*)'
base, sub, arr, _ = re.match(regexp, utils.to_string_for_regexp(typ)).groups()
arrlist = re.findall(r'\[[0-9]*\]', arr)
assert len(''.join(arrlist)) == len(arr), \
"Unknown characters found in array declaration"
# Check validity of string type
if base == 'string' or base == 'bytes':
assert re.match('^[0-9]*$', sub), \
"String type must have no suffix or numerical suffix"
assert not sub or int(sub) <= 32, \
"Maximum 32 bytes for fixed-length str or bytes"
# Check validity of integer type
elif base == 'uint' or base == 'int':
assert re.match('^[0-9]+$', sub), \
"Integer type must have numerical suffix"
assert 8 <= int(sub) <= 256, \
"Integer size out of bounds"
assert int(sub) % 8 == 0, \
"Integer size must be multiple of 8"
# Check validity of fixed type
elif base == 'ufixed' or base == 'fixed':
assert re.match('^[0-9]+x[0-9]+$', sub), \
"Real type must have suffix of form <high>x<low>, eg. 128x128"
high, low = [int(x) for x in sub.split('x')]
assert 8 <= (high + low) <= 256, \
"Real size out of bounds (max 32 bytes)"
assert high % 8 == 0 and low % 8 == 0, \
"Real high/low sizes must be multiples of 8"
# Check validity of hash type
elif base == 'hash':
assert re.match('^[0-9]+$', sub), \
"Hash type must have numerical suffix"
# Check validity of address type
elif base == 'address':
assert sub == '', "Address cannot have suffix"
return base, sub, [ast.literal_eval(x) for x in arrlist]
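# Parsing examples (a sketch, not original code):
#   process_type('uint256[2][]') -> ('uint', '256', [[2], []])
#   process_type('bytes32')      -> ('bytes', '32', [])
#   process_type('fixed128x128') -> ('fixed', '128x128', [])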
# Returns the static size of a type, or None if dynamic
def get_size(typ):
base, sub, arrlist = typ
if not len(arrlist):
if base in ('string', 'bytes') and not sub:
return None
return 32
if arrlist[-1] == []:
return None
o = get_size((base, sub, arrlist[:-1]))
if o is None:
return None
return arrlist[-1][0] * o
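# Size examples (a sketch, not original code):
#   get_size(('uint', '256', []))    == 32   (one static word)
#   get_size(('uint', '256', [[2]])) == 64   (fixed array of two words)
#   get_size(('bytes', '', []))      is None (dynamic)
#   get_size(('uint', '256', [[]]))  is None (dynamic-length array)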
# Encodes a single value (static or dynamic)
def enc(typ, arg):
base, sub, arrlist = typ
type_size = get_size(typ)
if base in ('string', 'bytes') and not sub:
return encode_single(typ, arg)
# Encode dynamic-sized lists via the head/tail mechanism described in
# https://github.com/ethereum/wiki/wiki/Proposal-for-new-ABI-value-encoding
if type_size is None:
assert isinstance(arg, list), \
"Expecting a list argument"
subtyp = base, sub, arrlist[:-1]
subsize = get_size(subtyp)
myhead, mytail = b'', b''
if arrlist[-1] == []:
myhead += enc(INT256, len(arg))
else:
assert len(arg) == arrlist[-1][0], \
"Wrong array size: found %d, expecting %d" % \
(len(arg), arrlist[-1][0])
for i in range(len(arg)):
if subsize is None:
myhead += enc(INT256, 32 * len(arg) + len(mytail))
mytail += enc(subtyp, arg[i])
else:
myhead += enc(subtyp, arg[i])
return myhead + mytail
# Encode static-sized lists via sequential packing
else:
if arrlist == []:
return utils.to_string(encode_single(typ, arg))
else:
subtyp = base, sub, arrlist[:-1]
o = b''
for x in arg:
o += enc(subtyp, x)
return o
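# Worked example for the dynamic case (a sketch, not original code):
# enc(('uint', '256', [[]]), [1, 2]) yields three 32-byte words, the
# length first, because the uint256 elements themselves are static:
#   zpad(b'\x02', 32) + zpad(b'\x01', 32) + zpad(b'\x02', 32)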
# Encodes multiple arguments using the head/tail mechanism
def encode_abi(types, args):
headsize = 0
proctypes = [process_type(typ) for typ in types]
sizes = [get_size(typ) for typ in proctypes]
for i, arg in enumerate(args):
if sizes[i] is None:
headsize += 32
else:
headsize += sizes[i]
myhead, mytail = b'', b''
for i, arg in enumerate(args):
if sizes[i] is None:
myhead += enc(INT256, headsize + len(mytail))
mytail += enc(proctypes[i], args[i])
else:
myhead += enc(proctypes[i], args[i])
return myhead + mytail
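# Head/tail worked example (a sketch, not original code):
# encode_abi(['uint256', 'bytes'], [1, b'ab']) produces four 32-byte words:
#   word 0: 1                (static arg, placed in the head)
#   word 1: 64               (offset of the bytes payload from the start)
#   word 2: 2                (length of b'ab')
#   word 3: b'ab' right-padded with zeros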
# Decodes a single base datum
def decode_single(typ, data):
base, sub, _ = typ
if base == 'address':
return encode_hex(data[12:])
elif base == 'hash':
return data[32 - int(sub):]
elif base == 'string' or base == 'bytes':
if len(sub):
return data[:int(sub)]
else:
l = big_endian_to_int(data[0:32])
return data[32:][:l]
elif base == 'uint':
return big_endian_to_int(data)
elif base == 'int':
o = big_endian_to_int(data)
return (o - 2 ** int(sub)) if o >= 2 ** (int(sub) - 1) else o
elif base == 'ufixed':
high, low = [int(x) for x in sub.split('x')]
return big_endian_to_int(data) * 1.0 / 2 ** low  # true division keeps the fractional part
elif base == 'fixed':
high, low = [int(x) for x in sub.split('x')]
o = big_endian_to_int(data)
i = (o - 2 ** (high + low)) if o >= 2 ** (high + low - 1) else o
return i * 1.0 / 2 ** low  # true division keeps the fractional part
elif base == 'bool':
return bool(int(encode_hex(data), 16))
# Decodes multiple arguments using the head/tail mechanism
def decode_abi(types, data):
# Process types
proctypes = [process_type(typ) for typ in types]
# Get sizes of everything
sizes = [get_size(typ) for typ in proctypes]
# Initialize array of outputs
outs = [None] * len(types)
# Initialize array of start positions
start_positions = [None] * len(types) + [len(data)]
# If a type is static, grab the data directly, otherwise record
# its start position
pos = 0
for i, typ in enumerate(types):
if sizes[i] is None:
start_positions[i] = big_endian_to_int(data[pos:pos + 32])
j = i - 1
while j >= 0 and start_positions[j] is None:
start_positions[j] = start_positions[i]
j -= 1
pos += 32
else:
outs[i] = data[pos:pos + sizes[i]]
pos += sizes[i]
# We add a start position equal to the length of the entire data
# for convenience.
j = len(types) - 1
while j >= 0 and start_positions[j] is None:
start_positions[j] = start_positions[len(types)]
j -= 1
assert pos <= len(data), "Not enough data for head"
# Grab the data for tail arguments using the start positions
# calculated above
for i, typ in enumerate(types):
if sizes[i] is None:
offset = start_positions[i]
next_offset = start_positions[i + 1]
outs[i] = data[offset:next_offset]
# Recursively decode them all
return [dec(proctypes[i], outs[i]) for i in range(len(outs))]
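# Round-trip example (a sketch, not original code): decoding the output of
# the encode_abi example above recovers the original values:
#   decode_abi(['uint256', 'bytes'],
#              encode_abi(['uint256', 'bytes'], [1, b'ab'])) == [1, b'ab']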
# Decode a single value (static or dynamic)
def dec(typ, arg):
base, sub, arrlist = typ
sz = get_size(typ)
# Dynamic-sized strings are encoded as <len(str)> + <str>
if base in ('string', 'bytes') and not sub:
L = big_endian_to_int(arg[:32])
assert len(arg[32:]) == ceil32(L), "Wrong data size for string/bytes object"
return arg[32:][:L]
# Dynamic-sized arrays
elif sz is None:
L = big_endian_to_int(arg[:32])
subtyp = base, sub, arrlist[:-1]
subsize = get_size(subtyp)
# If children are dynamic, use the head/tail mechanism. Fortunately,
# here the code is simpler since we do not have to worry about
# mixed dynamic and static children, as we do in the top-level multi-arg
# case
if subsize is None:
assert len(arg) >= 32 + 32 * L, "Not enough data for head"
start_positions = [big_endian_to_int(arg[32 + 32 * i: 64 + 32 * i])
for i in range(L)] + [len(arg)]
outs = [arg[start_positions[i]: start_positions[i + 1]]
for i in range(L)]
return [dec(subtyp, out) for out in outs]
# If children are static, then grab the data slice for each one and
# sequentially decode them manually
else:
return [dec(subtyp, arg[32 + subsize * i: 32 + subsize * (i + 1)])
for i in range(L)]
# Static-sized arrays: decode piece-by-piece
elif len(arrlist):
L = arrlist[-1][0]
subtyp = base, sub, arrlist[:-1]
subsize = get_size(subtyp)
return [dec(subtyp, arg[subsize * i:subsize * (i + 1)])
for i in range(L)]
else:
return decode_single(typ, arg)
|
shahankhatch/pyethereum
|
ethereum/abi.py
|
Python
|
mit
| 27,824
|
#!/usr/bin/env python
import sys
warnings = list()
try:
from setuptools import setup, Extension
except ImportError:
warnings.append("warning: using disutils.core.setup, cannot use \"develop\" option")
from disutils.core import setup, Extension
try:
from distutils.command.build_py import build_py_2to3 as build_py
except ImportError:
from distutils.command.build_py import build_py
from distutils.command.build_ext import build_ext
from distutils.errors import CCompilerError, DistutilsExecError, \
DistutilsPlatformError
try:
from pypandoc import convert
except ImportError:
warnings.append("warning: pypandoc module not found, could not convert Markdown to RST")
read_md = lambda f: open(f, 'r').read()
else:
read_md = lambda f: convert(f, 'rst')
import deap
if sys.platform == 'win32' and sys.version_info > (2, 6):
# 2.6's distutils.msvc9compiler can raise an IOError when failing to
# find the compiler
# It can also raise ValueError http://bugs.python.org/issue7511
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError,
IOError, ValueError)
else:
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError)
class BuildFailed(Exception):
pass
class ve_build_ext(build_ext):
# This class allows C extension building to fail.
def run(self):
try:
build_ext.run(self)
except DistutilsPlatformError:
raise BuildFailed()
def build_extension(self, ext):
try:
build_ext.build_extension(self, ext)
except ext_errors:
raise BuildFailed()
def run_setup(build_ext):
extra_modules = None
if build_ext:
extra_modules = list()
hv_module = Extension("deap.tools._hypervolume.hv", sources=["deap/tools/_hypervolume/_hv.c", "deap/tools/_hypervolume/hv.cpp"])
extra_modules.append(hv_module)
setup(name='deap',
version=deap.__revision__,
description='Distributed Evolutionary Algorithms in Python',
long_description=read_md('README.md'),
author='deap Development Team',
author_email='deap-users@googlegroups.com',
url='https://www.github.com/deap',
packages=['deap', 'deap.tools', 'deap.tools._hypervolume', 'deap.benchmarks', 'deap.tests'],
platforms=['any'],
keywords=['evolutionary algorithms','genetic algorithms','genetic programming','cma-es','ga','gp','es','pso'],
license='LGPL',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering',
'Topic :: Software Development',
],
ext_modules = extra_modules,
cmdclass = {'build_py': build_py, "build_ext" : ve_build_ext}
)
try:
run_setup(True)
except BuildFailed:
print("*" * 75)
print("WARNING: The C extensions could not be compiled, "
"speedups won't be available.")
print("Now building without C extensions.")
print("*" * 75)
run_setup(False)
print("*" * 75)
print("WARNING: The C extensions could not be compiled, "
"speedups won't be available.")
print("Plain-Python installation succeeded.")
print("*" * 75)
print("\n".join(warnings))
|
t2abdulg/deap
|
setup.py
|
Python
|
lgpl-3.0
| 3,638
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ast_edits which is used in tf upgraders.
All of the tests assume that we want to change from an API containing
import foo as f
def f(a, b, kw1, kw2): ...
def g(a, b, kw1, c, kw1_alias): ...
def g2(a, b, kw1, c, d, kw1_alias): ...
def h(a, kw1, kw2, kw1_alias, kw2_alias): ...
and the changes to the API consist of renaming, reordering, and/or removing
arguments. Thus, we want to be able to generate changes to produce each of the
following new APIs:
import bar as f
def f(a, b, kw1, kw3): ...
def f(a, b, kw2, kw1): ...
def f(a, b, kw3, kw1): ...
def g(a, b, kw1, c): ...
def g(a, b, c, kw1): ...
def g2(a, b, kw1, c, d): ...
def g2(a, b, c, d, kw1): ...
def h(a, kw1, kw2): ...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ast
import os
import six
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test as test_lib
from tensorflow.tools.compatibility import ast_edits
class ModuleDeprecationSpec(ast_edits.NoUpdateSpec):
"""A specification which deprecates 'a.b'."""
def __init__(self):
ast_edits.NoUpdateSpec.__init__(self)
self.module_deprecations.update({"a.b": (ast_edits.ERROR, "a.b is evil.")})
class RenameKeywordSpec(ast_edits.NoUpdateSpec):
"""A specification where kw2 gets renamed to kw3.
The new API is
def f(a, b, kw1, kw3): ...
"""
def __init__(self):
ast_edits.NoUpdateSpec.__init__(self)
self.update_renames()
def update_renames(self):
self.function_keyword_renames["f"] = {"kw2": "kw3"}
class ReorderKeywordSpec(ast_edits.NoUpdateSpec):
"""A specification where kw2 gets moved in front of kw1.
The new API is
def f(a, b, kw2, kw1): ...
"""
def __init__(self):
ast_edits.NoUpdateSpec.__init__(self)
self.update_reorders()
def update_reorders(self):
# Note that these should be in the old order.
self.function_reorders["f"] = ["a", "b", "kw1", "kw2"]
class ReorderAndRenameKeywordSpec(ReorderKeywordSpec, RenameKeywordSpec):
"""A specification where kw2 gets moved in front of kw1 and is changed to kw3.
The new API is
def f(a, b, kw3, kw1): ...
"""
def __init__(self):
ReorderKeywordSpec.__init__(self)
RenameKeywordSpec.__init__(self)
self.update_renames()
self.update_reorders()
class RemoveDeprecatedAliasKeyword(ast_edits.NoUpdateSpec):
"""A specification where kw1_alias is removed in g.
The new API is
def g(a, b, kw1, c): ...
def g2(a, b, kw1, c, d): ...
"""
def __init__(self):
ast_edits.NoUpdateSpec.__init__(self)
self.function_keyword_renames["g"] = {"kw1_alias": "kw1"}
self.function_keyword_renames["g2"] = {"kw1_alias": "kw1"}
class RemoveDeprecatedAliasAndReorderRest(RemoveDeprecatedAliasKeyword):
"""A specification where kw1_alias is removed in g.
The new API is
def g(a, b, c, kw1): ...
def g2(a, b, c, d, kw1): ...
"""
def __init__(self):
RemoveDeprecatedAliasKeyword.__init__(self)
# Note that these should be in the old order.
self.function_reorders["g"] = ["a", "b", "kw1", "c"]
self.function_reorders["g2"] = ["a", "b", "kw1", "c", "d"]
class RemoveMultipleKeywordArguments(ast_edits.NoUpdateSpec):
"""A specification where both keyword aliases are removed from h.
The new API is
def h(a, kw1, kw2): ...
"""
def __init__(self):
ast_edits.NoUpdateSpec.__init__(self)
self.function_keyword_renames["h"] = {
"kw1_alias": "kw1",
"kw2_alias": "kw2",
}
class RenameImports(ast_edits.NoUpdateSpec):
"""Specification for renaming imports."""
def __init__(self):
ast_edits.NoUpdateSpec.__init__(self)
self.import_renames = {
"foo": ast_edits.ImportRename(
"bar",
excluded_prefixes=["foo.baz"])
}
class TestAstEdits(test_util.TensorFlowTestCase):
def _upgrade(self, spec, old_file_text):
in_file = six.StringIO(old_file_text)
out_file = six.StringIO()
upgrader = ast_edits.ASTCodeUpgrader(spec)
count, report, errors = (
upgrader.process_opened_file("test.py", in_file,
"test_out.py", out_file))
return (count, report, errors), out_file.getvalue()
def testModuleDeprecation(self):
text = "a.b.c(a.b.x)"
(_, _, errors), new_text = self._upgrade(ModuleDeprecationSpec(), text)
self.assertEqual(text, new_text)
self.assertIn("Using member a.b.c", errors[0])
self.assertIn("1:0", errors[0])
self.assertIn("Using member a.b.c", errors[0])
self.assertIn("1:6", errors[1])
def testNoTransformIfNothingIsSupplied(self):
text = "f(a, b, kw1=c, kw2=d)\n"
_, new_text = self._upgrade(ast_edits.NoUpdateSpec(), text)
self.assertEqual(new_text, text)
text = "f(a, b, c, d)\n"
_, new_text = self._upgrade(ast_edits.NoUpdateSpec(), text)
self.assertEqual(new_text, text)
def testKeywordRename(self):
"""Test that we get the expected result if renaming kw2 to kw3."""
text = "f(a, b, kw1=c, kw2=d)\n"
expected = "f(a, b, kw1=c, kw3=d)\n"
(_, report, _), new_text = self._upgrade(RenameKeywordSpec(), text)
self.assertEqual(new_text, expected)
self.assertNotIn("Manual check required", report)
# No keywords specified, no reordering, so we should get input as output
text = "f(a, b, c, d)\n"
(_, report, _), new_text = self._upgrade(RenameKeywordSpec(), text)
self.assertEqual(new_text, text)
self.assertNotIn("Manual check required", report)
# A pure rename never touches positional call sites, so *args needs no warning
text = "f(a, *args)\n"
(_, report, _), _ = self._upgrade(RenameKeywordSpec(), text)
self.assertNotIn("Manual check required", report)
# **kwargs passed in that we cannot inspect, should warn
text = "f(a, b, kw1=c, **kwargs)\n"
(_, report, _), _ = self._upgrade(RenameKeywordSpec(), text)
self.assertIn("Manual check required", report)
def testKeywordReorderWithParens(self):
"""Test that we get the expected result if there are parens around args."""
text = "f((a), ( ( b ) ))\n"
acceptable_outputs = [
# No change is a valid output
text,
# Also cases where all arguments are fully specified are allowed
"f(a=(a), b=( ( b ) ))\n",
# Making the parens canonical is ok
"f(a=(a), b=((b)))\n",
]
_, new_text = self._upgrade(ReorderKeywordSpec(), text)
self.assertIn(new_text, acceptable_outputs)
def testKeywordReorder(self):
"""Test that we get the expected result if kw2 is now before kw1."""
text = "f(a, b, kw1=c, kw2=d)\n"
acceptable_outputs = [
# No change is a valid output
text,
# Just reordering the kw.. args is also ok
"f(a, b, kw2=d, kw1=c)\n",
# Also cases where all arguments are fully specified are allowed
"f(a=a, b=b, kw1=c, kw2=d)\n",
"f(a=a, b=b, kw2=d, kw1=c)\n",
]
(_, report, _), new_text = self._upgrade(ReorderKeywordSpec(), text)
self.assertIn(new_text, acceptable_outputs)
self.assertNotIn("Manual check required", report)
# Keywords are reordered, so we should reorder arguments too
text = "f(a, b, c, d)\n"
acceptable_outputs = [
"f(a, b, d, c)\n",
"f(a=a, b=b, kw1=c, kw2=d)\n",
"f(a=a, b=b, kw2=d, kw1=c)\n",
]
(_, report, _), new_text = self._upgrade(ReorderKeywordSpec(), text)
self.assertIn(new_text, acceptable_outputs)
self.assertNotIn("Manual check required", report)
# Positional *args passed in that we cannot inspect, should warn
text = "f(a, b, *args)\n"
(_, report, _), _ = self._upgrade(ReorderKeywordSpec(), text)
self.assertIn("Manual check required", report)
# kw1 is already keyword-specified here, so **kwargs doesn't force a manual check
text = "f(a, b, kw1=c, **kwargs)\n"
(_, report, _), _ = self._upgrade(ReorderKeywordSpec(), text)
self.assertNotIn("Manual check required", report)
def testKeywordReorderAndRename(self):
"""Test that we get the expected result if kw2 is renamed and moved."""
text = "f(a, b, kw1=c, kw2=d)\n"
acceptable_outputs = [
"f(a, b, kw3=d, kw1=c)\n",
"f(a=a, b=b, kw1=c, kw3=d)\n",
"f(a=a, b=b, kw3=d, kw1=c)\n",
]
(_, report, _), new_text = self._upgrade(
ReorderAndRenameKeywordSpec(), text)
self.assertIn(new_text, acceptable_outputs)
self.assertNotIn("Manual check required", report)
# Keywords are reordered, so we should reorder arguments too
text = "f(a, b, c, d)\n"
acceptable_outputs = [
"f(a, b, d, c)\n",
"f(a=a, b=b, kw1=c, kw3=d)\n",
"f(a=a, b=b, kw3=d, kw1=c)\n",
]
(_, report, _), new_text = self._upgrade(
ReorderAndRenameKeywordSpec(), text)
self.assertIn(new_text, acceptable_outputs)
self.assertNotIn("Manual check required", report)
# Positional *args passed in that we cannot inspect, should warn
text = "f(a, *args, kw1=c)\n"
(_, report, _), _ = self._upgrade(ReorderAndRenameKeywordSpec(), text)
self.assertIn("Manual check required", report)
# **kwargs passed in that we cannot inspect, should warn
text = "f(a, b, kw1=c, **kwargs)\n"
(_, report, _), _ = self._upgrade(ReorderAndRenameKeywordSpec(), text)
self.assertIn("Manual check required", report)
def testRemoveDeprecatedKeywordAlias(self):
"""Test that we get the expected result if a keyword alias is removed."""
text = "g(a, b, kw1=x, c=c)\n"
acceptable_outputs = [
# Not using deprecated alias, so original is ok
text,
"g(a=a, b=b, kw1=x, c=c)\n",
]
_, new_text = self._upgrade(RemoveDeprecatedAliasKeyword(), text)
self.assertIn(new_text, acceptable_outputs)
# No keyword used, should be no change
text = "g(a, b, x, c)\n"
_, new_text = self._upgrade(RemoveDeprecatedAliasKeyword(), text)
self.assertEqual(new_text, text)
# If we used the alias, it should get renamed
text = "g(a, b, kw1_alias=x, c=c)\n"
acceptable_outputs = [
"g(a, b, kw1=x, c=c)\n",
"g(a, b, c=c, kw1=x)\n",
"g(a=a, b=b, kw1=x, c=c)\n",
"g(a=a, b=b, c=c, kw1=x)\n",
]
_, new_text = self._upgrade(RemoveDeprecatedAliasKeyword(), text)
self.assertIn(new_text, acceptable_outputs)
# It should get renamed even if it's last
text = "g(a, b, c=c, kw1_alias=x)\n"
acceptable_outputs = [
"g(a, b, kw1=x, c=c)\n",
"g(a, b, c=c, kw1=x)\n",
"g(a=a, b=b, kw1=x, c=c)\n",
"g(a=a, b=b, c=c, kw1=x)\n",
]
_, new_text = self._upgrade(RemoveDeprecatedAliasKeyword(), text)
self.assertIn(new_text, acceptable_outputs)
def testRemoveDeprecatedKeywordAndReorder(self):
"""Test for when a keyword alias is removed and args are reordered."""
text = "g(a, b, kw1=x, c=c)\n"
acceptable_outputs = [
"g(a, b, c=c, kw1=x)\n",
"g(a=a, b=b, kw1=x, c=c)\n",
]
_, new_text = self._upgrade(RemoveDeprecatedAliasAndReorderRest(), text)
self.assertIn(new_text, acceptable_outputs)
# Keywords are reordered, so we should reorder arguments too
text = "g(a, b, x, c)\n"
# Don't accept an output which doesn't reorder c and d
acceptable_outputs = [
"g(a, b, c, x)\n",
"g(a=a, b=b, kw1=x, c=c)\n",
]
_, new_text = self._upgrade(RemoveDeprecatedAliasAndReorderRest(), text)
self.assertIn(new_text, acceptable_outputs)
# If we used the alias, it should get renamed
text = "g(a, b, kw1_alias=x, c=c)\n"
acceptable_outputs = [
"g(a, b, kw1=x, c=c)\n",
"g(a, b, c=c, kw1=x)\n",
"g(a=a, b=b, kw1=x, c=c)\n",
"g(a=a, b=b, c=c, kw1=x)\n",
]
_, new_text = self._upgrade(RemoveDeprecatedAliasKeyword(), text)
self.assertIn(new_text, acceptable_outputs)
# It should get renamed and reordered even if it's last
text = "g(a, b, c=c, kw1_alias=x)\n"
acceptable_outputs = [
"g(a, b, kw1=x, c=c)\n",
"g(a, b, c=c, kw1=x)\n",
"g(a=a, b=b, kw1=x, c=c)\n",
"g(a=a, b=b, c=c, kw1=x)\n",
]
_, new_text = self._upgrade(RemoveDeprecatedAliasKeyword(), text)
self.assertIn(new_text, acceptable_outputs)
def testRemoveDeprecatedKeywordAndReorder2(self):
"""Same as testRemoveDeprecatedKeywordAndReorder but on g2 (more args)."""
text = "g2(a, b, kw1=x, c=c, d=d)\n"
acceptable_outputs = [
"g2(a, b, c=c, d=d, kw1=x)\n",
"g2(a=a, b=b, kw1=x, c=c, d=d)\n",
]
_, new_text = self._upgrade(RemoveDeprecatedAliasAndReorderRest(), text)
self.assertIn(new_text, acceptable_outputs)
# Keywords are reordered, so we should reorder arguments too
text = "g2(a, b, x, c, d)\n"
# Don't accept an output which doesn't reorder c and d
acceptable_outputs = [
"g2(a, b, c, d, x)\n",
"g2(a=a, b=b, kw1=x, c=c, d=d)\n",
]
_, new_text = self._upgrade(RemoveDeprecatedAliasAndReorderRest(), text)
self.assertIn(new_text, acceptable_outputs)
# If we used the alias, it should get renamed
text = "g2(a, b, kw1_alias=x, c=c, d=d)\n"
acceptable_outputs = [
"g2(a, b, kw1=x, c=c, d=d)\n",
"g2(a, b, c=c, d=d, kw1=x)\n",
"g2(a=a, b=b, kw1=x, c=c, d=d)\n",
"g2(a=a, b=b, c=c, d=d, kw1=x)\n",
]
_, new_text = self._upgrade(RemoveDeprecatedAliasKeyword(), text)
self.assertIn(new_text, acceptable_outputs)
# It should get renamed and reordered even if it's not in order
text = "g2(a, b, d=d, c=c, kw1_alias=x)\n"
acceptable_outputs = [
"g2(a, b, kw1=x, c=c, d=d)\n",
"g2(a, b, c=c, d=d, kw1=x)\n",
"g2(a, b, d=d, c=c, kw1=x)\n",
"g2(a=a, b=b, kw1=x, c=c, d=d)\n",
"g2(a=a, b=b, c=c, d=d, kw1=x)\n",
"g2(a=a, b=b, d=d, c=c, kw1=x)\n",
]
_, new_text = self._upgrade(RemoveDeprecatedAliasKeyword(), text)
self.assertIn(new_text, acceptable_outputs)
def testRemoveMultipleKeywords(self):
"""Remove multiple keywords at once."""
# Not using deprecated keywords -> no rename
text = "h(a, kw1=x, kw2=y)\n"
_, new_text = self._upgrade(RemoveMultipleKeywordArguments(), text)
self.assertEqual(new_text, text)
# Using positional arguments (in proper order) -> no change
text = "h(a, x, y)\n"
_, new_text = self._upgrade(RemoveMultipleKeywordArguments(), text)
self.assertEqual(new_text, text)
# Use only the old names, in order
text = "h(a, kw1_alias=x, kw2_alias=y)\n"
acceptable_outputs = [
"h(a, x, y)\n",
"h(a, kw1=x, kw2=y)\n",
"h(a=a, kw1=x, kw2=y)\n",
"h(a, kw2=y, kw1=x)\n",
"h(a=a, kw2=y, kw1=x)\n",
]
_, new_text = self._upgrade(RemoveMultipleKeywordArguments(), text)
self.assertIn(new_text, acceptable_outputs)
# Use only the old names, in reverse order, should give one of same outputs
text = "h(a, kw2_alias=y, kw1_alias=x)\n"
_, new_text = self._upgrade(RemoveMultipleKeywordArguments(), text)
self.assertIn(new_text, acceptable_outputs)
# Mix old and new names
text = "h(a, kw1=x, kw2_alias=y)\n"
_, new_text = self._upgrade(RemoveMultipleKeywordArguments(), text)
self.assertIn(new_text, acceptable_outputs)
def testUnrestrictedFunctionWarnings(self):
class FooWarningSpec(ast_edits.NoUpdateSpec):
"""Usages of function attribute foo() prints out a warning."""
def __init__(self):
ast_edits.NoUpdateSpec.__init__(self)
self.function_warnings = {"*.foo": (ast_edits.WARNING, "not good")}
texts = ["object.foo()", "get_object().foo()",
"get_object().foo()", "object.foo().bar()"]
for text in texts:
(_, report, _), _ = self._upgrade(FooWarningSpec(), text)
self.assertIn("not good", report)
# Note that foo() won't result in a warning, because in this case foo is
# not an attribute, but a name.
false_alarms = ["foo", "foo()", "foo.bar()", "obj.run_foo()", "obj.foo"]
for text in false_alarms:
(_, report, _), _ = self._upgrade(FooWarningSpec(), text)
self.assertNotIn("not good", report)
def testFullNameNode(self):
t = ast_edits.full_name_node("a.b.c")
self.assertEqual(
ast.dump(t),
"Attribute(value=Attribute(value=Name(id='a', ctx=Load()), attr='b', "
"ctx=Load()), attr='c', ctx=Load())"
)
def testImport(self):
# foo should be renamed to bar.
text = "import foo as f"
expected_text = "import bar as f"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(expected_text, new_text)
text = "import foo"
expected_text = "import bar as foo"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(expected_text, new_text)
text = "import foo.test"
expected_text = "import bar.test"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(expected_text, new_text)
text = "import foo.test as t"
expected_text = "import bar.test as t"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(expected_text, new_text)
text = "import foo as f, a as b"
expected_text = "import bar as f, a as b"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(expected_text, new_text)
def testFromImport(self):
# foo should be renamed to bar.
text = "from foo import a"
expected_text = "from bar import a"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(expected_text, new_text)
text = "from foo.a import b"
expected_text = "from bar.a import b"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(expected_text, new_text)
text = "from foo import *"
expected_text = "from bar import *"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(expected_text, new_text)
text = "from foo import a, b"
expected_text = "from bar import a, b"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(expected_text, new_text)
def testImport_NoChangeNeeded(self):
text = "import bar as b"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(text, new_text)
def testFromImport_NoChangeNeeded(self):
text = "from bar import a as b"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(text, new_text)
def testExcludedImport(self):
# foo.baz module is excluded from changes.
text = "import foo.baz"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(text, new_text)
text = "import foo.baz as a"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(text, new_text)
text = "from foo import baz as a"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(text, new_text)
text = "from foo.baz import a"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(text, new_text)
def testMultipleImports(self):
text = "import foo.bar as a, foo.baz as b, foo.baz.c, foo.d"
expected_text = "import bar.bar as a, foo.baz as b, foo.baz.c, bar.d"
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(expected_text, new_text)
text = "from foo import baz, a, c"
expected_text = """from foo import baz
from bar import a, c"""
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(expected_text, new_text)
def testImportInsideFunction(self):
text = """
def t():
from c import d
from foo import baz, a
from e import y
"""
expected_text = """
def t():
from c import d
from foo import baz
from bar import a
from e import y
"""
_, new_text = self._upgrade(RenameImports(), text)
self.assertEqual(expected_text, new_text)
def testUpgradeInplaceWithSymlink(self):
upgrade_dir = os.path.join(self.get_temp_dir(), "foo")
os.mkdir(upgrade_dir)
file_a = os.path.join(upgrade_dir, "a.py")
file_b = os.path.join(upgrade_dir, "b.py")
with open(file_a, "a") as f:
f.write("import foo as f")
os.symlink(file_a, file_b)
upgrader = ast_edits.ASTCodeUpgrader(RenameImports())
upgrader.process_tree_inplace(upgrade_dir)
self.assertTrue(os.path.islink(file_b))
self.assertEqual(file_a, os.readlink(file_b))
with open(file_a, "r") as f:
self.assertEqual("import bar as f", f.read())
def testUpgradeInPlaceWithSymlinkInDifferentDir(self):
upgrade_dir = os.path.join(self.get_temp_dir(), "foo")
other_dir = os.path.join(self.get_temp_dir(), "bar")
os.mkdir(upgrade_dir)
os.mkdir(other_dir)
file_c = os.path.join(other_dir, "c.py")
file_d = os.path.join(upgrade_dir, "d.py")
with open(file_c, "a") as f:
f.write("import foo as f")
os.symlink(file_c, file_d)
upgrader = ast_edits.ASTCodeUpgrader(RenameImports())
upgrader.process_tree_inplace(upgrade_dir)
self.assertTrue(os.path.islink(file_d))
self.assertEqual(file_c, os.readlink(file_d))
# File pointed to by symlink is in a different directory.
# Therefore, it should not be upgraded.
with open(file_c, "r") as f:
self.assertEqual("import foo as f", f.read())
def testUpgradeCopyWithSymlink(self):
upgrade_dir = os.path.join(self.get_temp_dir(), "foo")
output_dir = os.path.join(self.get_temp_dir(), "bar")
os.mkdir(upgrade_dir)
file_a = os.path.join(upgrade_dir, "a.py")
file_b = os.path.join(upgrade_dir, "b.py")
with open(file_a, "a") as f:
f.write("import foo as f")
os.symlink(file_a, file_b)
upgrader = ast_edits.ASTCodeUpgrader(RenameImports())
upgrader.process_tree(upgrade_dir, output_dir, copy_other_files=True)
new_file_a = os.path.join(output_dir, "a.py")
new_file_b = os.path.join(output_dir, "b.py")
self.assertTrue(os.path.islink(new_file_b))
self.assertEqual(new_file_a, os.readlink(new_file_b))
with open(new_file_a, "r") as f:
self.assertEqual("import bar as f", f.read())
def testUpgradeCopyWithSymlinkInDifferentDir(self):
upgrade_dir = os.path.join(self.get_temp_dir(), "foo")
other_dir = os.path.join(self.get_temp_dir(), "bar")
output_dir = os.path.join(self.get_temp_dir(), "baz")
os.mkdir(upgrade_dir)
os.mkdir(other_dir)
file_a = os.path.join(other_dir, "a.py")
file_b = os.path.join(upgrade_dir, "b.py")
with open(file_a, "a") as f:
f.write("import foo as f")
os.symlink(file_a, file_b)
upgrader = ast_edits.ASTCodeUpgrader(RenameImports())
upgrader.process_tree(upgrade_dir, output_dir, copy_other_files=True)
new_file_b = os.path.join(output_dir, "b.py")
self.assertTrue(os.path.islink(new_file_b))
self.assertEqual(file_a, os.readlink(new_file_b))
with open(file_a, "r") as f:
self.assertEqual("import foo as f", f.read())
if __name__ == "__main__":
test_lib.main()
|
xzturn/tensorflow
|
tensorflow/tools/compatibility/ast_edits_test.py
|
Python
|
apache-2.0
| 23,745
|
# Copyright 2018-2021 Peppy Player peppy.player@gmail.com
#
# This file is part of Peppy Player.
#
# Peppy Player is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Peppy Player is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Peppy Player. If not, see <http://www.gnu.org/licenses/>.
from ui.navigator.navigator import Navigator
from util.keys import KEY_HOME, KEY_PLAYER
from util.config import SLEEP_NOW
class TimerNavigator(Navigator):
""" Timer navigator """
def __init__(self, util, bounding_box, listeners):
""" Initializer
:param util: utility object
:param bounding_box: bounding box
:param listeners: buttons listeners
"""
items = []
self.add_button(items, KEY_HOME, None, [listeners[KEY_HOME]])
self.add_button(items, SLEEP_NOW, None, [listeners[SLEEP_NOW]])
self.add_button(items, KEY_PLAYER, None, [listeners[KEY_PLAYER]])
Navigator.__init__(self, util, bounding_box, "timer.navigator", items, None)
|
project-owner/Peppy
|
ui/navigator/timer.py
|
Python
|
gpl-3.0
| 1,465
|
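The navigator above is thin wiring: it receives a dict of callbacks keyed by button constants and registers one button per key. A framework-free sketch of the same pattern, with hypothetical names standing in for Peppy's Navigator API:

# hypothetical stand-ins for the util.keys / util.config constants
KEY_HOME, SLEEP_NOW, KEY_PLAYER = "home", "sleep.now", "player"

class SimpleNavigator:
    def __init__(self, listeners):
        # one (name, callback) pair per navigation button
        self.buttons = [(key, listeners[key])
                        for key in (KEY_HOME, SLEEP_NOW, KEY_PLAYER)]

    def press(self, name):
        for key, callback in self.buttons:
            if key == name:
                callback()

nav = SimpleNavigator({KEY_HOME: lambda: print("go home"),
                       SLEEP_NOW: lambda: print("sleep now"),
                       KEY_PLAYER: lambda: print("open player")})
nav.press(SLEEP_NOW)  # prints "sleep now"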
"""
flp-benders.py: model for solving the capacitated facility location problem using Benders' decomposition
minimize the total (weighted) travel cost from n customers
to some facilities with fixed costs and capacities.
Copyright (c) by Joao Pedro PEDROSO and Mikio KUBO, 2012
"""
from pyscipopt import Model, quicksum, multidict, SCIP_PARAMSETTING, Benders,\
Benderscut, SCIP_RESULT, SCIP_LPSOLSTAT
class testBenders(Benders):
def __init__(self, masterVarDict, I, J, M, c, d, name):
super(testBenders, self).__init__()
self.mpVardict = masterVarDict
self.I, self.J, self.M, self.c, self.d = I, J, M, c, d
self.demand = {}
self.capacity = {}
self.name = name # benders name
def benderscreatesub(self, probnumber):
subprob = Model("flp-subprob")
x, y = {}, {}
for j in self.J:
y[j] = subprob.addVar(vtype="B", name="y(%s)" % j)
for i in self.I:
x[i, j] = subprob.addVar(vtype="C", name="x(%s,%s)" % (i, j))
for i in self.I:
self.demand[i] = subprob.addCons(quicksum(x[i, j] for j in self.J) >= self.d[i], "Demand(%s)" % i)
        for j in self.J:
self.capacity[j] = subprob.addCons(quicksum(x[i, j] for i in self.I) <= self.M[j] * y[j], "Capacity(%s)" % j)
subprob.setObjective(
quicksum(self.c[i, j] * x[i, j] for i in self.I for j in self.J),
"minimize")
subprob.data = x, y
#self.model.addBendersSubproblem(self.name, subprob)
self.model.addBendersSubproblem(self, subprob)
self.subprob = subprob
def bendersgetvar(self, variable, probnumber):
try:
if probnumber == -1: # convert to master variable
mapvar = self.mpVardict[variable.name]
else:
mapvar = self.subprob.data[1][variable.name]
except KeyError:
mapvar = None
return {"mappedvar": mapvar}
def benderssolvesubconvex(self, solution, probnumber, onlyconvex):
self.model.setupBendersSubproblem(probnumber, self, solution)
self.subprob.solveProbingLP()
subprob = self.model.getBendersSubproblem(probnumber, self)
assert self.subprob.getObjVal() == subprob.getObjVal()
result_dict = {}
objective = subprob.infinity()
result = SCIP_RESULT.DIDNOTRUN
lpsolstat = self.subprob.getLPSolstat()
if lpsolstat == SCIP_LPSOLSTAT.OPTIMAL:
objective = self.subprob.getObjVal()
result = SCIP_RESULT.FEASIBLE
elif lpsolstat == SCIP_LPSOLSTAT.INFEASIBLE:
objective = self.subprob.infinity()
result = SCIP_RESULT.INFEASIBLE
elif lpsolstat == SCIP_LPSOLSTAT.UNBOUNDEDRAY:
objective = self.subprob.infinity()
result = SCIP_RESULT.UNBOUNDED
result_dict["objective"] = objective
result_dict["result"] = result
return result_dict
def bendersfreesub(self, probnumber):
if self.subprob.inProbing():
self.subprob.endProbing()
class testBenderscut(Benderscut):
def __init__(self, I, J, M, d):
self.I, self.J, self.M, self.d = I, J, M, d
def benderscutexec(self, solution, probnumber, enfotype):
subprob = self.model.getBendersSubproblem(probnumber, benders=self.benders)
membersubprob = self.benders.subprob
# checking whether the subproblem is already optimal, i.e. whether a cut
# needs to be generated
if self.model.checkBendersSubproblemOptimality(solution, probnumber,
benders=self.benders):
return {"result" : SCIP_RESULT.FEASIBLE}
# testing whether the dual multipliers can be found for the retrieved
# subproblem model. If the constraints don't exist, then the subproblem
# model is not correct.
        # Also checking whether the dual multiplier is the same between the
        # member subproblem and the retrieved subproblem.
lhs = 0
for i in self.I:
subprobcons = self.benders.demand[i]
try:
dualmult = subprob.getDualsolLinear(subprobcons)
lhs += dualmult*self.d[i]
            except Exception:
                print("Subproblem constraint <%s> does not exist in the "
                      "subproblem." % subprobcons.name)
assert False
memberdualmult = membersubprob.getDualsolLinear(subprobcons)
if dualmult != memberdualmult:
print("The dual multipliers between the two subproblems are not "\
"the same.")
assert False
coeffs = [subprob.getDualsolLinear(self.benders.capacity[j])*\
self.M[j] for j in self.J]
self.model.addCons(self.model.getBendersAuxiliaryVar(probnumber,
self.benders) -
quicksum(self.model.getBendersVar(self.benders.subprob.data[1][j],
self.benders)*coeffs[j] for j in self.J) >= lhs)
return {"result" : SCIP_RESULT.CONSADDED}
def flp(I, J, M, d, f, c=None, monolithic=False):
    """flp -- model for the capacitated facility location problem
    Parameters:
        - I: set of customers
        - J: set of facilities
        - M[j]: capacity of facility j
        - d[i]: demand for customer i
        - f[j]: fixed cost for using a facility in point j
        - c[i,j]: unit cost of servicing demand point i from facility j
        - monolithic: if True, also build the transportation variables and
          constraints in the master problem (no decomposition)
Returns a model, ready to be solved.
"""
master = Model("flp-master")
# creating the problem
y = {}
for j in J:
y["y(%d)"%j] = master.addVar(vtype="B", name="y(%s)"%j)
if monolithic:
x = {}
demand = {}
capacity = {}
for j in J:
for i in I:
x[i, j] = master.addVar(vtype="C", name="x(%s,%s)" % (i, j))
for i in I:
demand[i] = master.addCons(quicksum(x[i, j] for j in J) >= d[i], "Demand(%s)" % i)
        for j in J:
            capacity[j] = master.addCons(quicksum(x[i, j] for i in I) <= M[j] * y["y(%d)"%j], "Capacity(%s)" % j)
master.addCons(quicksum(y["y(%d)"%j]*M[j] for j in J)
- quicksum(d[i] for i in I) >= 0)
master.setObjective(
quicksum(f[j]*y["y(%d)"%j] for j in J) + (0 if not monolithic else
quicksum(c[i, j] * x[i, j] for i in I for j in J)),
"minimize")
master.data = y
return master
def make_data():
I,d = multidict({0:80, 1:270, 2:250, 3:160, 4:180}) # demand
J,M,f = multidict({0:[500,1000], 1:[500,1000], 2:[500,1000]}) # capacity, fixed costs
c = {(0,0):4, (0,1):6, (0,2):9, # transportation costs
(1,0):5, (1,1):4, (1,2):7,
(2,0):6, (2,1):3, (2,2):4,
(3,0):8, (3,1):5, (3,2):3,
(4,0):10, (4,1):8, (4,2):4,
}
return I,J,d,M,f,c
def test_flpbenders_defcuts():
    '''
    test the Benders' decomposition plugins on the facility location problem,
    using the default Benders' cut generation.
    '''
I,J,d,M,f,c = make_data()
master = flp(I, J, M, d, f)
# initializing the default Benders' decomposition with the subproblem
master.setPresolve(SCIP_PARAMSETTING.OFF)
master.setBoolParam("misc/allowstrongdualreds", False)
master.setBoolParam("misc/allowweakdualreds", False)
master.setBoolParam("benders/copybenders", False)
bendersName = "testBenders"
testbd = testBenders(master.data, I, J, M, c, d, bendersName)
master.includeBenders(testbd, bendersName, "benders plugin")
master.includeBendersDefaultCuts(testbd)
master.activateBenders(testbd, 1)
master.setBoolParam("constraints/benders/active", True)
master.setBoolParam("constraints/benderslp/active", True)
master.setBoolParam("benders/testBenders/updateauxvarbound", False)
# optimizing the problem using Benders' decomposition
master.optimize()
# since custom solving functions are defined, we need to manually solve the
# Benders' decomposition subproblems to get the best solution
master.setupBendersSubproblem(0, testbd, master.getBestSol())
testbd.subprob.solveProbingLP()
EPS = 1.e-6
y = master.data
facilities = [j for j in y if master.getVal(y[j]) > EPS]
x, suby = testbd.subprob.data
edges = [(i, j) for (i, j) in x if testbd.subprob.getVal(x[i,j]) > EPS]
print("Optimal value:", master.getObjVal())
print("Facilities at nodes:", facilities)
print("Edges:", edges)
master.printStatistics()
# since the subproblems were setup and then solved, we need to free the
# subproblems. This must happen after the solution is extracted, otherwise
# the solution will be lost
master.freeBendersSubproblems()
return master.getObjVal()
def test_flpbenders_customcuts():
    '''
    test the Benders' decomposition plugins on the facility location problem,
    using the custom Benders' cut plugin defined above.
    '''
I,J,d,M,f,c = make_data()
master = flp(I, J, M, d, f)
# initializing the default Benders' decomposition with the subproblem
master.setPresolve(SCIP_PARAMSETTING.OFF)
master.setBoolParam("misc/allowstrongdualreds", False)
master.setBoolParam("misc/allowweakdualreds", False)
master.setBoolParam("benders/copybenders", False)
bendersName = "testBenders"
benderscutName = "testBenderscut"
testbd = testBenders(master.data, I, J, M, c, d, bendersName)
testbdc = testBenderscut(I, J, M, d)
master.includeBenders(testbd, bendersName, "benders plugin")
master.includeBenderscut(testbd, testbdc, benderscutName,
"benderscut plugin", priority=1000000)
master.activateBenders(testbd, 1)
master.setBoolParam("constraints/benders/active", True)
master.setBoolParam("constraints/benderslp/active", True)
master.setBoolParam("benders/testBenders/updateauxvarbound", False)
# optimizing the problem using Benders' decomposition
master.optimize()
# since custom solving functions are defined, we need to manually solve the
# Benders' decomposition subproblems to get the best solution
master.setupBendersSubproblem(0, testbd, master.getBestSol())
testbd.subprob.solveProbingLP()
EPS = 1.e-6
y = master.data
facilities = [j for j in y if master.getVal(y[j]) > EPS]
x, suby = testbd.subprob.data
edges = [(i, j) for (i, j) in x if testbd.subprob.getVal(x[i,j]) > EPS]
print("Optimal value:", master.getObjVal())
print("Facilities at nodes:", facilities)
print("Edges:", edges)
master.printStatistics()
# since the subproblems were setup and then solved, we need to free the
# subproblems. This must happen after the solution is extracted, otherwise
# the solution will be lost
master.freeBendersSubproblems()
return master.getObjVal()
def test_flp():
    '''
    solve the facility location problem monolithically (no decomposition),
    as a reference value for the Benders' runs.
    '''
I,J,d,M,f,c = make_data()
master = flp(I, J, M, d, f, c=c, monolithic=True)
    # solving the monolithic problem directly; presolve is switched off to
    # match the settings of the Benders' runs
master.setPresolve(SCIP_PARAMSETTING.OFF)
# optimizing the monolithic problem
master.optimize()
EPS = 1.e-6
y = master.data
facilities = [j for j in y if master.getVal(y[j]) > EPS]
print("Optimal value:", master.getObjVal())
print("Facilities at nodes:", facilities)
master.printBestSol()
master.printStatistics()
return master.getObjVal()
if __name__ == "__main__":
defcutsobj = test_flpbenders_defcuts()
customcutsobj = test_flpbenders_customcuts()
monolithicobj = test_flp()
assert defcutsobj == customcutsobj
assert defcutsobj == monolithicobj
|
SCIP-Interfaces/PySCIPOpt
|
tests/test_customizedbenders.py
|
Python
|
mit
| 11,595
|
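As a cross-check on the decomposition code above, the same instance can be solved directly with plain PySCIPOpt calls, with no Benders plugins involved; the optimal objective should agree with both Benders' runs. A minimal sketch, reusing the data from make_data:

from pyscipopt import Model, quicksum

I, d = [0, 1, 2, 3, 4], {0: 80, 1: 270, 2: 250, 3: 160, 4: 180}
J, M, f = [0, 1, 2], {0: 500, 1: 500, 2: 500}, {0: 1000, 1: 1000, 2: 1000}
c = {(0, 0): 4, (0, 1): 6, (0, 2): 9,
     (1, 0): 5, (1, 1): 4, (1, 2): 7,
     (2, 0): 6, (2, 1): 3, (2, 2): 4,
     (3, 0): 8, (3, 1): 5, (3, 2): 3,
     (4, 0): 10, (4, 1): 8, (4, 2): 4}

model = Model("flp-direct")
x = {(i, j): model.addVar(vtype="C", name="x(%s,%s)" % (i, j)) for i in I for j in J}
y = {j: model.addVar(vtype="B", name="y(%s)" % j) for j in J}
for i in I:
    model.addCons(quicksum(x[i, j] for j in J) >= d[i])         # meet demand
for j in J:
    model.addCons(quicksum(x[i, j] for i in I) <= M[j] * y[j])  # respect capacity
model.setObjective(quicksum(f[j] * y[j] for j in J) +
                   quicksum(c[i, j] * x[i, j] for i in I for j in J), "minimize")
model.optimize()
print("direct optimum:", model.getObjVal())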
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-25 01:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Account',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('email', models.EmailField(max_length=254, unique=True)),
('username', models.CharField(max_length=40, unique=True)),
('first_name', models.CharField(blank=True, max_length=40)),
('last_name', models.CharField(blank=True, max_length=40)),
('tag_line', models.CharField(blank=True, max_length=140)),
('is_admin', models.BooleanField(default=False)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'abstract': False,
},
),
]
|
maxwallasaurus/arboretum
|
authentication/migrations 08-40-23-458/0001_initial 08-45-26-265.py
|
Python
|
mit
| 1,345
|
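For readers tracing the migration back to source code: the CreateModel operation above is what makemigrations would emit for a custom user model built on AbstractBaseUser (which contributes the password and last_login columns). One plausible model definition, offered as a sketch rather than the project's actual code:

# a sketch, not the project's actual code
from django.contrib.auth.models import AbstractBaseUser
from django.db import models

class Account(AbstractBaseUser):
    # password and last_login come from AbstractBaseUser
    email = models.EmailField(max_length=254, unique=True)
    username = models.CharField(max_length=40, unique=True)
    first_name = models.CharField(max_length=40, blank=True)
    last_name = models.CharField(max_length=40, blank=True)
    tag_line = models.CharField(max_length=140, blank=True)
    is_admin = models.BooleanField(default=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    USERNAME_FIELD = "email"  # not recorded in the migration itself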
"""
#;+
#; NAME:
#; files
#; Version 1.0
#;
#; PURPOSE:
#; Module for file utilities
#; 10-Dec-2014 by JXP
#;-
#;------------------------------------------------------------------------------
"""
from __future__ import print_function, absolute_import, division, unicode_literals
import numpy as np
import os
from astropy import units as u
from xastropy.xutils import xdebug as xdb
#
def ensure_dir(fil): # Stolen from the Web
    ''' Make sure the directory containing fil exists,
    creating it (and any missing parents) if needed.
    '''
    #
    d = os.path.dirname(fil)
    if not os.path.exists(d):
        os.makedirs(d)
|
profxj/xastropy
|
xastropy/xutils/files.py
|
Python
|
bsd-3-clause
| 597
|
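On Python 3, the same utility collapses to a single pathlib call that also creates missing parent directories and tolerates concurrent creation; a sketch of the modern equivalent:

from pathlib import Path

def ensure_dir(fil):
    # create the parent directory of fil, including intermediates;
    # exist_ok=True makes the call a no-op (and race-free) if it exists
    Path(fil).parent.mkdir(parents=True, exist_ok=True)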
from tools.load import LoadMatrix
from numpy import where
lm = LoadMatrix()
traindat = lm.load_numbers('../data/fm_train_real.dat')
testdat = lm.load_numbers('../data/fm_test_real.dat')
parameter_list=[[traindat,testdat, 1.5, 1.0],[traindat,testdat, 1.0, 1.5]]
def kernel_wavelet_modular(fm_train_real=traindat, fm_test_real=testdat, dilation=1.5, translation=1.0):
from shogun.Features import RealFeatures
from shogun.Kernel import WaveletKernel
feats_train=RealFeatures(fm_train_real)
feats_test=RealFeatures(fm_test_real)
kernel=WaveletKernel(feats_train, feats_train, 10, dilation, translation)
km_train=kernel.get_kernel_matrix()
kernel.init(feats_train, feats_test)
km_test=kernel.get_kernel_matrix()
return km_train,km_test,kernel
if __name__ == '__main__':
print('Wavelet')
kernel_wavelet_modular(*parameter_list[0])
|
ratschlab/ASP
|
examples/undocumented/python_modular/kernel_wavelet_modular.py
|
Python
|
gpl-2.0
| 840
|
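For intuition about what the example above computes: a common translation-invariant wavelet kernel (Zhang, Zhou and Jiao, 2004) is k(x, y) = prod_i h((x_i - y_i)/a) with mother wavelet h(t) = cos(1.75 t) exp(-t^2/2). Shogun's WaveletKernel also takes a translation parameter and its exact formula may differ, so the NumPy sketch below is illustrative only:

import numpy as np

def wavelet_kernel_matrix(X, Y, dilation=1.5):
    # X: (d, n) and Y: (d, m), matching the column-wise feature layout
    # used by shogun's RealFeatures
    t = (X[:, :, None] - Y[:, None, :]) / dilation    # (d, n, m)
    h = np.cos(1.75 * t) * np.exp(-0.5 * t ** 2)      # mother wavelet
    return np.prod(h, axis=0)                         # (n, m) kernel matrix

X = np.random.rand(3, 5)
K = wavelet_kernel_matrix(X, X)
assert K.shape == (5, 5)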