import os
import sys
import unittest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
import pfp
import pfp.fields
from pfp.fields import PYVAL,PYSTR
import pfp.interp
import pfp.utils
import utils
class TestTypeCreation(unittest.TestCase, utils.UtilsMixin):
def setUp(self):
pfp.fields.NumberBase.endian = pfp.fields.LITTLE_ENDIAN
def tearDown(self):
pass
def test_atomic(self):
dom = self._test_parse_build(
"",
"""
typedef unsigned int BLAH;
"""
)
res = dom.BLAH()
self.assertTrue(isinstance(res, pfp.fields.UInt))
self.assertEqual(res, 0)
def test_struct(self):
dom = self._test_parse_build(
"",
"""
LittleEndian();
typedef struct {
char a;
char b;
uint c;
} TEST_STRUCT;
"""
)
res = dom.TEST_STRUCT()
self.assertTrue(isinstance(res, pfp.fields.Struct))
self.assertEqual(res.a, 0)
self.assertEqual(res.b, 0)
self.assertEqual(res.c, 0)
res.a = 0x30
res.b = 0x40
res.c = 0x1000
self.assertEqual(res.a, 0x30)
self.assertEqual(res.b, 0x40)
self.assertEqual(res.c, 0x1000)
output = res._pfp__build()
self.assertEqual(output, pfp.utils.binary("\x30\x40\x00\x10\x00\x00"))
if __name__ == "__main__":
unittest.main()
|
from django import forms
from django_countries.tests import models
class PersonForm(forms.ModelForm):
class Meta:
model = models.Person
fields = ['country', 'favourite_country']
class AllowNullForm(forms.ModelForm):
class Meta:
model = models.AllowNull
fields = ['country']
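# Illustrative usage sketch, not part of the original module: binding PersonForm
# to POST-style data in a test. The country codes below are assumptions chosen
# only for demonstration; validity depends on the test models' definitions.
def _example_person_form_usage():
    form = PersonForm(data={'country': 'NZ', 'favourite_country': 'NZ'})
    if not form.is_valid():
        print(form.errors)  # inspect why the bound data failed validation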
|
from okcupyd import magicnumbers
def test_yield_exponents_of_two():
assert list(magicnumbers.yield_exponents_of_two(32 + 16)) == [4, 5]
assert list(magicnumbers.yield_exponents_of_two(1)) == [0]
assert list(magicnumbers.yield_exponents_of_two(2)) == [1]
assert list(magicnumbers.yield_exponents_of_two(2+8)) == [1, 3]
def test_get_kids_query_with_both_specified():
assert magicnumbers.get_kids_int(["has a kid"], ["might want kids",
"doesn't want kids",
"wants kids"]) == 33686016
assert magicnumbers.get_kids_int(["has a kid"], ["might want kids"]) == 512
assert magicnumbers.get_kids_int(["has a kid", "has kids"],
["might want kids"]) == 1536
assert magicnumbers.get_kids_int(["has a kid", "has kids"], []) == 101058054
assert magicnumbers.get_kids_int(["has a kid", "has kids"],
["doesn't want kids"]) == 100663296
def test_get_kids_query_when_has_not_specified():
    assert magicnumbers.get_kids_int([], ['wants kids']) == 4653056
assert magicnumbers.get_kids_int([], ["doesn't want kids",
"wants kids"]) == 1195835440
assert magicnumbers.get_kids_int([], ["might want kids",
"doesn't want kids"]) == 1191200560
assert magicnumbers.get_kids_int([], ["might want kids",
"doesn't want kids",
"wants kids"]) == 1195853616
assert magicnumbers.get_kids_int([], ["might want kids",
"wants kids"]) == 4671232
assert magicnumbers.get_kids_int([], ["might want kids"]) == 18176
def test_get_kids_int_when_wants_not_specified():
assert magicnumbers.get_kids_int(["has a kid"], []) == 33686018
assert magicnumbers.get_kids_int(["has a kid", "has kids"], []) == 101058054
def test_get_kids_int_with_all():
assert magicnumbers.get_kids_int(
["has a kid",
"has kids",
"doesn't have kids"],
["might want kids",
"doesn't want kids",
"wants kids"]
) == 1179010560
def test_bodytype():
    assert magicnumbers.filters.bodytype('thin') == '30,4'
    assert magicnumbers.filters.bodytype(['thin', 'jacked']) == '30,260'
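# The integers asserted above come from okcupyd's bit-flag encoding of choices.
# As a rough illustration only (this is NOT the library's implementation),
# yield_exponents_of_two behaves like the following sketch:
def _reference_yield_exponents_of_two(number):
    exponent = 0
    while number:
        if number & 1:  # this bit is set, so its exponent is yielded
            yield exponent
        number >>= 1
        exponent += 1
# e.g. list(_reference_yield_exponents_of_two(32 + 16)) == [4, 5]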
|
"""
MoinMoin - OpenOffice.org 2.x Writer Filter (OpenDocument Text)
Depends on: nothing (only python with zlib)
@copyright: 2006 MoinMoin:ThomasWaldmann
@license: GNU GPL, see COPYING for details.
"""
from MoinMoin.filter.application_vnd_oasis_opendocument import execute as odfilter
def execute(indexobj, filename):
return odfilter(indexobj, filename)
|
from __future__ import absolute_import
import scipy.special
import autograd.numpy as np
from autograd.extend import primitive, defvjp, defjvp
from autograd.numpy.numpy_vjps import unbroadcast_f, repeat_to_match_shape
beta = primitive(scipy.special.beta)
betainc = primitive(scipy.special.betainc)
betaln = primitive(scipy.special.betaln)
defvjp(beta,
lambda ans, a, b: unbroadcast_f(a, lambda g: g * ans * (psi(a) - psi(a + b))),
lambda ans, a, b: unbroadcast_f(b, lambda g: g * ans * (psi(b) - psi(a + b))))
defvjp(betainc,
lambda ans, a, b, x: unbroadcast_f(x, lambda g: g * np.power(x, a - 1) * np.power(1 - x, b - 1) / beta(a, b)),
argnums=[2])
defvjp(betaln,
lambda ans, a, b: unbroadcast_f(a, lambda g: g * (psi(a) - psi(a + b))),
lambda ans, a, b: unbroadcast_f(b, lambda g: g * (psi(b) - psi(a + b))))
polygamma = primitive(scipy.special.polygamma)
psi = primitive(scipy.special.psi) # psi(x) is just polygamma(0, x)
digamma = primitive(scipy.special.digamma) # digamma is another name for psi.
gamma = primitive(scipy.special.gamma)
gammaln = primitive(scipy.special.gammaln)
gammainc = primitive(scipy.special.gammainc)
gammaincc = primitive(scipy.special.gammaincc)
gammasgn = primitive(scipy.special.gammasgn)
rgamma = primitive(scipy.special.rgamma)
multigammaln = primitive(scipy.special.multigammaln)
defvjp(gammasgn, None)
defvjp(polygamma, None, lambda ans, n, x: lambda g: g * polygamma(n + 1, x))
defvjp(psi, lambda ans, x: lambda g: g * polygamma(1, x))
defvjp(digamma, lambda ans, x: lambda g: g * polygamma(1, x))
defvjp(gamma, lambda ans, x: lambda g: g * ans * psi(x))
defvjp(gammaln, lambda ans, x: lambda g: g * psi(x))
defvjp(rgamma, lambda ans, x: lambda g: g * psi(x) / -gamma(x))
defvjp(multigammaln,lambda ans, a, d: lambda g:
g * np.sum(digamma(np.expand_dims(a, -1) - np.arange(d)/2.), -1),
None)
def make_gammainc_vjp_arg1(sign):
def gammainc_vjp_arg1(ans, a, x):
coeffs = sign * np.exp(-x) * np.power(x, a - 1) / gamma(a)
return unbroadcast_f(x, lambda g: g * coeffs)
return gammainc_vjp_arg1
defvjp(gammainc, make_gammainc_vjp_arg1(1), argnums=[1])
defvjp(gammaincc, make_gammainc_vjp_arg1(-1), argnums=[1])
j0 = primitive(scipy.special.j0)
y0 = primitive(scipy.special.y0)
j1 = primitive(scipy.special.j1)
y1 = primitive(scipy.special.y1)
jn = primitive(scipy.special.jn)
yn = primitive(scipy.special.yn)
defvjp(j0,lambda ans, x: lambda g: -g * j1(x))
defvjp(y0,lambda ans, x: lambda g: -g * y1(x))
defvjp(j1,lambda ans, x: lambda g: g * (j0(x) - jn(2, x)) / 2.0)
defvjp(y1,lambda ans, x: lambda g: g * (y0(x) - yn(2, x)) / 2.0)
defvjp(jn, None, lambda ans, n, x: lambda g: g * (jn(n - 1, x) - jn(n + 1, x)) / 2.0)
defvjp(yn, None, lambda ans, n, x: lambda g: g * (yn(n - 1, x) - yn(n + 1, x)) / 2.0)
i0 = primitive(scipy.special.i0)
i1 = primitive(scipy.special.i1)
iv = primitive(scipy.special.iv)
ive = primitive(scipy.special.ive)
defvjp(i0, lambda ans, x: lambda g: g * i1(x))
defvjp(i1, lambda ans, x: lambda g: g * (i0(x) + iv(2, x)) / 2.0)
defvjp(iv, None, lambda ans, n, x: lambda g: g * (iv(n - 1, x) + iv(n + 1, x)) / 2.0)
defvjp(ive, None, lambda ans, n, x: lambda g: g * (ans * (n / x - np.sign(x)) + ive(n + 1, x)))
inv_root_pi = 0.56418958354775627928
erf = primitive(scipy.special.erf)
erfc = primitive(scipy.special.erfc)
defvjp(erf, lambda ans, x: lambda g: 2.*g*inv_root_pi*np.exp(-x**2))
defvjp(erfc,lambda ans, x: lambda g: -2.*g*inv_root_pi*np.exp(-x**2))
root_pi = 1.7724538509055159
erfinv = primitive(scipy.special.erfinv)
erfcinv = primitive(scipy.special.erfcinv)
defvjp(erfinv,lambda ans, x: lambda g: g * root_pi / 2 * np.exp(erfinv(x)**2))
defvjp(erfcinv,lambda ans, x: lambda g: -g * root_pi / 2 * np.exp(erfcinv(x)**2))
logit = primitive(scipy.special.logit)
expit = primitive(scipy.special.expit)
defvjp(logit,lambda ans, x: lambda g: g / ( x * (1 - x)))
defvjp(expit,lambda ans, x: lambda g: g * ans * (1 - ans))
logsumexp = primitive(scipy.special.logsumexp)
def make_grad_logsumexp(ans, x, axis=None, b=1.0, keepdims=False):
shape, dtype = np.shape(x), np.result_type(x)
def vjp(g):
g_repeated, _ = repeat_to_match_shape(g, shape, dtype, axis, keepdims)
ans_repeated, _ = repeat_to_match_shape(ans, shape, dtype, axis, keepdims)
return g_repeated * b * np.exp(x - ans_repeated)
return vjp
defvjp(logsumexp, make_grad_logsumexp)
def fwd_grad_logsumexp(g, ans, x, axis=None, b=1.0, keepdims=False):
if not keepdims:
if isinstance(axis, int):
ans = np.expand_dims(ans, axis)
elif isinstance(axis, tuple):
for ax in sorted(axis):
ans = np.expand_dims(ans, ax)
return np.sum(g * b * np.exp(x - ans), axis=axis, keepdims=keepdims)
defjvp(logsumexp, fwd_grad_logsumexp)
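# Quick sanity-check sketch (assumes autograd is installed): the VJPs defined
# above let autograd.grad differentiate through these wrapped primitives, e.g.
# d/dx gammaln(x) equals psi(x) and d/dx erf(x) equals 2/sqrt(pi) * exp(-x**2).
if __name__ == "__main__":
    from autograd import grad
    x0 = 3.5
    assert np.isclose(grad(gammaln)(x0), psi(x0))
    assert np.isclose(grad(erf)(x0), 2. * inv_root_pi * np.exp(-x0 ** 2))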
|
from flask import render_template, redirect, url_for, abort, flash, request,\
current_app, make_response
from flask_login import login_required, current_user
from . import main
from .forms import EditProfileForm, EditProfileAdminForm, PostForm,\
CommentForm
from .. import db
from ..models import Permission, Role, User, Post, Comment
from ..decorators import admin_required, permission_required
@main.route('/', methods=['GET', 'POST'])
def index():
form = PostForm()
if current_user.can(Permission.WRITE_ARTICLES) and \
form.validate_on_submit():
post = Post(body=form.body.data,
author=current_user._get_current_object())
db.session.add(post)
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
show_followed = False
if current_user.is_authenticated:
show_followed = bool(request.cookies.get('show_followed', ''))
if show_followed:
query = current_user.followed_posts
else:
query = Post.query
pagination = query.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts = pagination.items
return render_template('index.html', form=form, posts=posts,
show_followed=show_followed, pagination=pagination)
@main.route('/user/<username>')
def user(username):
user = User.query.filter_by(username=username).first_or_404()
page = request.args.get('page', 1, type=int)
pagination = user.posts.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts = pagination.items
return render_template('user.html', user=user, posts=posts,
pagination=pagination)
@main.route('/edit-profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.about_me = form.about_me.data
db.session.add(current_user)
flash('Your profile has been updated.')
return redirect(url_for('.user', username=current_user.username))
form.name.data = current_user.name
form.location.data = current_user.location
form.about_me.data = current_user.about_me
return render_template('edit_profile.html', form=form)
@main.route('/edit-profile/<int:id>', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_profile_admin(id):
user = User.query.get_or_404(id)
form = EditProfileAdminForm(user=user)
if form.validate_on_submit():
user.email = form.email.data
user.username = form.username.data
user.confirmed = form.confirmed.data
user.role = Role.query.get(form.role.data)
user.name = form.name.data
user.location = form.location.data
user.about_me = form.about_me.data
db.session.add(user)
flash('The profile has been updated.')
return redirect(url_for('.user', username=user.username))
form.email.data = user.email
form.username.data = user.username
form.confirmed.data = user.confirmed
form.role.data = user.role_id
form.name.data = user.name
form.location.data = user.location
form.about_me.data = user.about_me
return render_template('edit_profile.html', form=form, user=user)
@main.route('/post/<int:id>', methods=['GET', 'POST'])
def post(id):
post = Post.query.get_or_404(id)
form = CommentForm()
if form.validate_on_submit():
comment = Comment(body=form.body.data,
post=post,
author=current_user._get_current_object())
db.session.add(comment)
flash('Your comment has been published.')
return redirect(url_for('.post', id=post.id, page=-1))
page = request.args.get('page', 1, type=int)
if page == -1:
page = (post.comments.count() - 1) // \
current_app.config['FLASKY_COMMENTS_PER_PAGE'] + 1
pagination = post.comments.order_by(Comment.timestamp.asc()).paginate(
page, per_page=current_app.config['FLASKY_COMMENTS_PER_PAGE'],
error_out=False)
comments = pagination.items
return render_template('post.html', posts=[post], form=form,
comments=comments, pagination=pagination)
@main.route('/edit/<int:id>', methods=['GET', 'POST'])
@login_required
def edit(id):
post = Post.query.get_or_404(id)
if current_user != post.author and \
not current_user.can(Permission.ADMINISTER):
abort(403)
form = PostForm()
if form.validate_on_submit():
post.body = form.body.data
db.session.add(post)
flash('The post has been updated.')
return redirect(url_for('.post', id=post.id))
form.body.data = post.body
return render_template('edit_post.html', form=form)
@main.route('/follow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def follow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
if current_user.is_following(user):
flash('You are already following this user.')
return redirect(url_for('.user', username=username))
current_user.follow(user)
flash('You are now following %s.' % username)
return redirect(url_for('.user', username=username))
@main.route('/unfollow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def unfollow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
if not current_user.is_following(user):
flash('You are not following this user.')
return redirect(url_for('.user', username=username))
current_user.unfollow(user)
flash('You are not following %s anymore.' % username)
return redirect(url_for('.user', username=username))
@main.route('/followers/<username>')
def followers(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
pagination = user.followers.paginate(
page, per_page=current_app.config['FLASKY_FOLLOWERS_PER_PAGE'],
error_out=False)
follows = [{'user': item.follower, 'timestamp': item.timestamp}
for item in pagination.items]
return render_template('followers.html', user=user, title="Followers of",
endpoint='.followers', pagination=pagination,
follows=follows)
@main.route('/followed-by/<username>')
def followed_by(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
pagination = user.followed.paginate(
page, per_page=current_app.config['FLASKY_FOLLOWERS_PER_PAGE'],
error_out=False)
follows = [{'user': item.followed, 'timestamp': item.timestamp}
for item in pagination.items]
return render_template('followers.html', user=user, title="Followed by",
endpoint='.followed_by', pagination=pagination,
follows=follows)
@main.route('/all')
@login_required
def show_all():
resp = make_response(redirect(url_for('.index')))
resp.set_cookie('show_followed', '', max_age=30*24*60*60)
return resp
@main.route('/followed')
@login_required
def show_followed():
resp = make_response(redirect(url_for('.index')))
resp.set_cookie('show_followed', '1', max_age=30*24*60*60)
return resp
@main.route('/moderate')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate():
page = request.args.get('page', 1, type=int)
pagination = Comment.query.order_by(Comment.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_COMMENTS_PER_PAGE'],
error_out=False)
comments = pagination.items
return render_template('moderate.html', comments=comments,
pagination=pagination, page=page)
@main.route('/moderate/enable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_enable(id):
comment = Comment.query.get_or_404(id)
comment.disabled = False
db.session.add(comment)
return redirect(url_for('.moderate',
page=request.args.get('page', 1, type=int)))
@main.route('/moderate/disable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_disable(id):
comment = Comment.query.get_or_404(id)
comment.disabled = True
db.session.add(comment)
return redirect(url_for('.moderate',
page=request.args.get('page', 1, type=int)))
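# Sketch of how this blueprint is typically registered in the application
# factory (illustrative only; the import path is an assumption):
#
#     from .main import main as main_blueprint
#     app.register_blueprint(main_blueprint)
#
# The paginate() calls above use the Flask-SQLAlchemy 2.x positional-page
# signature: paginate(page, per_page=..., error_out=False).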
|
from django.contrib.sites.models import Site
mysite = Site.objects.get_current()
mysite.domain = 'expfactory.org'
mysite.name = 'The Experiment Factory'
mysite.save()
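# Intended to be run inside a Django shell (e.g. `python manage.py shell`) with
# django.contrib.sites enabled; a quick check that the change took effect:
assert Site.objects.get_current().domain == 'expfactory.org'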
|
"""Check that we do not crash with a recursion error
https://github.com/PyCQA/pylint/issues/3159
"""
from setuptools import Command, find_packages, setup
class AnyCommand(Command):
def initialize_options(self):
pass
def finalize_options(self):
pass
@staticmethod
def run():
print("Do anything")
setup(
name="Thing",
version="1.0",
packages=find_packages(),
cmdclass={"anycommand": AnyCommand},
)
|
from django.shortcuts import render
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
class Stats(APIView):
def retrieve_player_hitting_stats(self):
pass
def retrieve_player_fielding_stats(self):
pass
def retrieve_player_pitching_stats(self):
pass
def retrieve_team_hitting_stats(self):
pass
def retrieve_team_fielding_stats(self):
pass
def retrieve_team_pitching_stats(self):
pass
def get(self, request):
stat_type = request.data.get('type')
stat_group = request.data.get('group')
        # Each branch returns a Response so the view never falls through with None
        # (the retrieve_* methods are stubs, so the body is null for now).
        if stat_group == 'indiv':
            if stat_type == 'hitting':
                return Response(self.retrieve_player_hitting_stats())
            elif stat_type == 'fielding':
                return Response(self.retrieve_player_fielding_stats())
            elif stat_type == 'pitching':
                return Response(self.retrieve_player_pitching_stats())
            else:
                return Response("Bad Request", status=status.HTTP_400_BAD_REQUEST)
        elif stat_group == 'team':
            if stat_type == 'hitting':
                return Response(self.retrieve_team_hitting_stats())
            elif stat_type == 'fielding':
                return Response(self.retrieve_team_fielding_stats())
            elif stat_type == 'pitching':
                return Response(self.retrieve_team_pitching_stats())
            else:
                return Response("Bad Request", status=status.HTTP_400_BAD_REQUEST)
        else:
            return Response("Bad Request", status=status.HTTP_400_BAD_REQUEST)
|
import time
import logging
from redis import BusyLoadingError
log = logging.getLogger(__name__)
def wait_for_redis_data_loaded(redis):
while True:
try:
redis.ping()
except BusyLoadingError:
log.warning("Redis not done loading, will retry in 2 seconds...")
time.sleep(2)
continue
break
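# Minimal usage sketch; the connection parameters below are illustrative and
# assume a Redis server reachable on the default host/port.
if __name__ == "__main__":
    from redis import StrictRedis
    wait_for_redis_data_loaded(StrictRedis(host="localhost", port=6379))
    log.info("Redis has finished loading its dataset")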
|
class Solution(object):
def searchInsert(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
low, high = 0, len(nums) - 1
while low <= high:
            mid = (low + high) // 2  # floor division keeps the index an int
if target == nums[mid]:
return mid
elif target > nums[mid]:
low = mid + 1
else:
high = mid - 1
return low
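# Example usage for the classic cases of this problem:
if __name__ == "__main__":
    s = Solution()
    assert s.searchInsert([1, 3, 5, 6], 5) == 2  # target present
    assert s.searchInsert([1, 3, 5, 6], 2) == 1  # insert between 1 and 3
    assert s.searchInsert([1, 3, 5, 6], 7) == 4  # insert past the end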
|
import math
import warnings
import numpy
import chainer
from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
_erf_cpu = None
class Erf(function_node.FunctionNode):
@property
def label(self):
return 'erf'
def check_type_forward(self, in_types):
type_check._argname(in_types, ('x',))
type_check.expect(in_types[0].dtype.kind == 'f')
def forward_cpu(self, x):
global _erf_cpu
if _erf_cpu is None:
try:
from scipy import special
_erf_cpu = special.erf
except ImportError:
warnings.warn(
"SciPy is not available. Forward computation of erf in CPU"
" can be slow without SciPy.")
_erf_cpu = numpy.vectorize(math.erf)
self.retain_inputs((0,))
return utils.force_array(_erf_cpu(x[0]), dtype=x[0].dtype),
def forward_gpu(self, x):
self.retain_inputs((0,))
return cuda.elementwise(
'T x', 'T y',
'y = erf(x)',
'elementwise_erf',
)(x[0]),
def backward(self, indexes, gy):
x = self.get_retained_inputs()[0]
return 2 / numpy.pi ** 0.5 * chainer.functions.exp(-x ** 2) * gy[0],
def erf(x):
"""Elementwise error function.
.. note::
Forward computation in CPU can be slow if
`SciPy <https://www.scipy.org/>`_ is not available.
Args:
x (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
:class:`cupy.ndarray`): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Erf().apply((x,))[0]
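# Minimal usage sketch (assumes chainer and numpy are installed): erf() accepts
# an ndarray or Variable and returns a chainer.Variable; gradients flow through
# the backward() defined above.
if __name__ == "__main__":
    data = numpy.array([0.0, 0.5, 1.0], dtype=numpy.float32)
    y = erf(chainer.Variable(data))
    print(y.array)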
|
from fabric.api import *
from fabric.contrib.files import *
from fabric.contrib.project import rsync_project
from subprocess import check_output
env.use_ssh_config = True
env.user = 'ubuntu'
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
HOME_DIR = '/home/ubuntu'
DEPLOY_PATH = '%s/cabot' % HOME_DIR
LOG_DIR = '/var/log/cabot/'
VENV_DIR = '%s/venv' % HOME_DIR
BACKUP_DIR = '/tmp/'
PG_DATABASE = 'index'
PG_USERNAME = 'cabot'
PG_PASSWORD = 'cabot' # You should probably change this
def _ensure_dirs():
dirs = [LOG_DIR]
for d in dirs:
sudo('mkdir -p {d}'.format(d=d))
sudo('chmod -R 777 {d}'.format(d=d))
def _setup_venv():
with settings(warn_only=True):
if sudo('test -d %s' % VENV_DIR).failed:
sudo('virtualenv %s' % VENV_DIR)
def install_requirements(deploy_path=DEPLOY_PATH):
with cd(deploy_path):
with prefix("source {venv}/bin/activate".format(venv=VENV_DIR)):
sudo(
"{venv}/bin/pip install -r requirements.txt --exists-action=w".format(venv=VENV_DIR))
def run_migrations(deploy_path=DEPLOY_PATH):
with cd(deploy_path):
with prefix("source {venv}/bin/activate".format(venv=VENV_DIR)):
sudo(
"foreman run python manage.py syncdb -e conf/{env}.env".format(env=env.deploy_version))
sudo(
"foreman run python manage.py migrate cabotapp --noinput -e conf/{env}.env".format(env=env.deploy_version))
# Wrap in failure for legacy reasons
with settings(warn_only=True):
sudo(
"foreman run python manage.py migrate djcelery --noinput -e conf/{env}.env".format(env=env.deploy_version))
def collect_static(deploy_path=DEPLOY_PATH):
with cd(deploy_path):
with prefix("source {venv}/bin/activate".format(venv=VENV_DIR)):
sudo(
"foreman run python manage.py collectstatic --noinput -e conf/{env}.env".format(env=env.deploy_version))
sudo(
"foreman run python manage.py compress -e conf/{env}.env".format(env=env.deploy_version))
def setup_upstart(deploy_path=DEPLOY_PATH):
with cd(deploy_path):
# Point at master (i.e. symlinked) path
procfile = os.path.join(DEPLOY_PATH, 'Procfile')
env_file = os.path.join(DEPLOY_PATH, 'conf', '%s.env' %
env.deploy_version)
template_file = os.path.join(DEPLOY_PATH, 'upstart')
sudo('foreman export upstart /etc/init -f {conf} -e {env} -u ubuntu -a cabot -t {tmplt}'.format(
conf=procfile, env=env_file, tmplt=template_file))
def production():
"""
Select production instance(s)
"""
env.hosts = ['cabot.arachnys.com']
def restart():
with settings(warn_only=True):
if sudo('restart cabot').failed:
sudo('start cabot')
def stop():
with settings(warn_only=True):
sudo('stop cabot')
def provision():
"""
Provision a clean Ubuntu 12.04 instance with dependencies
"""
with open(os.path.expanduser('~/.ssh/id_rsa.pub')) as f:
local_ssh_key = f.read().strip('\n')
put('bin/setup_dependencies.sh', '/tmp/setup_dependencies.sh')
sudo('LOCAL_SSH_KEY="%s" bash /tmp/setup_dependencies.sh' % local_ssh_key)
# Clean up
run('rm /tmp/setup_dependencies.sh')
def deploy(deploy_version=None):
"""
Deploy a new version of code to production or test server.
Push code to remote server, install requirements, apply migrations,
collect and compress static assets, export foreman to upstart,
restart service
"""
# TODO: replace this with
# - zip up working directory
# - upload and unzip into DEPLOY_PATH
env.deploy_version = deploy_version or 'production'
dirname = check_output(
["echo \"$(date +'%Y-%m-%d')-$(git log --pretty=format:'%h' -n 1)\""], shell=True).strip('\n ')
deploy_path = os.path.join(HOME_DIR, dirname)
run('mkdir -p {}'.format(deploy_path))
print 'Uploading project to %s' % deploy_path
rsync_project(
remote_dir=deploy_path,
local_dir='./',
exclude=['.git', 'backups', 'venv',
'static/CACHE', '.vagrant', '*.pyc', 'dev.db'],
)
with cd(deploy_path):
_setup_venv()
create_database()
install_requirements(deploy_path)
run_migrations(deploy_path)
collect_static(deploy_path)
# This may cause a bit of downtime
run('ln -sfn {new} {current}'.format(
new=deploy_path,
current=DEPLOY_PATH
))
setup_upstart(deploy_path)
restart()
print "Done!"
def backup():
"""
Back up database locally
TODO: send backups to s3
"""
backup_file = 'outfile.sql.gz'
with cd(BACKUP_DIR):
run('PGPASSWORD=cabot pg_dump -U cabot index | gzip > {}'.format(backup_file))
get(backup_file, 'backups/%(basename)s')
def create_database():
"""Creates role and database"""
with settings(warn_only=True):
sudo(
'psql -c "CREATE USER %s WITH NOCREATEDB NOCREATEUSER ENCRYPTED PASSWORD E\'%s\'"' %
(PG_USERNAME, PG_PASSWORD), user='postgres')
sudo('psql -c "CREATE DATABASE %s WITH OWNER %s"' %
(PG_DATABASE, PG_USERNAME), user='postgres')
@parallel
def logs():
"""
Tail logfiles
"""
sudo('tail -f {logdir}* /var/log/nginx/*.log'.format(logdir=LOG_DIR))
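# Typical invocation from a checkout (illustrative; the task names match the
# functions defined above, and SSH config entries for the hosts are assumed):
#
#   fab production provision   # one-off dependency setup on a fresh instance
#   fab production deploy      # rsync code, migrate, collectstatic, restart
#   fab production logs        # tail application and nginx logs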
|
""" S3 RESTful API
@copyright: 2009-2015 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3Request",
"S3Method",
"s3_request",
)
import datetime
import os
import re
import sys
import time
import types
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
try:
import json # try stdlib (Python 2.6)
except ImportError:
try:
import simplejson as json # try external module
except:
import gluon.contrib.simplejson as json # fallback to pure-Python module
from gluon import *
from gluon.storage import Storage
from s3datetime import s3_parse_datetime
from s3resource import S3Resource
from s3utils import s3_get_extension, s3_remove_last_record_id, s3_store_last_record_id
REGEX_FILTER = re.compile(".+\..+|.*\(.+\).*")
DEBUG = False
if DEBUG:
print >> sys.stderr, "S3REST: DEBUG MODE"
def _debug(m):
print >> sys.stderr, m
else:
_debug = lambda m: None
class S3Request(object):
"""
Class to handle RESTful requests
"""
INTERACTIVE_FORMATS = ("html", "iframe", "popup", "dl")
DEFAULT_REPRESENTATION = "html"
# -------------------------------------------------------------------------
def __init__(self,
prefix=None,
name=None,
r=None,
c=None,
f=None,
args=None,
vars=None,
extension=None,
get_vars=None,
post_vars=None,
http=None):
"""
Constructor
@param prefix: the table name prefix
@param name: the table name
@param c: the controller prefix
@param f: the controller function
@param args: list of request arguments
@param vars: dict of request variables
@param extension: the format extension (representation)
@param get_vars: the URL query variables (overrides vars)
@param post_vars: the POST variables (overrides vars)
@param http: the HTTP method (GET, PUT, POST, or DELETE)
@note: all parameters fall back to the attributes of the
current web2py request object
"""
auth = current.auth
# Common settings
# XSLT Paths
self.XSLT_PATH = "static/formats"
self.XSLT_EXTENSION = "xsl"
# Attached files
self.files = Storage()
# Allow override of controller/function
self.controller = c or self.controller
self.function = f or self.function
if "." in self.function:
self.function, ext = self.function.split(".", 1)
if extension is None:
extension = ext
if c or f:
if not auth.permission.has_permission("read",
c=self.controller,
f=self.function):
auth.permission.fail()
# Allow override of request args/vars
if args is not None:
if isinstance(args, (list, tuple)):
self.args = args
else:
self.args = [args]
if get_vars is not None:
self.get_vars = get_vars
self.vars = get_vars.copy()
if post_vars is not None:
self.vars.update(post_vars)
else:
self.vars.update(self.post_vars)
if post_vars is not None:
self.post_vars = post_vars
if get_vars is None:
self.vars = post_vars.copy()
self.vars.update(self.get_vars)
if get_vars is None and post_vars is None and vars is not None:
self.vars = vars
self.get_vars = vars
self.post_vars = Storage()
self.extension = extension or current.request.extension
self.http = http or current.request.env.request_method
# Main resource attributes
if r is not None:
if not prefix:
prefix = r.prefix
if not name:
name = r.name
self.prefix = prefix or self.controller
self.name = name or self.function
# Parse the request
self.__parse()
self.custom_action = None
get_vars = Storage(self.get_vars)
# Interactive representation format?
self.interactive = self.representation in self.INTERACTIVE_FORMATS
# Show information on deleted records?
include_deleted = False
if self.representation == "xml" and "include_deleted" in get_vars:
include_deleted = True
if "components" in get_vars:
cnames = get_vars["components"]
if isinstance(cnames, list):
cnames = ",".join(cnames)
cnames = cnames.split(",")
if len(cnames) == 1 and cnames[0].lower() == "none":
cnames = []
else:
cnames = None
# Append component ID to the URL query
component_name = self.component_name
component_id = self.component_id
if component_name and component_id:
varname = "%s.id" % component_name
if varname in get_vars:
var = get_vars[varname]
if not isinstance(var, (list, tuple)):
var = [var]
var.append(component_id)
get_vars[varname] = var
else:
get_vars[varname] = component_id
# Define the target resource
_filter = current.response.s3.filter
components = component_name
if components is None:
components = cnames
tablename = "%s_%s" % (self.prefix, self.name)
if self.method == "review":
approved, unapproved = False, True
elif auth.s3_has_permission("review", tablename, self.id):
# Approvers should be able to edit records during review
# @ToDo: deployment_setting to allow Filtering out from
# multi-record methods even for those with Review permission
approved, unapproved = True, True
else:
approved, unapproved = True, False
self.resource = S3Resource(tablename,
id=self.id,
filter=_filter,
vars=get_vars,
components=components,
approved=approved,
unapproved=unapproved,
include_deleted=include_deleted,
context=True,
filter_component=component_name,
)
self.tablename = self.resource.tablename
table = self.table = self.resource.table
# Try to load the master record
self.record = None
uid = self.vars.get("%s.uid" % self.name)
if self.id or uid and not isinstance(uid, (list, tuple)):
# Single record expected
self.resource.load()
if len(self.resource) == 1:
self.record = self.resource.records().first()
_id = table._id.name
self.id = self.record[_id]
s3_store_last_record_id(self.tablename, self.id)
else:
raise KeyError(current.ERROR.BAD_RECORD)
# Identify the component
self.component = None
if self.component_name:
c = self.resource.components.get(self.component_name)
if c:
self.component = c
else:
error = "%s not a component of %s" % (self.component_name,
self.resource.tablename)
raise AttributeError(error)
# Identify link table and link ID
self.link = None
self.link_id = None
if self.component is not None:
self.link = self.component.link
if self.link and self.id and self.component_id:
self.link_id = self.link.link_id(self.id, self.component_id)
if self.link_id is None:
raise KeyError(current.ERROR.BAD_RECORD)
# Store method handlers
self._handler = Storage()
set_handler = self.set_handler
set_handler("export_tree", self.get_tree,
http=("GET",), transform=True)
set_handler("import_tree", self.put_tree,
http=("GET", "PUT", "POST"), transform=True)
set_handler("fields", self.get_fields,
http=("GET",), transform=True)
set_handler("options", self.get_options,
http=("GET",), transform=True)
sync = current.sync
set_handler("sync", sync,
http=("GET", "PUT", "POST",), transform=True)
set_handler("sync_log", sync.log,
http=("GET",), transform=True)
set_handler("sync_log", sync.log,
http=("GET",), transform=False)
# Initialize CRUD
self.resource.crud(self, method="_init")
if self.component is not None:
self.component.crud(self, method="_init")
# -------------------------------------------------------------------------
# Method handler configuration
# -------------------------------------------------------------------------
def set_handler(self, method, handler,
http=None,
representation=None,
transform=False):
"""
Set a method handler for this request
@param method: the method name
@param handler: the handler function
@type handler: handler(S3Request, **attr)
"""
HTTP = ("GET", "PUT", "POST", "DELETE")
if http is None:
http = HTTP
if not isinstance(http, (set, tuple, list)):
http = [http]
if transform:
representation = ["__transform__"]
elif representation is None:
representation = [self.DEFAULT_REPRESENTATION]
if not isinstance(representation, (set, tuple, list)):
representation = [representation]
if not isinstance(method, (set, tuple, list)):
method = [method]
handlers = self._handler
for h in http:
if h not in HTTP:
continue
if h not in handlers:
handlers[h] = Storage()
format_hooks = handlers[h]
for r in representation:
if r not in format_hooks:
format_hooks[r] = Storage()
method_hooks = format_hooks[r]
for m in method:
if m is None:
_m = "__none__"
else:
_m = m
method_hooks[_m] = handler
return
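    # Illustrative call, not from the original source: register a hypothetical
    # "report" handler for GET requests only, e.g.
    #
    #   r.set_handler("report", report_handler, http=("GET",))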
# -------------------------------------------------------------------------
def get_handler(self, method, transform=False):
"""
Get a method handler for this request
@param method: the method name
@return: the handler function
"""
http = self.http
representation = self.representation
if transform:
representation = "__transform__"
elif representation is None:
representation = self.DEFAULT_REPRESENTATION
if method is None:
method = "__none__"
if http not in self._handler:
http = "GET"
if http not in self._handler:
return None
else:
format_hooks = self._handler[http]
if representation not in format_hooks:
representation = self.DEFAULT_REPRESENTATION
if representation not in format_hooks:
return None
else:
method_hooks = format_hooks[representation]
if method not in method_hooks:
method = "__none__"
if method not in method_hooks:
return None
else:
handler = method_hooks[method]
if isinstance(handler, (type, types.ClassType)):
return handler()
else:
return handler
# -------------------------------------------------------------------------
def get_widget_handler(self, method):
"""
Get the widget handler for a method
@param method: the widget method
"""
if self.component:
resource = self.component
if resource.link:
resource = resource.link
else:
resource = self.resource
prefix, name = self.prefix, self.name
component_name = self.component_name
custom_action = current.s3db.get_method(prefix,
name,
component_name=component_name,
method=method)
http = self.http
handler = None
if method and custom_action:
handler = custom_action
if http == "GET":
if not method:
if resource.count() == 1:
method = "read"
else:
method = "list"
transform = self.transformable()
handler = self.get_handler(method, transform=transform)
elif http == "PUT":
transform = self.transformable(method="import")
handler = self.get_handler(method, transform=transform)
elif http == "POST":
transform = self.transformable(method="import")
return self.get_handler(method, transform=transform)
elif http == "DELETE":
if method:
return self.get_handler(method)
else:
return self.get_handler("delete")
else:
return None
if handler is None:
handler = resource.crud
if isinstance(handler, (type, types.ClassType)):
handler = handler()
return handler
# -------------------------------------------------------------------------
# Request Parser
# -------------------------------------------------------------------------
def __parse(self):
""" Parses the web2py request object """
self.id = None
self.component_name = None
self.component_id = None
self.method = None
representation = self.extension
# Get the names of all components
tablename = "%s_%s" % (self.prefix, self.name)
components = current.s3db.get_components(tablename)
if components:
components = components.keys()
else:
components = []
# Map request args, catch extensions
f = []
append = f.append
args = self.args
if len(args) > 4:
args = args[:4]
method = self.name
for arg in args:
if "." in arg:
arg, representation = arg.rsplit(".", 1)
if method is None:
method = arg
elif arg.isdigit():
append((method, arg))
method = None
else:
append((method, None))
method = arg
if method:
append((method, None))
self.id = f[0][1]
# Sort out component name and method
l = len(f)
if l > 1:
m = f[1][0].lower()
i = f[1][1]
if m in components:
self.component_name = m
self.component_id = i
else:
self.method = m
if not self.id:
self.id = i
if self.component_name and l > 2:
self.method = f[2][0].lower()
if not self.component_id:
self.component_id = f[2][1]
representation = s3_get_extension(self)
if representation:
self.representation = representation
else:
self.representation = self.DEFAULT_REPRESENTATION
return
# -------------------------------------------------------------------------
# REST Interface
# -------------------------------------------------------------------------
def __call__(self, **attr):
"""
Execute this request
@param attr: Parameters for the method handler
"""
response = current.response
s3 = response.s3
self.next = None
bypass = False
output = None
preprocess = None
postprocess = None
representation = self.representation
# Enforce primary record ID
if not self.id and representation == "html":
if self.component or self.method in ("read", "profile", "update"):
count = self.resource.count()
if self.vars is not None and count == 1:
self.resource.load()
self.record = self.resource._rows[0]
self.id = self.record.id
else:
#current.session.error = current.ERROR.BAD_RECORD
redirect(URL(r=self, c=self.prefix, f=self.name))
# Pre-process
if s3 is not None:
preprocess = s3.get("prep")
if preprocess:
pre = preprocess(self)
# Re-read representation after preprocess:
representation = self.representation
if pre and isinstance(pre, dict):
bypass = pre.get("bypass", False) is True
output = pre.get("output")
if not bypass:
success = pre.get("success", True)
if not success:
if representation == "html" and output:
if isinstance(output, dict):
output.update(r=self)
return output
else:
status = pre.get("status", 400)
message = pre.get("message",
current.ERROR.BAD_REQUEST)
self.error(status, message)
elif not pre:
self.error(400, current.ERROR.BAD_REQUEST)
# Default view
if representation not in ("html", "popup"):
response.view = "xml.html"
# Content type
response.headers["Content-Type"] = s3.content_type.get(representation,
"text/html")
# Custom action?
if not self.custom_action:
action = current.s3db.get_method(self.prefix,
self.name,
component_name=self.component_name,
method=self.method)
if isinstance(action, (type, types.ClassType)):
self.custom_action = action()
else:
self.custom_action = action
# Method handling
http = self.http
handler = None
if not bypass:
# Find the method handler
if self.method and self.custom_action:
handler = self.custom_action
elif http == "GET":
handler = self.__GET()
elif http == "PUT":
handler = self.__PUT()
elif http == "POST":
handler = self.__POST()
elif http == "DELETE":
handler = self.__DELETE()
else:
self.error(405, current.ERROR.BAD_METHOD)
# Invoke the method handler
if handler is not None:
output = handler(self, **attr)
else:
# Fall back to CRUD
output = self.resource.crud(self, **attr)
# Post-process
if s3 is not None:
postprocess = s3.get("postp")
if postprocess is not None:
output = postprocess(self, output)
if output is not None and isinstance(output, dict):
# Put a copy of r into the output for the view
# to be able to make use of it
output.update(r=self)
# Redirection
if self.next is not None and \
(self.http != "GET" or self.method == "clear"):
if isinstance(output, dict):
form = output.get("form")
if form:
if not hasattr(form, "errors"):
# Form embedded in a DIV together with other components
form = form.elements('form', first_only=True)
form = form[0] if form else None
if form and form.errors:
return output
session = current.session
session.flash = response.flash
session.confirmation = response.confirmation
session.error = response.error
session.warning = response.warning
redirect(self.next)
return output
# -------------------------------------------------------------------------
def __GET(self, resource=None):
"""
Get the GET method handler
"""
method = self.method
transform = False
if method is None or method in ("read", "display", "update"):
if self.transformable():
method = "export_tree"
transform = True
elif self.component:
resource = self.resource
if self.interactive and resource.count() == 1:
# Load the record
if not resource._rows:
resource.load(start=0, limit=1)
if resource._rows:
self.record = resource._rows[0]
self.id = resource.get_id()
self.uid = resource.get_uid()
if self.component.multiple and not self.component_id:
method = "list"
else:
method = "read"
elif self.id or method in ("read", "display", "update"):
# Enforce single record
resource = self.resource
if not resource._rows:
resource.load(start=0, limit=1)
if resource._rows:
self.record = resource._rows[0]
self.id = resource.get_id()
self.uid = resource.get_uid()
else:
self.error(404, current.ERROR.BAD_RECORD)
method = "read"
else:
method = "list"
elif method in ("create", "update"):
if self.transformable(method="import"):
method = "import_tree"
transform = True
elif method == "delete":
return self.__DELETE()
elif method == "clear" and not self.component:
s3_remove_last_record_id(self.tablename)
self.next = URL(r=self, f=self.name)
return lambda r, **attr: None
elif self.transformable():
transform = True
return self.get_handler(method, transform=transform)
# -------------------------------------------------------------------------
def __PUT(self):
"""
Get the PUT method handler
"""
method = self.method
transform = self.transformable(method="import")
if not self.method and transform:
method = "import_tree"
return self.get_handler(method, transform=transform)
# -------------------------------------------------------------------------
def __POST(self):
"""
Get the POST method handler
"""
method = self.method
if method == "delete":
return self.__DELETE()
else:
if self.transformable(method="import"):
return self.__PUT()
else:
post_vars = self.post_vars
table = self.target()[2]
if "deleted" in table and "id" not in post_vars: # and "uuid" not in post_vars:
original = S3Resource.original(table, post_vars)
if original and original.deleted:
self.post_vars.update(id=original.id)
self.vars.update(id=original.id)
return self.__GET()
# -------------------------------------------------------------------------
def __DELETE(self):
"""
Get the DELETE method handler
"""
if self.method:
return self.get_handler(self.method)
else:
return self.get_handler("delete")
# -------------------------------------------------------------------------
# Built-in method handlers
# -------------------------------------------------------------------------
@staticmethod
def get_tree(r, **attr):
"""
XML Element tree export method
@param r: the S3Request instance
@param attr: controller attributes
"""
get_vars = r.get_vars
args = Storage()
# Slicing
start = get_vars.get("start")
if start is not None:
try:
start = int(start)
except ValueError:
start = None
limit = get_vars.get("limit")
if limit is not None:
try:
limit = int(limit)
except ValueError:
limit = None
# msince
msince = get_vars.get("msince")
if msince is not None:
msince = s3_parse_datetime(msince)
# Show IDs (default: False)
if "show_ids" in get_vars:
if get_vars["show_ids"].lower() == "true":
current.xml.show_ids = True
# Show URLs (default: True)
if "show_urls" in get_vars:
if get_vars["show_urls"].lower() == "false":
current.xml.show_urls = False
# Maxbounds (default: False)
maxbounds = False
if "maxbounds" in get_vars:
if get_vars["maxbounds"].lower() == "true":
maxbounds = True
if r.representation in ("gpx", "osm"):
maxbounds = True
# Components of the master resource (tablenames)
if "mcomponents" in get_vars:
mcomponents = get_vars["mcomponents"]
if str(mcomponents).lower() == "none":
mcomponents = None
elif not isinstance(mcomponents, list):
mcomponents = mcomponents.split(",")
else:
mcomponents = [] # all
# Components of referenced resources (tablenames)
if "rcomponents" in get_vars:
rcomponents = get_vars["rcomponents"]
if str(rcomponents).lower() == "none":
rcomponents = None
elif not isinstance(rcomponents, list):
rcomponents = rcomponents.split(",")
else:
rcomponents = None
# Maximum reference resolution depth
if "maxdepth" in get_vars:
try:
args["maxdepth"] = int(get_vars["maxdepth"])
except ValueError:
pass
# References to resolve (field names)
if "references" in get_vars:
references = get_vars["references"]
if str(references).lower() == "none":
references = []
elif not isinstance(references, list):
references = references.split(",")
else:
references = None # all
# Export field selection
if "fields" in get_vars:
fields = get_vars["fields"]
if str(fields).lower() == "none":
fields = []
elif not isinstance(fields, list):
fields = fields.split(",")
else:
fields = None # all
# Find XSLT stylesheet
stylesheet = r.stylesheet()
# Add stylesheet parameters
if stylesheet is not None:
if r.component:
args.update(id=r.id,
component=r.component.tablename)
if r.component.alias:
args.update(alias=r.component.alias)
mode = get_vars.get("xsltmode")
if mode is not None:
args.update(mode=mode)
# Set response headers
response = current.response
s3 = response.s3
headers = response.headers
representation = r.representation
if representation in s3.json_formats:
as_json = True
default = "application/json"
else:
as_json = False
default = "text/xml"
headers["Content-Type"] = s3.content_type.get(representation,
default)
# Export the resource
resource = r.resource
target = r.target()[3]
if target == resource.tablename:
            # Master resource targeted
target = None
output = resource.export_xml(start=start,
limit=limit,
msince=msince,
fields=fields,
dereference=True,
# maxdepth in args
references=references,
mcomponents=mcomponents,
rcomponents=rcomponents,
stylesheet=stylesheet,
as_json=as_json,
maxbounds=maxbounds,
target= target,
**args)
# Transformation error?
if not output:
r.error(400, "XSLT Transformation Error: %s " % current.xml.error)
return output
# -------------------------------------------------------------------------
@staticmethod
def put_tree(r, **attr):
"""
XML Element tree import method
            @param r: the S3Request instance
@param attr: controller attributes
"""
get_vars = r.get_vars
# Skip invalid records?
if "ignore_errors" in get_vars:
ignore_errors = True
else:
ignore_errors = False
# Find all source names in the URL vars
def findnames(get_vars, name):
nlist = []
if name in get_vars:
names = get_vars[name]
if isinstance(names, (list, tuple)):
names = ",".join(names)
names = names.split(",")
for n in names:
if n[0] == "(" and ")" in n[1:]:
nlist.append(n[1:].split(")", 1))
else:
nlist.append([None, n])
return nlist
filenames = findnames(get_vars, "filename")
fetchurls = findnames(get_vars, "fetchurl")
source_url = None
# Get the source(s)
s3 = current.response.s3
json_formats = s3.json_formats
csv_formats = s3.csv_formats
source = []
format = r.representation
if format in json_formats or format in csv_formats:
if filenames:
try:
for f in filenames:
source.append((f[0], open(f[1], "rb")))
except:
source = []
elif fetchurls:
import urllib
try:
for u in fetchurls:
source.append((u[0], urllib.urlopen(u[1])))
except:
source = []
elif r.http != "GET":
source = r.read_body()
else:
if filenames:
source = filenames
elif fetchurls:
source = fetchurls
# Assume only 1 URL for GeoRSS feed caching
source_url = fetchurls[0][1]
elif r.http != "GET":
source = r.read_body()
if not source:
if filenames or fetchurls:
# Error: source not found
r.error(400, "Invalid source")
else:
# No source specified => return resource structure
return r.get_struct(r, **attr)
# Find XSLT stylesheet
stylesheet = r.stylesheet(method="import")
# Target IDs
if r.method == "create":
_id = None
else:
_id = r.id
# Transformation mode?
if "xsltmode" in get_vars:
args = dict(xsltmode=get_vars["xsltmode"])
else:
args = dict()
# These 3 options are called by gis.show_map() & read by the
# GeoRSS Import stylesheet to populate the gis_cache table
# Source URL: For GeoRSS/KML Feed caching
if source_url:
args["source_url"] = source_url
# Data Field: For GeoRSS/KML Feed popups
if "data_field" in get_vars:
args["data_field"] = get_vars["data_field"]
# Image Field: For GeoRSS/KML Feed popups
if "image_field" in get_vars:
args["image_field"] = get_vars["image_field"]
# Format type?
if format in json_formats:
format = "json"
elif format in csv_formats:
format = "csv"
else:
format = "xml"
try:
output = r.resource.import_xml(source,
id=_id,
format=format,
files=r.files,
stylesheet=stylesheet,
ignore_errors=ignore_errors,
**args)
except IOError:
current.auth.permission.fail()
except SyntaxError:
e = sys.exc_info()[1]
if hasattr(e, "message"):
e = e.message
r.error(400, e)
return output
# -------------------------------------------------------------------------
@staticmethod
def get_struct(r, **attr):
"""
Resource structure introspection method
@param r: the S3Request instance
@param attr: controller attributes
"""
response = current.response
json_formats = response.s3.json_formats
if r.representation in json_formats:
as_json = True
content_type = "application/json"
else:
as_json = False
content_type = "text/xml"
get_vars = r.get_vars
meta = str(get_vars.get("meta", False)).lower() == "true"
opts = str(get_vars.get("options", False)).lower() == "true"
refs = str(get_vars.get("references", False)).lower() == "true"
stylesheet = r.stylesheet()
output = r.resource.export_struct(meta=meta,
options=opts,
references=refs,
stylesheet=stylesheet,
as_json=as_json)
if output is None:
# Transformation error
r.error(400, current.xml.error)
response.headers["Content-Type"] = content_type
return output
# -------------------------------------------------------------------------
@staticmethod
def get_fields(r, **attr):
"""
Resource structure introspection method (single table)
@param r: the S3Request instance
@param attr: controller attributes
"""
representation = r.representation
if representation == "xml":
output = r.resource.export_fields(component=r.component_name)
content_type = "text/xml"
elif representation == "s3json":
output = r.resource.export_fields(component=r.component_name,
as_json=True)
content_type = "application/json"
else:
r.error(501, current.ERROR.BAD_FORMAT)
response = current.response
response.headers["Content-Type"] = content_type
return output
# -------------------------------------------------------------------------
@staticmethod
def get_options(r, **attr):
"""
Field options introspection method (single table)
@param r: the S3Request instance
@param attr: controller attributes
"""
get_vars = r.get_vars
if "field" in get_vars:
items = get_vars["field"]
if not isinstance(items, (list, tuple)):
items = [items]
fields = []
add_fields = fields.extend
for item in items:
f = item.split(",")
if f:
add_fields(f)
else:
fields = None
if "hierarchy" in get_vars:
hierarchy = get_vars["hierarchy"].lower() not in ("false", "0")
else:
hierarchy = False
if "only_last" in get_vars:
only_last = get_vars["only_last"].lower() not in ("false", "0")
else:
only_last = False
if "show_uids" in get_vars:
show_uids = get_vars["show_uids"].lower() not in ("false", "0")
else:
show_uids = False
representation = r.representation
if representation == "xml":
only_last = False
as_json = False
content_type = "text/xml"
elif representation == "s3json":
show_uids = False
as_json = True
content_type = "application/json"
else:
r.error(501, current.ERROR.BAD_FORMAT)
component = r.component_name
output = r.resource.export_options(component=component,
fields=fields,
show_uids=show_uids,
only_last=only_last,
hierarchy=hierarchy,
as_json=as_json)
current.response.headers["Content-Type"] = content_type
return output
# -------------------------------------------------------------------------
# Tools
# -------------------------------------------------------------------------
def factory(self, **args):
"""
Generate a new request for the same resource
@param args: arguments for request constructor
"""
return s3_request(r=self, **args)
# -------------------------------------------------------------------------
def __getattr__(self, key):
"""
Called upon S3Request.<key> - looks up the value for the <key>
attribute. Falls back to current.request if the attribute is
not defined in this S3Request.
@param key: the key to lookup
"""
if key in self.__dict__:
return self.__dict__[key]
sentinel = object()
value = getattr(current.request, key, sentinel)
if value is sentinel:
raise AttributeError
return value
# -------------------------------------------------------------------------
def transformable(self, method=None):
"""
Check the request for a transformable format
@param method: "import" for import methods, else None
"""
if self.representation in ("html", "aadata", "popup", "iframe"):
return False
stylesheet = self.stylesheet(method=method, skip_error=True)
if not stylesheet and self.representation != "xml":
return False
else:
return True
# -------------------------------------------------------------------------
def actuate_link(self, component_id=None):
"""
Determine whether to actuate a link or not
@param component_id: the component_id (if not self.component_id)
"""
if not component_id:
component_id = self.component_id
if self.component:
single = component_id != None
component = self.component
if component.link:
actuate = self.component.actuate
if "linked" in self.get_vars:
linked = self.get_vars.get("linked", False)
linked = linked in ("true", "True")
if linked:
actuate = "replace"
else:
actuate = "hide"
if actuate == "link":
if self.method != "delete" and self.http != "DELETE":
return single
else:
return not single
elif actuate == "replace":
return True
#elif actuate == "embed":
#raise NotImplementedError
else:
return False
else:
return True
else:
return False
# -------------------------------------------------------------------------
@staticmethod
def unauthorised():
"""
Action upon unauthorised request
"""
current.auth.permission.fail()
# -------------------------------------------------------------------------
def error(self, status, message, tree=None, next=None):
"""
Action upon error
@param status: HTTP status code
@param message: the error message
@param tree: the tree causing the error
"""
if self.representation == "html":
current.session.error = message
if next is not None:
redirect(next)
else:
redirect(URL(r=self, f="index"))
else:
headers = {"Content-Type":"application/json"}
current.log.error(message)
raise HTTP(status,
body=current.xml.json_message(success=False,
statuscode=status,
message=message,
tree=tree),
web2py_error=message,
**headers)
# -------------------------------------------------------------------------
def url(self,
id=None,
component=None,
component_id=None,
target=None,
method=None,
representation=None,
vars=None,
host=None):
"""
            Returns the URL of this request; use parameters to override
            the current request's attributes:
- None to keep current attribute (default)
- 0 or "" to set attribute to NONE
- value to use explicit value
@param id: the master record ID
@param component: the component name
@param component_id: the component ID
@param target: the target record ID (choose automatically)
@param method: the URL method
@param representation: the representation for the URL
@param vars: the URL query variables
@param host: string to force absolute URL with host (True means http_host)
Particular behavior:
- changing the master record ID resets the component ID
- removing the target record ID sets the method to None
- removing the method sets the target record ID to None
- [] as id will be replaced by the "[id]" wildcard
"""
if vars is None:
vars = self.get_vars
elif vars and isinstance(vars, str):
# We've come from a dataTable_vars which has the vars as
# a JSON string, but with the wrong quotation marks
vars = json.loads(vars.replace("'", "\""))
if "format" in vars:
del vars["format"]
args = []
cname = self.component_name
# target
if target is not None:
if cname and (component is None or component == cname):
component_id = target
else:
id = target
# method
default_method = False
if method is None:
default_method = True
method = self.method
elif method == "":
# Switch to list? (= method="" and no explicit target ID)
if component_id is None:
if self.component_id is not None:
component_id = 0
elif not self.component:
if id is None:
if self.id is not None:
id = 0
method = None
# id
if id is None:
id = self.id
elif id in (0, ""):
id = None
elif id in ([], "[id]", "*"):
id = "[id]"
component_id = 0
elif str(id) != str(self.id):
component_id = 0
# component
if component is None:
component = cname
elif component == "":
component = None
if cname and cname != component or not component:
component_id = 0
# component_id
if component_id is None:
component_id = self.component_id
elif component_id == 0:
component_id = None
if self.component_id and default_method:
method = None
if id is None and self.id and \
(not component or not component_id) and default_method:
method = None
if id:
args.append(id)
if component:
args.append(component)
if component_id:
args.append(component_id)
if method:
args.append(method)
# representation
if representation is None:
representation = self.representation
elif representation == "":
representation = self.DEFAULT_REPRESENTATION
f = self.function
        if representation != self.DEFAULT_REPRESENTATION:
if len(args) > 0:
args[-1] = "%s.%s" % (args[-1], representation)
else:
f = "%s.%s" % (f, representation)
return URL(r=self,
c=self.controller,
f=f,
args=args,
vars=vars,
host=host)
# -------------------------------------------------------------------------
def target(self):
"""
Get the target table of the current request
@return: a tuple of (prefix, name, table, tablename) of the target
resource of this request
@todo: update for link table support
"""
component = self.component
if component is not None:
link = self.component.link
if link and not self.actuate_link():
                return (link.prefix,
link.name,
link.table,
link.tablename)
return (component.prefix,
component.name,
component.table,
component.tablename)
else:
return (self.prefix,
self.name,
self.table,
self.tablename)
# -------------------------------------------------------------------------
def stylesheet(self, method=None, skip_error=False):
"""
Find the XSLT stylesheet for this request
@param method: "import" for data imports, else None
@param skip_error: do not raise an HTTP error status
if the stylesheet cannot be found
"""
stylesheet = None
format = self.representation
if self.component:
resourcename = self.component.name
else:
resourcename = self.name
# Native S3XML?
if format == "xml":
return stylesheet
# External stylesheet specified?
if "transform" in self.vars:
return self.vars["transform"]
# Stylesheet attached to the request?
extension = self.XSLT_EXTENSION
filename = "%s.%s" % (resourcename, extension)
if filename in self.post_vars:
p = self.post_vars[filename]
import cgi
if isinstance(p, cgi.FieldStorage) and p.filename:
stylesheet = p.file
return stylesheet
# Internal stylesheet?
folder = self.folder
path = self.XSLT_PATH
if method != "import":
method = "export"
filename = "%s.%s" % (method, extension)
stylesheet = os.path.join(folder, path, format, filename)
if not os.path.exists(stylesheet):
if not skip_error:
self.error(501, "%s: %s" % (current.ERROR.BAD_TEMPLATE,
stylesheet))
else:
stylesheet = None
return stylesheet
# -------------------------------------------------------------------------
def read_body(self):
"""
Read data from request body
"""
self.files = Storage()
content_type = self.env.get("content_type")
source = []
if content_type and content_type.startswith("multipart/"):
import cgi
ext = ".%s" % self.representation
post_vars = self.post_vars
for v in post_vars:
p = post_vars[v]
if isinstance(p, cgi.FieldStorage) and p.filename:
self.files[p.filename] = p.file
if p.filename.endswith(ext):
source.append((v, p.file))
elif v.endswith(ext):
if isinstance(p, cgi.FieldStorage):
source.append((v, p.value))
elif isinstance(p, basestring):
source.append((v, StringIO(p)))
else:
s = self.body
s.seek(0)
source.append(s)
return source
# -------------------------------------------------------------------------
def customise_resource(self, tablename=None):
"""
Invoke the customization callback for a resource.
@param tablename: the tablename of the resource; if called
without tablename it will invoke the callbacks
for the target resources of this request:
- master
- active component
- active link table
(in this order)
Resource customization functions can be defined like:
def customise_resource_my_table(r, tablename):
current.s3db.configure(tablename,
my_custom_setting = "example")
return
settings.customise_resource_my_table = \
customise_resource_my_table
@note: the hook itself can call r.customise_resource in order
to cascade customizations as necessary
@note: if a table is customised that is not currently loaded,
then it will be loaded for this process
"""
if tablename is None:
customise = self.customise_resource
customise(self.resource.tablename)
component = self.component
if component:
customise(component.tablename)
link = self.link
if link:
customise(link.tablename)
else:
# Always load the model first (otherwise it would
# override the custom settings when loaded later)
db = current.db
if tablename not in db:
db.table(tablename)
customise = current.deployment_settings.customise_resource(tablename)
if customise:
customise(self, tablename)
return
class S3Method(object):
"""
REST Method Handler Base Class
Method handler classes should inherit from this class and
implement the apply_method() method.
@note: instances of subclasses don't have any of the instance
attributes available until they actually get invoked
from a request - i.e. apply_method() should never be
called directly.
"""
# -------------------------------------------------------------------------
def __call__(self, r, method=None, widget_id=None, **attr):
"""
Entry point for the REST interface
@param r: the S3Request
@param method: the method established by the REST interface
@param widget_id: widget ID
@param attr: dict of parameters for the method handler
@return: output object to send to the view
"""
# Environment of the request
self.request = r
# Settings
response = current.response
self.download_url = response.s3.download_url
# Init
self.next = None
# Override request method
if method is not None:
self.method = method
else:
self.method = r.method
# Find the target resource and record
if r.component:
component = r.component
resource = component
self.record_id = self._record_id(r)
if not self.method:
if component.multiple and not r.component_id:
self.method = "list"
else:
self.method = "read"
if component.link:
actuate_link = r.actuate_link()
if not actuate_link:
resource = component.link
else:
self.record_id = r.id
resource = r.resource
if not self.method:
if r.id or r.method in ("read", "display"):
self.method = "read"
else:
self.method = "list"
self.prefix = resource.prefix
self.name = resource.name
self.tablename = resource.tablename
self.table = resource.table
self.resource = resource
if self.method == "_init":
return None
if r.interactive:
# hide_filter policy:
#
# None show filters on master,
# hide for components (default)
# False show all filters (on all tabs)
# True hide all filters (on all tabs)
#
# dict(alias=setting) setting per component, alias
# None means master resource,
# use special alias _default
# to specify an alternative
# default
#
hide_filter = attr.get("hide_filter")
if isinstance(hide_filter, dict):
component_name = r.component_name
if component_name in hide_filter:
hide_filter = hide_filter[component_name]
elif "_default" in hide_filter:
hide_filter = hide_filter["_default"]
else:
hide_filter = None
if hide_filter is None:
hide_filter = r.component is not None
self.hide_filter = hide_filter
else:
self.hide_filter = True
# Apply method
if widget_id and hasattr(self, "widget"):
output = self.widget(r,
method=self.method,
widget_id=widget_id,
**attr)
else:
output = self.apply_method(r, **attr)
# Redirection
if self.next and resource.lastid:
self.next = str(self.next)
placeholder = "%5Bid%5D"
self.next = self.next.replace(placeholder, resource.lastid)
placeholder = "[id]"
self.next = self.next.replace(placeholder, resource.lastid)
if not response.error:
r.next = self.next
# Add additional view variables (e.g. rheader)
self._extend_view(output, r, **attr)
return output
# -------------------------------------------------------------------------
def apply_method(self, r, **attr):
"""
Stub, to be implemented in subclass. This method is used
to get the results as a standalone page.
@param r: the S3Request
@param attr: dictionary of parameters for the method handler
@return: output object to send to the view
"""
output = dict()
return output
# -------------------------------------------------------------------------
def widget(self, r, method=None, widget_id=None, visible=True, **attr):
"""
Stub, to be implemented in subclass. This method is used
by other method handlers to embed this method as widget.
@note:
For "html" format, the widget method must return an XML
component that can be embedded in a DIV. If a dict is
returned, it will be rendered against the view template
of the calling method - the view template selected by
the widget method will be ignored.
For other formats, the data returned by the widget method
will be rendered against the view template selected by
the widget method. If no view template is set, the data
will be returned as-is.
The widget must use the widget_id as HTML id for the element
providing the Ajax-update hook and this element must be
visible together with the widget.
The widget must include the widget_id as ?w=<widget_id> in
the URL query of the Ajax-update call, and Ajax-calls should
not use "html" format.
If visible==False, then the widget will initially be hidden,
so it can be rendered empty and Ajax-load its data layer
upon a separate refresh call. Otherwise, the widget should
receive its data layer immediately. Widgets can ignore this
parameter if delayed loading of the data layer is not
all([possible, useful, supported]).
@param r: the S3Request
@param method: the URL method
@param widget_id: the widget ID
@param visible: whether the widget is initially visible
@param attr: dictionary of parameters for the method handler
@return: output
"""
return None
# -------------------------------------------------------------------------
# Utility functions
# -------------------------------------------------------------------------
def _permitted(self, method=None):
"""
Check permission for the requested resource
@param method: method to check, defaults to the actually
requested method
"""
auth = current.auth
has_permission = auth.s3_has_permission
r = self.request
if not method:
method = self.method
if method in ("list", "datatable", "datalist"):
# Rest handled in S3Permission.METHODS
method = "read"
if r.component is None:
table = r.table
record_id = r.id
else:
table = r.component.table
record_id = r.component_id
if method == "create":
# Must have permission to update the master record
# in order to create a new component record...
master_access = has_permission("update",
r.table,
record_id=r.id)
if not master_access:
return False
return has_permission(method, table, record_id=record_id)
# -------------------------------------------------------------------------
@staticmethod
def _record_id(r):
"""
Get the ID of the target record of a S3Request
@param r: the S3Request
"""
master_id = r.id
if r.component:
component = r.component
component_id = r.component_id
link = r.link
if not component.multiple and not component_id:
# Enforce first component record
table = component.table
pkey = table._id.name
component.load(start=0, limit=1)
if len(component):
component_id = component.records().first()[pkey]
if link and master_id:
r.link_id = link.link_id(master_id, component_id)
r.component_id = component_id
if not link or r.actuate_link():
return component_id
else:
return r.link_id
else:
return master_id
return None
# -------------------------------------------------------------------------
def _config(self, key, default=None):
"""
Get a configuration setting of the current table
@param key: the setting key
@param default: the default value
"""
return current.s3db.get_config(self.tablename, key, default)
# -------------------------------------------------------------------------
@staticmethod
def _view(r, default):
"""
Get the path to the view template
@param r: the S3Request
@param default: name of the default view template
"""
folder = r.folder
prefix = r.controller
exists = os.path.exists
join = os.path.join
settings = current.deployment_settings
theme = settings.get_theme()
location = settings.get_template_location()
if theme != "default":
# See if there is a Custom View for this Theme
view = join(folder, location, "templates", theme, "views",
"%s_%s_%s" % (prefix, r.name, default))
if exists(view):
# There is a view specific to this page
# NB This should normally include {{extend layout.html}}
# Pass view as file not str to work in compiled mode
return open(view, "rb")
else:
if "/" in default:
subfolder, _default = default.split("/", 1)
else:
subfolder = ""
_default = default
if exists(join(folder, location, "templates", theme, "views",
subfolder, "_%s" % _default)):
# There is a general view for this page type
# NB This should not include {{extend layout.html}}
if subfolder:
subfolder = "%s/" % subfolder
# Pass this mapping to the View
current.response.s3.views[default] = \
"../%s/templates/%s/views/%s_%s" % (location,
theme,
subfolder,
_default)
if r.component:
view = "%s_%s_%s" % (r.name, r.component_name, default)
path = join(folder, "views", prefix, view)
if exists(path):
return "%s/%s" % (prefix, view)
else:
view = "%s_%s" % (r.name, default)
path = join(folder, "views", prefix, view)
else:
view = "%s_%s" % (r.name, default)
path = join(folder, "views", prefix, view)
if exists(path):
return "%s/%s" % (prefix, view)
else:
return default
# -------------------------------------------------------------------------
@staticmethod
def _extend_view(output, r, **attr):
"""
Add additional view variables (invokes all callables)
@param output: the output dict
@param r: the S3Request
@param attr: the view variables (e.g. 'rheader')
@note: overload this method in subclasses if you don't want
additional view variables to be added automatically
"""
if r.interactive and isinstance(output, dict):
for key in attr:
handler = attr[key]
if callable(handler):
resolve = True
try:
display = handler(r)
except TypeError:
# Argument list failure
# => pass callable to the view as-is
display = handler
continue
except:
# Propagate all other errors to the caller
raise
else:
resolve = False
display = handler
if isinstance(display, dict) and resolve:
output.update(**display)
elif display is not None:
output.update(**{key: display})
elif key in output and callable(handler):
del output[key]
# -------------------------------------------------------------------------
@staticmethod
def _remove_filters(vars):
"""
Remove all filters from URL vars
@param vars: the URL vars as dict
"""
return Storage((k, v) for k, v in vars.iteritems()
if not REGEX_FILTER.match(k))
# -------------------------------------------------------------------------
@staticmethod
def crud_string(tablename, name):
"""
Get a CRUD info string for interactive pages
@param tablename: the table name
@param name: the name of the CRUD string
"""
crud_strings = current.response.s3.crud_strings
# CRUD strings for this table
_crud_strings = crud_strings.get(tablename, crud_strings)
return _crud_strings.get(name,
# Default fallback
crud_strings.get(name))
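# Hedged illustration (not part of the original source): the smallest useful
# S3Method subclass just implements apply_method() and returns a dict for the
# view; the class name and the returned key are hypothetical.
class _ExampleMethod(S3Method):
    def apply_method(self, r, **attr):
        # r is the S3Request driving this call
        return {"item": "example output for %s" % r.tablename}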
def s3_request(*args, **kwargs):
"""
Helper function to generate S3Request instances
@param args: arguments for the S3Request
@param kwargs: keyword arguments for the S3Request
@keyword catch_errors: if set to False, errors will be raised
instead of returned to the client, useful
for optional sub-requests, or if the caller
implements fallbacks
"""
error = None
try:
r = S3Request(*args, **kwargs)
except (AttributeError, SyntaxError):
error = 400
except KeyError:
error = 404
if error:
if kwargs.get("catch_errors") is False:
raise
message = sys.exc_info()[1]
if hasattr(message, "message"):
message = message.message
if current.auth.permission.format == "html":
current.session.error = message
redirect(URL(f="index"))
else:
headers = {"Content-Type":"application/json"}
current.log.error(message)
raise HTTP(error,
body=current.xml.json_message(success=False,
statuscode=error,
message=message,
),
web2py_error=message,
**headers)
return r
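# Hedged usage sketch (assumption, not from the original source): building an
# optional sub-request for the current web2py request context, handling invalid
# URLs in the caller instead of redirecting the client, as the catch_errors
# keyword documented above suggests.
def _example_sub_request():
    try:
        r = s3_request(catch_errors=False)
    except (AttributeError, SyntaxError, KeyError):
        return None
    return r.resource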
|
from typing import Optional
from pathlib import Path
from wasabi import msg
import subprocess
import re
from ... import about
from ...util import ensure_path
from .._util import project_cli, Arg, Opt, COMMAND, PROJECT_FILE
from .._util import git_checkout, get_git_version
DEFAULT_REPO = about.__projects__
DEFAULT_PROJECTS_BRANCH = about.__projects_branch__
DEFAULT_BRANCH = "master"
@project_cli.command("clone")
def project_clone_cli(
# fmt: off
name: str = Arg(..., help="The name of the template to clone"),
dest: Optional[Path] = Arg(None, help="Where to clone the project. Defaults to current working directory", exists=False),
repo: str = Opt(DEFAULT_REPO, "--repo", "-r", help="The repository to clone from"),
branch: Optional[str] = Opt(None, "--branch", "-b", help="The branch to clone from"),
    sparse_checkout: bool = Opt(False, "--sparse", "-S", help="Use sparse Git checkout to only check out and clone the files needed. Requires Git v2.22+.")
# fmt: on
):
"""Clone a project template from a repository. Calls into "git" and will
only download the files from the given subdirectory. The GitHub repo
defaults to the official spaCy template repo, but can be customized
(including using a private repo).
DOCS: https://spacy.io/api/cli#project-clone
"""
if dest is None:
dest = Path.cwd() / Path(name).parts[-1]
if branch is None:
# If it's a user repo, we want to default to other branch
branch = DEFAULT_PROJECTS_BRANCH if repo == DEFAULT_REPO else DEFAULT_BRANCH
project_clone(name, dest, repo=repo, branch=branch, sparse_checkout=sparse_checkout)
def project_clone(
name: str,
dest: Path,
*,
repo: str = about.__projects__,
branch: str = about.__projects_branch__,
sparse_checkout: bool = False,
) -> None:
"""Clone a project template from a repository.
name (str): Name of subdirectory to clone.
dest (Path): Destination path of cloned project.
repo (str): URL of Git repo containing project templates.
branch (str): The branch to clone from
"""
dest = ensure_path(dest)
check_clone(name, dest, repo)
project_dir = dest.resolve()
repo_name = re.sub(r"(http(s?)):\/\/github.com/", "", repo)
try:
git_checkout(repo, name, dest, branch=branch, sparse=sparse_checkout)
except subprocess.CalledProcessError:
err = f"Could not clone '{name}' from repo '{repo_name}'"
msg.fail(err, exits=1)
msg.good(f"Cloned '{name}' from {repo_name}", project_dir)
if not (project_dir / PROJECT_FILE).exists():
msg.warn(f"No {PROJECT_FILE} found in directory")
else:
        msg.good("Your project is now ready!")
print(f"To fetch the assets, run:\n{COMMAND} project assets {dest}")
def check_clone(name: str, dest: Path, repo: str) -> None:
"""Check and validate that the destination path can be used to clone. Will
check that Git is available and that the destination path is suitable.
name (str): Name of the directory to clone from the repo.
dest (Path): Local destination of cloned directory.
repo (str): URL of the repo to clone from.
"""
git_err = (
f"Cloning spaCy project templates requires Git and the 'git' command. "
f"To clone a project without Git, copy the files from the '{name}' "
f"directory in the {repo} to {dest} manually."
)
get_git_version(error=git_err)
if not dest:
msg.fail(f"Not a valid directory to clone project: {dest}", exits=1)
if dest.exists():
# Directory already exists (not allowed, clone needs to create it)
msg.fail(f"Can't clone project, directory already exists: {dest}", exits=1)
if not dest.parent.exists():
# We're not creating parents, parent dir should exist
msg.fail(
f"Can't clone project, parent directory doesn't exist: {dest.parent}. "
f"Create the necessary folder(s) first before continuing.",
exits=1,
)
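# Hedged usage sketch (not part of the original module): cloning a template
# programmatically instead of via the CLI; the template name and destination
# directory are hypothetical.
def _example_clone() -> None:
    dest = Path.cwd() / "my_project"
    project_clone("pipelines/tagger_parser_ud", dest, sparse_checkout=False)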
|
import ShareYourSystem as SYS
MyCommander=SYS.CommanderClass(
).get(
'/ChildCommander/ChildCommander'
)
MyCommander['--ChildCommander']={
'MyInt':0
}
MyCommander['--...ChildCommander']={
'MyStr':"hello"
}
MyCommander['...--ChildCommander']={
'MyFloat':0.1
}
MyCommander['/...--ChildCommander']={
'MyList':[0.1]
}
MyCommander['.../--ChildCommander']={
'MyBool':False
}
print('MyCommander is ')
SYS._print(MyCommander)
|
"""
Large file parsing of Genepop files
The standard parser loads the whole file into memory. This parser
provides an iterator over data.
Classes:
LargeRecord Holds GenePop data.
Functions:
read Parses a GenePop record (file) into a Record object.
"""
def get_indiv(line):
indiv_name, marker_line = line.split(',')
markers = marker_line.replace('\t', ' ').split(' ')
markers = [marker for marker in markers if marker != '']
if len(markers[0]) in [2, 4]: # 2 digits per allele
marker_len = 2
else:
marker_len = 3
try:
allele_list = [(int(marker[0:marker_len]),
int(marker[marker_len:]))
for marker in markers]
except ValueError: # Haploid
allele_list = [(int(marker[0:marker_len]),)
for marker in markers]
return indiv_name, allele_list, marker_len
def read(handle):
"""Parses a handle containing a GenePop file.
handle is a file-like object that contains a GenePop record.
"""
record = Record(handle)
record.comment_line = str(handle.readline()).rstrip()
# We can now have one loci per line or all loci in a single line
# separated by either space or comma+space...
# We will remove all commas on loci... that should not be a problem
sample_loci_line = str(handle.readline()).rstrip().replace(',', '')
all_loci = sample_loci_line.split(' ')
record.loci_list.extend(all_loci)
line = handle.readline()
while line != "":
line = line.rstrip()
if line.upper() == "POP":
record.stack.append("POP")
break
record.loci_list.append(line)
line = handle.readline()
next_line = handle.readline().rstrip()
indiv_name, allele_list, record.marker_len = get_indiv(next_line)
record.stack.append(next_line)
return record
class Record(object):
"""Holds information from a GenePop record.
Members:
marker_len The marker length (2 or 3 digit code per allele).
comment_line Comment line.
loci_list List of loci names.
data_generator Iterates over population data.
The generator will only work once. If you want to read a handle
twice you have to re-open it!
    data_generator can either yield () - an empty tuple - marking a new
    population, or an individual. An individual is something like
    ('Ind1', [(1, 1), (3, None), (200, 201)]).
    In the case above the individual is called Ind1 and
    has three diploid loci. For the second locus, one of the alleles
    is unknown.
"""
def __init__(self, handle):
self.handle = handle
self.marker_len = 0
self.comment_line = ""
self.loci_list = []
self.populations = []
self.stack = []
def data_generator(self):
for handle in [self.stack, self.handle]:
for line in handle:
line = line.rstrip()
if line.upper() == 'POP':
yield ()
else:
indiv_name, allele_list, marker_len = get_indiv(line)
clean_list = []
for locus in allele_list:
mk_real = []
for al in locus:
if al == 0:
mk_real.append(None)
else:
mk_real.append(al)
clean_list.append(tuple(mk_real))
yield indiv_name, clean_list
        return
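# Hedged usage sketch (assumption, not from the original module): stream the
# individuals of each population; the file path argument is hypothetical.
def _example_iterate(path):
    with open(path) as handle:
        record = read(handle)
        for item in record.data_generator():
            if item == ():
                print("--- new population ---")
            else:
                indiv_name, alleles = item
                print("%s %s" % (indiv_name, alleles))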
|
from django.contrib.contenttypes.models import ContentType
from django.shortcuts import render
from django.conf import settings
from django.db.models.fields import related
from django.template.loader import get_template
from django.template import Context
import json
graph_settings = getattr(settings, 'SPAGHETTI_SAUCE', {})
apps = graph_settings.get('apps',[])
def plate(request):
excludes = ['%s__%s'%(app,model) for app,models in graph_settings.get('exclude',{}).items() for model in models ]
models = ContentType.objects.filter(app_label__in=apps)
nodes = []
edges = []
for model in models:
        if model.model_class() is None:
continue
model.doc = model.model_class().__doc__
_id = "%s__%s"%(model.app_label,model.model)
if _id in excludes:
continue
label = "%s"%(model.model)
fields = [f for f in model.model_class()._meta.fields]
many = [f for f in model.model_class()._meta.many_to_many]
if graph_settings.get('show_fields',True):
label += "\n%s\n"%("-"*len(model.model))
label += "\n".join([str(f.name) for f in fields])
edge_color = {'inherit':'from'}
for f in fields+many:
if f.rel is not None:
m = f.rel.to._meta
if m.app_label != model.app_label:
edge_color = {'inherit':'both'}
edge = { 'from':_id,
'to':"%s__%s"%(m.app_label,m.model_name),
'color':edge_color,
}
if str(f.name).endswith('_ptr'):
#fields that end in _ptr are pointing to a parent object
edge.update({
'arrows':{'to':{'scaleFactor':0.75}}, #needed to draw from-to
'font': {'align': 'middle'},
'label':'is a',
'dashes':True
})
elif type(f) == related.ForeignKey:
edge.update({
'arrows':{'to':{'scaleFactor':0.75}}
})
elif type(f) == related.OneToOneField:
edge.update({
'font': {'align': 'middle'},
'label':'|'
})
elif type(f) == related.ManyToManyField:
edge.update({
'color':{'color':'gray'},
'arrows':{'to':{'scaleFactor':1}, 'from':{'scaleFactor':1}},
})
edges.append(edge)
nodes.append(
{
'id':_id,
'label':label,
'shape':'box',
'group':model.app_label,
'title':get_template("django_spaghetti/meatball.html").render(
Context({'model':model,'fields':fields,})
)
}
)
data = {
'meatballs':json.dumps(nodes),
'spaghetti':json.dumps(edges)
}
return render(request, 'django_spaghetti/plate.html', data)
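# Hedged wiring sketch (comment only, not part of the original module): the
# plate view would typically be routed from a urls.py, for example
#   from django.conf.urls import url
#   urlpatterns = [url(r'^plate/$', plate, name='plate')]
# where the URL pattern and name are hypothetical.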
|
"""Test fee estimation code."""
from decimal import Decimal
import random
from test_framework.messages import (
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxOut,
)
from test_framework.script import (
CScript,
OP_1,
OP_2,
OP_DROP,
OP_TRUE,
)
from test_framework.script_util import (
script_to_p2sh_script,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_greater_than_or_equal,
assert_raises_rpc_error,
satoshi_round,
)
REDEEM_SCRIPT_1 = CScript([OP_1, OP_DROP])
REDEEM_SCRIPT_2 = CScript([OP_2, OP_DROP])
P2SH_1 = script_to_p2sh_script(REDEEM_SCRIPT_1)
P2SH_2 = script_to_p2sh_script(REDEEM_SCRIPT_2)
SCRIPT_SIG = [CScript([OP_TRUE, REDEEM_SCRIPT_1]), CScript([OP_TRUE, REDEEM_SCRIPT_2])]
def small_txpuzzle_randfee(from_node, conflist, unconflist, amount, min_fee, fee_increment):
"""Create and send a transaction with a random fee.
The transaction pays to a trivial P2SH script, and assumes that its inputs
are of the same form.
The function takes a list of confirmed outputs and unconfirmed outputs
and attempts to use the confirmed list first for its inputs.
It adds the newly created outputs to the unconfirmed list.
Returns (raw transaction, fee)."""
# It's best to exponentially distribute our random fees
# because the buckets are exponentially spaced.
# Exponentially distributed from 1-128 * fee_increment
rand_fee = float(fee_increment) * (1.1892 ** random.randint(0, 28))
# Total fee ranges from min_fee to min_fee + 127*fee_increment
fee = min_fee - fee_increment + satoshi_round(rand_fee)
tx = CTransaction()
total_in = Decimal("0.00000000")
while total_in <= (amount + fee) and len(conflist) > 0:
t = conflist.pop(0)
total_in += t["amount"]
tx.vin.append(CTxIn(COutPoint(int(t["txid"], 16), t["vout"]), b""))
if total_in <= amount + fee:
while total_in <= (amount + fee) and len(unconflist) > 0:
t = unconflist.pop(0)
total_in += t["amount"]
tx.vin.append(CTxIn(COutPoint(int(t["txid"], 16), t["vout"]), b""))
if total_in <= amount + fee:
raise RuntimeError(f"Insufficient funds: need {amount + fee}, have {total_in}")
tx.vout.append(CTxOut(int((total_in - amount - fee) * COIN), P2SH_1))
tx.vout.append(CTxOut(int(amount * COIN), P2SH_2))
# These transactions don't need to be signed, but we still have to insert
# the ScriptSig that will satisfy the ScriptPubKey.
for inp in tx.vin:
inp.scriptSig = SCRIPT_SIG[inp.prevout.n]
txid = from_node.sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0)
unconflist.append({"txid": txid, "vout": 0, "amount": total_in - amount - fee})
unconflist.append({"txid": txid, "vout": 1, "amount": amount})
return (tx.serialize().hex(), fee)
def split_inputs(from_node, txins, txouts, initial_split=False):
"""Generate a lot of inputs so we can generate a ton of transactions.
This function takes an input from txins, and creates and sends a transaction
which splits the value into 2 outputs which are appended to txouts.
Previously this was designed to be small inputs so they wouldn't have
a high coin age when the notion of priority still existed."""
prevtxout = txins.pop()
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(prevtxout["txid"], 16), prevtxout["vout"]), b""))
half_change = satoshi_round(prevtxout["amount"] / 2)
rem_change = prevtxout["amount"] - half_change - Decimal("0.00001000")
tx.vout.append(CTxOut(int(half_change * COIN), P2SH_1))
tx.vout.append(CTxOut(int(rem_change * COIN), P2SH_2))
# If this is the initial split we actually need to sign the transaction
# Otherwise we just need to insert the proper ScriptSig
if (initial_split):
completetx = from_node.signrawtransactionwithwallet(tx.serialize().hex())["hex"]
else:
tx.vin[0].scriptSig = SCRIPT_SIG[prevtxout["vout"]]
completetx = tx.serialize().hex()
txid = from_node.sendrawtransaction(hexstring=completetx, maxfeerate=0)
txouts.append({"txid": txid, "vout": 0, "amount": half_change})
txouts.append({"txid": txid, "vout": 1, "amount": rem_change})
def check_raw_estimates(node, fees_seen):
"""Call estimaterawfee and verify that the estimates meet certain invariants."""
delta = 1.0e-6 # account for rounding error
for i in range(1, 26):
for _, e in node.estimaterawfee(i).items():
feerate = float(e["feerate"])
assert_greater_than(feerate, 0)
if feerate + delta < min(fees_seen) or feerate - delta > max(fees_seen):
raise AssertionError(f"Estimated fee ({feerate}) out of range ({min(fees_seen)},{max(fees_seen)})")
def check_smart_estimates(node, fees_seen):
"""Call estimatesmartfee and verify that the estimates meet certain invariants."""
delta = 1.0e-6 # account for rounding error
last_feerate = float(max(fees_seen))
all_smart_estimates = [node.estimatesmartfee(i) for i in range(1, 26)]
for i, e in enumerate(all_smart_estimates): # estimate is for i+1
feerate = float(e["feerate"])
assert_greater_than(feerate, 0)
if feerate + delta < min(fees_seen) or feerate - delta > max(fees_seen):
raise AssertionError(f"Estimated fee ({feerate}) out of range ({min(fees_seen)},{max(fees_seen)})")
if feerate - delta > last_feerate:
raise AssertionError(f"Estimated fee ({feerate}) larger than last fee ({last_feerate}) for lower number of confirms")
last_feerate = feerate
if i == 0:
assert_equal(e["blocks"], 2)
else:
assert_greater_than_or_equal(i + 1, e["blocks"])
def check_estimates(node, fees_seen):
check_raw_estimates(node, fees_seen)
check_smart_estimates(node, fees_seen)
class EstimateFeeTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 3
# mine non-standard txs (e.g. txs with "dust" outputs)
# Force fSendTrickle to true (via whitelist.noban)
self.extra_args = [
["-acceptnonstdtxn", "-whitelist=noban@127.0.0.1"],
["-acceptnonstdtxn", "-whitelist=noban@127.0.0.1", "-blockmaxweight=68000"],
["-acceptnonstdtxn", "-whitelist=noban@127.0.0.1", "-blockmaxweight=32000"],
]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self):
"""
        We'll set up the network to have 3 nodes that all mine with different parameters.
But first we need to use one node to create a lot of outputs
which we will use to generate our transactions.
"""
self.add_nodes(3, extra_args=self.extra_args)
# Use node0 to mine blocks for input splitting
        # Node1 mines small blocks that are nevertheless bigger than the expected transaction rate.
# NOTE: the CreateNewBlock code starts counting block weight at 4,000 weight,
# (68k weight is room enough for 120 or so transactions)
# Node2 is a stingy miner, that
# produces too small blocks (room for only 55 or so transactions)
self.start_nodes()
self.import_deterministic_coinbase_privkeys()
self.stop_nodes()
def transact_and_mine(self, numblocks, mining_node):
min_fee = Decimal("0.00001")
# We will now mine numblocks blocks generating on average 100 transactions between each block
# We shuffle our confirmed txout set before each set of transactions
# small_txpuzzle_randfee will use the transactions that have inputs already in the chain when possible
# resorting to tx's that depend on the mempool when those run out
for _ in range(numblocks):
random.shuffle(self.confutxo)
for _ in range(random.randrange(100 - 50, 100 + 50)):
from_index = random.randint(1, 2)
(txhex, fee) = small_txpuzzle_randfee(self.nodes[from_index], self.confutxo,
self.memutxo, Decimal("0.005"), min_fee, min_fee)
tx_kbytes = (len(txhex) // 2) / 1000.0
self.fees_per_kb.append(float(fee) / tx_kbytes)
self.sync_mempools(wait=.1)
mined = mining_node.getblock(mining_node.generate(1)[0], True)["tx"]
self.sync_blocks(wait=.1)
# update which txouts are confirmed
newmem = []
for utx in self.memutxo:
if utx["txid"] in mined:
self.confutxo.append(utx)
else:
newmem.append(utx)
self.memutxo = newmem
def run_test(self):
self.log.info("This test is time consuming, please be patient")
self.log.info("Splitting inputs so we can generate tx's")
# Start node0
self.start_node(0)
self.txouts = []
self.txouts2 = []
# Split a coinbase into two transaction puzzle outputs
split_inputs(self.nodes[0], self.nodes[0].listunspent(0), self.txouts, True)
# Mine
while len(self.nodes[0].getrawmempool()) > 0:
self.nodes[0].generate(1)
# Repeatedly split those 2 outputs, doubling twice for each rep
# Use txouts to monitor the available utxo, since these won't be tracked in wallet
reps = 0
while reps < 5:
# Double txouts to txouts2
while len(self.txouts) > 0:
split_inputs(self.nodes[0], self.txouts, self.txouts2)
while len(self.nodes[0].getrawmempool()) > 0:
self.nodes[0].generate(1)
# Double txouts2 to txouts
while len(self.txouts2) > 0:
split_inputs(self.nodes[0], self.txouts2, self.txouts)
while len(self.nodes[0].getrawmempool()) > 0:
self.nodes[0].generate(1)
reps += 1
self.log.info("Finished splitting")
# Now we can connect the other nodes, didn't want to connect them earlier
# so the estimates would not be affected by the splitting transactions
self.start_node(1)
self.start_node(2)
self.connect_nodes(1, 0)
self.connect_nodes(0, 2)
self.connect_nodes(2, 1)
self.sync_all()
self.fees_per_kb = []
self.memutxo = []
self.confutxo = self.txouts # Start with the set of confirmed txouts after splitting
self.log.info("Will output estimates for 1/2/3/6/15/25 blocks")
for _ in range(2):
self.log.info("Creating transactions and mining them with a block size that can't keep up")
# Create transactions and mine 10 small blocks with node 2, but create txs faster than we can mine
self.transact_and_mine(10, self.nodes[2])
check_estimates(self.nodes[1], self.fees_per_kb)
self.log.info("Creating transactions and mining them at a block size that is just big enough")
# Generate transactions while mining 10 more blocks, this time with node1
# which mines blocks with capacity just above the rate that transactions are being created
self.transact_and_mine(10, self.nodes[1])
check_estimates(self.nodes[1], self.fees_per_kb)
# Finish by mining a normal-sized block:
while len(self.nodes[1].getrawmempool()) > 0:
self.nodes[1].generate(1)
self.sync_blocks(self.nodes[0:3], wait=.1)
self.log.info("Final estimates after emptying mempools")
check_estimates(self.nodes[1], self.fees_per_kb)
self.log.info("Testing that fee estimation is disabled in blocksonly.")
self.restart_node(0, ["-blocksonly"])
assert_raises_rpc_error(-32603, "Fee estimation disabled",
self.nodes[0].estimatesmartfee, 2)
if __name__ == '__main__':
EstimateFeeTest().main()
|
from __future__ import absolute_import, unicode_literals
from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES
from draftjs_exporter.defaults import BLOCK_MAP
TERMS_BLOCK_ID = 'TERMS_AND_CONDITIONS_TEXT'
DRAFT_BLOCK_TYPE_H3 = {'label': 'H3', 'type': BLOCK_TYPES.HEADER_THREE}
DRAFT_BLOCK_TYPE_H4 = {'label': 'H4', 'type': BLOCK_TYPES.HEADER_FOUR}
DRAFT_BLOCK_TYPE_UL = {'label': 'UL', 'type': BLOCK_TYPES.UNORDERED_LIST_ITEM, 'icon': 'icon-list-ul'}
DRAFT_BLOCK_TYPE_OL = {'label': 'OL', 'type': BLOCK_TYPES.ORDERED_LIST_ITEM, 'icon': 'icon-list-ol'}
DRAFT_BLOCK_TYPE_TERMS = {'label': 'T&Cs', 'type': TERMS_BLOCK_ID, 'element': 'div', 'class': 'legals'}
DRAFT_INLINE_STYLE_BOLD = {'label': 'Bold', 'type': INLINE_STYLES.BOLD, 'icon': 'icon-bold'}
DRAFT_INLINE_STYLE_ITALIC = {'label': 'Italic', 'type': INLINE_STYLES.ITALIC, 'icon': 'icon-italic'}
DRAFT_IMAGE_FORMATS = '__all__'
DRAFT_ENTITY_TYPE_IMAGE = {
'label': 'Image',
'type': ENTITY_TYPES.IMAGE,
'icon': 'icon-image',
'imageFormats': DRAFT_IMAGE_FORMATS,
'source': 'ImageSource',
'decorator': 'Image',
}
DRAFT_ENTITY_TYPE_EMBED = {
'label': 'Embed',
'type': ENTITY_TYPES.EMBED,
'icon': 'icon-media',
'source': 'EmbedSource',
'decorator': 'Embed',
}
DRAFT_ENTITY_TYPE_LINK = {
'label': 'Link',
'type': ENTITY_TYPES.LINK,
'icon': 'icon-link',
'source': 'LinkSource',
'decorator': 'Link',
}
DRAFT_ENTITY_TYPE_DOCUMENT = {
'label': 'Document',
'type': ENTITY_TYPES.DOCUMENT,
'icon': 'icon-doc-full',
'source': 'DocumentSource',
'decorator': 'Document',
}
BUTTON_ENTITY_ID = 'BUTTON'
DRAFT_ENTITY_TYPE_BUTTON = {
'label': 'Button',
'type': BUTTON_ENTITY_ID,
'icon': 'icon-link',
'source': 'LinkSource',
'decorator': 'ButtonDecorator',
}
WAGTAILADMIN_RICH_TEXT_EDITORS = {
'simple': {
'WIDGET': 'wagtaildraftail.widgets.DraftailTextArea',
'OPTIONS': {
'enableHorizontalRule': True,
'enableLineBreak': False,
'entityTypes': [
DRAFT_ENTITY_TYPE_LINK,
DRAFT_ENTITY_TYPE_DOCUMENT,
],
'blockTypes': [
DRAFT_BLOCK_TYPE_H3,
DRAFT_BLOCK_TYPE_UL,
DRAFT_BLOCK_TYPE_TERMS,
],
'inlineStyles': [
DRAFT_INLINE_STYLE_BOLD,
DRAFT_INLINE_STYLE_ITALIC,
],
}
},
'default_draftail': {
'WIDGET': 'wagtaildraftail.widgets.DraftailTextArea',
'OPTIONS': {
'enableHorizontalRule': True,
'enableLineBreak': False,
'entityTypes': [
DRAFT_ENTITY_TYPE_IMAGE,
DRAFT_ENTITY_TYPE_EMBED,
DRAFT_ENTITY_TYPE_LINK,
DRAFT_ENTITY_TYPE_DOCUMENT,
DRAFT_ENTITY_TYPE_BUTTON,
],
'blockTypes': [
DRAFT_BLOCK_TYPE_H3,
DRAFT_BLOCK_TYPE_H4,
DRAFT_BLOCK_TYPE_UL,
DRAFT_BLOCK_TYPE_OL,
DRAFT_BLOCK_TYPE_TERMS,
],
'inlineStyles': [
DRAFT_INLINE_STYLE_BOLD,
DRAFT_INLINE_STYLE_ITALIC,
],
}
},
# Wagtail dependencies
'default': {
'WIDGET': 'wagtail.wagtailadmin.rich_text.HalloRichTextArea'
},
'custom': {
'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'
},
}
DRAFT_EXPORTER_ENTITY_DECORATORS = {
ENTITY_TYPES.LINK: 'wagtaildraftail.decorators.Link',
ENTITY_TYPES.DOCUMENT: 'wagtaildraftail.decorators.Document',
ENTITY_TYPES.IMAGE: 'wagtaildraftail.decorators.Image',
ENTITY_TYPES.EMBED: 'wagtaildraftail.decorators.Embed',
ENTITY_TYPES.HORIZONTAL_RULE: 'wagtaildraftail.decorators.HR',
BUTTON_ENTITY_ID: 'home.decorators.Button',
}
DRAFT_EXPORTER_COMPOSITE_DECORATORS = [
'wagtaildraftail.decorators.BR',
]
DRAFT_EXPORTER_BLOCK_MAP = dict(BLOCK_MAP, **{
BLOCK_TYPES.UNORDERED_LIST_ITEM: {
'element': 'li',
'wrapper': 'ul',
'wrapper_props': {'class': 'list-styled'},
},
BLOCK_TYPES.ORDERED_LIST_ITEM: {
'element': 'li',
'wrapper': 'ol',
'wrapper_props': {'class': 'list-numbered'},
},
TERMS_BLOCK_ID: {
'element': 'p',
'props': {'class': 'legals'},
},
})
|
"""
This file uses Python nose to test the correctness of writing and reading parameter files.
To run the test, a 'parameters' directory should exist in the 'tests' directory with all .par files (with the correct names).
The parameters directory is iterated, and .par files are read (using the suitable param classes). Then they are written back, and the files are compared.
"""
import sys, os
sys.path += ['..']
import parameters
referenceParamsDir = "testing_fodder/parameters"
testParamsDir = "parameters_test"
def paramsFactory(filename, n_img, n_pts = 4):
if filename == "ptv.par":
return parameters.PtvParams()
if filename == "cal_ori.par":
return parameters.CalOriParams(n_img)
if filename == "sequence.par":
return parameters.SequenceParams(n_img)
if filename == "criteria.par":
return parameters.CriteriaParams()
if filename == "targ_rec.par":
return parameters.TargRecParams(n_img)
if filename == "man_ori.par":
return parameters.ManOriParams(n_img, n_pts)
if filename == "detect_plate.par":
return parameters.DetectPlateParams()
if filename == "orient.par":
return parameters.OrientParams()
if filename == "track.par":
return parameters.TrackingParams()
if filename == "pft_version.par":
return parameters.PftVersionParams()
if filename == "examine.par":
return parameters.ExamineParams()
if filename == "dumbbell.par":
return parameters.DumbbellParams()
if filename == "shaking.par":
return parameters.ShakingParams()
return None
def setup_func():
if not os.path.exists(testParamsDir):
os.mkdir(testParamsDir)
def teardown_func():
#print "Finished testing", __file__
pass
def testReadWrite():
#get n_img from ptv.par
ptvParams = parameters.PtvParams()
ptvParams.path = referenceParamsDir
ptvParams.read()
n_img = ptvParams.n_img
n_pts = 4
#loop all .par files in the parameters directory
paramdirfiles = os.listdir(referenceParamsDir)
for paramfile in paramdirfiles:
if paramfile.strip()[-4:] == '.par':
yield checkSingleParamFile, paramfile, n_img, n_pts
(testReadWrite.setup, testReadWrite.teardown) = (setup_func, teardown_func)
def checkSingleParamFile(paramfile, n_img, n_pts):
params = paramsFactory(paramfile, n_img, n_pts)
if params is None:
print "No parameters class found for file %s." % (paramfile)
assert False
return
params.path = referenceParamsDir
referenceFile = params.filepath()
try:
params.read()
except:
print "Error reading %s from %s:" % (paramfile, params.path), sys.exc_info()
assert False
params.path = testParamsDir
testFile = params.filepath()
try:
params.write()
except:
print "Error writing %s to %s:" % (paramfile, params.path), sys.exc_info()
assert False
if not compareFiles(referenceFile, testFile, True):
assert False
def compareFiles(f1, f2, verbose = False):
readlines = lambda f: file(f, "r").readlines()
lns1 = readlines(f1)
lns2 = readlines(f2)
def getLen(lns):
for i in reversed(range(len(lns))):
if lns[i].strip() != '':
return i
return 0
nlns1 = getLen(lns1)
nlns2 = getLen(lns2)
if nlns1 != nlns2:
if verbose:
print "Files %s and %s have different amount of lines (%d/%d)." % (f1, f2, nlns1, nlns2)
return False
for n in range(nlns1):
l1 = lns1[n].strip()
l2 = lns2[n].strip()
if l1 != l2:
try: #check for float values
if abs(float(l1)-float(l2)) > 1e-10: #l1!=l2
raise
except:
if verbose:
print "Files %s and %s differ at line %d (%s/%s)" % (f1, f2, n+1, l1, l2)
return False
return True
|
from nose.tools import eq_
from ...element_iterator import ElementIterator
from ..namespace import Namespace
from ..page import Page
def test_page():
XML = """
<page>
<title>AccessibleComputing</title>
<ns>0</ns>
<id>10</id>
<redirect title="Computer accessibility" />
<revision>
<id>233192</id>
<timestamp>2001-01-21T02:12:21Z</timestamp>
<contributor>
<username>RoseParks</username>
<id>99</id>
</contributor>
<comment>*</comment>
<model>wikitext</model>
<format>text/x-wiki</format>
<text xml:space="preserve">Text of rev 233192</text>
<sha1>8kul9tlwjm9oxgvqzbwuegt9b2830vw</sha1>
</revision>
<revision>
<id>862220</id>
<parentid>233192</parentid>
<timestamp>2002-02-25T15:43:11Z</timestamp>
<contributor>
<username>Conversion script</username>
<id>0</id>
</contributor>
<minor />
<comment>Automated conversion</comment>
<model>wikitext</model>
<format>text/x-wiki</format>
<text xml:space="preserve">Text of rev 862220</text>
<sha1>i8pwco22fwt12yp12x29wc065ded2bh</sha1>
</revision>
</page>
"""
page = Page.from_element(ElementIterator.from_string(XML))
eq_(page.id, 10)
eq_(page.title, "AccessibleComputing")
eq_(page.namespace, 0)
eq_(page.redirect, "Computer accessibility")
eq_(page.restrictions, []) # Should be known to be empty
revision = next(page)
eq_(revision.id, 233192)
eq_(revision.page, page)
revision = next(page)
eq_(revision.id, 862220)
def test_old_page():
XML = """
<page>
<title>Talk:AccessibleComputing</title>
<id>10</id>
<redirect title="Computer accessibility" />
<revision>
<id>233192</id>
<timestamp>2001-01-21T02:12:21Z</timestamp>
<contributor>
<username>RoseParks</username>
<id>99</id>
</contributor>
<comment>*</comment>
<model>wikitext</model>
<format>text/x-wiki</format>
<text xml:space="preserve">Text of rev 233192</text>
<sha1>8kul9tlwjm9oxgvqzbwuegt9b2830vw</sha1>
</revision>
</page>
"""
page = Page.from_element(ElementIterator.from_string(XML),
{"Talk": Namespace(1, "Talk")})
eq_(page.namespace, 1)
def test_page_with_discussion():
XML = """
<page>
<title>Talk:AccessibleComputing</title>
<ns>90</ns>
<id>10</id>
<redirect title="Computer accessibility" />
<DiscussionThreading>
<ThreadSubject>Foo</ThreadSubject>
<ThreadParent>1</ThreadParent>
<ThreadAncestor>2</ThreadAncestor>
<ThreadPage>Bar</ThreadPage>
<ThreadPage>3</ThreadPage>
<ThreadAuthor>Baz</ThreadAuthor>
<ThreadEditStatus>Herp</ThreadEditStatus>
<ThreadType>Derp</ThreadType>
</DiscussionThreading>
<revision>
<id>862220</id>
<parentid>233192</parentid>
<timestamp>2002-02-25T15:43:11Z</timestamp>
<contributor>
<username>Conversion script</username>
<id>0</id>
</contributor>
<minor />
<comment>Automated conversion</comment>
<model>wikitext</model>
<format>text/x-wiki</format>
<text xml:space="preserve">Text of rev 862220</text>
<sha1>i8pwco22fwt12yp12x29wc065ded2bh</sha1>
</revision>
</page>
"""
page = Page.from_element(ElementIterator.from_string(XML),
{"Talk": Namespace(1, "Talk")})
eq_(page.namespace, 1)
revision = next(page)
eq_(revision.id, 862220)
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0151_auto_20180902_0409'),
]
operations = [
migrations.AddField(
model_name='event',
name='open_TTT_applications',
field=models.BooleanField(blank=True, default=False, help_text="If this event is TTT, you can mark it as 'open applications' which means that people not associated with this event's member sites can also take part in this event.", verbose_name='TTT Open applications'),
),
]
|
from swgpy.object import *
def create(kernel):
result = Weapon()
result.template = "object/weapon/melee/2h_sword/crafted_saber/shared_sword_lightsaber_two_handed_s7_gen3.iff"
result.attribute_template_id = 10
result.stfName("weapon_name","sword_lightsaber_2h_type7")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/loot/bestine/shared_bestine_painting_schematic_golden_flower_03.iff"
result.attribute_template_id = -1
result.stfName("craft_furniture_ingredients_n","painting_schematic_golden_flower_03")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
import os
import unittest
from ingenico.connect.sdk.log.logging_util import LoggingUtil
from tests import file_utils
class LoggingUtilTest(unittest.TestCase):
"""Tests if the log util is capable of obfuscating headers and bodies of requests"""
def test_obfuscate_body_none_as_body(self):
"""Test that the obfuscate_body function can deal with a body = none and produce a result of none"""
body = None
obfuscated_body = LoggingUtil.obfuscate_body(body)
self.assertIsNone(obfuscated_body)
def test_obfuscate_body_empty(self):
"""Tests if the logging util is capable of obfuscating an empty body"""
body = ""
obfuscated_body = LoggingUtil.obfuscate_body(body)
self.assertEqual("", obfuscated_body)
def test_obfuscate_body_card(self):
"""Tests that the obfuscate_body correctly obfuscates a json containing payment card card data"""
self.obfuscate_body_match("bodyWithCardOriginal.json",
"bodyWithCardObfuscated.json")
def test_obfuscate_body_iban(self):
"""Tests that the obfuscate_body correctly obfuscates a json containing an iban number"""
self.obfuscate_body_match("bodyWithIbanOriginal.json",
"bodyWithIbanObfuscated.json")
def test_obfuscate_body_bin(self):
"""Tests that the obfuscate_body correctly obfuscates a json containing bin data"""
self.obfuscate_body_match("bodyWithBinOriginal.json",
"bodyWithBinObfuscated.json")
def test_obfuscate_body_nothing(self):
"""Tests that the obfuscate_body function does not touch data that does not need to be obfuscated"""
self.obfuscate_body_no_match("bodyNoObfuscation.json")
def test_obfuscate_body_object(self):
"""Tests that the obfuscate_body correctly obfuscates a json containing an object that does not need to be obfuscated"""
self.obfuscate_body_match("bodyWithObjectOriginal.json",
"bodyWithObjectObfuscated.json")
def obfuscate_body_match(self, original_resource, obfuscated_resource):
"""Tests that the LoggingUtil obfuscates the json in original_resource to the json in obfuscated_resource
original_resource is the path to a json file that contains one or more data entries to be obfuscated
obfuscated_resource is the path to a json file containing the expected obfuscated json result of the obfuscation
"""
body = _read_resource(original_resource)
expected = _read_resource(obfuscated_resource)
obfuscated_body = LoggingUtil.obfuscate_body(body)
self.assertEqual(expected, obfuscated_body)
def obfuscate_body_no_match(self, resource):
"""Tests that the LoggingUtil does not obfuscate the json given in the resource file
resource is the path to a json file that contains no data that should be obfuscated
"""
self.obfuscate_body_match(resource, resource)
def test_obfuscate_header(self):
"""Tests that any authorization headers get obfuscated, while others do not"""
self.obfuscate_header_match("Authorization",
"Basic QWxhZGRpbjpPcGVuU2VzYW1l",
"********")
self.obfuscate_header_match("authorization",
"Basic QWxhZGRpbjpPcGVuU2VzYW1l",
"********")
self.obfuscate_header_match("AUTHORIZATION",
"Basic QWxhZGRpbjpPcGVuU2VzYW1l",
"********")
self.obfuscate_header_match("X-GCS-Authentication-Token", "foobar",
"********")
self.obfuscate_header_match("x-gcs-authentication-token", "foobar",
"********")
self.obfuscate_header_match("X-GCS-AUTHENTICATION-TOKEN", "foobar",
"********")
self.obfuscate_header_match("X-GCS-Callerpassword", "foobar",
"********")
self.obfuscate_header_match("x-gcs-callerpassword", "foobar",
"********")
self.obfuscate_header_match("X-GCS-CALLERPASSWORD", "foobar",
"********")
self.obfuscate_header_no_match("Content-Type", "application/json")
self.obfuscate_header_no_match("content-type", "application/json")
self.obfuscate_header_no_match("CONTENT-TYPE", "application/json")
def obfuscate_header_match(self, name, original_value,
expected_obfuscated_value):
"""Tests that the obfuscator obfuscates the original_value to produce the expected_obfuscated_value"""
obfuscated_value = LoggingUtil.obfuscate_header(name, original_value)
self.assertEqual(expected_obfuscated_value, obfuscated_value)
def obfuscate_header_no_match(self, name, original_value):
"""Tests that the obfuscator leaves the parameter header intact and does not obfuscate anything"""
obfuscated_value = LoggingUtil.obfuscate_header(name, original_value)
self.assertEqual(original_value, obfuscated_value)
def _read_resource(file_name):
    return file_utils.read_file(os.path.join("log", file_name))
if __name__ == '__main__':
unittest.main()
|
import atexit
import ssl
from pyVim.connect import SmartConnect, Disconnect
from vcdriver.config import configurable
_session_id = None
_connection_obj = None
def close():
""" Close the session if exists """
global _session_id, _connection_obj
if _connection_obj:
Disconnect(_connection_obj)
print('Vcenter session with ID {} closed'.format(_session_id))
_session_id = None
_connection_obj = None
@configurable([
('Vsphere Session', 'vcdriver_host'),
('Vsphere Session', 'vcdriver_port'),
('Vsphere Session', 'vcdriver_username'),
('Vsphere Session', 'vcdriver_password'),
])
def connection(**kwargs):
""" Open the session if it does not exist and return the connection """
global _session_id, _connection_obj
if not _connection_obj:
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_NONE
_connection_obj = SmartConnect(
host=kwargs['vcdriver_host'],
port=kwargs['vcdriver_port'],
user=kwargs['vcdriver_username'],
pwd=kwargs['vcdriver_password'],
sslContext=context
)
_session_id = _connection_obj.content.sessionManager.currentSession.key
print('Vcenter session opened with ID {}'.format(_session_id))
atexit.register(close)
return _connection_obj
def id():
"""
Get the session id
:return: The session id
"""
global _session_id
return _session_id
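# Hedged usage sketch (not part of the original module): the session is opened
# lazily on the first connection() call and closed at interpreter exit (or
# explicitly via close()); credentials come from the configurable decorator.
def _example_usage():
    si = connection()     # opens (or reuses) the vcenter session
    print('current session id: {}'.format(id()))
    close()               # optional: atexit would close it anyway
    return si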
|
"""
Assumes the following:
1. That you have installed the noxer using
cd ~/noxer
pip install -e .
2. That the clone of your fork of the noxer-org.github.io
repository is available in your home folder, that is in
~/noxer-org.github.io
"""
import os
import shutil
home = os.path.expanduser('~')
docs = os.path.join(home, 'noxer-org.github.io')
permanent_objects = {'.git', 'LICENSE', 'README.md', '.gitignore'}
for f in os.listdir(docs):
if f in permanent_objects:
continue
obj_loc = os.path.join(docs, f)
if os.path.isdir(obj_loc):
shutil.rmtree(obj_loc)
else:
os.remove(obj_loc)
os.system('pdoc --html --html-dir '+docs+' noxer')
subf = os.path.join(docs, 'noxer')
os.system('mv ' + subf + '/* ' + docs)
shutil.rmtree(subf)
|
import xlrd
from datetime import datetime
from slugify import slugify
def make_headers(worksheet):
"""Make headers"""
headers = {}
cell_idx = 0
while cell_idx < worksheet.ncols:
cell_type = worksheet.cell_type(0, cell_idx)
cell_value = worksheet.cell_value(0, cell_idx)
cell_value = slugify(cell_value).replace('-', '_')
if cell_type == 1:
headers[cell_idx] = cell_value
cell_idx += 1
return headers
def clean_data(worksheet, writer, headers, datemode):
row_idx = 1
while row_idx < worksheet.nrows:
cell_idx = 0
row_dict = {}
while cell_idx < worksheet.ncols:
try:
header = headers[cell_idx]
except KeyError:
cell_idx += 1
continue
if header == "ship_date":
# clean date
try:
cell_value = int(worksheet.cell_value(row_idx, cell_idx))
if cell_value > 20000000:
# turn into string and parse as YYYYMMDD
cell_value = str(cell_value)
cell_value = datetime.strptime(cell_value, "%Y%m%d")
else:
parts = xlrd.xldate_as_tuple(cell_value, datemode)
cell_value = datetime(*parts)
except ValueError:
cell_value = None
elif header == 'nsn':
cell_value = str(worksheet.cell_value(row_idx, cell_idx))
id_prefix = cell_value.split('-')[0]
row_dict['federal_supply_class'] = id_prefix
federal_supply_category = id_prefix[:2]
row_dict['federal_supply_category'] = federal_supply_category
elif header == "quantity":
try:
cell_value = int(worksheet.cell_value(row_idx, cell_idx))
except ValueError:
cell_value = None
else:
try:
# Strings
cell_value = worksheet.cell_value(row_idx, cell_idx).strip()
if cell_value.startswith('DESC='):
cell_value = cell_value[5:]
except AttributeError:
# Numbers
cell_value = worksheet.cell_value(row_idx, cell_idx)
row_dict[header] = cell_value
cell_idx += 1
writer.writerow(row_dict)
row_idx += 1
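# Hedged usage sketch (assumption, not part of the original script): wiring the
# two helpers to a csv.DictWriter; the workbook and output paths are
# hypothetical, and the NSN-derived columns are listed explicitly because
# clean_data() adds them to every row.
def _example_convert(xls_path, csv_path):
    import csv
    book = xlrd.open_workbook(xls_path)
    sheet = book.sheet_by_index(0)
    headers = make_headers(sheet)
    fieldnames = list(headers.values()) + ['federal_supply_class',
                                           'federal_supply_category']
    with open(csv_path, 'w') as out:
        writer = csv.DictWriter(out, fieldnames=fieldnames)
        writer.writeheader()
        clean_data(sheet, writer, headers, book.datemode)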
|
import CatalogItem
import time
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from otp.otpbase import OTPLocalizer
from direct.interval.IntervalGlobal import *
from toontown.toontowngui import TTDialog
from toontown.estate import GardenTutorial
class CatalogGardenStarterItem(CatalogItem.CatalogItem):
def makeNewItem(self):
CatalogItem.CatalogItem.makeNewItem(self)
def getPurchaseLimit(self):
return 0
def reachedPurchaseLimit(self, avatar):
if self in avatar.onOrder or self in avatar.mailboxContents or self in avatar.onGiftOrder or self in avatar.awardMailboxContents or self in avatar.onAwardOrder or hasattr(avatar, 'gardenStarted') and avatar.getGardenStarted():
return 1
return 0
def saveHistory(self):
return 1
def getTypeName(self):
return TTLocalizer.GardenStarterTypeName
def getName(self):
return TTLocalizer.GardenStarterTypeName
def recordPurchase(self, avatar, optional):
if avatar:
estate = simbase.air.estateManager._lookupEstate(avatar.doId)
if estate:
estate.placeStarterGarden(avatar.doId)
else:
print 'starter garden-- something not there'
return ToontownGlobals.P_ItemAvailable
def getPicture(self, avatar):
self.hasPicture = True
scale = 1
heading = 0
pitch = 30
roll = 0
spin = 1
down = -1
modelParent = loader.loadModel('phase_5.5/models/estate/watering_cans')
model = modelParent.find('**/water_canA')
scale = 0.5
heading = 45
return self.makeFrameModel(model, spin)
def output(self, store = -1):
return 'CatalogGardenStarterItem(%s)' % self.formatOptionalData(store)
def compareTo(self, other):
return 0
def getHashContents(self):
return 0
def getBasePrice(self):
return 50
def decodeDatagram(self, di, versionNumber, store):
CatalogItem.CatalogItem.decodeDatagram(self, di, versionNumber, store)
def encodeDatagram(self, dg, store):
CatalogItem.CatalogItem.encodeDatagram(self, dg, store)
def isRental(self):
return 0
def isGift(self):
return 0
def acceptItem(self, mailbox, index, callback):
self.confirmGarden = TTDialog.TTGlobalDialog(doneEvent='confirmGarden', message=TTLocalizer.MessageConfirmGarden, command=Functor(self.handleGardenConfirm, mailbox, index, callback), style=TTDialog.TwoChoice)
self.confirmGarden.show()
def handleGardenConfirm(self, mailbox, index, callback, choice):
if choice > 0:
def handleTutorialDone():
self.gardenTutorial.destroy()
self.gardenTutorial = None
return
self.gardenTutorial = GardenTutorial.GardenTutorial(callback=handleTutorialDone)
if hasattr(mailbox, 'mailboxGui') and mailbox.mailboxGui:
mailbox.acceptItem(self, index, callback)
mailbox.mailboxGui.justExit()
else:
callback(ToontownGlobals.P_UserCancelled, self, index)
if self.confirmGarden:
self.confirmGarden.cleanup()
self.confirmGarden = None
return
|
"""A libusb1-based ADB reimplementation.
ADB was giving us trouble with its client/server architecture, which is great
for users and developers, but not so great for reliable scripting. This will
allow us to more easily catch errors as Python exceptions instead of checking
random exit codes, and all the other great benefits from not going through
subprocess and a network socket.
All timeouts are in milliseconds.
"""
try:
import cStringIO
except ImportError:
import io as cStringIO # Python 3 compatibility
import os
import socket
from . import adb_protocol
from . import common
from . import filesync_protocol
try:
basestring
except NameError:
basestring = str # Python 3 compatibility
try:
import libusb1
# From adb.h
CLASS = 0xFF
SUBCLASS = 0x42
PROTOCOL = 0x01
# pylint: disable=invalid-name
DeviceIsAvailable = common.InterfaceMatcher(CLASS, SUBCLASS, PROTOCOL)
except ImportError:
# no libusb1 support
pass
try:
from M2Crypto import RSA
class M2CryptoSigner(adb_protocol.AuthSigner):
"""AuthSigner using M2Crypto."""
def __init__(self, rsa_key_path):
with open(rsa_key_path + '.pub') as rsa_pub_file:
self.public_key = rsa_pub_file.read()
self.rsa_key = RSA.load_key(rsa_key_path)
def Sign(self, data):
return self.rsa_key.sign(data, 'sha1')
def GetPublicKey(self):
return self.public_key
except ImportError:
#print 'Install M2Crypto in order to use adb debug'
pass
class AdbCommands(object):
"""Exposes adb-like methods for use.
Some methods are more-pythonic and/or have more options.
"""
protocol_handler = adb_protocol.AdbMessage
filesync_handler = filesync_protocol.FilesyncProtocol
@classmethod
def ConnectDevice(
cls, port_path=None, serial=None, default_timeout_ms=None, **kwargs):
"""Convenience function to get an adb device from usb path or serial.
Args:
port_path: The filename of usb port to use.
serial: The serial number of the device to use.
default_timeout_ms: The default timeout in milliseconds to use.
If serial specifies a TCP address:port, then a TCP connection is
used instead of a USB connection.
"""
if serial and ':' in serial:
handle = common.TcpHandle(serial)
else:
handle = common.UsbHandle.FindAndOpen(
DeviceIsAvailable, port_path=port_path, serial=serial,
timeout_ms=default_timeout_ms)
return cls.Connect(handle, **kwargs)
def __init__(self, handle, device_state):
self._handle = handle
self._device_state = device_state
@property
def usb_handle(self):
return self._handle
def Close(self):
self._handle.Close()
@classmethod
def Connect(cls, usb, banner=None, **kwargs):
"""Connect to the device.
Args:
usb: UsbHandle or TcpHandle instance to use.
banner: See protocol_handler.Connect.
**kwargs: See protocol_handler.Connect for kwargs. Includes rsa_keys,
and auth_timeout_ms.
Returns:
An instance of this class if the device connected successfully.
"""
if not banner:
banner = socket.gethostname()
device_state = cls.protocol_handler.Connect(usb, banner=banner, **kwargs)
# Remove banner and colons after device state (state::banner)
device_state = device_state.split(':')[0]
return cls(usb, device_state)
@classmethod
def Devices(cls):
"""Get a generator of UsbHandle for devices available."""
return common.UsbHandle.FindDevices(DeviceIsAvailable)
def GetState(self):
return self._device_state
def Install(self, apk_path, destination_dir=None, timeout_ms=None):
"""Install apk to device.
Doesn't support verifier file, instead allows destination directory to be
overridden.
Arguments:
apk_path: Local path to apk to install.
destination_dir: Optional destination directory. Use /system/app/ for
persistent applications.
timeout_ms: Expected timeout for pushing and installing.
Returns:
The pm install output.
"""
if not destination_dir:
destination_dir = '/data/local/tmp/'
basename = os.path.basename(apk_path)
destination_path = destination_dir + basename
self.Push(apk_path, destination_path, timeout_ms=timeout_ms)
return self.Shell('pm install -r "%s"' % destination_path,
timeout_ms=timeout_ms)
def Push(self, source_file, device_filename, mtime='0', timeout_ms=None):
"""Push source_file to file on device.
Arguments:
source_file: Either a filename or file-like object to push to the device.
device_filename: The filename on the device to write to.
mtime: Optional, modification time to set on the file.
timeout_ms: Expected timeout for any part of the push.
"""
connection = self.protocol_handler.Open(
self._handle, destination='sync:',
timeout_ms=timeout_ms)
if isinstance(source_file, basestring):
source_file = open(source_file)
self.filesync_handler.Push(connection, source_file, device_filename,
mtime=int(mtime))
connection.Close()
def Pull(self, device_filename, dest_file=None, timeout_ms=None):
"""Pull file from device.
Arguments:
device_filename: The filename on the device to pull.
dest_file: If set, a filename or writable file-like object.
timeout_ms: Expected timeout for any part of the pull.
Returns:
The file data if dest_file is not set.
"""
if isinstance(dest_file, basestring):
dest_file = open(dest_file, 'w')
elif not dest_file:
dest_file = cStringIO.StringIO()
connection = self.protocol_handler.Open(
self._handle, destination='sync:',
timeout_ms=timeout_ms)
self.filesync_handler.Pull(connection, device_filename, dest_file)
connection.Close()
try:
# An empty call to cStringIO.StringIO returns an instance of
# cStringIO.OutputType on Python 2.
StringIOType = cStringIO.OutputType
except AttributeError:
    # On Python 3, this object is just an instance of StringIO.
StringIOType = cStringIO.StringIO
if isinstance(dest_file, StringIOType):
return dest_file.getvalue()
def Stat(self, device_filename):
"""Get a file's stat() information."""
connection = self.protocol_handler.Open(self._handle, destination='sync:')
mode, size, mtime = self.filesync_handler.Stat(
connection, device_filename)
connection.Close()
return mode, size, mtime
def List(self, device_path):
"""Return a directory listing of the given path."""
connection = self.protocol_handler.Open(self._handle, destination='sync:')
listing = self.filesync_handler.List(connection, device_path)
connection.Close()
return listing
def Reboot(self, destination=''):
"""Reboot device, specify 'bootloader' for fastboot."""
self.protocol_handler.Open(self._handle, 'reboot:%s' % destination)
def RebootBootloader(self):
"""Reboot device into fastboot."""
self.Reboot('bootloader')
def Remount(self):
"""Remount / as read-write."""
return self.protocol_handler.Command(self._handle, service='remount')
def Root(self):
"""Restart adbd as root on device."""
return self.protocol_handler.Command(self._handle, service='root')
def Shell(self, command, timeout_ms=None):
"""Run command on the device, returning the output."""
return self.protocol_handler.Command(
self._handle, service='shell', command=command,
timeout_ms=timeout_ms)
def StreamingShell(self, command, timeout_ms=None):
"""Run command on the device, returning the output.
Args:
command: the command to run on the target.
timeout_ms: Maximum time to allow the command to run.
Yields:
The responses from the shell command.
"""
return self.protocol_handler.StreamingCommand(
self._handle, service='shell', command=command,
timeout_ms=timeout_ms)
def Logcat(self, options, timeout_ms=None):
"""Run 'shell logcat' and stream the output to stdout."""
return self.protocol_handler.StreamingCommand(
self._handle, service='shell', command='logcat %s' % options,
timeout_ms=timeout_ms)
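# Minimal usage sketch (assumes exactly one matching device is attached and
# that authentication, if enforced, is handled via the rsa_keys kwarg passed
# through Connect; the file paths are hypothetical):
#
#   device = AdbCommands.ConnectDevice()                            # USB
#   # device = AdbCommands.ConnectDevice(serial='127.0.0.1:5555')   # or TCP
#   print(device.Shell('getprop ro.build.version.release'))
#   device.Push('local.bin', '/data/local/tmp/local.bin')
#   data = device.Pull('/data/local/tmp/local.bin')
#   device.Close()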
|
from ddt import data, ddt
from rest_framework import status, test
from waldur_core.quotas.tests import factories
from waldur_core.structure import models as structure_models
from waldur_core.structure.tests import fixtures as structure_fixtures
@ddt
class QuotaUpdateTest(test.APITransactionTestCase):
def setUp(self):
super(QuotaUpdateTest, self).setUp()
self.fixture = structure_fixtures.ServiceFixture()
self.quota_name = structure_models.Customer.Quotas.nc_project_count
self.quota = self.fixture.customer.quotas.get(name=self.quota_name)
self.quota.usage = 10
self.quota.save()
self.url = factories.QuotaFactory.get_url(self.quota)
def test_staff_can_set_quota_limit(self):
self.client.force_authenticate(self.fixture.staff)
response = self.client.put(self.url, {'limit': self.quota.usage + 1})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['limit'], self.quota.usage + 1)
@data('global_support', 'owner')
def test_other_users_can_not_set_quota_limit(self, user):
self.client.force_authenticate(getattr(self.fixture, user))
response = self.client.put(self.url, {'limit': self.quota.usage + 1})
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_quota_limit_should_not_be_less_than_usage(self):
self.client.force_authenticate(self.fixture.staff)
response = self.client.put(self.url, {'limit': self.quota.usage - 1})
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
from flask import request, redirect, url_for, render_template
from love_airbnb import app
from love_airbnb.models import Ad
from love_airbnb.utils import generate_ad
@app.route('/')
def index():
return render_template('index.html')
@app.route('/ad', methods=['GET'])
def opps_shouldnt_get():
return redirect(url_for('index'))
@app.errorhandler(404)
def page_not_found(e):
ad = Ad('User',
'404: This page was not found. ' +
'Guess you should be more careful...',
'The Webmaster')
ad_img = generate_ad(ad)
return render_template('ad.html', ad_img=ad_img), 404
@app.errorhandler(500)
def server_error(e):
ad = Ad('User',
'500: Opps, so I messed up, ' +
'but you\'ll probably try again anyways',
'The Webmaster')
ad_img = generate_ad(ad)
return render_template('ad.html', ad_img=ad_img), 500
@app.route('/ad', methods=['POST'])
def create_ad():
data = request.form
dear = unicode(data['dear']) or 'San Francisco'
message = unicode(data['message']) or 'Thanks for being a real pal!'
sender = unicode(data['sender']) or 'Airbnb'
ad = Ad(dear, message, sender)
ad = ad.create()
return redirect(url_for('view_ad', ad_id=ad.id))
@app.route('/ad/<ad_id>')
def view_ad(ad_id):
ad = Ad.query.get(ad_id)
ad_img = generate_ad(ad)
return render_template('ad.html', ad_img=ad_img)
|
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
import uuid
from .. import models
class SecurityRulesOperations(object):
"""SecurityRulesOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
:ivar api_version: Client API version. Constant value: "2016-12-01".
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2016-12-01"
self.config = config
def delete(
self, resource_group_name, network_security_group_name, security_rule_name, custom_headers=None, raw=False, **operation_config):
"""Deletes the specified network security rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security
group.
:type network_security_group_name: str
:param security_rule_name: The name of the security rule.
:type security_rule_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules/{securityRuleName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'securityRuleName': self._serialize.url("security_rule_name", security_rule_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
def long_running_send():
request = self._client.delete(url, query_parameters)
return self._client.send(request, header_parameters, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [204, 202, 200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def get(
self, resource_group_name, network_security_group_name, security_rule_name, custom_headers=None, raw=False, **operation_config):
"""Get the specified network security rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security
group.
:type network_security_group_name: str
:param security_rule_name: The name of the security rule.
:type security_rule_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`SecurityRule
<azure.mgmt.network.v2016_12_01.models.SecurityRule>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules/{securityRuleName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'securityRuleName': self._serialize.url("security_rule_name", security_rule_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SecurityRule', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, network_security_group_name, security_rule_name, security_rule_parameters, custom_headers=None, raw=False, **operation_config):
"""Creates or updates a security rule in the specified network security
group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security
group.
:type network_security_group_name: str
:param security_rule_name: The name of the security rule.
:type security_rule_name: str
:param security_rule_parameters: Parameters supplied to the create or
update network security rule operation.
:type security_rule_parameters: :class:`SecurityRule
<azure.mgmt.network.v2016_12_01.models.SecurityRule>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns :class:`SecurityRule
<azure.mgmt.network.v2016_12_01.models.SecurityRule>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules/{securityRuleName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'securityRuleName': self._serialize.url("security_rule_name", security_rule_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(security_rule_parameters, 'SecurityRule')
# Construct and send request
def long_running_send():
request = self._client.put(url, query_parameters)
return self._client.send(
request, header_parameters, body_content, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SecurityRule', response)
if response.status_code == 201:
deserialized = self._deserialize('SecurityRule', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def list(
self, resource_group_name, network_security_group_name, custom_headers=None, raw=False, **operation_config):
"""Gets all security rules in a network security group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security
group.
:type network_security_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`SecurityRulePaged
<azure.mgmt.network.v2016_12_01.models.SecurityRulePaged>`
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.SecurityRulePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.SecurityRulePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
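# Usage sketch (assumes `security_rules` is an instance of this class wired up
# by a configured network management client; the resource group, NSG and rule
# names are hypothetical):
#
#   poller = security_rules.create_or_update(
#       'my-rg', 'my-nsg', 'allow-ssh', rule_parameters)
#   rule = poller.result()       # AzureOperationPoller blocks until completion
#   for r in security_rules.list('my-rg', 'my-nsg'):
#       print(r.name)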
|
"""Communities bundles."""
from invenio.ext.assets import Bundle
js = Bundle(
"js/communities/custom.js",
filters="uglifyjs",
output="communities.js",
weight=91
)
|
from Properties_version import version as __version__
from Properties import *
__doc__ = Properties.__doc__
|
from argparse import ArgumentParser
from yaml import load as yload
import requests
def main(argv=None):
parser = ArgumentParser(description="batch queue workload manager client")
parser.add_argument('-s', '--server', default='127.0.0.1',
help='bqwmd server')
parser.add_argument('-p', '--port', default='5000', help='bqwmd port')
    parser.add_argument('--submit', help='submit jobspec file')  # '-s' is already taken by --server
args = parser.parse_args()
configs = yload(open(args.submit, 'r'))
service_url = "http://{}:{}/v2.0/createReservation".format(args.server,
args.port)
response = requests.post(service_url, json=configs)
print response.text
if __name__ == "__main__":
main()
|
from tcms.settings.test import * # noqa: F403
DATABASES["default"].update( # noqa: F405
{
"ENGINE": "django.db.backends.mysql",
"NAME": "kiwi",
"USER": "kiwi",
"PASSWORD": "kiwi",
"HOST": "127.0.0.1",
"OPTIONS": {
"init_command": "SET sql_mode='STRICT_TRANS_TABLES'",
},
}
)
|
"""syspwd URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from app.views import *
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', index),
url(r'^accounts/login/$', login),
url(r'^passwd_update/$', passwd_update),
url(r'^login_action/$', account_login),
url(r'^update_list/$', update_list),
url(r'^user_add/$', user_add),
url(r'^get_data/$', get_data),
url(r'^prompt/$', prompt),
url(r'^userdel/$', userdel),
url(r'^download/$', download),
]
|
from django.conf.urls.defaults import *
from django.views.static import serve
from omeroweb.webtest import views
import os
urlpatterns = patterns('django.views.generic.simple',
# index 'home page' of the webtest app
url( r'^$', views.index, name='webtest_index' ),
# 'Hello World' example from tutorial on http://trac.openmicroscopy.org.uk/ome/wiki/OmeroWeb
url( r'^dataset/(?P<datasetId>[0-9]+)/$', views.dataset, name="webtest_dataset" ),
# Another simple example - shows a stack preview for an image with multiple Z sections
url( r'^stack_preview/(?P<imageId>[0-9]+)/$', views.stack_preview, name="webtest_stack_preview" ),
# Displays images (one per row) one channel per column in a grid. Params are passed in request, E.g. imageIds
url( r'^split_view_figure/$', views.split_view_figure, name="webtest_split_view_figure"),
url( r'^split_view_figure_plugin/$', views.split_view_figure,
{"template":"webtest/webclient_plugins/split_view_figure.html"}, name="webtest_split_view_figure_plugin"),
url( r'^split_view_fig_include/$', views.split_view_figure,
{"template":"webtest/webclient_plugins/split_view_fig_include.html"}, name="webtest_split_view_fig_include"),
# View a dataset as two panels of images, each with a different rendering setting
url( r'^dataset_split_view/(?P<datasetId>[0-9]+)/', views.dataset_split_view, name='webtest_dataset_split_view' ),
url( r'^dataset_split_include/(?P<datasetId>[0-9]+)/', views.dataset_split_view,
{"template":"webtest/webclient_plugins/dataset_split_include.html"}, name='webtest_dataset_split_include' ),
# view an image in grid with the Z, C, T dimensions split over the x or y axes as chosen by user.
url( r'^image_dimensions/(?P<imageId>[0-9]+)/', views.image_dimensions, name='webtest_image_dimensions' ),
# Viewer overlays individual channels from the same image (or different images) and manipulate them separately..
# translate, scale etc relative to one-another.
url( r'^channel_overlay_viewer/(?P<imageId>[0-9]+)/', views.channel_overlay_viewer, name='webtest_channel_overlay_viewer' ),
# this is the url for rendering planes for the viewer
url( r'^render_channel_overlay/', views.render_channel_overlay, name='webtest_render_channel_overlay' ),
# Show a panel of ROI thumbnails for an image
url( r'^image_rois/(?P<imageId>[0-9]+)/', views.image_rois, name='webtest_image_rois' ),
# post a comment annotation to images. parameters are in request: imageIds=123,234 comment=blah
# ns=Namespace replace=true (replaces existing comment with same ns if found)
url( r'^add_annotations/$', views.add_annotations, name="webtest_add_annotations"),
# examples of using the 'common' templates
url(r'^common/(?P<base_template>[a-z0-9_]+)/', views.common_templates, name='common'),
url( r'^img_detail/(?:(?P<iid>[0-9]+)/)?$', views.image_viewer, name="webtest_image_viewer"),
)
|
from gettext import NullTranslations
from sys import version_info
import base64
import email.mime.text
import gettext
import itertools
import locale
import types
PY3 = version_info.major >= 3
if PY3:
from io import StringIO
from configparser import ConfigParser
import queue
import urllib.parse
import shlex
# functions renamed in py3
Queue = queue.Queue
basestring = unicode = str
filterfalse = itertools.filterfalse
long = int
NullTranslations.ugettext = NullTranslations.gettext
NullTranslations.ungettext = NullTranslations.ngettext
xrange = range
raw_input = input
base64_decodebytes = base64.decodebytes
urlparse = urllib.parse
urllib_quote = urlparse.quote
shlex_quote = shlex.quote
def gettext_setup(t):
_ = t.gettext
P_ = t.ngettext
return (_, P_)
# string helpers
def is_py2str_py3bytes(o):
return isinstance(o, bytes)
def is_py3bytes(o):
return isinstance(o, bytes)
# functions that don't take unicode arguments in py2
ModuleType = lambda m: types.ModuleType(m)
format = locale.format_string
def setlocale(category, loc=None):
locale.setlocale(category, loc)
def write_to_file(f, content):
f.write(content)
def email_mime(body):
return email.mime.text.MIMEText(body)
else:
# functions renamed in py3
from __builtin__ import unicode, basestring, long, xrange, raw_input
from StringIO import StringIO
from ConfigParser import ConfigParser
import Queue
import urllib
import urlparse
import pipes
Queue = Queue.Queue
filterfalse = itertools.ifilterfalse
base64_decodebytes = base64.decodestring
urllib_quote = urllib.quote
shlex_quote = pipes.quote
def gettext_setup(t):
_ = t.ugettext
P_ = t.ungettext
return (_, P_)
# string helpers
def is_py2str_py3bytes(o):
return isinstance(o, str)
def is_py3bytes(o):
return False
# functions that don't take unicode arguments in py2
ModuleType = lambda m: types.ModuleType(m.encode('utf-8'))
def format(percent, *args, **kwargs):
return locale.format(percent.encode('utf-8'), *args, **kwargs)
def setlocale(category, loc=None):
locale.setlocale(category, loc.encode('utf-8'))
def write_to_file(f, content):
f.write(content.encode('utf-8'))
def email_mime(body):
return email.mime.text.MIMEText(body.encode('utf-8'))
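# Sketch of how callers use these shims so the same code runs on Python 2 and 3
# (the importing module name "compat" is hypothetical):
#
#   from compat import StringIO, ConfigParser, shlex_quote, is_py3bytes
#   buf = StringIO()
#   buf.write(u"value")
#   print(shlex_quote(u"a b"))        # 'a b'
#   assert not is_py3bytes(u"text")   # passes on both interpreters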
|
"""
* Perspective.
*
* Move the mouse left and right to change the field of view (fov).
* Click to modify the aspect ratio. The perspective() function
* sets a perspective projection applying foreshortening, making
* distant objects appear smaller than closer ones. The parameters
* define a viewing volume with the shape of truncated pyramid.
* Objects near to the front of the volume appear their actual size,
* while farther objects appear smaller. This projection simulates
* the perspective of the world more accurately than orthographic projection.
* The version of perspective without parameters sets the default
* perspective and the version with four parameters allows the programmer
* to set the area precisely.
"""
def setup():
size(640, 360, P3D)
noStroke()
def draw():
lights()
background(204)
cameraY = height / 2.0
fov = mouseX / float(width) * PI / 2
cameraZ = cameraY / max(1, tan(fov / 2.0))
aspect = float(width) / float(height)
if mousePressed:
aspect = aspect / 2.0
perspective(fov, aspect, cameraZ / 10.0, cameraZ * 10.0)
translate(width / 2 + 30, height / 2, 0)
rotateX(-PI / 6)
rotateY(PI / 3 + mouseY / float(height) * PI)
box(45)
translate(0, 0, -50)
box(30)
|
from csc.conceptnet.models import *
from csc.corpus.models import *
from django.contrib.auth import *
from django.db import transaction
den = Assertion.objects.filter(raw__isnull=True).count()
if den > 0:
batch = Batch(owner=User.objects.get(id=20003),
remarks="creating raw assertions for ruby commons",
progress_den=den)
batch.save()
num = 0
for a in Assertion.objects.filter(raw__isnull=True):
raw = RawAssertion(batch=batch, frame=a.frame, predtype=a.predtype,
text1=a.text1, text2=a.text2, polarity=a.polarity,
modality=a.modality, sentence=a.sentence,
language=a.language, predicate=a)
raw.save()
a.raw = raw
a.save()
num += 1
batch.progress_num = num
batch.save()
print num, '/', den, raw
@transaction.commit_on_success
def switch_raw():
i = 0
for a in Assertion.objects.all().select_related('raw'):
if i % 1000 == 0: print i
i += 1
if (a.language.nl.normalize(a.text1) == a.language.nl.normalize(a.raw.text2) and
a.language.nl.normalize(a.text2) == a.language.nl.normalize(a.raw.text1) and
a.stem1.text != a.stem2.text):
t1, t2 = a.raw.text2, a.raw.text1
a.raw.text1 = t1
a.raw.text2 = t2
a.raw.save()
print a
print a.raw
print
switch_raw()
|
pattern = "*.png"
scm = """
(define (save_indexed_png in_file out_file)
(let*
(
(image (car (gimp-file-load RUN-NONINTERACTIVE in_file in_file)))
(drawable (car (gimp-image-get-active-layer image)))
)
(print in_file)
(if (= (car (gimp-drawable-is-indexed drawable)) FALSE)
(begin
(print "converting")
(gimp-image-convert-indexed image 0 0 255 0 1 "none")
(file-png-save RUN-NONINTERACTIVE image drawable out_file out_file 0 9 1 0 0 1 1)
)
(print "already indexed")
)
(gimp-image-delete image)
)
)
(define (save_indexed_png_glob pattern)
(let*
(
(filelist (cadr (file-glob pattern 1)))
)
(while (not (null? filelist))
(save_indexed_png (car filelist) (car filelist))
(set! filelist (cdr filelist))
)
)
)
(save_indexed_png_glob "%s")
(gimp-quit 0)
""" % (pattern, )
import sys
def y_or_n():
while True:
response = sys.stdin.readline().strip()
if response == 'y': return True
elif response == 'n': return False
else: print "Please enter 'y' or 'n'"
sys.stdout.write("""WARNING:
This script will permanently convert (and reduce the quality of)
all files matching the pattern "%s" in this directory.
Please ensure you have backed up the originals.
PNGs already indexed will not be affected.
Are you sure you wish to proceed? (y/n) """ % (pattern, ))
if y_or_n():
from subprocess import *
from tempfile import TemporaryFile
t = TemporaryFile()
    t.write(scm)
t.seek(0)
p = Popen("gimp -ib -", shell=True, stdin=t)
p.communicate() #wait for completion
else:
print "Conversion aborted."
|
"""
@author: Rinze de Laat
Copyright © 2012-2020 Rinze de Laat, Éric Piel, Delmic
This file is part of Odemis.
Odemis is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License version 2 as published by the Free Software
Foundation.
Odemis is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
Odemis. If not, see http://www.gnu.org/licenses/.
"""
from __future__ import division
from past.builtins import basestring, long
import collections
import cv2
import json
import logging
import math
import numpy
from odemis import model
import re
import yaml
def wave2rgb(wavelength):
"""
Convert a wavelength into a (r,g,b) value
wavelength (0<float): wavelength in m
    return (3-tuple int in 0..255): RGB value
"""
w = wavelength * 1e9
# outside of the visible spectrum, use fixed colour
w = min(max(w, 350), 780)
# colour
if 350 <= w < 440:
r = -(w - 440) / (440 - 350)
g = 0
b = 1
elif 440 <= w < 490:
r = 0
g = (w - 440) / (490 - 440)
b = 1
elif 490 <= w < 510:
r = 0
g = 1
b = -(w - 510) / (510 - 490)
elif 510 <= w < 580:
r = (w - 510) / (580 - 510)
g = 1
b = 0
elif 580 <= w < 645:
r = 1
g = -(w - 645) / (645 - 580)
b = 0
elif 645 <= w <= 780:
r = 1
g = 0
b = 0
else:
logging.warning("Unable to compute RGB for wavelength %d", w)
return int(round(255 * r)), int(round(255 * g)), int(round(255 * b))
def hex_to_rgb(hex_str):
""" Convert a Hexadecimal colour representation into a 3-tuple of RGB integers
:param hex_str: str Colour value of the form '#FFFFFF'
    :rtype : (int, int, int)
"""
if len(hex_str) != 7:
raise ValueError("Invalid HEX colour %s" % hex_str)
hex_str = hex_str[-6:]
return tuple(int(hex_str[i:i + 2], 16) for i in [0, 2, 4])
def hex_to_rgba(hex_str, af=255):
""" Convert a Hexadecimal colour representation into a 4-tuple of RGBA ints
:param hex_str: str Colour value of the form '#FFFFFF'
:param af: int Alpha value in the range [0..255]
    :rtype : (int, int, int, int)
"""
if len(hex_str) != 7:
raise ValueError("Invalid HEX colour %s" % hex_str)
return hex_to_rgb(hex_str) + (af,)
def rgb_to_frgb(rgb):
""" Convert an integer RGB value into a float RGB value
:param rgb: (int, int, int) RGB values in the range [0..255]
:return: (float, float, float)
"""
if len(rgb) != 3:
raise ValueError("Illegal RGB colour %s" % rgb)
return tuple(v / 255.0 for v in rgb)
def rgba_to_frgba(rgba):
""" Convert an integer RGBA value into a float RGBA value
:param rgba: (int, int, int, int) RGBA values in the range [0..255]
:return: (float, float, float, float)
"""
if len(rgba) != 4:
raise ValueError("Illegal RGB colour %s" % rgba)
return tuple(v / 255.0 for v in rgba)
def frgb_to_rgb(frgb):
""" Convert an float RGB value into an integer RGB value
:param frgb: (float, float, float) RGB values in the range [0..1]
:return: (int, int, int)
"""
if len(frgb) != 3:
raise ValueError("Illegal RGB colour %s" % frgb)
return tuple(int(v * 255) for v in frgb)
def frgba_to_rgba(frgba):
""" Convert an float RGBA value into an integer RGBA value
:param rgba: (float, float, float, float) RGBA values in the range [0..1]
:return: (int, int, int, int)
"""
if len(frgba) != 4:
raise ValueError("Illegal RGB colour %s" % frgba)
return tuple(int(v * 255) for v in frgba)
def hex_to_frgb(hex_str):
""" Convert a Hexadecimal colour representation into a 3-tuple of floats
:rtype : (float, float, float)
"""
return rgb_to_frgb(hex_to_rgb(hex_str))
def hex_to_frgba(hex_str, af=1.0):
""" Convert a Hexadecimal colour representation into a 4-tuple of floats
:rtype : (float, float, float, float)
"""
return rgba_to_frgba(hex_to_rgba(hex_str, int(af * 255)))
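# A few concrete values the converters above produce (sketch, easy to check by
# hand):
#
#   hex_to_rgb("#FF8000")          -> (255, 128, 0)
#   hex_to_rgba("#FF8000")         -> (255, 128, 0, 255)
#   rgb_to_frgb((255, 128, 0))     -> (1.0, 0.501..., 0.0)
#   hex_to_frgba("#FF8000", 0.5)   -> (1.0, 0.501..., 0.0, 0.498...)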
def convert_to_object(s):
"""
Tries to convert a string to a (simple) object.
s (str): string that will be converted
return (object) the value contained in the string with the type of the real value
raises
ValueError() if not possible to convert
"""
try:
# be nice and accept list and dict without [] or {}
fixed = s.strip()
if re.match(
r"([-.a-zA-Z0-9_]+\s*:\s+[-.a-zA-Z0-9_]+)(\s*,\s*([-.a-zA-Z0-9_]+\s*:\s+[-.a-zA-Z0-9_]+))*$",
fixed): # a dict?
fixed = "{" + fixed + "}"
elif re.match(r"[-.a-zA-Z0-9_]+(\s*,\s*[-.a-zA-Z0-9_]+)+$", fixed): # a list?
fixed = "[" + fixed + "]"
# We could also use ast.literal_eval() to accept Python syntax instead,
# but as the microscope file is in YAML, it might be easier for the user
# that this follows the same syntax.
return yaml.safe_load(fixed)
except yaml.YAMLError as exc:
logging.error("Syntax error: %s", exc)
# TODO: with Python3: raise from?
raise ValueError("Failed to parse %s" % s)
def boolify(s):
if s == 'True' or s == 'true':
return True
if s == 'False' or s == 'false':
return False
raise ValueError('Not a boolean value: %s' % s)
def reproduce_typed_value(typed_value, str_val):
""" Convert a string to the type of the given typed value
Args:
typed_value: (object) Example value with the type that must be converted to
str_val: (string) String to be converted
Returns:
(object) The converted string value:
Raises:
ValueError: if not possible to convert
TypeError: if type of real value is not supported
"""
if isinstance(typed_value, bool):
return boolify(str_val)
elif isinstance(typed_value, int):
return int(str_val)
elif isinstance(typed_value, float):
return float(str_val)
elif isinstance(typed_value, basestring):
return str_val
# Process dictionaries before matching against Iterables
elif isinstance(typed_value, dict):
# Grab the first key/value pair, to determine their types
if typed_value:
key_typed_val = list(typed_value.keys())[0]
value_typed_val = typed_value[key_typed_val]
else:
logging.warning("Type of attribute is unknown, using string")
key_typed_val = ""
value_typed_val = ""
dict_val = {}
for sub_str in str_val.split(','):
item = sub_str.split(':')
if len(item) != 2:
raise ValueError("Cannot convert '%s' to a dictionary item" % item)
key = reproduce_typed_value(key_typed_val, item[0])
value = reproduce_typed_value(value_typed_val, item[1])
dict_val[key] = value
return dict_val
elif isinstance(typed_value, collections.Iterable):
if typed_value:
typed_val_elm = typed_value[0]
else:
logging.warning("Type of attribute is unknown, using string")
typed_val_elm = ""
# Try to be open-minded if the sub-type is a number (so that things like
# " 3 x 5 px" returns (3, 5)
if isinstance(typed_val_elm, (int, long)):
pattern = r"[+-]?[\d]+" # ex: -15
elif isinstance(typed_val_elm, float):
pattern = r"[+-]?[\d.]+(?:[eE][+-]?[\d]+)?" # ex: -156.41e-9
else:
pattern = "[^,]+"
iter_val = []
for sub_str in re.findall(pattern, str_val):
iter_val.append(reproduce_typed_value(typed_val_elm, sub_str))
# Cast to detected type
final_val = type(typed_value)(iter_val)
return final_val
raise TypeError("Type %r is not supported to convert %s" % (type(typed_value), str_val))
def ensure_tuple(v):
"""
Recursively convert an iterable object into a tuple
v (iterable or object): If it is an iterable, it will be converted into a tuple, and
otherwise it will be returned as is
return (tuple or object): same a v, but a tuple if v was iterable
"""
if isinstance(v, collections.Iterable) and not isinstance(v, basestring):
# convert to a tuple, with each object contained also converted
return tuple(ensure_tuple(i) for i in v)
else:
return v
def get_img_transformation_matrix(md):
"""
Computes the 2D transformation matrix based on the given metadata.
md (dict str -> value): the metadata (of the DataArray) containing MD_PIXEL_SIZE
and possibly also MD_ROTATION and MD_SHEAR.
return (numpy.matrix of 2,2 floats): the 2D transformation matrix
"""
if model.MD_PIXEL_SIZE not in md:
raise ValueError("MD_PIXEL_SIZE must be set")
ps = md[model.MD_PIXEL_SIZE]
rotation = md.get(model.MD_ROTATION, 0.0)
shear = md.get(model.MD_SHEAR, 0.0)
# Y pixel coordinates goes down, but Y coordinates in world goes up
# The '-' before ps[1] is there to make this conversion
ps_mat = numpy.matrix([[ps[0], 0], [0, -ps[1]]])
rcos, rsin = math.cos(rotation), math.sin(rotation)
rot_mat = numpy.matrix([[rcos, -rsin], [rsin, rcos]])
shear_mat = numpy.matrix([[1, 0], [-shear, 1]])
return rot_mat * shear_mat * ps_mat
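# Worked example (sketch): a 1 µm pixel size with a 90° rotation and no shear
# swaps the axes (up to floating-point noise from cos(pi/2) and the Y flip):
#
#   md = {model.MD_PIXEL_SIZE: (1e-6, 1e-6), model.MD_ROTATION: math.pi / 2}
#   get_img_transformation_matrix(md)
#   -> matrix([[~0.0, 1e-6],
#              [1e-6, ~0.0]])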
def get_tile_md_pos(i, tile_size, tileda, origda):
"""
Compute the position of the center of the tile, aka MD_POS.
i (int, int): the tile index (X, Y)
tile_size (int>0, int>0): the standard size of a tile in the (X, Y)
tileda (DataArray): the tile data, with MD_PIXEL_SIZE in its metadata.
It can be smaller than the tile_size in case
origda (DataArray or DataArrayShadow): the original/raw DataArray. If
no MD_POS is provided, the image is considered located at (0,0).
return (float, float): the center position
"""
md = origda.metadata
tile_md = tileda.metadata
md_pos = numpy.asarray(md.get(model.MD_POS, (0.0, 0.0)))
if model.MD_PIXEL_SIZE not in md or model.MD_PIXEL_SIZE not in tile_md:
raise ValueError("MD_PIXEL_SIZE must be set")
orig_ps = numpy.asarray(md[model.MD_PIXEL_SIZE])
tile_ps = numpy.asarray(tile_md[model.MD_PIXEL_SIZE])
dims = md.get(model.MD_DIMS, "CTZYX"[-origda.ndim::])
img_shape = [origda.shape[dims.index('X')], origda.shape[dims.index('Y')]]
img_shape = numpy.array(img_shape, numpy.float)
# center of the image in pixels
img_center = img_shape / 2
tile_shape = [tileda.shape[dims.index('X')], tileda.shape[dims.index('Y')]]
# center of the tile in pixels
tile_center_pixels = numpy.array([
i[0] * tile_size[0] + tile_shape[0]/2,
i[1] * tile_size[1] + tile_shape[1]/2]
)
# convert to the original image coordinates
tile_center_pixels *= tile_ps / orig_ps
# center of the tile relative to the center of the image
tile_rel_to_img_center_pixels = tile_center_pixels - img_center
# calculate the transformation matrix
tmat = get_img_transformation_matrix(md)
# Converts the tile_rel_to_img_center_pixels array of coordinates to a 2 x 1 matrix
# The numpy.matrix(array) function returns a 1 x 2 matrix, so .getT() is called
# to transpose the matrix
tile_rel_to_img_center_pixels = numpy.matrix(tile_rel_to_img_center_pixels).getT()
# calculate the new position of the tile, relative to the center of the image,
# in world coordinates
new_tile_pos_rel = tmat * tile_rel_to_img_center_pixels
new_tile_pos_rel = numpy.ravel(new_tile_pos_rel)
# calculate the final position of the tile, in world coordinates
tile_pos_world_final = md_pos + new_tile_pos_rel
return tuple(tile_pos_world_final)
def get_img_transformation_md(mat, timage, src_img):
"""
Computes the metadata of the transformations from the transformation matrix
It is an approximation, as a 3 x 3 matrix cannot be fully represented only
with translation, scale, rotation and shear (eg, no "keystone" shape possible).
mat (ndarray of shape 3,3): transformation matrix (the OpenCV format).
timage (numpy.array): Transformed image
    src_img (numpy.array): Source image. It should at least contain MD_PIXEL_SIZE
return (dict str value): metadata with MD_POS, MD_PIXEL_SIZE, MD_ROTATION, MD_SHEAR.
raise ValueError: If the transformation matrix is incorrect
"""
# Check the scale is not null (mathematically, it's allowed, meaning that the
# other image is just a single point, but it's very unlikely what the user
# would want to do, and the rest of the code doesn't deal with this corner
# case for now).
if mat[0, 0] * mat[1, 1] * mat[2, 2] == 0:
raise ValueError("Transformation matrix has null scale")
# TODO: for now we use rather convoluted (and reliable) way to convert from
# the transformation matrix to the values, passing by OpenCV. There should
# be a more straightforward mathematical path to achieve the same.
half_size = (timage.shape[1] / 2, timage.shape[0] / 2)
img_src_center = (src_img.shape[1] / 2, src_img.shape[0] / 2)
# project some key points from the original image on the transformed image
points = [
[half_size[0], half_size[1]],
[0.0, 0.0],
[timage.shape[1], 0.0],
[0.0, timage.shape[0]],
]
converted_points = cv2.perspectiveTransform(numpy.array([points]), mat)[0]
center_point = converted_points[0]
top_left_point = converted_points[1]
top_right_point = converted_points[2]
bottom_left_point = converted_points[3]
def length(p1, p2):
dif_x = p2[0] - p1[0]
dif_y = p2[1] - p1[1]
return math.hypot(dif_x, dif_y)
top_length = length(top_left_point, top_right_point)
scale_x = top_length / timage.shape[1]
left_length = length(top_left_point, bottom_left_point)
scale_y = left_length / timage.shape[0]
diag_length = length(bottom_left_point, top_right_point)
# using the law of cosines
corner_ang = math.acos((left_length ** 2 + top_length ** 2 - diag_length ** 2) /
(2 * left_length * top_length))
shear = math.tan(corner_ang - math.pi / 2)
b = mat[0, 1]
d = mat[1, 1]
sin_full = -b / scale_y
cos_full = d / scale_y
rot = math.atan2(sin_full, cos_full)
translation_x = center_point[0] - img_src_center[0]
translation_y = center_point[1] - img_src_center[1]
# TODO: if no MD_PIXEL_SIZE, just provide MD_PIXEL_SIZE_COR?
# The new pixel size
src_img_ps = src_img.metadata.get(model.MD_PIXEL_SIZE)
ps_cor = (scale_x, scale_y)
new_pixel_size = (src_img_ps[0] * ps_cor[0], src_img_ps[1] * ps_cor[1])
# Position in physical coordinates
src_img_pos = src_img.metadata.get(model.MD_POS, (0.0, 0.0))
pos_cor = (translation_x, -translation_y)
pos_cor_phys = (pos_cor[0] * src_img_ps[0], pos_cor[1] * src_img_ps[1])
src_img_rot = src_img.metadata.get(model.MD_ROTATION, 0.0)
src_img_shear = src_img.metadata.get(model.MD_SHEAR, 0.0)
metadata = {
model.MD_POS: (src_img_pos[0] + pos_cor_phys[0],
src_img_pos[1] + pos_cor_phys[1]),
model.MD_PIXEL_SIZE: new_pixel_size,
model.MD_ROTATION: src_img_rot - rot,
model.MD_SHEAR: src_img_shear + shear,
}
return metadata
class JsonExtraEncoder(json.JSONEncoder):
"""Support for data types that JSON default encoder
does not do.
This includes:
* Numpy array or number
* Complex number
* Set
* Bytes
Based on astropy.utils.misc.JsonCustomEncoder.
Use as: json.dumps(obj, cls=JsonExtraEncoder)
"""
def default(self, obj):
if isinstance(obj, (numpy.number, numpy.ndarray)):
return obj.tolist()
elif isinstance(obj, complex):
return [obj.real, obj.imag]
elif isinstance(obj, set):
return list(obj)
elif isinstance(obj, bytes):
return obj.decode()
return json.JSONEncoder.default(self, obj)
|
import sys
import locale
import os
import getpass
import stat
from tempfile import mktemp, gettempdir
def _detect_encoding():
"""
Find correct encoding from default locale.
"""
# Some locales report encodings like 'utf_8_valencia' which Python doesn't
# know. We try stripping off the right-most parts until we find an
# encoding that works.
e = locale.getdefaultlocale()[1]
while e:
try:
''.encode(e)
return e
except LookupError:
pass
if '_' not in e:
break
e = e.rsplit('_', 1)[0]
ENCODING = _detect_encoding() or 'UTF8'
def get_encoding():
"""
Return the current encoding.
"""
return ENCODING
def tobytes(value, encoding=None, desperate=True, coerce=False, fs=False):
"""
Convert (if necessary) the given value to a "string of bytes", agnostic to
any character encoding.
:param value: the value to be converted to a string of bytes
:param encoding: the character set to first try to encode to; if None, will
use the system default (from the locale).
:type encoding: str
:param desperate: if True and encoding to the given (or default) charset
fails, will also try utf-8 and latin-1 (in that order),
and if those fail, will encode to the preferred charset,
replacing unknown characters with \\\\uFFFD.
:type desperate: bool
:param coerce: if True, will coerce numeric types to a bytes object; if
False, such values will be returned untouched.
:type coerce: bool
:param fs: indicates value is a file name or other environment string; if True,
the encoding (if not explicitly specified) will be the encoding
given by ``sys.getfilesystemencoding()`` and the error handler
used will be ``surrogateescape`` if supported.
:type fs: bool
:returns: the value as a bytes object, or the original value if coerce is
False and the value was not a bytes or string.
"""
if isinstance(value, bytes):
# Nothing to do.
return value
elif isinstance(value, (int, float)):
return bytes(str(value), 'ascii') if coerce else value
elif not isinstance(value, str):
# Need to coerce to a unicode before converting to bytes. We can't just
# feed it to bytes() in case the default character set can't encode it.
value = tostr(value, coerce=coerce)
errors = 'strict'
if fs:
if not encoding:
encoding = sys.getfilesystemencoding()
errors = 'surrogateescape'
for c in (encoding or ENCODING, 'utf-8', 'latin-1'):
try:
return value.encode(c, errors)
except UnicodeError:
pass
if not desperate:
raise UnicodeError("Couldn't encode value to bytes (and not desperate enough to keep trying)")
return value.encode(encoding or ENCODING, 'replace')
def tostr(value, encoding=None, desperate=True, coerce=False, fs=False):
"""
Convert (if necessary) the given value to a unicode string.
:param value: the value to be converted to a unicode string
:param encoding: the character set to first try to decode as; if None, will
use the system default (from the locale).
:type encoding: str
:param desperate: if True and decoding to the given (or default) charset
fails, will also try utf-8 and latin-1 (in that order),
and if those fail, will decode as the preferred charset,
replacing unknown characters with \\\\uFFFD.
:type desperate: bool
:param coerce: if True, will coerce numeric types to a unicode string; if
False, such values will be returned untouched.
:type coerce: bool
:param fs: indicates value is a file name or other environment string; if True,
the encoding (if not explicitly specified) will be the encoding
given by ``sys.getfilesystemencoding()`` and the error handler
used will be ``surrogateescape`` if supported.
:type fs: bool
:returns: the value as a unicode string, or the original value if coerce is
False and the value was not a bytes or string.
"""
if isinstance(value, str):
# Nothing to do.
return value
elif isinstance(value, (int, float)):
return str(value) if coerce else value
elif not isinstance(value, (bytearray, bytes)):
# Need to coerce this value. Try the direct approach.
try:
return str(value)
except UnicodeError:
# Could be that value.__repr__ returned a non-unicode and
# non-8bit-clean string. Be a bit more brute force about it.
return tostr(repr(value), desperate=desperate)
errors = 'strict'
if fs:
if not encoding:
encoding = sys.getfilesystemencoding()
errors = 'surrogateescape'
# We now have a bytes object to decode.
for c in (encoding or ENCODING, 'utf-8', 'latin-1'):
try:
return value.decode(c, errors)
except UnicodeError:
pass
if not desperate:
raise UnicodeError("Couldn't decode value to unicode (and not desperate enough to keep trying)")
return value.decode(encoding or ENCODING, 'replace')
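# Round-trip sketch (assumes the detected locale encoding is UTF-8):
#
#   tobytes(u"h\u00e9llo")        -> b'h\xc3\xa9llo'
#   tostr(b'h\xc3\xa9llo')        -> 'héllo'
#   tobytes(42, coerce=True)      -> b'42'
#   tostr(42)                     -> 42    (untouched: coerce defaults to False)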
def utf8(s):
"""
Returns a UTF-8 string, converting from other character sets if
necessary.
"""
return tostr(s).encode('utf-8')
def fsname(s):
"""
Return an object appropriate to represent a filename.
:param s: the filename to decode (if needed)
:returns: a string encoded with surrogateescape
.. note::
This is a convenience function, equivalent to::
tostr(s, fs=True, desperate=False)
"""
return tostr(s, fs=True, desperate=False)
def get_temp_path(appname):
try:
return get_temp_path.paths[appname]
except KeyError:
# create tmp directory for the user
base = gettempdir()
path = os.path.join(base, '{}-{}'.format(appname, getpass.getuser()))
if not os.path.isdir(path):
try:
os.mkdir(path, 0o0700)
except OSError:
# Probably the directory already exists. Verify.
if not os.path.isdir(path):
raise IOError('Security Error: %s is not a directory, aborted' % path)
# On non-Windows, verify the permissions.
if sys.platform != 'win32':
if os.path.islink(path):
raise IOError('Security Error: %s is a link, aborted' % path)
if stat.S_IMODE(os.stat(path).st_mode) % 0o01000 != 0o0700:
raise IOError('Security Error: %s has wrong permissions, aborted' % path)
if os.stat(path)[stat.ST_UID] != os.getuid():
raise IOError('Security Error: %s does not belong to you, aborted' % path)
get_temp_path.paths[appname] = path
return path
get_temp_path.paths = {}
def tempfile(appname, name='', suffix='', unique=True):
"""
Return a filename in a secure tmp directory with the given name.
Name can also be a relative path in the temp directory, directories will
be created if missing. If unique is set, it will return a unique name based
on the given name.
"""
path = get_temp_path(appname)
name = os.path.join(path, name)
if not os.path.isdir(os.path.dirname(name)):
os.mkdir(os.path.dirname(name))
if not unique:
return name
return mktemp(suffix=suffix, prefix=os.path.basename(name), dir=os.path.dirname(name))
def which(file, path=None):
"""
Does what which(1) does: searches the PATH in order for a given file name
and returns the full path to first match.
"""
if not path:
path = os.getenv("PATH")
for p in path.split(":"):
fullpath = os.path.join(p, file)
try:
st = os.stat(fullpath)
except OSError:
continue
if sys.platform == 'win32':
return fullpath
# On non-Windows, ensure the file is both executable and accessible to
# the user.
if os.geteuid() == st[stat.ST_UID]:
mask = stat.S_IXUSR
elif st[stat.ST_GID] in os.getgroups():
mask = stat.S_IXGRP
else:
mask = stat.S_IXOTH
if stat.S_IMODE(st[stat.ST_MODE]) & mask:
return fullpath
return None
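# Example (sketch): locate an executable on the PATH, as which(1) would
#
#   which('ls')                      -> e.g. '/bin/ls', or None if not found
#   which('python', '/usr/bin:/bin') -> first executable match in that path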
def daemonize(stdin=os.devnull, stdout=os.devnull, stderr=None,
pidfile=None, exit=True):
"""
Does a double-fork to daemonize the current process using the technique
described at http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16 .
If exit is True (default), parent exits immediately. If false, caller will receive
the pid of the forked child.
"""
# First fork.
try:
pid = os.fork()
if pid > 0:
if exit:
# Exit from the first parent.
sys.exit(0)
# Wait for child to fork again (otherwise we have a zombie)
os.waitpid(pid, 0)
return pid
except OSError as e:
#log.error("Initial daemonize fork failed: %d, %s\n", e.errno, e.strerror)
sys.exit(1)
os.chdir("/")
os.setsid()
# Second fork.
try:
pid = os.fork()
if pid > 0:
# Exit from the second parent.
sys.exit(0)
except OSError as e:
#log.error("Second daemonize fork failed: %d, %s\n", e.errno, e.strerror)
sys.exit(1)
# Create new standard file descriptors.
if not stderr:
stderr = stdout
stdin = open(stdin, 'r')
stdout = open(stdout, 'a+')
    stderr = open(stderr, 'a+', 1)  # line-buffered; unbuffered text I/O is not allowed
if pidfile:
open(pidfile, 'w+').write("%d\n" % os.getpid())
# Remap standard fds.
os.dup2(stdin.fileno(), sys.stdin.fileno())
os.dup2(stdout.fileno(), sys.stdout.fileno())
os.dup2(stderr.fileno(), sys.stderr.fileno())
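# A minimal usage sketch (paths below are illustrative):
#
#   daemonize(stdout='/var/log/myapp.log', pidfile='/var/run/myapp.pid')
#   # The parent exits; the detached child continues from here with its
#   # standard file descriptors remapped.
#
#   pid = daemonize(exit=False)
#   # With exit=False the original caller keeps running and receives the first
#   # child's pid; the daemonized process also continues (returning None).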
|
"""
Some useful graphics functions
"""
import util
import binary
from numpy import *
from pylab import *
def plotLinearClassifier(h, X, Y):
"""
Draw the current decision boundary, margin and data
"""
figure(1)
plot(X[Y>=0.5,0], X[Y>=0.5,1], 'b+',
X[Y< 0.5,0], X[Y< 0.5,1], 'ro')
axes = figure(1).get_axes()[0]
xlim = axes.get_xlim()
ylim = axes.get_ylim()
xmin = xlim[0] + (xlim[1] - xlim[0]) / 100
xmax = xlim[1] - (xlim[1] - xlim[0]) / 100
ymin = ylim[0] + (ylim[1] - ylim[0]) / 100
ymax = ylim[1] - (ylim[1] - ylim[0]) / 100
if type(h.weights) == ndarray:
b = 0
try: b = h.bias
except AttributeError: pass
w = h.weights
#print b
#print w
# find the zeros along each axis
# w0*l + w1*? + b = 0 ==> ? = -(b + w0*l) / w1
xmin_zero = - (b + w[0] * xmin) / w[1]
xmax_zero = - (b + w[0] * xmax) / w[1]
ymin_zero = - (b + w[1] * ymin) / w[0]
ymax_zero = - (b + w[1] * ymax) / w[0]
#print (ylim, xlim, (xmin_zero, xmax_zero), (ymin_zero, ymax_zero))
# now, two of these should actually be in bounds, figure out which
inBounds = []
if ylim[0] <= xmin_zero and xmin_zero <= ylim[1]:
inBounds.append( (xmin, xmin_zero) )
if ylim[0] <= xmax_zero and xmax_zero <= ylim[1]:
inBounds.append( (xmax, xmax_zero) )
if xlim[0] <= ymin_zero and ymin_zero <= xlim[1]:
inBounds.append( (ymin_zero, ymin) )
if xlim[0] <= ymax_zero and ymax_zero <= xlim[1]:
inBounds.append( (ymax_zero, ymax) )
#print inBounds
print axes
if len(inBounds) >= 2:
plot(X[Y>=0.5,0], X[Y>=0.5,1], 'b+',
X[Y< 0.5,0], X[Y< 0.5,1], 'ro',
[inBounds[0][0], inBounds[1][0]], [inBounds[0][1], inBounds[1][1]], 'k-')
#figure(1).set_axes(axes)
legend(('positive', 'negative', 'hyperplane'))
else:
plot(X[Y>=0.5,0], X[Y>=0.5,1], 'b+',
X[Y< 0.5,0], X[Y< 0.5,1], 'ro')
#figure(1).set_axes(axes)
legend(('positive', 'negative'))
def runOnlineClassifier(h, X, Y):
N,D = X.shape
order = range(N)
util.permute(order)
plot(X[Y< 0.5,0], X[Y< 0.5,1], 'b+',
X[Y>=0.5,0], X[Y>=0.5,1], 'ro')
noStop = False
for n in order:
print (Y[n], X[n,:])
h.nextExample(X[n,:], Y[n])
hold(True)
plot([X[n,0]], [X[n,1]], 'ys')
hold(False)
if not noStop:
v = raw_input()
if v == "q":
noStop = True
plotLinearClassifier(h, X, Y)
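# A minimal usage sketch (assumes a classifier object h exposing .weights, and
# optionally .bias, over 2-D data; values are illustrative):
#
#   X = array([[0., 0.], [1., 1.], [0., 1.], [1., 0.]])
#   Y = array([1., 1., 0., 0.])
#   plotLinearClassifier(h, X, Y)   # scatter the data and, if it crosses the
#                                   # axes, draw the separating hyperplane
#   runOnlineClassifier(h, X, Y)    # feed one example at a time; press enter to
#                                   # step, or type 'q' to run without pausing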
|
from Device import Device
class File(Device):
class Inventory(Device.Inventory):
import pyre.inventory
name = pyre.inventory.str("name", default="journal.log")
name.meta['tip'] = "the name of the file in which messages will be placed"
def createDevice(self):
logfile = file(self.inventory.name, "a", 0)
import os
import time
print >> logfile, " ** MARK: opened by %s on %s" % (os.getpid(), time.ctime())
from journal.devices.File import File
return File(logfile)
def __init__(self):
Device.__init__(self, "file")
return
__id__ = "$Id: File.py,v 1.2 2005/03/10 06:16:37 aivazis Exp $"
|
"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Basic caching classes and methods
=================================
- Cache - The generic caching object interfacing with the others
- CacheInRam - providing caching in ram
- CacheOnDisk - provides caches on disk
Memcache is also available via a different module (see gluon.contrib.memcache)
When web2py is running on Google App Engine,
caching will be provided by the GAE memcache
(see gluon.contrib.gae_memcache)
"""
import time
import portalocker
import shelve
import thread
import os
import logging
import re
import hashlib
import datetime
try:
from gluon import settings
have_settings = True
except ImportError:
have_settings = False
logger = logging.getLogger("web2py.cache")
__all__ = ['Cache', 'lazy_cache']
DEFAULT_TIME_EXPIRE = 300
class CacheAbstract(object):
"""
Abstract class for cache implementations.
Main function is now to provide referenced api documentation.
Use CacheInRam or CacheOnDisk instead which are derived from this class.
    Attention, Michele says:
    There are signatures inside gdbm files that are used directly
    by the python gdbm adapter, and the detection code on the python
    side often lags behind them.
    Whenever a gdbm store is probed by the python adapter,
    the probe fails, because the gdbm file version is newer.
    Using gdbm directly from C would work, because there is backward
    compatibility, but not from python!
    The .shelve file is then discarded and a new one created (with the new
    signature) and it works until it is probed again...
    The possible consequences are memory leaks and broken sessions.
"""
cache_stats_name = 'web2py_cache_statistics'
def __init__(self, request=None):
"""
Parameters
----------
request:
the global request object
"""
raise NotImplementedError
def __call__(self, key, f,
time_expire=DEFAULT_TIME_EXPIRE):
"""
        Tries to retrieve the value corresponding to `key` from the cache if
        the object exists and has not expired, else it calls the function `f`
        and stores the output in the cache corresponding to `key`. In either
        case the output of the function is returned.
        :param key: the key of the object to be stored or retrieved
        :param f: the function, whose output is to be cached
        :param time_expire: expiration of the cache in seconds
- `time_expire` is used to compare the current time with the time when
the requested object was last saved in cache. It does not affect
future requests.
- Setting `time_expire` to 0 or negative value forces the cache to
refresh.
If the function `f` is `None` the cache is cleared.
"""
raise NotImplementedError
def clear(self, regex=None):
"""
Clears the cache of all keys that match the provided regular expression.
If no regular expression is provided, it clears all entries in cache.
Parameters
----------
regex:
if provided, only keys matching the regex will be cleared.
Otherwise all keys are cleared.
"""
raise NotImplementedError
def increment(self, key, value=1):
"""
Increments the cached value for the given key by the amount in value
Parameters
----------
key:
            key for the cached object to be incremented
value:
amount of the increment (defaults to 1, can be negative)
"""
raise NotImplementedError
def _clear(self, storage, regex):
"""
Auxiliary function called by `clear` to search and clear cache entries
"""
r = re.compile(regex)
for (key, value) in storage.items():
if r.match(str(key)):
del storage[key]
class CacheInRam(CacheAbstract):
"""
Ram based caching
    This is implemented as a global (per process, shared by all threads)
    dictionary.
    A mutex-lock mechanism avoids conflicts.
"""
locker = thread.allocate_lock()
meta_storage = {}
def __init__(self, request=None):
self.initialized = False
self.request = request
self.storage = {}
def initialize(self):
if self.initialized:
return
else:
self.initialized = True
self.locker.acquire()
request = self.request
if request:
app = request.application
else:
app = ''
if not app in self.meta_storage:
self.storage = self.meta_storage[app] = {
CacheAbstract.cache_stats_name: {'hit_total': 0, 'misses': 0}}
else:
self.storage = self.meta_storage[app]
self.locker.release()
def clear(self, regex=None):
self.initialize()
self.locker.acquire()
storage = self.storage
if regex is None:
storage.clear()
else:
self._clear(storage, regex)
if not CacheAbstract.cache_stats_name in storage.keys():
storage[CacheAbstract.cache_stats_name] = {
'hit_total': 0, 'misses': 0}
self.locker.release()
def __call__(self, key, f,
time_expire=DEFAULT_TIME_EXPIRE,
destroyer=None):
"""
        Attention! cache.ram does not copy the cached object. It just stores a reference to it.
        It turns out that deepcopying the object has some problems:
        1) it would break backward compatibility
        2) it would be limiting because people may want to cache live objects
        3) it would only work if we deepcopied on both storage and retrieval, which would make things slow.
        Anyway, you can deepcopy explicitly in the function generating the value to be cached.
"""
self.initialize()
dt = time_expire
now = time.time()
self.locker.acquire()
item = self.storage.get(key, None)
if item and f is None:
del self.storage[key]
if destroyer:
destroyer(item[1])
self.storage[CacheAbstract.cache_stats_name]['hit_total'] += 1
self.locker.release()
if f is None:
return None
if item and (dt is None or item[0] > now - dt):
return item[1]
elif item and (item[0] < now - dt) and destroyer:
destroyer(item[1])
value = f()
self.locker.acquire()
self.storage[key] = (now, value)
self.storage[CacheAbstract.cache_stats_name]['misses'] += 1
self.locker.release()
return value
def increment(self, key, value=1):
self.initialize()
self.locker.acquire()
try:
if key in self.storage:
value = self.storage[key][1] + value
self.storage[key] = (time.time(), value)
except BaseException, e:
self.locker.release()
raise e
self.locker.release()
return value
class CacheOnDisk(CacheAbstract):
"""
Disk based cache
This is implemented as a shelve object and it is shared by multiple web2py
processes (and threads) as long as they share the same filesystem.
The file is locked when accessed.
    Disk cache provides persistence when web2py is started/stopped, but it is
    slower than `CacheInRam`.
    Values stored in the disk cache must be picklable.
"""
def _close_shelve_and_unlock(self):
try:
if self.storage:
self.storage.close()
finally:
if self.locker and self.locked:
portalocker.unlock(self.locker)
self.locker.close()
self.locked = False
def _open_shelve_and_lock(self):
"""Open and return a shelf object, obtaining an exclusive lock
on self.locker first. Replaces the close method of the
returned shelf instance with one that releases the lock upon
closing."""
storage = None
locker = None
locked = False
try:
            locker = open(self.locker_name, 'a')
portalocker.lock(locker, portalocker.LOCK_EX)
locked = True
try:
storage = shelve.open(self.shelve_name)
except:
                logger.error('corrupted cache file %s, will try to rebuild it'
                             % self.shelve_name)
storage = None
if not storage and os.path.exists(self.shelve_name):
os.unlink(self.shelve_name)
storage = shelve.open(self.shelve_name)
if not CacheAbstract.cache_stats_name in storage.keys():
storage[CacheAbstract.cache_stats_name] = {
'hit_total': 0, 'misses': 0}
storage.sync()
except Exception, e:
if storage:
storage.close()
storage = None
if locked:
portalocker.unlock(locker)
locker.close()
locked = False
raise RuntimeError(
'unable to create/re-create cache file %s' % self.shelve_name)
self.locker = locker
self.locked = locked
self.storage = storage
return storage
def __init__(self, request=None, folder=None):
self.initialized = False
self.request = request
self.folder = folder
self.storage = {}
def initialize(self):
if self.initialized:
return
else:
self.initialized = True
folder = self.folder
request = self.request
            # Let's check whether the cache folder exists; if not, create it.
folder = folder or os.path.join(request.folder, 'cache')
if not os.path.exists(folder):
os.mkdir(folder)
### we need this because of a possible bug in shelve that may
### or may not lock
self.locker_name = os.path.join(folder, 'cache.lock')
self.shelve_name = os.path.join(folder, 'cache.shelve')
def clear(self, regex=None):
self.initialize()
storage = self._open_shelve_and_lock()
try:
if regex is None:
storage.clear()
else:
self._clear(storage, regex)
storage.sync()
finally:
self._close_shelve_and_unlock()
def __call__(self, key, f,
time_expire=DEFAULT_TIME_EXPIRE):
self.initialize()
dt = time_expire
storage = self._open_shelve_and_lock()
try:
item = storage.get(key, None)
storage[CacheAbstract.cache_stats_name]['hit_total'] += 1
if item and f is None:
del storage[key]
storage.sync()
now = time.time()
if f is None:
value = None
elif item and (dt is None or item[0] > now - dt):
value = item[1]
else:
value = f()
storage[key] = (now, value)
storage[CacheAbstract.cache_stats_name]['misses'] += 1
storage.sync()
finally:
self._close_shelve_and_unlock()
return value
def increment(self, key, value=1):
self.initialize()
storage = self._open_shelve_and_lock()
try:
if key in storage:
value = storage[key][1] + value
storage[key] = (time.time(), value)
storage.sync()
finally:
self._close_shelve_and_unlock()
return value
class CacheAction(object):
def __init__(self, func, key, time_expire, cache, cache_model):
self.__name__ = func.__name__
self.__doc__ = func.__doc__
self.func = func
self.key = key
self.time_expire = time_expire
self.cache = cache
self.cache_model = cache_model
def __call__(self, *a, **b):
if not self.key:
key2 = self.__name__ + ':' + repr(a) + ':' + repr(b)
else:
key2 = self.key.replace('%(name)s', self.__name__)\
.replace('%(args)s', str(a)).replace('%(vars)s', str(b))
cache_model = self.cache_model
if not cache_model or isinstance(cache_model, str):
cache_model = getattr(self.cache, cache_model or 'ram')
return cache_model(key2,
lambda a=a, b=b: self.func(*a, **b),
self.time_expire)
class Cache(object):
"""
Sets up generic caching, creating an instance of both CacheInRam and
CacheOnDisk.
    On GAE it will make use of gluon.contrib.gae_memcache.
- self.ram is an instance of CacheInRam
- self.disk is an instance of CacheOnDisk
"""
autokey = ':%(name)s:%(args)s:%(vars)s'
def __init__(self, request):
"""
Parameters
----------
request:
the global request object
"""
# GAE will have a special caching
if have_settings and settings.global_settings.web2py_runtime_gae:
from gluon.contrib.gae_memcache import MemcacheClient
self.ram = self.disk = MemcacheClient(request)
else:
# Otherwise use ram (and try also disk)
self.ram = CacheInRam(request)
try:
self.disk = CacheOnDisk(request)
except IOError:
logger.warning('no cache.disk (IOError)')
except AttributeError:
# normally not expected anymore, as GAE has already
# been accounted for
logger.warning('no cache.disk (AttributeError)')
def action(self, time_expire=DEFAULT_TIME_EXPIRE, cache_model=None,
prefix=None, session=False, vars=True, lang=True,
user_agent=False, public=True, valid_statuses=None,
quick=None):
"""
Experimental!
Currently only HTTP 1.1 compliant
reference : http://code.google.com/p/doctype-mirror/wiki/ArticleHttpCaching
time_expire: same as @cache
cache_model: same as @cache
prefix: add a prefix to the calculated key
session: adds response.session_id to the key
vars: adds request.env.query_string
lang: adds T.accepted_language
        user_agent: if True, adds is_mobile and is_tablet to the key.
            Pass a dict to use all the needed values (uses str(.items()),
            e.g. user_agent=request.user_agent()). Used only if session is not True.
public: if False forces the Cache-Control to be 'private'
        valid_statuses: by default only status codes starting with 1,2,3 will be cached.
            Pass an explicit list of statuses for which to turn the cache on.
        quick: Session, Vars, Lang, User-agent, Public:
            fast overrides using the initial letters, e.g. 'SVLP' or 'VLP'
"""
from gluon import current
from gluon.http import HTTP
def wrap(func):
def wrapped_f():
if current.request.env.request_method != 'GET':
return func()
if time_expire:
cache_control = 'max-age=%(time_expire)s, s-maxage=%(time_expire)s' % dict(time_expire=time_expire)
if quick:
session_ = True if 'S' in quick else False
vars_ = True if 'V' in quick else False
lang_ = True if 'L' in quick else False
user_agent_ = True if 'U' in quick else False
public_ = True if 'P' in quick else False
else:
session_, vars_, lang_, user_agent_, public_ = session, vars, lang, user_agent, public
if not session_ and public_:
cache_control += ', public'
expires = (current.request.utcnow + datetime.timedelta(seconds=time_expire)).strftime('%a, %d %b %Y %H:%M:%S GMT')
else:
cache_control += ', private'
expires = 'Fri, 01 Jan 1990 00:00:00 GMT'
if cache_model:
#figure out the correct cache key
cache_key = [current.request.env.path_info, current.response.view]
if session_:
cache_key.append(current.response.session_id)
elif user_agent_:
if user_agent_ is True:
cache_key.append("%(is_mobile)s_%(is_tablet)s" % current.request.user_agent())
else:
cache_key.append(str(user_agent_.items()))
if vars_:
cache_key.append(current.request.env.query_string)
if lang_:
cache_key.append(current.T.accepted_language)
cache_key = hashlib.md5('__'.join(cache_key)).hexdigest()
if prefix:
cache_key = prefix + cache_key
try:
#action returns something
rtn = cache_model(cache_key, lambda : func(), time_expire=time_expire)
http, status = None, current.response.status
except HTTP, e:
#action raises HTTP (can still be valid)
rtn = cache_model(cache_key, lambda : e.body, time_expire=time_expire)
http, status = HTTP(e.status, rtn, **e.headers), e.status
else:
                        #no exception was raised; the response can be served as-is
http = None
else:
#no server-cache side involved
try:
#action returns something
rtn = func()
http, status = None, current.response.status
except HTTP, e:
#action raises HTTP (can still be valid)
status = e.status
http = HTTP(e.status, e.body, **e.headers)
else:
                        #no exception was raised; the response can be served as-is
http = None
send_headers = False
if http and isinstance(valid_statuses, list):
if status in valid_statuses:
send_headers = True
elif valid_statuses is None:
if str(status)[0] in '123':
send_headers = True
if send_headers:
headers = {
'Pragma' : None,
'Expires' : expires,
'Cache-Control' : cache_control
}
current.response.headers.update(headers)
if cache_model and not send_headers:
#we cached already the value, but the status is not valid
#so we need to delete the cached value
cache_model(cache_key, None)
if http:
if send_headers:
http.headers.update(current.response.headers)
raise http
return rtn
wrapped_f.__name__ = func.__name__
wrapped_f.__doc__ = func.__doc__
return wrapped_f
return wrap
def __call__(self,
key=None,
time_expire=DEFAULT_TIME_EXPIRE,
cache_model=None):
"""
Decorator function that can be used to cache any function/method.
Example::
@cache('key', 5000, cache.ram)
def f():
return time.ctime()
        When the function f is called, web2py tries to retrieve
        the value corresponding to `key` from the cache if the
        object exists and has not expired, else it calls the function `f`
        and stores the output in the cache corresponding to `key`. In either
        case the output of the function is returned.
        :param key: the key of the object to be stored or retrieved
        :param time_expire: expiration of the cache in seconds
:param cache_model: "ram", "disk", or other
(like "memcache" if defined). It defaults to "ram".
Notes
-----
        `time_expire` is used to compare the current time with the time when the
requested object was last saved in cache. It does not affect future
requests.
Setting `time_expire` to 0 or negative value forces the cache to
refresh.
If the function `f` is an action, we suggest using
@cache.client instead
"""
def tmp(func, cache=self, cache_model=cache_model):
return CacheAction(func, key, time_expire, self, cache_model)
return tmp
@staticmethod
def with_prefix(cache_model, prefix):
"""
        Allows replacing cache.ram with cache.with_prefix(cache.ram, 'prefix');
        it will add the prefix to all the cache keys used.
"""
return lambda key, f, time_expire=DEFAULT_TIME_EXPIRE, prefix=prefix:\
cache_model(prefix + key, f, time_expire)
def lazy_cache(key=None, time_expire=None, cache_model='ram'):
"""
    Can be used to cache any function, including in modules,
    as long as the cached function is only called within a web2py request.
    If a key is not provided, one is generated from the function name.
    time_expire defaults to None (no cache expiration).
    If cache_model is "ram" then the model is current.cache.ram, etc.
"""
def decorator(f, key=key, time_expire=time_expire, cache_model=cache_model):
key = key or repr(f)
def g(*c, **d):
from gluon import current
return current.cache(key, time_expire, cache_model)(f)(*c, **d)
g.__name__ = f.__name__
return g
return decorator
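# A minimal usage sketch (assumes a web2py request context; key names and the
# expensive() body are illustrative):
#
#   cache = Cache(request)
#
#   @cache('mykey', time_expire=60, cache_model=cache.ram)
#   def expensive():
#       return compute_something()          # recomputed at most once per minute
#
#   cache.ram.clear(regex='^mykey')         # drop matching entries
#   prefixed = Cache.with_prefix(cache.ram, 'app1:')
#
#   @lazy_cache('module_key', time_expire=300, cache_model='ram')
#   def helper():                           # usable from modules, resolved via
#       return compute_something()          # current.cache at call time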
|
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import fs_uae_launcher.fsui as fsui
from ..Amiga import Amiga
from ..CDManager import CDManager
from ..ChecksumTool import ChecksumTool
from ..Config import Config
from ..FloppyManager import FloppyManager
from ..I18N import _, ngettext
from ..Paths import Paths
from ..Settings import Settings
from .IconButton import IconButton
from .LauncherFileDialog import LauncherFileDialog
class MediaListGroup(fsui.Group):
def __init__(self, parent, cd_mode):
fsui.Group.__init__(self, parent)
self.layout = fsui.VerticalLayout()
self.cd_mode = cd_mode
if self.cd_mode:
self.file_key_prefix = "cdrom_image_"
self.file_key = "cdrom_image_{0}"
self.sha1_key = "x_cdrom_image_{0}_sha1"
else:
self.file_key_prefix = "floppy_image_"
self.file_key = "floppy_image_{0}"
self.sha1_key = "x_floppy_image_{0}_sha1"
self.heading_label = fsui.HeadingLabel(self, _("Media Swap List"))
self.layout.add(self.heading_label, margin=10)
self.layout.add_spacer(0)
hori_layout = fsui.HorizontalLayout()
self.layout.add(hori_layout, expand=True, fill=True)
self.list_view = fsui.ListView(self)
if self.cd_mode:
self.list_view.set_default_icon(fsui.Image(
"fs_uae_launcher:res/cdrom_16.png"))
else:
self.list_view.set_default_icon(fsui.Image(
"fs_uae_launcher:res/floppy_16.png"))
hori_layout.add(self.list_view, expand=True, fill=True, margin=10,
margin_right=0)
vert_layout = fsui.VerticalLayout()
hori_layout.add(vert_layout, fill=True)
add_button = IconButton(self, "add_button.png")
add_button.set_tooltip(_("Add Files to List"))
add_button.on_activate = self.on_add_button
vert_layout.add(add_button, margin=10)
remove_button = IconButton(self, "remove_button.png")
remove_button.set_tooltip(_("Remove Selected Files"))
remove_button.on_activate = self.on_remove_button
vert_layout.add(remove_button, margin=10)
clear_button = IconButton(self, "clear_button.png")
clear_button.set_tooltip(_("Clear List"))
clear_button.on_activate = self.on_clear_list
vert_layout.add(clear_button, margin=10)
self.update_list()
Config.add_listener(self)
def on_destroy(self):
Config.remove_listener(self)
def on_config(self, key, value):
if key.startswith(self.file_key_prefix):
self.update_list()
def create_list(self):
items = []
if self.cd_mode:
max_items = Amiga.MAX_CDROM_IMAGES
else:
max_items = Amiga.MAX_FLOPPY_IMAGES
for i in range(max_items):
path = Config.get(self.file_key.format(i))
sha1 = Config.get(self.sha1_key.format(i))
if not path:
continue
items.append((path, sha1))
return items
def update_list(self):
items = []
for path, sha1 in self.create_list():
dir, name = os.path.split(path)
if dir:
items.append("{0}\n{1}".format(name, dir))
else:
items.append(path)
self.list_view.set_items(items)
def on_clear_list(self):
if self.cd_mode:
CDManager.clear_cdrom_list()
else:
FloppyManager.clear_floppy_list()
def on_remove_button(self):
index = self.list_view.get_index()
existing_items = self.create_list()
if index >= 0 and index < len(existing_items):
del existing_items[index]
self.set_new_config(existing_items)
def on_add_button(self):
existing_items = self.create_list()
default_dir = Settings.get_floppies_dir()
if self.cd_mode:
dialog = LauncherFileDialog(self.get_window(),
_("Select Multiple CD-ROMs"), "cd", multiple=True)
else:
dialog = LauncherFileDialog(self.get_window(),
_("Select Multiple Floppies"), "floppy", multiple=True)
if not dialog.show():
return
paths = dialog.get_paths()
paths.sort()
checksum_tool = ChecksumTool(self.get_window())
for i, path in enumerate(paths):
sha1 = checksum_tool.checksum(path)
path = Paths.contract_path(path, default_dir)
dir, file = os.path.split(path)
if os.path.normcase(os.path.normpath(dir)) == \
os.path.normcase(os.path.normpath(default_dir)):
path = file
existing_items.append((path, sha1))
self.set_new_config(existing_items)
def set_new_config(self, items):
if self.cd_mode:
max_items = Amiga.MAX_CDROM_IMAGES
else:
max_items = Amiga.MAX_FLOPPY_IMAGES
set_list = []
for i in range(max(max_items, len(items))):
if i >= max_items:
break
elif i >= len(items):
path, sha1 = "", ""
else:
path, sha1 = items[i]
set_list.append((self.file_key.format(i), path))
set_list.append((self.sha1_key.format(i), sha1))
Config.set_multiple(set_list)
|
"""Synchronize Rating to Mediawiki.org for extensions."""
from apiary.tasks import BaseApiaryTask
import logging
import mwparserfromhell
LOGGER = logging.getLogger()
class MediawikiTasks(BaseApiaryTask):
"""Update extension data on mediawiki.org"""
def get_rating(self, extension_name):
"""Retrieve and calculate total rating for an extension"""
rating_properties = ['Has ease of installation rating', 'Has usability rating', 'Has documentation quality rating']
total_rating = 0
for my_property in rating_properties:
try:
wiki_return = self.bumble_bee.call({
'action': 'ask',
'query': ''.join([
"[[Category:Reviews]]",
"[[Has item::%s]]" % extension_name,
"|?%s|format=average" % my_property
])
})
rating = ((wiki_return['query']['results']).values()[0])['printouts'][my_property]
total_rating = total_rating + rating[0]
except Exception as e:
raise Exception("Error while querying for Rating for extension %s (%s)." % (extension_name, e))
return (total_rating / len(rating_properties) )
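    # Worked example (hypothetical numbers): if the three averaged properties
    # come back as 4, 3 and 5, total_rating is 12 and the returned overall
    # rating is 12 / 3 = 4.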
def get_mwpagetitle(self, extension_name):
"""Return the corresponding page for the extension on mw.o"""
try:
wiki_return = self.bumble_bee.call({
'action': 'ask',
'query': ''.join([
"[[%s]]" % extension_name,
'|?Has URL',
'|?Has MediaWiki.org title'
])
})
url = ((wiki_return['query']['results']).values()[0])['printouts']['Has URL']
mw_title = ((wiki_return['query']['results']).values()[0])['printouts']['Has MediaWiki.org title']
except Exception as e:
raise Exception("Error while querying for URL for extension %s (%s)." % (extension_name, e))
if mw_title:
return ("Extension:" + mw_title[0])
elif url and "www.mediawiki.org" in url[0]:
url = url[0]
return url[ url.rfind('/')+1 :]
else:
return None
def parse(self, title, wiki):
"""Function to parse MW page using mwparserfromhell"""
data = {
"action": "parse",
"prop": "wikitext",
"disablepp": 1,
"format": "json",
"page": title
}
wiki_return = wiki.call(data)
text = wiki_return['parse']['wikitext']['*']
return mwparserfromhell.parse(text)
def updatemediawiki(self, extension_name, title, data):
"""Edit a page on mediawiki.org using its title"""
wiki_return = self.mworg_bee.call({
'action': 'edit',
'title': title,
'text': data,
'bot': True,
'minor': True,
'assert': 'bot',
'summary': "Updating rating from [[wikiapiary:Main Page|WikiApiary]] for [[wikiapiary:%s|%s]]" % (extension_name, extension_name),
'token': self.mworg_bee_token
})
LOGGER.debug(wiki_return)
if 'error' in wiki_return:
raise Exception(wiki_return)
return wiki_return
def pushratings(self, extension_name):
"""Get rating information for an extension and write to mediawiki"""
rating = self.get_rating(extension_name)
mwtitle = self.get_mwpagetitle(extension_name)
if mwtitle is None:
LOGGER.info("No valid Mediawiki URL found for extension %s", extension_name)
return None
data = self.parse(mwtitle, self.mworg_bee)
# Loop through the templates we found looking for the one that contains extension data
for template in data.filter_templates():
            # Match the {{Extension ...}} infobox template by name.
            if str(template.name).strip() == 'Extension':
template.add('WikiApiary rating', rating)
# Update ratings inside extension template
return self.updatemediawiki(extension_name, mwtitle, data)
def run(self, extension_name):
"""Execute tasks related to mediawiki.org"""
return self.pushratings(extension_name)
|
from gi.repository import GLib
from gi.repository import GObject
from gi.repository import Gtk
from xl import common, providers
from xl.nls import gettext as _
from xlgui import icons
FAKEACCELGROUP = Gtk.AccelGroup()
def simple_separator(name, after):
def factory(menu, parent, context):
item = Gtk.SeparatorMenuItem()
return item
item = MenuItem(name, factory, after=after)
item._pos = 'last'
return item
def simple_menu_item(name, after, display_name=None, icon_name=None,
callback=None, callback_args=[], submenu=None,
accelerator=None, condition_fn=None):
"""
Factory function that should handle most cases for menus
    :param name: Internal name for the item. Must be unique within the menu.
    :param after: List of ids which come before this item; this item will
        be placed after the lowest of these.
    :param display_name: Name as it should appear in the menu.
:param icon_name: Name of the icon to display, or None for no icon.
:param callback: The function to call when the menu item is activated.
signature: callback(widget, name, parent, context)
:param submenu: The Gtk.Menu that is to be the submenu of this item
:param accelerator: The keyboard shortcut to display next to the item.
        This does NOT bind the key; that must be done separately by
        registering an Accelerator with providers.
:param condition_fn: A function to call when the menu is displayed. If
the function returns False, the menu item is not shown
signature: condition_fn(name, parent, context)
"""
def factory(menu, parent, context):
item = None
if condition_fn is not None and not condition_fn(name, parent, context):
return None
if display_name is not None:
if icon_name is not None:
item = Gtk.ImageMenuItem.new_from_stock(display_name)
image = Gtk.Image.new_from_icon_name(icon_name,
size=Gtk.IconSize.MENU)
item.set_image(image)
else:
item = Gtk.MenuItem.new_with_mnemonic(display_name)
else:
item = Gtk.ImageMenuItem.new_from_stock(icon_name)
if submenu is not None:
item.set_submenu(submenu)
if accelerator is not None:
key, mods = Gtk.accelerator_parse(accelerator)
item.add_accelerator('activate', FAKEACCELGROUP, key, mods,
Gtk.AccelFlags.VISIBLE)
if callback is not None:
item.connect('activate', callback, name,
parent, context, *callback_args)
return item
return MenuItem(name, factory, after=after)
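# A minimal usage sketch (callback body, item name and icon are illustrative):
#
#   def on_refresh(widget, name, parent, context):
#       parent.refresh()                    # hypothetical parent method
#
#   item = simple_menu_item('refresh', ['top-sep'], _('Re_fresh'),
#                           'view-refresh', on_refresh,
#                           accelerator='<Control>r')
#   some_menu.add_item(item)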
def check_menu_item(name, after, display_name, checked_func, callback,
accelerator=None):
def factory(menu, parent, context):
item = Gtk.CheckMenuItem.new_with_mnemonic(display_name)
active = checked_func(name, parent, context)
item.set_active(active)
if accelerator is not None:
key, mods = Gtk.accelerator_parse(accelerator)
item.add_accelerator('activate', FAKEACCELGROUP, key, mods,
Gtk.AccelFlags.VISIBLE)
item.connect('activate', callback, name, parent, context)
return item
return MenuItem(name, factory, after=after)
def radio_menu_item(name, after, display_name, groupname, selected_func,
callback):
def factory(menu, parent, context):
for index, item in enumerate(menu._items):
if hasattr(item, 'groupname') and item.groupname == groupname:
break
else:
index = None
if index is not None:
try:
group_parent = menu.get_children()[index]
if not isinstance(group_parent, Gtk.RadioMenuItem):
group_parent = None
except IndexError:
group_parent = None
if group_parent:
group = group_parent.get_group()
else:
group = None
item = Gtk.RadioMenuItem.new_with_mnemonic(group, display_name)
active = selected_func(name, parent, context)
item.set_active(active)
item.connect('activate', callback, name, parent, context)
return item
return RadioMenuItem(name, factory, after=after, groupname=groupname)
class MenuItem(object):
__slots__ = ['name', 'after', '_factory', '_pos', '_provider_data']
def __init__(self, name, factory, after):
self.name = name
self.after = after
self._factory = factory
self._pos = 'normal' # Don't change this unless you have a REALLY good
# reason to. after= is the 'supported'
# method of ordering, this property is not
# considered public api and may change
# without warning.
def factory(self, menu, parent, context):
"""
The factory function is called when the menu is shown, and
should return a menu item. If it returns None, the item is
not shown.
"""
return self._factory(menu, parent, context)
def register(self, servicename, target=None):
'''
Shortcut for providers.register(), allows registration
for use with a ProviderMenu
'''
self._provider_data = (servicename, target)
return providers.register(servicename, self, target=target)
def unregister(self):
'''
Shortcut for providers.unregister()
'''
servicename, target = self._provider_data
return providers.unregister(servicename, self, target)
def __repr__(self):
return '<xlgui.widgets.MenuItem: %s>' % self.name
class RadioMenuItem(MenuItem):
__slots__ = ['groupname']
def __init__(self, name, factory, after, groupname):
MenuItem.__init__(self, name, factory, after)
self.groupname = groupname
class Menu(Gtk.Menu):
"""
Generic flexible menu with reliable
menu item order and context handling
"""
def __init__(self, parent, context_func=None, inherit_context=False):
"""
:param parent: the parent for this menu
:param context_func: a function for context
retrieval
:param inherit_context: If a submenu, inherit context function from
parent menu
"""
Gtk.Menu.__init__(self)
self._parent = parent
self._items = []
self.context_func = context_func
self.connect('show', lambda *e: self.regenerate_menu())
# GTK gets unhappy if we remove the menu items before it's done with them.
self.connect('hide', lambda *e: GLib.idle_add(self.clear_menu))
# Placeholder exists to make sure unity doesn't get confused (legacy?)
self.placeholder = Gtk.MenuItem.new_with_mnemonic('')
self._inherit_context = inherit_context
def get_context(self):
"""
Retrieves the menu context which
can contain various data
:returns: {'key1': 'value1', ...}
:rtype: dictionary
"""
if self._inherit_context:
return self.get_parent_shell().get_context()
elif self.context_func is None:
return {}
else:
return self.context_func(self._parent)
def add_item(self, item):
"""
Adds a menu item and triggers reordering
:param item: the menu item
:type item: :class:`MenuItem`
"""
self._items.append(item)
self.reorder_items()
def remove_item(self, item):
"""
Removes a menu item
:param item: the menu item
:type item: :class:`MenuItem`
"""
self._items.remove(item)
def clear_menu(self):
"""
Removes all menu items and submenus to prevent
references sticking around due to saved contexts
"""
self.append(self.placeholder)
children = self.get_children()
for c in children:
if c == self.placeholder: continue
c.set_submenu(None)
self.remove(c)
def reorder_items(self):
"""
Reorders all menu items
"""
pmap = {'first': 0, 'normal': 1, 'last': 2}
items = [common.PosetItem(i.name, i.after,
pmap[i._pos], value=i) \
for i in self._items]
items = common.order_poset(items)
self._items = [i.value for i in items]
def regenerate_menu(self):
"""
Regenerates the menu by retrieving
the context and calling the factory
method of all menu items
"""
context = self.get_context()
if self.placeholder in self.get_children():
self.remove(self.placeholder)
for item in self._items:
subitem = item.factory(self, self._parent, context)
if subitem is not None:
self.append(subitem)
self.show_all()
def popup(self, *args):
"""
Pops out the menu (Only if
there are items to show)
"""
if len(self._items) > 0:
if len(args) == 1:
event = args[0]
Gtk.Menu.popup(self, None, None, None, None, event.button, event.time)
else:
Gtk.Menu.popup(self, *args)
class ProviderMenu(providers.ProviderHandler, Menu):
'''
A menu that can be added to by registering a menu item with
the providers system. If desired, a menu item can be targeted
towards a specific parent widget.
'''
def __init__(self, name, parent):
providers.ProviderHandler.__init__(self, name, parent)
Menu.__init__(self, parent)
for p in self.get_providers():
self.on_provider_added(p)
def on_provider_added(self, provider):
self.add_item(provider)
def on_provider_removed(self, provider):
self.remove_item(provider)
class MultiProviderMenu(providers.MultiProviderHandler, Menu):
'''
A menu that can be added to by registering a menu item with
the providers system. If desired, a menu item can be targeted
towards a specific parent widget.
Supports retrieving menu items from multiple providers
'''
def __init__(self, names, parent):
providers.MultiProviderHandler.__init__(self, names, parent)
Menu.__init__(self, parent)
for p in self.get_providers():
self.on_provider_added(p)
def on_provider_added(self, provider):
self.add_item(provider)
def on_provider_removed(self, provider):
self.remove_item(provider)
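# A minimal usage sketch (service name and items are illustrative): a plugin
# can contribute entries to an existing ProviderMenu purely through the
# providers registry.
#
#   menu = ProviderMenu('my-service', parent_widget)
#   item = simple_menu_item('hello', [], _('Say _Hello'), None,
#                           lambda w, n, p, c: None)
#   item.register('my-service')    # shows up in every ProviderMenu('my-service', ...)
#   item.unregister()              # and disappears again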
|
from os import path as os_path, walk as os_walk, unlink as os_unlink
import operator
from Plugins.Plugin import PluginDescriptor
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.config import config, ConfigSelection, ConfigYesNo, getConfigListEntry, ConfigSubsection, ConfigText
from Components.ConfigList import ConfigListScreen
from Components.NimManager import nimmanager
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.ProgressBar import ProgressBar
from Components.ServiceList import refreshServiceList
from Components.ActionMap import ActionMap
from enigma import eFastScan, eDVBFrontendParametersSatellite
config.misc.fastscan = ConfigSubsection()
config.misc.fastscan.last_configuration = ConfigText(default = "()")
from enigma import eDVBFrontendParametersSatellite, eComponentScan, \
eDVBSatelliteEquipmentControl, eDVBFrontendParametersTerrestrial, \
eDVBFrontendParametersCable, eConsoleAppContainer, eDVBResourceManager
from Screens.ServiceScan import ServiceScan
from Components.NimManager import nimmanager
import os
class FastScan:
def __init__(self, text, progressbar, scanTuner = 0, transponderParameters = None, scanPid = 900, keepNumbers = False, keepSettings = False, providerName = 'Favorites'):
self.text = text
self.progressbar = progressbar
self.transponderParameters = transponderParameters
self.scanPid = scanPid
self.scanTuner = scanTuner
self.keepNumbers = keepNumbers
self.keepSettings = keepSettings
self.providerName = providerName
self.done = False
def execBegin(self):
self.text.setText(_('Scanning %s...') % self.providerName)
self.progressbar.setValue(0)
self.scan = eFastScan(self.scanPid, self.providerName, self.transponderParameters, self.keepNumbers, self.keepSettings)
self.scan.scanCompleted.get().append(self.scanCompleted)
self.scan.scanProgress.get().append(self.scanProgress)
fstfile = None
fntfile = None
for root, dirs, files in os_walk('/tmp/'):
for f in files:
if f.endswith('.bin'):
if '_FST' in f:
fstfile = os_path.join(root, f)
elif '_FNT' in f:
fntfile = os_path.join(root, f)
if fstfile and fntfile:
self.scan.startFile(fntfile, fstfile)
os_unlink(fstfile)
os_unlink(fntfile)
else:
self.scan.start(self.scanTuner)
def execEnd(self):
self.scan.scanCompleted.get().remove(self.scanCompleted)
self.scan.scanProgress.get().remove(self.scanProgress)
del self.scan
def scanProgress(self, progress):
self.progressbar.setValue(progress)
def scanCompleted(self, result):
self.done = True
if result < 0:
self.text.setText(_('Scanning failed!'))
else:
self.text.setText(ngettext('List version %d, found %d channel', 'List version %d, found %d channels', result) % (self.scan.getVersion(), result))
def destroy(self):
pass
def isDone(self):
return self.done
class FastScanStatus(Screen):
skin = """
<screen position="150,115" size="420,180" title="Fast Scan">
<widget name="frontend" pixmap="icons/scan-s.png" position="5,5" size="64,64" transparent="1" alphatest="on" />
<widget name="scan_state" position="10,120" zPosition="2" size="400,30" font="Regular;18" />
<widget name="scan_progress" position="10,155" size="400,15" pixmap="progress_big.png" borderWidth="2" borderColor="#cccccc" />
</screen>"""
def __init__(self, session, scanTuner = 0, transponderParameters = None, scanPid = 900, keepNumbers = False, keepSettings = False, providerName = 'Favorites'):
Screen.__init__(self, session)
self.setTitle(_("Fast Scan"))
self.scanPid = scanPid
self.scanTuner = scanTuner
self.transponderParameters = transponderParameters
self.keepNumbers = keepNumbers
self.keepSettings = keepSettings
self.providerName = providerName
self["frontend"] = Pixmap()
self["scan_progress"] = ProgressBar()
self["scan_state"] = Label(_("scan state"))
self.prevservice = self.session.nav.getCurrentlyPlayingServiceReference()
self.session.nav.stopService()
self["actions"] = ActionMap(["OkCancelActions"],
{
"ok": self.ok,
"cancel": self.cancel
})
self.onFirstExecBegin.append(self.doServiceScan)
def doServiceScan(self):
self["scan"] = FastScan(self["scan_state"], self["scan_progress"], self.scanTuner, self.transponderParameters, self.scanPid, self.keepNumbers, self.keepSettings, self.providerName)
def restoreService(self):
if self.prevservice:
self.session.nav.playService(self.prevservice)
def ok(self):
if self["scan"].isDone():
refreshServiceList()
self.restoreService()
self.close()
def cancel(self):
self.restoreService()
self.close()
class FastScanScreen(ConfigListScreen, Screen):
skin = """
<screen position="100,115" size="520,290" title="Fast Scan">
<widget name="config" position="10,10" size="500,250" scrollbarMode="showOnDemand" />
<widget name="introduction" position="10,265" size="500,25" font="Regular;20" halign="center" />
</screen>"""
def __init__(self, session, nimList):
Screen.__init__(self, session)
self.setTitle(_("Fast Scan"))
self.providers = {}
#hacky way
self.providers['Kontinent'] = (0, 900, True)
self.providers['Ntvplus'] = (0, 900, True)
self.providers['Raduga'] = (0, 900, True)
self.providers['Telekarta'] = (0, 900, True)
self.providers['Tricolor'] = (0, 900, True)
        #original
self.providers['Canal Digitaal'] = (1, 900, True)
self.providers['TV Vlaanderen'] = (1, 910, True)
self.providers['TéléSAT'] = (0, 920, True)
self.providers['AustriaSat'] = (0, 950, False)
self.providers['Skylink Czech Republic'] = (1, 30, False)
self.providers['Skylink Slovak Republic'] = (1, 31, False)
self.providers['TéléSAT Astra3'] = (1, 920, True)
self.providers['AustriaSat Astra3'] = (1, 950, False)
self.providers['Canal Digitaal Astra 1'] = (0, 900, True)
self.providers['TV Vlaanderen Astra 1'] = (0, 910, True)
self.transponders = ((12515000, 22000000, eDVBFrontendParametersSatellite.FEC_5_6, 192,
eDVBFrontendParametersSatellite.Polarisation_Horizontal, eDVBFrontendParametersSatellite.Inversion_Unknown,
eDVBFrontendParametersSatellite.System_DVB_S, eDVBFrontendParametersSatellite.Modulation_QPSK,
eDVBFrontendParametersSatellite.RollOff_alpha_0_35, eDVBFrontendParametersSatellite.Pilot_Off),
(12070000, 27500000, eDVBFrontendParametersSatellite.FEC_3_4, 235,
eDVBFrontendParametersSatellite.Polarisation_Horizontal, eDVBFrontendParametersSatellite.Inversion_Unknown,
eDVBFrontendParametersSatellite.System_DVB_S, eDVBFrontendParametersSatellite.Modulation_QPSK,
eDVBFrontendParametersSatellite.RollOff_alpha_0_35, eDVBFrontendParametersSatellite.Pilot_Off))
self["actions"] = ActionMap(["SetupActions", "MenuActions"],
{
"ok": self.keyGo,
"save": self.keyGo,
"cancel": self.keyCancel,
"menu": self.closeRecursive,
}, -2)
providerList = list(x[0] for x in sorted(self.providers.iteritems(), key = operator.itemgetter(1)))
lastConfiguration = eval(config.misc.fastscan.last_configuration.value)
if not lastConfiguration:
lastConfiguration = (nimList[0][0], providerList[0], True, True, False)
self.scan_nims = ConfigSelection(default = lastConfiguration[0], choices = nimList)
self.scan_provider = ConfigSelection(default = lastConfiguration[1], choices = providerList)
self.scan_hd = ConfigYesNo(default = lastConfiguration[2])
self.scan_keepnumbering = ConfigYesNo(default = lastConfiguration[3])
self.scan_keepsettings = ConfigYesNo(default = lastConfiguration[4])
self.list = []
self.tunerEntry = getConfigListEntry(_("Tuner"), self.scan_nims)
self.list.append(self.tunerEntry)
self.scanProvider = getConfigListEntry(_("Provider"), self.scan_provider)
self.list.append(self.scanProvider)
self.scanHD = getConfigListEntry(_("HD list"), self.scan_hd)
self.list.append(self.scanHD)
self.list.append(getConfigListEntry(_("Use fastscan channel numbering"), self.scan_keepnumbering))
self.list.append(getConfigListEntry(_("Use fastscan channel names"), self.scan_keepsettings))
ConfigListScreen.__init__(self, self.list)
self["config"].list = self.list
self["config"].l.setList(self.list)
self.finished_cb = None
self["introduction"] = Label(_("Select your provider, and press OK to start the scan"))
def addSatTransponder(self, tlist, frequency, symbol_rate, polarisation, fec, inversion, orbital_position, system, modulation, rolloff, pilot):
print "Add Sat: frequ: " + str(frequency) + " symbol: " + str(symbol_rate) + " pol: " + str(polarisation) + " fec: " + str(fec) + " inversion: " + str(inversion) + " modulation: " + str(modulation) + " system: " + str(system) + " rolloff" + str(rolloff) + " pilot" + str(pilot)
print "orbpos: " + str(orbital_position)
parm = eDVBFrontendParametersSatellite()
parm.modulation = modulation
parm.system = system
parm.frequency = frequency * 1000
parm.symbol_rate = symbol_rate * 1000
parm.polarisation = polarisation
parm.fec = fec
parm.inversion = inversion
parm.orbital_position = orbital_position
parm.rolloff = rolloff
parm.pilot = pilot
tlist.append(parm)
def readXML(self, xml):
tlist = []
import xml.dom.minidom as minidom
xmldoc = "/usr/lib/enigma2/python/Plugins/SystemPlugins/FastScan/xml/" + xml + ".xml"
xmldoc = minidom.parse(xmldoc)
tr_list = xmldoc.getElementsByTagName('transporder')
for lista in tr_list:
frequency = lista.getAttribute("frequency")
symbolrate = lista.getAttribute("symbolrate")
fec = lista.getAttribute("fec")
orbpos = lista.getAttribute("orbpos")
pol = lista.getAttribute("pol")
system = lista.getAttribute("system")
modulation = lista.getAttribute("modulation")
self.frequency = frequency
self.symbolrate = symbolrate
if pol == "H":
pol = 0
elif pol == "V":
pol = 1
elif pol == "L":
pol = 2
elif pol == "R":
pol = 3
self.polarization = pol # 0 - H, 1 - V, 2- CL, 3 - CR
if fec == "Auto":
fec = 0
elif fec == "1/2":
fec = 1
elif fec == "2/3":
fec = 2
elif fec == "3/4":
fec = 3
elif fec == "3/5":
fec = 4
elif fec == "4/5":
fec = 5
elif fec == "5/6":
fec = 6
elif fec == "7/8":
fec = 7
elif fec == "8/9":
fec = 8
elif fec == "9/10":
fec = 9
self.fec = fec # 0 - Auto, 1 - 1/2, 2 - 2/3, 3 - 3/4, 4 - 3/5, 5 - 4/5, 6 - 5/6, 7 - 7/8, 8 - 8/9 , 9 - 9/10,
self.inversion = 2 # 0 - off, 1 -on, 2 - AUTO
self.orbpos = orbpos
if system == "DVBS":
system = 0
elif system == "DVBS2":
system = 1
self.system = system # DVB-S = 0, DVB-S2 = 1
if modulation == "QPSK":
modulation = 0
elif modulation == "8PSK":
modulation = 1
self.modulation = modulation # 0- QPSK, 1 -8PSK
self.rolloff = 0 #
self.pilot = 2 # 0 - off, 1 - on 2 - AUTO
print "add sat transponder"
self.addSatTransponder(tlist, int(self.frequency),
int(self.symbolrate),
int(self.polarization),
int(fec),
int(self.inversion),
int(orbpos),
int(self.system),
int(self.modulation),
int(self.rolloff),
int(self.pilot))
self.session.open(ServiceScan, [{"transponders": tlist, "feid": int(self.scan_nims.getValue()), "flags": 0, "networkid": 0}])
def keyGo(self):
prov = self.scan_provider.value.lower()
if prov == "tricolor" or prov == "kontinent" or prov == "telekarta" or prov == "ntvplus" or prov == "raduga":
self.readXML(self.scan_provider.value.lower())
else:
config.misc.fastscan.last_configuration.value = `(self.scan_nims.value, self.scan_provider.value, self.scan_hd.value, self.scan_keepnumbering.value, self.scan_keepsettings.value)`
config.misc.fastscan.save()
self.startScan()
def getTransponderParameters(self, number):
transponderParameters = eDVBFrontendParametersSatellite()
transponderParameters.frequency = self.transponders[number][0]
transponderParameters.symbol_rate = self.transponders[number][1]
transponderParameters.fec = self.transponders[number][2]
transponderParameters.orbital_position = self.transponders[number][3]
transponderParameters.polarisation = self.transponders[number][4]
transponderParameters.inversion = self.transponders[number][5]
transponderParameters.system = self.transponders[number][6]
transponderParameters.modulation = self.transponders[number][7]
transponderParameters.rolloff = self.transponders[number][8]
transponderParameters.pilot = self.transponders[number][9]
return transponderParameters
def startScan(self):
pid = self.providers[self.scan_provider.getValue()][1]
if self.scan_hd.getValue() and self.providers[self.scan_provider.value][2]:
pid += 1
if self.scan_nims.value:
self.session.open(FastScanStatus, scanTuner = int(self.scan_nims.getValue()),
transponderParameters = self.getTransponderParameters(self.providers[self.scan_provider.getValue()][0]),
scanPid = pid, keepNumbers = self.scan_keepnumbering.getValue(), keepSettings = self.scan_keepsettings.getValue(),
providerName = self.scan_provider.getText())
def keyCancel(self):
self.close()
def FastScanMain(session, **kwargs):
if session.nav.RecordTimer.isRecording():
session.open(MessageBox, _("A recording is currently running. Please stop the recording before trying to scan."), MessageBox.TYPE_ERROR)
else:
nimList = []
# collect all nims which are *not* set to "nothing"
for n in nimmanager.nim_slots:
if not n.isCompatible("DVB-S"):
continue
if n.config_mode == "nothing":
continue
if n.config_mode in ("loopthrough", "satposdepends"):
root_id = nimmanager.sec.getRoot(n.slot_id, int(n.config.connectedTo.value))
if n.type == nimmanager.nim_slots[root_id].type: # check if connected from a DVB-S to DVB-S2 Nim or vice versa
continue
nimList.append((str(n.slot), n.friendly_full_description))
if nimList:
session.open(FastScanScreen, nimList)
else:
session.open(MessageBox, _("No suitable sat tuner found!"), MessageBox.TYPE_ERROR)
def FastScanStart(menuid, **kwargs):
from Components.About import about
if menuid == "scan":
return [(_("Fast Scan"), FastScanMain, "fastscan", None)]
else:
return []
def Plugins(**kwargs):
if nimmanager.hasNimType("DVB-S"):
return PluginDescriptor(name=_("Fast Scan"), description="Scan Dutch/Belgian sat provider", where = PluginDescriptor.WHERE_MENU, fnc=FastScanStart)
else:
return []
|
from .utils import *
ISPECS = []
@ispec_ia32("16>[ {0f}{77} ]", mnemonic = "EMMS", type=type_cpu_state)
def ia32_nooperand(obj):
pass
@ispec_ia32("16>[ {d9} reg(3) 0 0011 ]", mnemonic = "FLD") # D9 C0+i
@ispec_ia32("16>[ {d9} reg(3) 1 0011 ]", mnemonic = "FXCH") # D9 C8+i
@ispec_ia32("16>[ {d8} reg(3) 0 1011 ]", mnemonic = "FCOM") # D8 D0+i
@ispec_ia32("16>[ {d8} reg(3) 1 1011 ]", mnemonic = "FCOMP") # D8 D8+i
@ispec_ia32("16>[ {dd} reg(3) 0 0011 ]", mnemonic = "FFREE") # DD C0+i
@ispec_ia32("16>[ {dd} reg(3) 0 1011 ]", mnemonic = "FST") # DD D0+i
@ispec_ia32("16>[ {dd} reg(3) 1 1011 ]", mnemonic = "FSTP") # DD D8+i
@ispec_ia32("16>[ {dd} reg(3) 0 0111 ]", mnemonic = "FUCOM") # DD E0+i
@ispec_ia32("16>[ {dd} reg(3) 1 0111 ]", mnemonic = "FUCOMP") # DD E8+i
def ia32_fpu_reg(obj, reg):
obj.operands = [env.st(reg)]
obj.type = type_data_processing
@ispec_ia32("*>[ {d8} /0 ]", mnemonic = "FADD", _size = 32)
@ispec_ia32("*>[ {d8} /1 ]", mnemonic = "FMUL", _size = 32)
@ispec_ia32("*>[ {d8} /2 ]", mnemonic = "FCOM", _size = 32)
@ispec_ia32("*>[ {d8} /3 ]", mnemonic = "FCOMP", _size = 32)
@ispec_ia32("*>[ {d8} /4 ]", mnemonic = "FSUB", _size = 32)
@ispec_ia32("*>[ {d8} /5 ]", mnemonic = "FSUBR", _size = 32)
@ispec_ia32("*>[ {d8} /6 ]", mnemonic = "FDIV", _size = 32)
@ispec_ia32("*>[ {d8} /7 ]", mnemonic = "FDIVR", _size = 32)
@ispec_ia32("*>[ {d9} /0 ]", mnemonic = "FLD", _size = 32)
@ispec_ia32("*>[ {d9} /2 ]", mnemonic = "FST", _size = 32)
@ispec_ia32("*>[ {d9} /3 ]", mnemonic = "FSTP", _size = 32)
@ispec_ia32("*>[ {d9} /4 ]", mnemonic = "FLDENV", _size = 28*8) #TODO : 16 bits size
@ispec_ia32("*>[ {d9} /5 ]", mnemonic = "FLDCW", _size = 16)
@ispec_ia32("*>[ {d9} /6 ]", mnemonic = "FNSTENV", _size = 28*8)
@ispec_ia32("*>[ {d9} /7 ]", mnemonic = "FNSTCW", _size = 16)
@ispec_ia32("*>[ {da} /0 ]", mnemonic = "FIADD", _size = 32)
@ispec_ia32("*>[ {da} /1 ]", mnemonic = "FIMUL", _size = 32)
@ispec_ia32("*>[ {da} /2 ]", mnemonic = "FICOM", _size = 32)
@ispec_ia32("*>[ {da} /3 ]", mnemonic = "FICOMP", _size = 32)
@ispec_ia32("*>[ {da} /4 ]", mnemonic = "FISUB", _size = 32)
@ispec_ia32("*>[ {da} /5 ]", mnemonic = "FISUBR", _size = 32)
@ispec_ia32("*>[ {da} /6 ]", mnemonic = "FIDIV", _size = 32)
@ispec_ia32("*>[ {da} /7 ]", mnemonic = "FIDIVR", _size = 32)
@ispec_ia32("*>[ {db} /0 ]", mnemonic = "FILD", _size = 32)
@ispec_ia32("*>[ {db} /1 ]", mnemonic = "FISTTP", _size = 32)
@ispec_ia32("*>[ {db} /2 ]", mnemonic = "FIST", _size = 32)
@ispec_ia32("*>[ {db} /3 ]", mnemonic = "FISTP", _size = 32)
@ispec_ia32("*>[ {db} /5 ]", mnemonic = "FLD", _size = 80)
@ispec_ia32("*>[ {db} /7 ]", mnemonic = "FSTP", _size = 80)
@ispec_ia32("*>[ {dc} /0 ]", mnemonic = "FADD", _size = 64)
@ispec_ia32("*>[ {dc} /1 ]", mnemonic = "FMUL", _size = 64)
@ispec_ia32("*>[ {dc} /2 ]", mnemonic = "FCOM", _size = 64)
@ispec_ia32("*>[ {dc} /3 ]", mnemonic = "FCOMP", _size = 64)
@ispec_ia32("*>[ {dc} /4 ]", mnemonic = "FSUB", _size = 64)
@ispec_ia32("*>[ {dc} /5 ]", mnemonic = "FSUBR", _size = 64)
@ispec_ia32("*>[ {dc} /6 ]", mnemonic = "FDIV", _size = 64)
@ispec_ia32("*>[ {dc} /7 ]", mnemonic = "FDIVR", _size = 64)
@ispec_ia32("*>[ {dd} /0 ]", mnemonic = "FLD", _size = 64)
@ispec_ia32("*>[ {dd} /1 ]", mnemonic = "FISTTP", _size = 64)
@ispec_ia32("*>[ {dd} /2 ]", mnemonic = "FST", _size = 64)
@ispec_ia32("*>[ {dd} /3 ]", mnemonic = "FSTP", _size = 64)
@ispec_ia32("*>[ {dd} /4 ]", mnemonic = "FRSTOR", _size = 108*8) #TODO : 16 bits size
@ispec_ia32("*>[ {dd} /6 ]", mnemonic = "FNSAVE", _size = 108*8) #TODO : 16 bits size
@ispec_ia32("*>[ {de} /0 ]", mnemonic = "FIADD", _size = 16)
@ispec_ia32("*>[ {de} /1 ]", mnemonic = "FIMUL", _size = 16)
@ispec_ia32("*>[ {de} /2 ]", mnemonic = "FICOM", _size = 16)
@ispec_ia32("*>[ {de} /3 ]", mnemonic = "FICOMP", _size = 16)
@ispec_ia32("*>[ {de} /4 ]", mnemonic = "FISUB", _size = 16)
@ispec_ia32("*>[ {de} /5 ]", mnemonic = "FISUBR", _size = 16)
@ispec_ia32("*>[ {de} /6 ]", mnemonic = "FIDIV", _size = 16)
@ispec_ia32("*>[ {de} /7 ]", mnemonic = "FIDIVR", _size = 16)
@ispec_ia32("*>[ {df} /0 ]", mnemonic = "FILD", _size = 16)
@ispec_ia32("*>[ {df} /1 ]", mnemonic = "FISTTP", _size = 16)
@ispec_ia32("*>[ {df} /2 ]", mnemonic = "FIST", _size = 16)
@ispec_ia32("*>[ {df} /3 ]", mnemonic = "FISTP", _size = 16)
@ispec_ia32("*>[ {df} /4 ]", mnemonic = "FBLD", _size = 80)
@ispec_ia32("*>[ {df} /5 ]", mnemonic = "FILD", _size = 64)
@ispec_ia32("*>[ {df} /6 ]", mnemonic = "FBSTP", _size = 80)
@ispec_ia32("*>[ {df} /7 ]", mnemonic = "FISTP", _size = 64)
@ispec_ia32("*>[ {9b}{d9} /7 ]", mnemonic = "FSTCW", _size = 16)
@ispec_ia32("*>[ {9b}{d9} /6 ]", mnemonic = "FSTENV", _size = 28*8)
@ispec_ia32("*>[ {9b}{dd} /6 ]", mnemonic = "FSAVE", _size = 108*8) #TODO : 16 bits size
@ispec_ia32("*>[ {0f}{ae} /0 ]", mnemonic = "FXSAVE", _size = 512*8)
@ispec_ia32("*>[ {0f}{ae} /1 ]", mnemonic = "FXRSTOR", _size = 512*8)
def ia32_fpu_mem(obj, Mod, RM, data, _size):
op1, data = getModRM(obj,Mod,RM,data)
if op1._is_reg: raise InstructionError(obj)
op1.size = _size
obj.operands = [op1]
obj.type = type_data_processing
@ispec_ia32("24>[ {9b}{df}{e0} ]", mnemonic = "FSTSW")
@ispec_ia32("16>[ {df}{e0} ]", mnemonic = "FNSTSW")
def ia32_fstsw_ax(obj):
obj.operands = [ env.getreg(0, 16) ]
obj.type = type_data_processing
@ispec_ia32("*>[ {9b}{dd} /7 ]", mnemonic = "FSTSW")
@ispec_ia32("*>[ {dd} /7 ]", mnemonic = "FNSTSW")
def ia32_fstsw(obj, Mod, RM, data):
op1,data = getModRM(obj,Mod,RM,data)
obj.operands = [op1]
obj.type = type_data_processing
@ispec_ia32("16>[ {d9}{d0} ]", mnemonic = "FNOP")
@ispec_ia32("16>[ {d9}{e0} ]", mnemonic = "FCHS")
@ispec_ia32("16>[ {d9}{e1} ]", mnemonic = "FABS")
@ispec_ia32("16>[ {d9}{e4} ]", mnemonic = "FTST")
@ispec_ia32("16>[ {d9}{e5} ]", mnemonic = "FXAM")
@ispec_ia32("16>[ {d9}{e8} ]", mnemonic = "FLD1")
@ispec_ia32("16>[ {d9}{e9} ]", mnemonic = "FLDL2T")
@ispec_ia32("16>[ {d9}{ea} ]", mnemonic = "FLDL2E")
@ispec_ia32("16>[ {d9}{eb} ]", mnemonic = "FLDPI")
@ispec_ia32("16>[ {d9}{ec} ]", mnemonic = "FLDLG2")
@ispec_ia32("16>[ {d9}{ed} ]", mnemonic = "FLDLN2")
@ispec_ia32("16>[ {d9}{ee} ]", mnemonic = "FLDZ")
@ispec_ia32("16>[ {d9}{f0} ]", mnemonic = "F2XM1")
@ispec_ia32("16>[ {d9}{f1} ]", mnemonic = "FYL2X")
@ispec_ia32("16>[ {d9}{f2} ]", mnemonic = "FPTAN")
@ispec_ia32("16>[ {d9}{f3} ]", mnemonic = "FPATAN")
@ispec_ia32("16>[ {d9}{f4} ]", mnemonic = "FXTRACT")
@ispec_ia32("16>[ {d9}{f5} ]", mnemonic = "FPREM1")
@ispec_ia32("16>[ {d9}{f6} ]", mnemonic = "FDECSTP")
@ispec_ia32("16>[ {d9}{f7} ]", mnemonic = "FINCSTP")
@ispec_ia32("16>[ {d9}{f8} ]", mnemonic = "FPREM")
@ispec_ia32("16>[ {d9}{f9} ]", mnemonic = "FYL2XP1")
@ispec_ia32("16>[ {d9}{fa} ]", mnemonic = "FSQRT")
@ispec_ia32("16>[ {d9}{fb} ]", mnemonic = "FSINCOS")
@ispec_ia32("16>[ {d9}{fc} ]", mnemonic = "FRNDINT")
@ispec_ia32("16>[ {d9}{fd} ]", mnemonic = "FSCALE")
@ispec_ia32("16>[ {d9}{fe} ]", mnemonic = "FSIN")
@ispec_ia32("16>[ {d9}{ff} ]", mnemonic = "FCOS")
@ispec_ia32("16>[ {da}{e9} ]", mnemonic = "FUCOMPP")
@ispec_ia32("16>[ {db}{e2} ]", mnemonic = "FNCLEX")
@ispec_ia32("16>[ {db}{e3} ]", mnemonic = "FNINIT")
@ispec_ia32("16>[ {de}{d9} ]", mnemonic = "FCOMPP")
@ispec_ia32("24>[ {9b}{db}{e2} ]", mnemonic = "FCLEX")
@ispec_ia32("24>[ {9b}{db}{e3} ]", mnemonic = "FINIT")
def fld_fpu_noop(obj):
obj.type = type_data_processing
@ispec_ia32("16>[ {d8} reg(3) 0 0111 ]", mnemonic = "FSUB", _src=None, _dest=0) # D8 E0+i
@ispec_ia32("16>[ {dc} reg(3) 1 0111 ]", mnemonic = "FSUB", _src=0, _dest=None) # DC E8+i
@ispec_ia32("16>[ {de} reg(3) 1 0111 ]", mnemonic = "FSUBP", _src=0, _dest=None) # DE E8+i
@ispec_ia32("16>[ {d8} reg(3) 1 0111 ]", mnemonic = "FSUBR", _src=None, _dest=0) # D8 E8+i
@ispec_ia32("16>[ {dc} reg(3) 0 0111 ]", mnemonic = "FSUBR", _src=0, _dest=None) # DC E0+i
@ispec_ia32("16>[ {de} reg(3) 0 0111 ]", mnemonic = "FSUBRP", _src=0, _dest=None) # DE E0+i
@ispec_ia32("16>[ {d8} reg(3) 0 0011 ]", mnemonic = "FADD", _src=None, _dest=0) # D8 C0+i
@ispec_ia32("16>[ {dc} reg(3) 0 0011 ]", mnemonic = "FADD", _src=0, _dest=None) # DC C0+i
@ispec_ia32("16>[ {de} reg(3) 0 0011 ]", mnemonic = "FADDP", _src=0, _dest=None) # DE C0+i
@ispec_ia32("16>[ {d8} reg(3) 0 1111 ]", mnemonic = "FDIV", _src=None, _dest=0) # D8 F0+i
@ispec_ia32("16>[ {dc} reg(3) 1 1111 ]", mnemonic = "FDIV", _src=0, _dest=None) # DC F8+i
@ispec_ia32("16>[ {de} reg(3) 1 1111 ]", mnemonic = "FDIVP", _src=0, _dest=None) # DE F8+i
@ispec_ia32("16>[ {d8} reg(3) 1 1111 ]", mnemonic = "FDIVR", _src=None, _dest=0) # D8 F8+i
@ispec_ia32("16>[ {dc} reg(3) 0 1111 ]", mnemonic = "FDIVR", _src=0, _dest=None) # DC F0+i
@ispec_ia32("16>[ {de} reg(3) 0 1111 ]", mnemonic = "FDIVRP", _src=0, _dest=None) # DE F0+i
@ispec_ia32("16>[ {d8} reg(3) 1 0011 ]", mnemonic = "FMUL", _src=None, _dest=0) # D8 C8+i
@ispec_ia32("16>[ {dc} reg(3) 1 0011 ]", mnemonic = "FMUL", _src=0, _dest=None) # DC C8+i
@ispec_ia32("16>[ {de} reg(3) 1 0011 ]", mnemonic = "FMULP", _src=0, _dest=None) # DE C8+i
@ispec_ia32("16>[ {da} reg(3) 0 0011 ]", mnemonic = "FCMOVB", _src=None, _dest=0) # DA C0+i
@ispec_ia32("16>[ {da} reg(3) 1 0011 ]", mnemonic = "FCMOVE", _src=None, _dest=0) # DA C8+i
@ispec_ia32("16>[ {da} reg(3) 0 1011 ]", mnemonic = "FCMOVBE", _src=None, _dest=0) # DA D0+i
@ispec_ia32("16>[ {da} reg(3) 1 1011 ]", mnemonic = "FCMOVU", _src=None, _dest=0) # DA D8+i
@ispec_ia32("16>[ {db} reg(3) 0 0011 ]", mnemonic = "FCMOVNB", _src=None, _dest=0) # DB C0+i
@ispec_ia32("16>[ {db} reg(3) 1 0011 ]", mnemonic = "FCMOVNE", _src=None, _dest=0) # DB C8+i
@ispec_ia32("16>[ {db} reg(3) 0 1011 ]", mnemonic = "FCMOVNBE", _src=None, _dest=0) # DB D0+i
@ispec_ia32("16>[ {db} reg(3) 1 1011 ]", mnemonic = "FCMOVNU", _src=None, _dest=0) # DB D8+i
@ispec_ia32("16>[ {db} reg(3) 0 1111 ]", mnemonic = "FCOMI", _src=None, _dest=0) # DB F0+i
@ispec_ia32("16>[ {df} reg(3) 0 1111 ]", mnemonic = "FCOMIP", _src=None, _dest=0) # DF F0+i
@ispec_ia32("16>[ {db} reg(3) 1 0111 ]", mnemonic = "FUCOMI", _src=None, _dest=0) # DB E8+i
@ispec_ia32("16>[ {df} reg(3) 1 0111 ]", mnemonic = "FUCOMIP", _src=None, _dest=0) # DF E8+i
def ia32_fpu_st(obj, reg, _dest, _src):
    # the 3-bit reg field selects st(i); the implicit operand is st(0), taken by whichever of _src/_dest the spec leaves as None
if _dest is None and _src is None:
return
if _dest is None:
_dest = reg
elif _src is None:
_src = reg
op1 = env.st(_dest)
op2 = env.st(_src)
obj.operands = [op1, op2]
obj.type = type_data_processing
|
"""
Preferences is a collection of utilities to display, read & write preferences.
"""
from __future__ import absolute_import
import __init__
import cStringIO
import sys
from skeinforge_tools.skeinforge_utilities import gcodec
import os
import webbrowser
try:
import Tkinter
except ImportError:
	print( 'You do not have Tkinter, which is needed for the graphical interface; you will only be able to use the command line.' )
	print( 'Information on how to download Tkinter is at:\nwww.tcl.tk/software/tcltk/' )
__author__ = "Enrique Perez (perez_enrique@yahoo.com)"
__date__ = "$Date: 2008/23/04 $"
__license__ = "GPL 3.0"
globalIsMainLoopRunning = False
globalSpreadsheetSeparator = '\t'
def displayDialog( displayPreferences ):
"Display the preferences dialog."
readPreferences( displayPreferences )
root = Tkinter.Tk()
preferencesDialog = PreferencesDialog( displayPreferences, root )
global globalIsMainLoopRunning
if globalIsMainLoopRunning:
return
globalIsMainLoopRunning = True
root.mainloop()
globalIsMainLoopRunning = False
def getArchiveText( archivablePreferences ):
"Get the text representation of the archive."
archiveWriter = cStringIO.StringIO()
archiveWriter.write( 'Format is tab separated preferences.\n' )
for preference in archivablePreferences.archive:
preference.writeToArchiveWriter( archiveWriter )
return archiveWriter.getvalue()
def getFileInGivenDirectory( directory, fileName ):
"Get the file from the fileName or the lowercase fileName in the given directory."
directoryListing = os.listdir( directory )
if fileName in directoryListing:
return getFileTextGivenDirectoryFileName( directory, fileName )
lowerFilename = fileName.lower()
if lowerFilename in directoryListing:
return getFileTextGivenDirectoryFileName( directory, lowerFilename )
return ''
def getFileInGivenPreferencesDirectory( directory, fileName ):
"Get the file from the fileName or the lowercase fileName in the given directory, if there is no file look in the gcode_scripts folder in the preferences directory."
if directory == '':
directory = os.getcwd()
fileInGivenPreferencesDirectory = getFileInGivenDirectory( directory, fileName )
if fileInGivenPreferencesDirectory != '':
return fileInGivenPreferencesDirectory
gcodeDirectoryPath = os.path.join( getPreferencesDirectoryPath(), 'gcode_scripts' )
try:
os.mkdir( gcodeDirectoryPath )
except OSError:
pass
return getFileInGivenDirectory( gcodeDirectoryPath, fileName )
def getFileTextGivenDirectoryFileName( directory, fileName ):
"Get the entire text of a file with the given file name in the given directory."
absoluteFilePath = os.path.join( directory, fileName )
return gcodec.getFileText( absoluteFilePath )
preferencesDirectory = os.path.join( os.path.abspath(os.path.dirname(sys.argv[0])), 'prefs/skeinforge' )
def getPreferencesDirectoryPath():
"Get the preferences directory path, which is the home directory joined with .skeinforge."
global preferencesDirectory
return preferencesDirectory
def setPreferencesDirectoryPath(path):
"Get the preferences directory path, which is the home directory joined with .skeinforge."
global preferencesDirectory
preferencesDirectory = path
def getPreferencesFilePath( fileName ):
"Get the preferences file path, which is the home directory joined with .skeinforge and fileName."
directoryName = getPreferencesDirectoryPath()
try:
os.mkdir( directoryName )
except OSError:
pass
return os.path.join( directoryName, fileName )
def setStartFile(fileName):
"Get the preferences directory path, which is the home directory joined with .skeinforge."
global startFile
startFile = fileName
def setEndFile(fileName):
"Get the preferences directory path, which is the home directory joined with .skeinforge."
global endFile
endFile = fileName
def readPreferences( archivablePreferences ):
"Set an archive to the preferences read from a file."
text = gcodec.getFileText( archivablePreferences.fileNamePreferences )
if text == '':
print( 'Since the preferences file:' )
print( archivablePreferences.fileNamePreferences )
print( 'does not exist, the default preferences will be written to that file.' )
text = gcodec.getFileText( os.path.join( 'defaults', os.path.basename( archivablePreferences.fileNamePreferences ) ) )
if text != '':
readPreferencesFromText( archivablePreferences, text )
writePreferences( archivablePreferences )
else:
readPreferencesFromText( archivablePreferences, text )
applyPreferencesOverrides( archivablePreferences )
preferenceOverrides = []
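# (executeTitle, preferenceName, value) tuples registered through addPreferenceOverride, applied on top of the values read from the preferences file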
def addPreferenceOverride( executeTitle, prefName, value ):
preferenceOverrides.append( ( executeTitle, prefName, value ) )
def applyPreferencesOverrides( archivablePreferences ):
"Apply any overrides specified."
for (exTitle,prefname,value) in preferenceOverrides:
if exTitle == archivablePreferences.executeTitle:
applySingleOverride( archivablePreferences,prefname,value )
def applySingleOverride( archivablePreferences,prefname,value ):
"Apply an override for a single preference."
for preference in archivablePreferences.archive:
if preference.name == prefname:
preference.setValueToString(value)
def readPreferencesFromText( archivablePreferences, text ):
"Set an archive to the preferences read from a text."
lines = gcodec.getTextLines( text )
preferenceTable = {}
for preference in archivablePreferences.archive:
preference.addToPreferenceTable( preferenceTable )
for lineIndex in xrange( len( lines ) ):
setArchiveToLine( lineIndex, lines, preferenceTable )
def setArchiveToLine( lineIndex, lines, preferenceTable ):
"Set an archive to a preference line."
line = lines[ lineIndex ]
splitLine = line.split( globalSpreadsheetSeparator )
if len( splitLine ) < 2:
return
filePreferenceName = splitLine[ 0 ]
if filePreferenceName in preferenceTable:
preferenceTable[ filePreferenceName ].setValueToSplitLine( lineIndex, lines, splitLine )
def setHelpPreferencesFileNameTitleWindowPosition( displayPreferences, fileNameHelp ):
"Set the help & preferences file path, the title and the window position archiver."
lastDotIndex = fileNameHelp.rfind( '.' )
lowerName = fileNameHelp[ : lastDotIndex ]
lastTruncatedDotIndex = lowerName.rfind( '.' )
lowerName = lowerName[ lastTruncatedDotIndex + 1 : ]
displayPreferences.title = lowerName.replace( '_', ' ' ).capitalize() + ' Preferences'
windowPositionName = 'windowPosition' + displayPreferences.title
displayPreferences.windowPositionBeholdPreferences = WindowPosition().getFromValue( 'windowPositionBehold Preferences', '0+0' )
displayPreferences.archive.append( displayPreferences.windowPositionBeholdPreferences )
displayPreferences.fileNamePreferences = getPreferencesFilePath( lowerName + '.csv' )
displayPreferences.fileNameHelp = fileNameHelp
def writePreferences( archivablePreferences ):
"Write the preferences to a file."
gcodec.writeFileText( archivablePreferences.fileNamePreferences, getArchiveText( archivablePreferences ) )
class AddListboxSelection:
"A class to add the selection of a listbox preference."
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
self.entry = Tkinter.Entry( preferencesDialog.master )
self.entry.bind( '<Return>', self.addSelectionWithEvent )
self.entry.grid( row = preferencesDialog.row, column = 1, columnspan = 2, sticky = Tkinter.W )
self.addButton = Tkinter.Button( preferencesDialog.master, text = 'Add Listbox Selection', command = self.addSelection )
self.addButton.grid( row = preferencesDialog.row, column = 0 )
preferencesDialog.row += 1
def addSelection( self ):
"Add the selection of a listbox preference."
entryText = self.entry.get()
if entryText == '':
print( 'To add to the selection, enter the material name.' )
return
self.entry.delete( 0, Tkinter.END )
self.listboxPreference.listPreference.value.append( entryText )
self.listboxPreference.listPreference.value.sort()
self.listboxPreference.listbox.delete( 0, Tkinter.END )
self.listboxPreference.value = entryText
self.listboxPreference.setListboxItems()
self.listboxPreference.setToDisplay()
def addSelectionWithEvent( self, event ):
"Add the selection of a listbox preference, given an event."
self.addSelection()
def addToPreferenceTable( self, preferenceTable ):
"Do nothing because the add listbox selection is not archivable."
pass
def getFromListboxPreference( self, listboxPreference ):
"Initialize."
self.listboxPreference = listboxPreference
return self
def setToDisplay( self ):
"Do nothing because the add listbox selection is not archivable."
pass
def writeToArchiveWriter( self, archiveWriter ):
"Do nothing because the add listbox selection is not archivable."
pass
class StringPreference:
"A class to display, read & write a string."
def __init__( self ):
"Set the update function to none."
self.updateFunction = None
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
self.entry = Tkinter.Entry( preferencesDialog.master )
self.setStateToValue()
self.entry.grid( row = preferencesDialog.row, column = 2, columnspan = 2, sticky = Tkinter.W )
self.label = Tkinter.Label( preferencesDialog.master, text = self.name )
self.label.grid( row = preferencesDialog.row, column = 0, columnspan = 2, sticky = Tkinter.W )
preferencesDialog.row += 1
def addToPreferenceTable( self, preferenceTable ):
"Add this to the preference table."
preferenceTable[ self.name ] = self
def getFromValue( self, name, value ):
"Initialize."
self.value = value
self.name = name
return self
def setStateToValue( self ):
"Set the entry to the value."
try:
self.entry.delete( 0, Tkinter.END )
self.entry.insert( 0, self.value )
except:
pass
def setToDisplay( self ):
"Set the string to the entry field."
valueString = self.entry.get()
self.setValueToString( valueString )
def setUpdateFunction( self, updateFunction ):
"Set the update function."
self.updateFunction = updateFunction
def setValueToSplitLine( self, lineIndex, lines, splitLine ):
"Set the value to the second word of a split line."
self.setValueToString( splitLine[ 1 ] )
def setValueToString( self, valueString ):
"Set the string to the value string."
self.value = valueString
def writeToArchiveWriter( self, archiveWriter ):
"Write tab separated name and value to the archive writer."
archiveWriter.write( self.name + globalSpreadsheetSeparator + str( self.value ) + '\n' )
class BooleanPreference( StringPreference ):
"A class to display, read & write a boolean."
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
self.checkbutton = Tkinter.Checkbutton( preferencesDialog.master, command = self.toggleCheckbox, text = self.name )
self.checkbutton.grid( row = preferencesDialog.row, columnspan = 4, sticky = Tkinter.W )
self.setStateToValue()
preferencesDialog.row += 1
def setStateToValue( self ):
"Set the checkbox to the boolean."
try:
if self.value:
self.checkbutton.select()
else:
self.checkbutton.deselect()
except:
pass
def setToDisplay( self ):
"Do nothing because toggleCheckbox is handling the value."
pass
def setValueToString( self, valueString ):
"Set the boolean to the string."
self.value = ( valueString.lower() == 'true' )
def toggleCheckbox( self ):
"Workaround for Tkinter bug, toggle the value."
self.value = not self.value
self.setStateToValue()
if self.updateFunction != None:
self.updateFunction()
class DeleteListboxSelection( AddListboxSelection ):
"A class to delete the selection of a listbox preference."
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
self.deleteButton = Tkinter.Button( preferencesDialog.master, text = "Delete Listbox Selection", command = self.deleteSelection )
self.deleteButton.grid( row = preferencesDialog.row, column = 0 )
preferencesDialog.row += 1
def deleteSelection( self ):
"Delete the selection of a listbox preference."
self.listboxPreference.setToDisplay()
if self.listboxPreference.value not in self.listboxPreference.listPreference.value:
return
self.listboxPreference.listPreference.value.remove( self.listboxPreference.value )
self.listboxPreference.listbox.delete( 0, Tkinter.END )
self.listboxPreference.setListboxItems()
self.listboxPreference.listbox.select_set( 0 )
self.listboxPreference.setToDisplay()
class DisplayToolButton:
"A class to display the tool preferences dialog."
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
withSpaces = self.name.lower().replace( '_', ' ' )
words = withSpaces.split( ' ' )
capitalizedStrings = []
for word in words:
capitalizedStrings.append( word.capitalize() )
capitalizedName = ' '.join( capitalizedStrings )
self.displayButton = Tkinter.Button( preferencesDialog.master, activebackground = 'black', activeforeground = 'violet', command = self.displayTool, text = capitalizedName )
if preferencesDialog.displayToolButtonStart:
self.displayButton.grid( row = preferencesDialog.row, column = 0 )
preferencesDialog.row += 1
preferencesDialog.displayToolButtonStart = False
else:
self.displayButton.grid( row = preferencesDialog.row - 1, column = 3 )
preferencesDialog.displayToolButtonStart = True
def addToPreferenceTable( self, preferenceTable ):
"Do nothing because the add listbox selection is not archivable."
pass
def displayTool( self ):
"Display the tool preferences dialog."
pluginModule = gcodec.getModule( self.name, self.folderName, self.moduleFilename )
if pluginModule != None:
pluginModule.main()
def getFromFolderName( self, folderName, moduleFilename, name ):
"Initialize."
self.folderName = folderName
self.moduleFilename = moduleFilename
self.name = name
return self
def getLowerName( self ):
"Get the lower case name."
return self.name.lower()
def setToDisplay( self ):
"Do nothing because the display tool button is not archivable."
pass
def writeToArchiveWriter( self, archiveWriter ):
"Do nothing because the display tool button is not archivable."
pass
class DisplayToolButtonBesidePrevious( DisplayToolButton ):
"A class to display the tool preferences dialog beside the previous preference dialog element."
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
withSpaces = self.name.lower().replace( '_', ' ' )
words = withSpaces.split( ' ' )
capitalizedStrings = []
for word in words:
capitalizedStrings.append( word.capitalize() )
capitalizedName = ' '.join( capitalizedStrings )
self.displayButton = Tkinter.Button( preferencesDialog.master, text = capitalizedName, command = self.displayTool )
self.displayButton.grid( row = preferencesDialog.row - 1, column = 2, columnspan = 2 )
class Filename( StringPreference ):
	"A class to display, read & write a fileName."
	def addToDialog( self, preferencesDialog ):
		"Add this to the dialog."
		preferencesDialog.executables.append( self )
def execute( self ):
try:
import tkFileDialog
summarized = gcodec.getSummarizedFilename( self.value )
initialDirectory = os.path.dirname( summarized )
if len( initialDirectory ) > 0:
initialDirectory += os.sep
else:
initialDirectory = "."
fileName = tkFileDialog.askopenfilename( filetypes = self.getFilenameFirstTypes(), initialdir = initialDirectory, initialfile = os.path.basename( summarized ), title = self.name )
if ( str( fileName ) == '()' or str( fileName ) == '' ):
self.wasCancelled = True
else:
self.value = fileName
except:
print( 'Oops, ' + self.name + ' could not get fileName.' )
def getFromFilename( self, fileTypes, name, value ):
"Initialize."
self.getFromValue( name, value )
self.fileTypes = fileTypes
self.wasCancelled = False
return self
def getFilenameFirstTypes( self ):
"Get the file types with the file type of the fileName moved to the front of the list."
basename = os.path.basename( self.value )
splitFile = basename.split( '.' )
allReadables = []
if len( self.fileTypes ) > 1:
for fileType in self.fileTypes:
allReadable = ( ( 'All Readable', fileType[ 1 ] ) )
allReadables.append( allReadable )
if len( splitFile ) < 1:
return self.fileTypes + allReadables
baseExtension = splitFile[ - 1 ]
for fileType in self.fileTypes:
fileExtension = fileType[ 1 ].split( '.' )[ - 1 ]
if fileExtension == baseExtension:
fileNameFirstTypes = self.fileTypes[ : ]
fileNameFirstTypes.remove( fileType )
return [ fileType ] + fileNameFirstTypes + allReadables
return self.fileTypes + allReadables
def setToDisplay( self ):
"Do nothing because the file dialog is handling the value."
pass
class FloatPreference( StringPreference ):
"A class to display, read & write a float."
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
self.entry = Tkinter.Entry( preferencesDialog.master )
self.entry.insert( 0, str( self.value ) )
self.entry.grid( row = preferencesDialog.row, column = 3, sticky = Tkinter.W )
self.label = Tkinter.Label( preferencesDialog.master, text = self.name )
self.label.grid( row = preferencesDialog.row, column = 0, columnspan = 3, sticky = Tkinter.W )
preferencesDialog.row += 1
def setUpdateFunction( self, updateFunction ):
"Set the update function."
self.entry.bind( '<Return>', updateFunction )
def setValueToString( self, valueString ):
"Set the float to the string."
try:
self.value = float( valueString )
except:
print( 'Oops, can not read float' + self.name + ' ' + valueString )
class IntPreference( FloatPreference ):
"A class to display, read & write an int."
def setValueToString( self, valueString ):
"Set the integer to the string."
dotIndex = valueString.find( '.' )
if dotIndex > - 1:
valueString = valueString[ : dotIndex ]
try:
self.value = int( valueString )
except:
print( 'Oops, can not read integer ' + self.name + ' ' + valueString )
class LabelDisplay:
"A class to add a label."
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
self.label = Tkinter.Label( preferencesDialog.master, text = self.name )
self.label.grid( row = preferencesDialog.row, column = 0, columnspan = 2, sticky = Tkinter.W )
preferencesDialog.row += 1
def addToPreferenceTable( self, preferenceTable ):
"Do nothing because the label display is not archivable."
pass
def getFromName( self, name ):
"Initialize."
self.name = name
return self
def getName( self ):
"Get name for key sorting."
return self.name
def setToDisplay( self ):
"Do nothing because the label display is not archivable."
pass
def writeToArchiveWriter( self, archiveWriter ):
"Do nothing because the label display is not archivable."
pass
class MenuButtonDisplay:
"A class to add a menu button."
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
self.menuButton = Tkinter.Menubutton( preferencesDialog.master, borderwidth = 5, text = self.name, relief = Tkinter.RIDGE )
self.menuButton.grid( row = preferencesDialog.row, column = 0, columnspan = 2, sticky = Tkinter.W )
self.menuButton.menu = Tkinter.Menu( self.menuButton, tearoff = 0 )
self.menuButton[ 'menu' ] = self.menuButton.menu
preferencesDialog.row += 1
def addToPreferenceTable( self, preferenceTable ):
"Do nothing because the label display is not archivable."
pass
def getFromName( self, name ):
"Initialize."
self.radioVar = None
self.name = name
return self
def getName( self ):
"Get name for key sorting."
return self.name
def setToDisplay( self ):
"Do nothing because the label display is not archivable."
pass
def writeToArchiveWriter( self, archiveWriter ):
"Do nothing because the label display is not archivable."
pass
class MenuRadio( BooleanPreference ):
"A class to display, read & write a boolean with associated menu radio button."
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
self.menuLength = self.menuButtonDisplay.menuButton.menu.index( Tkinter.END )
if self.menuLength == None:
self.menuLength = 0
else:
self.menuLength += 1
self.menuButtonDisplay.menuButton.menu.add_radiobutton( label = self.name, value = self.menuLength, variable = self.getIntVar() )
self.setDisplayState()
def getFromMenuButtonDisplay( self, menuButtonDisplay, name, value ):
"Initialize."
self.getFromValue( name, value )
self.menuButtonDisplay = menuButtonDisplay
return self
def getIntVar( self ):
"Get the IntVar for this radio button group."
if self.menuButtonDisplay.radioVar == None:
self.menuButtonDisplay.radioVar = Tkinter.IntVar()
return self.menuButtonDisplay.radioVar
def setToDisplay( self ):
"Set the boolean to the checkbox."
self.value = ( self.getIntVar().get() == self.menuLength )
def setDisplayState( self ):
"Set the checkbox to the boolean."
if self.value:
self.getIntVar().set( self.menuLength )
self.menuButtonDisplay.menuButton.menu.invoke( self.menuLength )
class ListPreference( StringPreference ):
def addToDialog( self, preferencesDialog ):
"Do nothing because the list preference does not have a graphical interface."
pass
def setToDisplay( self ):
"Do nothing because the list preference does not have a graphical interface."
pass
def setValueToSplitLine( self, lineIndex, lines, splitLine ):
"Set the value to the second and later words of a split line."
self.value = splitLine[ 1 : ]
def setValueToString( self, valueString ):
"Do nothing because the list preference does not have a graphical interface."
pass
def writeToArchiveWriter( self, archiveWriter ):
"Write tab separated name and list to the archive writer."
archiveWriter.write( self.name + globalSpreadsheetSeparator )
for item in self.value:
archiveWriter.write( item )
if item != self.value[ - 1 ]:
archiveWriter.write( globalSpreadsheetSeparator )
archiveWriter.write( '\n' )
class ListboxPreference( StringPreference ):
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
frame = Tkinter.Frame( preferencesDialog.master )
scrollbar = Tkinter.Scrollbar( frame, orient = Tkinter.VERTICAL )
self.listbox = Tkinter.Listbox( frame, selectmode = Tkinter.SINGLE, yscrollcommand = scrollbar.set )
scrollbar.config( command = self.listbox.yview )
scrollbar.pack( side = Tkinter.RIGHT, fill = Tkinter.Y )
self.listbox.pack( side = Tkinter.LEFT, fill = Tkinter.BOTH, expand = 1 )
self.setListboxItems()
frame.grid( row = preferencesDialog.row, columnspan = 4, sticky = Tkinter.W )
preferencesDialog.row += 1
def getFromListPreference( self, listPreference, name, value ):
"Initialize."
self.getFromValue( name, value )
self.listPreference = listPreference
return self
def setListboxItems( self ):
"Set the listbox items to the list preference."
for item in self.listPreference.value:
self.listbox.insert( Tkinter.END, item )
if self.value == item:
self.listbox.select_set( Tkinter.END )
def setToDisplay( self ):
"Set the selection value to the listbox selection."
valueString = self.listbox.get( Tkinter.ACTIVE )
self.setValueToString( valueString )
class Radio( BooleanPreference ):
"A class to display, read & write a boolean with associated radio button."
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
self.radiobutton = Tkinter.Radiobutton( preferencesDialog.master, command = self.clickRadio, text = self.name, value = preferencesDialog.row, variable = self.getIntVar() )
self.radiobutton.grid( row = preferencesDialog.row, column = 0, columnspan = 2, sticky = Tkinter.W )
self.setDisplayState( preferencesDialog.row )
preferencesDialog.row += 1
def clickRadio( self ):
"Workaround for Tkinter bug, set the value."
self.getIntVar().set( self.radiobutton[ 'value' ] )
def getFromRadio( self, name, radio, value ):
"Initialize."
self.getFromValue( name, value )
self.radio = radio
return self
def getIntVar( self ):
"Get the IntVar for this radio button group."
if len( self.radio ) == 0:
self.radio.append( Tkinter.IntVar() )
return self.radio[ 0 ]
def setToDisplay( self ):
"Set the boolean to the checkbox."
self.value = ( self.getIntVar().get() == self.radiobutton[ 'value' ] )
def setDisplayState( self, row ):
"Set the checkbox to the boolean."
if self.value:
self.getIntVar().set( self.radiobutton[ 'value' ] )
self.radiobutton.select()
class RadioCapitalized( Radio ):
"A class to display, read & write a boolean with associated radio button."
def addToDialog( self, preferencesDialog ):
"Add this to the dialog."
withSpaces = self.name.lower().replace( '_', ' ' )
words = withSpaces.split( ' ' )
capitalizedStrings = []
for word in words:
capitalizedStrings.append( word.capitalize() )
capitalizedName = ' '.join( capitalizedStrings )
self.radiobutton = Tkinter.Radiobutton( preferencesDialog.master, command = self.clickRadio, text = capitalizedName, value = preferencesDialog.row, variable = self.getIntVar() )
self.radiobutton.grid( row = preferencesDialog.row, column = 0, columnspan = 2, sticky = Tkinter.W )
self.setDisplayState( preferencesDialog.row )
preferencesDialog.row += 1
def getLowerName( self ):
"Get the lower case name."
return self.name.lower()
class WindowPosition( StringPreference ):
"A class to display, read & write a window position."
def addToDialog( self, preferencesDialog ):
"Set the master to later get the geometry."
self.master = preferencesDialog.master
self.windowPositionName = 'windowPosition' + preferencesDialog.displayPreferences.title
self.setToDisplay()
def setToDisplay( self ):
"Set the string to the window position."
if self.name != self.windowPositionName:
return
geometryString = self.master.geometry()
if geometryString == '1x1+0+0':
return
firstPlusIndexPlusOne = geometryString.find( '+' ) + 1
self.value = geometryString[ firstPlusIndexPlusOne : ]
def setWindowPosition( self ):
"Set the window position."
geometryString = self.master.geometry()
if geometryString == '1x1+0+0':
return
firstPlusIndexPlusOne = geometryString.find( '+' ) + 1
if self.value.count( '+' ) == 1:
geometryString = geometryString[ : firstPlusIndexPlusOne ] + self.value
self.master.geometry( geometryString )
class PreferencesDialog:
def __init__( self, displayPreferences, master ):
"Add display preferences to the dialog."
self.column = 0
self.displayPreferences = displayPreferences
self.displayToolButtonStart = True
self.executables = []
self.master = master
self.row = 0
master.title( displayPreferences.title )
frame = Tkinter.Frame( master )
for preference in displayPreferences.archive:
preference.addToDialog( self )
if self.row < 20:
Tkinter.Label( master ).grid( row = self.row )
self.row += 1
cancelColor = 'red'
cancelTitle = 'Close'
if displayPreferences.saveTitle != None:
cancelTitle = 'Cancel'
if displayPreferences.executeTitle != None:
executeButton = Tkinter.Button( master, activebackground = 'black', activeforeground = 'blue', text = displayPreferences.executeTitle, command = self.execute )
executeButton.grid( row = self.row, column = self.column )
self.column += 1
helpButton = Tkinter.Button( master, activebackground = 'black', activeforeground = 'white', text = " ? ", command = self.openBrowser )
helpButton.grid( row = self.row, column = self.column )
self.column += 1
cancelButton = Tkinter.Button( master, activebackground = 'black', activeforeground = cancelColor, command = master.destroy, fg = cancelColor, text = cancelTitle )
cancelButton.grid( row = self.row, column = self.column )
self.column += 1
if displayPreferences.saveTitle != None:
saveButton = Tkinter.Button( master, activebackground = 'black', activeforeground = 'darkgreen', command = self.savePreferencesDestroy, fg = 'darkgreen', text = displayPreferences.saveTitle )
saveButton.grid( row = self.row, column = self.column )
self.setWindowPosition()
def execute( self ):
"The execute button was clicked."
for executable in self.executables:
executable.execute()
self.savePreferences()
self.displayPreferences.execute()
self.master.destroy()
def openBrowser( self ):
"Open the browser to the help page."
numberOfLevelsDeepInPackageHierarchy = 2
packageFilePath = os.path.abspath( __file__ )
for level in xrange( numberOfLevelsDeepInPackageHierarchy + 1 ):
packageFilePath = os.path.dirname( packageFilePath )
documentationPath = os.path.join( os.path.join( packageFilePath, 'documentation' ), self.displayPreferences.fileNameHelp )
os.system( webbrowser.get().name + ' ' + documentationPath )#used this instead of webbrowser.open() to workaround webbrowser open() bug
def savePreferences( self ):
"Set the preferences to the dialog then write them."
for preference in self.displayPreferences.archive:
preference.setToDisplay()
writePreferences( self.displayPreferences )
def savePreferencesDestroy( self ):
"Set the preferences to the dialog, write them, then destroy the window."
self.savePreferences()
self.master.destroy()
def setWindowPosition( self ):
"Set the window position if that preference exists."
windowPositionName = 'windowPosition' + self.displayPreferences.title
for preference in self.displayPreferences.archive:
if isinstance( preference, WindowPosition ):
if preference.name == windowPositionName:
self.master.update_idletasks()
preference.setWindowPosition()
self.master.update_idletasks()
return
|
from MenuList import MenuList
from Tools.Directories import SCOPE_ACTIVE_SKIN, resolveFilename
from enigma import RT_HALIGN_LEFT, eListboxPythonMultiContent, gFont
from Tools.LoadPixmap import LoadPixmap
from Tools.Directories import fileExists
import skin
from os import path
def row_delta_y():
	font = skin.fonts.get('ChoiceList', ('Regular', 20, 30))
return (int(font[2]) - int(font[1])) / 2
def ChoiceEntryComponent(key = None, text = ['--']):
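	# build an eListboxPythonMultiContent entry: a text cell (a long dash run for '--' separator rows) plus an optional icon or coloured key pixmap resolved from the active skin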
res = [text]
if text[0] == '--':
x, y, w, h = skin.parameters.get('ChoicelistDash', (0, 2, 800, 25))
res.append((eListboxPythonMultiContent.TYPE_TEXT,
x,
y,
w,
h,
0,
RT_HALIGN_LEFT,
'-' * 200))
else:
x, y, w, h = skin.parameters.get('ChoicelistName', (45, 2, 800, 25))
res.append((eListboxPythonMultiContent.TYPE_TEXT,
x,
y,
w,
h,
0,
RT_HALIGN_LEFT,
text[0]))
if key:
if key == 'expandable':
pngfile = resolveFilename(SCOPE_ACTIVE_SKIN, 'icons/expandable.png')
elif key == 'expanded':
pngfile = resolveFilename(SCOPE_ACTIVE_SKIN, 'icons/expanded.png')
elif key == 'verticalline':
pngfile = resolveFilename(SCOPE_ACTIVE_SKIN, 'icons/verticalline.png')
elif key == 'bullet':
pngfile = resolveFilename(SCOPE_ACTIVE_SKIN, 'icons/bullet.png')
else:
pngfile = resolveFilename(SCOPE_ACTIVE_SKIN, 'buttons/key_%s.png' % key)
if fileExists(pngfile):
png = LoadPixmap(pngfile)
x, y, w, h = skin.parameters.get('ChoicelistIcon', (5, 0, 35, 25))
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND,
x,
y,
w,
h,
png))
return res
class ChoiceList(MenuList):
def __init__(self, list, selection = 0, enableWrapAround = False):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
font = skin.fonts.get('ChoiceList', ('Regular', 20, 30))
self.l.setFont(0, gFont(font[0], font[1]))
self.l.setItemHeight(font[2])
self.ItemHeight = font[2]
self.selection = selection
def postWidgetCreate(self, instance):
MenuList.postWidgetCreate(self, instance)
self.moveToIndex(self.selection)
self.instance.setWrapAround(True)
def getItemHeight(self):
return self.ItemHeight
|
"""
Function.py
This file is part of ANNarchy.
Copyright (C) 2013-2016 Julien Vitay <julien.vitay@gmail.com>,
Helge Uelo Dinkelbach <helge.dinkelbach@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ANNarchy is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import ANNarchy.core.Global as Global
from ANNarchy.parser.Equation import transform_condition
from .ParserTemplate import parser_dict, functions_dict, user_functions
import sympy as sp
from sympy.parsing.sympy_parser import parse_expr, standard_transformations, convert_xor, auto_number
import re
class FunctionParser(object):
'''
    Class to analyse one user-defined function.
'''
def __init__(self,
name,
expression,
description,
untouched = [],
method='explicit',
type=None):
'''
Parameters:
        * name : The name of the function
        * expression: The expression as a string
        * description: the list of the arguments of the function
        * untouched: list of terms which should not be modified
        * method: the numerical method to use for ODEs
        * type: forces the analyser to consider the equation as: simple, cond, ODE, inc
'''
self.args = description
self.eq = expression
# Copy the default functions dictionary
self.local_dict = functions_dict.copy()
# Add the arguments to the dictionary
for arg in self.args:
self.local_dict[arg] = sp.Symbol(arg)
# Add custom constants
for obj in Global._objects['constants']:
self.local_dict[obj.name] = sp.Symbol(obj.name)
# Add other functions
self.user_functions = user_functions.copy()
for func in [func[0] for func in Global._objects['functions']]:
self.user_functions[func] = func
self.local_dict[func] = sp.Function(func)
# Possibly conditionals (up to 10 per equation... dirty!)
for i in range(10):
self.local_dict['__conditional__'+str(i)] = sp.Symbol('__conditional__'+str(i))
def parse(self, part=None):
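        # rewrite '==' and '!=' (which sympy's parser cannot handle) into Equality()/Not(Equality()) before calling parse_expr, then emit the parsed expression as C code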
if not part:
part = self.eq
expression = transform_condition(part)
# Check if there is a == in the condition
if '==' in expression:
# Is it the only term, or are there other operations?
if '&' in expression or '|' in expression:
expression = re.sub(r'([\w\s.]+)==([\w\s.]+)', r'Equality(\1, \2)', expression)
else:
terms = expression.split('==')
expression = 'Equality(' + terms[0] + ', ' + terms[1] + ')'
# Check if there is a != in the condition
if '!=' in expression:
# Is it the only term, or are there other operations?
if '&' in expression or '|' in expression:
expression = re.sub(r'([\w\s.]+)!=([\w\s.]+)', r'Not(Equality(\1, \2))', expression)
else:
terms = expression.split('!=')
expression = 'Not(Equality(' + terms[0] + ', ' + terms[1] + '))'
try:
eq = parse_expr(expression,
local_dict = self.local_dict,
transformations = ((auto_number, convert_xor,))
)
        except Exception:
Global._print(expression)
Global._error('The function depends on unknown variables.')
return sp.ccode(eq, precision=8,
user_functions=self.user_functions)
def dependencies(self):
"For compatibility with Equation."
return self.args
|
import errno
import logging
import os
import re
import random
import string
import sys
from avocado.core import exit_codes
from avocado.core.settings import settings
from avocado.utils.process import pid_exists
from avocado.utils.stacktrace import log_exc_info
try:
from avocado.core.plugin_interfaces import JobPre, JobPost
except ImportError:
from avocado.plugins.base import JobPre, JobPost
from ..test import VirtTest
class LockCreationError(Exception):
"""
Represents any error situation when attempting to create a lock file
"""
pass
class OtherProcessHoldsLockError(Exception):
"""
Represents a condition where other process has the lock
"""
class VTJobLock(JobPre, JobPost):
name = 'vt-joblock'
description = 'Avocado-VT Job Lock/Unlock'
def __init__(self):
self.log = logging.getLogger("avocado.app")
self.lock_dir = os.path.expanduser(settings.get_value(
section="plugins.vtjoblock",
key="dir",
key_type=str,
default='/tmp'))
self.lock_file = None
def _create_self_lock_file(self, job):
"""
Creates the lock file for this job process
:param job: the currently running job
:type job: :class:`avocado.core.job.Job`
:raises: :class:`LockCreationError`
:returns: the full path for the lock file created
:rtype: str
"""
pattern = 'avocado-vt-joblock-%(jobid)s-%(uid)s-%(random)s.pid'
# the job unique id is already random, but, let's add yet another one
rand = ''.join([random.choice(string.ascii_lowercase + string.digits)
for i in xrange(8)])
path = pattern % {'jobid': job.unique_id,
'uid': os.getuid(),
'random': rand}
path = os.path.join(self.lock_dir, path)
try:
with open(path, 'w') as lockfile:
lockfile.write("%u" % os.getpid())
return path
except Exception as e:
raise LockCreationError(e)
def _get_lock_files(self):
"""
Get the list of lock file names under the current lock dir
:returns: a list with the full path of files that match the
lockfile pattern
:rtype: list
"""
try:
files = os.listdir(self.lock_dir)
            pattern = re.compile(r'avocado-vt-joblock-[0-9a-f]{40}-[0-9]+'
                                 r'-[0-9a-z]{8}\.pid')
return [os.path.join(self.lock_dir, _) for _ in files
if pattern.match(_)]
        except OSError as e:
            if e.errno == errno.ENOENT:
                return []
            raise
def _lock(self, job):
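        # create this job's lock file, then scan the other lock files: a live owner PID means another avocado-vt job is running, while stale files (dead PID) are removed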
self.lock_file = self._create_self_lock_file(job)
lock_files = self._get_lock_files()
lock_files.remove(self.lock_file)
for path in lock_files:
try:
lock_pid = int(open(path, 'r').read())
except Exception:
msg = 'Cannot read PID from "%s".' % path
raise LockCreationError(msg)
else:
if pid_exists(lock_pid):
msg = 'File "%s" acquired by PID %u. ' % (path, lock_pid)
raise OtherProcessHoldsLockError(msg)
else:
try:
os.unlink(path)
except OSError:
self.log.warn("Unable to remove stale lock: %s", path)
def pre(self, job):
try:
if any(test_factory[0] is VirtTest
for test_factory in job.test_suite):
self._lock(job)
except Exception as detail:
msg = "Failure trying to set Avocado-VT job lock: %s" % detail
self.log.error(msg)
log_exc_info(sys.exc_info(), self.log.name)
sys.exit(exit_codes.AVOCADO_JOB_FAIL | job.exitcode)
def post(self, job):
if self.lock_file is not None:
os.unlink(self.lock_file)
|
import duplicity.backend
from duplicity import globals
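# prefer the pyrax-based backend when configured, falling back to the legacy python-cloudfiles implementation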
if (globals.cf_backend and
globals.cf_backend.lower().strip() == 'pyrax'):
from ._cf_pyrax import PyraxBackend as CFBackend
else:
from ._cf_cloudfiles import CloudFilesBackend as CFBackend
duplicity.backend.register_backend("cf+http", CFBackend)
|
import re
import random
import pytest
from cfme.configure.configuration.region_settings import Tag
from cfme.containers.provider import (ContainersProvider, ContainersTestItem,
refresh_and_navigate)
from cfme.containers.image import Image, ImageCollection
from cfme.containers.project import Project, ProjectCollection
from cfme.containers.image_registry import (ImageRegistry,
ImageRegistryCollection)
from cfme.containers.node import Node, NodeCollection
from cfme.containers.pod import Pod, PodCollection
from cfme.containers.template import Template, TemplateCollection
from cfme.containers.container import Container, ContainerCollection
from cfme.utils.log import create_sublogger
from cfme.utils.wait import wait_for
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.blockers import BZ
pytestmark = [
pytest.mark.usefixtures('setup_provider'),
pytest.mark.tier(1),
pytest.mark.provider([ContainersProvider], scope='function')
]
logger = create_sublogger("smart_management")
TEST_ITEMS = [
pytest.mark.polarion('CMP-9948')(ContainersTestItem(
Container, 'CMP-9948', collection_obj=ContainerCollection)),
pytest.mark.polarion('CMP-10320')(ContainersTestItem(
Template, 'CMP-10320', collection_obj=TemplateCollection)),
pytest.mark.polarion('CMP-9992')(ContainersTestItem(
ImageRegistry, 'CMP-9992', collection_obj=ImageRegistryCollection)),
pytest.mark.polarion('CMP-9981')(ContainersTestItem(
Image, 'CMP-9981', collection_obj=ImageCollection)),
pytest.mark.polarion('CMP-9964')(ContainersTestItem(
Node, 'CMP-9964', collection_obj=NodeCollection)),
pytest.mark.polarion('CMP-9932')(ContainersTestItem(
Pod, 'CMP-9932', collection_obj=PodCollection)),
pytest.mark.polarion('CMP-9870')(ContainersTestItem(
Project, 'CMP-9870', collection_obj=ProjectCollection)),
pytest.mark.polarion('CMP-9854')(ContainersTestItem(
ContainersProvider, 'CMP-9854', collection_obj=None))
]
def get_object_name(obj):
return obj.__module__.title().split(".")[-1]
def set_random_tag(instance):
view = navigate_to(instance, 'EditTags')
logger.debug("Setting random tag")
random_cat = random.choice(view.form.tag_category.all_options).text
view.form.tag_category.select_by_visible_text(random_cat) # In order to get the right tags list
logger.debug("Selected category {cat}".format(cat=random_cat))
random_tag = random.choice([op for op in view.form.tag_name.all_options
if "select" not in op.text.lower()]).text
logger.debug("Selected value {tag}".format(tag=random_tag))
tag = Tag(display_name=random_tag, category=random_cat)
instance.add_tag(tag, details=False)
logger.debug("Tag configuration was saved")
return tag
def wait_for_tag(obj_inst):
# Waiting for some tag to appear at "My Company Tags" and return pop'ed last tag
def is_tag():
view = refresh_and_navigate(obj_inst, 'Details')
return view.entities.smart_management.read().get('My Company Tags', [])
last_tag = wait_for(is_tag, fail_condition=[], num_sec=30, delay=5).out
logger.debug("Last tag type: {t}".format(t=type(last_tag)))
return last_tag.pop() if isinstance(last_tag, list) else last_tag
@pytest.mark.parametrize('test_item', TEST_ITEMS,
ids=[ContainersTestItem.get_pretty_id(ti) for ti in TEST_ITEMS])
@pytest.mark.meta(blockers=[BZ(1479412,
forced_streams=['5.7'],
unblock=lambda test_item: test_item.obj != Container)])
def test_smart_management_add_tag(provider, appliance, test_item):
logger.debug("Setting smart mgmt tag to {obj_type}".format(obj_type=test_item.obj.__name__))
    # select the object instance to tag: the provider itself or a random instance from its collection
obj_inst = (provider if test_item.obj is ContainersProvider
else test_item.collection_obj(appliance).get_random_instances().pop())
logger.debug('Selected object is "{obj_name}"'.format(obj_name=obj_inst.name))
regex = r"([\w\s|\-|\*]+:([\w\s|\-|\*])+)|(No.*assigned)"
try:
# Remove all previous configured tags for given object
logger.debug('Starting cleaning old tags from '
'object "{obj_name}"'.format(obj_name=obj_inst.name))
obj_inst.remove_tags(obj_inst.get_tags())
logger.debug("All smart management tags was removed successfully")
except RuntimeError:
logger.debug("Fail to remove tags, checking if no tag set")
# Validate old tags formatting
assert re.match(regex, wait_for_tag(obj_inst).text_value), (
"Tag formatting is invalid! ")
logger.debug("No tag was set, continuing to main test")
    # Configure a random tag for the object
random_tag_set = set_random_tag(obj_inst)
logger.debug("Fetching tag info for selected object")
# validate new tag format
tag_display_text = wait_for_tag(obj_inst)
logger.debug("Tag info: {info}".format(info=tag_display_text))
assert re.match(regex, tag_display_text), "Tag formatting is invalid! "
actual_tags_on_instance = obj_inst.get_tags()
    # Validate the tag was set successfully
    assert len(actual_tags_on_instance) == 1, "Failed to set a tag for {obj_type}".format(
        obj_type=get_object_name(test_item.obj))
actual_tags_on_instance = actual_tags_on_instance.pop()
# Validate tag value
assert actual_tags_on_instance.display_name == random_tag_set.display_name, \
"Tag value not correctly configured"
|
import re
import validators
from requests.compat import urljoin
from requests.utils import dict_from_cookiejar
from sickbeard import logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size, try_int
from sickrage.helper.exceptions import ex
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class IPTorrentsProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes
def __init__(self):
TorrentProvider.__init__(self, "IPTorrents")
self.enable_cookies = True
self.username = None
self.password = None
self.freeleech = False
self.minseed = None
self.minleech = None
self.custom_url = None
self.cache = tvcache.TVCache(self, min_time=10) # Only poll IPTorrents every 10 minutes max
self.urls = {'base_url': 'https://iptorrents.eu',
'login': 'https://iptorrents.eu/take_login.php',
'search': 'https://iptorrents.eu/t?%s%s&q=%s&qf=#torrents'}
self.url = self.urls['base_url']
self.categories = '73=&60='
def login(self):
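        # already-authenticated session cookies are reused; otherwise cookies configured in the UI are tried, then a regular form login whose response is checked for known failure messages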
cookie_dict = dict_from_cookiejar(self.session.cookies)
if cookie_dict.get('uid') and cookie_dict.get('pass'):
return True
if self.cookies:
success, status = self.add_cookies_from_ui()
if not success:
logger.log(status, logger.INFO)
return False
login_params = {'username': self.username,
'password': self.password,
'login': 'submit'}
login_url = self.urls['login']
if self.custom_url:
if not validators.url(self.custom_url):
logger.log("Invalid custom url: {0}".format(self.custom_url), logger.WARNING)
return False
login_url = urljoin(self.custom_url, self.urls['login'].split(self.url)[1])
self.get_url(login_url, returns='text')
response = self.get_url(login_url, post_data=login_params, returns='text')
if not response:
logger.log(u"Unable to connect to provider", logger.WARNING)
return False
# Invalid username and password combination
if re.search('Invalid username and password combination', response):
logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
return False
# You tried too often, please try again after 2 hours!
if re.search('You tried too often', response):
logger.log(u"You tried too often, please try again after 2 hours! Disable IPTorrents for at least 2 hours", logger.WARNING)
return False
# Captcha!
if re.search('Captcha verification failed.', response):
logger.log(u"Stupid captcha", logger.WARNING)
return False
return True
def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements
results = []
if not self.login():
return results
freeleech = '&free=on' if self.freeleech else ''
for mode in search_params:
items = []
logger.log(u"Search Mode: {0}".format(mode), logger.DEBUG)
for search_string in search_params[mode]:
if mode != 'RSS':
logger.log(u"Search string: {0}".format
(search_string.decode("utf-8")), logger.DEBUG)
# URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
search_url = self.urls['search'] % (self.categories, freeleech, search_string)
search_url += ';o=seeders' if mode != 'RSS' else ''
if self.custom_url:
if not validators.url(self.custom_url):
logger.log("Invalid custom url: {0}".format(self.custom_url), logger.WARNING)
return results
search_url = urljoin(self.custom_url, search_url.split(self.url)[1])
data = self.get_url(search_url, returns='text')
if not data:
continue
try:
data = re.sub(r'(?im)<button.+?</button>', '', data, 0)
with BS4Parser(data, 'html5lib') as html:
if not html:
logger.log(u"No data returned from provider", logger.DEBUG)
continue
if html.find(text='No Torrents Found!'):
logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
continue
torrent_table = html.find('table', id='torrents')
torrents = torrent_table('tr') if torrent_table else []
# Continue only if one Release is found
if len(torrents) < 2:
logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
continue
for result in torrents[1:]:
try:
title = result('td')[1].find('a').text
download_url = urljoin(search_url, result('td')[3].find('a')['href'])
seeders = int(result.find('td', class_='ac t_seeders').text)
leechers = int(result.find('td', class_='ac t_leechers').text)
torrent_size = result('td')[5].text
size = convert_size(torrent_size) or -1
except (AttributeError, TypeError, KeyError):
continue
if not all([title, download_url]):
continue
# Filter unseeded torrent
if seeders < self.minseed or leechers < self.minleech:
if mode != 'RSS':
logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format
(title, seeders, leechers), logger.DEBUG)
continue
item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': ''}
if mode != 'RSS':
logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format(title, seeders, leechers), logger.DEBUG)
items.append(item)
except Exception as e:
logger.log(u"Failed parsing provider. Error: {0!r}".format(ex(e)), logger.ERROR)
# For each search mode sort all the items by seeders if available
items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
results += items
return results
provider = IPTorrentsProvider()
|
from __future__ import unicode_literals
import frappe
from frappe import _
install_docs = [
{"doctype":"Role", "role_name":"Stock Manager", "name":"Stock Manager"},
{"doctype":"Role", "role_name":"Item Manager", "name":"Item Manager"},
{"doctype":"Role", "role_name":"Stock User", "name":"Stock User"},
{"doctype":"Role", "role_name":"Quality Manager", "name":"Quality Manager"},
{"doctype":"Item Group", "item_group_name":"All Item Groups", "is_group": 1},
{"doctype":"Item Group", "item_group_name":"Default",
"parent_item_group":"All Item Groups", "is_group": 0},
]
def get_warehouse_account_map(company=None):
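	# build a warehouse name -> account details map and cache it on frappe.flags (except in tests); warehouses without their own account inherit one through get_warehouse_account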
if not frappe.flags.warehouse_account_map or frappe.flags.in_test:
warehouse_account = frappe._dict()
filters = {}
if company:
filters['company'] = company
for d in frappe.get_all('Warehouse',
fields = ["name", "account", "parent_warehouse", "company", "is_group"],
filters = filters,
order_by="lft, rgt"):
if not d.account:
d.account = get_warehouse_account(d, warehouse_account)
if d.account:
d.account_currency = frappe.db.get_value('Account', d.account, 'account_currency', cache=True)
warehouse_account.setdefault(d.name, d)
frappe.flags.warehouse_account_map = warehouse_account
return frappe.flags.warehouse_account_map
def get_warehouse_account(warehouse, warehouse_account=None):
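	# resolution order: the warehouse's own account, then the nearest ancestor warehouse with an account, then the company's default inventory account; otherwise raise a validation error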
account = warehouse.account
if not account and warehouse.parent_warehouse:
if warehouse_account:
if warehouse_account.get(warehouse.parent_warehouse):
account = warehouse_account.get(warehouse.parent_warehouse).account
else:
from frappe.utils.nestedset import rebuild_tree
rebuild_tree("Warehouse", "parent_warehouse")
else:
account = frappe.db.sql("""
select
account from `tabWarehouse`
where
lft <= %s and rgt >= %s and company = %s
and account is not null and ifnull(account, '') !=''
order by lft desc limit 1""", (warehouse.lft, warehouse.rgt, warehouse.company), as_list=1)
account = account[0][0] if account else None
if not account and warehouse.company:
account = get_company_default_inventory_account(warehouse.company)
if not account and warehouse.company:
frappe.throw(_("Please set Account in Warehouse {0} or Default Inventory Account in Company {1}")
.format(warehouse.name, warehouse.company))
return account
def get_company_default_inventory_account(company):
return frappe.get_cached_value('Company', company, 'default_inventory_account')
|
from __future__ import unicode_literals
import unittest
import frappe
from frappe.utils import getdate, nowtime
from erpnext.healthcare.doctype.patient_appointment.test_patient_appointment import create_patient
from erpnext.healthcare.doctype.lab_test.lab_test import create_multiple
from erpnext.healthcare.doctype.healthcare_settings.healthcare_settings import get_receivable_account, get_income_account
from erpnext.healthcare.doctype.patient_medical_record.test_patient_medical_record import create_lab_test_template as create_blood_test_template
class TestLabTest(unittest.TestCase):
def test_lab_test_item(self):
lab_template = create_lab_test_template()
self.assertTrue(frappe.db.exists('Item', lab_template.item))
self.assertEqual(frappe.db.get_value('Item Price', {'item_code':lab_template.item}, 'price_list_rate'), lab_template.lab_test_rate)
lab_template.disabled = 1
lab_template.save()
self.assertEquals(frappe.db.get_value('Item', lab_template.item, 'disabled'), 1)
lab_template.reload()
lab_template.disabled = 0
lab_template.save()
def test_descriptive_lab_test(self):
lab_template = create_lab_test_template()
# blank result value not allowed as per template
lab_test = create_lab_test(lab_template)
lab_test.descriptive_test_items[0].result_value = 12
lab_test.descriptive_test_items[2].result_value = 1
lab_test.save()
self.assertRaises(frappe.ValidationError, lab_test.submit)
def test_sample_collection(self):
frappe.db.set_value('Healthcare Settings', 'Healthcare Settings', 'create_sample_collection_for_lab_test', 1)
lab_template = create_lab_test_template()
lab_test = create_lab_test(lab_template)
lab_test.descriptive_test_items[0].result_value = 12
lab_test.descriptive_test_items[1].result_value = 1
lab_test.descriptive_test_items[2].result_value = 2.3
lab_test.save()
# check sample collection created
self.assertTrue(frappe.db.exists('Sample Collection', {'sample': lab_template.sample}))
frappe.db.set_value('Healthcare Settings', 'Healthcare Settings', 'create_sample_collection_for_lab_test', 0)
lab_test = create_lab_test(lab_template)
lab_test.descriptive_test_items[0].result_value = 12
lab_test.descriptive_test_items[1].result_value = 1
lab_test.descriptive_test_items[2].result_value = 2.3
lab_test.save()
# sample collection should not be created
lab_test.reload()
self.assertEquals(lab_test.sample, None)
def test_create_lab_tests_from_sales_invoice(self):
sales_invoice = create_sales_invoice()
create_multiple('Sales Invoice', sales_invoice.name)
sales_invoice.reload()
self.assertIsNotNone(sales_invoice.items[0].reference_dn)
self.assertIsNotNone(sales_invoice.items[1].reference_dn)
def test_create_lab_tests_from_patient_encounter(self):
patient_encounter = create_patient_encounter()
create_multiple('Patient Encounter', patient_encounter.name)
patient_encounter.reload()
self.assertTrue(patient_encounter.lab_test_prescription[0].lab_test_created)
		self.assertTrue(patient_encounter.lab_test_prescription[1].lab_test_created)
def create_lab_test_template(test_sensitivity=0, sample_collection=1):
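	# reuse the existing 'Insulin Resistance' template if present; otherwise build a descriptive template with FBS/Insulin/IR result rows (only IR may be left blank)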
medical_department = create_medical_department()
if frappe.db.exists('Lab Test Template', 'Insulin Resistance'):
return frappe.get_doc('Lab Test Template', 'Insulin Resistance')
template = frappe.new_doc('Lab Test Template')
template.lab_test_name = 'Insulin Resistance'
template.lab_test_template_type = 'Descriptive'
template.lab_test_code = 'Insulin Resistance'
template.lab_test_group = 'Services'
template.department = medical_department
template.is_billable = 1
template.lab_test_description = 'Insulin Resistance'
template.lab_test_rate = 2000
for entry in ['FBS', 'Insulin', 'IR']:
template.append('descriptive_test_templates', {
'particulars': entry,
'allow_blank': 1 if entry=='IR' else 0
})
if test_sensitivity:
template.sensitivity = 1
if sample_collection:
template.sample = create_lab_test_sample()
template.sample_qty = 5.0
template.save()
return template
def create_medical_department():
medical_department = frappe.db.exists('Medical Department', '_Test Medical Department')
if not medical_department:
medical_department = frappe.new_doc('Medical Department')
medical_department.department = '_Test Medical Department'
medical_department.save()
medical_department = medical_department.name
return medical_department
def create_lab_test(lab_template):
patient = create_patient()
lab_test = frappe.new_doc('Lab Test')
lab_test.template = lab_template.name
lab_test.patient = patient
lab_test.patient_sex = 'Female'
lab_test.save()
return lab_test
def create_lab_test_sample():
blood_sample = frappe.db.exists('Lab Test Sample', 'Blood Sample')
if blood_sample:
return blood_sample
sample = frappe.new_doc('Lab Test Sample')
sample.sample = 'Blood Sample'
sample.sample_uom = 'U/ml'
sample.save()
return sample.name
def create_sales_invoice():
patient = create_patient()
medical_department = create_medical_department()
insulin_resistance_template = create_lab_test_template()
blood_test_template = create_blood_test_template(medical_department)
sales_invoice = frappe.new_doc('Sales Invoice')
sales_invoice.patient = patient
sales_invoice.customer = frappe.db.get_value('Patient', patient, 'customer')
sales_invoice.due_date = getdate()
sales_invoice.company = '_Test Company'
sales_invoice.debit_to = get_receivable_account('_Test Company')
tests = [insulin_resistance_template, blood_test_template]
for entry in tests:
sales_invoice.append('items', {
'item_code': entry.item,
'item_name': entry.lab_test_name,
'description': entry.lab_test_description,
'qty': 1,
'uom': 'Nos',
'conversion_factor': 1,
'income_account': get_income_account(None, '_Test Company'),
'rate': entry.lab_test_rate,
'amount': entry.lab_test_rate
})
sales_invoice.set_missing_values()
sales_invoice.submit()
return sales_invoice
def create_patient_encounter():
patient = create_patient()
medical_department = create_medical_department()
insulin_resistance_template = create_lab_test_template()
blood_test_template = create_blood_test_template(medical_department)
patient_encounter = frappe.new_doc('Patient Encounter')
patient_encounter.patient = patient
patient_encounter.practitioner = create_practitioner()
patient_encounter.encounter_date = getdate()
patient_encounter.encounter_time = nowtime()
tests = [insulin_resistance_template, blood_test_template]
for entry in tests:
patient_encounter.append('lab_test_prescription', {
'lab_test_code': entry.item,
'lab_test_name': entry.lab_test_name
})
patient_encounter.submit()
return patient_encounter
def create_practitioner():
practitioner = frappe.db.exists('Healthcare Practitioner', '_Test Healthcare Practitioner')
if not practitioner:
practitioner = frappe.new_doc('Healthcare Practitioner')
practitioner.first_name = '_Test Healthcare Practitioner'
practitioner.gender = 'Female'
practitioner.op_consulting_charge = 500
practitioner.inpatient_visit_charge = 500
practitioner.save(ignore_permissions=True)
practitioner = practitioner.name
return practitioner
|
import grok
from app import Bioport
from interfaces import IBioport
from interfaces import IEnglishRequest
try:
from zope.i18n.interfaces import IUserPreferredLanguages # after python 2.6 upgrade
except ImportError:
from zope.app.publisher.browser import IUserPreferredLanguages # before python 2.6 upgrade
from zope.component import adapts
from zope.interface import alsoProvides
from zope.interface import implements
from zope.traversing.browser.absoluteurl import AbsoluteURL
from zope.traversing.browser.interfaces import IAbsoluteURL
class BioportTraverser(grok.Traverser):
"This traverser ensures that an english version of the site is available at /en"
grok.context(Bioport)
def traverse(self, name):
if name == 'en':
preferred_languages = IUserPreferredLanguages(self.request)
preferred_languages.setPreferredLanguages(['en'])
alsoProvides(self.request, IEnglishRequest)
return self.context
def language_switch(object, event):
"This is registered in zcml as a pre-traversal hook on Bioport"
context = object
request = event.request
preferred_languages = IUserPreferredLanguages(request)
preferred_languages.setPreferredLanguages(['nl'])
class EnglishAbsoluteURL(AbsoluteURL):
"This ensures that english requests will provide english urls"
adapts(IBioport, IEnglishRequest)
implements(IAbsoluteURL)
def __call__(self):
return super(EnglishAbsoluteURL, self).__call__() + '/en'
|
from . import Boolean
from . import Column
from . import GenericTable
from . import String
from . import Text
from . import Integer
from . import DateTime
from pyfaf.storage.jsontype import JSONType
import json
class PeriodicTask(GenericTable):
__tablename__ = "periodictasks"
id = Column(Integer, primary_key=True)
name = Column(String(100), nullable=False)
task = Column(String(100), nullable=False)
enabled = Column(Boolean, nullable=False, default=True)
crontab_minute = Column(String(20), nullable=False, default="*")
crontab_hour = Column(String(20), nullable=False, default="*")
crontab_day_of_week = Column(String(20), nullable=False, default="*")
crontab_day_of_month = Column(String(20), nullable=False, default="*")
crontab_month_of_year = Column(String(20), nullable=False, default="*")
last_run_at = Column(DateTime, nullable=True)
args = Column(JSONType, nullable=False, default=[])
kwargs = Column(JSONType, nullable=False, default={})
@property
def is_run_action(self):
return self.task == "pyfaf.celery_tasks.run_action"
@property
def args_parsed(self):
        return json.dumps(self.args, indent=4)
@property
def nice_name(self):
return self.name
@property
def nice_task(self):
if self.is_run_action and len(self.args) > 0:
return "Action {0}".format(self.args[0])
return self.task
class TaskResult(GenericTable):
__tablename__ = "taskresult"
id = Column(String(50), primary_key=True)
task = Column(String(100), nullable=False)
finished_time = Column(DateTime, nullable=True)
state = Column(String(20), nullable=False)
retval = Column(Text, nullable=False, default="")
args = Column(JSONType, nullable=False, default=[])
kwargs = Column(JSONType, nullable=False, default={})
@property
def is_run_action(self):
return self.task == "pyfaf.celery_tasks.run_action"
@property
def nice_task(self):
if self.is_run_action and len(self.args) > 0:
return "Action {0}".format(self.args[0])
return self.task
@property
def nice_args(self):
return json.dumps(self.args, indent=4)
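
# --- Illustrative usage sketch (not part of the original pyfaf module) ---
# A minimal, hypothetical example of how the is_run_action/nice_task helpers
# above behave. It assumes the declarative models can be instantiated in
# memory without a database session, which is the usual SQLAlchemy behaviour.
if __name__ == "__main__":
    task = PeriodicTask()
    task.name = "nightly-pull"                      # made-up task name
    task.task = "pyfaf.celery_tasks.run_action"     # marks it as a run_action task
    task.args = ["pull-releases"]                   # hypothetical action name
    print(task.is_run_action)   # True
    print(task.nice_task)       # "Action pull-releases"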
|
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: aos_blueprint
author: jeremy@apstra.com (@jeremyschulman)
version_added: "2.3"
short_description: Manage AOS blueprint instance
description:
  - Apstra AOS Blueprint module lets you manage your Blueprints easily. You can
    create and delete Blueprints by name or ID. You can also use it to retrieve
    all data from a blueprint. This module is idempotent
    and supports the I(check) mode. It uses the AOS REST API.
requirements:
- "aos-pyez >= 0.6.0"
options:
session:
description:
- An existing AOS session as obtained by M(aos_login) module.
required: true
name:
description:
- Name of the Blueprint to manage.
Only one of I(name) or I(id) can be set.
id:
description:
      - AOS Id of the Blueprint to manage (can't be used to create a new Blueprint).
Only one of I(name) or I(id) can be set.
state:
description:
- Indicate what is the expected state of the Blueprint.
choices: ['present', 'absent', 'build-ready']
default: present
timeout:
description:
      - When I(state=build-ready), this value identifies the number of seconds to wait
        before declaring a failure.
default: 5
template:
description:
- When creating a blueprint, this value identifies, by name, an existing engineering
design template within the AOS-server.
reference_arch:
description:
- When creating a blueprint, this value identifies a known AOS reference
architecture value. I(Refer to AOS-server documentation for available values).
'''
EXAMPLES = '''
- name: Creating blueprint
aos_blueprint:
session: "{{aos_session}}"
name: "my-blueprint"
design_template: "my-template"
reference_arch: two_stage_l3clos
state: present
- name: Access a blueprint and get content
aos_blueprint:
session: "{{aos_session}}"
name: "{{ blueprint_name }}"
design_template: "{{ blueprint_template }}"
state: present
register: bp
- name: Delete a blueprint
aos_blueprint:
session: "{{aos_session}}"
name: "my-blueprint"
state: absent
- name: Await blueprint build-ready, and obtain contents
aos_blueprint:
session: "{{aos_session}}"
name: "{{ blueprint_name }}"
state: build-ready
register: bp
'''
RETURN = '''
name:
description: Name of the Blueprint
returned: always
type: str
sample: My-Blueprint
id:
description: AOS unique ID assigned to the Blueprint
returned: always
type: str
sample: fcc4ac1c-e249-4fe7-b458-2138bfb44c06
value:
description: Information about the Blueprint
returned: always
type: dict
sample: {'...'}
contents:
description: Blueprint contents data-dictionary
returned: always
type: dict
sample: { ... }
build_errors:
description: When state='build-ready', and build errors exist, this contains list of errors
returned: only when build-ready returns fail
type: list
sample: [{...}, {...}]
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.aos import get_aos_session, check_aos_version, find_collection_item
from ansible.module_utils.pycompat24 import get_exception
def create_blueprint(module, aos, name):
margs = module.params
try:
template_id = aos.DesignTemplates[margs['template']].id
# Create a new Object based on the name
blueprint = aos.Blueprints[name]
blueprint.create(template_id, reference_arch=margs['reference_arch'])
except:
exc = get_exception()
if 'UNPROCESSABLE ENTITY' in exc.message:
msg = 'likely missing dependencies'
else:
msg = exc.message
module.fail_json(msg="Unable to create blueprint: %s" % exc.message)
return blueprint
def ensure_absent(module, aos, blueprint):
if blueprint.exists is False:
module.exit_json(changed=False)
else:
if not module.check_mode:
try:
blueprint.delete()
except:
exc = get_exception()
module.fail_json(msg='Unable to delete blueprint, %s' % exc.message)
module.exit_json(changed=True,
id=blueprint.id,
name=blueprint.name)
def ensure_present(module, aos, blueprint):
margs = module.params
if blueprint.exists:
module.exit_json(changed=False,
id=blueprint.id,
name=blueprint.name,
value=blueprint.value,
contents=blueprint.contents)
else:
# Check if template is defined and is valid
if margs['template'] is None:
module.fail_json(msg="You must define a 'template' name to create a new blueprint, currently missing")
elif aos.DesignTemplates.find(label=margs['template']) is None:
module.fail_json(msg="You must define a Valid 'template' name to create a new blueprint, %s is not valid" % margs['template'])
# Check if reference_arch
if margs['reference_arch'] is None:
module.fail_json(msg="You must define a 'reference_arch' to create a new blueprint, currently missing")
if not module.check_mode:
blueprint = create_blueprint(module, aos, margs['name'])
module.exit_json(changed=True,
id=blueprint.id,
name=blueprint.name,
value=blueprint.value,
contents=blueprint.contents)
else:
module.exit_json(changed=True,
name=margs['name'])
def ensure_build_ready(module, aos, blueprint):
margs = module.params
if not blueprint.exists:
module.fail_json(msg='blueprint %s does not exist' % blueprint.name)
if blueprint.await_build_ready(timeout=margs['timeout']*1000):
module.exit_json(contents=blueprint.contents)
else:
        module.fail_json(msg='blueprint %s has build errors' % blueprint.name,
                         build_errors=blueprint.build_errors)
def aos_blueprint(module):
margs = module.params
try:
aos = get_aos_session(module, margs['session'])
except:
module.fail_json(msg="Unable to login to the AOS server")
item_name = False
item_id = False
if margs['name'] is not None:
item_name = margs['name']
elif margs['id'] is not None:
item_id = margs['id']
#----------------------------------------------------
# Find Object if available based on ID or Name
#----------------------------------------------------
try:
my_blueprint = find_collection_item(aos.Blueprints,
item_name=item_name,
item_id=item_id)
except:
module.fail_json(msg="Unable to find the Blueprint based on name or ID, something went wrong")
#----------------------------------------------------
# Proceed based on State value
#----------------------------------------------------
if margs['state'] == 'absent':
ensure_absent(module, aos, my_blueprint)
elif margs['state'] == 'present':
ensure_present(module, aos, my_blueprint)
elif margs['state'] == 'build-ready':
ensure_build_ready(module, aos, my_blueprint)
def main():
module = AnsibleModule(
argument_spec=dict(
session=dict(required=True, type="dict"),
name=dict(required=False),
id=dict(required=False ),
state=dict(choices=[
'present', 'absent', 'build-ready'],
default='present'),
timeout=dict(type="int", default=5),
template=dict(required=False),
reference_arch=dict(required=False)
),
        mutually_exclusive=[('name', 'id')],
required_one_of=[('name', 'id')],
supports_check_mode=True
)
# Check if aos-pyez is present and match the minimum version
check_aos_version(module, '0.6.0')
aos_blueprint(module)
if __name__ == '__main__':
main()
|
"""
Module implementing a model for user agent management.
"""
from __future__ import unicode_literals
from PyQt5.QtCore import Qt, QModelIndex, QAbstractTableModel
class UserAgentModel(QAbstractTableModel):
"""
Class implementing a model for user agent management.
"""
def __init__(self, manager, parent=None):
"""
Constructor
@param manager reference to the user agent manager (UserAgentManager)
@param parent reference to the parent object (QObject)
"""
super(UserAgentModel, self).__init__(parent)
self.__manager = manager
self.__manager.changed.connect(self.__userAgentsChanged)
self.__headers = [
self.tr("Host"),
self.tr("User Agent String"),
]
def __userAgentsChanged(self):
"""
Private slot handling a change of the registered user agent strings.
"""
self.beginResetModel()
self.endResetModel()
def removeRows(self, row, count, parent=QModelIndex()):
"""
Public method to remove entries from the model.
@param row start row (integer)
@param count number of rows to remove (integer)
@param parent parent index (QModelIndex)
@return flag indicating success (boolean)
"""
if parent.isValid():
return False
if count <= 0:
return False
lastRow = row + count - 1
self.beginRemoveRows(parent, row, lastRow)
hostsList = self.__manager.allHostNames()
for index in range(row, lastRow + 1):
self.__manager.removeUserAgent(hostsList[index])
        # removeUserAgent emits changed()
#self.endRemoveRows()
return True
def rowCount(self, parent=QModelIndex()):
"""
Public method to get the number of rows of the model.
@param parent parent index (QModelIndex)
@return number of rows (integer)
"""
if parent.isValid():
return 0
else:
return self.__manager.hostsCount()
def columnCount(self, parent=QModelIndex()):
"""
Public method to get the number of columns of the model.
@param parent parent index (QModelIndex)
@return number of columns (integer)
"""
return len(self.__headers)
def data(self, index, role):
"""
Public method to get data from the model.
@param index index to get data for (QModelIndex)
@param role role of the data to retrieve (integer)
@return requested data
"""
if index.row() >= self.__manager.hostsCount() or index.row() < 0:
return None
host = self.__manager.allHostNames()[index.row()]
userAgent = self.__manager.userAgent(host)
if userAgent is None:
return None
if role == Qt.DisplayRole:
if index.column() == 0:
return host
elif index.column() == 1:
return userAgent
return None
def headerData(self, section, orientation, role=Qt.DisplayRole):
"""
Public method to get the header data.
@param section section number (integer)
@param orientation header orientation (Qt.Orientation)
@param role data role (integer)
@return header data
"""
if orientation == Qt.Horizontal and role == Qt.DisplayRole:
try:
return self.__headers[section]
except IndexError:
pass
return None
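
# --- Illustrative usage sketch (not part of the original module) ---
# Shows how UserAgentModel consumes the manager API it relies on (the
# changed() signal plus allHostNames(), hostsCount(), userAgent() and
# removeUserAgent()). The _StubManager below is a hypothetical stand-in for
# the real UserAgentManager, invented only for this demonstration.
if __name__ == "__main__":
    from PyQt5.QtCore import QCoreApplication, QObject, pyqtSignal

    app = QCoreApplication([])   # a core application is enough for model classes

    class _StubManager(QObject):
        changed = pyqtSignal()

        def __init__(self):
            super(_StubManager, self).__init__()
            self._agents = {"example.org": "Mozilla/5.0 (X11; Linux x86_64)"}

        def allHostNames(self):
            return sorted(self._agents)

        def hostsCount(self):
            return len(self._agents)

        def userAgent(self, host):
            return self._agents.get(host)

        def removeUserAgent(self, host):
            self._agents.pop(host, None)
            self.changed.emit()

    model = UserAgentModel(_StubManager())
    print(model.rowCount(), model.columnCount())           # 1 2
    print(model.data(model.index(0, 0), Qt.DisplayRole))   # example.org
    print(model.data(model.index(0, 1), Qt.DisplayRole))   # the user agent string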
|
'''
Loads features from a test GFF3 file and uses them to test the coordinate comparison
functions within the biothings API.
Author: Joshua Orvis
'''
import argparse
import os
import biocodegff
import biothings
def main():
bin_dir = os.path.abspath(os.path.dirname(__file__))
test_gff_file = bin_dir + '/biothings_coordinate_comparisons.data'
(assemblies, features) = biocodegff.get_gff3_features( test_gff_file )
if features['TP03_0010'] < features['TP03_0012.t01_polypeptide']:
print("INFO: < positive check successful")
else:
print("ERROR: < check unsuccessful")
if features['TP03_0012'] < features['TP03_0012.t01_polypeptide']:
print("ERROR: < check unsuccessful")
else:
print("INFO: < negative check successful")
if features['TP03_0012'] > features['TP03_0010']:
print("INFO: > positive check successful")
else:
print("ERROR: > check unsuccessful")
if features['TP03_0010'] > features['TP03_0012.t01_polypeptide']:
print("ERROR: > check unsuccessful")
else:
print("INFO: > negative check successful")
if features['TP03_0012.t01_exon-auto15079'] <= features['TP03_0012.t01_polypeptide']:
print("INFO: <= positive check successful")
else:
print("ERROR: <= check unsuccessful")
if features['TP03_0010'] <= features['TP03_0012']:
print("ERROR: <= check unsuccessful")
else:
print("INFO: <= negative check successful")
if features['TP03_0012.t01_exon-auto15085'] >= features['TP03_0012.t01_polypeptide']:
print("INFO: >= positive check successful")
else:
print("ERROR: >= check unsuccessful")
if features['TP03_0010'] >= features['TP03_0012']:
print("ERROR: >= check unsuccessful")
else:
print("INFO: >= negative check successful")
if features['TP03_0012.t01_exon-auto15079'].overlaps_with(features['TP03_0012.t01_polypeptide']):
print("INFO: overlaps_with() positive check successful")
else:
print("ERROR: overlaps_with() positive check unsuccessful")
if features['TP03_0002'].overlaps_with(features['TP03_0010']):
print("ERROR: overlaps_with() negative check unsuccessful")
else:
print("INFO: overlaps_with() negative check successful")
overlap_size = features['TP03_0012.t01_polypeptide'].overlap_size_with(features['TP03_0012.t01_CDS-auto15079'])
if overlap_size == 224:
print("INFO: overlap_size_with() positive check successful")
else:
print("ERROR: overlap_size_with() positive check unsuccessful (overlap returned: {0})".format(overlap_size))
    if features['TP03_0012.t01_polypeptide'].overlap_size_with(features['TP03_0012.t01_CDS-auto15085']) == 224:
        print("ERROR: overlap_size_with() negative check unsuccessful")
    else:
        print("INFO: overlap_size_with() negative check successful")
if __name__ == '__main__':
main()
|
ANSIBLE_METADATA = {
'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'
}
DOCUMENTATION = '''
---
module: bigip_configsync_action
short_description: Perform different actions related to config-sync.
description:
- Allows one to run different config-sync actions. These actions allow
you to manually sync your configuration across multiple BIG-IPs when
those devices are in an HA pair.
version_added: "2.4"
options:
device_group:
description:
- The device group that you want to perform config-sync actions on.
required: True
sync_device_to_group:
description:
- Specifies that the system synchronizes configuration data from this
device to other members of the device group. In this case, the device
will do a "push" to all the other devices in the group. This option
is mutually exclusive with the C(sync_group_to_device) option.
choices:
- yes
- no
sync_most_recent_to_device:
description:
- Specifies that the system synchronizes configuration data from the
device with the most recent configuration. In this case, the device
will do a "pull" from the most recently updated device. This option
is mutually exclusive with the C(sync_device_to_group) options.
choices:
- yes
- no
overwrite_config:
description:
- Indicates that the sync operation overwrites the configuration on
the target.
default: no
choices:
- yes
- no
notes:
- Requires the f5-sdk Python package on the host. This is as easy as pip
install f5-sdk.
- Requires the objectpath Python package on the host. This is as easy as pip
install objectpath.
requirements:
- f5-sdk >= 2.2.3
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = '''
- name: Sync configuration from device to group
bigip_configsync_actions:
device_group: "foo-group"
sync_device_to_group: yes
server: "lb01.mydomain.com"
user: "admin"
password: "secret"
validate_certs: no
delegate_to: localhost
- name: Sync configuration from most recent device to the current host
bigip_configsync_actions:
device_group: "foo-group"
sync_most_recent_to_device: yes
server: "lb01.mydomain.com"
user: "admin"
password: "secret"
validate_certs: no
delegate_to: localhost
- name: Perform an initial sync of a device to a new device group
bigip_configsync_actions:
device_group: "new-device-group"
sync_device_to_group: yes
server: "lb01.mydomain.com"
user: "admin"
password: "secret"
validate_certs: no
delegate_to: localhost
'''
RETURN = '''
'''
import time
import re
try:
from objectpath import Tree
HAS_OBJPATH = True
except ImportError:
HAS_OBJPATH = False
from ansible.module_utils.basic import BOOLEANS_TRUE
from ansible.module_utils.f5_utils import AnsibleF5Client
from ansible.module_utils.f5_utils import AnsibleF5Parameters
from ansible.module_utils.f5_utils import HAS_F5SDK
from ansible.module_utils.f5_utils import F5ModuleError
try:
from ansible.module_utils.f5_utils import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
class Parameters(AnsibleF5Parameters):
api_attributes = []
returnables = []
@property
def direction(self):
if self.sync_device_to_group:
return 'to-group'
else:
return 'from-group'
@property
def sync_device_to_group(self):
result = self._cast_to_bool(self._values['sync_device_to_group'])
return result
@property
def sync_group_to_device(self):
result = self._cast_to_bool(self._values['sync_group_to_device'])
return result
@property
def force_full_push(self):
if self.overwrite_config:
return 'force-full-load-push'
else:
return ''
@property
def overwrite_config(self):
result = self._cast_to_bool(self._values['overwrite_config'])
return result
def _cast_to_bool(self, value):
if value is None:
return None
elif value in BOOLEANS_TRUE:
return True
else:
return False
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
def api_params(self):
result = {}
for api_attribute in self.api_attributes:
if self.api_map is not None and api_attribute in self.api_map:
result[api_attribute] = getattr(self, self.api_map[api_attribute])
else:
result[api_attribute] = getattr(self, api_attribute)
result = self._filter_params(result)
return result
class ModuleManager(object):
def __init__(self, client):
self.client = client
self.want = Parameters(self.client.module.params)
def exec_module(self):
result = dict()
try:
changed = self.present()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
result.update(dict(changed=changed))
return result
def present(self):
if not self._device_group_exists():
raise F5ModuleError(
"The specified 'device_group' not not exist."
)
if self._sync_to_group_required():
raise F5ModuleError(
"This device group needs an initial sync. Please use "
"'sync_device_to_group'"
)
if self.exists():
return False
else:
return self.execute()
def _sync_to_group_required(self):
resource = self.read_current_from_device()
status = self._get_status_from_resource(resource)
if status == 'Awaiting Initial Sync' and self.want.sync_group_to_device:
return True
return False
def _device_group_exists(self):
result = self.client.api.tm.cm.device_groups.device_group.exists(
name=self.want.device_group
)
return result
def execute(self):
self.execute_on_device()
self._wait_for_sync()
return True
def exists(self):
resource = self.read_current_from_device()
status = self._get_status_from_resource(resource)
if status == 'In Sync':
return True
else:
return False
def execute_on_device(self):
sync_cmd = 'config-sync {0} {1} {2}'.format(
self.want.direction,
self.want.device_group,
self.want.force_full_push
)
self.client.api.tm.cm.exec_cmd(
'run',
utilCmdArgs=sync_cmd
)
def _wait_for_sync(self):
        # Wait no more than about 9 minutes (179 iterations * 3 seconds)
resource = self.read_current_from_device()
for x in range(1, 180):
time.sleep(3)
status = self._get_status_from_resource(resource)
# Changes Pending:
# The existing device has changes made to it that
# need to be sync'd to the group.
#
# Awaiting Initial Sync:
# This is a new device group and has not had any sync
# done yet. You _must_ `sync_device_to_group` in this
# case.
#
# Not All Devices Synced:
# A device group will go into this state immediately
# after starting the sync and stay until all devices finish.
#
if status in ['Changes Pending']:
details = self._get_details_from_resource(resource)
self._validate_pending_status(details)
pass
elif status in ['Awaiting Initial Sync', 'Not All Devices Synced']:
pass
elif status == 'In Sync':
return
else:
raise F5ModuleError(status)
def read_current_from_device(self):
result = self.client.api.tm.cm.sync_status.load()
return result
def _get_status_from_resource(self, resource):
resource.refresh()
entries = resource.entries.copy()
k, v = entries.popitem()
status = v['nestedStats']['entries']['status']['description']
return status
def _get_details_from_resource(self, resource):
resource.refresh()
stats = resource.entries.copy()
tree = Tree(stats)
details = list(tree.execute('$..*["details"]["description"]'))
result = details[::-1]
return result
def _validate_pending_status(self, details):
"""Validate the content of a pending sync operation
This is a hack. The REST API is not consistent with its 'status' values
so this method is here to check the returned strings from the operation
and see if it reported any of these inconsistencies.
:param details:
:raises F5ModuleError:
"""
pattern1 = r'.*(?P<msg>Recommended\s+action.*)'
for detail in details:
matches = re.search(pattern1, detail)
if matches:
raise F5ModuleError(matches.group('msg'))
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
self.argument_spec = dict(
sync_device_to_group=dict(
type='bool'
),
sync_most_recent_to_device=dict(
type='bool'
),
overwrite_config=dict(
type='bool',
default='no'
),
device_group=dict(
required=True
)
)
self.f5_product_name = 'bigip'
self.required_one_of = [
['sync_device_to_group', 'sync_most_recent_to_device']
]
self.mutually_exclusive = [
['sync_device_to_group', 'sync_most_recent_to_device']
]
def main():
if not HAS_F5SDK:
raise F5ModuleError("The python f5-sdk module is required")
if not HAS_OBJPATH:
raise F5ModuleError("The python objectpath module is required")
spec = ArgumentSpec()
client = AnsibleF5Client(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
mutually_exclusive=spec.mutually_exclusive,
required_one_of=spec.required_one_of,
f5_product_name=spec.f5_product_name
)
try:
mm = ModuleManager(client)
results = mm.exec_module()
client.module.exit_json(**results)
except F5ModuleError as e:
client.module.fail_json(msg=str(e))
if __name__ == '__main__':
main()
|
"""
================================================================================
Base Processor
================================================================================
| This is a template that is used to add additional processors to
| the package.
| Written By: Matthew Stadelman
| Date Written: 2016/02/26
| Last Modified: 2016/10/25
"""
import os
from .. import _get_logger
logger = _get_logger(__name__)
class BaseProcessor(object):
r"""
    The only required parameter is a data field object; this initializes the
    properties defined by subclasses.
"""
def __init__(self, field):
# initializing properties
self.action = 'base'
self.args = {}
self.infile = field.infile
self.data_vector = None
self.data_map = None
self.outfile_name = os.path.basename(self.infile) if self.infile else ''
self.outfile_content = None
self.output_key = None
self.processed_data = None
# copying field data
field.copy_data(self)
@classmethod
def _add_subparser(cls, subparser):
r"""
Adds a specific action based sub-parser to the supplied arg_parser
instance.
"""
msg = 'This method must be implemented by a specific '
msg += 'data processing class'
raise NotImplementedError(msg)
def setup(self, **kwargs):
r"""
Sets or resets arguments
"""
self.args.update(kwargs)
def process(self, **kwargs):
r"""
        Calls the subclassed routine _process_data to generate the processed data
"""
if not self.args:
msg = 'No arguments have been set, use setup(**kwargs) method'
logger.error(msg)
return
#
self._process_data(**kwargs)
def _process_data(self, **kwargs):
r"""
Not implemented
"""
msg = 'This method must be implemented by a specific '
msg += 'data processing class'
raise NotImplementedError(msg)
def gen_output(self, **kwargs):
r"""
        Calls the subclassed routine _output_data to create the outfile content
"""
if not self.processed_data:
msg = 'No data has been processed. Run process() method'
logger.error(msg)
return
#
self._output_data(**kwargs)
def _output_data(self, filename=None, delim=','):
r"""
Not implemented
"""
msg = 'This method must be implemented by a specific '
msg += 'data processing class'
raise NotImplementedError(msg)
def copy_processed_data(self, data_dict, alt_key=False):
r"""
        Copies the current processed data array to a dict object using a
key defined in the subclass initialization or a key supplied by the
alt_key keyword.
"""
if not self.processed_data:
msg = 'No data has been processed. Run process() method'
logger.error(msg)
return
#
key = alt_key if alt_key else self.output_key
data_dict[key] = self.processed_data
def print_data(self):
r"""
        Writes the data processor's output content to the screen
"""
if (not self.outfile_content):
msg = 'No output content. Run gen_output() method'
logger.error(msg)
return
#
print(self.outfile_content)
print('')
def write_data(self, path=os.path.realpath(os.curdir)):
r"""
Writes the data processor's data to its outfile
"""
if (not self.outfile_content):
msg = 'No output content. Run gen_output() method'
logger.error(msg)
return
#
filename = os.path.join(path, self.outfile_name)
with open(filename, 'w') as f:
f.write(self.outfile_content)
#
logger.info('Output saved as: ' + filename)
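
# --- Illustrative sketch (not part of the original module) ---
# A hypothetical minimal subclass showing the workflow the template above
# expects: __init__(field) -> setup(**kwargs) -> process() -> gen_output().
# The data field object and the 'threshold' argument are invented for the
# example; only the structure mirrors the base class contract.
class ExampleThresholdProcessor(BaseProcessor):
    r"""
    Keeps the values of the data vector that exceed a threshold.
    """
    def __init__(self, field):
        super(ExampleThresholdProcessor, self).__init__(field)
        self.action = 'example-threshold'
        self.output_key = 'above_threshold'

    def _process_data(self, **kwargs):
        threshold = self.args['threshold']
        self.processed_data = [v for v in self.data_vector if v > threshold]

    def _output_data(self, filename=None, delim=','):
        if filename is not None:
            self.outfile_name = filename
        self.outfile_content = delim.join(str(v) for v in self.processed_data)

# Intended call sequence (hedged, since the data field class lives elsewhere
# in the package and must supply infile, data_vector, etc. via copy_data):
#     proc = ExampleThresholdProcessor(field)
#     proc.setup(threshold=0.5)
#     proc.process()
#     proc.gen_output()
#     proc.print_data()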
|
"""Unit tests for :mod:`gwpy.cli.coherence`
"""
from ... import cli
from .base import _TestCliProduct
from .test_spectrum import TestCliSpectrum as _TestCliSpectrum
class TestCliCoherence(_TestCliSpectrum):
TEST_CLASS = cli.Coherence
ACTION = 'coherence'
TEST_ARGS = _TestCliProduct.TEST_ARGS + [
'--chan', 'Y1:TEST-CHANNEL', '--secpfft', '0.25',
]
def test_init(self, prod):
assert prod.chan_list == ['X1:TEST-CHANNEL', 'Y1:TEST-CHANNEL']
assert prod.ref_chan == prod.chan_list[0]
def test_get_suptitle(self, prod):
assert prod.get_suptitle() == 'Coherence: {0}'.format(
prod.chan_list[0])
|
import time
from atn import core_utils
node_name = core_utils.get_node_name()
session_id = core_utils.get_session_id()
t = time.time()
print "NEM ID (1): %d (%f s)" % (core_utils.get_nem_id(), time.time() - t)
t = time.time()
print "NEM ID (2): %d (%f s)" % (core_utils.get_nem_id(node_name=node_name), time.time() - t)
t = time.time()
print "NEM ID (3): %d (%f s)" % (core_utils.get_nem_id(session_id=session_id), time.time() - t)
t = time.time()
print "NEM ID (4): %d (%f s)" % (core_utils.get_nem_id(node_name=node_name, session_id=session_id), time.time() - t)
t = time.time()
print "NODE ID (1): %d (%f s)" % (core_utils.get_node_number(), time.time() - t)
t = time.time()
print "NODE ID (2): %d (%f s)" % (core_utils.get_node_number(node_name=node_name), time.time() - t)
t = time.time()
print "NODE ID (3): %d (%f s)" % (core_utils.get_node_number(session_id=session_id), time.time() - t)
t = time.time()
print "NODE ID (4): %d (%f s)" % (core_utils.get_node_number(node_name=node_name, session_id=session_id), time.time() - t)
t = time.time()
x, y = core_utils.get_xy()
print "NODE XY (1): (%f, %f) (%f s)" % (x, y, time.time() - t)
t = time.time()
x, y = core_utils.get_xy(node_name=node_name)
print "NODE XY (2): (%f, %f) (%f s)" % (x, y, time.time() - t)
t = time.time()
x, y = core_utils.get_xy(session_id=session_id)
print "NODE XY (3): (%f, %f) (%f s)" % (x, y, time.time() - t)
t = time.time()
x, y = core_utils.get_xy(node_name=node_name, session_id=session_id)
print "NODE XY (4): (%f, %f) (%f s)" % (x, y, time.time() - t)
|
import warnings
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.models.loading import get_model
from haystack.backends import BaseEngine, BaseSearchBackend, BaseSearchQuery, log_query, EmptyResults
from haystack.constants import ID, DJANGO_CT, DJANGO_ID
from haystack.exceptions import MissingDependency, MoreLikeThisError
from haystack.inputs import PythonData, Clean, Exact
from haystack.models import SearchResult
from haystack.utils import get_identifier
from haystack.utils import log as logging
try:
from pysolr import Solr, SolrError
except ImportError:
raise MissingDependency("The 'solr' backend requires the installation of 'pysolr'. Please refer to the documentation.")
class SolrSearchBackend(BaseSearchBackend):
    # Words reserved by Solr for special use.
RESERVED_WORDS = (
'AND',
'NOT',
'OR',
'TO',
)
# Characters reserved by Solr for special use.
# The '\\' must come first, so as not to overwrite the other slash replacements.
RESERVED_CHARACTERS = (
'\\', '+', '-', '&&', '||', '!', '(', ')', '{', '}',
'[', ']', '^', '"', '~', '*', '?', ':',
)
def __init__(self, connection_alias, **connection_options):
super(SolrSearchBackend, self).__init__(connection_alias, **connection_options)
if not 'URL' in connection_options:
raise ImproperlyConfigured("You must specify a 'URL' in your settings for connection '%s'." % connection_alias)
self.conn = Solr(connection_options['URL'], timeout=self.timeout)
self.log = logging.getLogger('haystack')
def update(self, index, iterable, commit=True):
docs = []
for obj in iterable:
try:
docs.append(index.full_prepare(obj))
except UnicodeDecodeError:
if not self.silently_fail:
raise
# We'll log the object identifier but won't include the actual object
# to avoid the possibility of that generating encoding errors while
# processing the log message:
self.log.error(u"UnicodeDecodeError while preparing object for update", exc_info=True, extra={
"data": {
"index": index,
"object": get_identifier(obj)
}
})
if len(docs) > 0:
try:
self.conn.add(docs, commit=commit, boost=index.get_field_weights())
except (IOError, SolrError), e:
if not self.silently_fail:
raise
self.log.error("Failed to add documents to Solr: %s", e)
def remove(self, obj_or_string, commit=True):
solr_id = get_identifier(obj_or_string)
try:
kwargs = {
'commit': commit,
ID: solr_id
}
self.conn.delete(**kwargs)
except (IOError, SolrError), e:
if not self.silently_fail:
raise
self.log.error("Failed to remove document '%s' from Solr: %s", solr_id, e)
def clear(self, models=[], commit=True):
try:
if not models:
# *:* matches all docs in Solr
self.conn.delete(q='*:*', commit=commit)
else:
models_to_delete = []
for model in models:
models_to_delete.append("%s:%s.%s" % (DJANGO_CT, model._meta.app_label, model._meta.module_name))
self.conn.delete(q=" OR ".join(models_to_delete), commit=commit)
# Run an optimize post-clear. http://wiki.apache.org/solr/FAQ#head-9aafb5d8dff5308e8ea4fcf4b71f19f029c4bb99
self.conn.optimize()
except (IOError, SolrError), e:
if not self.silently_fail:
raise
if len(models):
self.log.error("Failed to clear Solr index of models '%s': %s", ','.join(models_to_delete), e)
else:
self.log.error("Failed to clear Solr index: %s", e)
@log_query
def search(self, query_string, **kwargs):
if len(query_string) == 0:
return {
'results': [],
'hits': 0,
}
search_kwargs = self.build_search_kwargs(query_string, **kwargs)
try:
raw_results = self.conn.search(query_string, **search_kwargs)
except (IOError, SolrError), e:
if not self.silently_fail:
raise
self.log.error("Failed to query Solr using '%s': %s", query_string, e)
raw_results = EmptyResults()
return self._process_results(raw_results, highlight=kwargs.get('highlight'), result_class=kwargs.get('result_class', SearchResult), distance_point=kwargs.get('distance_point'))
def build_search_kwargs(self, query_string, sort_by=None, start_offset=0, end_offset=None,
fields='', highlight=False, facets=None,
date_facets=None, query_facets=None,
narrow_queries=None, spelling_query=None,
within=None, dwithin=None, distance_point=None,
models=None, limit_to_registered_models=None,
result_class=None):
kwargs = {'fl': '* score'}
if fields:
if isinstance(fields, (list, set)):
fields = " ".join(fields)
kwargs['fl'] = fields
if sort_by is not None:
if sort_by in ['distance asc', 'distance desc'] and distance_point:
# Do the geo-enabled sort.
lng, lat = distance_point['point'].get_coords()
kwargs['sfield'] = distance_point['field']
kwargs['pt'] = '%s,%s' % (lat, lng)
if sort_by == 'distance asc':
kwargs['sort'] = 'geodist() asc'
else:
kwargs['sort'] = 'geodist() desc'
else:
if sort_by.startswith('distance '):
warnings.warn("In order to sort by distance, you must call the '.distance(...)' method.")
# Regular sorting.
kwargs['sort'] = sort_by
if start_offset is not None:
kwargs['start'] = start_offset
if end_offset is not None:
kwargs['rows'] = end_offset - start_offset
if highlight is True:
kwargs['hl'] = 'true'
kwargs['hl.fragsize'] = '200'
if self.include_spelling is True:
kwargs['spellcheck'] = 'true'
kwargs['spellcheck.collate'] = 'true'
kwargs['spellcheck.count'] = 1
if spelling_query:
kwargs['spellcheck.q'] = spelling_query
if facets is not None:
kwargs['facet'] = 'on'
kwargs['facet.field'] = facets
if date_facets is not None:
kwargs['facet'] = 'on'
kwargs['facet.date'] = date_facets.keys()
kwargs['facet.date.other'] = 'none'
for key, value in date_facets.items():
kwargs["f.%s.facet.date.start" % key] = self.conn._from_python(value.get('start_date'))
kwargs["f.%s.facet.date.end" % key] = self.conn._from_python(value.get('end_date'))
gap_by_string = value.get('gap_by').upper()
gap_string = "%d%s" % (value.get('gap_amount'), gap_by_string)
if value.get('gap_amount') != 1:
gap_string += "S"
kwargs["f.%s.facet.date.gap" % key] = '+%s/%s' % (gap_string, gap_by_string)
if query_facets is not None:
kwargs['facet'] = 'on'
kwargs['facet.query'] = ["%s:%s" % (field, value) for field, value in query_facets]
if limit_to_registered_models is None:
limit_to_registered_models = getattr(settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)
if models and len(models):
model_choices = sorted(['%s.%s' % (model._meta.app_label, model._meta.module_name) for model in models])
elif limit_to_registered_models:
# Using narrow queries, limit the results to only models handled
# with the current routers.
model_choices = self.build_models_list()
else:
model_choices = []
if len(model_choices) > 0:
if narrow_queries is None:
narrow_queries = set()
narrow_queries.add('%s:(%s)' % (DJANGO_CT, ' OR '.join(model_choices)))
if narrow_queries is not None:
kwargs['fq'] = list(narrow_queries)
if within is not None:
from haystack.utils.geo import generate_bounding_box
kwargs.setdefault('fq', [])
((min_lat, min_lng), (max_lat, max_lng)) = generate_bounding_box(within['point_1'], within['point_2'])
# Bounding boxes are min, min TO max, max. Solr's wiki was *NOT*
# very clear on this.
bbox = '%s:[%s,%s TO %s,%s]' % (within['field'], min_lat, min_lng, max_lat, max_lng)
kwargs['fq'].append(bbox)
if dwithin is not None:
kwargs.setdefault('fq', [])
lng, lat = dwithin['point'].get_coords()
geofilt = '{!geofilt pt=%s,%s sfield=%s d=%s}' % (lat, lng, dwithin['field'], dwithin['distance'].km)
kwargs['fq'].append(geofilt)
# Check to see if the backend should try to include distances
# (Solr 4.X+) in the results.
if self.distance_available and distance_point:
# In early testing, you can't just hand Solr 4.X a proper bounding box
# & request distances. To enable native distance would take calculating
# a center point & a radius off the user-provided box, which kinda
# sucks. We'll avoid it for now, since Solr 4.x's release will be some
# time yet.
# kwargs['fl'] += ' _dist_:geodist()'
pass
return kwargs
def more_like_this(self, model_instance, additional_query_string=None,
start_offset=0, end_offset=None, models=None,
limit_to_registered_models=None, result_class=None, **kwargs):
from haystack import connections
# Deferred models will have a different class ("RealClass_Deferred_fieldname")
# which won't be in our registry:
model_klass = model_instance._meta.concrete_model
index = connections[self.connection_alias].get_unified_index().get_index(model_klass)
field_name = index.get_content_field()
params = {
'fl': '*,score',
}
if start_offset is not None:
params['start'] = start_offset
if end_offset is not None:
params['rows'] = end_offset
narrow_queries = set()
if limit_to_registered_models is None:
limit_to_registered_models = getattr(settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)
if models and len(models):
model_choices = sorted(['%s.%s' % (model._meta.app_label, model._meta.module_name) for model in models])
elif limit_to_registered_models:
# Using narrow queries, limit the results to only models handled
# with the current routers.
model_choices = self.build_models_list()
else:
model_choices = []
if len(model_choices) > 0:
if narrow_queries is None:
narrow_queries = set()
narrow_queries.add('%s:(%s)' % (DJANGO_CT, ' OR '.join(model_choices)))
if additional_query_string:
narrow_queries.add(additional_query_string)
if narrow_queries:
params['fq'] = list(narrow_queries)
query = "%s:%s" % (ID, get_identifier(model_instance))
try:
raw_results = self.conn.more_like_this(query, field_name, **params)
except (IOError, SolrError), e:
if not self.silently_fail:
raise
self.log.error("Failed to fetch More Like This from Solr for document '%s': %s", query, e)
raw_results = EmptyResults()
return self._process_results(raw_results, result_class=result_class)
def _process_results(self, raw_results, highlight=False, result_class=None, distance_point=None):
from haystack import connections
results = []
hits = raw_results.hits
facets = {}
spelling_suggestion = None
if result_class is None:
result_class = SearchResult
if hasattr(raw_results, 'facets'):
facets = {
'fields': raw_results.facets.get('facet_fields', {}),
'dates': raw_results.facets.get('facet_dates', {}),
'queries': raw_results.facets.get('facet_queries', {}),
}
for key in ['fields']:
for facet_field in facets[key]:
# Convert to a two-tuple, as Solr's json format returns a list of
# pairs.
facets[key][facet_field] = zip(facets[key][facet_field][::2], facets[key][facet_field][1::2])
if self.include_spelling is True:
if hasattr(raw_results, 'spellcheck'):
if len(raw_results.spellcheck.get('suggestions', [])):
# For some reason, it's an array of pairs. Pull off the
# collated result from the end.
spelling_suggestion = raw_results.spellcheck.get('suggestions')[-1]
unified_index = connections[self.connection_alias].get_unified_index()
indexed_models = unified_index.get_indexed_models()
for raw_result in raw_results.docs:
app_label, model_name = raw_result[DJANGO_CT].split('.')
additional_fields = {}
model = get_model(app_label, model_name)
if model and model in indexed_models:
for key, value in raw_result.items():
index = unified_index.get_index(model)
string_key = str(key)
if string_key in index.fields and hasattr(index.fields[string_key], 'convert'):
additional_fields[string_key] = index.fields[string_key].convert(value)
else:
additional_fields[string_key] = self.conn._to_python(value)
del(additional_fields[DJANGO_CT])
del(additional_fields[DJANGO_ID])
del(additional_fields['score'])
if raw_result[ID] in getattr(raw_results, 'highlighting', {}):
additional_fields['highlighted'] = raw_results.highlighting[raw_result[ID]]
if distance_point:
additional_fields['_point_of_origin'] = distance_point
if raw_result.get('__dist__'):
from haystack.utils.geo import Distance
additional_fields['_distance'] = Distance(km=float(raw_result['__dist__']))
else:
additional_fields['_distance'] = None
result = result_class(app_label, model_name, raw_result[DJANGO_ID], raw_result['score'], **additional_fields)
results.append(result)
else:
hits -= 1
return {
'results': results,
'hits': hits,
'facets': facets,
'spelling_suggestion': spelling_suggestion,
}
def build_schema(self, fields):
content_field_name = ''
schema_fields = []
for field_name, field_class in fields.items():
field_data = {
'field_name': field_class.index_fieldname,
'type': 'text_en',
'indexed': 'true',
'stored': 'true',
'multi_valued': 'false',
}
if field_class.document is True:
content_field_name = field_class.index_fieldname
# DRL_FIXME: Perhaps move to something where, if none of these
# checks succeed, call a custom method on the form that
# returns, per-backend, the right type of storage?
if field_class.field_type in ['date', 'datetime']:
field_data['type'] = 'date'
elif field_class.field_type == 'integer':
field_data['type'] = 'long'
elif field_class.field_type == 'float':
field_data['type'] = 'float'
elif field_class.field_type == 'boolean':
field_data['type'] = 'boolean'
elif field_class.field_type == 'ngram':
field_data['type'] = 'ngram'
elif field_class.field_type == 'edge_ngram':
field_data['type'] = 'edge_ngram'
elif field_class.field_type == 'location':
field_data['type'] = 'location'
if field_class.is_multivalued:
field_data['multi_valued'] = 'true'
if field_class.stored is False:
field_data['stored'] = 'false'
# Do this last to override `text` fields.
if field_class.indexed is False:
field_data['indexed'] = 'false'
# If it's text and not being indexed, we probably don't want
# to do the normal lowercase/tokenize/stemming/etc. dance.
if field_data['type'] == 'text_en':
field_data['type'] = 'string'
# If it's a ``FacetField``, make sure we don't postprocess it.
if hasattr(field_class, 'facet_for'):
# If it's text, it ought to be a string.
if field_data['type'] == 'text_en':
field_data['type'] = 'string'
schema_fields.append(field_data)
return (content_field_name, schema_fields)
def extract_file_contents(self, file_obj):
"""Extract text and metadata from a structured file (PDF, MS Word, etc.)
Uses the Solr ExtractingRequestHandler, which is based on Apache Tika.
See the Solr wiki for details:
http://wiki.apache.org/solr/ExtractingRequestHandler
Due to the way the ExtractingRequestHandler is implemented it completely
replaces the normal Haystack indexing process with several unfortunate
restrictions: only one file per request, the extracted data is added to
the index with no ability to modify it, etc. To simplify the process and
allow for more advanced use we'll run using the extract-only mode to
return the extracted data without adding it to the index so we can then
use it within Haystack's normal templating process.
Returns None if metadata cannot be extracted; otherwise returns a
dictionary containing at least two keys:
:contents:
Extracted full-text content, if applicable
:metadata:
key:value pairs of text strings
"""
try:
return self.conn.extract(file_obj)
except StandardError, e:
self.log.warning(u"Unable to extract file contents: %s", e,
exc_info=True, extra={"data": {"file": file_obj}})
return None
class SolrSearchQuery(BaseSearchQuery):
def matching_all_fragment(self):
return '*:*'
def add_spatial(self, lat, lon, sfield, distance, filter='bbox'):
"""Adds spatial query parameters to search query"""
kwargs = {
'lat': lat,
            'long': lon,
'sfield': sfield,
'distance': distance,
}
self.spatial_query.update(kwargs)
def add_order_by_distance(self, lat, long, sfield):
"""Orders the search result by distance from point."""
kwargs = {
'lat': lat,
'long': long,
'sfield': sfield,
}
self.order_by_distance.update(kwargs)
def build_query_fragment(self, field, filter_type, value):
from haystack import connections
query_frag = ''
if not hasattr(value, 'input_type_name'):
# Handle when we've got a ``ValuesListQuerySet``...
if hasattr(value, 'values_list'):
value = list(value)
if isinstance(value, basestring):
# It's not an ``InputType``. Assume ``Clean``.
value = Clean(value)
else:
value = PythonData(value)
# Prepare the query using the InputType.
prepared_value = value.prepare(self)
if not isinstance(prepared_value, (set, list, tuple)):
# Then convert whatever we get back to what pysolr wants if needed.
prepared_value = self.backend.conn._from_python(prepared_value)
# 'content' is a special reserved word, much like 'pk' in
# Django's ORM layer. It indicates 'no special field'.
if field == 'content':
index_fieldname = ''
else:
index_fieldname = u'%s:' % connections[self._using].get_unified_index().get_index_fieldname(field)
filter_types = {
'contains': u'%s',
'startswith': u'%s*',
'exact': u'%s',
'gt': u'{%s TO *}',
'gte': u'[%s TO *]',
'lt': u'{* TO %s}',
'lte': u'[* TO %s]',
}
if value.post_process is False:
query_frag = prepared_value
else:
if filter_type in ['contains', 'startswith']:
if value.input_type_name == 'exact':
query_frag = prepared_value
else:
                    # Iterate over terms & incorporate the converted form of each into the query.
terms = []
for possible_value in prepared_value.split(' '):
terms.append(filter_types[filter_type] % self.backend.conn._from_python(possible_value))
if len(terms) == 1:
query_frag = terms[0]
else:
query_frag = u"(%s)" % " AND ".join(terms)
elif filter_type == 'in':
in_options = []
for possible_value in prepared_value:
in_options.append(u'"%s"' % self.backend.conn._from_python(possible_value))
query_frag = u"(%s)" % " OR ".join(in_options)
elif filter_type == 'range':
start = self.backend.conn._from_python(prepared_value[0])
end = self.backend.conn._from_python(prepared_value[1])
query_frag = u'["%s" TO "%s"]' % (start, end)
elif filter_type == 'exact':
if value.input_type_name == 'exact':
query_frag = prepared_value
else:
prepared_value = Exact(prepared_value).prepare(self)
query_frag = filter_types[filter_type] % prepared_value
else:
if value.input_type_name != 'exact':
prepared_value = Exact(prepared_value).prepare(self)
query_frag = filter_types[filter_type] % prepared_value
if len(query_frag) and not query_frag.startswith('(') and not query_frag.endswith(')'):
query_frag = "(%s)" % query_frag
return u"%s%s" % (index_fieldname, query_frag)
def build_alt_parser_query(self, parser_name, query_string='', **kwargs):
if query_string:
kwargs['v'] = query_string
kwarg_bits = []
for key in sorted(kwargs.keys()):
if isinstance(kwargs[key], basestring) and ' ' in kwargs[key]:
kwarg_bits.append(u"%s='%s'" % (key, kwargs[key]))
else:
kwarg_bits.append(u"%s=%s" % (key, kwargs[key]))
return u"{!%s %s}" % (parser_name, ' '.join(kwarg_bits))
def build_params(self, spelling_query=None, **kwargs):
search_kwargs = {
'start_offset': self.start_offset,
'result_class': self.result_class
}
order_by_list = None
if self.order_by:
if order_by_list is None:
order_by_list = []
for order_by in self.order_by:
if order_by.startswith('-'):
order_by_list.append('%s desc' % order_by[1:])
else:
order_by_list.append('%s asc' % order_by)
search_kwargs['sort_by'] = ", ".join(order_by_list)
if self.date_facets:
search_kwargs['date_facets'] = self.date_facets
if self.distance_point:
search_kwargs['distance_point'] = self.distance_point
if self.dwithin:
search_kwargs['dwithin'] = self.dwithin
if self.end_offset is not None:
search_kwargs['end_offset'] = self.end_offset
if self.facets:
search_kwargs['facets'] = list(self.facets)
if self.fields:
search_kwargs['fields'] = self.fields
if self.highlight:
search_kwargs['highlight'] = self.highlight
if self.models:
search_kwargs['models'] = self.models
if self.narrow_queries:
search_kwargs['narrow_queries'] = self.narrow_queries
if self.query_facets:
search_kwargs['query_facets'] = self.query_facets
if self.within:
search_kwargs['within'] = self.within
if spelling_query:
search_kwargs['spelling_query'] = spelling_query
return search_kwargs
def run(self, spelling_query=None, **kwargs):
"""Builds and executes the query. Returns a list of search results."""
final_query = self.build_query()
search_kwargs = self.build_params(spelling_query, **kwargs)
results = self.backend.search(final_query, **search_kwargs)
self._results = results.get('results', [])
self._hit_count = results.get('hits', 0)
self._facet_counts = self.post_process_facets(results)
self._spelling_suggestion = results.get('spelling_suggestion', None)
def run_mlt(self, **kwargs):
"""Builds and executes the query. Returns a list of search results."""
if self._more_like_this is False or self._mlt_instance is None:
raise MoreLikeThisError("No instance was provided to determine 'More Like This' results.")
additional_query_string = self.build_query()
search_kwargs = {
'start_offset': self.start_offset,
'result_class': self.result_class,
}
if self.end_offset is not None:
search_kwargs['end_offset'] = self.end_offset - self.start_offset
results = self.backend.more_like_this(self._mlt_instance, additional_query_string, **search_kwargs)
self._results = results.get('results', [])
self._hit_count = results.get('hits', 0)
class SolrEngine(BaseEngine):
backend = SolrSearchBackend
query = SolrSearchQuery
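
# --- Illustrative sketch (not part of haystack) ---
# Demonstrates with plain strings how the filter_types templates in
# SolrSearchQuery.build_query_fragment turn a lookup type and a prepared value
# into a Lucene/Solr query fragment. The field names and values are made up;
# the real method additionally runs values through pysolr's _from_python and
# resolves the index fieldname.
if __name__ == "__main__":
    filter_types = {
        'contains': u'%s',
        'startswith': u'%s*',
        'exact': u'%s',
        'gt': u'{%s TO *}',
        'gte': u'[%s TO *]',
        'lt': u'{* TO %s}',
        'lte': u'[* TO %s]',
    }
    examples = [
        ('title', 'startswith', 'dja'),
        ('pub_date', 'gte', '2010-01-01T00:00:00Z'),
        ('views', 'lt', '100'),
    ]
    for field, lookup, value in examples:
        fragment = filter_types[lookup] % value
        # build_query_fragment wraps the fragment in parentheses and prefixes
        # the index fieldname, e.g. title:(dja*)
        print(u"%s:(%s)" % (field, fragment))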
|
from pychess.System import conf
from pychess.Utils.const import FAN_PIECES, BLACK, WHITE
__label__ = _("Html Diagram")
__ending__ = "html"
__append__ = False
SIZE = 40
BORDER_SIZE = SIZE // 4 + SIZE // 8
FILL = SIZE - BORDER_SIZE
FONT_SIZE = SIZE - 4
style = """
.chessboard {
width: %spx;
height: %spx;
font-family: "DejaVu Serif", "DejaVu", serif;
line-height: %spx;
}
.black {
float: left;
width: %spx;
height: %spx;
background-color: #999;
font-size:%spx;
text-align:center;
display: table-cell;
vertical-align:middle;
}
.white {
float: left;
width: %spx;
height: %spx;
background-color: #fff;
font-size:%spx;
text-align:center;
display: table-cell;
vertical-align:middle;
}
.fill-top {
float: left;
width: %spx;
height: %spx;
color: #ffff;
display: table-cell;
}
.top-corner {
float: left;
width: %spx;
height: %spx;
background-color: #333;
display: table-cell;
}
.top {
float: left;
width: %spx;
height: %spx;
background-color: #333;
display: table-cell;
}
.fill-side {
float: left;
width: %spx;
height: %spx;
color: #ffff;
display: table-cell;
}
.side {
float: left;
width: %spx;
height: %spx;
color: #ffff;
background-color: #333;
font-size: %spx;
text-align:center;
display: table-cell;
vertical-align:middle;
}
.bottom-corner {
float: left;
width: %spx;
height: %spx;
background-color: #333;
display: table-cell;
}
.bottom {
float: left;
width: %spx;
height: %spx;
color: #ffff;
background-color: #333;
font-size: %spx;
line-height: %spx;
text-align: center;
display: table-cell;
}
""" % (
SIZE * 10, SIZE * 10, SIZE, # chessboard
SIZE, SIZE, FONT_SIZE, # black
SIZE, SIZE, FONT_SIZE, # white
FILL, BORDER_SIZE, # fill-top
BORDER_SIZE, BORDER_SIZE, # top-corner
SIZE, BORDER_SIZE, # top
FILL, SIZE, # fill-side
BORDER_SIZE, SIZE, BORDER_SIZE, # side
BORDER_SIZE, BORDER_SIZE, # bottom-corner
SIZE, BORDER_SIZE, BORDER_SIZE, BORDER_SIZE # bottom
)
def save(file, model, position=None, flip=False):
"""Export the current position into a .html file using html+css"""
print("<html><head><meta http-equiv='Content-Type' content='text/html;charset=UTF-8'>", file=file)
print("<style type='text/css'>%s" % style, file=file)
print("</style></head><body><div class='chessboard'>", file=file)
show_cords = conf.get("showCords")
cords_side = "12345678" if flip else "87654321"
cords_bottom = "HGFEDCBA" if flip else "ABCDEFGH"
data = model.boards[position].data[:]
board = ""
# header
if show_cords:
board += "<div class='fill-top'></div>"
board += "<div class='top-corner'></div>"
for cord in range(8):
board += "<div class='top'></div>"
board += "<div class='top-corner'></div>"
board += "<div class='fill-top'></div>"
for j, row in enumerate(data if flip else reversed(data)):
if show_cords:
board += "<div class='fill-side'></div>"
board += "<div class='side'>%s</div>" % cords_side[j]
for i in range(8):
if j % 2 == 0:
color = "white" if i % 2 == 0 else "black"
else:
color = "white" if i % 2 == 1 else "black"
piece = row.get(i)
if piece is not None:
if piece.color == BLACK:
piece_fan = FAN_PIECES[BLACK][piece.piece]
else:
piece_fan = FAN_PIECES[WHITE][piece.piece]
board += "<div class='%s'>%s</div>" % (color, piece_fan)
else:
board += "<div class='%s'></div>" % color
if show_cords:
board += "<div class='side'></div>"
board += "<div class='fill-side'></div>"
board += "\n"
if show_cords:
board += "<div class='fill-top'></div>"
board += "<div class='bottom-corner'></div>"
for cord in cords_bottom:
board += "<div class='bottom'>%s</div>" % cord
board += "<div class='bottom-corner'></div>"
board += "<div class='fill-top'></div>"
print(board, file=file)
print("</div></body></html>", file=file)
file.close()
if __name__ == "__main__":
from pychess.Utils.GameModel import GameModel
model = GameModel()
with open("/home/tamas/board.html", "w") as fi:
save(fi, model, position=0, flip=True)
|
import csv
import sys
import os
project_dir = os.path.dirname(os.path.abspath(__file__)) + '/gde'
sys.path.append(project_dir)
os.environ['DJANGO_SETTINGS_MODULE']='settings'
import django
django.setup()
from app.models import Setor, Campus, Conarq, GrupoConarq, ClassificaArquivosIfes
data = csv.reader(open("setor.csv"),delimiter=",")
for row in data:
if row[0] != 'ID_UNIDADE':
campus = Campus()
setor = Setor()
setor.id_unidade_responsavel = row[0]
setor.id_unidade = row[1]
campus.nome = row[4]
setor.sigla = row[3]
setor.nome = row[3] + " - " + row[5]
if Campus.objects.filter(nome=campus.nome).exists():
campus = Campus.objects.get(nome=campus.nome)
setor.campus = campus
setor.save()
else:
campus.save()
campus = Campus.objects.get(nome=campus.nome)
setor.campus = campus
setor.save()
data = csv.reader(open("codigos.csv"),delimiter=",")
for row in data:
if row[0] != 'Classe Geral':
conarq = Conarq()
grupoConarq = GrupoConarq()
classificaArquivosIfes = ClassificaArquivosIfes()
conarq.codigo = row[1]
conarq.assunto = row[3]
conarq.faseCorrente = row[4]
conarq.faseIntermediaria = row[5]
conarq.destinacaoFinal = row[6]
conarq.observacoes = row[7]
grupoConarq.codigo = row[0]
grupoConarq.nome = row[8]
classificaArquivosIfes.codigo = row[2]
if GrupoConarq.objects.filter(codigo=grupoConarq.codigo).exists():
conarq.codGrupo = GrupoConarq.objects.get(codigo=grupoConarq.codigo)
conarq.save()
classificaArquivosIfes.conarq = conarq
else:
grupoConarq.save()
conarq.codGrupo = grupoConarq
conarq.save()
classificaArquivosIfes.conarq = conarq
classificaArquivosIfes.save()
|
class testcls1:
def __init__(self):
self.__private = 1
|
import os
from decimal import Decimal
from sqlalchemy.util import KeyedTuple
from tests.command.exporter import load
from tests.testcase import DbTestCase
from tests.mock.sources import MockSource
from dbmanagr.command import exporter
from dbmanagr.exception import UnknownTableException, UnknownColumnException, \
UnknownConnectionException
from dbmanagr.utils import mute_stderr
from dbmanagr.dto.mapper import to_dto
from dbmanagr.model.row import Row
def test_exporter():
os.environ['UNITTEST'] = 'True'
for test in load():
yield test,
del os.environ['UNITTEST']
class DifferTestCase(DbTestCase):
def test_yaml_value(self):
"""Tests the exporter.writer.yaml_value function"""
DbTestCase.connection.close()
DbTestCase.connection = MockSource().list()[0]
DbTestCase.connection.connect()
con = DbTestCase.connection
user = con.table('user2')
user_dto = to_dto(user)
self.assertEqual(
u'!!null null',
exporter.writer.yaml_value(
to_dto(user.column('id')), user_dto, None))
self.assertEqual(
u'!!float 3.141500',
exporter.writer.yaml_value(
to_dto(user.column('score')), user_dto, 3.141500))
self.assertEqual(
u'Yes',
exporter.writer.yaml_value(
to_dto(user.column('deleted')), user_dto, True))
self.assertEqual(
u'!!str Yes',
exporter.writer.yaml_value(
to_dto(user.column('url')), user_dto, 'Yes'))
self.assertEqual(
u'!!int 3',
exporter.writer.yaml_value(
to_dto(user.column('score')), user_dto, Decimal(3.0)))
self.assertEqual(
u'!!float 3.100000',
exporter.writer.yaml_value(
to_dto(user.column('score')), user_dto, Decimal(3.1)))
def test_unknown_connection(self):
"""Tests unknown connection"""
self.assertRaises(
UnknownConnectionException,
exporter.run,
['unknown'])
def test_unknown_table(self):
"""Tests unknown tables"""
self.assertRaises(
Exception,
exporter.run,
['dbmanagr.sqlite'])
self.assertRaises(
UnknownTableException,
exporter.run,
['dbmanagr.sqlite/unknown?'])
def test_unknown_column(self):
"""Tests unknown columns"""
self.assertRaises(
UnknownColumnException,
exporter.run,
['dbmanagr.sqlite/user2?', '-i', 'unknown'])
self.assertRaises(
UnknownColumnException,
exporter.run,
['dbmanagr.sqlite/user2?', '-x', 'unknown'])
def test_foreign_keys(self):
"""Tests foreign keys"""
pass
def test_writer(self):
"""Tests the writer"""
import sys
sys.argv = ['']
self.assertRaises(
SystemExit,
mute_stderr(exporter.main))
self.assertEqual(
0,
exporter.main(['dbmanagr.sqlite/user?id=1']))
def test_execute(self):
"""Tests the exporter.execute function"""
self.assertRaises(
SystemExit,
mute_stderr(exporter.execute),
[]
)
def test_row_item(self):
"""Tests the RowItem class"""
row = Row(None, KeyedTuple(
[
'', -1, '', '', None, None, '',
'', '', '', -1, -1],
labels=[
'database_name', 'pid', 'username', 'client',
'transaction_start', 'query_start', 'state', 'query',
'blocked', 'blocked_by', 'transaction_duration',
'query_duration']
))
item = exporter.RowItem(row, None, None, None)
self.assertEqual(item, item)
self.assertEqual(
item,
exporter.RowItem.from_json({
'row': row,
'include': None,
'exclude': None,
'substitutes': None
})
)
|
import numpy as np
from ..random import RandomArbitraryPdf
np.random.seed(12324)
def test_return_right_pdf():
x = np.arange(10.)
f = x ** 2.
rand = RandomArbitraryPdf(x, f)
draws = rand(10000)
draws.sort()
# there should be very few small numbers
assert draws[5000] > 5
# but a few of them
assert draws[2] <= 3
# check range
    assert draws[0] >= 0
assert draws[-1] < 10.
def test_unequal_bins():
'''Almost all results should be from the 1-100 range because we are talking
probability DENSITY.'''
rand = RandomArbitraryPdf(np.array([0,1,100]), np.ones(3))
draws = rand(10000)
draws.sort()
assert draws[1000] > 1
|
from import_relative import *
Mbase_subcmd = import_relative('base_subcmd', '..')
class ShowAutoEval(Mbase_subcmd.DebuggerShowBoolSubcommand):
"Show Python evaluation of unrecognized debugger commands"
min_abbrev = len('autoe')
pass
if __name__ == '__main__':
Mhelper = import_relative('__demo_helper__', '.', 'trepan')
Mhelper.demo_run(ShowAutoEval)
pass
|
"""
Copyright (c) 2016 "Vade Secure"
...
This file is part of test-automation-framework.
test-automation-framework is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from unittest import TestCase
from os.path import dirname, join
import time
from selenium.webdriver.common.by import By
from testlinktool.wrapper.UITestCase import UITestCase
class WaitingElement(TestCase):
def setUp(self):
self.tested_object = UITestCase()
self.tested_object.get_phantomjs()
self.tested_object.driver.get("file://" + join(dirname(__file__), "html_file.html"))
def test_waiting_existing_element(self):
self.assertTrue(self.tested_object.wait_element(By.ID, "already_existing", 5))
def test_waiting_hidden_element(self):
self.assertTrue(self.tested_object.element_does_not_appear_after_waiting(By.ID, "hidden", 5))
def test_waiting_element_discovered_after_sometime(self):
self.assertTrue(self.tested_object.element_does_not_appear(By.ID, "timeouted"))
self.assertFalse(self.tested_object.element_does_not_appear_after_waiting(By.ID, "timeouted", 30))
def test_clickable(self):
self.assertRaises(AssertionError, self.tested_object.assertIsClickable, By.ID, "non_clickable")
self.assertRaises(AssertionError, self.tested_object.assertIsClickable, By.ID, "timeout_non_clickable")
self.tested_object.assertIsClickable(By.ID, "timeout_non_clickable", 30)
self.assertRaises(AssertionError, self.tested_object.assertIsNotClickable, By.ID, "clickable")
self.tested_object.assertIsNotClickable(By.ID, "timeout_clickable", 30)
class TestSelection(TestCase):
def test_select_with_value(self):
self.tested_object = UITestCase()
self.tested_object.get_phantomjs()
self.tested_object.driver.get("file://" + join(dirname(__file__), "html_file.html"))
self.tested_object.select_option_with_text(By.NAME, "selected", value="1")
self.tested_object.wait_to_be_clickable_then_click(By.NAME, "subform", 1)
self.assertIn("selected=1", self.tested_object.driver.current_url)
def test_select_with_text(self):
self.tested_object = UITestCase()
self.tested_object.get_phantomjs()
self.tested_object.driver.get("file://" + join(dirname(__file__), "html_file.html"))
self.tested_object.select_option_with_text(By.NAME, "selected", visible_text="number1")
self.tested_object.wait_to_be_clickable_then_click(By.NAME, "subform", 1)
self.assertIn("selected=1", self.tested_object.driver.current_url)
def test_select_not_existing(self):
self.tested_object = UITestCase()
self.tested_object.get_phantomjs()
self.tested_object.driver.get("file://" + join(dirname(__file__), "html_file.html"))
self.assertRaises(Exception, self.tested_object.select_option_with_text, By.NAME, "selected", value="9")
|
'''**********************************************************************
Copyright (C) 2009-2016 The Freeciv-web project
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
***********************************************************************'''
import os
import sys
import time
import datetime
import mysql.connector
import configparser
import http.client
settings = configparser.ConfigParser()
settings.read("../../pbem/settings.ini")
mysql_user=settings.get("Config", "mysql_user")
mysql_database=settings.get("Config", "mysql_database");
mysql_password=settings.get("Config", "mysql_password");
server_map = {};
is_first_check = True;
def increment_metaserver_stats():
result = None;
cursor = None;
cnx = None;
try:
cnx = mysql.connector.connect(user=mysql_user, database=mysql_database, password=mysql_password)
cursor = cnx.cursor()
query = ("INSERT INTO games_played_stats (statsDate, gameType, gameCount) VALUES (CURDATE(), 3, 1) ON DUPLICATE KEY UPDATE gameCount = gameCount + 1;")
cursor.execute(query);
cnx.commit();
    finally:
        if cursor is not None:
            cursor.close()
        if cnx is not None:
            cnx.close()
def poll_metaserver():
global is_first_check;
global server_map;
conn = http.client.HTTPConnection("meta.freeciv.org")
conn.request("GET", "/");
r1 = conn.getresponse();
html_doc = r1.read();
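    # Note: the metaserver page is scraped as raw HTML below; it is split on
    # "<tr>" table rows, and the leading rows plus the trailing row (assumed
    # here not to describe game servers) are dropped before parsing the cells.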
all_rows = html_doc.decode('utf-8').split("<tr>");
rows = all_rows[2:len(all_rows)-1];
for row in rows:
cells = row.split("<");
hostname_port = cells[2];
state = cells[11];
if (hostname_port in server_map):
old_state = server_map[hostname_port];
if ("Pregame" in old_state and "Running" in state):
# new game: existing server transitions from pregame to running state.
print("new game started for: " + hostname_port);
increment_metaserver_stats();
elif "Running" in state and not is_first_check and not hostname_port in server_map:
# new game: new server starts directly in running state.
print("new game started for: " + hostname_port);
increment_metaserver_stats();
server_map[hostname_port] = state;
is_first_check = False;
if __name__ == '__main__':
print("Freeciv-web meta.freeciv.org stats");
while (1):
try:
time.sleep(1);
poll_metaserver();
time.sleep(60*10); #poll every 10 minutes.
except Exception as e:
print(e);
|
import xlwt
from datetime import datetime
from openerp.addons.report_xls.report_xls import report_xls
from openerp.addons.report_xls.utils import rowcol_to_cell
from openerp.addons.account_financial_report_webkit.report.general_ledger \
import GeneralLedgerWebkit
from openerp.tools.translate import _
_column_sizes = [
('date', 12),
('period', 12),
('move', 20),
('journal', 12),
('account_code', 12),
('partner', 30),
('label', 45),
('counterpart', 30),
('debit', 15),
('credit', 15),
('cumul_bal', 15),
('curr_bal', 15),
('curr_code', 7),
]
class general_ledger_xls(report_xls):
column_sizes = [x[1] for x in _column_sizes]
def generate_xls_report(self, _p, _xs, data, objects, wb):
ws = wb.add_sheet(_p.report_name[:31])
ws.panes_frozen = True
ws.remove_splits = True
ws.portrait = 0 # Landscape
ws.fit_width_to_pages = 1
row_pos = 0
# set print header/footer
ws.header_str = self.xls_headers['standard']
ws.footer_str = self.xls_footers['standard']
# cf. account_report_general_ledger.mako
initial_balance_text = {'initial_balance': _('Computed'),
'opening_balance': _('Opening Entries'),
False: _('No')}
# Title
cell_style = xlwt.easyxf(_xs['xls_title'])
report_name = ' - '.join([_p.report_name.upper(),
_p.company.partner_id.name,
_p.company.currency_id.name])
c_specs = [
('report_name', 1, 0, 'text', report_name),
]
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_pos = self.xls_write_row(
ws, row_pos, row_data, row_style=cell_style)
# write empty row to define column sizes
c_sizes = self.column_sizes
c_specs = [('empty%s' % i, 1, c_sizes[i], 'text', None)
for i in range(0, len(c_sizes))]
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_pos = self.xls_write_row(
ws, row_pos, row_data, set_column_size=True)
# Header Table
cell_format = _xs['bold'] + _xs['fill_blue'] + _xs['borders_all']
cell_style = xlwt.easyxf(cell_format)
cell_style_center = xlwt.easyxf(cell_format + _xs['center'])
c_specs = [
('coa', 2, 0, 'text', _('Chart of Account')),
('fy', 1, 0, 'text', _('Fiscal Year')),
('df', 3, 0, 'text', _p.filter_form(data) ==
'filter_date' and _('Dates Filter') or _('Periods Filter')),
('af', 1, 0, 'text', _('Accounts Filter')),
('tm', 2, 0, 'text', _('Target Moves')),
('ib', 2, 0, 'text', _('Initial Balance')),
]
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_pos = self.xls_write_row(
ws, row_pos, row_data, row_style=cell_style_center)
cell_format = _xs['borders_all']
cell_style = xlwt.easyxf(cell_format)
cell_style_center = xlwt.easyxf(cell_format + _xs['center'])
c_specs = [
('coa', 2, 0, 'text', _p.chart_account.name),
('fy', 1, 0, 'text', _p.fiscalyear.name if _p.fiscalyear else '-'),
]
df = _('From') + ': '
if _p.filter_form(data) == 'filter_date':
df += _p.start_date if _p.start_date else u''
else:
df += _p.start_period.name if _p.start_period else u''
df += ' ' + _('To') + ': '
if _p.filter_form(data) == 'filter_date':
df += _p.stop_date if _p.stop_date else u''
else:
df += _p.stop_period.name if _p.stop_period else u''
c_specs += [
('df', 3, 0, 'text', df),
('af', 1, 0, 'text', _p.accounts(data) and ', '.join(
[account.code for account in _p.accounts(data)]) or _('All')),
('tm', 2, 0, 'text', _p.display_target_move(data)),
('ib', 2, 0, 'text', initial_balance_text[
_p.initial_balance_mode]),
]
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_pos = self.xls_write_row(
ws, row_pos, row_data, row_style=cell_style_center)
ws.set_horz_split_pos(row_pos)
row_pos += 1
# Column Title Row
cell_format = _xs['bold']
c_title_cell_style = xlwt.easyxf(cell_format)
# Column Header Row
cell_format = _xs['bold'] + _xs['fill'] + _xs['borders_all']
c_hdr_cell_style = xlwt.easyxf(cell_format)
c_hdr_cell_style_right = xlwt.easyxf(cell_format + _xs['right'])
c_hdr_cell_style_center = xlwt.easyxf(cell_format + _xs['center'])
c_hdr_cell_style_decimal = xlwt.easyxf(
cell_format + _xs['right'],
num_format_str=report_xls.decimal_format)
# Column Initial Balance Row
cell_format = _xs['italic'] + _xs['borders_all']
c_init_cell_style = xlwt.easyxf(cell_format)
c_init_cell_style_decimal = xlwt.easyxf(
cell_format + _xs['right'],
num_format_str=report_xls.decimal_format)
c_specs = [
('date', 1, 0, 'text', _('Date'), None, c_hdr_cell_style),
('period', 1, 0, 'text', _('Period'), None, c_hdr_cell_style),
('move', 1, 0, 'text', _('Entry'), None, c_hdr_cell_style),
('journal', 1, 0, 'text', _('Journal'), None, c_hdr_cell_style),
('account_code', 1, 0, 'text',
_('Account'), None, c_hdr_cell_style),
('partner', 1, 0, 'text', _('Partner'), None, c_hdr_cell_style),
('label', 1, 0, 'text', _('Label'), None, c_hdr_cell_style),
('counterpart', 1, 0, 'text',
_('Counterpart'), None, c_hdr_cell_style),
('debit', 1, 0, 'text', _('Debit'), None, c_hdr_cell_style_right),
('credit', 1, 0, 'text', _('Credit'),
None, c_hdr_cell_style_right),
('cumul_bal', 1, 0, 'text', _('Cumul. Bal.'),
None, c_hdr_cell_style_right),
]
if _p.amount_currency(data):
c_specs += [
('curr_bal', 1, 0, 'text', _('Curr. Bal.'),
None, c_hdr_cell_style_right),
('curr_code', 1, 0, 'text', _('Curr.'),
None, c_hdr_cell_style_center),
]
c_hdr_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
# cell styles for ledger lines
ll_cell_format = _xs['borders_all']
ll_cell_style = xlwt.easyxf(ll_cell_format)
ll_cell_style_center = xlwt.easyxf(ll_cell_format + _xs['center'])
ll_cell_style_date = xlwt.easyxf(
ll_cell_format + _xs['left'],
num_format_str=report_xls.date_format)
ll_cell_style_decimal = xlwt.easyxf(
ll_cell_format + _xs['right'],
num_format_str=report_xls.decimal_format)
cnt = 0
for account in objects:
display_initial_balance = account.init_balance and \
(account.init_balance.get(
'debit', 0.0) != 0.0 or account.
init_balance.get('credit', 0.0) != 0.0)
display_ledger_lines = account.ledger_lines
if _p.display_account_raw(data) == 'all' or \
(display_ledger_lines or display_initial_balance):
# TO DO : replace cumul amounts by xls formulas
cnt += 1
cumul_debit = 0.0
cumul_credit = 0.0
cumul_balance = 0.0
cumul_balance_curr = 0.0
c_specs = [
('acc_title', 11, 0, 'text',
' - '.join([account.code, account.name])),
]
row_data = self.xls_row_template(
c_specs, [x[0] for x in c_specs])
row_pos = self.xls_write_row(
ws, row_pos, row_data, c_title_cell_style)
row_pos = self.xls_write_row(ws, row_pos, c_hdr_data)
row_start = row_pos
if display_initial_balance:
cumul_debit = account.init_balance.get('debit') or 0.0
cumul_credit = account.init_balance.get('credit') or 0.0
cumul_balance = account.init_balance.get(
'init_balance') or 0.0
cumul_balance_curr = account.init_balance.get(
'init_balance_currency') or 0.0
c_specs = [('empty%s' % x, 1, 0, 'text', None)
for x in range(6)]
c_specs += [
('init_bal', 1, 0, 'text', _('Initial Balance')),
('counterpart', 1, 0, 'text', None),
('debit', 1, 0, 'number', cumul_debit,
None, c_init_cell_style_decimal),
('credit', 1, 0, 'number', cumul_credit,
None, c_init_cell_style_decimal),
('cumul_bal', 1, 0, 'number', cumul_balance,
None, c_init_cell_style_decimal),
]
if _p.amount_currency(data):
c_specs += [
('curr_bal', 1, 0, 'number', cumul_balance_curr,
None, c_init_cell_style_decimal),
('curr_code', 1, 0, 'text', None),
]
row_data = self.xls_row_template(
c_specs, [x[0] for x in c_specs])
row_pos = self.xls_write_row(
ws, row_pos, row_data, c_init_cell_style)
for line in account.ledger_lines:
cumul_debit += line.get('debit') or 0.0
cumul_credit += line.get('credit') or 0.0
cumul_balance_curr += line.get('amount_currency') or 0.0
cumul_balance += line.get('balance') or 0.0
label_elements = [line.get('lname') or '']
if line.get('invoice_number'):
label_elements.append(
"(%s)" % (line['invoice_number'],))
label = ' '.join(label_elements)
if line.get('ldate'):
c_specs = [
('ldate', 1, 0, 'date', datetime.strptime(
line['ldate'], '%Y-%m-%d'), None,
ll_cell_style_date),
]
else:
c_specs = [
('ldate', 1, 0, 'text', None),
]
c_specs += [
('period', 1, 0, 'text',
line.get('period_code') or ''),
('move', 1, 0, 'text', line.get('move_name') or ''),
('journal', 1, 0, 'text', line.get('jcode') or ''),
('account_code', 1, 0, 'text', account.code),
('partner', 1, 0, 'text',
line.get('partner_name') or ''),
('label', 1, 0, 'text', label),
('counterpart', 1, 0, 'text',
line.get('counterparts') or ''),
('debit', 1, 0, 'number', line.get('debit', 0.0),
None, ll_cell_style_decimal),
('credit', 1, 0, 'number', line.get('credit', 0.0),
None, ll_cell_style_decimal),
('cumul_bal', 1, 0, 'number', cumul_balance,
None, ll_cell_style_decimal),
]
if _p.amount_currency(data):
c_specs += [
('curr_bal', 1, 0, 'number', line.get(
'amount_currency') or 0.0, None,
ll_cell_style_decimal),
('curr_code', 1, 0, 'text', line.get(
'currency_code') or '', None,
ll_cell_style_center),
]
row_data = self.xls_row_template(
c_specs, [x[0] for x in c_specs])
row_pos = self.xls_write_row(
ws, row_pos, row_data, ll_cell_style)
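                # Account totals row: build native Excel SUM() formulas over
                # the debit (column 8) and credit (column 9) cells written
                # above, plus a debit-minus-credit formula for the balance.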
debit_start = rowcol_to_cell(row_start, 8)
debit_end = rowcol_to_cell(row_pos - 1, 8)
debit_formula = 'SUM(' + debit_start + ':' + debit_end + ')'
credit_start = rowcol_to_cell(row_start, 9)
credit_end = rowcol_to_cell(row_pos - 1, 9)
credit_formula = 'SUM(' + credit_start + ':' + credit_end + ')'
balance_debit = rowcol_to_cell(row_pos, 8)
balance_credit = rowcol_to_cell(row_pos, 9)
balance_formula = balance_debit + '-' + balance_credit
c_specs = [
('acc_title', 7, 0, 'text',
' - '.join([account.code, account.name])),
('cum_bal', 1, 0, 'text',
_('Cumulated Balance on Account'),
None, c_hdr_cell_style_right),
('debit', 1, 0, 'number', None,
debit_formula, c_hdr_cell_style_decimal),
('credit', 1, 0, 'number', None,
credit_formula, c_hdr_cell_style_decimal),
('balance', 1, 0, 'number', None,
balance_formula, c_hdr_cell_style_decimal),
]
if _p.amount_currency(data):
if account.currency_id:
c_specs += [('curr_bal', 1, 0, 'number',
cumul_balance_curr, None,
c_hdr_cell_style_decimal)]
else:
c_specs += [('curr_bal', 1, 0, 'text', None)]
c_specs += [('curr_code', 1, 0, 'text', None)]
row_data = self.xls_row_template(
c_specs, [x[0] for x in c_specs])
row_pos = self.xls_write_row(
ws, row_pos, row_data, c_hdr_cell_style)
row_pos += 1
general_ledger_xls('report.account.account_report_general_ledger_xls',
'account.account',
parser=GeneralLedgerWebkit)
|
{
'name': 'Stock PHP status selection',
'version': '0.1',
'category': 'web',
'description': '''
Override procedure for select product to populate
''',
'author': 'Micronaet S.r.l. - Nicola Riolini',
'website': 'http://www.micronaet.it',
'license': 'AGPL-3',
'depends': [
'base',
'stock_php_status',
],
'init_xml': [],
'demo': [],
'data': [],
'active': False,
'installable': True,
'auto_install': False,
}
|
from contextlib import contextmanager, closing
import sys
import os
from io import StringIO
import builtins
import signal
import threading
import platform
import tempfile
from coalib.misc.MutableValue import MutableValue
@contextmanager
def subprocess_timeout(sub_process, seconds, kill_pg=False):
"""
    Kill the sub process if it takes more than the timeout.
:param sub_process: The sub process to run.
:param seconds: The number of seconds to allow the test to run for. If
set to 0 or a negative value, it waits indefinitely.
Floats can be used to specify units smaller than
seconds.
:param kill_pg: Boolean whether to kill the process group or only this
process. (not applicable for windows)
"""
timedout = MutableValue(False)
if seconds <= 0:
yield timedout
return
finished = threading.Event()
if platform.system() == "Windows": # pragma: no cover
kill_pg = False
def kill_it():
finished.wait(seconds)
if not finished.is_set():
timedout.value = True
if kill_pg:
pgid = os.getpgid(sub_process.pid)
os.kill(sub_process.pid, signal.SIGINT)
if kill_pg:
os.killpg(pgid, signal.SIGINT)
thread = threading.Thread(name='timeout-killer', target=kill_it)
try:
thread.start()
yield timedout
finally:
finished.set()
thread.join()
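# Example usage of subprocess_timeout (an illustrative sketch, not part of the
# original module; assumes `proc` is a subprocess.Popen handle):
#
#     with subprocess_timeout(proc, 5) as timedout:
#         proc.wait()
#     if timedout.value:
#         print("subprocess was interrupted after the timeout")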
@contextmanager
def replace_stdout(replacement):
"""
Replaces stdout with the replacement, yields back to the caller and then
reverts everything back.
"""
_stdout = sys.stdout
sys.stdout = replacement
try:
yield
finally:
sys.stdout = _stdout
@contextmanager
def suppress_stdout():
"""
Suppresses everything going to stdout.
"""
with open(os.devnull, "w") as devnull, replace_stdout(devnull):
yield
@contextmanager
def retrieve_stdout():
"""
Yields a StringIO object from which one can read everything that was
printed to stdout. (It won't be printed to the real stdout!)
Example usage:
with retrieve_stdout() as stdout:
print("something") # Won't print to the console
what_was_printed = stdout.getvalue() # Save the value
"""
with closing(StringIO()) as sio, replace_stdout(sio):
oldprint = builtins.print
try:
# Overriding stdout doesn't work with libraries, this ensures even
# cached variables take this up. Well... it works.
def newprint(*args, **kwargs):
kwargs['file'] = sio
oldprint(*args, **kwargs)
builtins.print = newprint
yield sio
finally:
builtins.print = oldprint
@contextmanager
def simulate_console_inputs(*inputs):
"""
Does some magic to simulate the given inputs to any calls to the `input`
builtin. This yields back an InputGenerator object so you can check
which input was already used and append any additional inputs you want.
Example:
with simulate_console_inputs(0, 1, 2) as generator:
assert(input() == 0)
assert(generator.last_input == 0)
generator.inputs.append(3)
assert(input() == 1)
assert(input() == 2)
assert(input() == 3)
assert(generator.last_input == 3)
:param inputs: Any inputs to simulate.
:raises ValueError: Raised when was asked for more input but there's no
more provided.
"""
class InputGenerator:
def __init__(self, inputs):
self.last_input = -1
self.inputs = inputs
def generate_input(self, prompt=''):
print(prompt, end="")
self.last_input += 1
try:
return self.inputs[self.last_input]
except IndexError:
raise ValueError("Asked for more input, but no more was "
"provided from `simulate_console_inputs`.")
input_generator = InputGenerator(list(inputs))
_input = builtins.input
builtins.input = input_generator.generate_input
try:
yield input_generator
finally:
builtins.input = _input
@contextmanager
def make_temp(suffix="", prefix="tmp", dir=None):
"""
Creates a temporary file with a closed stream and deletes it when done.
:return: A contextmanager retrieving the file path.
"""
temporary = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir)
os.close(temporary[0])
try:
yield temporary[1]
finally:
os.remove(temporary[1])
@contextmanager
def prepare_file(lines,
filename,
force_linebreaks=True,
create_tempfile=True,
tempfile_kwargs={}):
"""
Can create a temporary file (if filename is None) with the lines.
Can also add a trailing newline to each line specified if needed.
:param lines: The lines from the file. (list of strings)
:param filename: The filename to be prepared.
:param force_linebreaks: Whether to append newlines at each line if needed.
:param create_tempfile: Whether to save lines in tempfile if needed.
:param tempfile_kwargs: Kwargs passed to tempfile.mkstemp().
"""
if force_linebreaks:
for i, line in enumerate(lines):
lines[i] = line if line.endswith("\n") else line + "\n"
if not create_tempfile and filename is None:
filename = "dummy_file_name"
if not isinstance(filename, str) and create_tempfile:
with make_temp(**tempfile_kwargs) as filename:
with open(filename, 'w') as file:
file.writelines(lines)
yield lines, filename
else:
yield lines, filename
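# Example usage of prepare_file (an illustrative sketch, not part of the
# original module): write two lines to a temporary file and get its path back.
#
#     with prepare_file(["a", "b"], filename=None) as (lines, fname):
#         ...  # `lines` now end with "\n"; `fname` is the temp file path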
|
from weboob.browser import LoginBrowser, URL, need_login
from weboob.exceptions import BrowserIncorrectPassword
from weboob.capabilities.messages import Message
from .pages import LoginPage, LoginErrorPage, ThreadPage, Tweet, TrendsPage,\
TimelinePage, HomeTimelinePage, SearchTimelinePage
__all__ = ['TwitterBrowser']
class TwitterBrowser(LoginBrowser):
BASEURL = 'https://twitter.com/'
thread_page = URL(u'(?P<user>.+)/status/(?P<_id>.+)', ThreadPage)
login_error = URL(u'login/error.+', LoginErrorPage)
tweet = URL(u'i/tweet/create', Tweet)
trends = URL(u'trends', TrendsPage)
search = URL(u'i/search/timeline', SearchTimelinePage)
profil = URL(u'i/profiles/show/(?P<path>.+)/timeline/with_replies', HomeTimelinePage)
timeline = URL(u'i/timeline', TimelinePage)
login = URL(u'', LoginPage)
def do_login(self):
self.login.go()
if not self.page.logged:
self.authenticity_token = self.page.login(self.username, self.password)
if not self.page.logged or self.login_error.is_here():
raise BrowserIncorrectPassword()
@need_login
def get_me(self):
return self.login.stay_or_go().get_me()
@need_login
def iter_threads(self):
return self.timeline.go().iter_threads()
def get_trendy_subjects(self):
if self.username:
return self.get_logged_trendy_subject()
else:
return self.trends.open().get_trendy_subjects()
@need_login
def get_logged_trendy_subject(self):
return self.trends.open().get_trendy_subjects()
@need_login
def post(self, thread, message):
datas = {'place_id': '',
'tagged_users': ''}
datas['authenticity_token'] = self.authenticity_token
datas['status'] = message
if thread:
datas['in_reply_to_status_id'] = thread.id.split('#')[-1]
self.tweet.open(data=datas)
def get_thread(self, _id, thread=None, seen=None):
splitted_id = _id.split('#')
if not thread:
thread = self.thread_page.go(_id=splitted_id[1].split('.')[-1], user=splitted_id[0]).get_thread(obj=thread)
title_content = thread.title.split('\n\t')[-1]
thread.root = Message(thread=thread,
id=splitted_id[1].split('.')[-1],
title=title_content[:50] if len(title_content) > 50 else title_content,
sender=splitted_id[0],
receivers=None,
date=thread.date,
parent=thread.root,
content=title_content,
signature=u'',
children=[]
)
if seen and (_id not in seen):
thread.root.flags = Message.IS_UNREAD
comments = self.thread_page.stay_or_go(_id=splitted_id[1].split('.')[-1], user=splitted_id[0]).iter_comments()
for comment in comments:
comment.thread = thread
comment.parent = thread.root
if seen and comment.id not in seen.keys():
comment.flags = Message.IS_UNREAD
thread.root.children.append(comment)
return thread
def get_tweets_from_profil(self, path):
return self.profil.go(path=path).iter_threads()
def get_tweets_from_hashtag(self, path):
return self.get_tweets_from_search(u'#%s' % path if not path.startswith('#') else path)
def get_tweets_from_search(self, path):
params = {'q': "%s" % path,
'src': 'typd',
'f': 'realtime'}
return self.search.go(params=params).iter_threads(params=params)
|
import re
from weboob.browser.pages import HTMLPage
from weboob.browser.elements import ItemElement, method
from weboob.browser.filters.standard import CleanText, Env, Duration
from weboob.capabilities.video import BaseVideo
from weboob.tools.misc import to_unicode
class VideoPage(HTMLPage):
@method
class get_video(ItemElement):
klass = BaseVideo
obj_id = Env('id')
obj_title = CleanText('//title')
obj_nsfw = True
obj_ext = u'flv'
obj_duration = CleanText('//div[@id="video_text"]') & Duration
def obj_url(self):
real_id = int(self.env['id'].split('-')[-1])
response = self.page.browser.open('http://www.youjizz.com/videos/embed/%s' % real_id)
data = response.text
video_file_urls = re.findall(r'"(http://[^",]+\.youjizz\.com[^",]+\.flv(?:\?[^"]*)?)"', data)
if len(video_file_urls) == 0:
raise ValueError('Video URL not found')
elif len(video_file_urls) > 1:
                raise ValueError('More than one video file URL found')
else:
return to_unicode(video_file_urls[0])
|
{
'name' : 'INECO STOCK PRICE',
'version' : '0.1',
'depends' : ['base','stock','ineco_stock'],
'author' : 'Mr.Tititab Srisookco',
'category': 'INECO',
'description': """
1. Add Price Unit in stock move.
""",
'website': 'http://www.ineco.co.th',
'data': [],
'update_xml': [
'stock_view.xml',
],
'demo': [],
'installable': True,
'auto_install': False,
'images': [],
}
|
from openerp.workflow import wkf_service
from openerp.osv import orm
from openerp.tools.translate import _
class workflow_service(wkf_service.workflow_service):
def __init__(self, *args, **kw):
super(workflow_service, self).__init__(*args, **kw)
def trg_last_action(self, uid, model, obj_id, cr):
'''
This function returns information about last workflow activity
'''
cr.execute("SELECT * FROM wkf_instance WHERE res_id=%s AND res_type=%s", (obj_id, model))
rows = cr.fetchall()
if len(rows) > 1:
raise orm.except_orm(_('Warning'), _('More than one result returned...'))
inst_id, wkf_id, uid, res_id, res_type, state = rows[0]
cr.execute("SELECT act_id, inst_id, state FROM wkf_workitem WHERE inst_id=%s", (inst_id, ))
rows = cr.fetchall()
if len(rows) > 1:
raise orm.except_orm(_('Warning'), _('More than one result returned...'))
act_id, inst_id, state = rows[0]
cr.execute("SELECT id, wkf_id, name, action FROM wkf_activity WHERE id=%s ORDER BY id", (act_id, ))
rows = cr.fetchall()
if len(rows) > 1:
raise orm.except_orm(_('Warning'), _('More than one result returned...'))
return dict(zip(('wkf_activity_id', 'wkf_instance_id', 'name', 'action'), rows[0]))
workflow_service()
|
import django.db.models.deletion
import oscar.models.fields
import simple_history.models
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('courses', '0010_migrate_partner_data_to_courses'),
('catalogue', '0038_coupon_enterprise_id_attribute'),
]
operations = [
migrations.CreateModel(
name='HistoricalProduct',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('structure', models.CharField(choices=[(b'standalone', 'Stand-alone product'), (b'parent', 'Parent product'), (b'child', 'Child product')], default=b'standalone', max_length=10, verbose_name='Product structure')),
('upc', oscar.models.fields.NullCharField(db_index=True, help_text='Universal Product Code (UPC) is an identifier for a product which is not specific to a particular supplier. Eg an ISBN for a book.', max_length=64, verbose_name='UPC')),
('title', models.CharField(blank=True, max_length=255, verbose_name='Title')),
('slug', models.SlugField(max_length=255, verbose_name='Slug')),
('description', models.TextField(blank=True, verbose_name='Description')),
('rating', models.FloatField(editable=False, null=True, verbose_name='Rating')),
('date_created', models.DateTimeField(blank=True, editable=False, verbose_name='Date created')),
('date_updated', models.DateTimeField(blank=True, db_index=True, editable=False, verbose_name='Date updated')),
('is_discountable', models.BooleanField(default=True, help_text='This flag indicates if this product can be used in an offer or not', verbose_name='Is discountable?')),
('expires', models.DateTimeField(blank=True, help_text='Last date/time on which this product can be purchased.', null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('course', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='courses.Course')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('parent', models.ForeignKey(blank=True, db_constraint=False, help_text="Only choose a parent product if you're creating a child product. For example if this is a size 4 of a particular t-shirt. Leave blank if this is a stand-alone product (i.e. there is only one version of this product).", null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='catalogue.Product', verbose_name='Parent product')),
('product_class', models.ForeignKey(blank=True, db_constraint=False, help_text='Choose what type of product this is', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='catalogue.ProductClass', verbose_name='Product type')),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical Product',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalProductAttributeValue',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('value_text', models.TextField(blank=True, null=True, verbose_name='Text')),
('value_integer', models.IntegerField(blank=True, null=True, verbose_name='Integer')),
('value_boolean', models.NullBooleanField(verbose_name='Boolean')),
('value_float', models.FloatField(blank=True, null=True, verbose_name='Float')),
('value_richtext', models.TextField(blank=True, null=True, verbose_name='Richtext')),
('value_date', models.DateField(blank=True, null=True, verbose_name='Date')),
('value_datetime', models.DateTimeField(blank=True, null=True, verbose_name='DateTime')),
('value_file', models.TextField(blank=True, max_length=255, null=True)),
('value_image', models.TextField(blank=True, max_length=255, null=True)),
('entity_object_id', models.PositiveIntegerField(blank=True, editable=False, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('attribute', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='catalogue.ProductAttribute', verbose_name='Attribute')),
('entity_content_type', models.ForeignKey(blank=True, db_constraint=False, editable=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='contenttypes.ContentType')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('product', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='catalogue.Product', verbose_name='Product')),
('value_option', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='catalogue.AttributeOption', verbose_name='Value option')),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical Product attribute value',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
]
|
from weboob.tools.browser import BaseBrowser
from weboob.tools.capabilities.paste import image_mime
from StringIO import StringIO
from .pages import PageHome, PageImage, PageError
__all__ = ['PixtoilelibreBrowser']
class PixtoilelibreBrowser(BaseBrowser):
PROTOCOL = 'http'
DOMAIN = 'pix.toile-libre.org'
ENCODING = None
PAGES = {'%s://%s/' % (PROTOCOL, DOMAIN): PageHome,
             r'%s://%s/\?action=upload' % (PROTOCOL, DOMAIN): PageError,
r'%s://%s/\?img=(.+)' % (PROTOCOL, DOMAIN): PageImage}
def post_image(self, filename, contents, private=False, description=''):
self.location('/')
assert self.is_on_page(PageHome)
mime = image_mime(contents.encode('base64'))
self.select_form(nr=0)
self.form.find_control('private').items[0].selected = private
self.form['description'] = description or ''
self.form.find_control('img').add_file(StringIO(contents), filename=filename, content_type=mime)
self.submit()
assert self.is_on_page(PageImage)
return self.page.get_info()
def get_contents(self, id):
return self.readurl('%s://%s/upload/original/%s' % (self.PROTOCOL, self.DOMAIN, id))
|
{
'name': 'Broken deprecated module for tests MQT',
'license': 'AGPL-3',
'author': 'Odoo Community Association (OCA)',
'version': '8.0.0.1.0.0',
'depends': [
'base',
],
'data': [
],
}
|
"""
Tests specific to the CourseRerunState Model and Manager.
"""
from __future__ import absolute_import
from django.test import TestCase
from opaque_keys.edx.locations import CourseLocator
from six import text_type
from course_action_state.managers import CourseRerunUIStateManager
from course_action_state.models import CourseRerunState
from student.tests.factories import UserFactory
class TestCourseRerunStateManager(TestCase):
"""
Test class for testing the CourseRerunUIStateManager.
"""
def setUp(self):
super(TestCourseRerunStateManager, self).setUp()
self.source_course_key = CourseLocator("source_org", "source_course_num", "source_run")
self.course_key = CourseLocator("test_org", "test_course_num", "test_run")
self.created_user = UserFactory()
self.display_name = "destination course name"
self.expected_rerun_state = {
'created_user': self.created_user,
'updated_user': self.created_user,
'course_key': self.course_key,
'source_course_key': self.source_course_key,
"display_name": self.display_name,
'action': CourseRerunUIStateManager.ACTION,
'should_display': True,
'message': "",
}
def verify_rerun_state(self):
"""
Gets the rerun state object for self.course_key and verifies that the values
of its fields equal self.expected_rerun_state.
"""
found_rerun = CourseRerunState.objects.find_first(course_key=self.course_key)
found_rerun_state = {key: getattr(found_rerun, key) for key in self.expected_rerun_state}
self.assertDictEqual(found_rerun_state, self.expected_rerun_state)
return found_rerun
def dismiss_ui_and_verify(self, rerun):
"""
Updates the should_display field of the rerun state object for self.course_key
and verifies its new state.
"""
user_who_dismisses_ui = UserFactory()
CourseRerunState.objects.update_should_display(
entry_id=rerun.id,
user=user_who_dismisses_ui,
should_display=False,
)
self.expected_rerun_state.update({
'updated_user': user_who_dismisses_ui,
'should_display': False,
})
self.verify_rerun_state()
def initiate_rerun(self):
CourseRerunState.objects.initiated(
source_course_key=self.source_course_key,
destination_course_key=self.course_key,
user=self.created_user,
display_name=self.display_name,
)
def test_rerun_initiated(self):
self.initiate_rerun()
self.expected_rerun_state.update(
{'state': CourseRerunUIStateManager.State.IN_PROGRESS}
)
self.verify_rerun_state()
def test_rerun_succeeded(self):
# initiate
self.initiate_rerun()
# set state to succeed
CourseRerunState.objects.succeeded(course_key=self.course_key)
self.expected_rerun_state.update({
'state': CourseRerunUIStateManager.State.SUCCEEDED,
})
rerun = self.verify_rerun_state()
# dismiss ui and verify
self.dismiss_ui_and_verify(rerun)
def test_rerun_failed(self):
# initiate
self.initiate_rerun()
# set state to fail
exception = Exception("failure in rerunning")
try:
raise exception
        except Exception:
CourseRerunState.objects.failed(course_key=self.course_key)
self.expected_rerun_state.update(
{'state': CourseRerunUIStateManager.State.FAILED}
)
self.expected_rerun_state.pop('message')
rerun = self.verify_rerun_state()
self.assertIn(text_type(exception), rerun.message)
# dismiss ui and verify
self.dismiss_ui_and_verify(rerun)
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('bp_cupid', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Landkreis',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', auto_created=True, primary_key=True)),
('plz_von', models.IntegerField(verbose_name='von', default=0)),
('plz_bis', models.IntegerField(verbose_name='bis', default=0)),
('name', models.CharField(verbose_name='Name', default='', max_length=20)),
('orte', models.CharField(verbose_name='Orte', default='', max_length=100)),
],
options={
'verbose_name': 'Landkreis',
'verbose_name_plural': 'Landkreise',
},
),
]
|
"""
Django admin page for bulk email models
"""
from config_models.admin import ConfigurationModelAdmin
from django.contrib import admin
from lms.djangoapps.bulk_email.forms import CourseAuthorizationAdminForm, CourseEmailTemplateForm
from lms.djangoapps.bulk_email.models import (
BulkEmailFlag,
CourseAuthorization,
CourseEmail,
CourseEmailTemplate,
Optout
)
class CourseEmailAdmin(admin.ModelAdmin):
"""Admin for course email."""
readonly_fields = ('sender',)
class OptoutAdmin(admin.ModelAdmin):
"""Admin for optouts."""
list_display = ('user', 'course_id')
class CourseEmailTemplateAdmin(admin.ModelAdmin):
"""Admin for course email templates."""
form = CourseEmailTemplateForm
fieldsets = (
(None, {
# make the HTML template display above the plain template:
'fields': ('html_template', 'plain_template', 'name'),
'description': '''
Enter template to be used by course staff when sending emails to enrolled students.
The HTML template is for HTML email, and may contain HTML markup. The plain template is
for plaintext email. Both templates should contain the string '{{message_body}}' (with
two curly braces on each side), to indicate where the email text is to be inserted.
Other tags that may be used (surrounded by one curly brace on each side):
{platform_name} : the name of the platform
{course_title} : the name of the course
{course_root} : the URL path to the root of the course
{course_language} : the course language. The default is None.
{course_url} : the course's full URL
{email} : the user's email address
{account_settings_url} : URL at which users can change account preferences
{email_settings_url} : URL at which users can change course email preferences
{course_image_url} : URL for the course's course image.
Will return a broken link if course doesn't have a course image set.
Note that there is currently NO validation on tags, so be careful. Typos or use of
unsupported tags will cause email sending to fail.
'''
}),
)
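    # Illustrative plain-text template using the tags documented above (an
    # example only, not shipped with this module):
    #
    #     Dear student,
    #
    #     {{message_body}}
    #
    #     You received this message because you are enrolled in {course_title}
    #     on {platform_name}. Manage your preferences at {email_settings_url}.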
# Turn off the action bar (we have no bulk actions)
actions = None
def has_add_permission(self, request):
"""Enable the ability to add new templates, as we want to be able to define multiple templates."""
return True
def has_delete_permission(self, request, obj=None):
"""
Disables the ability to remove existing templates, as we'd like to make sure we don't have dangling references.
"""
return False
class CourseAuthorizationAdmin(admin.ModelAdmin):
"""Admin for enabling email on a course-by-course basis."""
form = CourseAuthorizationAdminForm
fieldsets = (
(None, {
'fields': ('course_id', 'email_enabled'),
'description': '''
Enter a course id in the following form: course-v1:Org+CourseNumber+CourseRun, eg course-v1:edX+DemoX+Demo_Course
Do not enter leading or trailing slashes. There is no need to surround the course ID with quotes.
Validation will be performed on the course name, and if it is invalid, an error message will display.
To enable email for the course, check the "Email enabled" box, then click "Save".
'''
}),
)
admin.site.register(CourseEmail, CourseEmailAdmin)
admin.site.register(Optout, OptoutAdmin)
admin.site.register(CourseEmailTemplate, CourseEmailTemplateAdmin)
admin.site.register(CourseAuthorization, CourseAuthorizationAdmin)
admin.site.register(BulkEmailFlag, ConfigurationModelAdmin)
|
import clv_tray_category
|
from spack import *
class Asdcplib(AutotoolsPackage):
"""AS-DCP and AS-02 File Access Library."""
homepage = "https://github.com/cinecert/asdcplib"
url = "https://github.com/cinecert/asdcplib/archive/rel_2_10_35.tar.gz"
version('2_10_35', sha256='a68eec9ae0cc363f75331dc279c6dd6d3a9999a9e5f0a4405fd9afa8a29ca27b')
version('2_10_34', sha256='faa54ee407c1afceb141e08dae9ebf83b3f839e9c49a1793ac741ec6cdee5c3c')
version('2_10_33', sha256='16fafb5da3d46b0f44570ef9780c85dd82cca60106a9e005e538809ea1a95373')
version('2_10_32', sha256='fe5123c49980ee3fa25dea876286f2ac974d203bfcc6c77fc288a59025dee3ee')
depends_on('m4', type='build')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('openssl', type=('build', 'link'))
def configure_args(self):
spec = self.spec
args = ['--with-openssl={0}'.format(spec['openssl'].prefix)]
return args
|
from spack import *
class Graphmap(MakefilePackage):
"""A highly sensitive and accurate mapper for long, error-prone reads"""
homepage = "https://github.com/isovic/graphmap"
git = "https://github.com/isovic/graphmap.git"
version('0.3.0', commit='eb8c75d68b03be95464318afa69b645a59f8f6b7')
def edit(self, spec, prefix):
mkdirp(prefix.bin)
makefile = FileFilter('Makefile')
makefile.filter('/usr/bin/graphmap', prefix.bin.graphmap)
def build(self, spec, prefix):
make('modules')
make()
|
from spack import *
class Snappy(CMakePackage):
"""A fast compressor/decompressor: https://code.google.com/p/snappy"""
homepage = "https://github.com/google/snappy"
url = "https://github.com/google/snappy/archive/1.1.7.tar.gz"
version('1.1.7', 'ee9086291c9ae8deb4dac5e0b85bf54a')
variant('shared', default=True, description='Build shared libraries')
variant('pic', default=True, description='Build position independent code')
def cmake_args(self):
spec = self.spec
args = [
'-DCMAKE_INSTALL_LIBDIR:PATH={0}'.format(
self.prefix.lib),
'-DBUILD_SHARED_LIBS:BOOL={0}'.format(
'ON' if '+shared' in spec else 'OFF')
]
return args
def flag_handler(self, name, flags):
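        # The (None, None, flags) return value below follows the usual reading
        # of Spack's flag_handler convention, (injected, env, build_system)
        # flag groups: the PIC flag is handed to the build system (CMake)
        # rather than injected through the compiler wrappers. Verify against
        # the Spack documentation for your version.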
flags = list(flags)
if '+pic' in self.spec and name in ('cflags', 'cxxflags'):
flags.append(self.compiler.pic_flag)
return (None, None, flags)
@run_after('install')
def install_pkgconfig(self):
mkdirp(self.prefix.lib.pkgconfig)
with open(join_path(self.prefix.lib.pkgconfig, 'snappy.pc'), 'w') as f:
f.write('prefix={0}\n'.format(self.prefix))
f.write('exec_prefix=${prefix}\n')
f.write('libdir={0}\n'.format(self.prefix.lib))
f.write('includedir={0}\n'.format(self.prefix.include))
f.write('\n')
f.write('Name: Snappy\n')
f.write('Description: A fast compressor/decompressor.\n')
f.write('Version: {0}\n'.format(self.spec.version))
f.write('Cflags: -I${includedir}\n')
f.write('Libs: -L${libdir} -lsnappy\n')
|
"""
These tests use only the store. They insert instances with known text
and run sparql with fts functions to check the results.
"""
import dbus
import unittest
import random
from common.utils import configuration as cfg
import unittest2 as ut
from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
class TestFTSFunctions (CommonTrackerStoreTest):
"""
Insert data with text and check the fts:xxxx functions are returning the expected results
"""
def test_fts_rank (self):
"""
1. Insert a Contact1 with 'abcdefxyz' as fullname and nickname
2. Insert a Contact2 with 'abcdefxyz' as fullname
2. Insert a Contact3 with 'abcdefxyz' as fullname and twice in nickname
3. Query sorting by fts:rank
EXPECTED: The 3 contacts in order: 3, 1, 2
4. Remove the created resources
"""
insert_sparql = """
INSERT {
<contact://test/fts-function/rank/1> a nco:PersonContact ;
nco:fullname 'abcdefxyz' ;
nco:nickname 'abcdefxyz' .
<contact://test/fts-function/rank/2> a nco:PersonContact ;
nco:fullname 'abcdefxyz' .
<contact://test/fts-function/rank/3> a nco:PersonContact ;
nco:fullname 'abcdefxyz' ;
nco:nickname 'abcdefxyz abcdefxyz' .
}
"""
self.tracker.update (insert_sparql)
query = """
SELECT ?contact WHERE {
?contact a nco:PersonContact ;
fts:match 'abcdefxyz' .
} ORDER BY DESC (fts:rank(?contact))
"""
results = self.tracker.query (query)
self.assertEquals (len(results), 3)
self.assertEquals (results[0][0], "contact://test/fts-function/rank/3")
self.assertEquals (results[1][0], "contact://test/fts-function/rank/1")
self.assertEquals (results[2][0], "contact://test/fts-function/rank/2")
delete_sparql = """
DELETE {
<contact://test/fts-function/rank/1> a rdfs:Resource .
<contact://test/fts-function/rank/2> a rdfs:Resource .
<contact://test/fts-function/rank/3> a rdfs:Resource .
}
"""
self.tracker.update (delete_sparql)
def test_fts_offsets (self):
"""
1. Insert a Contact1 with 'abcdefxyz' as fullname and nickname
2. Insert a Contact2 with 'abcdefxyz' as fullname
2. Insert a Contact3 with 'abcdefxyz' as fullname and twice in nickname
3. Query fts:offsets for 'abcdefxyz'
EXPECTED: The 3 contacts in insertion order, with 2, 1 and 3 pairs (prop, offset=1) each
4. Remove the created resources
"""
insert_sparql = """
INSERT {
<contact://test/fts-function/offset/1> a nco:PersonContact ;
nco:fullname 'abcdefxyz' ;
nco:nickname 'abcdefxyz' .
<contact://test/fts-function/offset/2> a nco:PersonContact ;
nco:fullname 'abcdefxyz' .
<contact://test/fts-function/offset/3> a nco:PersonContact ;
nco:fullname 'abcdefxyz' ;
nco:nickname 'abcdefxyz abcdefxyz' .
}
"""
self.tracker.update (insert_sparql)
query = """
SELECT fts:offsets (?contact) WHERE {
?contact a nco:PersonContact ;
fts:match 'abcdefxyz' .
}
"""
results = self.tracker.query (query)
self.assertEquals (len(results), 3)
self.assertEquals (results[0][0], 'nco:fullname,0,nco:nickname,0')
self.assertEquals (results[1][0], 'nco:fullname,0')
self.assertEquals (results[2][0], 'nco:fullname,0,nco:nickname,0,nco:nickname,10')
delete_sparql = """
DELETE {
<contact://test/fts-function/offset/1> a rdfs:Resource .
<contact://test/fts-function/offset/2> a rdfs:Resource .
<contact://test/fts-function/offset/3> a rdfs:Resource .
}
"""
self.tracker.update (delete_sparql)
if __name__ == '__main__':
ut.main()
|
"""TIP3P potential, constraints and dynamics."""
from math import pi, sin, cos
import numpy as np
import ase.units as units
from ase.parallel import world
from ase.md.md import MolecularDynamics
qH = 0.417
sigma0 = 3.15061
epsilon0 = 0.1521 * units.kcal / units.mol
rOH = 0.9572
thetaHOH = 104.52 / 180 * pi
class TIP3P:
def __init__(self, rc=9.0, width=1.0):
self.energy = None
self.forces = None
self.rc1 = rc - width
self.rc2 = rc
def get_spin_polarized(self):
return False
def update(self, atoms):
if (self.energy is None or
len(self.numbers) != len(atoms) or
(self.numbers != atoms.get_atomic_numbers()).any()):
self.calculate(atoms)
elif ((self.positions != atoms.get_positions()).any() or
(self.pbc != atoms.get_pbc()).any() or
(self.cell != atoms.get_cell()).any()):
self.calculate(atoms)
def calculation_required(self, atoms, quantities):
if len(quantities) == 0:
return False
return (self.energy is None or
len(self.numbers) != len(atoms) or
(self.numbers != atoms.get_atomic_numbers()).any() or
(self.positions != atoms.get_positions()).any() or
(self.pbc != atoms.get_pbc()).any() or
(self.cell != atoms.get_cell()).any())
def get_potential_energy(self, atoms):
self.update(atoms)
return self.energy
def get_forces(self, atoms):
self.update(atoms)
return self.forces.copy()
def get_stress(self, atoms):
raise NotImplementedError
def calculate(self, atoms):
self.positions = atoms.get_positions().copy()
self.cell = atoms.get_cell().copy()
self.pbc = atoms.get_pbc().copy()
natoms = len(atoms)
nH2O = natoms // 3
assert self.pbc.all()
C = self.cell.diagonal()
assert not (self.cell - np.diag(C)).any()
assert (C >= 2 * self.rc2).all()
self.numbers = atoms.get_atomic_numbers()
Z = self.numbers.reshape((-1, 3))
assert (Z[:, 1:] == 1).all() and (Z[:, 0] == 8).all()
R = self.positions.reshape((nH2O, 3, 3))
RO = R[:, 0]
self.energy = 0.0
self.forces = np.zeros((natoms, 3))
if world.size == 1:
mya = list(range(nH2O - 1))
else:
rank = world.rank
size = world.size
            assert nH2O % (2 * size) == 0
            mynH2O = nH2O // 2 // size
            mya = (list(range(rank * mynH2O, (rank + 1) * mynH2O)) +
                   list(range((size - rank - 1) * mynH2O,
                              (size - rank) * mynH2O)))
q = np.empty(3)
q[:] = qH * (units.Hartree * units.Bohr)**0.5
q[0] *= -2
for a in mya:
DOO = (RO[a + 1:] - RO[a] + 0.5 * C) % C - 0.5 * C
dOO = (DOO**2).sum(axis=1)**0.5
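            # Smooth cutoff between rc1 and rc2: f is 1 inside rc1, 0 beyond
            # rc2, and a cubic switching function in between; dfdd stores
            # df/d(dOO) so the cutoff contributes correctly to the forces.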
x1 = dOO > self.rc1
x2 = dOO < self.rc2
f = np.zeros(nH2O - a - 1)
f[x2] = 1.0
dfdd = np.zeros(nH2O - a - 1)
x12 = np.logical_and(x1, x2)
d = (dOO[x12] - self.rc1) / (self.rc2 - self.rc1)
f[x12] -= d**2 * (3.0 - 2.0 * d)
dfdd[x12] -= 6.0 / (self.rc2 - self.rc1) * d * (1.0 - d)
y = (sigma0 / dOO)**6
y2 = y**2
e = 4 * epsilon0 * (y2 - y)
self.energy += np.dot(e, f)
dedd = 24 * epsilon0 * (2 * y2 - y) / dOO * f - e * dfdd
F = (dedd / dOO)[:, np.newaxis] * DOO
self.forces[(a + 1) * 3::3] += F
self.forces[a * 3] -= F.sum(axis=0)
for i in range(3):
D = (R[a + 1:] - R[a, i] + 0.5 * C) % C - 0.5 * C
d = (D**2).sum(axis=2)**0.5
e = q[i] * q / d
self.energy += np.dot(f, e).sum()
F = (e / d**2 * f[:, np.newaxis])[:, :, np.newaxis] * D
F[:, 0] -= (e.sum(axis=1) * dfdd / dOO)[:, np.newaxis] * DOO
self.forces[(a + 1) * 3:] += F.reshape((-1, 3))
self.forces[a * 3 + i] -= F.sum(axis=0).sum(axis=0)
self.energy = world.sum(self.energy)
world.sum(self.forces)
class H2OConstraint:
"""Constraint object for a rigid H2O molecule."""
def __init__(self, r=rOH, theta=thetaHOH, iterations=23, masses=None):
self.r = r
self.theta = theta
self.iterations = iterations
self.m = masses
def set_masses(self, masses):
self.m = masses
def adjust_positions(self, old, new):
bonds = [(0, 1, self.r), (0, 2, self.r)]
if self.theta:
bonds.append((1, 2, sin(self.theta / 2) * self.r * 2))
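        # SHAKE-like iteration: for each constrained pair, solve for the
        # multiplier k that restores the target distance r and displace both
        # atoms along the old bond vector D, weighted by their inverse masses.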
for iter in range(self.iterations):
for i, j, r in bonds:
D = old[i::3] - old[j::3]
m1 = self.m[i]
m2 = self.m[j]
a = new[i::3]
b = new[j::3]
B = a - b
x = (D**2).sum(axis=1)
y = (D * B).sum(axis=1)
z = (B**2).sum(axis=1) - r**2
k = m1 * m2 / (m1 + m2) * ((y**2 - x * z)**0.5 - y) / x
k.shape = (-1, 1)
a += k / m1 * D
b -= k / m2 * D
def adjust_forces(self, positions, forces):
pass
def copy(self):
return H2OConstraint(self.r, self.theta, self.iterations, self.m)
class Verlet(MolecularDynamics):
def step(self, f):
atoms = self.atoms
m = atoms.get_masses()[:, np.newaxis]
v = self.atoms.get_velocities()
r0 = atoms.get_positions()
r = r0 + self.dt * v + self.dt**2 * f / m
atoms.set_positions(r)
r = atoms.get_positions()
v = (r - r0) / self.dt
self.atoms.set_velocities(v)
return atoms.get_forces()
|