text stringlengths 6 947k | repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1
value | license stringclasses 15
values | size int64 6 947k | score float64 0 0.34 |
|---|---|---|---|---|---|---|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from opus_core.opus_package import OpusPackage
class package(OpusPackage):
    """Opus package descriptor for the PSRC (Puget Sound Regional Council) package."""
    # Short identifier used by the Opus framework to locate this package.
    name = 'psrc'
    # Opus packages that must be available before this one can be loaded.
    required_opus_packages = ["opus_core", "opus_emme2", "urbansim"]
| christianurich/VIBe2UrbanSim | 3rdparty/opus/src/psrc/opus_package_info.py | Python | gpl-2.0 | 292 | 0.006849 |
# Make sure you name your file with className.py
from hint.hint_class_helpers.find_matches import find_matches
class Prob2_Part1:
    """
    Hint checker for GrinsteadSnell 3.2.18, part 1.

    Inspects the raw attempt string for the expected building blocks
    (an exponent and a choose function) and returns a targeted hint.

    Author: Shen Ting Ang
    Date: 10/11/2016
    """

    def check_attempt(self, params):
        """Return a ``(hint, answer)`` pair for the student's attempt.

        ``params`` must contain 'attempt', 'answer', 'att_tree' and
        'ans_tree'.  Only 'attempt' is inspected here; the others are
        stored for API compatibility with the hint framework.
        An empty pair ``("", "")`` means no hint is needed.
        """
        self.attempt = params['attempt']    # student's attempt
        self.answer = params['answer']      # solution
        self.att_tree = params['att_tree']  # attempt tree
        self.ans_tree = params['ans_tree']  # solution tree
        # FIX: the original code called find_matches(params) here and built
        # a matching_node list, but never used either result -- dead work on
        # every attempt.  Removed; the hint logic below only needs the raw
        # attempt string.  (Assumes find_matches is side-effect free --
        # revisit if that turns out not to hold.)
        try:
            if '^' not in self.attempt:
                hint = 'Missing ^ in the answer. '
                return hint + 'What is the probability of a specific combination of 3 coin flips? ', '1/2^3'
            elif 'C(' not in self.attempt and '!' not in self.attempt:
                hint = 'Missing choose function in the answer. '
                return hint + 'How many possible ways are there to get 2 questions correct out of 5 questions? C(5,_)', '2'
            else:
                return "", ""
        except Exception:
            # Best-effort: never crash the hint system on a malformed attempt.
            return '', ''

    def get_problems(self):
        """Return the list of problem identifiers this hint class serves."""
        self.problem_list = ["Combinatorics/GrinsteadSnell3.2.18/part1"]
        return self.problem_list
| zhenzhai/edx-platform | common/lib/sandbox-packages/hint/hint_class/Week3/Prob2_Part1.py | Python | agpl-3.0 | 1,401 | 0.013562 |
"""
Common distributions with standard parameterizations in Python
@author : Spencer Lyon <spencer.lyon@stern.nyu.edu>
@date : 2014-12-31 15:59:31
"""
from math import sqrt
import numpy as np
__all__ = ["CanDistFromScipy"]
# ---------------------------------------------------------------------------
# Docstring templates.
#
# Each *_docstr below is a str.format template installed onto the
# corresponding method in CanDistFromScipy._set_docstrings.  The
# placeholders ({pdf_tex}, {cdf_tex}, {arg1_type}, {ret1_type}) are filled
# from each subclass's _metadata dict, falling back to default_docstr_args.
# The "Notes" sections map each method onto the equivalent R call.
# ---------------------------------------------------------------------------
# Template for pdf(x).
pdf_docstr = r"""
Evaluate the probability density function, which is defined as
.. math::
{pdf_tex}
Parameters
----------
x : {arg1_type}
The point(s) at which to evaluate the pdf
Returns
-------
out : {ret1_type}
The pdf of the distribution evaluated at x
Notes
-----
For applicable distributions, equivalent to calling `d__dist_name(x,
*args, log=0)` from R
"""
# Template for logpdf(x).
logpdf_docstr = r"""
Evaluate the log of the pdf, where the pdf is defined as
.. math::
{pdf_tex}
Parameters
----------
x : {arg1_type}
The point(s) at which to evaluate the log of the pdf
Returns
-------
out : {ret1_type}
The log of pdf of the distribution evaluated at x
Notes
-----
For applicable distributions, equivalent to calling `d__dist_name(x,
*args, log=1)` from R
"""
# Template for cdf(x).
cdf_docstr = r"""
Evaluate the cumulative density function
.. math::
{cdf_tex}
Parameters
----------
x : {arg1_type}
The point(s) at which to evaluate the cdf
Returns
-------
out : {ret1_type}
The cdf of the distribution evaluated at x
Notes
-----
For applicable distributions, equivalent to calling `p__dist_name(x,
*args, lower.tail=1, log.p=0)` from R
"""
# Template for logcdf(x).
logcdf_docstr = r"""
Evaluate the log of the cdf, where the cdf is defined as
.. math::
{cdf_tex}
Parameters
----------
x : {arg1_type}
The point(s) at which to evaluate the log of the cdf
Returns
-------
out : {ret1_type}
The log of cdf of the distribution evaluated at x
Notes
-----
For applicable distributions, equivalent to calling `p__dist_name(x,
*args, lower.tail=1, log.p=1)` from R
"""
# Template for rvs(size) (scipy-style sampling).
rvs_docstr = r"""
Draw random samples from the distribution
Parameters
----------
size : tuple
A tuple specifying the dimensions of an array to be filled with
random samples
Returns
-------
out : {ret1_type}
The random sample(s) requested
"""
# Template for sf(x) / ccdf(x).
sf_docstr = r"""
Compute the survival function (or complementary cumulative density
function) of the distribution at given points. This is defined as
.. math::
sf(x) = ccdf(x) = 1 - cdf(x)
Parameters
----------
x : {arg1_type}
The point(s) at which to evaluate the sf (ccdf)
Returns
-------
out : {ret1_type}
One minus the cdf of the distribution evaluated at x
Notes
-----
For applicable distributions, equivalent to calling `p__dist_name(x,
*args, lower.tail=0, log.p=0)` from R
"""
# Template for logsf(x) / logccdf(x).
logsf_docstr = r"""
Compute the log of the survival function (or complementary cumulative
density function) of the distribution at given points. This is defined
as
.. math::
\log(sf(x)) = \log(ccdf(x)) = \log(1 - cdf(x))
Parameters
----------
x : {arg1_type}
The point(s) at which to evaluate the log of the sf (ccdf)
Returns
-------
out : {ret1_type}
Log of one minus the cdf of the distribution evaluated at x
Notes
-----
For applicable distributions, equivalent to calling `p__dist_name(x,
*args, lower.tail=1, log.p=1)` from R
"""
# Template for isf(x) (inverse survival function).
isf_docstr = r"""
Compute the inverse of the survival function (or complementary
cumulative density function) of the distribution at given points. This
is commonly used to find critical values of a distribution
Parameters
----------
x : {arg1_type}
The point(s) at which to evaluate the log of the sf (ccdf)
Returns
-------
out : {ret1_type}
Log of one minus the cdf of the distribution evaluated at x
Examples
--------
>>> d.isf([0.1, 0.05, 0.01]) # upper tail critical values
"""
# Template for ppf(x) / quantile(x).
ppf_docstr = r"""
Compute the percent point function (or quantile), which is the inverse
of the cdf. This is commonly used to compute critical values.
Parameters
----------
x : {arg1_type}
The point(s) at which to evaluate the log of the sf (ccdf)
Returns
-------
out : {ret1_type}
Log of one minus the cdf of the distribution evaluated at x
Examples
--------
>>> d.isf([0.1, 0.05, 0.01]) # upper tail critical values
Notes
-----
The ppf(x) = ccdf(1 - x), for x in (0, 1)
For applicable distributions, equivalent to calling `q__dist_name(x,
*args, lower.tail=1, log.p=0)` from R
"""
# Template for rand(*args) (Distributions.jl-style sampling).
rand_docstr = r"""
Draw random samples from the distribution
Parameters
----------
*args : int
Integer arguments are taken to be the dimensions of an array that
should be filled with random samples
Returns
-------
out : {ret1_type}
The random sample(s) requested
Examples
--------
>>> samples = d.rand(2, 2, 3); samples.shape # 2, 3, 3 array of samples
(2, 3, 3)
>>> type(d.rand())
numpy.float64
"""
# Template for loglikelihood(x).
ll_docstr = r"""
The loglikelihood of the distribution with respect to all the samples
in x. Equivalent to sum(d.logpdf(x))
Parameters
----------
x : {arg1_type}
The point(s) at which to evaluate the log likelihood
Returns
-------
out : scalar
The log-likelihood of the observations in x
"""
# Template for invlogcdf(x).
invlogcdf_docstr = r"""
Evaluate inverse function of the logcdf of the distribution at x
Parameters
----------
x : {arg1_type}
The point(s) at which to evaluate the inverse of the log of the cdf
Returns
-------
out : {ret1_type}
The random variable(s) such that the log of the cdf is equal to x
Notes
-----
For applicable distributions, equivalent to calling `q__dist_name(x,
*args, lower.tail=1, log.p=1)` from R
"""
# Template for cquantile(x) (complementary quantile).
cquantile_docstr = r"""
Evaluate the complementary quantile function. Equal to `d.ppf(1-x)` for
x in (0, 1). Could be used to compute the lower critical values of a
distribution
Parameters
----------
x : {arg1_type}
The point(s) at which to evaluate 1 minus the quantile
Returns
-------
out : {ret1_type}
The lower-tail critical values of the distribution
Notes
-----
For applicable distributions, equivalent to calling `q__dist_name(x,
*args, lower.tail=0, log.p=0)` from R
"""
# Template for invlogccdf(x).
invlccdf_docstr = r"""
Evaluate inverse function of the logccdf of the distribution at x
Parameters
----------
x : {arg1_type}
The point(s) at which to evaluate the inverse of the log of the cdf
Returns
-------
out : {ret1_type}
The random variable(s) such that the log of 1 minus the cdf is equal
to x
Notes
-----
For applicable distributions, equivalent to calling `q__dist_name(x,
*args, lower.tail=0, log.p=1)` from R
"""
# Fallback values for the template placeholders when a subclass's
# _metadata does not provide them.
default_docstr_args = {"pdf_tex": r"\text{not given}",
                       "cdf_tex": r"\text{not given}",
                       "arg1_type": "array_like or scalar",
                       "ret1_type": "array_like or scalar"}
def _default_fit(self, x):
msg = "If you would like to see this open an issue or submit a pull"
msg += " request at https://github.com/spencerlyon2/distcan/issues"
raise NotImplementedError(msg)
def _default_expect(self, x):
msg = "If you would like to see this open an issue or submit a pull"
msg += " request at https://github.com/spencerlyon2/distcan/issues"
raise NotImplementedError(msg)
class CanDistFromScipy(object):
    """Adapter exposing a frozen ``scipy.stats`` distribution under an API
    modeled on Julia's Distributions.jl.

    Subclasses must set two attributes *before* calling ``__init__`` here:

    * ``self.dist`` -- a frozen scipy.stats distribution
    * ``self._metadata`` -- dict of docstring-template arguments; must also
      contain the key ``"_str"`` (a %-format string used by ``__str__``
      together with ``self.params``)
    """

    def __init__(self):
        # Re-export scipy.stats method names under the names used here.
        # standard names
        self.pdf = self.dist.pdf
        self.logpdf = self.dist.logpdf
        self.cdf = self.dist.cdf
        self.logcdf = self.dist.logcdf
        self.rvs = self.dist.rvs
        self.moment = self.dist.moment
        self.stats = self.dist.stats
        # not all distributions have the following: fit, expect
        if hasattr(self.dist, "fit"):
            self.fit = self.dist.fit
        else:
            self.fit = _default_fit
        if hasattr(self.dist, "expect"):
            self.expect = self.dist.expect
        else:
            # BUG FIX: this branch used to assign ``self.fit = _default_expect``,
            # clobbering the ``fit`` attribute set above and leaving
            # ``self.expect`` undefined for distributions without ``expect``.
            self.expect = _default_expect
        # survival function. Called the complementary cumulative
        # function (ccdf) in Distributions.jl
        self.sf = self.ccdf = self.dist.sf
        self.logsf = self.logccdf = self.dist.logsf
        self.isf = self.dist.isf
        # Distributions.jl calls scipy's ppf function quantile. I like that
        self.ppf = self.quantile = self.dist.ppf
        # set docstrings
        self._set_docstrings()
        # NOTE(review): placeholder instance docstring kept for behavior
        # compatibility -- looks like leftover debug; confirm before removing.
        self.__doc__ = "foobar"

    def _set_docstrings(self):
        """Install the module-level docstring templates, formatted with this
        subclass's ``_metadata``, onto the underlying scipy methods."""
        fmt_args = default_docstr_args.copy()  # copy so ready for next use
        fmt_args.update(self._metadata)  # pull in data from subclass
        # define docstrings
        self.pdf.__func__.__doc__ = pdf_docstr.format(**fmt_args)
        self.logpdf.__func__.__doc__ = logpdf_docstr.format(**fmt_args)
        self.cdf.__func__.__doc__ = cdf_docstr.format(**fmt_args)
        self.logcdf.__func__.__doc__ = logcdf_docstr.format(**fmt_args)
        self.rvs.__func__.__doc__ = rvs_docstr.format(**fmt_args)
        # survival function stuff
        self.sf.__func__.__doc__ = sf_docstr.format(**fmt_args)
        self.ccdf.__func__.__doc__ = self.sf.__func__.__doc__
        self.logsf.__func__.__doc__ = logsf_docstr.format(**fmt_args)
        self.logccdf.__func__.__doc__ = self.logsf.__func__.__doc__
        self.isf.__func__.__doc__ = isf_docstr.format(**fmt_args)
        # ppf
        self.ppf.__func__.__doc__ = ppf_docstr.format(**fmt_args)
        self.quantile.__func__.__doc__ = self.ppf.__func__.__doc__
        # from distributions.jl
        self.rand.__func__.__doc__ = rand_docstr.format(**fmt_args)
        self.loglikelihood.__func__.__doc__ = ll_docstr.format(**fmt_args)
        self.invlogcdf.__func__.__doc__ = invlogcdf_docstr.format(**fmt_args)
        self.cquantile.__func__.__doc__ = cquantile_docstr.format(**fmt_args)
        self.invlogccdf.__func__.__doc__ = invlccdf_docstr.format(**fmt_args)

    def __str__(self):
        return self._metadata["_str"] % (self.params)

    def __repr__(self):
        return self.__str__()

    @property
    def mean(self):
        """Mean of the distribution."""
        return self.dist.stats(moments="m")

    @property
    def var(self):
        """Variance of the distribution."""
        return self.dist.stats(moments="v")

    @property
    def std(self):
        """Standard deviation (square root of the variance)."""
        return sqrt(self.var)

    @property
    def skewness(self):
        """Skewness of the distribution."""
        return self.dist.stats(moments="s")

    @property
    def kurtosis(self):
        """Excess (Fisher) kurtosis of the distribution; 0 for the normal."""
        return self.dist.stats(moments="k")

    @property
    def median(self):
        """Median of the distribution."""
        return self.dist.median()

    @property
    def mode(self):
        # NOTE(review): ppf(0.5) is the *median*, not the mode; scipy exposes
        # no generic mode, so original behavior is kept -- confirm intent.
        return self.dist.ppf(0.5)

    @property
    def isplatykurtic(self):
        """True if the distribution is platykurtic (negative excess kurtosis).

        BUG FIX: this test and ``isleptokurtic`` were inverted; platykurtic
        ("flat") distributions have *negative* excess kurtosis.
        """
        return self.kurtosis < 0

    @property
    def isleptokurtic(self):
        """True if the distribution is leptokurtic (positive excess kurtosis)."""
        return self.kurtosis > 0

    @property
    def ismesokurtic(self):
        """True if the distribution has zero excess kurtosis (like the normal)."""
        return self.kurtosis == 0.0

    @property
    def entropy(self):
        """Differential entropy of the distribution, as a Python float."""
        return float(self.dist.entropy())

    def rand(self, *args):
        """Draw random samples; integer arguments give the output shape."""
        return self.dist.rvs(size=args)

    def loglikelihood(self, x):
        """Log-likelihood of the samples in x: sum of logpdf over x."""
        return sum(self.logpdf(x))

    def invlogcdf(self, x):
        """Inverse of the logcdf: quantile(exp(x))."""
        return self.quantile(np.exp(x))

    def cquantile(self, x):
        """Complementary quantile: quantile(1 - x)."""
        return self.quantile(1.0 - x)

    def invlogccdf(self, x):
        """Inverse of the logccdf: quantile(1 - exp(x))."""
        return self.quantile(-(np.exp(x) - 1.0))
| spencerlyon2/distcan | distcan/scipy_wrap.py | Python | mit | 10,929 | 0 |
#----------------------------------------------------------------------------------------
# nautilus-copypath - Quickly copy file paths to the clipboard from Nautilus.
# Copyright (C) Ronen Lapushner 2017-2018.
# Distributed under the GPL-v3+ license. See LICENSE for more information
#----------------------------------------------------------------------------------------
import gi
gi.require_version('Nautilus', '3.0')
gi.require_version('Gtk', '3.0')
from gi.repository import Nautilus, GObject, Gtk, Gdk
class CopyPathExtension(GObject.GObject, Nautilus.MenuProvider):
    """Nautilus menu provider that copies file/directory paths to the clipboard."""

    def __init__(self):
        # Grab the global clipboard so copied paths can be pasted anywhere.
        self.clipboard = Gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD)

    def __sanitize_path(self, path):
        # Backslash-escape characters that would break the path in a shell.
        for special in (' ', '(', ')'):
            path = path.replace(special, '\\' + special)
        return path

    def __copy_files_path(self, menu, files):
        # Sanitized filesystem path for every selected file
        # (protocol headers are stripped by get_path()).
        sanitized = [self.__sanitize_path(info.get_location().get_path())
                     for info in files]
        # One path per line; nothing is copied for an empty selection.
        if sanitized:
            self.clipboard.set_text('\n'.join(sanitized), -1)

    def __copy_dir_path(self, menu, path):
        if path is None:
            return
        dir_path = self.__sanitize_path(path.get_location().get_path())
        self.clipboard.set_text(dir_path, -1)

    def get_file_items(self, window, files):
        # Pluralize the label when several items are selected.
        menu_item = Nautilus.MenuItem(
            name='PathUtils::CopyPath',
            label='Copy Paths' if len(files) > 1 else 'Copy Path',
            tip='Copy the full path to the clipboard'
        )
        menu_item.connect('activate', self.__copy_files_path, files)
        return (menu_item,)

    def get_background_items(self, window, file):
        menu_item = Nautilus.MenuItem(
            name='PathUtils::CopyCurrentDirPath',
            label='Copy Directory Path',
            tip="Copy the current directory's path to the clipboard"
        )
        menu_item.connect('activate', self.__copy_dir_path, file)
        return (menu_item,)
| ronen25/nautilus-copypath | nautilus-copypath.py | Python | gpl-3.0 | 2,595 | 0.004624 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for conv."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function
@register_make_test_function()
def make_greater_tests(options):
  """Build the zipped test cases for the tf.greater operator."""

  # Cover several dtypes and shape pairs (including broadcasting), plus a
  # fully-quantized float32 variant.
  test_parameters = [{
      "input_dtype": [tf.float32, tf.int32, tf.int64],
      "input_shape_pair": [([1, 1, 1, 3], [1, 1, 1, 3]),
                           ([2, 3, 4, 5], [2, 3, 4, 5]), ([2, 3, 3], [2, 3]),
                           ([5, 5], [1]), ([10], [2, 4, 10])],
      "fully_quantize": [False],
  }, {
      "input_dtype": [tf.float32],
      "input_shape_pair": [([1, 1, 1, 3], [1, 1, 1, 3]), ([2, 3, 3], [2, 3])],
      "fully_quantize": [True],
  }]

  def build_graph(parameters):
    """Create a placeholder for each operand and apply tf.greater."""
    lhs = tf.compat.v1.placeholder(
        dtype=parameters["input_dtype"],
        name="input1",
        shape=parameters["input_shape_pair"][0])
    rhs = tf.compat.v1.placeholder(
        dtype=parameters["input_dtype"],
        name="input2",
        shape=parameters["input_shape_pair"][1])
    return [lhs, rhs], [tf.greater(lhs, rhs)]

  def build_inputs(parameters, sess, inputs, outputs):
    """Generate random operand values and evaluate the graph on them."""
    dtype = parameters["input_dtype"]
    shape_pair = parameters["input_shape_pair"]
    feed_values = [create_tensor_data(dtype, shape_pair[0]),
                   create_tensor_data(dtype, shape_pair[1])]
    return feed_values, sess.run(
        outputs, feed_dict=dict(zip(inputs, feed_values)))

  make_zip_of_tests(
      options,
      test_parameters,
      build_graph,
      build_inputs,
      expected_tf_failures=4)
| karllessard/tensorflow | tensorflow/lite/testing/op_tests/greater.py | Python | apache-2.0 | 2,735 | 0.001828 |
# Installation-layout constants for the Xen tools (generated-style config).
# System and user binaries:
SBINDIR="/usr/sbin"
BINDIR="/usr/bin"
# Internal helper binaries and libraries:
LIBEXEC="/usr/lib/xen/bin"
LIBDIR="/usr/lib64"
SHAREDIR="/usr/share"
PRIVATE_BINDIR="/usr/lib64/xen/bin"
# Guest firmware images (hvmloader etc.):
XENFIRMWAREDIR="/usr/lib/xen/boot"
# Configuration and hotplug scripts:
XEN_CONFIG_DIR="/etc/xen"
XEN_SCRIPT_DIR="/etc/xen/scripts"
# Runtime state directories:
XEN_LOCK_DIR="/var/lock"
XEN_RUN_DIR="/var/run/xen"
XEN_PAGING_DIR="/var/lib/xen/xenpaging"
| Hearen/OnceServer | pool_management/bn-xend-core/util/path.py | Python | mit | 330 | 0.036364 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2002-2006 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with translate; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""classes that hold units of comma-separated values (.csv) files (csvunit)
or entire files (csvfile) for use with localisation
"""
import csv
from translate.misc import sparse
from translate.storage import base
class SimpleDictReader:
    """csv.DictReader-like iterator built on translate.misc.sparse.

    Tokenizes the whole file up front with SimpleParser (separators ","
    and "\n", "\r" treated as whitespace) and yields one dict per row,
    keyed by ``fieldnames``.  Unlike csv.DictReader it evaluates quoted
    fields itself, so quotes inside unquoted fields survive.
    """

    def __init__(self, fileobj, fieldnames):
        # fileobj: open file-like object; read fully and tokenized once here.
        self.fieldnames = fieldnames
        self.contents = fileobj.read()
        self.parser = sparse.SimpleParser(defaulttokenlist=[",", "\n"], whitespacechars="\r")
        # Keep backslashes literal; only double quotes delimit strings.
        self.parser.stringescaping = 0
        self.parser.quotechars = '"'
        self.tokens = self.parser.tokenize(self.contents)
        # Cursor into self.tokens; advanced by next().
        self.tokenpos = 0

    def __iter__(self):
        return self

    def getvalue(self, value):
        """returns a value, evaluating strings as neccessary"""
        # A field is treated as quoted only when the *whole* token is wrapped
        # in matching single or double quotes.
        if (value.startswith("'") and value.endswith("'")) or (value.startswith('"') and value.endswith('"')):
            return sparse.stringeval(value)
        else:
            return value

    def next(self):
        # Python 2 iterator protocol: return the next row as a dict, or
        # raise StopIteration when the token stream is exhausted.
        lentokens = len(self.tokens)
        # Skip blank lines before the row.
        while self.tokenpos < lentokens and self.tokens[self.tokenpos] == "\n":
            self.tokenpos += 1
        if self.tokenpos >= lentokens:
            raise StopIteration()
        # Collect every token up to the end of this line...
        thistokens = []
        while self.tokenpos < lentokens and self.tokens[self.tokenpos] != "\n":
            thistokens.append(self.tokens[self.tokenpos])
            self.tokenpos += 1
        # ...and consume the trailing newline token(s).
        while self.tokenpos < lentokens and self.tokens[self.tokenpos] == "\n":
            self.tokenpos += 1
        fields = []
        # patch together fields since we can have quotes inside a field
        currentfield = ''
        fieldparts = 0
        for token in thistokens:
            if token == ',':
                # a field is only quoted if the whole thing is quoted
                if fieldparts == 1:
                    currentfield = self.getvalue(currentfield)
                fields.append(currentfield)
                currentfield = ''
                fieldparts = 0
            else:
                currentfield += token
                fieldparts += 1
        # things after the last comma...
        if fieldparts:
            if fieldparts == 1:
                currentfield = self.getvalue(currentfield)
            fields.append(currentfield)
        # Map positional fields onto the configured names; short rows get
        # empty strings for the missing trailing columns.
        values = {}
        for fieldnum in range(len(self.fieldnames)):
            if fieldnum >= len(fields):
                values[self.fieldnames[fieldnum]] = ""
            else:
                values[self.fieldnames[fieldnum]] = fields[fieldnum]
        return values
class csvunit(base.TranslationUnit):
    """A single csv row: location comment, source text and target text."""

    # Leading characters a spreadsheet would treat as a formula/operator,
    # paired with their backslash-escaped forms.
    spreadsheetescapes = [("+", "\\+"), ("-", "\\-"), ("=", "\\="), ("'", "\\'")]

    def __init__(self, source=None):
        super(csvunit, self).__init__(source)
        self.comment = ""
        self.source = source
        self.target = ""

    def add_spreadsheet_escapes(self, source, target):
        """escape leading spreadsheet-operator characters in both strings"""
        for plain, quoted in self.spreadsheetescapes:
            if source.startswith(plain):
                source = source.replace(plain, quoted, 1)
            if target.startswith(plain):
                target = target.replace(plain, quoted, 1)
        return source, target

    def remove_spreadsheet_escapes(self, source, target):
        """undo the escaping of leading spreadsheet-operator characters"""
        for plain, quoted in self.spreadsheetescapes:
            if source.startswith(quoted):
                source = source.replace(quoted, plain, 1)
            if target.startswith(quoted):
                target = target.replace(quoted, plain, 1)
        return source, target

    def fromdict(self, cedict):
        """populate this unit from a dict of utf-8 encoded csv fields"""
        # Decode first (a stored None would raise, as before), then normalize.
        for attr, key in (('comment', 'location'),
                          ('source', 'source'),
                          ('target', 'target')):
            setattr(self, attr, cedict.get(key, '').decode('utf-8'))
        if self.comment is None:
            self.comment = ''
        if self.source is None:
            self.source = ''
        if self.target is None:
            self.target = ''
        self.source, self.target = self.remove_spreadsheet_escapes(self.source, self.target)

    def todict(self, encoding='utf-8'):
        """return this unit as a dict of encoded strings for the csv writer"""
        def encoded(value):
            # Encode unicode values; leave byte strings untouched.
            if isinstance(value, unicode):
                return value.encode(encoding)
            return value
        source, target = self.add_spreadsheet_escapes(self.source, self.target)
        return {'location': encoded(self.comment),
                'source': encoded(source),
                'target': encoded(target)}
class csvfile(base.TranslationStore):
    """This class represents a .csv file with various lines.
    The default format contains three columns: location, source, target"""
    UnitClass = csvunit
    Name = _("Comma Separated Value")
    Mimetypes = ['text/comma-separated-values', 'text/csv']
    Extensions = ["csv"]

    def __init__(self, inputfile=None, fieldnames=None):
        base.TranslationStore.__init__(self, unitclass=self.UnitClass)
        self.units = []
        if fieldnames is None:
            self.fieldnames = ['location', 'source', 'target']
        else:
            # Accept either a list of names or a comma-separated string.
            if isinstance(fieldnames, basestring):
                fieldnames = [fieldname.strip() for fieldname in fieldnames.split(",")]
            self.fieldnames = fieldnames
        self.filename = getattr(inputfile, 'name', '')
        if inputfile is not None:
            csvsrc = inputfile.read()
            inputfile.close()
            self.parse(csvsrc)

    def parse(self, csvsrc):
        """parse the given csv source string into translation units"""
        # FIX: previously used csv.StringIO, which only works because
        # Python 2's csv module happens to import cStringIO internally --
        # an implementation detail, not part of the csv API.  Import an
        # explicit StringIO instead (io.StringIO as a fallback).
        try:
            from cStringIO import StringIO
        except ImportError:
            from io import StringIO
        stream = StringIO(csvsrc)
        reader = SimpleDictReader(stream, self.fieldnames)
        for row in reader:
            newce = self.UnitClass()
            newce.fromdict(row)
            self.addunit(newce)

    def __str__(self):
        """convert to a string. double check that unicode is handled somehow here"""
        source = self.getoutput()
        if isinstance(source, unicode):
            return source.encode(getattr(self, "encoding", "UTF-8"))
        return source

    def getoutput(self):
        """serialize all units back into a csv string"""
        # FIX: same explicit StringIO as in parse() instead of csv.StringIO.
        try:
            from cStringIO import StringIO
        except ImportError:
            from io import StringIO
        stream = StringIO()
        writer = csv.DictWriter(stream, self.fieldnames)
        for ce in self.units:
            cedict = ce.todict()
            writer.writerow(cedict)
        # FIX: rewind with seek(0); .reset() exists only on cStringIO objects.
        stream.seek(0)
        return "".join(stream.readlines())
| lehmannro/translate | storage/csvl10n.py | Python | gpl-2.0 | 7,279 | 0.001786 |
"""
API for initiating and tracking requests for credit from a provider.
"""
from __future__ import absolute_import
import datetime
import logging
import uuid
import pytz
import six
from django.db import transaction
from edx_proctoring.api import get_last_exam_completion_date
from openedx.core.djangoapps.credit.exceptions import (
CreditProviderNotConfigured,
CreditRequestNotFound,
InvalidCreditStatus,
RequestAlreadyCompleted,
UserIsNotEligible
)
from openedx.core.djangoapps.credit.models import (
CreditEligibility,
CreditProvider,
CreditRequest,
CreditRequirementStatus
)
from openedx.core.djangoapps.credit.signature import get_shared_secret_key, signature
from student.models import CourseEnrollment, User
from util.date_utils import to_timestamp
from util.json_request import JsonResponse
# TODO: Cleanup this mess! ECOM-2908
log = logging.getLogger(__name__)
def get_credit_providers(providers_list=None):
    """Look up available credit providers.

    Arguments:
        providers_list (list of str or None): identifiers of the credit
            providers to return; ``None`` returns every available provider.

    Returns:
        list of dict: one dictionary per provider, each with keys such as
        ``id``, ``name``, ``url``, ``status_url``, ``description``,
        ``enable_integration`` and ``fulfillment_instructions``.

    Example:
        >>> get_credit_providers(['hogwarts'])
        [{"id": "hogwarts", "name": "Hogwarts School of Witchcraft and Wizardry", ...}]
    """
    available_providers = CreditProvider.get_credit_providers(providers_list=providers_list)
    return available_providers
def get_credit_provider_info(request, provider_id):  # pylint: disable=unused-argument
    """Return the 'CreditProvider' model data for the given provider as JSON.

    Args:
        request: the incoming HTTP request (unused, kept for the URL-conf
            view signature).
        provider_id (str): identifier of the credit provider.

    Returns:
        JsonResponse: the provider's fields, or an empty object when no
        provider with that id is configured.
    """
    provider = CreditProvider.get_credit_provider(provider_id=provider_id)
    if not provider:
        return JsonResponse({})
    # Every response key happens to match the model attribute of the same
    # name, so the payload can be built by attribute lookup (order preserved).
    field_names = (
        "provider_id",
        "display_name",
        "provider_url",
        "provider_status_url",
        "provider_description",
        "enable_integration",
        "fulfillment_instructions",
        "thumbnail_url",
    )
    return JsonResponse({name: getattr(provider, name) for name in field_names})
@transaction.atomic
def create_credit_request(course_key, provider_id, username):
    """
    Initiate a request for credit from a credit provider.
    This will return the parameters that the user's browser will need to POST
    to the credit provider. It does NOT calculate the signature.
    Only users who are eligible for credit (have satisfied all credit requirements) are allowed to make requests.
    A provider can be configured either with *integration enabled* or not.
    If automatic integration is disabled, this method will simply return
    a URL to the credit provider and method set to "GET", so the student can
    visit the URL and request credit directly. No database record will be created
    to track these requests.
    If automatic integration *is* enabled, then this will also return the parameters
    that the user's browser will need to POST to the credit provider.
    These parameters will be digitally signed using a secret key shared with the credit provider.
    A database record will be created to track the request with a 32-character UUID.
    The returned dictionary can be used by the user's browser to send a POST request to the credit provider.
    If a pending request already exists, this function should return a request description with the same UUID.
    (Other parameters, such as the user's full name may be different than the original request).
    If a completed request (either accepted or rejected) already exists, this function will
    raise an exception. Users are not allowed to make additional requests once a request
    has been completed.
    Arguments:
        course_key (CourseKey): The identifier for the course.
        provider_id (str): The identifier of the credit provider.
        username (str): The user initiating the request.
    Returns: dict
    Raises:
        UserIsNotEligible: The user has not satisfied eligibility requirements for credit.
        CreditProviderNotConfigured: The credit provider has not been configured for this course.
        RequestAlreadyCompleted: The user has already submitted a request and received a response
            from the credit provider.
    Example Usage:
        >>> create_credit_request(course.id, "hogwarts", "ron")
        {
            "url": "https://credit.example.com/request",
            "method": "POST",
            "parameters": {
                "request_uuid": "557168d0f7664fe59097106c67c3f847",
                "timestamp": 1434631630,
                "course_org": "HogwartsX",
                "course_num": "Potions101",
                "course_run": "1T2015",
                "final_grade": "0.95",
                "user_username": "ron",
                "user_email": "ron@example.com",
                "user_full_name": "Ron Weasley",
                "user_mailing_address": "",
                "user_country": "US",
                "signature": "cRCNjkE4IzY+erIjRwOQCpRILgOvXx4q2qvx141BCqI="
            }
        }
    """
    # Eligibility and provider lookups happen first; either failure aborts
    # the whole (atomic) transaction before any request row is written.
    try:
        user_eligibility = CreditEligibility.objects.select_related('course').get(
            username=username,
            course__course_key=course_key
        )
        credit_course = user_eligibility.course
        credit_provider = CreditProvider.objects.get(provider_id=provider_id)
    except CreditEligibility.DoesNotExist:
        log.warning(
            u'User "%s" tried to initiate a request for credit in course "%s", '
            u'but the user is not eligible for credit',
            username, course_key
        )
        raise UserIsNotEligible
    except CreditProvider.DoesNotExist:
        log.error(u'Credit provider with ID "%s" has not been configured.', provider_id)
        raise CreditProviderNotConfigured
    # Check if we've enabled automatic integration with the credit
    # provider. If not, we'll show the user a link to a URL
    # where the user can request credit directly from the provider.
    # Note that we do NOT track these requests in our database,
    # since the state would always be "pending" (we never hear back).
    if not credit_provider.enable_integration:
        return {
            "url": credit_provider.provider_url,
            "method": "GET",
            "parameters": {}
        }
    else:
        # If automatic credit integration is enabled, then try
        # to retrieve the shared signature *before* creating the request.
        # That way, if there's a misconfiguration, we won't have requests
        # in our system that we know weren't sent to the provider.
        shared_secret_key = get_shared_secret_key(credit_provider.provider_id)
        if shared_secret_key is None:
            msg = u'Credit provider with ID "{provider_id}" does not have a secret key configured.'.format(
                provider_id=credit_provider.provider_id
            )
            log.error(msg)
            raise CreditProviderNotConfigured(msg)
        # Initiate a new request if one has not already been created
        credit_request, created = CreditRequest.objects.get_or_create(
            course=credit_course,
            provider=credit_provider,
            username=username,
        )
        # Check whether we've already gotten a response for a request,
        # If so, we're not allowed to issue any further requests.
        # Skip checking the status if we know that we just created this record.
        if not created and credit_request.status != "pending":
            log.warning(
                (
                    u'Cannot initiate credit request because the request with UUID "%s" '
                    u'exists with status "%s"'
                ), credit_request.uuid, credit_request.status
            )
            raise RequestAlreadyCompleted
        if created:
            # 32-char hex UUID used by the provider to identify this request.
            credit_request.uuid = uuid.uuid4().hex
        # Retrieve user account and profile info
        user = User.objects.select_related('profile').get(username=username)
        # Retrieve the final grade from the eligibility table
        try:
            final_grade = CreditRequirementStatus.objects.get(
                username=username,
                requirement__namespace="grade",
                requirement__name="grade",
                requirement__course__course_key=course_key,
                status="satisfied"
            ).reason["final_grade"]
            # NOTE (CCB): Limiting the grade to seven characters is a hack for ASU.
            if len(six.text_type(final_grade)) > 7:
                final_grade = u'{:.5f}'.format(final_grade)
            else:
                final_grade = six.text_type(final_grade)
        except (CreditRequirementStatus.DoesNotExist, TypeError, KeyError):
            # TypeError/KeyError cover a missing or malformed `reason` payload.
            msg = u'Could not retrieve final grade from the credit eligibility table for ' \
                  u'user [{user_id}] in course [{course_key}].'.format(user_id=user.id, course_key=course_key)
            log.exception(msg)
            raise UserIsNotEligible(msg)
        # Getting the students's enrollment date
        course_enrollment = CourseEnrollment.get_enrollment(user, course_key)
        enrollment_date = course_enrollment.created if course_enrollment else ""
        # Getting the student's course completion date
        completion_date = get_last_exam_completion_date(course_key, username)
        # Payload the user's browser will POST to the provider; the signature
        # below covers exactly these keys.
        parameters = {
            "request_uuid": credit_request.uuid,
            "timestamp": to_timestamp(datetime.datetime.now(pytz.UTC)),
            "course_org": course_key.org,
            "course_num": course_key.course,
            "course_run": course_key.run,
            "enrollment_timestamp": to_timestamp(enrollment_date) if enrollment_date else "",
            "course_completion_timestamp": to_timestamp(completion_date) if completion_date else "",
            "final_grade": final_grade,
            "user_username": user.username,
            "user_email": user.email,
            "user_full_name": user.profile.name,
            "user_mailing_address": "",
            "user_country": (
                user.profile.country.code
                if user.profile.country.code is not None
                else ""
            ),
        }
        credit_request.parameters = parameters
        credit_request.save()
        if created:
            log.info(u'Created new request for credit with UUID "%s"', credit_request.uuid)
        else:
            log.info(
                u'Updated request for credit with UUID "%s" so the user can re-issue the request',
                credit_request.uuid
            )
        # Sign the parameters using a secret key we share with the credit provider.
        # The signature is added after saving so it is never persisted locally.
        parameters["signature"] = signature(parameters, shared_secret_key)
        return {
            "url": credit_provider.provider_url,
            "method": "POST",
            "parameters": parameters
        }
def update_credit_request_status(request_uuid, provider_id, status):
    """
    Approve or reject a student's request for credit from a provider.

    The caller is responsible for authentication/authorization (e.g. checking
    the signature of the message received from the credit provider); no such
    check is performed here. The update is idempotent: setting a request to a
    status it already has is a no-op apart from logging.

    Arguments:
        request_uuid (str): The unique identifier for the credit request.
        provider_id (str): Identifier for the credit provider.
        status (str): Either "approved" or "rejected"

    Returns: None

    Raises:
        CreditRequestNotFound: No request exists that is associated with the
            given provider.
        InvalidCreditStatus: The status is not either "approved" or "rejected".
    """
    valid_statuses = (
        CreditRequest.REQUEST_STATUS_APPROVED,
        CreditRequest.REQUEST_STATUS_REJECTED,
    )
    if status not in valid_statuses:
        raise InvalidCreditStatus

    # Only the lookup can raise DoesNotExist, so keep the except clause
    # scoped to it.
    try:
        credit_request = CreditRequest.objects.get(
            uuid=request_uuid, provider__provider_id=provider_id
        )
    except CreditRequest.DoesNotExist:
        msg = (
            u'Credit provider with ID "{provider_id}" attempted to '
            u'update request with UUID "{request_uuid}", but no request '
            u'with this UUID is associated with the provider.'
        ).format(provider_id=provider_id, request_uuid=request_uuid)
        log.warning(msg)
        raise CreditRequestNotFound(msg)

    previous_status = credit_request.status
    credit_request.status = status
    credit_request.save()
    log.info(
        u'Updated request with UUID "%s" from status "%s" to "%s" for provider with ID "%s".',
        request_uuid, previous_status, status, provider_id
    )
def get_credit_requests_for_user(username):
    """
    Return every credit request initiated by the given user.

    Each entry describes one request: its UUID, last-modified timestamp,
    course key, provider, and current status ("pending", "approved" or
    "rejected").

    Arguments:
        username (unicode): The username of the user who initiated the requests.

    Returns: list

    Example Usage:
        >>> get_credit_requests_for_user("bob")
        [
            {
                "uuid": "557168d0f7664fe59097106c67c3f847",
                "timestamp": 1434631630,
                "course_key": "course-v1:HogwartsX+Potions101+1T2015",
                "provider": {
                    "id": "HogwartsX",
                    "display_name": "Hogwarts School of Witchcraft and Wizardry",
                },
                "status": "pending"
            }
        ]
    """
    return CreditRequest.credit_requests_for_user(username)
def get_credit_request_status(username, course_key):
    """Look up the most recent credit request a user made for a course.

    The latest request across all credit providers is considered. Valid
    statuses are 'pending', 'approved' and 'rejected'.

    Args:
        username(str): The username of user
        course_key(CourseKey): The course locator key

    Returns:
        A dict describing the request (uuid, timestamp, course_key, provider,
        status), or an empty dict when the user has made no request.
    """
    credit_request = CreditRequest.get_user_request_status(username, course_key)
    if not credit_request:
        return {}
    return {
        "uuid": credit_request.uuid,
        "timestamp": credit_request.modified,
        "course_key": credit_request.course.course_key,
        "provider": {
            "id": credit_request.provider.provider_id,
            "display_name": credit_request.provider.display_name,
        },
        "status": credit_request.status,
    }
| ESOedX/edx-platform | openedx/core/djangoapps/credit/api/provider.py | Python | agpl-3.0 | 16,234 | 0.002649 |
import os
from .base import Output
class AppleSay(Output):
    """Speech output supporting the Apple Say subsystem."""
    name = 'Apple Say'

    def __init__(self, voice='Alex', rate='300'):
        """Create the output.

        Args:
            voice: macOS voice name passed to ``say -v``.
            rate: speaking rate passed to ``say -r``.
        """
        self.voice = voice
        self.rate = rate
        super(AppleSay, self).__init__()

    def is_active(self):
        # `which` exits 0 when the binary exists; os.system returns that exit
        # status, so "not status" is True iff `say` is available.
        return not os.system('which say')

    def speak(self, text, interrupt=0):
        """Speak `text` asynchronously, optionally cutting off current speech."""
        if interrupt:
            self.silence()
        # Escape the characters that are special inside double quotes in a
        # POSIX shell. Previously raw `text` was interpolated, so any '"',
        # '$', '`' or '\' could break the command line or inject shell syntax.
        safe_text = (text.replace('\\', '\\\\')
                         .replace('"', '\\"')
                         .replace('$', '\\$')
                         .replace('`', '\\`'))
        os.system('say -v %s -r %s "%s" &' % (self.voice, self.rate, safe_text))

    def silence(self):
        # Crude but effective: kill every running `say` process.
        os.system('killall say')
output_class = AppleSay | frastlin/PyAudioGame | pyaudiogame/accessible_output2/outputs/say.py | Python | mit | 535 | 0.050467 |
# -*- Mode: Python -*-
import socket
import unittest
__version__ = '0.1.1'
from .cys2n import *
# Mapping from protocol name to s2n's numeric protocol-version code.
protocol_version_map = {
    'SSLv2': 20,
    'SSLv3': 30,
    'TLS10': 31,
    'TLS11': 32,
    'TLS12': 33,
}

class PROTOCOL:
    """Namespace of protocol version codes, populated from
    ``protocol_version_map`` below; ``reverse_map`` maps code -> name."""
    reverse_map = {}

for name, val in protocol_version_map.items():
    setattr(PROTOCOL, name, val)
    PROTOCOL.reverse_map[val] = name
class s2n_socket:
    """Socket-like wrapper pairing a plain Python socket with an s2n TLS
    Connection. The TLS handshake is performed lazily on first recv/send.
    """
    def __init__ (self, cfg, pysock, conn=None):
        # cfg: s2n Config shared across connections; pysock: wrapped socket;
        # conn: existing s2n Connection (set by accept/connect when None here).
        self.cfg = cfg
        self.sock = pysock
        self.fd = pysock.fileno()
        self.conn = conn
        self.negotiated = False
    def __repr__ (self):
        return '<s2n sock=%r conn=%r @%x>' % (self.sock, self.conn, id (self))
    def bind (self, *args, **kwargs):
        # Plain delegation; no TLS involvement before accept().
        return self.sock.bind (*args, **kwargs)
    def listen (self, *args, **kwargs):
        return self.sock.listen (*args, **kwargs)
    def accept (self):
        """Accept a TCP connection and wrap it in a server-mode s2n socket."""
        sock, addr = self.sock.accept()
        conn = Connection (MODE.SERVER)
        conn.set_config (self.cfg)
        conn.set_fd (sock.fileno())
        # XXX verify
        new = self.__class__ (self.cfg, sock, conn)
        return new, addr
    # XXX client mode as yet untested.
    def connect (self, addr):
        """Connect the wrapped socket and attach a client-mode s2n Connection."""
        self.sock.connect (addr)
        self.conn = Connection (MODE.CLIENT)
        self.conn.set_config (self.cfg)
        self.conn.set_fd (self.fd)
    def _check_negotiated (self):
        # Lazily run the handshake before the first encrypted I/O.
        if not self.negotiated:
            self.negotiate()
    def negotiate (self):
        """Perform the TLS handshake once; subsequent calls are no-ops."""
        if not self.negotiated:
            self.conn.negotiate()
            self.negotiated = True
    def recv (self, block_size):
        """Read up to block_size plaintext bytes, looping over partial reads."""
        self._check_negotiated()
        r = []
        left = block_size
        while left:
            # conn.recv returns (data, more); `more` appears to indicate that
            # additional data may still be read -- confirm against cys2n.
            b, more = self.conn.recv (left)
            r.append (b)
            if not more:
                break
            else:
                left -= len(b)
        return b''.join (r)
    def send (self, data):
        """Write `data`, looping over partial writes; returns bytes sent."""
        self._check_negotiated()
        pos = 0
        left = len(data)
        while left:
            # conn.send returns (bytes_written, more); stop early when the
            # connection signals it cannot accept more -- confirm semantics.
            n, more = self.conn.send (data, pos)
            pos += n
            if not more:
                break
            else:
                pass
            left -= n
        return pos
    def shutdown (self, how=None):
        # s2n shutdown may need several rounds (close_notify exchange); the
        # `how` argument exists only for socket-API compatibility and is unused.
        more = 1
        while more:
            more = self.conn.shutdown()
    def close (self):
        # Always close the underlying socket, even if TLS shutdown fails.
        try:
            self.shutdown()
        finally:
            self.sock.close()
| samrushing/cys2n | cys2n/__init__.py | Python | bsd-2-clause | 2,452 | 0.01509 |
#!/usr/bin/env python3
import pwd
# Print the login name of every account whose shell is bash.
bash_logins = (entry.pw_name for entry in pwd.getpwall()
               if entry.pw_shell.endswith('/bin/bash'))
for login in bash_logins:
    print(login)
| thunderoy/dgplug_training | assignments/assign4.py | Python | mit | 121 | 0.008264 |
print " Content-Type: text/html; charset=utf-8"
print ""
print """
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<script type="text/javascript" src="http://yui.yahooapis.com/combo?3.3.0/build/yui/yui-min.js&3.3.0/build/loader/loader-min.js"></script>
</head>
<body class="yui3-skin-sam yui-skin-sam">
<h1>Test pili.lite in real case</h1>
<div id="testLogger"></div>
<script type='text/javascript'>
%s
</script>
</body>
</html>
""" % open("base_real.js").read()
| girvan/pili | test/base_real.py | Python | lgpl-3.0 | 501 | 0.003992 |
# Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""""Implementation of Spatial Transformer networks core components."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from itertools import chain
# Dependency imports
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from sonnet.python.modules import base
from sonnet.python.modules import basic
import tensorflow as tf
class GridWarper(base.AbstractModule):
  """Grid warper interface class.

  An object implementing the `GridWarper` interface generates a reference grid
  of feature points at construction time, and warps it via a parametric
  transformation model, specified at run time by an input parameter Tensor.

  Grid warpers must then implement a `create_features` function used to generate
  the reference grid to be warped in the forward pass (according to a determined
  warping model).
  """

  def __init__(self, source_shape, output_shape, num_coeff, name, **kwargs):
    """Constructs a GridWarper module and initializes the source grid params.

    `source_shape` and `output_shape` are used to define the size of the source
    and output signal domains, as opposed to the shape of the respective
    Tensors. For example, for an image of size `width=W` and `height=H`,
    `{source,output}_shape=[H, W]`; for a volume of size `width=W`, `height=H`
    and `depth=D`, `{source,output}_shape=[H, W, D]`.

    Args:
      source_shape: Iterable of integers determining the size of the source
        signal domain.
      output_shape: Iterable of integers determining the size of the destination
        resampled signal domain.
      num_coeff: Number of coefficients parametrizing the grid warp.
        For example, a 2D affine transformation will be defined by the 6
        parameters populating the corresponding 2x3 affine matrix.
      name: Name of Module.
      **kwargs: Extra kwargs to be forwarded to the `create_features` function,
        instantiating the source grid parameters.

    Raises:
      Error: If `len(output_shape) > len(source_shape)`.
      TypeError: If `output_shape` and `source_shape` are not both iterable.
    """
    super(GridWarper, self).__init__(name=name)
    self._source_shape = tuple(source_shape)
    self._output_shape = tuple(output_shape)
    if len(self._output_shape) > len(self._source_shape):
      raise base.Error('Output domain dimensionality ({}) must be equal or '
                       'smaller than source domain dimensionality ({})'
                       .format(len(self._output_shape),
                               len(self._source_shape)))
    self._num_coeff = num_coeff
    self._psi = self._create_features(**kwargs)

  @abc.abstractmethod
  def _create_features(self, **kwargs):
    """Generates matrix of features, of size `[num_coeff, num_points]`."""
    pass

  @property
  def n_coeff(self):
    """Returns number of coefficients of warping function."""
    # Bug fix: this property used to return `self._n_coeff`, an attribute that
    # is never assigned (the constructor stores `self._num_coeff`), so every
    # access raised AttributeError.
    return self._num_coeff

  @property
  def psi(self):
    """Returns a list of features used to compute the grid warp."""
    return self._psi

  @property
  def source_shape(self):
    """Returns a tuple containing the shape of the source signal."""
    return self._source_shape

  @property
  def output_shape(self):
    """Returns a tuple containing the shape of the output grid."""
    return self._output_shape
def _create_affine_features(output_shape, source_shape):
"""Generates n-dimensional homogenous coordinates for a given grid definition.
`source_shape` and `output_shape` are used to define the size of the source
and output signal domains, as opposed to the shape of the respective
Tensors. For example, for an image of size `width=W` and `height=H`,
`{source,output}_shape=[H, W]`; for a volume of size `width=W`, `height=H`
and `depth=D`, `{source,output}_shape=[H, W, D]`.
Args:
output_shape: Iterable of integers determining the shape of the grid to be
warped.
source_shape: Iterable of integers determining the domain of the signal to be
resampled.
Returns:
List of flattened numpy arrays of coordinates in range `[-1, 1]^N`, for
example:
```
[[x_0_0, .... , x_0_{n-1}],
....
[x_{M-1}_0, .... , x_{M-1}_{n-1}],
[x_{M}_0=0, .... , x_{M}_{n-1}=0],
...
[x_{N-1}_0=0, .... , x_{N-1}_{n-1}=0],
[1, ..., 1]]
```
where N is the dimensionality of the sampled space, M is the
dimensionality of the output space, i.e. 2 for images
and 3 for volumes, and n is the number of points in the output grid.
When the dimensionality of `output_shape` is smaller that that of
`source_shape` the last rows before [1, ..., 1] will be filled with 0.
"""
ranges = [np.linspace(-1, 1, x, dtype=np.float32)
for x in reversed(output_shape)]
psi = [x.reshape(-1) for x in np.meshgrid(*ranges, indexing='xy')]
dim_gap = len(source_shape) - len(output_shape)
for _ in xrange(dim_gap):
psi.append(np.zeros_like(psi[0], dtype=np.float32))
psi.append(np.ones_like(psi[0], dtype=np.float32))
return psi
class AffineGridWarper(GridWarper):
  """Affine Grid Warper class.
  The affine grid warper generates a reference grid of n-dimensional points
  and warps it via an affine transformation model determined by an input
  parameter Tensor. Some of the transformation parameters can be fixed at
  construction time via an `AffineWarpConstraints` object.
  """
  def __init__(self,
               source_shape,
               output_shape,
               constraints=None,
               name='affine_grid_warper'):
    """Constructs an AffineGridWarper.
    `source_shape` and `output_shape` are used to define the size of the source
    and output signal domains, as opposed to the shape of the respective
    Tensors. For example, for an image of size `width=W` and `height=H`,
    `{source,output}_shape=[H, W]`; for a volume of size `width=W`, `height=H`
    and `depth=D`, `{source,output}_shape=[H, W, D]`.
    Args:
      source_shape: Iterable of integers determining the size of the source
        signal domain.
      output_shape: Iterable of integers determining the size of the destination
        resampled signal domain.
      constraints: Either a double list of shape `[N, N+1]` defining constraints
        on the entries of a matrix defining an affine transformation in N
        dimensions, or an `AffineWarpConstraints` object. If the double list is
        passed, a numeric value bakes in a constraint on the corresponding
        entry in the transformation matrix, whereas `None` implies that the
        corresponding entry will be specified at run time.
      name: Name of module.
    Raises:
      Error: If constraints fully define the affine transformation; or if
        input grid shape and constraints have different dimensionality.
      TypeError: If output_shape and source_shape are not both iterable.
    """
    self._source_shape = tuple(source_shape)
    self._output_shape = tuple(output_shape)
    num_dim = len(source_shape)
    # Normalize `constraints` to an AffineWarpConstraints instance.
    if isinstance(constraints, AffineWarpConstraints):
      self._constraints = constraints
    elif constraints is None:
      self._constraints = AffineWarpConstraints.no_constraints(num_dim)
    else:
      self._constraints = AffineWarpConstraints(constraints=constraints)
    if self._constraints.num_free_params == 0:
      raise base.Error('Transformation is fully constrained.')
    if self._constraints.num_dim != num_dim:
      raise base.Error('Incompatible set of constraints provided: '
                       'input grid shape and constraints have different '
                       'dimensionality.')
    super(AffineGridWarper, self).__init__(source_shape=source_shape,
                                           output_shape=output_shape,
                                           num_coeff=6,
                                           name=name,
                                           constraints=self._constraints)
  def _create_features(self, constraints):
    """Creates all the matrices needed to compute the output warped grids."""
    affine_warp_constraints = constraints
    if not isinstance(affine_warp_constraints, AffineWarpConstraints):
      affine_warp_constraints = AffineWarpConstraints(affine_warp_constraints)
    mask = affine_warp_constraints.mask
    psi = _create_affine_features(output_shape=self._output_shape,
                                  source_shape=self._source_shape)
    # Per-dimension scaling that maps [-1, 1] grid coordinates into source
    # pixel coordinates; the same values double as decentering offsets.
    scales = [(x - 1.0) * .5 for x in reversed(self._source_shape)]
    offsets = scales
    # Transforming a point x's i-th coordinate via an affine transformation
    # is performed via the following dot product:
    #
    #  x_i' = s_i * (T_i * x) + t_i                                    (1)
    #
    # where Ti is the i-th row of an affine matrix, and the scalars s_i and t_i
    # define a decentering and global scaling into the source space.
    # In the AffineGridWarper some of the entries of Ti are provided via the
    # input, some others are instead fixed, according to the constraints
    # assigned in the constructor.
    # In create_features the internal dot product (1) is accordingly broken down
    # into two parts:
    #
    # x_i' = Ti[uncon_i] * x[uncon_i, :] + offset(con_var)             (2)
    #
    # i.e. the sum of the dot product of the free parameters (coming
    # from the input) indexed by uncond_i and an offset obtained by
    # precomputing the fixed part of (1) according to the constraints.
    # This step is implemented by analyzing row by row the constraints matrix
    # and saving into a list the x[uncon_i] and offset(con_var) data matrices
    # for each output dimension.
    features = []
    for row, scale in zip(mask, scales):
      # Rows of grid coordinates that correspond to free (unconstrained)
      # parameters; None when the whole row is constrained.
      x_i = np.array([x for x, is_active in zip(psi, row) if is_active])
      features.append(x_i * scale if len(x_i) else None)
    for row_i, row in enumerate(mask):
      x_i = None
      s = scales[row_i]
      for i, is_active in enumerate(row):
        if is_active:
          continue
        # In principle a whole row of the affine matrix can be fully
        # constrained. In that case the corresponding dot product between input
        # parameters and grid coordinates doesn't need to be implemented in the
        # computation graph since it can be precomputed.
        # When a whole row if constrained, x_i - which is initialized to
        # None - will still be None at the end do the loop when it is appended
        # to the features list; this value is then used to detect this setup
        # in the build function where the graph is assembled.
        if x_i is None:
          x_i = np.array(psi[i]) * affine_warp_constraints[row_i][i] * s
        else:
          x_i += np.array(psi[i]) * affine_warp_constraints[row_i][i] * s
      features.append(x_i)
    features += offsets
    return features
  def _build(self, inputs):
    """Assembles the module network and adds it to the graph.
    The internal computation graph is assembled according to the set of
    constraints provided at construction time.
    Args:
      inputs: Tensor containing a batch of transformation parameters.
    Returns:
      A batch of warped grids.
    Raises:
      Error: If the input tensor size is not consistent with the constraints
        passed at construction time.
    """
    input_shape = tf.shape(inputs)
    input_dtype = inputs.dtype.as_numpy_dtype
    batch_size = tf.expand_dims(input_shape[0], 0)
    number_of_params = inputs.get_shape()[1]
    if number_of_params != self._constraints.num_free_params:
      raise base.Error('Input size is not consistent with constraint '
                       'definition: {} parameters expected, {} provided.'
                       .format(self._constraints.num_free_params,
                               number_of_params))
    # _psi holds, per output dimension: free-coordinate rows, precomputed
    # constrained offsets, and global decentering offsets (3 groups).
    num_output_dimensions = len(self._psi) // 3
    def get_input_slice(start, size):
      """Extracts a subset of columns from the input 2D Tensor."""
      return basic.SliceByDim([1], [start], [size])(inputs)
    warped_grid = []
    var_index_offset = 0
    number_of_points = np.prod(self._output_shape)
    for i in xrange(num_output_dimensions):
      if self._psi[i] is not None:
        # The i-th output dimension is not fully specified by the constraints,
        # the graph is setup to perform matrix multiplication in batch mode.
        grid_coord = self._psi[i].astype(input_dtype)
        num_active_vars = self._psi[i].shape[0]
        active_vars = get_input_slice(var_index_offset, num_active_vars)
        warped_coord = tf.matmul(active_vars, grid_coord)
        warped_coord = tf.expand_dims(warped_coord, 1)
        var_index_offset += num_active_vars
        offset = self._psi[num_output_dimensions + i]
        if offset is not None:
          offset = offset.astype(input_dtype)
          # Some entries in the i-th row of the affine matrix were constrained
          # and the corresponding matrix multiplications have been precomputed.
          tiling_params = tf.concat(
              [
                  batch_size, tf.constant(
                      1, shape=(1,)), tf.ones_like(offset.shape)
              ],
              0)
          offset = offset.reshape((1, 1) + offset.shape)
          warped_coord += tf.tile(offset, tiling_params)
      else:
        # The i-th output dimension is fully specified by the constraints, and
        # the corresponding matrix multiplications have been precomputed.
        warped_coord = self._psi[num_output_dimensions + i].astype(input_dtype)
        tiling_params = tf.concat(
            [
                batch_size, tf.constant(
                    1, shape=(1,)), tf.ones_like(warped_coord.shape)
            ],
            0)
        warped_coord = warped_coord.reshape((1, 1) + warped_coord.shape)
        warped_coord = tf.tile(warped_coord, tiling_params)
        warped_coord += self._psi[i + 2 * num_output_dimensions]
      # Need to help TF figuring out shape inference since tiling information
      # is held in Tensors which are not known until run time.
      warped_coord.set_shape([None, 1, number_of_points])
      warped_grid.append(warped_coord)
    # Reshape all the warped coordinates tensors to match the specified output
    # shape and concatenate into a single matrix.
    grid_shape = self._output_shape + (1,)
    warped_grid = [basic.BatchReshape(grid_shape)(grid) for grid in warped_grid]
    return tf.concat(warped_grid, len(grid_shape))
  @property
  def constraints(self):
    """Returns the `AffineWarpConstraints` object in use."""
    return self._constraints
  def inverse(self, name=None):
    """Returns a `sonnet` module to compute inverse affine transforms.
    The function first assembles a network that given the constraints of the
    current AffineGridWarper and a set of input parameters, retrieves the
    coefficients of the corresponding inverse affine transform, then feeds its
    output into a new AffineGridWarper setup to correctly warp the `output`
    space into the `source` space.
    Args:
      name: Name of module implementing the inverse grid transformation.
    Returns:
      A `sonnet` module performing the inverse affine transform of a reference
      grid of points via an AffineGridWarper module.
    Raises:
      tf.errors.UnimplementedError: If the function is called on a non 2D
        instance of AffineGridWarper.
    """
    if self._num_coeff != 6:
      raise tf.errors.UnimplementedError('AffineGridWarper currently supports'
                                         'inversion only for the 2D case.')
    def _affine_grid_warper_inverse(inputs):
      """Assembles network to compute inverse affine transformation.
      Each `inputs` row potentially contains [a, b, tx, c, d, ty]
      corresponding to an affine matrix:
      A = [a, b, tx],
          [c, d, ty]
      We want to generate a tensor containing the coefficients of the
      corresponding inverse affine transformation in a constraints-aware
      fashion.
      Calling M:
      M = [a, b]
          [c, d]
      the affine matrix for the inverse transform is:
      A_in = [M^(-1), M^-1 * [-tx, -tx]^T]
      where
      M^(-1) = (ad - bc)^(-1) * [ d, -b]
                                [-c,  a]
      Args:
        inputs: Tensor containing a batch of transformation parameters.
      Returns:
        A tensorflow graph performing the inverse affine transformation
        parametrized by the input coefficients.
      """
      batch_size = tf.expand_dims(tf.shape(inputs)[0], 0)
      constant_shape = tf.concat([batch_size, tf.convert_to_tensor((1,))], 0)
      index = iter(range(6))
      def get_variable(constraint):
        # Constrained entries become batch-broadcast constants; free entries
        # consume the next input column (in row-major affine-matrix order).
        if constraint is None:
          i = next(index)
          return inputs[:, i:i+1]
        else:
          return tf.fill(constant_shape, tf.constant(constraint,
                                                     dtype=inputs.dtype))
      constraints = chain.from_iterable(self.constraints)
      a, b, tx, c, d, ty = (get_variable(constr) for constr in constraints)
      # Closed-form inversion of the 2x2 linear part via its determinant.
      det = a * d - b * c
      a_inv = d / det
      b_inv = -b / det
      c_inv = -c / det
      d_inv = a / det
      m_inv = basic.BatchReshape(
          [2, 2])(tf.concat([a_inv, b_inv, c_inv, d_inv], 1))
      txy = tf.expand_dims(tf.concat([tx, ty], 1), 2)
      txy_inv = basic.BatchFlatten()(tf.matmul(m_inv, txy))
      tx_inv = txy_inv[:, 0:1]
      ty_inv = txy_inv[:, 1:2]
      inverse_gw_inputs = tf.concat(
          [a_inv, b_inv, -tx_inv, c_inv, d_inv, -ty_inv], 1)
      agw = AffineGridWarper(self.output_shape,
                             self.source_shape)
      return agw(inverse_gw_inputs)  # pylint: disable=not-callable
    if name is None:
      name = self.module_name + '_inverse'
    return base.Module(_affine_grid_warper_inverse, name=name)
class AffineWarpConstraints(object):
  """Affine warp constraints class.

  `AffineWarpConstraints` allow for very succinct definitions of constraints on
  the values of entries in affine transform matrices.
  """

  def __init__(self, constraints=((None,) * 3,) * 2):
    """Creates a constraint definition for an affine transformation.

    Args:
      constraints: A doubly-nested iterable of shape `[N, N+1]` defining
        constraints on the entries of a matrix that represents an affine
        transformation in `N` dimensions. A numeric value bakes in a constraint
        on the corresponding entry in the transformation matrix, whereas `None`
        implies that the corresponding entry will be specified at run time.

    Raises:
      TypeError: If `constraints` is not a nested iterable.
      ValueError: If the double iterable `constraints` has inconsistent
        dimensions.
    """
    try:
      self._constraints = tuple(tuple(x) for x in constraints)
    except TypeError:
      raise TypeError('constraints must be a nested iterable.')
    # Number of rows
    self._num_dim = len(self._constraints)
    expected_num_cols = self._num_dim + 1
    if any(len(x) != expected_num_cols for x in self._constraints):
      raise ValueError('The input list must define a Nx(N+1) matrix of '
                       'contraints.')

  def _calc_mask(self):
    """Computes a boolean mask from the user defined constraints."""
    return tuple(tuple(x is None for x in row) for row in self._constraints)

  def _calc_num_free_params(self):
    """Computes number of non constrained parameters."""
    return sum(row.count(None) for row in self._constraints)

  @property
  def num_free_params(self):
    """Number of entries that must be supplied at run time."""
    return self._calc_num_free_params()

  @property
  def mask(self):
    """Boolean mask, True where an entry is unconstrained (i.e. `None`)."""
    return self._calc_mask()

  @property
  def constraints(self):
    """The raw `[N, N+1]` tuple-of-tuples of constraint values."""
    return self._constraints

  @property
  def num_dim(self):
    """Dimensionality `N` of the affine transformation."""
    return self._num_dim

  def __getitem__(self, i):
    """Returns the list of constraints for the i-th row of the affine matrix."""
    return self._constraints[i]

  def _combine(self, x, y):
    """Combines two constraints, raising an error if they are not compatible."""
    # Only `None` means "unconstrained". The previous implementation fell back
    # to `x or y`, which silently discarded falsy constraint values such as 0
    # (e.g. the zeros baked in by `no_shear_2d`) when merged with `None`.
    if x is None:
      return y
    if y is None:
      return x
    if x != y:
      raise ValueError('Incompatible set of constraints provided.')
    return x

  def __and__(self, rhs):
    """Combines two sets of constraints into a coherent single set."""
    return self.combine_with(rhs)

  def combine_with(self, additional_constraints):
    """Combines two sets of constraints into a coherent single set.

    Args:
      additional_constraints: Either an `AffineWarpConstraints` instance or a
        nested iterable accepted by the constructor.

    Returns:
      A new `AffineWarpConstraints` merging both sets entry by entry.

    Raises:
      ValueError: If any pair of corresponding numeric entries disagrees.
    """
    x = additional_constraints
    if not isinstance(additional_constraints, AffineWarpConstraints):
      x = AffineWarpConstraints(additional_constraints)
    new_constraints = []
    for left, right in zip(self._constraints, x.constraints):
      new_constraints.append([self._combine(a, b) for a, b in zip(left, right)])
    return AffineWarpConstraints(new_constraints)

  # Collection of utilities to initialize an AffineGridWarper in 2D and 3D.
  @classmethod
  def no_constraints(cls, num_dim=2):
    """Empty set of constraints for a num_dim-ensional affine transform."""
    return cls(((None,) * (num_dim + 1),) * num_dim)

  @classmethod
  def translation_2d(cls, x=None, y=None):
    """Assign constraints on translation components of affine transform in 2d."""
    return cls([[None, None, x],
                [None, None, y]])

  @classmethod
  def translation_3d(cls, x=None, y=None, z=None):
    """Assign constraints on translation components of affine transform in 3d."""
    return cls([[None, None, None, x],
                [None, None, None, y],
                [None, None, None, z]])

  @classmethod
  def scale_2d(cls, x=None, y=None):
    """Assigns constraints on scaling components of affine transform in 2d."""
    return cls([[x, None, None],
                [None, y, None]])

  @classmethod
  def scale_3d(cls, x=None, y=None, z=None):
    """Assigns constraints on scaling components of affine transform in 3d."""
    return cls([[x, None, None, None],
                [None, y, None, None],
                [None, None, z, None]])

  @classmethod
  def shear_2d(cls, x=None, y=None):
    """Assigns constraints on shear components of affine transform in 2d."""
    return cls([[None, x, None],
                [y, None, None]])

  @classmethod
  def no_shear_2d(cls):
    """Fixes both 2d shear components to zero."""
    return cls.shear_2d(x=0, y=0)

  @classmethod
  def no_shear_3d(cls):
    """Assigns constraints on shear components of affine transform in 3d."""
    return cls([[None, 0, 0, None],
                [0, None, 0, None],
                [0, 0, None, None]])
| mumuwoyou/vnpy-master | sonnet/python/modules/spatial_transformer.py | Python | mit | 23,304 | 0.006222 |
from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('rerun')
class MaxReRuns(object):
    """Force a task to rerun for debugging purposes."""

    # Accept either a boolean or an integer rerun limit.
    schema = {'type': ['boolean', 'integer']}

    def on_task_start(self, task, config):
        """Set the task's rerun limit from the configured value."""
        task.max_reruns = int(config)

    def on_task_input(self, task, config):
        """Unconditionally schedule another rerun of the task."""
        task.rerun()
@event('plugin.register')
def register_plugin():
    # Registers under the name 'rerun' with plugin API version 2. The
    # debug=True flag presumably marks this as a debugging-only plugin --
    # confirm against flexget's plugin.register documentation.
    plugin.register(MaxReRuns, 'rerun', api_ver=2, debug=True)
| qvazzler/Flexget | flexget/plugins/operate/rerun.py | Python | mit | 643 | 0 |
# -*- coding: utf-8 -*-
# author : kian-meng, ang
#
# input : a dna string at most 100 nt (NucleoTides)
# output : 20 12 17 21
#
# $ python counting_nucleotides.py
# input : a dna string at most 100 nt (NucleoTides)
# output : 20 12 17 21
f = open("rosalind_dna.txt", "r")
dna_string = f.read()
# method 1: using count()
print "%d %d %d %d" % (
dna_string.count('A'), dna_string.count('C'),
dna_string.count('G'), dna_string.count('T')
)
# method 2: using list
for char in ['A', 'C', 'G', 'T']:
print dna_string.count(char),
print ""
# method 3: generate the unique characters
# we need to strip the newline \n and sort the result.
# @see http://stackoverflow.com/a/13902829
nucleotides = ''.join(sorted(set(dna_string.strip())))
for char in nucleotides:
print dna_string.count(char),
print ""
# method 4: using collections
# @see http://codereview.stackexchange.com/a/27784
# @see https://docs.python.org/2/library/collections.html#counter-objects
# @see http://stackoverflow.com/a/17930886
from collections import Counter
nucleotides_count = sorted(Counter(dna_string.strip()).items())
for _, count in nucleotides_count:
print count,
print ""
# method 5: using collections but different approach
counter = Counter()
for char in ''.join(dna_string.strip()):
counter[char] += 1
for _, count in sorted(counter.items()):
print count,
| kianmeng/codekata | rosalind/001_dna_counting_nucleotides/counting_nucleotides.py | Python | gpl-3.0 | 1,370 | 0.00073 |
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.tasks import dashboard
class History(horizon.Panel):
    """Horizon panel registered on the Tasks dashboard."""
    name = _("History")  # user-visible (translatable) panel title
    slug = "history"  # URL component identifying the panel
# Attach the panel to the Tasks dashboard so horizon can route to it.
dashboard.Tasks.register(History)
| icloudrnd/automation_tools | openstack_dashboard/dashboards/tasks/history/panel.py | Python | apache-2.0 | 243 | 0.004115 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds a ``time`` text column to the ``file`` model.
    dependencies = [
        ('app', '0009_file_method'),
    ]
    operations = [
        migrations.AddField(
            model_name='file',
            name='time',
            # default=0 only back-fills existing rows during the migration;
            # preserve_default=False drops it from the model state afterwards.
            # NOTE(review): an integer default on a TextField is odd -- confirm
            # the intended column type.
            field=models.TextField(default=0),
            preserve_default=False,
        ),
    ]
| gregoriorobles/drPencilcode | app/migrations/0010_file_time.py | Python | agpl-3.0 | 417 | 0 |
from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from django.core.urlresolvers import RegexURLResolver
from django.http import Http404
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.url_routing import RouteResult
_creation_counter = 0
def route(pattern, name=None):
    """Decorator marking a page method as a routable sub-URL view.

    Each decorated view gets a Django URL pattern appended to its
    ``_routablepage_routes`` list, tagged with a global creation counter so
    that declaration order can be recovered later.
    """
    def decorator(view_func):
        global _creation_counter
        _creation_counter += 1
        # First decoration on this function: start its route list.
        if not hasattr(view_func, '_routablepage_routes'):
            view_func._routablepage_routes = []
        route_entry = (
            url(pattern, view_func, name=(name or view_func.__name__)),
            _creation_counter,
        )
        view_func._routablepage_routes.append(route_entry)
        return view_func
    return decorator
class RoutablePageMixin(object):
    """
    This class can be mixed in to a Page model, allowing extra routes to be
    added to it.
    """
    @classmethod
    def get_subpage_urls(cls):
        # Collect every @route-decorated view defined on (or inherited by)
        # the class, ordered by the decorator's creation counter so routes
        # match their declaration order.
        routes = []
        for attr in dir(cls):
            val = getattr(cls, attr, None)
            if hasattr(val, '_routablepage_routes'):
                routes.extend(val._routablepage_routes)
        return tuple([
            route[0]
            for route in sorted(routes, key=lambda route: route[1])
        ])
    @classmethod
    def get_resolver(cls):
        # Cache the resolver on the class; checking cls.__dict__ (rather than
        # hasattr) ensures each subclass builds its own instead of reusing a
        # parent's.
        if '_routablepage_urlresolver' not in cls.__dict__:
            subpage_urls = cls.get_subpage_urls()
            cls._routablepage_urlresolver = RegexURLResolver(r'^/', subpage_urls)
        return cls._routablepage_urlresolver
    def reverse_subpage(self, name, args=None, kwargs=None):
        """
        This method takes a route name/arguments and returns a URL path.
        """
        args = args or []
        kwargs = kwargs or {}
        return self.get_resolver().reverse(name, *args, **kwargs)
    def resolve_subpage(self, path):
        """
        This method takes a URL path and finds the view to call.
        Raises Http404 (propagated from the resolver) when no route matches.
        """
        view, args, kwargs = self.get_resolver().resolve(path)
        # Bind the method
        view = view.__get__(self, type(self))
        return view, args, kwargs
    def route(self, request, path_components):
        """
        This hooks the subpage URLs into Wagtail's routing.
        """
        # Only live pages serve sub-routes; on no match fall back to the
        # default Page routing.
        if self.live:
            try:
                path = '/'
                if path_components:
                    path += '/'.join(path_components) + '/'
                view, args, kwargs = self.resolve_subpage(path)
                return RouteResult(self, args=(view, args, kwargs))
            except Http404:
                pass
        return super(RoutablePageMixin, self).route(request, path_components)
    def serve(self, request, view=None, args=None, kwargs=None):
        # When routed via `route`, the matched view and its arguments are
        # passed in; otherwise defer to the normal Page.serve.
        if args is None:
            args = []
        if kwargs is None:
            kwargs = {}
        if view is None:
            return super(RoutablePageMixin, self).serve(request, *args, **kwargs)
        return view(request, *args, **kwargs)
    def serve_preview(self, request, mode_name):
        # Previews always render the root ('/') route.
        view, args, kwargs = self.resolve_subpage('/')
        request.is_preview = True
        return view(request, *args, **kwargs)
class RoutablePage(RoutablePageMixin, Page):
    """
    This class extends Page by adding methods which allows extra routes to be
    added to it.
    """
    class Meta:
        # Abstract: no database table is created for this convenience base.
        abstract = True
| Toshakins/wagtail | wagtail/contrib/wagtailroutablepage/models.py | Python | bsd-3-clause | 3,477 | 0.000575 |
#
# Copyright 2017 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
"""
This module allows to store and retrieve key/value pairs into the etree
representation of a libvirt domain XML. Each set of key/value pairs will be
stored under one first-level child of the metadata. Example:
<metadata>
<group1>
<a>1</a>
<b>2</b>
</group1>
<group2>
<c>3</c>
<d>4</d>
</group2>
<metadata>
The key/value pairs must comply with those requirements:
- keys must be python basestrings
- values must be one of: basestring, int, float
- containers are not supported values; the metadata
namespace is flat, and you cannot nest objects.
- partial updates are forbidden. You must overwrite all the key/value
pairs in a given set (hereafter referred as 'group') at the same time.
The flow is:
1. read the metadata using this module
2. update the data you need to work with
3. send back the metadata using this module
"""
from contextlib import contextmanager
import xml.etree.ElementTree as ET
import libvirt
import six
from vdsm.common import errors
from vdsm.virt import vmxml
from vdsm.virt import xmlconstants
_DEVICE = 'device'
class Error(errors.Base):
    """
    Generic metadata error; base class for all errors raised by this module.
    """
class UnsupportedType(Error):
    """
    Raised when trying to store a value of an unsupported python type.
    Supported python types are:
    * ints
    * floats
    * string
    """
class MissingDevice(Error):
    """
    Failed to uniquely identify one device using the given attributes
    (more than one device matched them).
    """
class Metadata(object):
"""
Use this class to load or dump a group (see the module docstring) from
or to a metadata element.
Optionally handles the XML namespaces. You will need the namespace
handling when building XML for the VM startup; when updating the
metadata, libvirt will take care of that.
See also the docstring of the `create` function.
"""
def __init__(self, namespace=None, namespace_uri=None):
"""
:param namespace: namespace to use
:type namespace: text string
:param namespace_uri: URI of the namespace to use
:type namespace_uri: text string
"""
self._namespace = namespace
self._namespace_uri = namespace_uri
self._prefix = None
if namespace is not None:
ET.register_namespace(namespace, namespace_uri)
self._prefix = '{%s}' % self._namespace_uri
def load(self, elem):
"""
Load the content of the given metadata element `elem`
into a python object, trying to recover the correct types.
To recover the types, this function relies on the element attributes
added by the `dump` method. Without them, the function will
still load the content, but everything will be a string.
Example:
<example>
<a>some value</a>
<b type="int">1</b>
</example>
elem = vmxml.parse_xml(...)
md = Metadata()
md.load(elem) -> {'a': 'some value', 'b': 1}
:param elem: root of the ElementTree to load
:type elem: ElementTree.Element
:returns: content of the group
:rtype: dict of key/value pairs. See the module docstring for types
"""
values = {}
for child in elem:
key, val = _elem_to_keyvalue(child)
values[self._strip_ns(key)] = val
return values
def dump(self, name, **kwargs):
"""
Dump the given arguments into the `name` metadata element.
This function transparently adds the type hints as element attributes,
so `load` can restore them.
Example:
md = Metadata()
md.dump('test', bar=42) -> elem
vmxml.format_xml(elem) ->
<test>
<bar type="int">42</bar>
</test>
:param name: group to put in the metadata
:type name: text string
:param namespace: namespace to use
:type namespace: text string
:param namespace_uri: URI of the namespace to use
:type namespace_uri: text string
:return: the corresponding element
:rtype: ElementTree.Element
kwargs: stored as subelements
"""
elem = ET.Element(self._add_ns(name))
for key, value in kwargs.items():
_keyvalue_to_elem(self._add_ns(key), value, elem)
return elem
def _add_ns(self, tag):
"""
Decorate the given tag with the namespace, if used
"""
return (self._prefix or '') + tag
def _strip_ns(self, tag):
"""
Remove the namespace from the given tag
"""
return tag.replace(self._prefix, '') if self._prefix else tag
def create(name, namespace, namespace_uri, **kwargs):
    """
    Build one new, namespaced `name` metadata element holding the given
    keyword arguments. Use this function to initialize one metadata
    element at XML creation time.

    Example:
    metadata.create('vm', 'ovirt-vm', 'http://ovirt.org/vm/1.0',
                    version=4.2) -> elem
    vmxml.format_xml(elem) ->
    <ovirt-vm:vm xmlns:ovirt-vm="http://ovirt.org/vm/1.0">
      <ovirt-vm:version type="float">4.2</ovirt-vm:version>
    </ovirt-vm:vm>

    :param name: group to put in the metadata
    :type name: text string
    :param namespace: namespace to use
    :type namespace: text string
    :param namespace_uri: URI of the namespace to use
    :type namespace_uri: text string
    :return: the corresponding element
    :rtype: ElementTree.Element

    kwargs: stored as subelements
    """
    # libvirt is not involved at XML creation time, so the namespace must
    # be registered explicitly here.
    return Metadata(namespace, namespace_uri).dump(name, **kwargs)
def from_xml(xml_str):
    """
    Helper function to parse the libvirt domain metadata used by oVirt
    from one domain XML. Useful in the VM creation flow, when the
    libvirt Domain is not yet started.
    Example:
    given this XML:

    test_xml ->
    <?xml version="1.0" encoding="utf-8"?>
    <domain type="kvm" xmlns:ovirt-vm="http://ovirt.org/vm/1.0">
      <metadata>
        <ovirt-vm:vm>
          <ovirt-vm:version type="float">4.2</ovirt-vm:version>
          <ovirt-vm:custom>
            <ovirt-vm:foo>bar</ovirt-vm:foo>
          </ovirt-vm:custom>
        </ovirt-vm:vm>
      </metadata>
    </domain>

    metadata.from_xml(test_xml) ->
    {
      'version': 4.2,
      'custom':
      {
        'foo': 'bar'
      },
    }

    :param xml_str: domain XML to parse
    :type name: text string
    :return: the parsed metadata
    :rtype: Python dict, whose keys are always strings.
            No nested objects are allowed, with the only exception of
            the special 'custom' key, whose value will be another
            Python dictionary whose keys are strings, with no
            further nesting allowed.
    """
    metadata_obj = Metadata(
        xmlconstants.METADATA_VM_VDSM_PREFIX,
        xmlconstants.METADATA_VM_VDSM_URI
    )
    root = vmxml.parse_xml(xml_str)
    md_elem = root.find(
        './metadata/{%s}%s' % (
            xmlconstants.METADATA_VM_VDSM_URI,
            xmlconstants.METADATA_VM_VDSM_ELEMENT
        )
    )
    if md_elem is None:
        # the domain carries no vdsm metadata at all
        return {}
    md_data = metadata_obj.load(md_elem)
    custom_elem = root.find(
        './metadata/{%s}%s/{%s}custom' % (
            xmlconstants.METADATA_VM_VDSM_URI,
            xmlconstants.METADATA_VM_VDSM_ELEMENT,
            xmlconstants.METADATA_VM_VDSM_URI,
        )
    )
    if custom_elem is not None:
        # 'custom' is the only allowed nesting: load it as a sub-dict
        md_data['custom'] = metadata_obj.load(custom_elem)
    return md_data
@contextmanager
def domain(dom, name, namespace, namespace_uri):
    """
    Helper context manager to simplify the get the instance of Metadata
    from a libvirt Domain object.
    Example:

    let's start with
    dom.metadata() -> <vm/>

    let's run this code
    with metadata.domain(dom, 'vm', 'ovirt-vm',
                         'http://ovirt.org/vm/1.0')
    ) as vm:
        vm['my_awesome_key'] = some_awesome_value()  # returns 42

    now we will have
    dom.metadata() ->
    <vm>
      <my_awesome_key type="int">42</my_awesome_key>
    </vm>

    but if you look in the domain XML (e.g. virsh dumpxml) you will
    have, courtesy of libvirt:

    <metadata>
      <ovirt-vm:vm xmlns:ovirt-vm="http://ovirt.org/vm/1.0">
        <ovirt-vm:my_awesome_key type="int">42</ovirt-vm:my_awesome_key>
      </ovirt-vm:vm>
    </metadata>

    :param dom: domain to access
    :type dom: libvirt.Domain
    :param name: metadata group to access
    :type name: text string
    :param namespace: metadata namespace to use
    :type namespace: text string
    :param namespace_uri: metadata namespace URI to use
    :type namespace_uri: text string
    """
    with _metadata_xml(dom, name, namespace, namespace_uri) as md:
        # we DO NOT want to handle namespaces ourselves; libvirt does
        # it automatically for us.
        metadata_obj = Metadata()
        content = metadata_obj.load(md[0])
        yield content
        # reassigning md[0] hands the updated element back to
        # _metadata_xml, which writes it to the domain on exit
        md[0] = metadata_obj.dump(name, **content)
@contextmanager
def _metadata_xml(dom, tag, namespace, namespace_uri):
    """
    Yield a single-element list holding the parsed metadata element of
    `dom` (an empty <tag/> element when the domain has no metadata yet).
    On exit, the (possibly replaced) element is serialized and written
    back to the domain via dom.setMetadata().
    """
    md_xml = "<{tag}/>".format(tag=tag)
    try:
        md_xml = dom.metadata(libvirt.VIR_DOMAIN_METADATA_ELEMENT,
                              namespace_uri,
                              0)
    except libvirt.libvirtError as e:
        # a domain without metadata is fine: start from the empty element
        if e.get_error_code() != libvirt.VIR_ERR_NO_DOMAIN_METADATA:
            raise
    md_elem = [vmxml.parse_xml(md_xml)]
    # we do this because we need to receive back the updated element
    yield md_elem
    dom.setMetadata(libvirt.VIR_DOMAIN_METADATA_ELEMENT,
                    vmxml.format_xml(md_elem[0]),
                    namespace,
                    namespace_uri,
                    0)
def _find_device(vm_elem, attrs, namespace_uri=None):
"""
Find one device in the vm metadata, matching all the given attributes.
This function expect to work with a XML structure like:
<vm>
<device id="dev0">
<foo>bar</foo>
</device>
<device addr="0xF00" class="pci">
<number type="int">42</number>
</device>
</vm>
All the attributes given in `attrs` must match.
If the device element has more attributes, they are ignored.
Return None if no match is found, but raise MissingDevice if no device
is uniquely identified using the given `attrs`.
:param vm_elem: root of the vm metadata including the device metadata
:type vm_elem: ElementTree.Element
:param attrs: attributes to match to identify the device
:type attrs: dict, each item is string both for key and value
:param namespace_uri: optional URI of the namespace on which the `device`
element resides. Use 'None' to disable the namespace support.
:type namespace_uri: text string
:return: the device element, or None if no device data found
:rtype: ElementTree.Element, or None
"""
xpath_attrs = []
for key, value in attrs.items():
xpath_attrs.append(
'[@{key}="{value}"]'.format(key=key, value=value)
)
prefix = '' if namespace_uri is None else '{%s}' % namespace_uri
devices = vm_elem.findall(
'./{}device{}'.format(prefix, ''.join(xpath_attrs))
)
if len(devices) > 1:
raise MissingDevice()
if not devices:
return None
return devices[0]
@contextmanager
def device(dom, **kwargs):
    """
    Helper context manager to get the metadata of a given device
    from a libvirt Domain object.
    Please make sure to check the IMPORTANT WARNING below.
    Example:

    let's start with
    dom.metadata() ->
    <vm>
      <device id="dev0">
        <foo>bar</foo>
      </device>
      <device id="dev1">
        <number type="int">42</number>
      </device>
    </vm>

    let's run this code
    with metadata.device(dom, 'dev0') as dev:
        buzz = do_some_work(dev['foo'])
        dev['fizz'] = buzz

    now we will have
    dom.metadata() ->
    <vm>
      <device id="dev0">
        <foo>bar</foo>
        <fizz>sometimes_buzz</fizz>
      </device>
      <device id="dev1">
        <number type="int">42</number>
      </device>
    </vm>

    *** IMPORTANT WARNING ***
    This context manager will provide the client access only to the metadata
    of one device. Once it is done, it will update only that device, leaving
    metadata of the other devices, or the VM, unchanged. But under the hood,
    this context manager will *rewrite all the VM metadata*.
    You will need to make sure *every* usage of metadata (either per-vm or
    per-device) on the same libvirt.Domain is protected by one exclusive lock.
    Synchronization is intentionally not done in this module, it should be
    done at the same layer who owns the libvirt.Domain object.

    :param dom: domain to access
    :type dom: libvirt.Domain

    kwargs: attributes to match to identify the device; values are expected
    to be string.
    """
    with _metadata_xml(
        dom,
        xmlconstants.METADATA_VM_VDSM_ELEMENT,
        xmlconstants.METADATA_VM_VDSM_PREFIX,
        xmlconstants.METADATA_VM_VDSM_URI
    ) as md:
        vm_elem = md[0]
        attrs = kwargs
        dev_elem = _find_device(vm_elem, attrs)
        if dev_elem is not None:
            # keep ALL the attributes of the existing element, not only
            # the ones the caller used to look it up
            attrs = dev_elem.attrib.copy()
            dev_found = True
        else:
            # no metadata for this device yet: start from an empty element
            dev_found = False
            dev_elem = ET.Element(_DEVICE, **attrs)
        metadata_obj = Metadata()
        content = metadata_obj.load(dev_elem)
        yield content
        # we want to completely replace the device metadata - not update
        # the existing one - to not leave garbage behind
        if dev_found:
            vmxml.remove_child(vm_elem, dev_elem)
        dev_elem = metadata_obj.dump(_DEVICE, **content)
        dev_elem.attrib.update(attrs)
        vmxml.append_child(vm_elem, etree_child=dev_elem)
        # hand the updated tree back to _metadata_xml for writing
        md[0] = vm_elem
def device_from_xml_tree(root, **kwargs):
    """
    Helper function to get the metadata of a given device
    from one DOM subtree, obtained from the parsed XML of a libvirt Domain.
    The DOM subtree is expected to have its root at the 'metadata' element
    of the libvirt domain,
    Example:

    Let's start with this domain_xml:
    <?xml version="1.0" encoding="utf-8"?>
    <domain type="kvm" xmlns:ovirt-vm="http://ovirt.org/vm/1.0">
      <metadata>
        <ovirt-vm:vm>
          <ovirt-vm:device id="mydev">
            <ovirt-vm:foo>bar</ovirt-vm:foo>
          </ovirt-vm:device>
        </ovirt-vm:vm>
      </metadata>
    </domain>

    Let's run this code:
    dom = vmxml.parse_xml(domain_xml)
    md_elem = vmxml.find_first(dom, 'metadata')

    Now we will have:
    metadata.device_from_xml_tree(md_elem, id='mydev') ->
    { 'foo': 'bar' }

    :param root: DOM element, corresponding to the 'metadata' element of the
        Domain XML.
    :type: DOM element.
    :param kwargs: attributes to match to identify the device;
        the values are expected to be strings, much like the
        `device` context manager
    :return: the parsed metadata.
    :rtype: Python dict, whose keys are always strings.
            No nested objects are allowed.
    """
    md_elem = root.find(
        './{%s}%s' % (
            xmlconstants.METADATA_VM_VDSM_URI,
            xmlconstants.METADATA_VM_VDSM_ELEMENT
        )
    )
    if md_elem is None:
        # no vdsm metadata at all under this 'metadata' element
        return {}
    dev_elem = _find_device(
        md_elem, kwargs, xmlconstants.METADATA_VM_VDSM_URI
    )
    if dev_elem is None:
        # no device matched the given attributes
        return {}
    metadata_obj = Metadata(
        xmlconstants.METADATA_VM_VDSM_PREFIX,
        xmlconstants.METADATA_VM_VDSM_URI
    )
    return metadata_obj.load(dev_elem)
def _elem_to_keyvalue(elem):
key = elem.tag
value = elem.text
data_type = elem.attrib.get('type')
if data_type is not None:
if data_type == 'int':
value = int(value)
elif data_type == 'float':
value = float(value)
# elif data_type == 'str': do nothing
return key, value
def _keyvalue_to_elem(key, value, elem):
    """
    Append a subelement `key` to `elem` holding `value`, tagging ints and
    floats with a 'type' attribute so _elem_to_keyvalue can restore them.
    Raises UnsupportedType for any other value type (containers etc.).
    Returns the new subelement.
    """
    subelem = ET.SubElement(elem, key)
    # NOTE(review): bool is a subclass of int, so True would be stored as
    # type="int" with text "True", and int("True") would fail on reload -
    # confirm boolean values never reach this function.
    if isinstance(value, int):
        subelem.attrib['type'] = 'int'
    elif isinstance(value, float):
        subelem.attrib['type'] = 'float'
    elif isinstance(value, six.string_types):
        pass
    else:
        raise UnsupportedType(value)
    subelem.text = str(value)
    return subelem
| EdDev/vdsm | lib/vdsm/virt/metadata.py | Python | gpl-2.0 | 17,301 | 0 |
"""
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urllib
import urlparse
import time
import kodi
import log_utils
import dom_parser
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import VIDEO_TYPES
from salts_lib.constants import QUALITIES
from salts_lib.constants import Q_ORDER
from salts_lib.constants import XHR
import scraper
BASE_URL = 'http://xmovies8.tv'
PLAYER_URL = '/ajax/movie/load_player_v2'
EPISODES_URL = '/ajax/movie/load_episodes'
class Scraper(scraper.Scraper):
    """
    SALTS scraper for xmovies8.tv.

    Streams are resolved through the site's ajax endpoints: the page lists
    players (link id + height pairs), and each player is queried through
    PLAYER_URL for the actual stream link.
    """
    base_url = BASE_URL
    def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
        self.timeout = timeout
        # the base url is user-configurable per scraper
        self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
    @classmethod
    def provides(cls):
        # video types this scraper can resolve
        return frozenset([VIDEO_TYPES.MOVIE, VIDEO_TYPES.SEASON, VIDEO_TYPES.EPISODE])
    @classmethod
    def get_name(cls):
        return 'xmovies8.v2'
    def get_sources(self, video):
        """
        Return a list of hoster dicts for `video`, one per resolved stream.
        """
        source_url = self.get_url(video)
        hosters = []
        sources = {}
        if source_url and source_url != FORCE_NO_MATCH:
            page_url = urlparse.urljoin(self.base_url, source_url)
            html = self._http_get(page_url, cache_limit=.5)
            html = self.__get_players(html, page_url)
            # unique (link_id, height) pairs for every player on the page
            players = list(set(re.findall("load_player\(\s*'([^']+)'\s*,\s*'?(\d+)\s*'?", html)))
            player_url = urlparse.urljoin(self.base_url, PLAYER_URL)
            for link_id, height in players:
                params = {'id': link_id, 'quality': height, '_': int(time.time() * 1000)}
                player_url2 = player_url + '?' + urllib.urlencode(params)
                headers = {'Referer': page_url, 'Accept-Encoding': 'gzip, deflate', 'Server': 'cloudflare-nginx', 'Accept-Formating': 'application/json, text/javascript'}
                headers.update(XHR)
                html = self._http_get(player_url2, headers=headers, cache_limit=0)
                js_data = scraper_utils.parse_json(html, player_url)
                if 'link' in js_data and js_data['link']:
                    link_url = js_data['link']
                    if 'player_v2.php' in link_url:
                        # HEAD request with redirects disabled: the real
                        # stream url comes back as the redirect target
                        headers = {'Referer': page_url}
                        html = self._http_get(link_url, headers=headers, allow_redirect=False, method='HEAD', cache_limit=.25)
                        if html.startswith('http'):
                            if self._get_direct_hostname(html) == 'gvideo':
                                quality = scraper_utils.gv_get_quality(html)
                                sources[html] = {'quality': quality, 'direct': True}
                            else:
                                if height != '0':
                                    quality = scraper_utils.height_get_quality(height)
                                else:
                                    quality = QUALITIES.HIGH
                                sources[html] = {'quality': quality, 'direct': False}
                            # without a resolver configured, stop after the
                            # first 720p-or-better source to save requests
                            if not kodi.get_setting('scraper_url') and Q_ORDER[quality] >= Q_ORDER[QUALITIES.HD720]: break
            for source in sources:
                direct = sources[source]['direct']
                quality = sources[source]['quality']
                if direct:
                    host = self._get_direct_hostname(source)
                else:
                    host = urlparse.urlparse(source).hostname
                stream_url = source + '|User-Agent=%s' % (scraper_utils.get_ua())
                hoster = {'multi-part': False, 'host': host, 'class': self, 'quality': quality, 'views': None, 'rating': None, 'url': stream_url, 'direct': direct}
                hosters.append(hoster)
        return hosters
    def __get_players(self, html, page_url):
        """
        Ask the ajax episodes endpoint for the player list markup when the
        page carries the required ids; otherwise return `html` unchanged.
        """
        url = urlparse.urljoin(self.base_url, EPISODES_URL)
        match = re.search("data\s*:\s*{\s*id:\s*(\d+),\s*episode_id:\s*(\d+),\s*link_id:\s*(\d+),\s*from:\s*'([^']+)", html)
        if match:
            show_id, ep_id, link_id, from_id = match.groups()
            data = {'id': show_id, 'episode_id': ep_id, 'link_id': link_id, 'from': from_id}
            headers = {'Referer': page_url, 'Accept-Formating': 'application/json, text/javascript', 'Server': 'cloudflare-nginx'}
            headers.update(XHR)
            html = self._http_get(url, data=data, headers=headers, cache_limit=1)
        return html
    def _get_episode_url(self, season_url, video):
        """Find the relative url of `video.episode` on the season page."""
        season_url = urlparse.urljoin(self.base_url, season_url)
        html = self._http_get(season_url, cache_limit=.5)
        html = self.__get_players(html, season_url)
        episode_pattern = 'href="([^"]+)[^>]+class="[^"]*btn-episode[^>]*>(?:Episode)?\s*0*%s<' % (video.episode)
        match = re.search(episode_pattern, html)
        if match:
            return scraper_utils.pathify_url(match.group(1))
    def search(self, video_type, title, year, season=''):
        """
        Search the site for `title` and return result dicts filtered by
        video type, year and (for seasons) the season number.
        """
        results = []
        search_url = urlparse.urljoin(self.base_url, '/movies/search?s=%s' % urllib.quote_plus(title))
        html = self._http_get(search_url, cache_limit=8)
        for item in dom_parser.parse_dom(html, 'div', {'class': '[^"]*c-content-product-2[^"]*'}):
            match_title_year = dom_parser.parse_dom(item, 'h2', {'class': '[^"]*c-title[^"]*'})
            match_url = dom_parser.parse_dom(item, 'a', ret='href')
            if match_title_year and match_url:
                match_title_year = match_title_year[0]
                match_url = match_url[0]
                # a 'Season N' suffix distinguishes shows from movies
                is_season = re.search('Season\s+\d+', match_title_year, re.I)
                if (video_type == VIDEO_TYPES.MOVIE and not is_season) or (video_type == VIDEO_TYPES.SEASON and is_season):
                    match_year = ''
                    if video_type == VIDEO_TYPES.SEASON:
                        match_title = match_title_year
                        if season and not re.search('Season\s+(%s)\s+' % (season), match_title_year, re.I):
                            continue
                    else:
                        match = re.search('(.*?)\s+\((\d{4})\)', match_title_year)
                        if match:
                            match_title, match_year = match.groups()
                        else:
                            match_title = match_title_year
                            match_year = ''
                        match_url = urlparse.urljoin(match_url, 'watching.html')
                    if not year or not match_year or year == match_year:
                        result = {'url': scraper_utils.pathify_url(match_url), 'title': scraper_utils.cleanse_title(match_title), 'year': match_year}
                        results.append(result)
        return results
| felipenaselva/repo.felipe | plugin.video.salts/scrapers/xmovies8v2_scraper.py | Python | gpl-2.0 | 7,473 | 0.007895 |
import myhdl
from myhdl import *
from bin2gray2 import bin2gray
from inc import Inc
def GrayInc(graycnt, enable, clock, reset, width):
    """Gray-code counter: a binary counter feeding a bin2gray converter."""
    bincnt = Signal(modbv(0)[width:])
    inc_1 = Inc(bincnt, enable, clock, reset)
    bin2gray_1 = bin2gray(B=bincnt, G=graycnt, width=width)
    return inc_1, bin2gray_1
def GrayIncReg(graycnt, enable, clock, reset, width):
    """Gray counter whose combinatorial output is registered on the clock."""
    graycnt_comb = Signal(modbv(0)[width:])
    gray_inc_1 = GrayInc(graycnt_comb, enable, clock, reset, width)
    @always(clock.posedge)
    def reg_1():
        # register the combinatorial gray count on each rising clock edge
        graycnt.next = graycnt_comb
    return gray_inc_1, reg_1
def main():
    """Elaborate an 8-bit GrayIncReg and convert it to Verilog and VHDL."""
    width = 8
    graycnt = Signal(modbv(0)[width:])
    enable = Signal(bool())
    clock = Signal(bool())
    # NOTE(review): 'async' became a reserved keyword in Python 3.7 and newer
    # myhdl releases renamed this parameter; this line only parses on the
    # older Python/myhdl combination this example targets - confirm toolchain.
    reset = ResetSignal(0, active=0, async=True)
    toVerilog(GrayIncReg, graycnt, enable, clock, reset, width)
    toVHDL(GrayIncReg, graycnt, enable, clock, reset, width)
if __name__ == '__main__':
    main()
| cfelton/myhdl | example/manual/GrayInc.py | Python | lgpl-2.1 | 1,048 | 0.016221 |
# -*- coding: utf-8 -*-
# pinched from django-moderation.
# modified to include rather than exclude, fields
import re
import difflib
def get_changes_between_models(model1, model2, include=[]):
    """
    Return {field verbose_name: (value1, value2)} for every field named in
    `include` whose (unicode) rendering differs between the two model
    instances. Fields with equal values are omitted.
    """
    # note: the default `include` list is never mutated, so sharing it
    # across calls is safe
    changes = {}
    for field_name in include:
        field = type(model1)._meta.get_field(field_name)
        value2 = unicode(getattr(model2, field_name))
        value1 = unicode(getattr(model1, field_name))
        if value1 != value2:
            changes[field.verbose_name] = (value1, value2)
    return changes
def get_diff(a, b):
    """
    Diff two token lists and return a list of HTML fragments: unchanged
    tokens verbatim, insertions wrapped in <ins>, deletions in <del>, and
    replacements as a <del>/<ins> pair tagged with the "modified" class.
    """
    fragments = []
    matcher = difflib.SequenceMatcher(None, a, b)
    for operation, start_a, end_a, start_b, end_b in matcher.get_opcodes():
        removed = ''.join(a[start_a:end_a])
        added = ''.join(b[start_b:end_b])
        if operation == "equal":
            fragments.append(added)
        elif operation == "insert":
            fragments.append('<ins class="diff">%s</ins>' % added)
        elif operation == "delete":
            fragments.append('<del class="diff">%s</del>' % removed)
        elif operation == "replace":
            fragments.append(
                '<del class="diff modified">%s</del>'
                '<ins class="diff modified">%s</ins>' % (removed, added))
    return fragments
def html_diff(a, b):
    """Takes in strings a and b and returns a human-readable HTML diff."""
    tokens_a = html_to_list(a)
    tokens_b = html_to_list(b)
    return u"".join(get_diff(tokens_a, tokens_b))
def html_to_list(html):
    """
    Tokenize an HTML string into entities, tags, words and punctuation
    runs, dropping the empty matches the pattern can produce.
    """
    pattern = re.compile(r'&.*?;|(?:<[^<]*?>)|'
                         r'(?:\w[\w-]*[ ]*)|(?:<[^<]*?>)|'
                         r'(?:\s*[,\.\?]*)', re.UNICODE)
    return [token for token in pattern.findall(html) if token]
def generate_diff(instance1, instance2, include=[]):
    """
    Return a list of {'verbose_name': ..., 'diff': ...} dicts holding the
    HTML diff of every field in `include` that differs between the two
    model instances.
    """
    # note: the default `include` list is never mutated, so sharing it
    # across calls is safe
    changes = get_changes_between_models(instance1, instance2, include)
    fields_diff = []
    for field_name in include:
        field = type(instance1)._meta.get_field(field_name)
        field_changes = changes.get(field.verbose_name, None)
        if field_changes:
            change1, change2 = field_changes
            if change1 != change2:
                diff = {'verbose_name': field.verbose_name,
                        'diff': html_diff(change1, change2)}
                fields_diff.append(diff)
    return fields_diff
| ixc/glamkit-eventtools | eventtools/utils/diff.py | Python | bsd-3-clause | 2,524 | 0.004754 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Default to this project's settings module unless the caller already
    # set DJANGO_SETTINGS_MODULE in the environment.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "BlogforSummyChou.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| Adai0808/SummyChou | BlogforSummyChou/manage.py | Python | gpl-2.0 | 259 | 0.003861 |
# views are handlers that respond to requests from web browsers or other clients
# Each view function maps to one or more request URLs
from flask import render_template, flash, redirect
from app import app
from .forms import Deck
#./run.py
@app.route('/submit', methods=('GET', 'POST'))
def submit():
    """Show the deck-creation form; on a valid POST redirect to the index."""
    form = Deck()
    if form.validate_on_submit():
        return redirect('/index')
    return render_template('submit.html',
                            title='Create Card',
                            form=form)
@app.route('/')
@app.route('/index')
def index():
    """Render the home page with a hard-coded user and sample decks."""
    # This is displayed on client's web browser
    user = {'nickname': 'Enrique Iglesias'} #fake user
    # placeholder deck data until real persistence exists
    decks = [
        {
            'title': 'GRE Words',
            'cards': [
                {
                    'word': 'combust',
                    'definition': 'to catch on fire'
                },
                {
                    'word': 'phaze',
                    'definition': 'to be affected'
                }
            ]
        },
        {
            'title': 'Food words',
            'cards': [
                {
                    'word': 'amuse bouche',
                    'definition': 'little serving'
                },
                {
                    'word': 'kimchii',
                    'definition': 'femented cabbage'
                }
            ]
        }
    ]
    return render_template('index.html',
                            title ='Home',
                            user=user,
                            posts=decks)
| ecotg/Flash-Card-App | app/views.py | Python | mit | 1,100 | 0.048182 |
import glob
import logging
import os
from typing import Any, Dict, List, Optional
from django.conf import settings
from zerver.lib.storage import static_path
# See https://jackstromberg.com/2013/01/useraccountcontrol-attributeflag-values/
# for docs on what these values mean.
LDAP_USER_ACCOUNT_CONTROL_NORMAL = "512"
LDAP_USER_ACCOUNT_CONTROL_DISABLED = "514"
def generate_dev_ldap_dir(mode: str, num_users: int = 8) -> Dict[str, Dict[str, Any]]:
    """
    Build a fake LDAP directory (DN -> attribute dict) for development.

    The three modes exercise different LDAP schemas:
    * 'a': DN keyed by full email; thumbnailPhoto avatar; userAccountControl.
    * 'b': DN keyed by username; jpegPhoto avatar.
    * 'c': DN keyed by username; email stored as a plain attribute.
    """
    mode = mode.lower()
    ldap_data = []
    for i in range(1, num_users + 1):
        name = f"LDAP User {i}"
        email = f"ldapuser{i}@zulip.com"
        phone_number = f"999999999{i}"
        birthdate = f"19{i:02}-{i:02}-{i:02}"
        ldap_data.append((name, email, phone_number, birthdate))
    # reuse the team page avatars as fake profile photos
    profile_images = []
    for path in glob.glob(os.path.join(static_path("images/team"), "*")):
        with open(path, "rb") as f:
            profile_images.append(f.read())
    ldap_dir = {}
    for i, user_data in enumerate(ldap_data):
        email = user_data[1].lower()
        email_username = email.split("@")[0]
        common_data = {
            "cn": [user_data[0]],
            "userPassword": [email_username],
            "phoneNumber": [user_data[2]],
            "birthDate": [user_data[3]],
        }
        if mode == "a":
            ldap_dir["uid=" + email + ",ou=users,dc=zulip,dc=com"] = dict(
                uid=[email],
                thumbnailPhoto=[profile_images[i % len(profile_images)]],
                userAccountControl=[LDAP_USER_ACCOUNT_CONTROL_NORMAL],
                **common_data,
            )
        elif mode == "b":
            ldap_dir["uid=" + email_username + ",ou=users,dc=zulip,dc=com"] = dict(
                uid=[email_username],
                jpegPhoto=[profile_images[i % len(profile_images)]],
                **common_data,
            )
        elif mode == "c":
            ldap_dir["uid=" + email_username + ",ou=users,dc=zulip,dc=com"] = dict(
                uid=[email_username], email=[email], **common_data
            )
    return ldap_dir
def init_fakeldap(
    directory: Optional[Dict[str, Dict[str, List[str]]]] = None
) -> None:  # nocoverage
    """
    Patch django_auth_ldap to talk to an in-memory MockLDAP directory, for
    development environments without a real LDAP server. The patch stays
    active for the lifetime of the process (it is never stopped).
    """
    # We only use this in development.  Importing mock inside
    # this function is an import time optimization, which
    # avoids the expensive import of the mock module (slow
    # because its dependency pbr uses pkgresources, which is
    # really slow to import.)
    from unittest import mock
    from fakeldap import MockLDAP
    # Silent `django_auth_ldap` logger in dev mode to avoid
    # spammy user not found log messages.
    ldap_auth_logger = logging.getLogger("django_auth_ldap")
    ldap_auth_logger.setLevel(logging.CRITICAL)
    fakeldap_logger = logging.getLogger("fakeldap")
    fakeldap_logger.setLevel(logging.CRITICAL)
    ldap_patcher = mock.patch("django_auth_ldap.config.ldap.initialize")
    mock_initialize = ldap_patcher.start()
    mock_ldap = MockLDAP()
    mock_initialize.return_value = mock_ldap
    # fall back to a generated directory configured via FAKE_LDAP_* settings
    mock_ldap.directory = directory or generate_dev_ldap_dir(
        settings.FAKE_LDAP_MODE, settings.FAKE_LDAP_NUM_USERS
    )
| rht/zulip | zerver/lib/dev_ldap_directory.py | Python | apache-2.0 | 3,149 | 0.00127 |
#!/usr/bin/env python
import os
import django
from os import path
from django.conf import settings
from django.core.management import call_command
def main():
    """
    Configure a minimal in-memory Django settings module (when none is
    configured yet) and regenerate the translation catalogs for the
    fluent_contents app.
    """
    if not settings.configured:
        # removed: unused local `module_root` (path.dirname of this file)
        settings.configure(
            DEBUG=False,
            INSTALLED_APPS=(
                'fluent_contents',
            ),
        )
        if django.VERSION >= (1, 7):
            # Django >= 1.7 requires the app registry to be set up explicitly
            django.setup()
    makemessages()
def makemessages():
    """Run Django's makemessages for the English and Dutch locales."""
    # run from inside the app directory so makemessages picks up its sources
    os.chdir('fluent_contents')
    call_command('makemessages', locale=('en', 'nl'), verbosity=1)
if __name__ == '__main__':
    main()
| ixc/django-fluent-contents | makemessages.py | Python | apache-2.0 | 634 | 0.012618 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import, unicode_literals
from .value_types import find_value_type, TypeBase
from six import string_types
import re
from qtpy.QtCore import *
from qtpy.QtGui import *
from qtpy.QtWidgets import *
from .default_value_dict import DefaultValues
from collections import OrderedDict
import markdown
if False:
from typing import Iterator, Dict, List, Any
__all__ = [
"PropertyModel",
"BaseItem",
"CategoryItem",
"PropertyItem",
"ValueItem",
"FlatTableModel",
]
CategoryItemType = QStandardItem.UserType + 1
PropertyItemType = CategoryItemType + 1
class PropertyModel(QStandardItemModel):
PrefixRe = re.compile(r"(^[#*-]*)\s*(.*)")
def __init__(self, parent=None):
"""
:param QWidget parent: parent widgets
"""
super(PropertyModel, self).__init__(parent)
self.setHorizontalHeaderLabels([
self.tr("Property"),
self.tr("Value")
])
self._default_dict = DefaultValues()
self._use_default = False
self.required_flag = True
def __getattr__(self, key):
return self.get(key)
def create_table_model(self, root_index, parent):
"""
Create table type model.
:param QModelIndex root_index: root index
:param QWidget parent: parent widget
:return: table type model
:rtype: FlatTableModel
"""
return FlatTableModel(self, root_index, parent)
def _load_settings(self, settings, parent_item, params_dict, default_values):
last_item = None
for setting in settings:
if isinstance(setting, dict) and setting:
key = list(setting.keys())[0]
setting_param = setting.get(key, [{}])
elif isinstance(setting, (list, tuple)):
assert last_item is not None
self._load_settings(setting, last_item, params_dict, default_values)
continue
elif isinstance(setting, string_types):
key = setting.strip()
setting_param = {}
else:
continue
if not key:
continue
g = self.PrefixRe.match(key)
category_flag = False
header_flag = False
vbox_flag = False
if g:
prefix, key = g.group(1), g.group(2)
category_flag = "#" in prefix
header_flag = "*" in prefix
vbox_flag = "-" in prefix
if category_flag:
label = setting_param.get("label", key)
last_item = self.add_category(
parent_item, key, label,
header_flag, setting_param
)
last_item.vbox_flag = vbox_flag
else:
_params_dict = params_dict.get(key, {}).copy()
_params_dict.update(setting_param)
value = setting_param.get("value")
default = setting_param.get("default")
if default is None:
default = self._get_default_value(parent_item, key, default_values)
if default is None:
default = params_dict.get(key, {}).get("default")
if header_flag:
_params_dict["required"] = True
_params_dict["require_input"] = True
last_item = self.add_property(parent_item, key, value, default, _params_dict)
@staticmethod
def _get_default_value(parent_item, key, default_values):
# hierarchy access
if parent_item and parent_item.index().isValid():
try:
d = default_values
for pkey in parent_item.tree_key():
d = d[pkey]
return d[key]
except (KeyError, TypeError):
pass
# root access
try:
return default_values[key]
except KeyError:
pass
return None
def load_settings(self, settings, params_dict=None, default_values=None):
root_item = self.invisibleRootItem()
default_values = default_values or {}
params_dict = params_dict or {}
self._load_settings(settings, root_item, params_dict, default_values)
# setup link
prop_map = self.property_map()
for key, item in prop_map.items():
if "link" not in item.params:
continue
item = prop_map[key]
item.setup_link(prop_map)
@staticmethod
def create_category(key, label=None, header_flag=False, params=None):
    # type: (string_types, string_types) -> CategoryItem
    """Create a CategoryItem; the key doubles as the label when none is given."""
    display = label if label else key
    return CategoryItem(key, display, header_flag, params)
@staticmethod
def create_property(key, value_item, value_type, params=None, label_name=None):
    """Create a PropertyItem, deriving the label from *params* when not given."""
    if not params:
        params = {}
    label = label_name if label_name else params.get("label", key)
    return PropertyItem(key, label, value_item, value_type, params)
def add_category(self, parent_item, *args, **kwargs):
    """Append a category row under *parent_item* and return the category item."""
    category_item = self.create_category(*args, **kwargs)
    # The right-hand (value) column holds a plain placeholder for categories.
    parent_item.appendRow([category_item, QStandardItem()])
    return category_item
def add_property(self, parent_item, key, value=None, default=None, params=None, label_name=None):
    """Append a property row under *parent_item* and return the new PropertyItem.

    Explicit arguments win; value/default/label otherwise fall back to *params*.
    """
    if not parent_item:
        parent_item = self.invisibleRootItem()
    if not params:
        params = {}
    if value is None:
        value = params.get("value")
    if default is None:
        default = params.get("default")
    if not label_name:
        label_name = params.get("label") or key
    # A value_type given as a string is resolved to a concrete type object.
    value_type = params.get("value_type")
    if isinstance(value_type, string_types):
        value_type = find_value_type(value_type, params)
    value_item = ValueItem(value, default, value_type)
    prop_item = self.create_property(key, value_item, value_type, params, label_name)
    description = params.get("description")
    if description:
        # Render the markdown description into the tooltip.
        prop_item.setToolTip(self._html(description.strip(), label_name, "###"))
    parent_item.appendRow([prop_item, value_item])
    prop_item.check_enable()
    return prop_item
def rowItem(self, index):
    # type: (QModelIndex) -> PropertyItem
    """Return the column-0 item of the row *index* belongs to."""
    if index.column() != 0:
        index = self.index(index.row(), 0, index.parent())
    return self.itemFromIndex(index)
def _property_item(self, index):
    # type: (QModelIndex) -> PropertyItem or None
    """Return the PropertyItem for *index*'s row; None for categories or invalid indexes."""
    if index.isValid():
        row_index = self.index(index.row(), 0, index.parent())
        item = self.itemFromIndex(row_index)
        if item.type() == PropertyItemType:
            return item
    return None
def get(self, keys, root_index=QModelIndex()):
    """Walk the tree along *keys* ("a.b.c" or an iterable) and return the item, or None."""
    if isinstance(keys, string_types):
        keys = keys.split(".")
    if root_index.isValid():
        node = self.itemFromIndex(root_index)
    else:
        node = self.invisibleRootItem()
    for key in keys:
        # Linear scan of this level's children for a matching key.
        children = (node.child(row) for row in range(node.rowCount()))
        node = next((child for child in children if child.key == key), None)
        if node is None:
            return None
    return node
def set_values(self, values, root=None):
    """Assign *values* (nested dict keyed like the tree) to every property under *root*."""
    if not root:
        root = self.invisibleRootItem()
    if not values:
        values = {}
    for prop in self.properties(root.index()):
        # Resolve through the same hierarchy lookup used for defaults.
        prop.set_value(self._get_default_value(prop.parent(), prop.key, values))
def properties(self, root_index=None):
    # type: () -> Iterator[PropertyItem]
    """Yield every PropertyItem below *root_index* (the whole model by default)."""
    if not root_index:
        root_index = QModelIndex()
    for index in self.model_iter(root_index, False):
        candidate = self.itemFromIndex(index)
        if candidate and candidate.type() == PropertyItemType:
            yield candidate
def headers(self, root=None):
    # type: () -> Iterator[PropertyItem]
    """Yield every item below *root* whose header flag is set."""
    if not root:
        root = QModelIndex()
    for idx in self.model_iter(root, False):
        candidate = self.itemFromIndex(idx)  # type: BaseItem
        if candidate.header_flag:
            yield candidate
def property_map(self):
    # type: () -> Dict[string_types, PropertyItem]
    """Return an ordered key -> PropertyItem mapping over all properties."""
    return OrderedDict((item.key, item) for item in self.properties())
def is_complete(self, root_index=QModelIndex()):
    # type: () -> bool
    """True when every property below *root_index* satisfies its requirements."""
    return all(prop.is_complete() for prop in self.properties(root_index))
@staticmethod
def _html(markdown_str, title, title_prefix="#"):
    """Render *markdown_str* to HTML, preceded by a markdown heading for *title*."""
    source = "{title_prefix} {title}\n{}".format(
        markdown_str, title=title, title_prefix=title_prefix
    )
    converter = markdown.Markdown(extensions=["gfm"])
    return converter.convert(source)
def model_iter(self, parent_index=QModelIndex(), col_iter=True):
    """Depth-first iteration over the model's indexes.

    :rtype: generator(QModelIndex)
    :type col_iter: bool
    :type parent_index: QModelIndex
    :param col_iter: when True, yield an index for every column of each row;
        when False, yield only the column-0 index.
    """
    index = self.index(0, 0, parent_index)
    if not index.isValid():
        return
    while True:
        if col_iter:
            for col in range(0, self.columnCount(parent_index)):
                # BUG FIX: was ``index.siblding`` — a typo that raised
                # AttributeError whenever col_iter was True.
                yield index.sibling(index.row(), col)
        else:
            yield index
        # Recurse into children before moving to the next sibling row.
        if self.rowCount(index) > 0:
            for _ in self.model_iter(index, col_iter):
                yield _
        index = index.sibling(index.row() + 1, index.column())
        if not index.isValid():
            break
def data(self, index, role=Qt.DisplayRole):
    """Qt data accessor; the value column's tooltip is delegated to column 0."""
    if index.isValid() and role == Qt.ToolTipRole and index.column() == 1:
        left = index.sibling(index.row(), 0)
        return left.data(role)
    return super(PropertyModel, self).data(index, role)
def setData(self, index, value, role=Qt.EditRole):
    # type: (QModelIndex, Any, int) -> bool
    """Handle edits and check-state toggles; everything else goes to the base class."""
    if role == Qt.EditRole:
        # Route edited values through PropertyItem.set_value.
        self.rowItem(index).set_value(value)
        return True
    if role == Qt.CheckStateRole:
        # Toggling a checkable row enables/disables its first child row.
        child = self.itemFromIndex(index).child(0, 0)
        child.setEnabled(value == Qt.Checked)
        # noinspection PyUnresolvedReferences
        self.dataChanged.emit(child.index(), child.index())
    return super(PropertyModel, self).setData(index, value, role)
def dump(self, store_none=False, flat=False, exclude_default=False):
    # type: (bool, bool, bool) -> dict
    """Serialize property values to a dict.

    :param store_none: also emit properties whose value is None.
    :param flat: key the result by leaf key only instead of nesting dicts.
    :param exclude_default: skip properties still at their default value.
    """
    collected = OrderedDict()
    for index in self.model_iter(col_iter=False):
        item = self.itemFromIndex(index)
        if item.is_category:
            continue
        if item.value is None:
            if not store_none:
                continue
        elif exclude_default and item.was_default():
            continue
        collected[item.tree_key()] = item.value
    if flat:
        # Leaf key only; a later duplicate leaf overwrites an earlier one.
        return {tree_key[-1]: value for tree_key, value in collected.items()}
    result = {}
    for tree_key, value in collected.items():
        # Materialize intermediate dicts along the key path.
        node = result
        for part in tree_key[:-1]:
            node = node.setdefault(part, {})
        node[tree_key[-1]] = value
    return result
class BaseItem(QStandardItem):
    """Common base for tree items: key bookkeeping, child access, enable state."""
    # Overridden to False on PropertyItem; consulted by PropertyModel.dump().
    is_category = True

    def __init__(self, key, name):
        # type: (string_types, string_types) -> None
        super(BaseItem, self).__init__(name)
        self.key = key
        self.header_flag = False
        self.vbox_flag = False

    def __getattr__(self, item):
        # Unknown attribute access falls back to a child lookup by key,
        # so ``category.some_child_key`` returns that child item.
        for row in range(self.rowCount()):
            ch_item = self.child(row, 0)
            if ch_item.key == item:
                return ch_item
        raise AttributeError(item)

    def tree_key(self):
        # Tuple of keys from the root down to (and including) this item.
        keys = []
        item = self
        while item:
            keys.append(item.key)
            item = item.parent()
        return tuple(reversed(keys))

    def add_property(self, *args, **kwargs):
        # Convenience: delegate to the owning model with self as parent.
        return self.model().add_property(self, *args, **kwargs)

    def add_category(self, *args, **kwargs):
        # Convenience: delegate to the owning model with self as parent.
        return self.model().add_category(self, *args, **kwargs)

    def set_values(self, values, root=None):
        return self.model().set_values(values, root or self)

    def enabled(self):
        # Non-checkable items are always considered enabled.
        if not self.isCheckable():
            return True
        return self.checkState() == Qt.Checked

    def check_enable(self):
        # Sync enabled state from checkable ancestors; each checkable
        # ancestor overwrites the flag, so the topmost one takes precedence.
        parent = self.parent()
        while parent:
            if parent.isCheckable():
                self.setEnabled(parent.checkState() == Qt.Checked)
            parent = parent.parent()

    def setChecked(self, checked):
        if checked:
            self.setCheckState(Qt.Checked)
        else:
            self.setCheckState(Qt.Unchecked)

    def update_enabled(self, parent_check, checked_parent):
        # Propagate an ancestor's check state down the subtree, skipping the
        # item that originated the change.
        if self is not checked_parent:
            if parent_check != self.isEnabled():
                self.setEnabled(parent_check)
                if self.model():
                    self.model().dataChanged.emit(self.index(), self.index())
        for row in range(self.rowCount()):
            child = self.child(row, 0)
            child.update_enabled(parent_check, checked_parent)
class CategoryItem(BaseItem):
    """Non-editable section-header row with its own colors and a bold font."""
    BACKGROUND_COLOR = QColor(71, 74, 77)
    FOREGROUND_COLOR = QColor(0xFF, 0xFF, 0xFF)

    def __init__(self, key, name, header_flag, params=None):
        # type: (string_types, string_types, bool, dict) -> None
        super(CategoryItem, self).__init__(key, name)
        self.header_flag = header_flag
        # Visual styling: dark background, white bold text.
        self.setBackground(QBrush(self.BACKGROUND_COLOR))
        self.setForeground(self.FOREGROUND_COLOR)
        bold_font = self.font()
        bold_font.setBold(True)
        self.setFont(bold_font)
        # Categories cannot be edited or selected, but stay enabled.
        self.setFlags(self.flags() & ~(Qt.ItemIsEditable | Qt.ItemIsSelectable))
        self.setEnabled(True)
        options = params or {}
        if options.get("checkable", False):
            # A checkable category toggles its child rows on and off.
            self.setCheckable(True)
            initially_checked = options.get("default", False)
            self.setCheckState(Qt.Checked if initially_checked else Qt.Unchecked)

    def type(self):
        return CategoryItemType
class PropertyItem(BaseItem):
    """Left-column item describing one editable property.

    Holds metadata (required flags, link template, value type) and mirrors
    its ValueItem's state for completeness checks and display styling.
    """
    is_category = False
    REQUIRED_FOREGROUND_COLOR = QColor("#f8b862")
    BOLD_FONT = QFont()
    BOLD_FONT.setBold(True)
    # Matches "{key}" placeholders inside a link template.
    LinkParserRe = re.compile("{(.*?)}")

    def __init__(self, key, label, value_item, value_type, params):
        # type: (string_types, dict) -> None
        super(PropertyItem, self).__init__(key, label)
        self.value_item = value_item
        self.description = params.get("description")
        self.description_path = params.get("description_path", ".")
        self.required = params.get("required", False)
        self.require_input = params.get("require_input", False)
        self.replace_space = params.get("replace_space")
        value_item.allow_empty = params.get("allow_empty", True)
        self.params = params
        # value type
        self.value_type = value_type
        # item flags
        self.setFlags(Qt.NoItemFlags)
        self.setEnabled(True)
        self.setEditable(False)
        # link param
        self.link = None
        self._linked = []   # items whose link output depends on this item
        self._links = []    # items this item's link template reads from
        # default values
        self._default_flag = value_item.value is None
        # reserved
        self.validator = None
        self.update_bg_color()

    @staticmethod
    def parse_link(link):
        # type: (string_types) -> List[string_types]
        """Return the placeholder keys used in a link template."""
        return PropertyItem.LinkParserRe.findall(link)

    def type(self):
        return PropertyItemType

    @property
    def default(self):
        return self.value_item.default

    def update_bg_color(self):
        """Highlight the label (bold, accent color) while the property is incomplete."""
        if not self.is_complete():
            self.setForeground(self.REQUIRED_FOREGROUND_COLOR)
            self.setFont(self.BOLD_FONT)
        else:
            self.setData(None, Qt.ForegroundRole)
            self.setFont(QFont())

    def set_required(self, required):
        self.required = required

    def set_indent(self, indent):
        # type: (int) -> None
        self.setText((" " * indent) + self.text())

    def set_validator(self, validator):
        # type: (QValidator) -> None
        self.validator = validator

    def set_value(self, value, force_update=False, not_set_value=False):
        # (Any) -> None
        # NOTE: ``force_update`` is accepted for interface compatibility but
        # is currently unused.
        if not not_set_value:
            # noinspection PyUnreachableCode
            self.value_item.set_value(self.type_class().filter(value))
        self._default_flag = self.value is None
        # Refresh every item whose link template reads this value.
        for linked in self._linked:
            linked.update_link()
        self.update_bg_color()

    def update_link(self):
        # (Any) -> None
        """Re-evaluate the link template and push the result as the default display."""
        if self.link is None:
            if self.model():
                self.value_item.set_default_display(self.default)
                self.update_bg_color()
            return
        # create format dict
        default_value = self.value_item.default
        if self.link:
            d = {
                "_default": default_value,
            }
            for item in self._links:
                value = item.value if item.value else ""
                if self.replace_space is not None:
                    # BUG FIX: this replace was performed twice in a row;
                    # a single pass removes every space already.
                    value = value.replace(" ", self.replace_space)
                d[item.key] = value
            try:
                cache = self.link.format(**d)
            except KeyError:
                # A placeholder without a matching item falls back to default.
                cache = default_value
        else:
            cache = default_value
        # self.type_class().set_link(link_value)
        self.value_item.set_default_display(self.type_class().filter(cache))
        self.update_bg_color()

    @property
    def value(self):
        # type: () -> any
        return self.value_item.value

    def was_default(self):
        # type: () -> bool
        return self.value_item.was_default()

    def setup_link(self, prop_map):
        # type: (dict, string_types) -> None
        """Wire this item's "link" parameter to the items it references."""
        link = self.params.get("link")
        if not link:
            return
        self._links = []
        keys = self.parse_link(link)
        if not keys:
            # A bare key (no braces) links to exactly that item.
            keys = [link]
            link = "{" + link + "}"
        for key in keys:
            item = prop_map.get(key)
            if item:
                self._links.append(item)
                # noinspection PyProtectedMember
                item._linked.append(self)
        self.link = link
        self.update_link()

    def is_complete(self):
        # type: () -> bool
        """False while a required value (or required manual input) is missing."""
        if self.required and self.model() and self.model().required_flag:
            if not self.value:
                return False
            if not self.value_item.input_value and self.require_input:
                return False
        return True

    def type_class(self):
        return self.value_type or TypeBase
class ValueItem(QStandardItem):
    """Right-column item holding a property's user input and its default display."""
    DEFAULT_VALUE_FOREGROUND_COLOR = QColor(0x80, 0x80, 0x80)

    def __init__(self, value, default, value_type):
        super(ValueItem, self).__init__()
        self._input_value = value        # value entered by the user (None = untouched)
        self._default_value = default    # the original default
        self._default_display = default  # default, possibly overridden by a link
        self.value_type = value_type
        self.setFlags(self.flags() | Qt.ItemIsEditable | Qt.ItemIsSelectable)
        self.allow_empty = True
        self.set_value(value)
        if self.value_type:
            self.setSizeHint(self.value_type.sizeHint())
            self.value_type.setup(self)

    @property
    def input_value(self):
        return self._input_value

    @property
    def value(self):
        # BUG FIX (readability): the previous triple-negated condition was
        # equivalent to this positive form — prefer the user's input when it
        # exists and is usable (non-empty, or empty values are allowed);
        # otherwise show the default.
        if self._input_value is not None and (self._input_value or self.allow_empty):
            return self._input_value
        return self._default_display

    def was_input(self):
        # NOTE(review): despite the name this returns True when NO input was
        # made (input value is None) — verify against callers before renaming.
        return self._input_value is None

    def was_default(self):
        return self._input_value is None or self._input_value == self._default_value

    @property
    def default(self):
        return self._default_value

    def set_default_display(self, value):
        """Replace the displayed default (used by link templates)."""
        self._default_display = value
        if not self._input_value:
            # Re-render so the new default becomes visible.
            self.set_value(self._input_value)

    def update(self):
        if not self._input_value:
            self.set_value(self._input_value)

    def set_value(self, value):
        """Store *value* and refresh text, icon and foreground color."""
        _value = value if value is not None else self._default_display
        _value = self.value_type.filter(_value) if self.value_type else _value
        _display_value = self.value_type.data(_value) if self.value_type else _value
        if isinstance(_display_value, bool):
            _display_value = "Yes" if _display_value else "No"
        self.setText(_display_value or "")
        self._input_value = value
        icon = self.value_type.icon(_value) if self.value_type else None
        if icon:
            self.setIcon(icon)
        # Gray out the text while the default (not user input) is shown.
        if value is not None:
            self.setData(None, Qt.ForegroundRole)
        else:
            self.setForeground(self.DEFAULT_VALUE_FOREGROUND_COLOR)
        if self.model():
            # Keep the paired PropertyItem in sync without recursing back here
            # (not_set_value=True skips the value write on the other side).
            property_index = self.index().sibling(self.row(), 0)
            property_item = self.model().itemFromIndex(property_index)  # type: PropertyItem
            if property_item:
                property_item.set_value(self.value, not_set_value=True)
class FlatTableModel(QAbstractProxyModel):
    """
    TreeModel -> TableModel Translate Model
    """
    def __init__(self, source_model, root_index, parent=None):
        # type: (PropertyModel, QModelIndex, QWidget) -> None
        super(FlatTableModel, self).__init__(parent)
        # (proxy row, proxy column) -> source index, plus the reverse map.
        self._map_dict = {}
        self._from_dict = {}
        self.row_count = 0
        # One proxy table row per source tree row, depth-first; proxy
        # column 1 maps to the value column of the same source row.
        for i, index in enumerate(source_model.model_iter(root_index, False)):
            self._map_dict[(i, 0)] = index
            self._map_dict[(i, 1)] = index.sibling(index.row(), 1)
            self._from_dict[index] = self.index(i, 0)
            self._from_dict[index.sibling(index.row(), 1)] = self.index(i, 1)
            self.row_count += 1
        self.setSourceModel(source_model)
        # noinspection PyUnresolvedReferences
        source_model.dataChanged.connect(self._onChanged)

    def _onChanged(self, left_index, right_index):
        # Re-emit source-model change notifications in proxy coordinates.
        # noinspection PyUnresolvedReferences
        self.dataChanged.emit(
            self.mapFromSource(left_index),
            self.mapFromSource(right_index)
        )

    # noinspection PyMethodOverriding
    def rowCount(self, index=QModelIndex()):
        # Flat table: rows exist only under the invisible root.
        if index.isValid():
            return 0
        return self.row_count

    # noinspection PyMethodOverriding
    def columnCount(self, index=QModelIndex()):
        return 2

    # noinspection PyMethodOverriding
    def index(self, row, column, parent=QModelIndex()):
        # NOTE(review): passes *parent* as createIndex's internal-pointer
        # argument; works with the mapping dicts here but is unusual —
        # verify before reusing this pattern elsewhere.
        return self.createIndex(row, column, parent)

    def itemFromIndex(self, index):
        source_index = self.mapToSource(index)
        return self.sourceModel().itemFromIndex(source_index)

    def mapToSource(self, index):
        if not index.isValid():
            return index
        return self._map_dict.get((index.row(), index.column()), QModelIndex())

    def rowItem(self, index):
        # Column-0 item of the mapped source row.
        return self.sourceModel().rowItem(self.mapToSource(index))

    def mapFromSource(self, source_index):
        if not source_index.isValid():
            return source_index
        try:
            return self._from_dict[source_index]
        except KeyError:
            return QModelIndex()

    def parent(self, index=QModelIndex()):
        # Every row is top-level in the flattened view.
        return QModelIndex()

    def set_values(self, *args, **kwargs):
        return self.sourceModel().set_values(*args, **kwargs)

    def dump(self, *args, **kwargs):
        return self.sourceModel().dump(*args, **kwargs)
| pashango2/sphinx-explorer | sphinx_explorer/property_widget/property_model.py | Python | mit | 24,672 | 0.000851 |
from django.test import TestCase, RequestFactory
from main.models import *
from main.views import *
from bs4 import BeautifulSoup
from .base import *
import datetime
from feedback.models import IndividualFeedback
class StatusCheckTest(TestCase):
    """Testing the decorator test functions"""

    def test_user_teacher_test_works(self):
        teacher = create_teacher()
        user = teacher.user
        self.assertTrue(is_staff(user))
        self.assertTrue(is_teacher(user))
        self.assertFalse(is_admin(user))
        self.assertFalse(is_student(user))

    def test_staff_admin_status_is_properly_undertood_at_login(self):
        admin_member = create_admin()
        user = admin_member.user
        self.assertTrue(is_staff(user))
        self.assertFalse(is_teacher(user))
        self.assertTrue(is_admin(user))
        self.assertFalse(is_student(user))

    def test_student_is_student_and_neither_admin_nor_teacher(self):
        user = User.objects.create_user(
            username='bb42', password='ilovecarrots')
        Student.objects.create(
            student_id='bb42',
            last_name='Bunny',
            first_name='Bugs',
            user=user
        )
        self.assertTrue(is_student(user))
        self.assertFalse(is_staff(user))
        self.assertFalse(is_admin(user))
        self.assertFalse(is_teacher(user))
class HomePageTest(TeacherUnitTest):
    """Simple tests for the home page"""

    def test_home_page_renders_home_template(self):
        page = self.client.get('/')
        self.assertTemplateUsed(page, 'home.html')

    def test_home_page_title_contains_uni_name(self):
        page = self.client.get('/')
        self.assertContains(page, 'Acme University')
class HomePageForStudentTest(StudentUnitTest):
    """Student homepage is shown"""

    def test_student_home_shows_student_template(self):
        response = self.client.get('/')
        self.assertTemplateUsed(response, 'student_home.html')

    def test_student_sees_links_to_all_marksheets(self):
        """Feedback links appear only for assessments marked available."""
        student = self.user.student
        # Module 1: essay (first + resit available) and exam (first available).
        module1 = create_module()
        performance1 = Performance.objects.create(
            student=student, module=module1)
        assessment1 = Assessment.objects.create(
            module=module1,
            value=50,
            title='Essay',
            available=True,
            resit_available=True
        )
        assessment2 = Assessment.objects.create(
            module=module1,
            value=50,
            title='Exam',
            available=True
        )
        assessment_result_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=30,
            resit_mark=40,
        )
        feedback_1_1 = IndividualFeedback.objects.create(
            assessment_result=assessment_result_1,
            attempt='first',
            completed=True
        )
        feedback_1_2 = IndividualFeedback.objects.create(
            assessment_result=assessment_result_1,
            attempt='resit',
            completed=True
        )
        performance1.assessment_results.add(assessment_result_1)
        link1 = (
            '<a href="/export_feedback/' +
            module1.code +
            '/' +
            str(module1.year) +
            '/' +
            assessment1.slug +
            '/' +
            student.student_id +
            '/'
        )
        link1_1 = link1 + 'first/'
        link1_2 = link1 + 'resit/'
        assessment_result_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=30,
            resit_mark=40,
        )
        feedback_2_1 = IndividualFeedback.objects.create(
            assessment_result=assessment_result_2,
            attempt='first',
            completed=True
        )
        performance1.assessment_results.add(assessment_result_2)
        link2_1 = (
            '<a href="/export_feedback/' +
            module1.code +
            '/' +
            str(module1.year) +
            '/' +
            assessment2.slug +
            '/' +
            student.student_id +
            '/first/'
        )
        # Module 2: nothing is available, so none of its links may show.
        module2 = Module.objects.create(
            title="Introduction to Squaredance",
            year=1901,
            code='i2sq42'
        )
        student.modules.add(module2)
        performance2 = Performance.objects.create(
            student=student, module=module2)
        assessment3 = Assessment.objects.create(
            module=module2,
            value=50,
            title='Essay',
            available=False,
            resit_available=False
        )
        assessment_result_3 = AssessmentResult.objects.create(
            assessment=assessment3,
            mark=30,
            resit_mark=40,
        )
        feedback_3_1 = IndividualFeedback.objects.create(
            assessment_result=assessment_result_3,
            attempt='first',
            completed=True
        )
        feedback_3_2 = IndividualFeedback.objects.create(
            assessment_result=assessment_result_3,
            attempt='resit',
            completed=True
        )
        performance2.assessment_results.add(assessment_result_3)
        link3 = (
            '<a href="/export_feedback/' +
            module2.code +
            '/' +
            str(module2.year) +
            '/' +
            assessment3.slug +
            '/' +
            student.student_id
        )
        link3_1 = link3 + '/first/'
        link3_2 = link3 + '/resit/'
        assessment4 = Assessment.objects.create(
            module=module2,
            value=50,
            title='Exam',
            available=False
        )
        assessment_result_4 = AssessmentResult.objects.create(
            assessment=assessment4,
            mark=30,
            resit_mark=40,
        )
        feedback_4_1 = IndividualFeedback.objects.create(
            assessment_result=assessment_result_4,
            attempt='first',
            completed=True
        )
        performance2.assessment_results.add(assessment_result_4)
        link4_1 = (
            '<a href="/export_feedback/' +
            module2.code +
            '/' +
            str(module2.year) +
            '/' +
            assessment4.slug +  # BUG FIX: was assessment2.slug (module1's exam)
            '/' +
            student.student_id +
            '/first/'
        )
        response = self.client.get('/')
        self.assertContains(response, link1_1)
        self.assertContains(response, link1_2)
        self.assertContains(response, link2_1)
        self.assertNotContains(response, link3_1)
        self.assertNotContains(response, link3_2)
        self.assertNotContains(response, link4_1)
class AdminDashboardTest(AdminUnitTest):
    """Checks the Admin Dashboard"""

    def test_admin_page_uses_right_template(self):
        # "Main Settings" is only offered to the main admin.
        response = self.client.get('/admin_dashboard/')
        self.assertNotContains(response, 'Main Settings')
        self.user.staff.main_admin = True
        self.user.staff.save()
        response = self.client.get('/admin_dashboard/')
        self.assertContains(response, 'Main Settings')

    def test_admin_page_shows_all_subjects_and_years_for_main_admin(self):
        # A main admin sees assign-tutor links for every subject area /
        # year combination that actually has students.
        self.user.staff.main_admin = True
        self.user.staff.save()
        subject_area_1 = SubjectArea.objects.create(name='Cartoon Studies')
        subject_area_2 = SubjectArea.objects.create(name='Evil Plotting')
        course_1 = Course.objects.create(
            title='BA in Cartoon Studies',
            short_title='Cartoon Studies',
        )
        course_1.subject_areas.add(subject_area_1)
        course_2 = Course.objects.create(
            title='BA in Evil Plotting',
            short_title='Evil Plotting',
        )
        course_2.subject_areas.add(subject_area_2)
        # Joint course spanning both subject areas.
        course_3 = Course.objects.create(
            title='BA in Cartoon Studies with Evil Plotting',
            short_title='Cartoon Studies / Evil Plotting',
        )
        course_3.subject_areas.add(subject_area_1)
        course_3.subject_areas.add(subject_area_2)
        # Students: year 1 on CS, year 2 on EP, year 3 on the joint course.
        stuff = set_up_stuff()
        student_1 = stuff[1]
        student_1.course = course_1
        student_1.year = 1
        student_1.save()
        student_2 = stuff[2]
        student_2.course = course_2
        student_2.year = 2
        student_2.save()
        student_3 = stuff[3]
        student_3.course = course_3
        student_3.year = 3
        student_3.save()
        response = self.client.get('/admin_dashboard/')
        # Cartoon Studies has students in years 1 and 3, but not 2.
        url = (
            '<a href="/assign_tutors/' +
            subject_area_1.slug +
            '/1/">'
        )
        self.assertContains(response, url)
        url = (
            '<a href="/assign_tutors/' +
            subject_area_1.slug +
            '/2/">'
        )
        self.assertNotContains(response, url)
        url = (
            '<a href="/assign_tutors/' +
            subject_area_1.slug +
            '/3/">'
        )
        self.assertContains(response, url)
        # Evil Plotting has students in years 2 and 3, but not 1.
        url = (
            '<a href="/assign_tutors/' +
            subject_area_2.slug +
            '/1/">'
        )
        self.assertNotContains(response, url)
        url = (
            '<a href="/assign_tutors/' +
            subject_area_2.slug +
            '/2/">'
        )
        self.assertContains(response, url)
        url = (
            '<a href="/assign_tutors/' +
            subject_area_2.slug +
            '/3/">'
        )
        self.assertContains(response, url)

    def test_admin_page_shows_own_subjects_and_years_for_normal_admin(self):
        # A normal admin only sees links for their own subject areas
        # (here: Cartoon Studies, assigned to self.user.staff below).
        subject_area_1 = SubjectArea.objects.create(name='Cartoon Studies')
        self.user.staff.subject_areas.add(subject_area_1)
        subject_area_2 = SubjectArea.objects.create(name='Evil Plotting')
        course_1 = Course.objects.create(
            title='BA in Cartoon Studies',
            short_title='Cartoon Studies',
        )
        course_1.subject_areas.add(subject_area_1)
        course_2 = Course.objects.create(
            title='BA in Evil Plotting',
            short_title='Evil Plotting',
        )
        course_2.subject_areas.add(subject_area_2)
        course_3 = Course.objects.create(
            title='BA in Cartoon Studies with Evil Plotting',
            short_title='Cartoon Studies / Evil Plotting',
        )
        course_3.subject_areas.add(subject_area_1)
        course_3.subject_areas.add(subject_area_2)
        stuff = set_up_stuff()
        student_1 = stuff[1]
        student_1.course = course_1
        student_1.year = 1
        student_1.save()
        student_2 = stuff[2]
        student_2.course = course_2
        student_2.year = 2
        student_2.save()
        student_3 = stuff[3]
        student_3.course = course_3
        student_3.year = 3
        student_3.save()
        response = self.client.get('/admin_dashboard/')
        # Own subject area: years with students (1 and 3) are linked.
        url = (
            '<a href="/assign_tutors/' +
            subject_area_1.slug +
            '/1/">'
        )
        self.assertContains(response, url)
        url = (
            '<a href="/assign_tutors/' +
            subject_area_1.slug +
            '/2/">'
        )
        self.assertNotContains(response, url)
        url = (
            '<a href="/assign_tutors/' +
            subject_area_1.slug +
            '/3/">'
        )
        self.assertContains(response, url)
        # Foreign subject area: never linked for a normal admin.
        url = (
            '<a href="/assign_tutors/' +
            subject_area_2.slug +
            '/1/">'
        )
        self.assertNotContains(response, url)
        url = (
            '<a href="/assign_tutors/' +
            subject_area_2.slug +
            '/2/">'
        )
        self.assertNotContains(response, url)
        url = (
            '<a href="/assign_tutors/' +
            subject_area_2.slug +
            '/3/">'
        )
        self.assertNotContains(response, url)
class StudentViewTest(TeacherUnitTest):
    """Tests for the student view function"""

    def test_student_view_renders_student_view_template(self):
        student = create_student()
        page = self.client.get(student.get_absolute_url())
        self.assertTemplateUsed(page, 'student_view.html')
        for expected in ("bb23", "Bunny", "Bugs"):
            self.assertContains(page, expected)
class AddEditStudentTest(TeacherUnitTest):
    """Tests for the student form function"""

    # Valid form payload used by send_form(); includes a non-ASCII surname.
    FORM_DATA = {
        'student_id': 'bb23',
        'last_name': 'Bünny',
        'first_name': 'Bugs Middle Names'
    }

    def send_form(self):
        """POST a valid student form and return the response."""
        return self.client.post('/add_student/', data=self.FORM_DATA)

    def test_add_edit_student_renders_right_template(self):
        page = self.client.get('/add_student/')
        self.assertTemplateUsed(page, 'student_form.html')

    def test_add_student_adds_student_to_database(self):
        self.send_form()
        saved = Student.objects.first()
        self.assertEqual(saved.student_id, 'bb23')
        self.assertEqual(saved.last_name, 'Bünny')
        self.assertEqual(saved.first_name, 'Bugs Middle Names')

    def test_edit_student_shows_correct_data(self):
        student = create_student()
        page = self.client.get(student.get_edit_url())
        self.assertTemplateUsed(page, 'student_form.html')
        for fragment in ('Bunny', 'Bugs', 'bb23'):
            self.assertContains(page, fragment)
class InviteStudentTest(AdminUnitTest):
    """Already added students can be invited"""

    def test_students_can_be_invited_users_get_created(self):
        """Inviting students creates user accounts linked to them."""
        subject_area = create_subject_area()
        course = Course.objects.create(
            title='BA in Cartoon Studies',
            short_title='BA CS',
        )
        course.subject_areas.add(subject_area)
        course.save()
        student1 = create_student()
        student1.email = 'bb23@acme.edu'
        student1.save()
        student2 = Student.objects.create(
            student_id='bb4223',
            first_name='Buster Middle Names',
            last_name='Bunny',
            email='bb4223@acme.edu',
            year=2,
            course=course
        )
        url = '/invite_students/' + subject_area.slug + '/'
        request = self.factory.post(
            url,
            data={
                'selected_student_id': [
                    student1.student_id,
                    student2.student_id
                ]
            }
        )
        request.user = self.user
        # testing=True presumably suppresses the real invitation e-mail —
        # confirm against the invite_students view.
        invite_students(request, subject_area.slug, testing=True)
        # Usernames appear to be derived from the students' initials
        # ("Bugs Middle Names Bunny" -> bmnb1, etc.).
        user1 = User.objects.get(username='bmnb1')
        user2 = User.objects.get(username='bmnb2')
        student1_out = Student.objects.get(student_id='bb23')
        student2_out = Student.objects.get(first_name='Buster Middle Names')
        self.assertEqual(student1_out.user, user1)
        self.assertEqual(student2_out.user, user2)

    def test_invitation_status_is_displayed_correctly(self):
        """Students without an e-mail address are listed as not invited."""
        subject_area = create_subject_area()
        course = Course.objects.create(
            title='BA in Cartoon Studies',
            short_title='BA CS',
        )
        course.subject_areas.add(subject_area)
        course.save()
        student1 = create_student()  # No email address
        student2 = Student.objects.create(
            student_id='bb4223',
            first_name='Buster',
            last_name='Bunny',
            year=2,
            email='bb4423@acme.edu',
            course=course
        )
        url = '/invite_students/' + subject_area.slug + '/'
        request = self.factory.post(
            url,
            data={'selected_student_id': [
                student1.student_id, student2.student_id
            ]
            }
        )
        request.user = self.user
        response = invite_students(request, subject_area.slug, testing=True)
        # BUG FIX: pin the parser; bare BeautifulSoup(...) emits a
        # GuessedAtParserWarning and uses whichever parser is installed.
        soup = BeautifulSoup(response.content, 'html.parser')
        added = str(soup.select('#students_added')[0])
        not_added = str(soup.select('#students_without_email')[0])
        self.assertIn(student1.name(), not_added)
        self.assertIn(student2.name(), added)
class StaffResetPasswordTest(AdminUnitTest):
    """Password can be reset by staff"""

    def test_staff_can_reset_password(self):
        payload = {'email': self.user.email}
        request = self.factory.get('/reset_password/', data=payload)
        request.user = self.user
        # testing=True presumably suppresses the real e-mail — see reset_password.
        response = reset_password(request, testing=True)
        self.assertContains(response, self.user.first_name)
class StudentResetPasswordTest(NotYetLoggedInUnitTest):
    """A student who is not logged in can request a password reset."""

    def test_student_can_reset_password(self):
        account = User.objects.create_user(
            username='bb42', password='ilovecarrots')
        student = Student.objects.create(
            student_id='bb42',
            last_name='Bunny',
            first_name='Bugs',
            user=account,
            email='bb23@acme.edu'
        )
        payload = {'email': student.email}
        request = self.factory.get('/reset_password/', data=payload)
        request.user = self.user
        response = reset_password(request, testing=True)
        self.assertContains(response, student.short_first_name())
class ModuleViewTest(TeacherUnitTest):
"""Tests for the module view"""
def test_module_view_renders_module_view_template(self):
    """The module page is rendered with the module_view template."""
    module = Module.objects.create(
        title="Hunting Practice", code="hp23", year=1900)
    page = self.client.get(module.get_absolute_url())
    self.assertTemplateUsed(page, 'module_view.html')
def test_performances_in_a_module_are_shown(self):
    """A student added to a module appears on the module page."""
    module = Module.objects.create(
        title="Hunting Practice",
        code="hp23",
        year=1900,
        eligible="1"
    )
    student = Student.objects.create(
        last_name="Pig",
        first_name="Porky",
        student_id="pp2323",
        year=2
    )
    # Enrol the student through the add-students view.
    self.client.post(
        module.get_add_students_url(),
        data={'student_ids': [student.student_id]}
    )
    module_page = self.client.get(module.get_absolute_url())
    self.assertContains(module_page, "Pig, Porky")
def test_only_active_students_appear_in_module_view(self):
    """Inactive students are filtered out of the module page."""
    module = create_module()
    active_student = create_student()
    inactive_student = Student.objects.create(
        last_name="Pig",
        first_name="Porky",
        student_id="pp2323",
        active=False
    )
    for participant in (active_student, inactive_student):
        participant.modules.add(module)
        Performance.objects.create(student=participant, module=module)
    page = self.client.get(module.get_absolute_url())
    self.assertContains(page, 'Bunny, Bugs')
    self.assertNotContains(page, 'Pig, Porky')
def test_assessment_availability_is_shown_correctly(self):
    """The module page offers a show/hide toggle matching availability."""
    module = create_module()
    student = create_student()
    student.modules.add(module)
    performance = Performance.objects.create(
        student=student, module=module)
    # NOTE(review): created without module=...; relies on the explicit
    # module.assessments.add() below — confirm against the Assessment model.
    assessment = Assessment.objects.create(
        title="Essay",
        value=100,
        available=False,
        marksheet_type="Something"
    )
    module.assessments.add(assessment)
    response = self.client.get(module.get_absolute_url())
    # Hidden assessment: closed-eye icon plus a "Show" action.
    self.assertContains(
        response,
        '<span class="glyphicon glyphicon-eye-close">'
    )
    self.assertContains(
        response,
        'Show Essay to students'
    )
    assessment.available = True
    assessment.save()
    response = self.client.get(module.get_absolute_url())
    # Visible assessment: open-eye icon plus a "Hide" action.
    self.assertContains(
        response,
        '<span class="glyphicon glyphicon-eye-open">'
    )
    self.assertContains(
        response,
        'Hide Essay from students'
    )
def test_only_assessments_with_marksheet_show_availability(self):
    """Only assessments that have a marksheet type get a show/hide toggle."""
    module = create_module()
    student = create_student()
    student.modules.add(module)
    performance = Performance.objects.create(
        student=student, module=module)
    # Essay has a marksheet type; Exam does not.
    # NOTE(review): both are created without module=...; they are attached
    # via module.assessments.add() below — confirm against the model.
    assessment1 = Assessment.objects.create(
        title="Essay",
        value=50,
        available=False,
        marksheet_type="Something"
    )
    assessment2 = Assessment.objects.create(
        title="Exam",
        value=50,
        available=False,
    )
    module.assessments.add(assessment1)
    module.assessments.add(assessment2)
    response = self.client.get(module.get_absolute_url())
    self.assertContains(
        response,
        'Show Essay to students'
    )
    self.assertNotContains(
        response,
        'Show Exam to students'
    )
def test_resit_menu_shows_when_required(self):
    """The resit dropdown appears once any result requires a resit."""
    stuff = set_up_stuff()
    module = stuff[0]
    module.foundational = True
    module.save()
    # Two QLD students on a foundational module.
    student1 = stuff[1]
    student1.qld = True
    student1.save()
    student2 = stuff[2]
    student2.qld = True
    student2.save()
    performance1 = Performance.objects.get(
        module=module, student=student1
    )
    performance2 = Performance.objects.get(
        module=module, student=student2
    )
    assessment1 = Assessment.objects.create(
        module=module,
        title='Essay',
        value=50
    )
    assessment2 = Assessment.objects.create(
        module=module,
        title='Presentation',
        value=50
    )
    result1_1 = AssessmentResult.objects.create(
        assessment=assessment1,
        mark=42
    )
    result1_2 = AssessmentResult.objects.create(
        assessment=assessment2,
        mark=40
    )
    performance1.assessment_results.add(result1_1)
    performance1.assessment_results.add(result1_2)
    result2_1 = AssessmentResult.objects.create(
        assessment=assessment1,
        mark=60,
    )
    result2_2 = AssessmentResult.objects.create(
        assessment=assessment2,
        mark=80
    )
    performance2.assessment_results.add(result2_1)
    performance2.assessment_results.add(result2_2)
    response = self.client.get(module.get_absolute_url())
    resit_string = (
        '<a class = "btn btn-default dropdown-toggle" data-toggle' +
        '="dropdown">Resits <span class="caret"></span></a>'
    )
    # With these marks (42/40 and 60/80) no resit is required yet.
    self.assertNotContains(
        response,
        resit_string
    )
    # Dropping one mark to 0 triggers the resit menu.
    result1_1.mark = 0
    result1_1.save()
    response = self.client.get(module.get_absolute_url())
    self.assertContains(
        response,
        resit_string
    )
    # Restoring that mark but failing another (39) also triggers it.
    result1_1.mark = 50
    result1_1.save()
    result2_1.mark = 39
    result2_1.save()
    response = self.client.get(module.get_absolute_url())
    self.assertContains(
        response,
        resit_string
    )
    def test_two_resit_with_feedback_symbols_show(self):
        """When a student fails both assessments of a foundational
        module, the module page links to the resit marksheet of each
        one (both assessments define a resit_marksheet_type).
        """
        stuff = set_up_stuff()
        module = stuff[0]
        module.foundational = True
        module.save()
        student1 = stuff[1]
        student1.qld = True
        student1.save()
        performance1 = Performance.objects.get(
            module=module, student=student1
        )
        assessment1 = Assessment.objects.create(
            module=module,
            title='Essay',
            value=50,
            marksheet_type='ESSAY',
            resit_marksheet_type='ESSAY',
        )
        assessment2 = Assessment.objects.create(
            module=module,
            title='Presentation',
            value=50,
            marksheet_type='PRESENTATION',
            resit_marksheet_type='PRESENTATION',
        )
        # First-attempt marks of 38 and 36 — both failed, both resat.
        result1_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=38,
            resit_mark=80
        )
        result1_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=36,
            resit_mark=80
        )
        performance1.assessment_results.add(result1_1)
        performance1.assessment_results.add(result1_2)
        # Expected opening tags of the resit feedback links.
        resit_string_essay = (
            '<a href="/individual_feedback/' +
            stuff[0].code +
            '/' +
            str(stuff[0].year) +
            '/' +
            assessment1.slug +
            '/' +
            stuff[1].student_id +
            '/resit/">'
        )
        resit_string_presentation = (
            '<a href="/individual_feedback/' +
            stuff[0].code +
            '/' +
            str(stuff[0].year) +
            '/' +
            assessment2.slug +
            '/' +
            stuff[1].student_id +
            '/resit/">'
        )
        response = self.client.get(module.get_absolute_url())
        self.assertContains(
            response,
            resit_string_essay
        )
        self.assertContains(
            response,
            resit_string_presentation
        )
    def test_two_resit_with_feedback_symbols_show_with_3_assessments(self):
        """With three assessments, resit feedback links appear only for
        the two that were actually resat (have a resit_mark); the third,
        first-attempt-only result gets no resit link.
        """
        stuff = set_up_stuff()
        module = stuff[0]
        module.save()
        student1 = stuff[1]
        student1.save()
        performance1 = Performance.objects.get(
            module=module, student=student1
        )
        assessment1 = Assessment.objects.create(
            module=module,
            title='Essay',
            value=25,
            marksheet_type='ESSAY',
            resit_marksheet_type='ESSAY',
        )
        assessment2 = Assessment.objects.create(
            module=module,
            title='Presentation',
            value=25,
            marksheet_type='PRESENTATION',
            resit_marksheet_type='PRESENTATION',
        )
        assessment3 = Assessment.objects.create(
            module=module,
            title='Second Essay',
            value=50,
            marksheet_type='ESSAY',
            resit_marksheet_type='ESSAY',
        )
        # Failed first attempts with resit marks recorded.
        result1_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=30,
            resit_mark=80
        )
        result1_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=28,
            resit_mark=80
        )
        # First attempt only — no resit for this one.
        result1_3 = AssessmentResult.objects.create(
            assessment=assessment3,
            mark=40,
        )
        performance1.assessment_results.add(result1_1)
        performance1.assessment_results.add(result1_2)
        performance1.assessment_results.add(result1_3)
        # Expected opening tags of the resit feedback links.
        resit_string_essay = (
            '<a href="/individual_feedback/' +
            stuff[0].code +
            '/' +
            str(stuff[0].year) +
            '/' +
            assessment1.slug +
            '/' +
            stuff[1].student_id +
            '/resit/">'
        )
        resit_string_presentation = (
            '<a href="/individual_feedback/' +
            stuff[0].code +
            '/' +
            str(stuff[0].year) +
            '/' +
            assessment2.slug +
            '/' +
            stuff[1].student_id +
            '/resit/">'
        )
        resit_string_second_essay = (
            '<a href="/individual_feedback/' +
            stuff[0].code +
            '/' +
            str(stuff[0].year) +
            '/' +
            assessment3.slug +
            '/' +
            stuff[1].student_id +
            '/resit/">'
        )
        response = self.client.get(module.get_absolute_url())
        self.assertContains(
            response,
            resit_string_essay
        )
        self.assertContains(
            response,
            resit_string_presentation
        )
        self.assertNotContains(
            response,
            resit_string_second_essay
        )
class AddStudentsToModuleTest(TeacherUnitTest):
    """Tests for the function to add students to a module"""

    def test_add_students_to_module_uses_right_template(self):
        """The add-students page renders its dedicated template."""
        response = self.client.get(create_module().get_add_students_url())
        self.assertTemplateUsed(response, 'add_students_to_module.html')

    def test_only_students_from_same_subject_areas_and_year_are_shown(self):
        """Only active students whose course shares the module's subject
        area and whose year matches appear as candidates."""
        cartoon_area = create_subject_area()
        plotting_area = SubjectArea.objects.create(name="Evil Plotting")
        cartoon_course = Course.objects.create(title="BA in Cartoon Studies")
        cartoon_course.subject_areas.add(cartoon_area)
        cartoon_course.save()
        plotting_course = Course.objects.create(
            title="BA in Evil Plotting")
        plotting_course.subject_areas.add(plotting_area)
        plotting_course.save()
        module = create_module()
        module.subject_areas.add(cartoon_area)
        module.save()
        # Matches subject area and year -> should be listed.
        matching_student = create_student()
        matching_student.course = cartoon_course
        matching_student.year = 1
        matching_student.save()
        # Wrong subject area.
        Student.objects.create(
            last_name="Duck",
            first_name="Daffy",
            student_id="dd42",
            course=plotting_course,
            year=1
        )
        # Wrong year.
        Student.objects.create(
            last_name="Pig",
            first_name="Porky",
            student_id="pp2323",
            course=cartoon_course,
            year=2
        )
        # Inactive student.
        Student.objects.create(
            last_name="Runner",
            first_name="Road",
            student_id="rr42",
            course=cartoon_course,
            year=1,
            active=False
        )
        response = self.client.get(module.get_add_students_url())
        self.assertContains(response, 'Bunny')
        for absent_name in ('Duck', 'Pig', 'Runner'):
            self.assertNotContains(response, absent_name)

    def test_submitting_an_empty_form_does_not_break_it(self):
        """POSTing an empty form redirects instead of raising."""
        module = create_module()
        url = '/add_students_to_module/%s/%s' % (module.code, module.year)
        response = self.client.post(url, data={})
        self.assertIn(response.status_code, [301, 302])
class RemoveStudentFromModuleTest(TeacherUnitTest):
    """Tests for the function to remove a student from a module"""

    def _removal_url(self, module, student):
        # URL of the removal view for this module/student pair.
        return (
            '/remove_student_from_module/%s/%s/%s/'
            % (module.code, module.year, student.student_id)
        )

    def test_student_removed_from_module_is_not_in_module_anymore(self):
        """Removing a student deletes the performance record and the
        student's link to the module."""
        module = create_module()
        student = create_student()
        student.modules.add(module)
        Performance.objects.create(module=module, student=student)
        request = self.factory.get(self._removal_url(module, student))
        request.user = self.user
        remove_student_from_module(
            request, module.code, module.year, student.student_id)
        self.assertEqual(Performance.objects.count(), 0)
        self.assertEqual(student.modules.count(), 0)

    def test_assessment_results_are_deleted(self):
        """Removal also wipes the student's assessment results."""
        module = create_module()
        student = create_student()
        student.modules.add(module)
        performance = Performance.objects.create(
            module=module, student=student)
        essay = Assessment.objects.create(
            module=module,
            title='Essay'
        )
        result = AssessmentResult.objects.create(assessment=essay)
        self.assertEqual(AssessmentResult.objects.count(), 1)
        performance.assessment_results.add(result)
        self.client.get(self._removal_url(module, student))
        self.assertEqual(AssessmentResult.objects.count(), 0)

    def test_feedback_gets_deleted(self):
        """Removal also wipes individual feedback attached to the
        student's assessment results."""
        module = create_module()
        student = create_student()
        student.modules.add(module)
        performance = Performance.objects.create(
            module=module, student=student)
        essay = Assessment.objects.create(
            module=module,
            title='Essay'
        )
        result = AssessmentResult.objects.create(assessment=essay)
        performance.assessment_results.add(result)
        IndividualFeedback.objects.create(
            assessment_result=result,
            attempt='first'
        )
        self.client.get(self._removal_url(module, student))
        self.assertEqual(IndividualFeedback.objects.count(), 0)
class DeleteModuleTest(TeacherUnitTest):
    """Tests that the Delete Module Function removes performances and marks"""

    def _populate(self, module):
        # Attach one student with a marked 100% dissertation.
        student = create_student()
        student.modules.add(module)
        performance = Performance.objects.create(
            module=module, student=student)
        dissertation = Assessment.objects.create(
            module=module,
            title="Dissertation",
            value=100,
        )
        dissertation_mark = AssessmentResult.objects.create(
            assessment=dissertation,
            mark=60
        )
        performance.assessment_results.add(dissertation_mark)

    def _assert_counts(self, modules, performances, assessments, results):
        # Check record counts after the delete attempt; the student
        # record itself must always survive.
        self.assertEqual(Module.objects.count(), modules)
        self.assertEqual(Student.objects.count(), 1)
        self.assertEqual(Performance.objects.count(), performances)
        self.assertEqual(Assessment.objects.count(), assessments)
        self.assertEqual(AssessmentResult.objects.count(), results)

    def test_deleting_module_deletes_everything_else(self):
        """A teacher of the module can delete it, cascading to
        performances, assessments and results."""
        module = create_module()
        module.teachers.add(self.user.staff)
        self._populate(module)
        self.client.get(module.get_delete_self_url())
        self._assert_counts(0, 0, 0, 0)

    def test_only_instructor_or_admin_can_delete_a_module(self):
        """A user who does not teach the module cannot delete it, so
        every record survives."""
        module = create_module()
        self._populate(module)
        self.client.get(module.get_delete_self_url())
        self._assert_counts(1, 1, 1, 1)
class SeminarGroupTest(TeacherUnitTest):
    """Tests involving the seminar group setup"""
    def test_seminar_groups_can_be_saved(self):
        """Posting explicit student_id -> group assignments saves them
        on the matching Performance records."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student2 = stuff[2]
        student3 = stuff[3]
        request = self.factory.post(
            module.get_seminar_groups_url(),
            data={
                'action': 'Save students',
                student1.student_id: '1',
                student2.student_id: '2',
                student3.student_id: '1'
            }
        )
        request.user = self.user
        response = assign_seminar_groups(request, module.code, module.year)
        performance1 = Performance.objects.get(student=student1, module=module)
        performance2 = Performance.objects.get(student=student2, module=module)
        performance3 = Performance.objects.get(student=student3, module=module)
        self.assertEqual(performance1.seminar_group, 1)
        self.assertEqual(performance2.seminar_group, 2)
        self.assertEqual(performance3.seminar_group, 1)
    def test_seminar_groups_can_be_randomized_ignoring_previous_values(self):
        """'Go' with ignore=True randomises all five students across the
        requested number of groups; every group 1-3 gets used."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student2 = stuff[2]
        student3 = stuff[3]
        student4 = stuff[4]
        student5 = stuff[5]
        request = self.factory.post(
            module.get_seminar_groups_url(),
            data={
                'action': 'Go',
                'ignore': True,
                'number_of_groups': '3'
            }
        )
        request.user = self.user
        response = assign_seminar_groups(request, module.code, module.year)
        performance1 = Performance.objects.get(student=student1, module=module)
        performance2 = Performance.objects.get(student=student2, module=module)
        performance3 = Performance.objects.get(student=student3, module=module)
        performance4 = Performance.objects.get(student=student4, module=module)
        performance5 = Performance.objects.get(student=student5, module=module)
        # Everyone must have been assigned to some group...
        self.assertNotEqual(performance1.seminar_group, None)
        self.assertNotEqual(performance2.seminar_group, None)
        self.assertNotEqual(performance3.seminar_group, None)
        self.assertNotEqual(performance4.seminar_group, None)
        self.assertNotEqual(performance5.seminar_group, None)
        # ...and all three groups must be in use.
        list_of_seminar_groups = []
        list_of_seminar_groups.append(performance1.seminar_group)
        list_of_seminar_groups.append(performance2.seminar_group)
        list_of_seminar_groups.append(performance3.seminar_group)
        list_of_seminar_groups.append(performance4.seminar_group)
        list_of_seminar_groups.append(performance5.seminar_group)
        self.assertTrue(1 in list_of_seminar_groups)
        self.assertTrue(2 in list_of_seminar_groups)
        self.assertTrue(3 in list_of_seminar_groups)
    def test_seminar_groups_can_be_randomized_leaving_previous_values(self):
        """Randomising without the ignore flag keeps existing group
        assignments (student 1's saved group, student 2's posted group)
        and only fills in the unassigned students."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        performance1 = Performance.objects.get(student=student1, module=module)
        performance1.seminar_group = 1
        performance1.save()
        student2 = stuff[2]
        student3 = stuff[3]
        student4 = stuff[4]
        student5 = stuff[5]
        request = self.factory.post(
            module.get_seminar_groups_url(),
            data={
                student2.student_id: '2',
                'action': 'Go',
                'number_of_groups': '3'
            }
        )
        request.user = self.user
        response = assign_seminar_groups(request, module.code, module.year)
        performance1 = Performance.objects.get(student=student1, module=module)
        performance2 = Performance.objects.get(student=student2, module=module)
        performance3 = Performance.objects.get(student=student3, module=module)
        performance4 = Performance.objects.get(student=student4, module=module)
        performance5 = Performance.objects.get(student=student5, module=module)
        self.assertEqual(performance1.seminar_group, 1)
        self.assertEqual(performance2.seminar_group, 2)
        self.assertNotEqual(performance3.seminar_group, None)
        self.assertNotEqual(performance4.seminar_group, None)
        self.assertNotEqual(performance5.seminar_group, None)
    def test_seminar_group_overview_uses_correct_template(self):
        """The overview page renders its dedicated template."""
        module = create_module()
        response = self.client.get(module.get_seminar_group_overview_url())
        self.assertTemplateUsed(response, 'seminar_group_overview.html')
    def test_seminar_group_overview_is_correct(self):
        """Each student appears in the HTML container of exactly the
        seminar group they were assigned to."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student2 = stuff[2]
        student3 = stuff[3]
        student4 = stuff[4]
        student5 = stuff[5]
        performance1 = Performance.objects.get(student=student1, module=module)
        performance1.seminar_group = 1
        performance1.save()
        performance2 = Performance.objects.get(student=student2, module=module)
        performance2.seminar_group = 2
        performance2.save()
        performance3 = Performance.objects.get(student=student3, module=module)
        performance3.seminar_group = 1
        performance3.save()
        performance4 = Performance.objects.get(student=student4, module=module)
        performance4.seminar_group = 2
        performance4.save()
        performance5 = Performance.objects.get(student=student5, module=module)
        performance5.seminar_group = 1
        performance5.save()
        request = self.factory.get(module.get_seminar_group_overview_url())
        request.user = self.user
        response = seminar_group_overview(request, module.code, module.year)
        # Parse the rendered page and inspect each group's container.
        soup = BeautifulSoup(response.content)
        group_1 = str(soup.select('#group_1')[0])
        group_2 = str(soup.select('#group_2')[0])
        self.assertIn(student1.short_name(), group_1)
        self.assertIn(student2.short_name(), group_2)
        self.assertIn(student3.short_name(), group_1)
        self.assertIn(student4.short_name(), group_2)
        self.assertIn(student5.short_name(), group_1)
class AssessmentTest(TeacherUnitTest):
    """Tests involving setting and deleting of assessments"""
    def test_assessments_page_uses_right_template(self):
        """The assessment setup page renders its dedicated template."""
        module = set_up_stuff()[0]
        response = self.client.get(module.get_assessment_url())
        self.assertTemplateUsed(response, 'assessment.html')
    def test_assessments_can_be_added_to_module(self):
        """POSTing title and value creates a new Assessment record."""
        module = set_up_stuff()[0]
        request = self.factory.post(
            module.get_assessment_url(),
            data={
                'title': 'Hunting Exercise',
                'value': 40,
            }
        )
        request.user = self.user
        # NOTE: the local name shadows the `assessment` view imported at
        # module level — intentional here, it is the view being called.
        assessment(request, module.code, module.year)
        assessment_out = Assessment.objects.first()
        self.assertEqual(assessment_out.title, 'Hunting Exercise')
        self.assertEqual(assessment_out.value, 40)
    def test_assessment_can_be_deleted(self):
        """Deleting an assessment also deletes its results."""
        stuff = set_up_stuff()
        module = stuff[0]
        performance = Performance.objects.first()
        assessment = Assessment.objects.create(
            module=module,
            title="Hunting Exercise",
            value=40
        )
        result = AssessmentResult.objects.create(
            assessment=assessment,
            mark=40
        )
        performance.assessment_results.add(result)
        self.assertEqual(Assessment.objects.count(), 1)
        self.assertEqual(AssessmentResult.objects.count(), 1)
        request = self.factory.get(assessment.get_delete_url())
        request.user = self.user
        delete_assessment(request, module.code, module.year, assessment.slug)
        self.assertEqual(Assessment.objects.count(), 0)
        self.assertEqual(AssessmentResult.objects.count(), 0)
    def test_toggle_assessment_availability_works(self):
        """The toggle view flips the availability flag for the first
        attempt, the resit and the second resit independently."""
        module = create_module()
        assessment = Assessment.objects.create(
            module=module,
            title='Hunting Exercise',
            value=100
        )
        self.assertFalse(assessment.available)
        # First attempt: off -> on -> off.
        request = self.factory.get(assessment.get_toggle_availability_url())
        request.user = self.user
        response = toggle_assessment_availability(
            request, module.code, module.year, assessment.slug, 'first')
        assessment_out = Assessment.objects.first()
        self.assertTrue(assessment_out.available)
        request = self.factory.get(assessment.get_toggle_availability_url())
        request.user = self.user
        response = toggle_assessment_availability(
            request, module.code, module.year, assessment.slug, 'first')
        assessment_out = Assessment.objects.first()
        self.assertFalse(assessment_out.available)
        # Resit: off -> on -> off.
        request = self.factory.get(
            assessment.get_toggle_availability_url('resit'))
        request.user = self.user
        response = toggle_assessment_availability(
            request, module.code, module.year, assessment.slug, 'resit')
        assessment_out = Assessment.objects.first()
        self.assertTrue(assessment_out.resit_available)
        request = self.factory.get(
            assessment.get_toggle_availability_url('resit'))
        request.user = self.user
        response = toggle_assessment_availability(
            request, module.code, module.year, assessment.slug, 'resit')
        assessment_out = Assessment.objects.first()
        self.assertFalse(assessment_out.resit_available)
        # Second resit: off -> on -> off.
        request = self.factory.get(
            assessment.get_toggle_availability_url('second_resit'))
        request.user = self.user
        response = toggle_assessment_availability(
            request, module.code, module.year, assessment.slug, 'second_resit')
        assessment_out = Assessment.objects.first()
        self.assertTrue(assessment_out.second_resit_available)
        request = self.factory.get(
            assessment.get_toggle_availability_url('second_resit'))
        request.user = self.user
        response = toggle_assessment_availability(
            request, module.code, module.year, assessment.slug, 'second_resit')
        assessment_out = Assessment.objects.first()
        self.assertFalse(assessment_out.second_resit_available)
class AttendanceTest(TeacherUnitTest):
    """Tests around the attendance function"""
    def test_attendance_uses_correct_template(self):
        """The attendance page renders its dedicated template."""
        module = set_up_stuff()[0]
        response = self.client.get(module.get_attendance_url('all'))
        self.assertTemplateUsed(response, 'attendance.html')
    def test_attendance_form_shows_seminar_group(self):
        """The attendance form lists only the requested seminar group,
        or everyone when 'all' is requested."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student2 = stuff[2]
        student3 = stuff[3]
        student4 = stuff[4]
        student5 = stuff[5]
        performance1 = Performance.objects.get(student=student1, module=module)
        performance2 = Performance.objects.get(student=student2, module=module)
        performance3 = Performance.objects.get(student=student3, module=module)
        performance4 = Performance.objects.get(student=student4, module=module)
        performance5 = Performance.objects.get(student=student5, module=module)
        # Students 1-3 in group 1, students 4-5 in group 2.
        performance1.seminar_group = 1
        performance1.save()
        performance2.seminar_group = 1
        performance2.save()
        performance3.seminar_group = 1
        performance3.save()
        performance4.seminar_group = 2
        performance4.save()
        performance5.seminar_group = 2
        performance5.save()
        request = self.factory.get(module.get_attendance_url(1))
        request.user = self.user
        response = attendance(request, module.code, module.year, '1')
        self.assertContains(response, student1.last_name)
        self.assertContains(response, student2.last_name)
        self.assertContains(response, student3.last_name)
        self.assertNotContains(response, student4.last_name)
        self.assertNotContains(response, student5.last_name)
        request = self.factory.get(module.get_attendance_url(2))
        request.user = self.user
        response = attendance(request, module.code, module.year, '2')
        self.assertNotContains(response, student1.last_name)
        self.assertNotContains(response, student2.last_name)
        self.assertNotContains(response, student3.last_name)
        self.assertContains(response, student4.last_name)
        self.assertContains(response, student5.last_name)
        request = self.factory.get(module.get_attendance_url('all'))
        request.user = self.user
        response = attendance(request, module.code, module.year, 'all')
        self.assertContains(response, student1.last_name)
        self.assertContains(response, student2.last_name)
        self.assertContains(response, student3.last_name)
        self.assertContains(response, student4.last_name)
        self.assertContains(response, student5.last_name)
    def test_attendance_form_shows_only_active_students(self):
        """Inactive students are excluded from the attendance form even
        when they belong to the requested seminar group."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student2 = stuff[2]
        student3 = stuff[3]
        student4 = stuff[4]
        student5 = stuff[5]
        student5.active = False
        student5.save()
        performance1 = Performance.objects.get(student=student1, module=module)
        performance2 = Performance.objects.get(student=student2, module=module)
        performance3 = Performance.objects.get(student=student3, module=module)
        performance4 = Performance.objects.get(student=student4, module=module)
        performance5 = Performance.objects.get(student=student5, module=module)
        performance1.seminar_group = 1
        performance1.save()
        performance2.seminar_group = 1
        performance2.save()
        performance3.seminar_group = 1
        performance3.save()
        performance4.seminar_group = 1
        performance4.save()
        performance5.seminar_group = 1
        performance5.save()
        request = self.factory.get(module.get_attendance_url(1))
        request.user = self.user
        response = attendance(request, module.code, module.year, '1')
        self.assertContains(response, student1.last_name)
        self.assertContains(response, student2.last_name)
        self.assertContains(response, student3.last_name)
        self.assertContains(response, student4.last_name)
        self.assertNotContains(response, student5.last_name)
    def test_attendance_can_be_added_through_form(self):
        """POST keys of the form <student_id>_<week> with values
        p(resent)/a(bsent)/e(xcused) are saved on the performances;
        unsubmitted weeks stay None."""
        stuff = set_up_stuff()
        module = stuff[0]
        request = self.factory.post(
            module.get_attendance_url('all'),
            data={
                'bb23_1': 'p',
                'bb23_2': 'a',
                'bb23_3': 'e',
                'dd42_1': 'p',
                'dd42_3': 'a',
                'save': 'Save Changes for all weeks'
            }
        )
        request.user = self.user
        response = attendance(request, module.code, module.year, 'all')
        student1_out = Student.objects.get(student_id='bb23')
        performance1_out = Performance.objects.get(
            student=student1_out, module=module)
        student2_out = Student.objects.get(student_id='dd42')
        performance2_out = Performance.objects.get(
            student=student2_out, module=module)
        self.assertEqual(performance1_out.attendance_for(1), 'p')
        self.assertEqual(performance1_out.attendance_for(2), 'a')
        self.assertEqual(performance1_out.attendance_for(3), 'e')
        self.assertEqual(performance2_out.attendance_for(1), 'p')
        self.assertEqual(performance2_out.attendance_for(2), None)
        self.assertEqual(performance2_out.attendance_for(3), 'a')
    def test_attendance_changes_are_ignored_for_hidden_weeks(self):
        """When the save button names a single week ('Save Changes for
        Week 2'), only that week's values are written; data posted for
        other weeks is ignored and existing entries are kept."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        performance1 = Performance.objects.get(student=student1, module=module)
        # Pre-existing entry for week 1 that must survive the save.
        performance1.save_attendance('1', 'e')
        request = self.factory.post(
            module.get_attendance_url('all'),
            data={
                'bb23_1': 'p',
                'bb23_2': 'a',
                'bb23_3': 'e',
                'dd42_1': 'p',
                'dd42_3': 'a',
                'save': 'Save Changes for Week 2'
            }
        )
        request.user = self.user
        attendance(request, module.code, module.year, 'all')
        student1_out = Student.objects.get(student_id='bb23')
        performance1_out = Performance.objects.get(
            student=student1_out, module=module)
        student2_out = Student.objects.get(student_id='dd42')
        performance2_out = Performance.objects.get(
            student=student2_out, module=module)
        self.assertEqual(performance1_out.attendance_for(1), 'e')
        self.assertEqual(performance1_out.attendance_for(2), 'a')
        self.assertEqual(performance1_out.attendance_for(3), None)
        self.assertEqual(performance2_out.attendance_for(1), None)
        self.assertEqual(performance2_out.attendance_for(2), None)
        self.assertEqual(performance2_out.attendance_for(3), None)
class MarkAllAssessmentsTest(TeacherUnitTest):
    """Testing the function to mark all for one assessment openly."""
    def test_mark_all_template_is_used(self):
        """The mark-all page renders its dedicated template."""
        stuff = set_up_stuff()
        module = stuff[0]
        student = stuff[1]
        assessment = Assessment.objects.create(
            module=module, title="Essay", value=100)
        response = self.client.get(assessment.get_mark_all_url())
        self.assertTemplateUsed(response, 'mark_all.html')
    def test_all_students_are_shown_in_mark_all_page(self):
        """Every student in the module appears; students not enrolled
        in the module do not."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student2 = stuff[2]
        student3 = stuff[3]
        # Not enrolled in the module.
        other_student = Student.objects.create(
            first_name="Road",
            last_name="Runner",
            student_id="rr42"
        )
        assessment = Assessment.objects.create(
            module=module, title="Essay", value=100)
        request = self.factory.get(assessment.get_mark_all_url())
        request.user = self.user
        response = mark_all(
            request,
            module.code,
            module.year,
            'essay',
            'first'
        )
        self.assertContains(response, student1.name())
        self.assertContains(response, student2.name())
        self.assertContains(response, student3.name())
        self.assertNotContains(response, other_student.name())
    def test_only_students_who_need_resit_show_in_mark_all_resit_page(self):
        """The resit mark-all page lists only students who need to
        resit that particular assessment (failed it, failed the module,
        or have concessions for it)."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student2 = stuff[2]
        student3 = stuff[3]
        student4 = stuff[4]
        assessment1 = Assessment.objects.create(
            module=module, title="Essay", value=50)
        assessment2 = Assessment.objects.create(
            module=module, title="Exam", value=50)
        performance1 = Performance.objects.get(
            module=module,
            student=student1
        )
        performance2 = Performance.objects.get(
            module=module,
            student=student2
        )
        performance3 = Performance.objects.get(
            module=module,
            student=student3
        )
        performance4 = Performance.objects.get(
            module=module,
            student=student4
        )
        result_1_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=60
        )
        performance1.assessment_results.add(result_1_1)
        result_1_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=60
        )
        performance1.assessment_results.add(result_1_2)
        # Student 1 clearly passed and should not be in either
        result_2_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=30
        )
        performance2.assessment_results.add(result_2_1)
        result_2_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=30
        )
        performance2.assessment_results.add(result_2_2)
        # Student 2 clearly failed and should be in both
        result_3_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=35
        )
        performance3.assessment_results.add(result_3_1)
        result_3_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=40
        )
        performance3.assessment_results.add(result_3_2)
        # Student 3 failed (not so clearly) and should be in 1 only
        request = self.factory.get(
            assessment1.get_mark_all_url(attempt='resit')
        )
        result_4_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=60,
            concessions='G'
        )
        performance4.assessment_results.add(result_4_1)
        result_4_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=60
        )
        performance4.assessment_results.add(result_4_2)
        # Student 4 has concessions for the passed essay and should be in 1
        request.user = self.user
        response1 = mark_all(
            request,
            module.code,
            module.year,
            'essay',
            'resit'
        )
        self.assertNotContains(response1, student1.name())
        self.assertContains(response1, student2.name())
        self.assertContains(response1, student3.name())
        self.assertContains(response1, student4.name())
        request = self.factory.get(
            assessment2.get_mark_all_url(attempt='resit')
        )
        request.user = self.user
        response2 = mark_all(
            request,
            module.code,
            module.year,
            'exam',
            'resit'
        )
        self.assertNotContains(response2, student1.name())
        self.assertContains(response2, student2.name())
        self.assertNotContains(response2, student3.name())
        self.assertNotContains(response2, student4.name())
    def test_existing_results_show_up_in_mark_all_page(self):
        """Previously entered marks pre-fill the form inputs and are
        echoed as 'Previously: …' next to them."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        assessment1 = Assessment.objects.create(
            module=module, title="Essay 1", value=50)
        assessment2 = Assessment.objects.create(
            module=module, title="Essay 2", value=50)
        performance1 = Performance.objects.get(
            module=module, student=student1)
        ar1_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=50
        )
        ar1_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=60
        )
        performance1.assessment_results.add(ar1_1)
        performance1.assessment_results.add(ar1_2)
        request = self.factory.get(assessment1.get_mark_all_url())
        request.user = self.user
        response = mark_all(
            request,
            module.code,
            module.year,
            'essay-1',
            'first'
        )
        self.assertContains(response, 60)
        # Exact markup of the pre-filled mark input for essay-1.
        html = (
            '<input class="form-control assessment_mark" type="number" ' +
            'min="0" max="100" id="essay-1_' +
            student1.student_id +
            '" name="mark_' +
            student1.student_id +
            '" type="number" value="50" /><small>Previously: 50</small>'
        )
        self.assertContains(response, html)
    def test_marks_can_be_saved_with_existing_ar_objects(self):
        """Posted marks overwrite values on pre-existing
        AssessmentResult records."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student2 = stuff[2]
        assessment1 = Assessment.objects.create(
            module=module, title="Essay 1", value=50)
        assessment2 = Assessment.objects.create(
            module=module, title="Essay 2", value=50)
        result1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=50
        )
        performance1 = Performance.objects.get(module=module, student=student1)
        performance1.assessment_results.add(result1)
        result2 = AssessmentResult.objects.create(assessment=assessment1)
        performance2 = Performance.objects.get(module=module, student=student2)
        performance2.assessment_results.add(result2)
        id1 = 'mark_' + student1.student_id
        id2 = 'mark_' + student2.student_id
        request = self.factory.post(
            assessment1.get_mark_all_url(),
            data={
                id1: '20',
                id2: '40'
            }
        )
        request.user = self.user
        response = mark_all(
            request,
            module.code,
            module.year,
            'essay-1',
            'first'
        )
        performance1_out = Performance.objects.get(
            module=module, student=student1)
        self.assertEqual(
            performance1_out.get_assessment_result('essay-1', 'first'),
            20
        )
        performance2_out = Performance.objects.get(
            module=module, student=student2)
        self.assertEqual(
            performance2_out.get_assessment_result('essay-1', 'first'),
            40
        )
    def test_marks_can_be_saved_without_existing_ar_objects(self):
        """Posted marks create AssessmentResult records on the fly when
        none exist yet."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student2 = stuff[2]
        assessment1 = Assessment.objects.create(
            module=module, title="Essay 1", value=50)
        assessment2 = Assessment.objects.create(
            module=module, title="Essay 2", value=50)
        id1 = 'mark_' + student1.student_id
        id2 = 'mark_' + student2.student_id
        request = self.factory.post(
            assessment1.get_mark_all_url(),
            data={
                id1: '20',
                id2: '40'
            }
        )
        request.user = self.user
        response = mark_all(
            request,
            module.code,
            module.year,
            'essay-1',
            'first'
        )
        performance1_out = Performance.objects.get(
            module=module, student=student1)
        self.assertEqual(
            performance1_out.get_assessment_result('essay-1', 'first'),
            20
        )
        performance2_out = Performance.objects.get(
            module=module, student=student2)
        self.assertEqual(
            performance2_out.get_assessment_result('essay-1', 'first'),
            40
        )
class MarkAllAssessmentsAnonymouslyTest(TeacherUnitTest):
"""Testing the function to mark all for one assessment anonymously."""
    def test_only_exam_ids_are_shown_if_anonymous_is_set(self):
        """The anonymous mark-all page identifies students solely by
        exam ID — names and student IDs must never appear.
        """
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student1.exam_id = '1234'
        student1.save()
        student2 = stuff[2]
        student2.exam_id = '2345'
        student2.save()
        student3 = stuff[3]
        student3.exam_id = '3456'
        student3.save()
        assessment = Assessment.objects.create(
            module=module, title="Essay", value=100)
        request = self.factory.get(assessment.get_mark_all_url(anonymous=True))
        request.user = self.user
        response = mark_all_anonymously(
            request,
            module.code,
            module.year,
            'essay',
            'first',
        )
        self.assertContains(response, student1.exam_id)
        self.assertContains(response, student2.exam_id)
        self.assertContains(response, student3.exam_id)
        # No identifying information may leak into the page.
        self.assertNotContains(response, student1.first_name)
        self.assertNotContains(response, student1.last_name)
        self.assertNotContains(response, student1.student_id)
        self.assertNotContains(response, student2.first_name)
        self.assertNotContains(response, student2.last_name)
        self.assertNotContains(response, student2.student_id)
        self.assertNotContains(response, student3.first_name)
        self.assertNotContains(response, student3.last_name)
        self.assertNotContains(response, student3.student_id)
def test_anonymous_marks_can_be_saved_with_existing_ar_objects(self):
stuff = set_up_stuff()
module = stuff[0]
student1 = stuff[1]
student1.exam_id = '1234'
student1.save()
student2 = stuff[2]
student2.exam_id = '2345'
student2.save()
assessment1 = Assessment.objects.create(
module=module, title="Essay 1", value=50)
assessment2 = Assessment.objects.create(
module=module, title="Essay 2", value=50)
result1 = AssessmentResult.objects.create(
assessment=assessment1,
mark=50
)
performance1 = Performance.objects.get(module=module, student=student1)
performance1.assessment_results.add(result1)
result2 = AssessmentResult.objects.create(assessment=assessment1)
performance2 = Performance.objects.get(module=module, student=student2)
performance2.assessment_results.add(result2)
id1 = 'mark_' + student1.exam_id
id2 = 'mark_' + student2.exam_id
request = self.factory.post(
assessment1.get_mark_all_url(anonymous=True),
data={
id1: '20',
id2: '40'
}
)
request.user = self.user
response = mark_all_anonymously(
request,
module.code,
module.year,
'essay-1',
'first'
)
performance1_out = Performance.objects.get(
module=module, student=student1)
self.assertEqual(
performance1_out.get_assessment_result('essay-1', 'first'),
20
)
performance2_out = Performance.objects.get(
module=module, student=student2)
self.assertEqual(
performance2_out.get_assessment_result('essay-1', 'first'),
40
)
def test_anonymous_marks_can_be_saved_without_existing_ar_objects(self):
stuff = set_up_stuff()
module = stuff[0]
student1 = stuff[1]
student1.exam_id = '1234'
student1.save()
student2 = stuff[2]
student2.exam_id = '2345'
student2.save()
assessment1 = Assessment.objects.create(
module=module, title="Essay 1", value=50)
assessment2 = Assessment.objects.create(
module=module, title="Essay 2", value=50)
id1 = 'mark_' + student1.exam_id
id2 = 'mark_' + student2.exam_id
request = self.factory.post(
assessment1.get_mark_all_url(anonymous=True),
data={
id1: '20',
id2: '40'
}
)
request.user = self.user
response = mark_all_anonymously(
request,
module.code,
module.year,
'essay-1',
'first'
)
performance1_out = Performance.objects.get(
module=module, student=student1)
self.assertEqual(
performance1_out.get_assessment_result('essay-1', 'first'),
20
)
performance2_out = Performance.objects.get(
module=module, student=student2)
self.assertEqual(
performance2_out.get_assessment_result('essay-1', 'first'),
40
)
def test_only_students_who_need_resit_show_in_mark_all_resit_a_page(self):
stuff = set_up_stuff()
module = stuff[0]
student1 = stuff[1]
student1.exam_id = '1234'
student1.save()
student2 = stuff[2]
student2.exam_id = '2345'
student2.save()
student3 = stuff[3]
student3.exam_id = '3456'
student3.save()
student4 = stuff[4]
student4.exam_id = '4567'
student4.save()
assessment1 = Assessment.objects.create(
module=module, title="Essay", value=50)
assessment2 = Assessment.objects.create(
module=module, title="Exam", value=50)
performance1 = Performance.objects.get(
module=module,
student=student1
)
performance2 = Performance.objects.get(
module=module,
student=student2
)
performance3 = Performance.objects.get(
module=module,
student=student3
)
performance4 = Performance.objects.get(
module=module,
student=student4
)
result_1_1 = AssessmentResult.objects.create(
assessment=assessment1,
mark=60
)
performance1.assessment_results.add(result_1_1)
result_1_2 = AssessmentResult.objects.create(
assessment=assessment2,
mark=60
)
performance1.assessment_results.add(result_1_2)
# Student 1 clearly passed and should not be in either
result_2_1 = AssessmentResult.objects.create(
assessment=assessment1,
mark=30
)
performance2.assessment_results.add(result_2_1)
result_2_2 = AssessmentResult.objects.create(
assessment=assessment2,
mark=30
)
performance2.assessment_results.add(result_2_2)
# Student 2 clearly failed and should be in both
result_3_1 = AssessmentResult.objects.create(
assessment=assessment1,
mark=35
)
performance3.assessment_results.add(result_3_1)
result_3_2 = AssessmentResult.objects.create(
assessment=assessment2,
mark=40
)
performance3.assessment_results.add(result_3_2)
# Student 3 failed (not so clearly) and should be in 1 only
request = self.factory.get(
assessment1.get_mark_all_url(attempt='resit')
)
result_4_1 = AssessmentResult.objects.create(
assessment=assessment1,
mark=60,
concessions='G'
)
performance4.assessment_results.add(result_4_1)
result_4_2 = AssessmentResult.objects.create(
assessment=assessment2,
mark=60
)
performance4.assessment_results.add(result_4_2)
# Student 4 has concessions for the passed essay and should be in 1
request.user = self.user
request = self.factory.get(
assessment1.get_mark_all_url(
anonymous=True,
attempt='resit'
)
)
request.user = self.user
response1 = mark_all_anonymously(
request,
module.code,
module.year,
'essay',
'resit',
)
self.assertNotContains(response1, student1.exam_id)
self.assertContains(response1, student2.exam_id)
self.assertContains(response1, student3.exam_id)
self.assertContains(response1, student4.exam_id)
request = self.factory.get(
assessment2.get_mark_all_url(
anonymous=True,
attempt='resit'
)
)
request.user = self.user
response2 = mark_all_anonymously(
request,
module.code,
module.year,
'exam',
'resit'
)
self.assertNotContains(response2, student1.exam_id)
self.assertContains(response2, student2.exam_id)
self.assertNotContains(response2, student3.exam_id)
self.assertNotContains(response2, student4.exam_id)
class AddEditStaffTest(AdminUnitTest):
    """Tests for adding and editing a staff member."""
    def test_staff_can_be_added_new_user_gets_created(self):
        """POSTing the add-staff form creates both a User and a Staff."""
        subject_area = SubjectArea.objects.create(name='Cartoon Studies')
        request = self.factory.post('/add_staff/', data={
            'first_name': 'Elmar',
            'last_name': 'Fudd',
            'email': 'elmar.fudd@acme.edu',
            'role': 'teacher'
        })
        request.user = self.user
        # testing=True presumably suppresses side effects such as sending
        # a welcome email -- TODO confirm against the view implementation.
        add_or_edit_staff(request, testing=True)
        user = User.objects.get(last_name='Fudd')
        staff = Staff.objects.get(user=user)
        self.assertEqual(user.staff, staff)
        self.assertEqual(user.first_name, 'Elmar')
        self.assertEqual(user.email, 'elmar.fudd@acme.edu')
        self.assertEqual(staff.role, 'teacher')
    def test_form_for_existing_staff_shows_right_details(self):
        """The edit form is pre-populated with the staff member's data."""
        user_in = create_user()
        subject_area = SubjectArea.objects.create(name='Cartoon Studies')
        staff_in = Staff.objects.create(user=user_in, role='teacher')
        staff_in.subject_areas.add(subject_area)
        staff_in.save()
        request = self.factory.get(staff_in.get_edit_url())
        request.user = self.user
        response = add_or_edit_staff(request, user_in.username)
        soup = BeautifulSoup(response.content)
        first_name = str(soup.select('#id_first_name')[0]['value'])
        self.assertEqual(first_name, 'Elmar')
        last_name = str(soup.select('#id_last_name')[0]['value'])
        self.assertEqual(last_name, 'Fudd')
        # NOTE(review): variable name is reused here -- this line actually
        # inspects the email input, not the last-name field.
        last_name = str(soup.select('#id_email')[0]['value'])
        self.assertEqual(last_name, 'e.fudd@acme.edu')
        # The current role must be pre-selected in the role dropdown.
        teacher_option = str(soup.find(value='teacher'))
        self.assertTrue('selected="selected"' in teacher_option)
    def test_staff_member_can_be_edited(self):
        """POSTing changed data updates the existing user and staff role."""
        user_in = User.objects.create_user(
            'ef10', 'e.fudd@acme.edu', 'rabbitseason')
        user_in.last_name = 'Fadd'
        user_in.first_name = 'Elmar'
        user_in.save()
        subject_area = SubjectArea.objects.create(name='Cartoon Studies')
        staff_in = Staff.objects.create(user=user_in, role='teacher')
        staff_in.subject_areas.add(subject_area)
        staff_in.save()
        # Corrects the misspelled last name and promotes to admin.
        request = self.factory.post(staff_in.get_edit_url(), data={
            'first_name': 'Elmar',
            'last_name': 'Fudd',
            'email': 'elmar.fudd@acme.edu',
            'role': 'admin'
        })
        request.user = self.user
        add_or_edit_staff(request, user_in.username, testing=True)
        staff_out = Staff.objects.get(user=user_in)
        self.assertEqual(staff_out.user.last_name, 'Fudd')
        self.assertEqual(staff_out.role, 'admin')
class ViewStaffTest(AdminUnitTest):
    """Tests for Viewing Staff Members"""
    def test_staff_view_by_subject_uses_correct_template(self):
        """The by-subject staff list renders with its dedicated template."""
        response = self.client.get('/view_staff_by_subject/')
        self.assertTemplateUsed(response, 'all_staff_by_subject.html')
    def test_staff_view_by_subject_contains_staff(self):
        """Each subject-area table lists exactly the staff assigned to it."""
        subject_area_1 = create_subject_area()
        subject_area_2 = SubjectArea.objects.create(name='Evil Plotting')
        # staff1: only subject area 1.
        staff1 = create_teacher()
        staff1.subject_areas.add(subject_area_1)
        staff1.save()
        # staff2: both subject areas.
        user2 = User.objects.create_user(
            'ys142', 'y.sam@acme.edu', 'squaredance')
        user2.last_name = 'Sam'
        user2.first_name = 'Yosemite'
        user2.save()
        staff2 = Staff.objects.create(user=user2, role='Teacher')
        staff2.subject_areas.add(subject_area_1)
        staff2.subject_areas.add(subject_area_2)
        staff2.save()
        # staff3: only subject area 1, admin role.
        user3 = User.objects.create_user(
            'ta123', 't.avery@acme.edu', 'othergod')
        user3.first_name = 'Tex'
        user3.last_name = 'Avery'
        user3.save()
        staff3 = Staff.objects.create(user=user3, role='Admin')
        staff3.subject_areas.add(subject_area_1)
        staff3.save()
        request = self.factory.get('/view_staff_by_subject/')
        request.user = self.user
        response = view_staff_by_subject(request)
        soup = BeautifulSoup(response.content)
        # Tables are identified by the subject area's slug.
        table1 = str(soup.find(id=subject_area_1.slug))
        self.assertTrue(staff1.name() in table1)
        self.assertTrue(staff2.name() in table1)
        self.assertTrue(staff3.name() in table1)
        table2 = str(soup.find(id=subject_area_2.slug))
        self.assertFalse(staff1.name() in table2)
        self.assertTrue(staff2.name() in table2)
        self.assertFalse(staff3.name() in table2)
    def test_staff_view_by_name_contains_staff(self):
        """The by-name staff list contains every staff member."""
        subject_area_1 = create_subject_area()
        subject_area_2 = SubjectArea.objects.create(name='Evil Plotting')
        staff1 = create_teacher()
        staff1.subject_areas.add(subject_area_1)
        staff1.save()
        user2 = User.objects.create_user(
            'ys142', 'y.sam@acme.edu', 'squaredance')
        user2.last_name = 'Sam'
        user2.first_name = 'Yosemite'
        user2.save()
        staff2 = Staff.objects.create(user=user2, role='Teacher')
        staff2.subject_areas.add(subject_area_1)
        staff2.subject_areas.add(subject_area_2)
        staff2.save()
        user3 = User.objects.create_user(
            'ta142', 't.avery@acme.edu', 'othergod')
        user3.first_name = 'Tex'
        user3.last_name = 'Avery'
        user3.save()
        staff3 = Staff.objects.create(user=user3, role='Admin')
        staff3.subject_areas.add(subject_area_1)
        staff3.save()
        request = self.factory.get('/view_staff_by_name/')
        request.user = self.user
        response = view_staff_by_name(request)
        self.assertContains(response, staff1.name())
        self.assertContains(response, staff2.name())
        self.assertContains(response, staff3.name())
class YearViewTest(AdminUnitTest):
    """Tests around the year view function from a teacher's perspective"""
    def test_year_view_uses_right_template(self):
        """The year overview renders with the year_view template."""
        response = self.client.get('/students/all/')
        self.assertTemplateUsed(response, 'year_view.html')
    def test_teachers_see_all_students_from_their_only_subject_area(self):
        """A teacher sees students on courses involving their single
        subject area -- including joint courses -- but no others."""
        stuff = set_up_stuff()
        subject_area1 = SubjectArea.objects.create(name="Cartoon Studies")
        subject_area2 = SubjectArea.objects.create(name="Evil Plotting")
        self.user.staff.subject_areas.add(subject_area1)
        course1 = Course.objects.create(
            title="BA in Cartoon Studies", short_title="Cartoon Studies")
        course1.subject_areas.add(subject_area1)
        course2 = Course.objects.create(
            title="BA in Evil Plotting", short_title="Evil Plotting")
        course2.subject_areas.add(subject_area2)
        # Joint course that involves both subject areas.
        course3 = Course.objects.create(
            title="BA in Cartoon Studies with Evil Plotting",
            short_title="Cartoon Studies / Evil Plotting"
        )
        course3.subject_areas.add(subject_area1)
        course3.subject_areas.add(subject_area2)
        student1 = stuff[1]
        student1.year = 1
        student1.course = course1
        student1.save()
        student2 = stuff[2]
        student2.year = 1
        student2.course = course1
        student2.save()
        student3 = stuff[3]
        student3.course = course2
        student3.year = 1
        student3.save()
        student4 = stuff[4]
        student4.course = course3
        student4.year = 1
        student4.save()
        request = self.factory.get('/year_view/1/')
        request.user = self.user
        response = year_view(request, '1')
        self.assertContains(response, student1.last_name)
        self.assertContains(response, student2.last_name)
        # student3 is on a course outside the teacher's subject area.
        self.assertNotContains(response, student3.last_name)
        self.assertContains(response, student4.last_name)
    def test_teachers_see_all_students_from_their_many_subject_areas(self):
        """A teacher with several subject areas sees students from all
        courses that involve any of them."""
        stuff = set_up_stuff()
        subject_area1 = SubjectArea.objects.create(name="Cartoon Studies")
        subject_area2 = SubjectArea.objects.create(name="Evil Plotting")
        self.user.staff.subject_areas.add(subject_area1)
        self.user.staff.subject_areas.add(subject_area2)
        course1 = Course.objects.create(
            title="BA in Cartoon Studies", short_title="Cartoon Studies")
        course1.subject_areas.add(subject_area1)
        course2 = Course.objects.create(
            title="BA in Evil Plotting", short_title="Evil Plotting")
        course2.subject_areas.add(subject_area2)
        course3 = Course.objects.create(
            title="BA in Cartoon Studies with Evil Plotting",
            short_title="Cartoon Studies / Evil Plotting"
        )
        course3.subject_areas.add(subject_area1)
        course3.subject_areas.add(subject_area2)
        student1 = stuff[1]
        student1.year = 1
        student1.course = course1
        student1.save()
        student2 = stuff[2]
        student2.year = 1
        student2.course = course1
        student2.save()
        student3 = stuff[3]
        student3.course = course2
        student3.year = 1
        student3.save()
        student4 = stuff[4]
        student4.course = course3
        student4.year = 1
        student4.save()
        request = self.factory.get('/year_view/1/')
        request.user = self.user
        response = year_view(request, '1')
        self.assertContains(response, student1.last_name)
        self.assertContains(response, student2.last_name)
        self.assertContains(response, student3.last_name)
        self.assertContains(response, student4.last_name)
    def test_main_admin_sees_all_active_students_for_a_year_are_shown(self):
        """The main admin sees all active students in the requested year,
        but not students from other years or inactive students."""
        stuff = set_up_stuff()
        self.user.staff.main_admin = True
        self.user.staff.save()
        student1 = stuff[1]
        student2 = stuff[2]
        student3 = stuff[3]
        student4 = stuff[4]
        student4.year = 2  # wrong year -- must not appear
        student4.save()
        student5 = stuff[5]
        student5.active = False  # inactive -- must not appear
        student5.save()
        request = self.factory.get('/year_view/1/')
        request.user = self.user
        response = year_view(request, '1')
        self.assertContains(response, student1.last_name)
        self.assertContains(response, student2.last_name)
        self.assertContains(response, student3.last_name)
        self.assertNotContains(response, student4.last_name)
        self.assertNotContains(response, student5.last_name)
    def test_only_admin_and_programme_director_see_edit_stuff(self):
        """The bulk-edit controls appear for admins and programme
        directors, but not for plain teachers."""
        stuff = set_up_stuff()
        subject_area = create_subject_area()
        course = create_course()
        course.subject_areas.add(subject_area)
        self.user.staff.role = 'admin'
        self.user.staff.subject_areas.add(subject_area)
        self.user.staff.save()
        student1 = stuff[1]
        student1.course = course
        student1.save()
        student2 = stuff[2]
        student2.course = course
        student2.save()
        request = self.factory.get('/year_view/1/')
        request.user = self.user
        response = year_view(request, '1')
        # 'bulkfunctions' marks the bulk-edit section of the template.
        self.assertContains(response, 'bulkfunctions')
        self.user.staff.role = 'teacher'
        self.user.staff.save()
        request = self.factory.get('/year_view/1/')
        request.user = self.user
        response = year_view(request, '1')
        self.assertNotContains(response, 'bulkfunctions')
        self.user.staff.role = 'teacher'  # NOTE(review): redundant -- already 'teacher'
        self.user.staff.programme_director = True
        self.user.staff.save()
        request = self.factory.get('/year_view/1/')
        request.user = self.user
        response = year_view(request, '1')
        self.assertContains(response, 'bulkfunctions')
    def test_bulk_changing_functions_work(self):
        """The year view's bulk actions (change course, QLD flag, start
        year, year of study, active flag, delete) apply to exactly the
        selected students."""
        stuff = set_up_stuff()
        subject_area = create_subject_area()
        course1 = create_course()
        course1.subject_areas.add(subject_area)
        course2 = Course.objects.create(
            title='BA in Evil Plotting', short_title='Evil Plotting')
        subject_area2 = SubjectArea.objects.create(name='Evil Plotting')
        course2.subject_areas.add(subject_area2)
        self.user.staff.role = 'admin'
        self.user.staff.subject_areas.add(subject_area)
        self.user.staff.save()
        student1 = stuff[1]
        student1.course = course1
        student1.qld = True
        student1.save()
        student2 = stuff[2]
        student2.course = course1
        student2.qld = True
        student2.save()
        student3 = stuff[3]
        student3.course = course1
        student3.qld = True
        student3.save()
        # Only three students are needed for this test.
        stuff[4].delete()
        stuff[5].delete()
        # Set course
        request = self.factory.post('/year_view/1/', data={
            'selected_student_id': [student2.student_id, student3.student_id],
            'modify': 'course_BA in Evil Plotting'
        })
        request.user = self.user
        response = year_view(request, '1')
        student1_out = Student.objects.get(student_id=student1.student_id)
        student2_out = Student.objects.get(student_id=student2.student_id)
        student3_out = Student.objects.get(student_id=student3.student_id)
        self.assertEqual(student1_out.course, course1)
        self.assertEqual(student2_out.course, course2)
        self.assertEqual(student3_out.course, course2)
        # Set QLD
        request = self.factory.post('/year_view/1/', data={
            'selected_student_id': [student1.student_id, student2.student_id],
            'modify': 'qld_off'
        })
        request.user = self.user
        response = year_view(request, '1')
        student1_out = Student.objects.get(student_id=student1.student_id)
        student2_out = Student.objects.get(student_id=student2.student_id)
        student3_out = Student.objects.get(student_id=student3.student_id)
        self.assertEqual(student1_out.qld, False)
        self.assertEqual(student2_out.qld, False)
        self.assertEqual(student3_out.qld, True)
        # Set begin of studies
        request = self.factory.post('/year_view/1/', data={
            'selected_student_id': [student1.student_id, student2.student_id],
            'modify': 'since_1900'
        })
        request.user = self.user
        response = year_view(request, '1')
        student1_out = Student.objects.get(student_id=student1.student_id)
        student2_out = Student.objects.get(student_id=student2.student_id)
        student3_out = Student.objects.get(student_id=student3.student_id)
        self.assertEqual(student1_out.since, 1900)
        self.assertEqual(student2_out.since, 1900)
        self.assertEqual(student3_out.since, None)
        # Set Year
        request = self.factory.post('/year_view/1/', data={
            'selected_student_id': [student1.student_id, student2.student_id],
            'modify': 'year_2'
        })
        request.user = self.user
        response = year_view(request, '1')
        student1_out = Student.objects.get(student_id=student1.student_id)
        student2_out = Student.objects.get(student_id=student2.student_id)
        student3_out = Student.objects.get(student_id=student3.student_id)
        self.assertEqual(student1_out.year, 2)
        self.assertEqual(student2_out.year, 2)
        self.assertEqual(student3_out.year, 1)
        # Active
        request = self.factory.post('/year_view/1/', data={
            'selected_student_id': [student1.student_id, student2.student_id],
            'modify': 'active_no'
        })
        request.user = self.user
        response = year_view(request, '1')
        student1_out = Student.objects.get(student_id=student1.student_id)
        student2_out = Student.objects.get(student_id=student2.student_id)
        student3_out = Student.objects.get(student_id=student3.student_id)
        self.assertEqual(student1_out.active, False)
        self.assertEqual(student2_out.active, False)
        self.assertEqual(student3_out.active, True)
        # Delete
        request = self.factory.post('/year_view/1/', data={
            'selected_student_id': [student1.student_id, student2.student_id],
            'modify': 'delete_yes'
        })
        request.user = self.user
        response = year_view(request, '1')
        self.assertEqual(Student.objects.count(), 1)
    def test_deleting_student_deletes_everything(self):
        """Bulk-deleting a student cascades to their assessment results
        and individual feedback records."""
        module = create_module()
        student = create_student()
        student.modules.add(module)
        performance = Performance.objects.create(
            module=module, student=student)
        assessment = Assessment.objects.create(
            module=module,
            title='Essay'
        )
        result = AssessmentResult.objects.create(assessment=assessment)
        feedback = IndividualFeedback.objects.create(
            assessment_result=result,
            attempt='first'
        )
        self.assertEqual(AssessmentResult.objects.count(), 1)
        self.assertEqual(IndividualFeedback.objects.count(), 1)
        performance.assessment_results.add(result)
        request = self.factory.post('/year_view/1/', data={
            'selected_student_id': [student.student_id],
            'modify': 'delete_yes'
        })
        request.user = self.user
        response = year_view(request, '1')
        self.assertEqual(AssessmentResult.objects.count(), 0)
        self.assertEqual(IndividualFeedback.objects.count(), 0)
class CSVParsingTests(AdminUnitTest):
    """Tests for the CSV Parsing"""

    def test_csv_data_gets_parsed_properly(self):
        """Mapped CSV rows become students; excluded rows are skipped."""
        # Rows are separated by '/////' in the stored Data value; the
        # fourth row is marked for exclusion in the POST below.
        rows = [
            'bb42;Bunny;Bugs;1900;1;bb42@acme.edu;+112345678',
            'dd23;Duck;Daffy;1900;1;dd23@acme.edu;+123456789',
            'pp42;Pig;Porky;1899;2;pp42@acme.edu;+134567890',
            'test;wrong;entry;to;beignored'
        ]
        data = Data.objects.create(
            id='randomstring', value='/////'.join(rows))
        # Map each CSV column (1-based) to a Student field name.
        fields = [
            'student_id',
            'last_name',
            'first_name',
            'since',
            'year',
            'email',
            'phone_number'
        ]
        post_data = {
            'column' + str(number): field
            for number, field in enumerate(fields, 1)
        }
        post_data['exclude'] = '4'
        request = self.factory.post('/parse_csv/randomstring/', data=post_data)
        request.user = self.user
        parse_csv(request, data.id)
        # Three valid rows imported; the excluded row is ignored.
        self.assertEqual(Student.objects.count(), 3)
        bugs = Student.objects.get(student_id='bb42')
        # These lookups raise if the other two students were not created.
        Student.objects.get(student_id='dd23')
        Student.objects.get(student_id='pp42')
        self.assertEqual(bugs.last_name, 'Bunny')
        self.assertEqual(bugs.first_name, 'Bugs')
        self.assertEqual(bugs.since, 1900)
        self.assertEqual(bugs.email, 'bb42@acme.edu')
        self.assertEqual(bugs.phone_number, '+112345678')
class AssignTutorsTest(AdminUnitTest):
    """Tests for the assigning tutors view from an admin perspective"""
    def test_right_template_used(self):
        """The assign-tutors page renders with its dedicated template."""
        SubjectArea.objects.create(name="Cartoon Studies")
        response = self.client.get('/assign_tutors/cartoon-studies/1/')
        self.assertTemplateUsed(response, 'assign_tutors.html')
    def test_assign_tutors_view_shows_right_tutors(self):
        """Only teachers belonging to the page's subject area appear in
        the teachers table."""
        subject_area1 = SubjectArea.objects.create(name="Cartoon Studies")
        subject_area2 = SubjectArea.objects.create(name="Evil Plotting")
        # staff1: subject area 1 only -- should be listed.
        user1 = User.objects.create_user(
            username='ef1',
            password='rabbitseason',
            last_name='Fudd',
            first_name='Elmar'
        )
        staff1 = Staff.objects.create(user=user1, role='teacher')
        staff1.subject_areas.add(subject_area1)
        # staff2: subject area 2 only -- should not be listed.
        user2 = User.objects.create_user(
            username='ys2',
            password='squaredance',
            last_name='Sam',
            first_name='Yosemite'
        )
        staff2 = Staff.objects.create(user=user2, role='teacher')
        staff2.subject_areas.add(subject_area2)
        # staff3: both subject areas -- should be listed.
        user3 = User.objects.create_user(
            username='mtm3',
            password='zapp',
            last_name='The Martian',
            first_name='Marvin'
        )
        staff3 = Staff.objects.create(user=user3, role='teacher')
        staff3.subject_areas.add(subject_area1)
        staff3.subject_areas.add(subject_area2)
        request = self.factory.get('/assign_tutors/cartoon-studies/1')
        request.user = self.user
        response = assign_tutors(request, 'cartoon-studies', '1')
        soup = BeautifulSoup(response.content)
        table = str(soup.select('#teachers')[0])
        self.assertTrue(user1.last_name in table)
        self.assertFalse(user2.last_name in table)
        self.assertTrue(user3.last_name in table)
    def test_assign_tutors_view_shows_right_students(self):
        """Only students of the right subject area and year are listed."""
        subject_area1 = SubjectArea.objects.create(name="Cartoon Studies")
        subject_area2 = SubjectArea.objects.create(name="Evil Plotting")
        course1 = Course.objects.create(title='BA in Cartoon Studies')
        course1.subject_areas.add(subject_area1)
        course2 = Course.objects.create(title='BA in Evil Plotting')
        course2.subject_areas.add(subject_area2)
        course3 = Course.objects.create(
            title='BA in Cartoon Studies with Evil Plotting')
        course3.subject_areas.add(subject_area1, subject_area2)
        user1 = User.objects.create_user(
            username='ef1',
            password='rabbitseason',
            last_name='Fudd',
            first_name='Elmar'
        )
        staff1 = Staff.objects.create(user=user1, role='teacher')
        staff1.subject_areas.add(subject_area1)
        student1 = Student.objects.create(
            student_id='bb42',
            first_name='Bugs',
            last_name='Bunny',
            course=course1,
            year=1
        )
        # student2 is on a different subject area's course.
        student2 = Student.objects.create(
            student_id='dd23',
            first_name='Duck',
            last_name='Daffy',
            course=course2,
            year=1
        )
        # student3 is on a joint course that includes subject area 1.
        student3 = Student.objects.create(
            student_id='pp23',
            first_name='Porky',
            last_name='Pig',
            course=course3,
            year=1
        )
        # student4 is in the wrong year.
        student4 = Student.objects.create(
            student_id='rr23',
            first_name='Road',
            last_name='Runner',
            course=course1,
            year=2
        )
        request = self.factory.get('/assign_tutors/cartoon-studies/1')
        request.user = self.user
        response = assign_tutors(request, 'cartoon-studies', '1')
        self.assertContains(response, 'Bunny')
        self.assertNotContains(response, 'Duck')
        self.assertContains(response, 'Pig')
        self.assertNotContains(response, 'Runner')
    def test_tutors_can_be_assigned(self):
        """POSTing student-id -> username pairs sets the students' tutors;
        unmentioned students keep no tutor."""
        subject_area = SubjectArea.objects.create(name="Cartoon Studies")
        course = Course.objects.create(title='BA in Cartoon Studies')
        course.subject_areas.add(subject_area)
        user1 = User.objects.create_user(
            username='ef1',
            password='rabbitseason',
            last_name='Fudd',
            first_name='Elmar'
        )
        staff1 = Staff.objects.create(user=user1, role='teacher')
        staff1.subject_areas.add(subject_area)
        user2 = User.objects.create_user(
            username='ys2',
            password='squaredance',
            last_name='Sam',
            first_name='Yosemite'
        )
        staff2 = Staff.objects.create(user=user2, role='teacher')
        staff2.subject_areas.add(subject_area)
        student1 = Student.objects.create(
            student_id='bb42',
            first_name='Bugs',
            last_name='Bunny',
            course=course,
            year=1
        )
        student2 = Student.objects.create(
            student_id='dd23',
            first_name='Duck',
            last_name='Daffy',
            course=course,
            year=1
        )
        student3 = Student.objects.create(
            student_id='pp23',
            first_name='Porky',
            last_name='Pig',
            course=course,
            year=1
        )
        student4 = Student.objects.create(
            student_id='rr23',
            first_name='Road',
            last_name='Runner',
            course=course,
            year=1
        )
        # POST keys are student IDs, values are the tutor's username.
        request = self.factory.post(
            '/assign_tutors/cartoon-studies/1',
            data={
                'bb42': 'ef1',
                'dd23': 'ys2',
                'pp23': 'ef1'
            }
        )
        request.user = self.user
        response = assign_tutors(request, 'cartoon-studies', '1')
        student1_out = Student.objects.get(student_id='bb42')
        self.assertEqual(student1_out.tutor, staff1)
        student2_out = Student.objects.get(student_id='dd23')
        self.assertEqual(student2_out.tutor, staff2)
        student3_out = Student.objects.get(student_id='pp23')
        self.assertEqual(student3_out.tutor, staff1)
        student4_out = Student.objects.get(student_id='rr23')
        self.assertEqual(student4_out.tutor, None)
class AllTuteeMeetingTest(TeacherUnitTest):
    """Tests about the function showing all tutee meetings"""
    def test_page_can_only_be_seen_by_pd(self):
        """Access is restricted to programme directors."""
        subject_area = create_subject_area()
        url = (
            '/all_tutee_meetings/' +
            subject_area.slug +
            '/1/'
        )
        request = self.factory.get(url)
        request.user = self.user
        response = all_tutee_meetings(request, 'cartoon-studies', '1')
        # A plain teacher must not get a 200.
        self.assertNotEqual(response.status_code, 200)
        self.user.staff.programme_director = True
        self.user.staff.save()
        request = self.factory.get(url)
        request.user = self.user
        response = all_tutee_meetings(request, 'cartoon-studies', '1')
        self.assertEqual(response.status_code, 200)
    def test_page_uses_right_template(self):
        """The all-tutees page renders with its dedicated template."""
        subject_area = create_subject_area()
        url = (
            '/all_tutee_meetings/' +
            subject_area.slug +
            '/1/'
        )
        self.user.staff.programme_director = True
        self.user.staff.save()
        response = self.client.get(url)
        self.assertTemplateUsed(response, 'all_tutees.html')
    def test_students_in_the_right_year_show_up(self):
        """Only students of the requested year are listed."""
        subject_area = create_subject_area()
        course = Course.objects.create(title='Cartoon Studies')
        course.subject_areas.add(subject_area)
        student1 = Student.objects.create(
            student_id='bb1',
            first_name='Bugs',
            last_name='Bunny',
            year=1,
            course=course
        )
        # student2 is in year 2 and must not appear on the year-1 page.
        student2 = Student.objects.create(
            student_id='dd1',
            first_name='Duck',
            last_name='Daffy',
            year=2,
            course=course
        )
        url = (
            '/all_tutee_meetings/' +
            subject_area.slug +
            '/1/'
        )
        self.user.staff.programme_director = True
        self.user.staff.save()
        request = self.factory.get(url)
        request.user = self.user
        response = all_tutee_meetings(request, 'cartoon-studies', '1')
        self.assertContains(response, student1.get_absolute_url())
        self.assertNotContains(response, student2.get_absolute_url())
    def test_tutor_appears_on_page(self):
        """Each listed student is shown together with their tutor."""
        subject_area = create_subject_area()
        course = Course.objects.create(title='Cartoon Studies')
        course.subject_areas.add(subject_area)
        teacher = create_teacher()
        student1 = Student.objects.create(
            student_id='bb1',
            first_name='Bugs',
            last_name='Bunny',
            year=1,
            course=course,
            tutor=teacher
        )
        url = (
            '/all_tutee_meetings/' +
            subject_area.slug +
            '/1/'
        )
        self.user.staff.programme_director = True
        self.user.staff.save()
        request = self.factory.get(url)
        request.user = self.user
        response = all_tutee_meetings(request, 'cartoon-studies', '1')
        self.assertContains(response, student1.get_absolute_url())
        self.assertContains(response, teacher.name())
    def test_tutor_meetings_appear(self):
        """Recorded tutee meetings show with date and a link to the session."""
        subject_area = create_subject_area()
        course = Course.objects.create(title='Cartoon Studies')
        course.subject_areas.add(subject_area)
        teacher = create_teacher()
        student1 = Student.objects.create(
            student_id='bb1',
            first_name='Bugs',
            last_name='Bunny',
            year=1,
            course=course,
            tutor=teacher
        )
        student2 = Student.objects.create(
            student_id='dd1',
            first_name='Duck',
            last_name='Daffy',
            year=1,
            course=course,
            tutor=teacher
        )
        date = datetime.date(1900, 1, 1)
        meeting1 = TuteeSession.objects.create(
            tutor=teacher,
            tutee=student1,
            date_of_meet=date,
            notes="Some Text"
        )
        url = (
            '/all_tutee_meetings/' +
            subject_area.slug +
            '/1/'
        )
        self.user.staff.programme_director = True
        self.user.staff.save()
        request = self.factory.get(url)
        request.user = self.user
        response = all_tutee_meetings(request, 'cartoon-studies', '1')
        # The date is rendered as e.g. '1 Jan 1900'.
        self.assertContains(response, '1 Jan 1900')
        self.assertContains(response, meeting1.get_absolute_url())
class MyTuteesTests(TeacherUnitTest):
    """Making sure that the my tutee view shows everything necessary"""

    def test_all_tutees_are_shown(self):
        """The page lists the teacher's own tutees and nobody else."""
        stuff = set_up_stuff()
        tutees = stuff[1:4]
        non_tutees = stuff[4:6]
        for tutee in tutees:
            tutee.tutor = self.user.staff
            tutee.save()
        request = self.factory.get('/my_tutees/')
        request.user = self.user
        response = my_tutees(request)
        for tutee in tutees:
            self.assertContains(response, tutee.name())
        for student in non_tutees:
            self.assertNotContains(response, student.name())

    def test_all_tutee_meetings_are_shown(self):
        """Every recorded meeting with a tutee appears with its date."""
        tutee = create_student()
        tutee.tutor = self.user.staff
        tutee.save()
        for day in (1, 2):
            TuteeSession.objects.create(
                tutor=self.user.staff,
                tutee=tutee,
                date_of_meet=datetime.date(1900, 1, day),
                notes='Text'
            )
        request = self.factory.get('/my_tutees/')
        request.user = self.user
        response = my_tutees(request)
        self.assertContains(response, '1 Jan 1900')
        self.assertContains(response, '2 Jan 1900')
class AddressNinesTest(TeacherUnitTest):
"""Tests the function that allows to change averages ending with 9"""
def test_address_nines_uses_right_template(self):
module = create_module()
response = self.client.get(module.get_address_nines_url())
self.assertTemplateUsed(response, 'address_nines.html')
    def test_address_nines_shows_all_averages_ending_with_nine(self):
        """Students whose module average ends in 9 are listed; others not.

        Three weighted assessments (20/30/50) are set up so that the
        averages come out as 49, 59 and 60 respectively.
        """
        stuff = set_up_stuff()
        module = stuff[0]
        assessment1 = Assessment.objects.create(
            module=module,
            title='Assessment 1',
            value=20
        )
        assessment2 = Assessment.objects.create(
            module=module,
            title='Assessment 2',
            value=30
        )
        assessment3 = Assessment.objects.create(
            module=module,
            title='Assessment 3',
            value=50
        )
        # Student 1 with average of 49
        student1 = stuff[1]
        performance1 = Performance.objects.get(module=module, student=student1)
        result1_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=50
        )
        performance1.assessment_results.add(result1_1)
        result1_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=48
        )
        performance1.assessment_results.add(result1_2)
        result1_3 = AssessmentResult.objects.create(
            assessment=assessment3,
            mark=50
        )
        performance1.assessment_results.add(result1_3)
        performance1.calculate_average()
        # Student 2 with 59 Average
        student2 = stuff[2]
        performance2 = Performance.objects.get(module=module, student=student2)
        result2_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=62
        )
        performance2.assessment_results.add(result2_1)
        result2_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=58
        )
        performance2.assessment_results.add(result2_2)
        result2_3 = AssessmentResult.objects.create(
            assessment=assessment3,
            mark=59
        )
        performance2.assessment_results.add(result2_3)
        performance2.calculate_average()
        # Student 3 with 60 Average
        student3 = stuff[3]
        performance3 = Performance.objects.get(module=module, student=student3)
        result3_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=60
        )
        performance3.assessment_results.add(result3_1)
        result3_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=60
        )
        performance3.assessment_results.add(result3_2)
        result3_3 = AssessmentResult.objects.create(
            assessment=assessment3,
            mark=60
        )
        performance3.assessment_results.add(result3_3)
        performance3.calculate_average()
        request = self.factory.get(module.get_address_nines_url())
        request.user = self.user
        response = address_nines(request, module.code, module.year)
        # Only the two averages ending in 9 (49, 59) appear on the page.
        self.assertContains(response, student1.short_name())
        self.assertContains(response, student2.short_name())
        self.assertNotContains(response, student3.short_name())
def test_address_nines_shows_no_nines_found_message_when_no_nines(self):
stuff = set_up_stuff()
module = stuff[0]
assessment1 = Assessment.objects.create(
module=module,
title='Assessment 1',
value=20
)
assessment2 = Assessment.objects.create(
module=module,
title='Assessment 2',
value=30
)
assessment3 = Assessment.objects.create(
module=module,
title='Assessment 3',
value=50
)
# Student 1 with 40 average
student1 = stuff[1]
performance1 = Performance.objects.get(module=module, student=student1)
result1_1 = AssessmentResult.objects.create(
assessment=assessment1,
mark=40
)
performance1.assessment_results.add(result1_1)
result1_2 = AssessmentResult.objects.create(
assessment=assessment2,
mark=40
)
performance1.assessment_results.add(result1_2)
result1_3 = AssessmentResult.objects.create(
assessment=assessment3,
mark=40
)
performance1.assessment_results.add(result1_3)
performance1.calculate_average()
# Student 2 with 55 Average
student2 = stuff[2]
performance2 = Performance.objects.get(module=module, student=student2)
result2_1 = AssessmentResult.objects.create(
assessment=assessment1,
mark=55
)
performance2.assessment_results.add(result2_1)
result2_2 = AssessmentResult.objects.create(
assessment=assessment2,
mark=55
)
performance2.assessment_results.add(result2_2)
result2_3 = AssessmentResult.objects.create(
assessment=assessment3,
mark=55
)
performance2.assessment_results.add(result2_3)
performance2.calculate_average()
# Student 3 with 60 Average
student3 = stuff[3]
performance3 = Performance.objects.get(module=module, student=student3)
result3_1 = AssessmentResult.objects.create(
assessment=assessment1,
mark=60
)
performance3.assessment_results.add(result3_1)
result3_2 = AssessmentResult.objects.create(
assessment=assessment2,
mark=60
)
performance3.assessment_results.add(result3_2)
result3_3 = AssessmentResult.objects.create(
assessment=assessment3,
mark=60
)
performance3.assessment_results.add(result3_3)
performance3.calculate_average()
request = self.factory.get(module.get_address_nines_url())
request.user = self.user
response = address_nines(request, module.code, module.year)
self.assertNotContains(response, student1.short_name())
self.assertNotContains(response, student2.short_name())
self.assertNotContains(response, student3.short_name())
self.assertContains(response, 'no averages ending with a 9')
def test_address_nines_changes_marks(self):
stuff = set_up_stuff()
module = stuff[0]
assessment1 = Assessment.objects.create(
module=module,
title='Assessment 1',
value=20
)
assessment2 = Assessment.objects.create(
module=module,
title='Assessment 2',
value=30
)
assessment3 = Assessment.objects.create(
module=module,
title='Assessment 3',
value=50
)
# Student 1 with average of 49
student1 = stuff[1]
performance1 = Performance.objects.get(module=module, student=student1)
result1_1 = AssessmentResult.objects.create(
assessment=assessment1,
mark=50
)
r1_1_field = 'mark_' + assessment1.slug + '_' + student1.student_id
performance1.assessment_results.add(result1_1)
result1_2 = AssessmentResult.objects.create(
assessment=assessment2,
mark=48
)
performance1.assessment_results.add(result1_2)
r1_2_field = 'mark_' + assessment2.slug + '_' + student1.student_id
result1_3 = AssessmentResult.objects.create(
assessment=assessment3,
mark=50
)
performance1.assessment_results.add(result1_3)
r1_3_field = 'mark_' + assessment3.slug + '_' + student1.student_id
performance1.calculate_average()
# Student 2 with 59 Average
student2 = stuff[2]
performance2 = Performance.objects.get(module=module, student=student2)
result2_1 = AssessmentResult.objects.create(
assessment=assessment1,
mark=62
)
performance2.assessment_results.add(result2_1)
r2_1_field = 'mark_' + assessment1.slug + '_' + student2.student_id
result2_2 = AssessmentResult.objects.create(
assessment=assessment2,
mark=58
)
performance2.assessment_results.add(result2_2)
r2_2_field = 'mark_' + assessment2.slug + '_' + student2.student_id
result2_3 = AssessmentResult.objects.create(
assessment=assessment3,
mark=59
)
performance2.assessment_results.add(result2_3)
r2_3_field = 'mark_' + assessment3.slug + '_' + student2.student_id
performance2.calculate_average()
request = self.factory.post(
module.get_address_nines_url(),
data={
r1_1_field: '50',
r1_2_field: '49',
r1_3_field: '50',
r2_1_field: '63',
r2_2_field: '58',
r2_3_field: '59'
}
)
request.user = self.user
response = address_nines(request, module.code, module.year)
performance_1_out = Performance.objects.get(
student=student1, module=module
)
performance_2_out = Performance.objects.get(
student=student2, module=module
)
self.assertEqual(performance_1_out.average, 50)
self.assertEqual(performance_2_out.average, 60)
def test_address_nines_templates_contains_correct_form_tags(self):
stuff = set_up_stuff()
module = stuff[0]
assessment1 = Assessment.objects.create(
module=module,
title='Assessment 1',
value=20
)
assessment2 = Assessment.objects.create(
module=module,
title='Assessment 2',
value=30
)
assessment3 = Assessment.objects.create(
module=module,
title='Assessment 3',
value=50
)
# Student 1 with average of 49
student1 = stuff[1]
performance1 = Performance.objects.get(module=module, student=student1)
result1_1 = AssessmentResult.objects.create(
assessment=assessment1,
mark=50
)
r1_1_field = (
'name="mark_' +
assessment1.slug +
'_' +
student1.student_id +
'"'
)
performance1.assessment_results.add(result1_1)
result1_2 = AssessmentResult.objects.create(
assessment=assessment2,
mark=48
)
r1_2_field = (
'name="mark_' +
assessment2.slug +
'_' +
student1.student_id +
'"'
)
performance1.assessment_results.add(result1_2)
result1_3 = AssessmentResult.objects.create(
assessment=assessment3,
mark=50
)
r1_3_field = (
'name="mark_' +
assessment3.slug +
'_' +
student1.student_id +
'"'
)
performance1.assessment_results.add(result1_3)
performance1.calculate_average()
request = self.factory.get(module.get_address_nines_url())
request.user = self.user
response = address_nines(request, module.code, module.year)
self.assertContains(response, r1_1_field)
self.assertContains(response, r1_2_field)
self.assertContains(response, r1_3_field)
class EditExamIDsTest(AdminUnitTest):
    """Testing the function to manually edit Exam IDs"""

    def test_right_template_used(self):
        """The edit page renders with the expected template."""
        subject_area = SubjectArea.objects.create(name="Cartoon Studies")
        url = '/edit_exam_ids/' + subject_area.slug + '/1/'
        response = self.client.get(url)
        self.assertTemplateUsed(response, 'edit_exam_ids.html')

    def test_only_active_students_with_right_SA_and_year_appear_in_form(self):
        """Only active year-1 students on a matching course are listed."""
        subject_area1 = SubjectArea.objects.create(name="Cartoon Studies")
        subject_area2 = SubjectArea.objects.create(name="Evil Plotting")
        course1 = Course.objects.create(
            title="MA in Cartoon Studies",
            short_title="Cartoon Studies"
        )
        course1.subject_areas.add(subject_area1)
        course2 = Course.objects.create(
            title="MSc in Evil Plotting",
            short_title="Evil Plotting"
        )
        course2.subject_areas.add(subject_area2)
        course3 = Course.objects.create(
            title="MA in Cartoon Studies and Evil Plotting",
            short_title="Cartoon Studies/Evil Plotting"
        )
        course3.subject_areas.add(subject_area1)
        course3.subject_areas.add(subject_area2)
        stuff = set_up_stuff()
        # (student, active, course, year): only active, year-1 students
        # on a course covering subject_area1 should be shown.
        setups = [
            (stuff[1], True, course1, 1),   # shown
            (stuff[2], False, course1, 1),  # inactive -> hidden
            (stuff[3], True, course2, 1),   # other subject area -> hidden
            (stuff[4], True, course3, 1),   # joint course -> shown
            (stuff[5], True, course3, 2),   # wrong year -> hidden
        ]
        for student, active, course, year in setups:
            student.active = active
            student.course = course
            student.year = year
            student.save()
        url = '/edit_exam_ids/' + subject_area1.slug + '/1/'
        request = self.factory.get(url)
        request.user = self.user
        response = edit_exam_ids(request, subject_area1.slug, '1')
        self.assertContains(response, stuff[1].student_id)
        self.assertNotContains(response, stuff[2].student_id)
        self.assertNotContains(response, stuff[3].student_id)
        self.assertContains(response, stuff[4].student_id)
        self.assertNotContains(response, stuff[5].student_id)

    def test_existing_exam_ids_are_shown(self):
        """Exam IDs already on record are pre-filled in the form."""
        subject_area = SubjectArea.objects.create(name="Cartoon Studies")
        course = Course.objects.create(
            title="MA in Cartoon Studies",
            short_title="Cartoon Studies"
        )
        course.subject_areas.add(subject_area)
        stuff = set_up_stuff()
        for student, exam_id in ((stuff[1], '1234'), (stuff[2], '56789ABC')):
            student.active = True
            student.course = course
            student.year = 1
            student.exam_id = exam_id
            student.save()
        url = '/edit_exam_ids/' + subject_area.slug + '/1/'
        request = self.factory.get(url)
        request.user = self.user
        response = edit_exam_ids(request, subject_area.slug, '1')
        self.assertContains(response, '1234')
        self.assertContains(response, '56789ABC')

    def test_exam_ids_get_saved_properly(self):
        """Posted exam IDs are stored; an empty field clears the ID."""
        subject_area = SubjectArea.objects.create(name="Cartoon Studies")
        course = Course.objects.create(
            title="MA in Cartoon Studies",
            short_title="Cartoon Studies"
        )
        course.subject_areas.add(subject_area)
        stuff = set_up_stuff()
        for student in (stuff[1], stuff[2], stuff[3]):
            student.active = True
            student.course = course
            student.year = 1
            student.save()
        url = '/edit_exam_ids/' + subject_area.slug + '/1/'
        request = self.factory.post(
            url,
            data={
                stuff[1].student_id: '1234',
                stuff[2].student_id: '56789E',
                stuff[3].student_id: ''
            }
        )
        request.user = self.user
        response = edit_exam_ids(request, subject_area.slug, '1')
        student1_out = Student.objects.get(student_id=stuff[1].student_id)
        student2_out = Student.objects.get(student_id=stuff[2].student_id)
        student3_out = Student.objects.get(student_id=stuff[3].student_id)
        self.assertEqual(student1_out.exam_id, '1234')
        self.assertEqual(student2_out.exam_id, '56789E')
        self.assertEqual(student3_out.exam_id, None)
class ConcessionsTest(AdminUnitTest):
    """Testing the concessions form"""
    def test_concessions_form_uses_right_template(self):
        """The concessions page renders with the expected template."""
        module = create_module()
        response = self.client.get(module.get_concessions_url('first'))
        self.assertTemplateUsed(response, 'concessions.html')
    def test_all_active_students_appear_in_template(self):
        """Active students are listed; deactivated students are not."""
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student2 = stuff[2]
        student3 = stuff[3]
        # Deactivate one student so we can check they are filtered out
        student3.active = False
        student3.save()
        request = self.factory.get(module.get_concessions_url('first'))
        request.user = self.user
        response = concessions(request, module.code, module.year, 'first')
        self.assertContains(response, student1.short_name())
        self.assertContains(response, student2.short_name())
        self.assertNotContains(response, student3.short_name())
    def test_correct_names_for_values_in_template(self):
        """Form fields are named <student_id>_<assessment_slug>.

        Fields must appear for every student/assessment pair, whether or
        not an AssessmentResult already exists (only student1 has any).
        """
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student2 = stuff[2]
        assessment1 = Assessment.objects.create(
            module=module,
            title="Assessment 1"
        )
        assessment2 = Assessment.objects.create(
            module=module,
            title="Assessment 2"
        )
        performance1 = Performance.objects.get(module=module, student=student1)
        # concessions codes: 'N' = none, 'G' = granted (per model choices)
        assessment_result_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=38,
            concessions='N'
        )
        assessment_result_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=38,
            concessions='G'
        )
        performance1.assessment_results.add(assessment_result_1)
        performance1.assessment_results.add(assessment_result_2)
        request = self.factory.get(module.get_concessions_url('first'))
        request.user = self.user
        response = concessions(request, module.code, module.year, 'first')
        tag_name_1_1 = (
            'name="' +
            student1.student_id +
            '_' +
            assessment1.slug +
            '"'
        )
        tag_name_1_2 = (
            'name="' +
            student1.student_id +
            '_' +
            assessment2.slug +
            '"'
        )
        self.assertContains(response, tag_name_1_1)
        self.assertContains(response, tag_name_1_2)
        tag_name_2_1 = (
            'name="' +
            student2.student_id +
            '_' +
            assessment1.slug +
            '"'
        )
        tag_name_2_2 = (
            'name="' +
            student2.student_id +
            '_' +
            assessment2.slug +
            '"'
        )
        self.assertContains(response, tag_name_2_1)
        self.assertContains(response, tag_name_2_2)
    def test_existing_concessions_are_displayed(self):
        """Saved concessions pre-select the right <option> in the form.

        Parses the rendered HTML and checks that the dropdown for each
        assessment has 'selected' on exactly the stored concession code.
        """
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        assessment1 = Assessment.objects.create(
            module=module,
            title="Assessment 1"
        )
        assessment2 = Assessment.objects.create(
            module=module,
            title="Assessment 2"
        )
        performance1 = Performance.objects.get(module=module, student=student1)
        assessment_result_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=38,
            concessions='N'
        )
        assessment_result_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=38,
            concessions='G'
        )
        performance1.assessment_results.add(assessment_result_1)
        performance1.assessment_results.add(assessment_result_2)
        request = self.factory.get(module.get_concessions_url('first'))
        request.user = self.user
        response = concessions(request, module.code, module.year, 'first')
        # NOTE(review): BeautifulSoup is called without an explicit
        # parser argument and so relies on the library default
        soup = BeautifulSoup(response.content)
        # CSS id selector: the <select> is id'd <student_id>_<slug>
        tag_name_1_1 = (
            '#' +
            student1.student_id +
            '_' +
            assessment1.slug
        )
        select1 = str(soup.select(tag_name_1_1)[0])
        # Split the markup on '<option' so each chunk holds one option
        options1 = select1.split('<option')
        for part in options1:
            if 'value="N"' in part:
                option1 = part
        # Stored value was 'N', so that option must be pre-selected
        self.assertIn('selected', option1)
        tag_name_1_2 = (
            '#' +
            student1.student_id +
            '_' +
            assessment2.slug
        )
        select2 = str(soup.select(tag_name_1_2)[0])
        options2 = select2.split('<option')
        for part in options2:
            if 'value="N"' in part:
                option2 = part
        # Second result stored 'G': 'N' must NOT be selected ...
        self.assertNotIn('selected', option2)
        for part in options2:
            if 'value="G"' in part:
                option2 = part
        # ... but 'G' must be
        self.assertIn('selected', option2)
    def test_submitting_the_form_saves_concessions(self):
        """Posting the form writes the concession codes to the results.

        Results are created up front only for student1; posting codes
        for student2 must create/update their results as well.
        """
        stuff = set_up_stuff()
        module = stuff[0]
        student1 = stuff[1]
        student2 = stuff[2]
        assessment1 = Assessment.objects.create(
            module=module,
            title="Assessment 1"
        )
        assessment2 = Assessment.objects.create(
            module=module,
            title="Assessment 2"
        )
        performance1 = Performance.objects.get(module=module, student=student1)
        assessment_result_1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=38,
            concessions='N'
        )
        assessment_result_2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=38,
            concessions='G'
        )
        performance1.assessment_results.add(assessment_result_1)
        performance1.assessment_results.add(assessment_result_2)
        tag_name_1_1 = (
            student1.student_id +
            '_' +
            assessment1.slug
        )
        tag_name_1_2 = (
            student1.student_id +
            '_' +
            assessment2.slug
        )
        tag_name_2_1 = (
            student2.student_id +
            '_' +
            assessment1.slug
        )
        tag_name_2_2 = (
            student2.student_id +
            '_' +
            assessment2.slug
        )
        request = self.factory.post(
            module.get_concessions_url('first'),
            data={
                tag_name_1_1: 'G',
                tag_name_1_2: 'P',
                tag_name_2_1: 'N',
                tag_name_2_2: 'G',
            }
        )
        request.user = self.user
        response = concessions(request, module.code, module.year, 'first')
        assessment_result_1_1_out = AssessmentResult.objects.get(
            assessment = assessment1,
            part_of = performance1
        )
        assessment_result_1_2_out = AssessmentResult.objects.get(
            assessment = assessment2,
            part_of = performance1
        )
        performance2 = Performance.objects.get(module=module, student=student2)
        assessment_result_2_1_out = AssessmentResult.objects.get(
            assessment = assessment1,
            part_of = performance2
        )
        assessment_result_2_2_out = AssessmentResult.objects.get(
            assessment = assessment2,
            part_of = performance2
        )
        self.assertEqual(assessment_result_1_1_out.concessions, 'G')
        self.assertEqual(assessment_result_1_2_out.concessions, 'P')
        self.assertEqual(assessment_result_2_1_out.concessions, 'N')
        self.assertEqual(assessment_result_2_2_out.concessions, 'G')
class NextYearTest(MainAdminUnitTest):
    """Testing the switch to the next year with all its complications.

    The eight graduation tests and the two repeat-year tests were
    near-identical copies; they now share the private helpers
    _check_graduation and _check_repeat, and all tests share _proceed.
    The public test-method names and their assertions are unchanged.
    """

    def populate_db_with_students(self):
        """Create courses and students covering the progression cases.

        Returns a dict keyed by scenario:
        '1-2' full-time year 1; '1-spty' part-time, first half of year 1;
        'spty-2' part-time, second half of year 1; 'mixed_course' joint
        honours; '2-3' year 2; '3-4' year 3; 'different_course' a student
        in the other subject area.
        """
        subject_area_1 = SubjectArea.objects.create(name="Cartoon Studies")
        subject_area_2 = SubjectArea.objects.create(name="Evil Plotting")
        course_1 = Course.objects.create(
            title='BA in Cartoon Studies',
            short_title='Cartoon Studies',
        )
        course_1.subject_areas.add(subject_area_1)
        course_2 = Course.objects.create(
            title='BA in Evil Plotting',
            short_title='Evil Plotting',
        )
        course_2.subject_areas.add(subject_area_2)
        course_3 = Course.objects.create(
            title='BA in Cartoon Studies with Evil Plotting',
            short_title='Cartoon Studies / Evil Plotting',
        )
        course_3.subject_areas.add(subject_area_1)
        course_3.subject_areas.add(subject_area_2)
        students = {}
        students['1-2'] = Student.objects.create(
            first_name='Bugs',
            last_name='Bunny',
            student_id='bb23',
            year=1,
            course=course_1,
        )
        students['1-spty'] = Student.objects.create(
            first_name='Daffy',
            last_name='Duck',
            student_id='dd42',
            year=1,
            is_part_time=True,
            course=course_1
        )
        students['spty-2'] = Student.objects.create(
            first_name='Silvester',
            last_name='Cat',
            student_id='sc23',
            year=1,
            is_part_time=True,
            second_part_time_year=True,
            course=course_1
        )
        students['mixed_course'] = Student.objects.create(
            first_name='While E',
            last_name='Coyote',
            student_id='wec23',
            year=1,
            course=course_3
        )
        students['2-3'] = Student.objects.create(
            first_name='Tweety',
            last_name='Bird',
            student_id='tb23',
            year=2,
            course=course_1
        )
        students['3-4'] = Student.objects.create(
            first_name='Tasmanian',
            last_name='Devil',
            student_id='td23',
            year=3,
            course=course_1
        )
        students['different_course'] = Student.objects.create(
            first_name='Marvin',
            last_name='Martian',
            student_id='mm23',
            year=1,
            course=course_2
        )
        return students

    def _proceed(self):
        # Helper: GET the proceed_to_next_year view as the admin user.
        request = self.factory.get(reverse('proceed_to_next_year'))
        request.user = self.user
        return proceed_to_next_year(request)

    def _check_graduation(self, next_year_code, expected_degree):
        # Helper: a final-year student with the given next_year code
        # should end up in year 9 (alumnus), still active, with the
        # expected achieved_degree recorded.
        Student.objects.create(
            first_name="Bugs",
            last_name="Bunny",
            student_id="bb23",
            year=3,
            next_year=next_year_code
        )
        self._proceed()
        student_out = Student.objects.first()
        self.assertEqual(student_out.year, 9)
        self.assertTrue(student_out.active)
        self.assertEqual(student_out.achieved_degree, expected_degree)

    def _check_repeat(self, next_year_code, expected_note):
        # Helper: a repeating student stays in year 1 with a note while
        # the application's current_year setting still advances.
        Student.objects.create(
            first_name="Bugs",
            last_name="Bunny",
            student_id="bb23",
            year=1,
            next_year=next_year_code
        )
        this_year = int(Setting.objects.get(name="current_year").value)
        next_year = str(this_year + 1)
        self._proceed()
        student_out = Student.objects.first()
        self.assertEqual(student_out.year, 1)
        new_year = Setting.objects.get(name="current_year").value
        self.assertEqual(new_year, next_year)
        self.assertEqual(student_out.notes, expected_note)

    def test_enter_student_progression_uses_correct_template(self):
        """The progression entry page renders with the right template."""
        students = self.populate_db_with_students()
        response = self.client.get(
            '/enter_student_progression/cartoon-studies/1/'
        )
        self.assertTemplateUsed(response, 'enter_student_progression.html')

    def test_enter_student_progression_shows_correct_students(self):
        """Only year-1 students of the chosen subject area are listed."""
        students = self.populate_db_with_students()
        request = self.factory.get(
            '/enter_student_progression/cartoon-studies/1/'
        )
        request.user = self.user
        response = enter_student_progression(
            request, 'cartoon-studies', '1')
        self.assertContains(response, students['1-2'].student_id)
        self.assertContains(response, students['1-spty'].student_id)
        self.assertContains(response, students['mixed_course'].student_id)
        self.assertNotContains(
            response, students['different_course'].student_id)
        self.assertNotContains(response, students['2-3'].student_id)
        self.assertNotContains(response, students['3-4'].student_id)

    def test_pass_and_proceed(self):
        """'PP' moves a student up a year and advances the current year."""
        Student.objects.create(
            first_name="Bugs",
            last_name="Bunny",
            student_id="bb23",
            year=1,
            next_year='PP'
        )
        this_year = int(Setting.objects.get(name="current_year").value)
        next_year = str(this_year + 1)
        self._proceed()
        student_out = Student.objects.first()
        self.assertEqual(student_out.year, 2)
        new_year = Setting.objects.get(name="current_year").value
        self.assertEqual(new_year, next_year)

    def test_pass_and_proceed_for_part_time_student(self):
        """Part-timers take two passes per year: first half sets the
        second_part_time_year flag, second half increments the year."""
        student1 = Student.objects.create(
            first_name="Bugs",
            last_name="Bunny",
            student_id="bb23",
            year=1,
            is_part_time=True,
            next_year='PP'
        )
        student2 = Student.objects.create(
            first_name="Daffy",
            last_name="Duck",
            student_id="dd23",
            year=1,
            is_part_time=True,
            second_part_time_year=True,
            next_year='PP'
        )
        self._proceed()
        student1_out = Student.objects.get(first_name="Bugs")
        student2_out = Student.objects.get(first_name="Daffy")
        self.assertEqual(student1_out.year, 1)
        self.assertTrue(student1_out.second_part_time_year)
        self.assertEqual(student2_out.year, 2)
        self.assertFalse(student2_out.second_part_time_year)

    def test_pass_and_proceed_with_qld_resit(self):
        """'PQ' proceeds but records a QLD resit note for failed parts."""
        student = Student.objects.create(
            first_name="Bugs",
            last_name="Bunny",
            student_id="bb23",
            year=1,
            qld=True,
            next_year='PQ'
        )
        this_year = int(Setting.objects.get(name="current_year").value)
        module = Module.objects.create(
            title="Carrot Eating",
            code="CE23",
            year=this_year,
            foundational=True
        )
        assessment1 = Assessment.objects.create(
            title="Essay",
            value=20
        )
        assessment2 = Assessment.objects.create(
            title="Exam",
            value=80
        )
        # The essay was failed twice (mark and resit both below 40)
        result1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=38,
            resit_mark=38
        )
        result2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=80,
        )
        performance = Performance.objects.create(
            student=student,
            module=module,
            belongs_to_year=1
        )
        performance.assessment_results.add(result1)
        performance.assessment_results.add(result2)
        self.assertEqual(performance.qld_failures_after_resit(), [result1])
        self._proceed()
        student_out = Student.objects.first()
        self.assertEqual(student_out.year, 2)
        comment_str = (
            'In Year 2, Bugs will have to resit Carrot Eating ' +
            '(Essay) for QLD purposes'
        )
        self.assertEqual(student_out.notes, comment_str)

    def test_pass_and_proceed_with_trailed_resits(self):
        """'PT' proceeds while trailing the failed assessments."""
        student = Student.objects.create(
            first_name="Bugs",
            last_name="Bunny",
            student_id="bb23",
            year=1,
            qld=True,
            next_year='PT'
        )
        this_year = int(Setting.objects.get(name="current_year").value)
        module = Module.objects.create(
            title="Carrot Eating",
            code="CE23",
            year=this_year,
            foundational=True
        )
        assessment1 = Assessment.objects.create(
            title="Essay",
            value=20
        )
        assessment2 = Assessment.objects.create(
            title="Exam",
            value=80
        )
        result1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=38,
        )
        result2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=35,
        )
        performance = Performance.objects.create(
            student=student,
            module=module,
            belongs_to_year=1
        )
        performance.assessment_results.add(result1)
        performance.assessment_results.add(result2)
        performance.calculate_average()
        self._proceed()
        student_out = Student.objects.first()
        self.assertEqual(student_out.year, 2)
        comment_str = (
            'In Year 2, Bugs will have to resit Carrot Eating ' +
            '(Essay); Carrot Eating (Exam) (trailed)'
        )
        self.assertEqual(student_out.notes, comment_str)

    def test_pass_and_proceed_with_compensation(self):
        """'PC' proceeds and notes the compensated module failure."""
        student = Student.objects.create(
            first_name="Bugs",
            last_name="Bunny",
            student_id="bb23",
            year=1,
            qld=True,
            next_year='PC'
        )
        this_year = int(Setting.objects.get(name="current_year").value)
        module = Module.objects.create(
            title="Carrot Eating",
            code="CE23",
            year=this_year,
            foundational=True
        )
        assessment1 = Assessment.objects.create(
            title="Essay",
            value=20
        )
        assessment2 = Assessment.objects.create(
            title="Exam",
            value=80
        )
        result1 = AssessmentResult.objects.create(
            assessment=assessment1,
            mark=38,
        )
        result2 = AssessmentResult.objects.create(
            assessment=assessment2,
            mark=35,
        )
        performance = Performance.objects.create(
            student=student,
            module=module,
            belongs_to_year=1
        )
        performance.assessment_results.add(result1)
        performance.assessment_results.add(result2)
        performance.calculate_average()
        self._proceed()
        student_out = Student.objects.first()
        self.assertEqual(student_out.year, 2)
        comment_str = 'Failure in %s (%s) has been compensated' % (
            module.title, performance.real_average)
        self.assertEqual(student_out.notes, comment_str)

    def test_repeat_year_works(self):
        """'R' keeps the student in the same year with a repeat note."""
        self._check_repeat('R', 'Repeated Year 1')

    def test_repeat_year_absj_works(self):
        """'ABSJ' repeats the year with an ABSJ-marked note."""
        self._check_repeat('ABSJ', 'Repeated Year 1 ABSJ')

    def test_graduate_with_first(self):
        """'1' graduates the student with a First."""
        self._check_graduation('1', 1)

    def test_graduate_with_21(self):
        """'21' graduates the student with a 2:1."""
        self._check_graduation('21', 21)

    def test_graduate_with_22(self):
        """'22' graduates the student with a 2:2."""
        self._check_graduation('22', 22)

    def test_graduate_with_3rd(self):
        """'3' graduates the student with a Third."""
        self._check_graduation('3', 3)

    def test_graduate_with_cert_he(self):
        """'C' graduates the student with a Cert HE (code 7)."""
        self._check_graduation('C', 7)

    def test_graduate_with_dipl_he(self):
        """'D' graduates the student with a Dipl HE (code 6)."""
        self._check_graduation('D', 6)

    def test_graduate_with_ordinary_degree(self):
        """'O' graduates the student with an Ordinary degree (code 5)."""
        self._check_graduation('O', 5)

    def test_withdraw_student(self):
        """'WD' records a withdrawal (degree code 8)."""
        self._check_graduation('WD', 8)

    def test_proceed_to_next_year_with_multiple_students(self):
        """All progression kinds are applied correctly in one pass."""
        students = self.populate_db_with_students()
        for student in students:
            students[student].next_year = 'PP'
            students[student].save()
        students['3-4'].next_year = '1'
        students['3-4'].save()
        self._proceed()
        student_1_2 = Student.objects.get(
            student_id=students['1-2'].student_id)
        self.assertEqual(student_1_2.year, 2)
        student_1_spty = Student.objects.get(
            student_id=students['1-spty'].student_id)
        self.assertEqual(student_1_spty.year, 1)
        self.assertTrue(student_1_spty.second_part_time_year)
        student_spty_2 = Student.objects.get(
            student_id=students['spty-2'].student_id)
        self.assertEqual(student_spty_2.year, 2)
        self.assertFalse(student_spty_2.second_part_time_year)
        student_2_3 = Student.objects.get(
            student_id=students['2-3'].student_id)
        self.assertEqual(student_2_3.year, 3)
        student_3_4 = Student.objects.get(
            student_id=students['3-4'].student_id)
        self.assertEqual(student_3_4.year, 9)
        self.assertEqual(student_3_4.achieved_degree, 1)
| tobi2006/nomosdb | main/tests/test_views.py | Python | gpl-3.0 | 145,259 | 0.000296 |
#!/usr/bin/env python3
'''automated testing of Samba3 against windows'''
import wintest
def set_libpath(t):
    """Point the dynamic linker at the freshly installed Samba libraries."""
    t.putenv("LD_LIBRARY_PATH", "${PREFIX}/lib")
def set_krb5_conf(t):
    """Write a minimal krb5.conf and point libkrb5 at it via KRB5_CONFIG.

    KDC discovery is done purely through DNS SRV records
    (dns_lookup_kdc), not through a static realm section.
    """
    t.run_cmd("mkdir -p ${PREFIX}/etc")
    t.write_file("${PREFIX}/etc/krb5.conf",
                 '''[libdefaults]
	dns_lookup_realm = false
	dns_lookup_kdc = true''')
    t.putenv("KRB5_CONFIG", '${PREFIX}/etc/krb5.conf')
def build_s3(t):
    '''build samba3

    Runs autogen/configure/make in the source3 tree and installs the
    result into a wiped ${PREFIX}.
    '''
    t.info('Building s3')
    t.chdir('${SOURCETREE}/source3')
    # ccache speeds up repeated CI builds considerably
    t.putenv('CC', 'ccache gcc')
    t.run_cmd("./autogen.sh")
    t.run_cmd("./configure -C --prefix=${PREFIX} --enable-developer")
    t.run_cmd('make basics')
    t.run_cmd('make -j4')
    # wipe the previous installation so stale files cannot linger
    t.run_cmd('rm -rf ${PREFIX}')
    t.run_cmd('make install')
def start_s3(t):
    """Start nmbd, winbindd and smbd as daemons from a clean state."""
    t.info('Starting Samba3')
    t.chdir("${PREFIX}")
    # Kill any daemons left over from a previous run; a failed kill
    # (nothing running) is fine, hence checkfail=False
    t.run_cmd('killall -9 -q samba smbd nmbd winbindd', checkfail=False)
    # Stale pid files would prevent the daemons from starting
    t.run_cmd("rm -f var/locks/*.pid")
    t.run_cmd(['sbin/nmbd', "-D"])
    t.run_cmd(['sbin/winbindd', "-D"])
    t.run_cmd(['sbin/smbd', "-D"])
    # Block until smbd accepts NetBIOS session connections (port 139)
    t.port_wait("${INTERFACE_IP}", 139)
def test_wbinfo(t):
    """Exercise wbinfo against the running winbindd.

    Checks version, daemon ping, online status of all domains, user and
    group enumeration from the Windows domain, name->SID mapping, and
    both plaintext/NTLM and Kerberos authentication.
    """
    t.info('Testing wbinfo')
    t.chdir('${PREFIX}')
    t.cmd_contains("bin/wbinfo --version", ["Version 4."])
    t.cmd_contains("bin/wbinfo -p", ["Ping to winbindd succeeded"])
    # retry: winbindd may take a moment to bring all domains online
    t.retry_cmd("bin/wbinfo --online-status",
                ["BUILTIN : online",
                 "${HOSTNAME} : online",
                 "${WIN_DOMAIN} : online"],
                casefold=True)
    t.cmd_contains("bin/wbinfo -u",
                   ["${WIN_DOMAIN}/administrator",
                    "${WIN_DOMAIN}/krbtgt"],
                   casefold=True)
    t.cmd_contains("bin/wbinfo -g",
                   ["${WIN_DOMAIN}/domain users",
                    "${WIN_DOMAIN}/domain guests",
                    "${WIN_DOMAIN}/domain admins"],
                   casefold=True)
    # well-known RIDs: 500 = Administrator (user), 513 = Domain Users (group)
    t.cmd_contains("bin/wbinfo --name-to-sid administrator",
                   "S-1-5-.*-500 SID_USER .1",
                   regex=True)
    t.cmd_contains("bin/wbinfo --name-to-sid 'domain users'",
                   "S-1-5-.*-513 SID_DOM_GROUP .2",
                   regex=True)
    t.retry_cmd("bin/wbinfo --authenticate=${WIN_DOMAIN}/administrator%${WIN_PASS}",
                ["plaintext password authentication succeeded",
                 "challenge/response password authentication succeeded"])
    t.retry_cmd("bin/wbinfo --krb5auth=${WIN_DOMAIN}/administrator%${WIN_PASS}",
                ["succeeded"])
def test_smbclient(t):
    '''Test smbclient against the member server.

    After the version and share-list checks, the same interactive
    session (dir, mkdir/cd/rmdir of a scratch directory) is run twice:
    once with NTLM authentication and once with Kerberos (-k).  The
    duplicated pexpect script is factored into one nested helper.
    '''
    t.info('Testing smbclient')
    smbclient = t.getvar("smbclient")
    t.chdir('${PREFIX}')
    t.cmd_contains("%s --version" % (smbclient), ["Version 4."])
    t.cmd_contains('%s -L ${INTERFACE_IP} -U%%' % (smbclient), ["Domain=[${WIN_DOMAIN}]", "test", "IPC$", "Samba 4."],
                   casefold=True)

    def interactive_session(command):
        # Drive one smbclient session: list the share, then create,
        # enter, leave and remove a scratch directory.
        child = t.pexpect_spawn(command)
        child.expect("smb:")
        child.sendline("dir")
        child.expect("blocks available")
        child.sendline("mkdir testdir")
        child.expect("smb:")
        child.sendline("cd testdir")
        child.expect('testdir')
        child.sendline("cd ..")
        child.sendline("rmdir testdir")

    base_command = '%s //${HOSTNAME}.${WIN_REALM}/test -Uroot@${WIN_REALM}%%${PASSWORD2}' % (smbclient)
    interactive_session(base_command)           # NTLM authentication
    interactive_session(base_command + ' -k')   # Kerberos authentication
def create_shares(t):
    '''Append a writable [test] share to the generated smb.conf.'''
    t.info("Adding test shares")
    t.chdir('${PREFIX}')
    # mode='a' appends to the smb.conf written by prep_join_as_member()
    t.write_file("etc/smb.conf", '''
[test]
       path = ${PREFIX}/test
       read only = no
''',
                 mode='a')
    t.run_cmd("mkdir -p test")
def prep_join_as_member(t, vm):
    '''prepare to join a windows domain as a member server

    Restores the Windows VM from its snapshot, kills any stale Samba
    daemons, wipes previous databases and writes a fresh member-server
    smb.conf (security = ADS) for the ${WIN_REALM} domain.
    '''
    t.setwinvars(vm)
    t.info("Starting VMs for joining ${WIN_VM} as a member using net ads join")
    t.chdir('${PREFIX}')
    # Make sure no leftover Samba processes or a running VM interfere
    t.run_cmd('killall -9 -q samba smbd nmbd winbindd', checkfail=False)
    t.vm_poweroff("${WIN_VM}", checkfail=False)
    t.vm_restore("${WIN_VM}", "${WIN_SNAPSHOT}")
    child = t.open_telnet("${WIN_HOSTNAME}", "administrator", "${WIN_PASS}", set_time=True)
    t.get_ipconfig(child)
    # Start from a clean slate: drop any previous databases and secrets
    t.del_files(["var", "private"])
    t.write_file("etc/smb.conf", '''
[global]
	netbios name = ${HOSTNAME}
	log level = ${DEBUGLEVEL}
	realm = ${WIN_REALM}
	workgroup = ${WIN_DOMAIN}
	security = ADS
	bind interfaces only = yes
	interfaces = ${INTERFACE}
	winbind separator = /
	idmap uid = 1000000-2000000
	idmap gid = 1000000-2000000
	winbind enum users = yes
	winbind enum groups = yes
	max protocol = SMB2
	map hidden = no
	map system = no
	ea support = yes
	panic action = xterm -e gdb --pid %d
''')
def join_as_member(t, vm):
    '''join a windows domain as a member server

    Waits for the DC's LDAP service and SRV record, performs the
    "net ads join", verifies it, and registers/checks our DNS A record.
    '''
    t.setwinvars(vm)
    t.info("Joining ${WIN_VM} as a member using net ads join")
    # Wait until the DC's LDAP port is reachable and DNS advertises it
    t.port_wait("${WIN_IP}", 389)
    t.retry_cmd("host -t SRV _ldap._tcp.${WIN_REALM} ${WIN_IP}", ['has SRV record'])
    t.cmd_contains("bin/net ads join -Uadministrator%${WIN_PASS}", ["Joined"])
    t.cmd_contains("bin/net ads testjoin", ["Join is OK"])
    # Register our own A record and verify it resolves
    t.cmd_contains("bin/net ads dns register ${HOSTNAME}.${WIN_REALM} -P", ["Successfully registered hostname with DNS"])
    t.cmd_contains("host -t A ${HOSTNAME}.${WIN_REALM}",
                   ['${HOSTNAME}.${WIN_REALM} has address'])
def create_root_account(t, vm):
    '''Create and enable a domain 'root' account used by the smbclient tests.'''
    t.setwinvars(vm)
    t.info("Creating 'root' account for testing Samba3 member server")
    t.chdir('${PREFIX}')
    t.run_cmd('bin/net ads user add root -Uadministrator%${WIN_PASS}')
    # Set the password interactively via 'net ads password'
    child = t.pexpect_spawn('bin/net ads password root -Uadministrator%${WIN_PASS}')
    child.expect("Enter new password for root")
    child.sendline("${PASSWORD2}")
    child.expect("Password change for ")
    child.expect(" completed")
    # New accounts start disabled; enable it through the net rpc shell
    child = t.pexpect_spawn('bin/net rpc shell -S ${WIN_HOSTNAME}.${WIN_REALM} -Uadministrator%${WIN_PASS}')
    child.expect("net rpc>")
    child.sendline("user edit disabled root no")
    child.expect("Set root's disabled flag")
def test_join_as_member(t, vm):
    '''test the domain join by exercising winbindd and smbclient'''
    t.setwinvars(vm)
    t.info('Testing join as member')
    t.chdir('${PREFIX}')
    test_wbinfo(t)
    test_smbclient(t)
def test_s3(t):
    '''basic s3 testing

    Drives the full member-server sequence: build s3, set up bind/krb5,
    promote a W2K8R2 DC and join it, then optionally join Windows 7 and
    Windows XP client VMs against the same domain.
    :param t: a wintest.wintest instance
    '''
    t.setvar("SAMBA_VERSION", "Version 4")
    t.setvar("smbclient", "bin/smbclient")
    t.check_prerequesites()
    set_libpath(t)
    if not t.skip("configure_bind"):
        t.configure_bind()
    if not t.skip("stop_bind"):
        t.stop_bind()
    if not t.skip("stop_vms"):
        t.stop_vms()
    if not t.skip("build"):
        build_s3(t)
    set_krb5_conf(t)
    if not t.skip("configure_bind2"):
        t.configure_bind()
    if not t.skip("start_bind"):
        t.start_bind()
    # the DC VM is promoted at most once; later sections reuse it
    dc_started = False
    if t.have_var('W2K8R2A_VM') and not t.skip("join_w2k8r2"):
        t.start_winvm('W2K8R2A')
        dc_started = True
        prep_join_as_member(t, "W2K8R2A")
        t.run_dcpromo_as_first_dc("W2K8R2A", func_level='2008r2')
        join_as_member(t, "W2K8R2A")
        create_shares(t)
        start_s3(t)
        create_root_account(t, "W2K8R2A")
        test_join_as_member(t, "W2K8R2A")
    if t.have_var('WINDOWS7_VM') and t.have_var('W2K8R2A_VM') and not t.skip("join_windows7_2008r2"):
        if not dc_started:
            t.start_winvm('W2K8R2A')
            t.run_dcpromo_as_first_dc("W2K8R2A", func_level='2008r2')
            dc_started = True
        else:
            t.setwinvars('W2K8R2A')
        realm = t.getvar("WIN_REALM")
        dom_username = t.getvar("WIN_USER")
        dom_password = t.getvar("WIN_PASS")
        dom_realm = t.getvar("WIN_REALM")
        t.start_winvm('WINDOWS7')
        t.test_remote_smbclient("WINDOWS7")
        t.run_winjoin('WINDOWS7', realm, username=dom_username, password=dom_password)
        t.test_remote_smbclient("WINDOWS7", dom_username, dom_password)
        t.test_remote_smbclient('WINDOWS7', dom_username, dom_password, args='--option=clientntlmv2auth=no')
        t.test_remote_smbclient('WINDOWS7', "%s@%s" % (dom_username, dom_realm), dom_password, args="-k")
        t.test_remote_smbclient('WINDOWS7', "%s@%s" % (dom_username, dom_realm), dom_password, args="-k --option=clientusespnegoprincipal=yes")
        t.test_net_use('WINDOWS7', dom_realm, t.getvar("W2K8R2A_DOMAIN"), 'root', '${PASSWORD2}')
    if t.have_var('WINXP_VM') and t.have_var('W2K8R2A_VM') and not t.skip("join_winxp_2008r2"):
        if not dc_started:
            t.start_winvm('W2K8R2A')
            t.run_dcpromo_as_first_dc("W2K8R2A", func_level='2008r2')
            dc_started = True
        else:
            t.setwinvars('W2K8R2A')
        realm = t.getvar("WIN_REALM")
        dom_username = t.getvar("WIN_USER")
        dom_password = t.getvar("WIN_PASS")
        dom_realm = t.getvar("WIN_REALM")
        t.start_winvm('WINXP')
        t.run_winjoin('WINXP', realm, username=dom_username, password=dom_password)
        t.test_remote_smbclient('WINXP', dom_username, dom_password)
        t.test_remote_smbclient('WINXP', dom_username, dom_password, args='--option=clientntlmv2auth=no')
        t.test_remote_smbclient('WINXP', "%s@%s" % (dom_username, dom_realm), dom_password, args="-k")
        # Fixed: smbclient has no bare --clientusespnegoprincipal flag; smb.conf
        # options must be passed via --option=, matching the WINDOWS7 branch above.
        t.test_remote_smbclient('WINXP', "%s@%s" % (dom_username, dom_realm), dom_password, args="-k --option=clientusespnegoprincipal=yes")
        t.test_net_use('WINXP', dom_realm, t.getvar("W2K8R2A_DOMAIN"), 'root', '${PASSWORD2}')
    t.info("S3 test: All OK")
def test_cleanup(t):
    '''cleanup after tests: restore resolv.conf and stop our bind child'''
    t.info("Cleaning up ...")
    t.restore_resolv_conf()
    # bind_child only exists if start_bind() actually spawned one
    if getattr(t, 'bind_child', False):
        t.bind_child.kill()
if __name__ == '__main__':
    t = wintest.wintest()
    t.setup("test-s3.py", "source3")
    # Always attempt cleanup, even when the test run raises, unless the
    # user asked for --nocleanup (useful for post-mortem debugging).
    try:
        test_s3(t)
    except:
        if not t.opts.nocleanup:
            test_cleanup(t)
        raise
    if not t.opts.nocleanup:
        test_cleanup(t)
    t.info("S3 test: All OK")
| kernevil/samba | wintest/test-s3.py | Python | gpl-3.0 | 10,296 | 0.003011 |
# -*- coding: utf-8 -*-
# © 2015 Nedas Žilinskas <nedas.zilinskas@gmail.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Variant Pictures",
"category": "Website",
"summary": "Shows picture of the product variant instead of plain color",
"version": "8.0.1.0",
"description": """
Variant Pictures
======================================
Shows picture of the product variant instead of plain color
""",
"author": "Nedas Žilinskas <nedas.zilinskas@gmail.com>",
"website": "http://nedaszilinskas.com",
"depends": ["website_sale"],
"data": [
"views/assets.xml",
"views/templates.xml"
],
"images": [
"static/description/main_screenshot.png",
],
"installable": True
}
| nedaszilinskas/Odoo-CMS-Variant-Pictures | website_sale_variant_pictures/__openerp__.py | Python | mit | 783 | 0 |
from hubs.ha import haremote as ha
from hubs.ha.hasshub import HAnode, RegisterDomain
from controlevents import CEvent, PostEvent, ConsoleEvent, PostIfInterested
from utils import timers
import functools
# noinspection PyTypeChecker
class Thermostat(HAnode): # deprecated version
	"""Wrapper for a Home Assistant 'climate' entity (older HA attribute schema).

	Caches temperature/setpoints/mode/fan locally and pushes changes back via
	HA service calls.  Those calls are asynchronous, so each push also starts a
	5 second one-shot timer that re-posts the current state in case HA never
	echoes the change back to us.
	"""
	def __init__(self, HAitem, d):
		super(Thermostat, self).__init__(HAitem, **d)
		self.Hub.RegisterEntity('climate', self.entity_id, self)
		self.timerseq = 0  # monotonically increasing suffix for fake-update timer names
		# noinspection PyBroadException
		try:
			self.temperature = self.attributes['temperature']
			self.curtemp = self.attributes['current_temperature']
			self.target_low = self.attributes['target_temp_low']
			self.target_high = self.attributes['target_temp_high']
			self.mode = self.attributes['operation_mode']
			self.fan = self.attributes['fan_mode']
			self.fanstates = self.attributes['fan_list']
			self.modelist = self.attributes['operation_list']
		except:
			# not every climate entity reports all attributes; leave missing ones unset
			pass
	# noinspection PyUnusedLocal
	def ErrorFakeChange(self, param=None):
		# Fallback used by the one-shot timers: pretend the node changed so
		# screens refresh even if HA never reported the service-call result.
		PostEvent(ConsoleEvent(CEvent.HubNodeChange, hub=self.Hub.name, node=self.entity_id, value=self.internalstate))
	def Update(self, **ns):
		# Refresh the cached attributes from an HA state-change report.
		if 'attributes' in ns: self.attributes = ns['attributes']
		self.temperature = self.attributes['temperature']
		self.curtemp = self.attributes['current_temperature']
		self.target_low = self.attributes['target_temp_low']
		self.target_high = self.attributes['target_temp_high']
		self.mode = self.attributes['operation_mode']
		self.fan = self.attributes['fan_mode']
		PostIfInterested(self.Hub, self.entity_id, self.internalstate)
	# noinspection DuplicatedCode
	def PushSetpoints(self, t_low, t_high):
		# Ask HA (asynchronously) to change the heat/cool setpoints.
		ha.call_service_async(self.Hub.api, 'climate', 'set_temperature',
							  {'entity_id': '{}'.format(self.entity_id), 'target_temp_high': str(t_high),
							   'target_temp_low': str(t_low)})
		self.timerseq += 1
		_ = timers.OnceTimer(5, start=True, name='fakepushsetpoint-{}'.format(self.timerseq),
							 proc=self.ErrorFakeChange)
	def GetThermInfo(self):
		# Returns (current temp, low setpoint, high setpoint, HVAC state, mode, fan).
		if self.target_low is not None:
			return self.curtemp, self.target_low, self.target_high, self.HVAC_state, self.mode, self.fan
		else:
			# single-setpoint thermostat: report 'temperature' for both bounds
			return self.curtemp, self.temperature, self.temperature, self.HVAC_state, self.mode, self.fan
	# noinspection PyUnusedLocal,PyUnusedLocal,PyUnusedLocal
	def _HVACstatechange(self, storeitem, old, new, param, chgsource):
		# Sensor-alert callback from the linked HVAC action sensor.
		self.HVAC_state = new
		PostIfInterested(self.Hub, self.entity_id, new)
	def _connectsensors(self, HVACsensor):
		# Track the HVAC action sensor and subscribe to its changes.
		self.HVAC_state = HVACsensor.state
		# noinspection PyProtectedMember
		HVACsensor.SetSensorAlert(functools.partial(self._HVACstatechange))
	def GetModeInfo(self):
		# Available operation modes and fan settings as reported by HA.
		return self.modelist, self.fanstates
	def PushFanState(self, mode):
		ha.call_service_async(self.Hub.api, 'climate', 'set_fan_mode',
							  {'entity_id': '{}'.format(self.entity_id), 'fan_mode': mode})
		self.timerseq += 1
		_ = timers.OnceTimer(5, start=True, name='fakepushfanstate-{}'.format(self.timerseq),
							 proc=self.ErrorFakeChange)
	def PushMode(self, mode):
		# noinspection PyBroadException
		ha.call_service_async(self.Hub.api, 'climate', 'set_operation_mode',
							  {'entity_id': '{}'.format(self.entity_id), 'operation_mode': mode})
		self.timerseq += 1
		# NOTE(review): the timer name contains a stray space ('fakepushmode -N');
		# harmless, but inconsistent with the other timer names.
		_ = timers.OnceTimer(5, start=True, name='fakepushmode -{}'.format(self.timerseq),
							 proc=self.ErrorFakeChange)
# Register this wrapper as the handler for all Home Assistant 'climate' entities.
RegisterDomain('climate', Thermostat)
| kevinkahn/softconsole | hubs/ha/domains/__oldthermostat.py | Python | apache-2.0 | 3,386 | 0.025399 |
# -*- coding: utf-8 -*-
from baseprovider import BaseProvider
from utils import clean
class Company(BaseProvider):
    """A fake company: a generated name, legal suffix and website URL."""

    def __init__(self, locales):
        super(Company, self).__init__(locales)

    def new(self):
        """Populate name, suffix and website with freshly generated values."""
        self.name = self.parse('company.name')
        self.suffix = self.fetch('company.suffix')
        domain_suffix = self.fetch('internet.domain_suffix')
        self.website = "http://www.{0}.{1}".format(clean(self.name), domain_suffix)

    def __str__(self):
        """Render as 'Name Suffix' on one line, website on the next."""
        return "{0} {1}\n{2}".format(self.name, self.suffix, self.website)
| mpuig/faker | faker/providers/company.py | Python | isc | 624 | 0 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Unit tests for pywikibot/textlib.py"""
__version__ = '$Id: cadd8620e9dba64e3c2f3082e6f03f4551fd7641 $'
import unittest
from tests.test_pywiki import PyWikiTestCase
import wikipedia as pywikibot
import pywikibot.textlib as textlib
import catlib
class PyWikiTextLibTestCase(PyWikiTestCase):
    """Round-trip tests for textlib interwiki and category link handling.

    All expected texts use DOS line endings ('\\r\\n'), matching textlib's
    output for the English Wikipedia family.
    """
    end_of_line = '\r\n'
    blank_line = '\r\n\r\n'
    foo_page_start = 'Foo' + blank_line
    iwresult1 = '[[de:German]]\r\n[[fr:French]]\r\n'
    catresult1 = '[[Category:Cat1]]\r\n[[Category:Cat2]]\r\n'
    result1 = foo_page_start + \
              catresult1.strip() + \
              blank_line + \
              iwresult1.strip()
    def setUp(self):
        """Create the enwiki site, two categories and the de/fr sites used below."""
        self.site = pywikibot.getSite('en', 'wikipedia')
        self.data = [catlib.Category(self.site, 'Category:Cat1'),
                     catlib.Category(self.site, 'Category:Cat2')]
        self.site_de = pywikibot.getSite('de', 'wikipedia')
        self.site_fr = pywikibot.getSite('fr', 'wikipedia')
    def test_interwikiFormat(self):
        """interwikiFormat should render the de/fr links in enwiki order."""
        interwikis = {
            'de':pywikibot.Page(self.site_de, 'German'),
            'fr':pywikibot.Page(self.site_fr, 'French')
        }
        self.assertEqual(self.iwresult1,
                         textlib.interwikiFormat(interwikis, self.site))
    def assertRoundtripInterwiki(self, text, count):
        # get + replace with the same links must reproduce the input exactly
        old_interwikis = textlib.getLanguageLinks(text, self.site)
        new_text = textlib.replaceLanguageLinks(text, old_interwikis, site = self.site)
        self.assertEqual(len(old_interwikis), count)
        self.assertEqual(text, new_text)
    def assertFailedRoundtripInterwiki(self, text):
        # helper for inputs that replaceLanguageLinks is expected to rewrite
        old_interwikis = textlib.getLanguageLinks(text, self.site)
        new_text = textlib.replaceLanguageLinks(text, old_interwikis, site = self.site)
        self.assertNotEqual(text, new_text)
    def test_replaceLanguageLinks(self):
        # This case demonstrates that eol isnt stripped
        self.assertFailedRoundtripInterwiki(self.result1)
        self.assertRoundtripInterwiki(self.result1 + self.end_of_line, 2)
    def test_replaceLanguageLinks1(self):
        # This case demonstrates that eol isnt stripped
        result = self.foo_page_start + self.iwresult1
        self.assertFailedRoundtripInterwiki(self.iwresult1)
        self.assertRoundtripInterwiki(result, 2)
    def test_categoryFormat_raw(self):
        self.assertEqual(self.catresult1,
                         textlib.categoryFormat(['[[Category:Cat1]]',
                                                 '[[Category:Cat2]]'],
                                                self.site))
    def test_categoryFormat_bare(self):
        self.assertEqual(self.catresult1,
                         textlib.categoryFormat(['Cat1', 'Cat2'], self.site))
    def test_categoryFormat_Category(self):
        self.assertEqual(self.catresult1,
                         textlib.categoryFormat(self.data, self.site))
    def test_categoryFormat_Page(self):
        # Fixed: previously asserted on self.data (Category objects), which
        # duplicated test_categoryFormat_Category and never exercised Page input.
        data = [pywikibot.Page(self.site, 'Category:Cat1'),
                pywikibot.Page(self.site, 'Category:Cat2')]
        self.assertEqual(self.catresult1, textlib.categoryFormat(data,
                                                                 self.site))
    def assertRoundtripCategory(self, text, catcount):
        # category links must survive a get + replace cycle unchanged
        cats = textlib.getCategoryLinks(text)
        self.assertEqual(len(cats), catcount)
        self.assertEqual(text, textlib.replaceCategoryLinks(text,
                                                            cats,
                                                            site = self.site))
    def test_replaceCategoryLinks(self):
        self.assertRoundtripCategory(self.result1,2)
    def test_replaceCategoryLinks1(self):
        result = 'Blah\r\n\r\n[[Category:Cat1]]\r\n[[Category:Cat2]]'
        self.assertRoundtripCategory(result,2)
    def test_replaceCategoryLinks2(self):
        result = 'Blah\r\n\r\n[[Category:Cat1]]\r\n[[Category:Cat2]]\r\n\r\n[[fr:Test]]'
        self.assertRoundtripCategory(result,2)
# Run the suite directly (requires network access to en/de/fr Wikipedia
# through the pywikibot framework).
if __name__ == "__main__":
    unittest.main()
| races1986/SafeLanguage | CEM/tests/test_textlib.py | Python | epl-1.0 | 4,110 | 0.004623 |
import os
import numpy as np
from typing import List, Dict, Union, Optional
import subprocess as sp
import matplotlib
import matplotlib.transforms
import matplotlib.pyplot as plt
import matplotlib.gridspec as gs
import matplotlib.transforms as tf
import matplotlib.cm as cm
from matplotlib.axes import Axes
from matplotlib.artist import Artist
from matplotlib.figure import Figure
from createPlotUtils import debug_print, get_renderer
from dataclasses import dataclass
import global_options as go
import parse_file as pf
@dataclass
class PlotConfiguration:
    """All mutable state for one figure being assembled.

    Bundles the matplotlib Figure with its gridspec layout(s), the axes
    created so far, and the artists/handles that must be considered when
    building the legend and computing the save-time bounding box.
    """
    # Index of this plot; doubles as the matplotlib figure number.
    plot_id: int
    # The figure everything below belongs to.
    fig: Figure
    # Named gridspec layouts; "main" is always present.
    gridspec_dict: Dict[str, Union[gs.GridSpec, gs.GridSpecFromSubplotSpec]]
    # Axes created so far, keyed by subplot id (0 is the main plot).
    subplot_dict: Dict[int, Axes]
    # Artists (e.g. the legend) that must be included in the bounding box.
    extra_artists: List[Artist]
    # Explicit legend handles; when empty, matplotlib collects them itself.
    legend_handles: List[Artist]
def latex_available():
    """Return True if a ``latex`` executable can be run, False otherwise.

    Used to decide whether matplotlib may be configured with
    ``text.usetex``.  A missing binary raises OSError, which is treated
    the same as a non-zero exit status.
    """
    try:
        # sp.DEVNULL avoids opening os.devnull by hand (Python 3.3+).
        status = sp.call(["latex", "--version"], stdout=sp.DEVNULL, stderr=sp.DEVNULL)
    except OSError:  # latex not installed / not on PATH
        return False
    return status == 0
def init_params():
    """Configure global matplotlib rcParams: fonts, sizes and LaTeX rendering."""
    # Setup the matplotlib params
    preamble = [r'\usepackage[T1]{fontenc}',
                r'\usepackage{amsmath}',
                r'\usepackage{txfonts}',
                r'\usepackage{textcomp}']
    matplotlib.rc('font', **{'family': 'sans-serif', 'sans-serif': ['Helvetica']})
    matplotlib.rc('text.latex', preamble="\n".join(preamble))
    params = {'backend': 'pdf',
              'axes.labelsize': go.get_int("font_size"),
              'font.size': go.get_int("font_size"),
              'legend.fontsize': go.get_int("legend_font_size"),
              'xtick.labelsize': go.get_int("tick_font_size"),
              'ytick.labelsize': go.get_int("tick_font_size"),
              # only enable LaTeX text rendering when a latex binary exists
              'text.usetex': latex_available(),
              'figure.dpi': 100,
              'savefig.dpi': 100}
    matplotlib.rcParams.update(params)
def init_subplot(plot_config: PlotConfiguration, subplot_id, subplot_spec):
    """Create axes for *subplot_spec*, apply configured limits/labels/ticks,
    register them in plot_config.subplot_dict under *subplot_id*, and return them."""
    fig = plt.figure(plot_config.plot_id)
    ax = fig.add_subplot(subplot_spec, label=str(subplot_id))
    # Missing per-plot options fall back to the first configured value
    # (default=None leaves matplotlib's autoscaling in place).
    ax.set_ylim(go.get_float("y_axis_min", plot_config.plot_id, when_not_exist=go.RETURN_FIRST, default=None),
                go.get_float("y_axis_max", plot_config.plot_id, when_not_exist=go.RETURN_FIRST, default=None))
    ax.set_xlim(go.get_float("x_axis_min", plot_config.plot_id, when_not_exist=go.RETURN_FIRST, default=None),
                go.get_float("x_axis_max", plot_config.plot_id, when_not_exist=go.RETURN_FIRST, default=None))
    ax.set_ylabel(go.get_str("y_labels", plot_config.plot_id, when_not_exist=go.RETURN_FIRST))
    ax.set_xlabel(go.get_str("x_labels", plot_config.plot_id, when_not_exist=go.RETURN_FIRST))
    if go.get_bool("title"):
        # "titles" holds one title per subplot of this plot
        ax.set_title(go.get_str_list(
            "titles",
            plot_config.plot_id,
            when_not_exist=go.RETURN_FIRST
        )[subplot_id], fontsize=go.get_int("title_size"))
    if go.get_exists("x_ticks"):
        ax.set_xticks(go.get_float_list("x_ticks", plot_config.plot_id, when_not_exist=go.RETURN_FIRST))
    if go.get_exists("y_ticks"):
        ax.set_yticks(go.get_float_list("y_ticks", plot_config.plot_id, when_not_exist=go.RETURN_FIRST))
    # ax.set_aspect(1.0)
    # ax.apply_aspect()
    plot_config.subplot_dict[subplot_id] = ax
    return ax
def setup_figure(plot_id: int, gridspec: Optional[gs.GridSpec] = None) -> PlotConfiguration:
    """
    Sets up a figure based on plot id.
    By default, we assume there will only be one sub-figure, which is the main plot.
    :param plot_id: The plot id.
    :param gridspec: Gridspec layout for if the plot should contain multiple
        sub-figures.  Defaults to a fresh 1x1 layout.
    :return: Returns the plot configuration for this figure.
    """
    # A GridSpec default argument would be evaluated once at import time and
    # shared between every call; create the default layout per call instead.
    if gridspec is None:
        gridspec = gs.GridSpec(1, 1)
    fig = plt.figure(plot_id, figsize=go.get_float_list("fig_size"))
    plot_config = PlotConfiguration(
        plot_id=plot_id,
        fig=fig,
        gridspec_dict={"main": gridspec},
        subplot_dict={},
        extra_artists=[],
        legend_handles=[],
    )
    return plot_config
def get_plot_ids() -> List[int]:
    """
    Currently we assume that the list of file-names holds the ground-truth on the
    number of plots we want to create.
    :return: A list of plot-ids.
    """
    plot_count = len(go.get_indices("file_names"))
    return list(range(plot_count))
def setup_plot(plot_config: PlotConfiguration, gridspec: Optional[gs.GridSpec] = None):
    """Create the main subplot (id 0) for *plot_config*, using the "main"
    gridspec of the configuration when none is given explicitly."""
    chosen = plot_config.gridspec_dict["main"] if gridspec is None else gridspec
    init_subplot(plot_config, 0, chosen[0])
def setup_plots(plot_ids: Optional[List[int]] = None, gridspec=None):
    """
    A setup for the different plots
    (both the main plot and the small bar at the bottom).

    :param plot_ids: Figure numbers to set up; defaults to [0].
    :param gridspec: Layout shared by all figures.  Defaults to a fresh 1x1
        GridSpec, created per call rather than as a default argument that
        would be evaluated once at import time and shared between calls.
    :return: One PlotConfiguration per plot id.
    """
    init_params()
    if plot_ids is None:
        plot_ids = [0]
    if gridspec is None:
        gridspec = gs.GridSpec(1, 1)
    plot_configs = []
    for plot_id in plot_ids:
        plot_configs.append(setup_figure(plot_id, gridspec))
        # We assume that the first entry in the gridspec will contain the "main" plot,
        # so we initialize it with the parameters we read from the global options.
        init_subplot(plot_configs[-1], 0, gridspec[0])
    # axis = [init_subplot(plot_id, grid_spec[0]) for i, plot_id in enumerate(plot_ids)]
    return plot_configs
class ParseColumns:
    """Line handler for pf.read_file: collects selected columns as floats.

    After parsing, ``data[col]`` holds one float per processed line and
    ``generations`` holds the generation number of each line.
    """

    def __init__(self, columns: List[int]):
        self.data: Dict[int, List[float]] = {}
        for column in columns:
            self.data[column] = []
        self.generations: List[int] = []

    def __call__(self, split_line: List[str], generation: int):
        """Record *generation* and the requested columns of *split_line*."""
        self.generations.append(generation)
        for column, values in self.data.items():
            values.append(float(split_line[column]))
def plot_annotations(ax):
    """Overlay extra lines read from data files, one per 'line_from_file' option."""
    for index in go.get_indices("line_from_file"):
        line_file = go.get_str("line_from_file", index)
        x_column = go.get_int("line_from_file_x_column", index, when_not_exist=go.RETURN_FIRST)
        y_column = go.get_int("line_from_file_y_column", index, when_not_exist=go.RETURN_FIRST)
        color = go.get_str("line_from_file_color", index, when_not_exist=go.RETURN_FIRST)
        linestyle = go.get_str("line_from_file_linestyle", index, when_not_exist=go.RETURN_FIRST)
        linewidth = go.get_float("line_from_file_linewidth", index, when_not_exist=go.RETURN_FIRST)
        # Collect the two requested columns from the file, then plot them.
        column_parser = ParseColumns([x_column, y_column])
        pf.read_file(line_file, column_parser)
        ax.plot(column_parser.data[x_column],
                column_parser.data[y_column],
                color=color,
                linestyle=linestyle,
                linewidth=linewidth)
def plot_background(ax):
    """
    Draw a gradient image based on a provided function.

    The expression from the "background" option is evaluated once per pixel
    with ``x_val``/``y_val`` bound, and its result is mapped linearly onto
    [background_colormap_min, background_colormap_max] of the colormap.
    :param ax: Axes The axes to draw on.
    """
    y_min = go.get_float("y_axis_min")
    y_max = go.get_float("y_axis_max")
    x_max = go.get_float("x_axis_max")
    x_min = go.get_float("x_axis_min")
    background_func = go.get_str("background")
    cmap = go.get_str("background_colormap")
    cmap_min = go.get_float("background_colormap_min")
    cmap_max = go.get_float("background_colormap_max")
    x_res = round(ax.bbox.width)
    y_res = round(ax.bbox.height)
    # Compile the user expression once instead of re-parsing the string for
    # every pixel.  SECURITY NOTE: eval of a config-supplied expression can
    # execute arbitrary code; only use trusted configuration files.
    compiled_func = compile(background_func, "<background>", "eval")
    image = np.zeros((y_res, x_res), dtype=np.float64)
    for x in range(x_res):
        for y in range(y_res):
            # NOTE(review): x_val/y_val start at 0 rather than x_min/y_min;
            # confirm that "background" expressions expect offsets from the
            # axis origin rather than absolute data coordinates.
            x_val = (x * (x_max - x_min) / (x_res - 1))
            y_val = (y * (y_max - y_min) / (y_res - 1))
            val = eval(compiled_func, {}, {"x_val": x_val, "y_val": y_val})
            image[y, x] = cmap_min + (cmap_max - cmap_min) * val
    interpolation = 'nearest'
    im = ax.imshow(image, extent=(x_min, x_max, y_min, y_max),
                   interpolation=interpolation,
                   vmin=0, vmax=1, aspect="equal", origin="lower",
                   cmap=plt.get_cmap(cmap))
    return im
def create_color_bar(plot_config):
    """Attach a vertical colorbar to the right of all existing axes."""
    cmap = go.get_str("color_bar_colormap")
    # Span the union of every axes' box so the bar matches the plot height.
    current_box = tf.Bbox.union([ax.get_position() for ax in plot_config.fig.axes])
    cax = plot_config.fig.add_axes([
        current_box.xmax + go.get_float("color_bar_margin"),
        current_box.ymin,
        go.get_float("color_bar_width"),
        current_box.height
    ])
    cbar = plot_config.fig.colorbar(cm.ScalarMappable(norm=None, cmap=plt.get_cmap(cmap)), cax=cax)
    cbar.set_label(
        go.get_str("color_bar_label"),
        rotation=go.get_float("color_bar_label_rotation"),
        fontsize=go.get_float("color_bar_label_font_size"),
        labelpad=go.get_float("color_bar_label_pad"),
    )
def export_legend(plot_config):
    """Render the legend alone into its own image file ("<name>_legend.<type>")."""
    output_dir = go.get_str("output_directory")
    ext = "." + go.get_str("type")
    out_file_path = output_dir + "/" + go.get_str("file_names", plot_config.plot_id) + "_legend" + ext
    # Create a new figure specifically for the legend
    fig = plt.figure()
    ax = fig.add_axes([0, 0, 1, 1])
    ax.axis('off')
    # Setup the legend as normal, except always in the lower left of the figure
    # and without any offset
    lgd = _setup_legend(ax, plot_config.legend_handles, "lower left", (0, 0, 1, 1))
    # Figure out the size of the legend if it would be rendered, and adjust the
    # figure accordingly
    renderer = get_renderer(fig)
    bbox = lgd.get_window_extent(renderer).transformed(fig.dpi_scale_trans.inverted())
    fig.set_size_inches(bbox.width, bbox.height)
    # Save the legend to a file
    fig.savefig(out_file_path, dpi="figure", bbox_inches=bbox)
def _setup_legend(ax, handles, legend_loc, bbox_to_anchor):
    """Create the legend on *ax* using the globally configured spacing options.

    :param handles: Explicit legend handles, or None to let matplotlib
        collect them from the axes.
    :return: The created legend artist.
    """
    columns = go.get_int("legend_columns")
    legend_label_spacing = go.get_float("legend_label_spacing")
    legend_column_spacing = go.get_float("legend_column_spacing")
    legend_handle_text_pad = go.get_float("legend_handle_text_pad")
    debug_print("legend", "location:", legend_loc, "columns:", columns)
    lgd = ax.legend(handles=handles,
                    loc=legend_loc, ncol=columns,
                    bbox_to_anchor=bbox_to_anchor,
                    labelspacing=legend_label_spacing,
                    columnspacing=legend_column_spacing,
                    handletextpad=legend_handle_text_pad)
    return lgd
def setup_legend(plot_config: PlotConfiguration):
    """Add the legend to the main axes, unless legend_loc is "none"."""
    fig = plt.figure(plot_config.plot_id)
    ax = fig.get_axes()[0]
    # if getFloat("box_sep") == 0:
    #    plt.tight_layout()
    legend_loc = go.get_str("legend_loc", plot_config.plot_id, when_not_exist=go.RETURN_FIRST)
    if legend_loc != "none":
        anchor_x = go.get_float("legend_x_offset")
        anchor_y = go.get_float("legend_y_offset")
        bbox_to_anchor = (anchor_x, anchor_y, 1, 1)
        handles = None
        if len(plot_config.legend_handles) > 0:
            handles = plot_config.legend_handles
        lgd = _setup_legend(ax, handles, legend_loc, bbox_to_anchor)
        # Remember the legend so save-time bounding boxes include it.
        plot_config.extra_artists.append(lgd)
def write_plot(plot_config: PlotConfiguration):
    """Finalize one figure (legend, bounding box) and save it to disk.

    The "bb" option selects how the bounding box is determined:
    "default"/"tight" use matplotlib's built-in behaviour, "custom" keeps
    the configured aspect ratio while pruning whitespace, and "manual"
    applies a user-specified size and offsets.
    """
    print("Writing plot " + str(plot_config.plot_id) + " ...")
    output_dir = go.get_str("output_directory")
    ext = "." + go.get_str("type")
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    setup_legend(plot_config)
    fig = plt.figure(plot_config.plot_id)
    out_file_path = output_dir + "/" + go.get_str("file_names", plot_config.plot_id) + ext
    print(f"Writing plot to: {out_file_path}")
    # Determine custom bounding box
    if go.get_str("bb") == "custom":
        # Union the tight figure box with all extra artists, then grow it to
        # the configured aspect ratio so nothing is clipped.
        fig_size = go.get_float_list("fig_size")
        renderer = get_renderer(fig)
        # bb = fig.get_window_extent(renderer)
        bb = fig.get_tightbbox(renderer)
        target_bb = matplotlib.transforms.Bbox.from_bounds(0, 0, fig_size[0], fig_size[1])
        trans2 = matplotlib.transforms.BboxTransformTo(target_bb)
        trans = fig.transFigure.inverted()
        print("Figure size:", fig_size)
        print("Original bb box:", bb.get_points())
        for artist in plot_config.extra_artists:
            other_bb = artist.get_window_extent(renderer)
            other_bb = other_bb.transformed(trans)
            other_bb = other_bb.transformed(trans2)
            print(other_bb.get_points())
            bb = matplotlib.transforms.BboxBase.union([bb, other_bb])
        target_aspect = fig_size[0] / fig_size[1]
        bb_aspect = bb.width / bb.height
        print(target_aspect, bb_aspect)
        # Expand along whichever axis is too small to reach the target aspect.
        if target_aspect < bb_aspect:
            bb = bb.expanded(1, bb_aspect / target_aspect)
        else:
            bb = bb.expanded(target_aspect / bb_aspect, 1)
        bb = bb.padded(0.2)
        print("Extended bb box:", bb.get_points())
        plt.savefig(out_file_path,
                    bbox_extra_artists=plot_config.extra_artists, bbox_inches=bb)
    elif go.get_str("bb") == "manual":
        fig_size = go.get_float_list("fig_size")
        renderer = get_renderer(fig)
        ext_width = go.get_float("bb_width")
        ext_heigth = go.get_float("bb_height")
        x_offset = go.get_float("bb_x_offset")
        y_offset = go.get_float("bb_y_offset")
        x_tight_center = go.get_float("bb_x_center_includes_labels")
        y_tight_center = go.get_float("bb_y_center_includes_labels")
        # Get the transformations that we need
        inches_to_pixels = fig.dpi_scale_trans
        pixels_to_inches = inches_to_pixels.inverted()
        # Get the bounding box of the window
        win_bb_in_pixels = fig.get_window_extent(renderer)
        # Get the bounding box of the actual figure, including labels
        fig_bb_in_inches = fig.get_tightbbox(renderer)
        fig_bb_in_pixels = fig_bb_in_inches.transformed(inches_to_pixels)
        # Get a new bounding box just as wide as the window, but with the
        # center of the figure bounding box
        new_bb_in_pixels = win_bb_in_pixels.frozen()
        if x_tight_center:
            width_ratio = win_bb_in_pixels.width / fig_bb_in_pixels.width
            new_bb_in_pixels.x0 = fig_bb_in_pixels.x0
            new_bb_in_pixels.x1 = fig_bb_in_pixels.x1
            new_bb_in_pixels = new_bb_in_pixels.expanded(width_ratio, 1)
        if y_tight_center:
            height_ratio = win_bb_in_pixels.height / fig_bb_in_pixels.height
            new_bb_in_pixels.y0 = fig_bb_in_pixels.y0
            new_bb_in_pixels.y1 = fig_bb_in_pixels.y1
            new_bb_in_pixels = new_bb_in_pixels.expanded(1, height_ratio)
        # Transform to inch space
        bb_in_inches = new_bb_in_pixels.transformed(pixels_to_inches)
        # Apply custom transformations
        bb_in_inches = bb_in_inches.expanded(
            float(ext_width) / float(fig_size[0]),
            float(ext_heigth) / float(fig_size[1]))
        bb_in_inches.y0 += y_offset
        bb_in_inches.y1 += y_offset
        bb_in_inches.x0 += x_offset
        bb_in_inches.x1 += x_offset
        plt.savefig(out_file_path,
                    bbox_extra_artists=plot_config.extra_artists,
                    bbox_inches=bb_in_inches)
    elif go.get_str("bb") == "default":
        plt.savefig(out_file_path,
                    bbox_extra_artists=plot_config.extra_artists)
    elif go.get_str("bb") == "tight":
        plt.savefig(out_file_path,
                    bbox_extra_artists=plot_config.extra_artists,
                    bbox_inches='tight')
    else:
        raise Exception("Invalid bounding box option.")
    print("Writing plot " + str(plot_config.plot_id) + " done.")
def write_plots(plot_configs: List[PlotConfiguration]):
    """Save every configured figure to disk via write_plot()."""
    print("Writing plots...")
    for config in plot_configs:
        write_plot(config)
# Default factories for derived options: each is registered below as a lazy
# default so a single base setting ("font_size" / "background_colormap")
# drives the related options.
def def_legend_font_size(): return go.get_int("font_size") - 4
def def_title_font_size(): return go.get_int("font_size") + 4
def def_tick_font_size(): return go.get_int("font_size") - 6
def def_color_bar_label_font_size(): return go.get_float("font_size")
def def_color_bar_colormap(): return go.get_str("background_colormap")
def add_options():
    """Register every plotting option (name, default, help text) with global_options."""
    def def_output_dir():
        # Derive the output directory from the config file name, or fall back
        # to the first unused my_plot_<N> directory.
        if go.get_exists("config_file"):
            return pf.base(go.get_str("config_file")) + "_out"
        else:
            number = 1
            name = "my_plot_" + str(number)
            while os.path.exists(name):
                number += 1
                name = "my_plot_" + str(number)
            return name
    go.add_option("output_directory", def_output_dir, nargs=1,
                  help_str="Resulting plots will be put into this directory.")
    go.add_option("type", "pdf", nargs=1,
                  help_str="The file type in which the plot will be written.")
    go.add_option("fig_size", [[8, 6]], nargs=2,
                  help_str="The size of the resulting figure.")
    go.add_option("title", True, nargs=1,
                  help_str="Show the title of the plot.")
    # Font settings
    go.add_option("font_size", 18, nargs=1,
                  help_str="The base font-size for the plot "
                           "(other font-sizes are relative to this one).")
    go.add_option("title_size", def_title_font_size, nargs=1,
                  aliases=["title_font_size"],
                  help_str="Font size for the title.")
    go.add_option("legend_font_size", def_legend_font_size, nargs=1,
                  help_str="Font size for the legend.")
    go.add_option("tick_font_size", def_tick_font_size, nargs=1,
                  help_str="Font size for the tick-labels.")
    # Per plot settings
    go.add_option("file_names", "my_plot", aliases=["plot_output"],
                  help_str="The names of the output files for each plotted column.")
    go.add_option("titles", "Unnamed plot", aliases=["plot_title"],
                  help_str="The titles for each plot.")
    go.add_option("x_labels", "Number of Generations", aliases=["plot_x_label"],
                  help_str="The x labels for each plot.")
    go.add_option("y_labels", "Value", aliases=["plot_y_label"],
                  help_str="The x labels for each plot.")
    go.add_option("legend_loc", "best", aliases=["plot_legend_loc"],
                  help_str="Legend location for each plot.")
    go.add_option("y_axis_min", aliases=["plot_y_min"],
                  help_str="The minimum value for the y axis.")
    go.add_option("y_axis_max", aliases=["plot_y_max"],
                  help_str="The maximum value for the y axis.")
    go.add_option("x_axis_max", aliases=["plot_x_max"],
                  help_str="The minimum value for the x axis.")
    go.add_option("x_axis_min", aliases=["plot_x_min"],
                  help_str="The maximum value for the x axis.")
    go.add_option("x_ticks",
                  help_str="Use the provided strings as labels for the x-ticks.")
    go.add_option("y_ticks",
                  help_str="Use the provided strings as labels for the y-ticks.")
    # Legend settings
    go.add_option("legend_columns", 1, nargs=1,
                  help_str="Number of columns for the legend.")
    go.add_option("legend_x_offset", 0, nargs=1,
                  help_str="Allows for fine movement of the legend.")
    go.add_option("legend_y_offset", 0, nargs=1,
                  help_str="Allows for fine movement of the legend.")
    go.add_option("legend_label_spacing", 0.5, nargs=1,
                  help_str="Space between legend labels.")
    go.add_option("legend_column_spacing", 2.0, nargs=1,
                  help_str="Horizontal space between legend labels.")
    go.add_option("legend_handle_text_pad", 0.8, nargs=1,
                  help_str="Horizontal space between legend labels.")
    # Bounding box settings
    go.add_option("bb", "tight", nargs=1,
                  help_str="How the bounding box of the image is determined. Options are "
                           "default (keep aspect ratio and white space), "
                           "tight (sacrifice aspect ratio to prune white space), "
                           "manual (specify the bounding box yourself),"
                           "and custom (keep aspect ratio but prune some white space).")
    go.add_option("bb_width", nargs=1,
                  help_str="The width of the bounding box, in inches.")
    go.add_option("bb_height", nargs=1,
                  help_str="The height of the bounding box, in inches.")
    go.add_option("bb_x_offset", 0, nargs=1,
                  help_str="The x offset of the bounding box, in inches.")
    go.add_option("bb_y_offset", 0, nargs=1,
                  help_str="The y offset of the bounding box, in inches.")
    go.add_option("bb_x_center_includes_labels", True, nargs=1,
                  help_str="If True, take the figure labels into account when horizontally "
                           "centering the bounding box. If false, ignore the labels when "
                           "horizontally centering.")
    go.add_option("bb_y_center_includes_labels", True, nargs=1,
                  help_str="If True, take the figure labels into account when vertically "
                           "centering the bounding box. If false, ignore the labels when "
                           "vertically centering.")
    # Annotations
    go.add_option("line_from_file", None,
                  help_str="")
    go.add_option("line_from_file_x_column", 0,
                  help_str="")
    go.add_option("line_from_file_y_column", 1,
                  help_str="")
    go.add_option("line_from_file_color", "#000000",
                  help_str="")
    go.add_option("line_from_file_linestyle", "-",
                  help_str="")
    go.add_option("line_from_file_linewidth", 1,
                  help_str="")
    # Background options
    go.add_option("background", None, nargs=1,
                  help_str="")
    go.add_option("background_colormap", "Greys", nargs=1,
                  help_str="'Accent', 'Accent_r', 'Blues', 'Blues_r', 'BrBG', 'BrBG_r', "
                           "'BuGn', 'BuGn_r', 'BuPu', 'BuPu_r', 'CMRmap', 'CMRmap_r', "
                           "'Dark2', 'Dark2_r', 'GnBu', 'GnBu_r', 'Greens', 'Greens_r', "
                           "'Greys', 'Greys_r', 'OrRd', 'OrRd_r', 'Oranges', 'Oranges_r', "
                           "'PRGn', 'PRGn_r', 'Paired', 'Paired_r', 'Pastel1', 'Pastel1_r', "
                           "'Pastel2', 'Pastel2_r', 'PiYG', 'PiYG_r', 'PuBu', 'PuBuGn', "
                           "'PuBuGn_r', 'PuBu_r', 'PuOr', 'PuOr_r', 'PuRd', 'PuRd_r', "
                           "'Purples', 'Purples_r', 'RdBu', 'RdBu_r', 'RdGy', 'RdGy_r', "
                           "'RdPu', 'RdPu_r', 'RdYlBu', 'RdYlBu_r', 'RdYlGn', 'RdYlGn_r', "
                           "'Reds', 'Reds_r', 'Set1', 'Set1_r', 'Set2', 'Set2_r', 'Set3', "
                           "'Set3_r', 'Spectral', 'Spectral_r', 'Wistia', 'Wistia_r', 'YlGn', "
                           "'YlGnBu', 'YlGnBu_r', 'YlGn_r', 'YlOrBr', 'YlOrBr_r', 'YlOrRd', "
                           "'YlOrRd_r', 'afmhot', 'afmhot_r', 'autumn', 'autumn_r', 'binary', "
                           "'binary_r', 'bone', 'bone_r', 'brg', 'brg_r', 'bwr', 'bwr_r', "
                           "'cividis', 'cividis_r', 'cool', 'cool_r', 'coolwarm', 'coolwarm_r', "
                           "'copper', 'copper_r', 'cubehelix', 'cubehelix_r', 'flag', 'flag_r', "
                           "'gist_earth', 'gist_earth_r', 'gist_gray', 'gist_gray_r', 'gist_heat', "
                           "'gist_heat_r', 'gist_ncar', 'gist_ncar_r', 'gist_rainbow', 'gist_rainbow_r', "
                           "'gist_stern', 'gist_stern_r', 'gist_yarg', 'gist_yarg_r', "
                           "'gnuplot', 'gnuplot2', 'gnuplot2_r', 'gnuplot_r', 'gray', "
                           "'gray_r', 'hot', 'hot_r', 'hsv', 'hsv_r', 'inferno', 'inferno_r', "
                           "'jet', 'jet_r', 'magma', 'magma_r', 'nipy_spectral', 'nipy_spectral_r', "
                           "'ocean', 'ocean_r', 'pink', 'pink_r', 'plasma', 'plasma_r', 'prism', "
                           "'prism_r', 'rainbow', 'rainbow_r', 'seismic', 'seismic_r', 'spring', "
                           "'spring_r', 'summer', 'summer_r', 'tab10', 'tab10_r', 'tab20', 'tab20_r', "
                           "'tab20b', 'tab20b_r', 'tab20c', 'tab20c_r', 'terrain', 'terrain_r', 'turbo', "
                           "'turbo_r', 'twilight', 'twilight_r', 'twilight_shifted', 'twilight_shifted_r', "
                           "'viridis', 'viridis_r', 'winter', 'winter_r'")
    go.add_option("background_colormap_min", 0.0, nargs=1,
                  help_str="")
    go.add_option("background_colormap_max", 1.0, nargs=1,
                  help_str="")
    # Color bar options
    go.add_option("color_bar_colormap", "Greys", nargs=1,
                  help_str="The colormap used for the color bar.")
    go.add_option("color_bar_margin", 0.005, nargs=1,
                  help_str="The distance between the main plot and the color bar in a "
                           "percentage of the overall figure.")
    go.add_option("color_bar_width", 0.015, nargs=1,
                  help_str="The width of the color bar as a percentage of the overall figure.")
    go.add_option("color_bar_label", "", nargs=1,
                  help_str="The label next to the color bar.")
    go.add_option("color_bar_label_rotation", 0, nargs=1,
                  help_str="The width of the color bar as a percentage of the overall figure.")
    go.add_option("color_bar_label_font_size", def_color_bar_label_font_size, nargs=1,
                  help_str="The font size of the color bar label.")
    go.add_option("color_bar_label_pad", 0, nargs=1,
                  help_str="The padding (x-offset of the color bar label.")
| JoostHuizinga/ea-plotting-scripts | configure_plots.py | Python | mit | 25,413 | 0.002873 |
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This Module performs Unit Tests for the TreeStructure classes
It cannot be considered part of the active code but of the regression test system
"""
#For future compatibility with Python 3
from __future__ import division, print_function, unicode_literals, absolute_import
import warnings
warnings.simplefilter('default',DeprecationWarning)
import os,sys
import numpy as np
frameworkDir = os.path.abspath(os.path.join(os.path.dirname(__file__),os.pardir,os.pardir,os.pardir,os.pardir,'framework'))
print('framework:',frameworkDir)
sys.path.append(frameworkDir)
from utils import TreeStructure as TS
results = {"pass":0,"fail":0}
# comparison helper functions used by the tests below
def checkAnswer(comment,value,expected,tol=1e-10,updateResults=True):
  """
    Compares two floats within an absolute tolerance and tallies the outcome.
    @ In, comment, string, a comment printed out if it fails
    @ In, value, float, the value to compare
    @ In, expected, float, the expected value
    @ In, tol, float, optional, the absolute tolerance
    @ In, updateResults, bool, optional, if True updates global results
    @ Out, passed, bool, True if |value - expected| <= tol
  """
  passed = abs(value - expected) <= tol
  if not passed:
    print("checking answer",comment,value,"!=",expected)
  if updateResults:
    # tally into the module-level pass/fail counters
    results["pass" if passed else "fail"] += 1
  return passed
def checkSame(comment,value,expected,updateResults=True):
  """
    Checks two values for exact equality and tallies the outcome.
    @ In, comment, string, a comment printed out if it fails
    @ In, value, any, the value to compare
    @ In, expected, any, the expected value
    @ In, updateResults, bool, optional, if True updates global results
    @ Out, matched, bool, True if the values compare equal
  """
  if value != expected:
    print("checking answer",comment,value,"!=",expected)
    matched = False
  else:
    matched = True
  if updateResults:
    # tally into the module-level pass/fail counters
    results["pass" if matched else "fail"] += 1
  return matched
def checkArray(comment,check,expected,tol=1e-10):
  """
    Compares two float sequences entry by entry within a tolerance.
    @ In, comment, string, a comment printed out if it fails
    @ In, check, list, the values to compare
    @ In, expected, list, the expected values
    @ In, tol, float, optional, the absolute tolerance per entry
    @ Out, matched, bool, True if the sequences match entry by entry
  """
  if len(check) != len(expected):
    matched = False
  else:
    # evaluate every entry eagerly (no short-circuit) so each mismatch prints
    entryResults = [checkAnswer(comment+'[%i]'%i,check[i],expected[i],tol,False) for i in range(len(check))]
    matched = all(entryResults)
  if matched:
    results['pass']+=1
    return True
  print("checking array",comment,"did not match!")
  results['fail']+=1
  return False
def checkType(comment,value,expected,updateResults=True):
  """
    Checks that two values share the exact same data type.
    @ In, comment, string, a comment printed out if it fails
    @ In, value, any, the value whose type is compared
    @ In, expected, any, the value carrying the expected type
    @ In, updateResults, bool, optional, if True updates global results
    @ Out, sameType, bool, True if both values have the identical type
  """
  sameType = type(value) == type(expected)
  if not sameType:
    print("checking type",comment,value,'|',type(value),"!=",expected,'|',type(expected))
  if updateResults:
    # tally into the module-level pass/fail counters
    results["pass" if sameType else "fail"] += 1
  return sameType
##############
# Node Tests #
##############
# TODO not complete!
#test equivalency (eq, neq, hash)
## test all same are same
a = TS.HierarchicalNode('rightTag',valuesIn={'attrib1':1,'attrib2':'2'},text='sampleText')
b = TS.HierarchicalNode('rightTag',valuesIn={'attrib1':1,'attrib2':'2'},text='sampleText')
checkSame('Equivalency of nodes ==:',a==b,True)
checkSame('Equivalency of nodes !=:',a!=b,False)
## test different tag
b = TS.HierarchicalNode('diffTag',valuesIn={'attrib1':1,'attrib2':'2'},text='sampleText')
checkSame('Inequivalent tag ==:',a==b,False)
checkSame('Inequivalent tag !=:',a!=b,True)
## test different attribute name
b = TS.HierarchicalNode('rightTag',valuesIn={'attrib3':1,'attrib2':'2'},text='sampleText')
checkSame('Inequivalent value name ==:',a==b,False)
checkSame('Inequivalent value name !=:',a!=b,True)
## test different attribute value
b = TS.HierarchicalNode('rightTag',valuesIn={'attrib1':3,'attrib2':'2'},text='sampleText')
checkSame('Inequivalent value name ==:',a==b,False)
checkSame('Inequivalent value name !=:',a!=b,True)
## test different text value
b = TS.HierarchicalNode('rightTag',valuesIn={'attrib1':3,'attrib2':'2'},text='diffText')
checkSame('Inequivalent value name ==:',a==b,False)
checkSame('Inequivalent value name !=:',a!=b,True)
## test equivalent, only tags
a = TS.HierarchicalNode('rightTag')
b = TS.HierarchicalNode('rightTag')
checkSame('Equivalency only tag ==:',a==b,True)
checkSame('Equivalency only tag !=:',a!=b,False)
## test equivalent, only values
a = TS.HierarchicalNode('rightTag',valuesIn={'attrib1':1,'attrib2':'2'})
b = TS.HierarchicalNode('rightTag',valuesIn={'attrib1':1,'attrib2':'2'})
checkSame('Equivalency only values ==:',a==b,True)
checkSame('Equivalency only values !=:',a!=b,False)
## test equivalent, only text
a = TS.HierarchicalNode('rightTag',text='sampleText')
b = TS.HierarchicalNode('rightTag',text='sampleText')
checkSame('Equivalency only text ==:',a==b,True)
checkSame('Equivalency only text !=:',a!=b,False)
##############
# Tree Tests #
##############
# TODO
##################
# Metadata Tests #
##################
# construction
# A static tree holds one snapshot; a dynamic tree is indexed by a pivot
# parameter (here 'timeParam').
static = TS.StaticMetadataTree('myStaticData')
dynamic = TS.DynamicMetadataTree('myDynamicData','timeParam')
# test "dynamic" attribute set correctly
checkSame('Static "dynamic" property correctly set:',static.getrootnode().get('dynamic'),'False')
checkSame('Dynamic "dynamic" property correctly set:',dynamic.getrootnode().get('dynamic'),'True')
# test message handler works (implicit test, no error means success)
static.raiseADebug('Debug message in Static successful!')
dynamic.raiseADebug('Debug message in Dynamic successful!')
results['pass']+=2
#test adding scalar entries (implicit test, no error means success)
static.addScalar('myTarget','myMetric',3.14159)
results['pass']+=1
dynamic.addScalar('myTarget','myMetric',3.14159,pivotVal=0.1) #pivot value as float
results['pass']+=1
dynamic.addScalar('myTarget','myMetric',299792358,pivotVal='0.2') #pivot value as string
results['pass']+=1
#test finding pivotNode (dynamic only)
a = TS.HierarchicalNode('timeParam',valuesIn={'value':0.2})
b = dynamic._findPivot(dynamic.getrootnode(),0.2)
checkSame('Finding pivot node:',b,a)
#test finding targetNode
## static
a = TS.HierarchicalNode('myTarget')
b = static._findTarget(static.getrootnode(),'myTarget')
checkSame('Finding target (static):',b,a)
## dynamic
a = TS.HierarchicalNode('myTarget')
c = dynamic._findTarget(dynamic.getrootnode(),'myTarget',0.2)
checkSame('Finding target (dynamic):',c,a)
#test values recorded
checkAnswer('Recorded data (static):',b.findBranch('myMetric').text,3.14159)
c = dynamic._findTarget(dynamic.getrootnode(),'myTarget',0.1)
checkAnswer('Recorded data (dynamic 1):',c.findBranch('myMetric').text,3.14159)
c = dynamic._findTarget(dynamic.getrootnode(),'myTarget',0.2)
checkAnswer('Recorded data (dynamic 2):',c.findBranch('myMetric').text,299792358)
# report the pass/fail tally; a non-zero exit code flags failures to the
# regression test harness
print('{0}ed: {2}, {1}ed: {3}'.format(*(list(str(r) for r in results.keys())+list(results.values()))))
sys.exit(results["fail"])
"""
<TestInfo>
<name>framework.treeStructure</name>
<author>talbpaul</author>
<created>2016-11-01</created>
<classesTested>utils.TreeStructure</classesTested>
<description>
This test performs Unit Tests for the TreeStructure classes
It cannot be considered part of the active code but of the regression test system
</description>
<revisions>
<revision author="talbpaul" date="2016-11-08">Relocated utils tests</revision>
<revision author="alfoa" date="2017-01-21">Adding this test description.</revision>
</revisions>
</TestInfo>
"""
| joshua-cogliati-inl/raven | tests/framework/unit_tests/utils/testTreeStructure.py | Python | apache-2.0 | 8,503 | 0.029401 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
WebJournal widget - display photos from given collections
"""
from invenio.bibformat_engine import BibFormatObject
from invenio.search_engine import perform_request_search
from invenio.config import CFG_CERN_SITE, CFG_SITE_URL, CFG_SITE_RECORD
def format_element(bfo, collections, max_photos="3", separator="<br/>"):
    """
    Display the latest pictures from the given collection(s)
    @param collections: comma-separated list of collections from which photos have to be fetched
    @param max_photos: maximum number of photos to display
    @param separator: separator between photos
    """
    # Narrow exception handling: int() raises ValueError/TypeError for bad
    # input; a bare 'except' would also swallow KeyboardInterrupt/SystemExit.
    try:
        int_max_photos = int(max_photos)
    except (ValueError, TypeError):
        int_max_photos = 0

    # collections may be None or a non-string; only AttributeError from
    # .split() is an expected failure mode here.
    try:
        collections_list = [coll.strip() for coll in collections.split(',')]
    except AttributeError:
        collections_list = []

    out = get_widget_html(bfo.lang, int_max_photos,
                          collections_list, separator, bfo.lang)
    return out
def escape_values(bfo):
    """
    Called by BibFormat in order to check if output of this element
    should be escaped.
    """
    # 0 tells BibFormat not to escape the HTML produced by format_element.
    return 0
def get_widget_html(language, max_photos, collections, separator, ln):
    """
    Returns the content of the widget: HTML links/thumbnails for the latest
    photos found in the given collections, joined by ``separator``.
    """
    # fetch the record ids of the most recent photos in the collections
    latest_photo_ids = perform_request_search(c=collections,
                                              rg=max_photos,
                                              of='id')
    images_urls = []
    for recid in latest_photo_ids[:max_photos]:
        try:
            photo_record = BibFormatObject(recid)
        except:
            # todo: Exception, no photo in this selection
            continue
        # pick the title: French title (MARC 246_1a) when language is 'fr',
        # falling back to the default title field (MARC 245__a)
        if language == "fr":
            try:
                title = photo_record.fields('246_1a', escape=1)[0]
            except KeyError:
                try:
                    title = photo_record.fields('245__a', escape=1)[0]
                except:
                    title = ""
        else:
            try:
                title = photo_record.fields('245__a', escape=1)[0]
            except KeyError:
                # todo: exception, picture with no title
                title = ""
        # at CERN, icon URLs live in MARC 8567_; elsewhere in 8564_
        # NOTE(review): only http:// URLs are accepted -- https icons are
        # silently skipped; confirm whether that is still intended.
        if CFG_CERN_SITE and photo_record.fields('8567_'):
            # Get from 8567_
            dfs_images = photo_record.fields('8567_')
            for image_block in dfs_images:
                if image_block.get("y", '') == "Icon":
                    if image_block.get("u", '').startswith("http://"):
                        images_urls.append((recid, image_block["u"], title))
                    break # Just one image per record
        else:
            # Get from 8564_
            images = photo_record.fields('8564_')
            for image_block in images:
                if image_block.get("x", '').lower() == "icon":
                    if image_block.get("q", '').startswith("http://"):
                        images_urls.append((recid, image_block["q"], title))
                    break # Just one image per record
    # Build output
    html_out = separator.join(['<a href="%s/%s/%i?ln=%s"><img class="phr" width="100" height="67" src="%s"/>%s</a>' % (CFG_SITE_URL, CFG_SITE_RECORD, recid, ln, photo_url, title) for (recid, photo_url, title) in images_urls])
    return html_out
| Panos512/invenio | modules/webjournal/lib/widgets/bfe_webjournal_widget_latestPhoto.py | Python | gpl-2.0 | 4,058 | 0.005914 |
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.common.exceptions import NoSuchElementException
from webium import BasePage as WebiumBasePage, Find
class BasePage(WebiumBasePage):
    """
    Base page object shared by all pages: common locators, login-state
    helpers, hover support and AJAX/loader synchronisation.
    """
    url_path = None

    a_tag = "//a[contains(.,'{link_text}')]"
    login_link = Find(by=By.XPATH, value=a_tag.format(link_text='Sign in'))
    logout_btn = Find(by=By.XPATH, value="//button[contains(.,'Sign out')]")
    account_options_btn = Find(by=By.XPATH, value=a_tag.replace('.', '@aria-label').format(link_text='View profile and more'))
    loader_xpath = "//div[@id='prestatus']"

    def clear_send_keys(self, element_name, kwargs):
        """Clear the named element, then type the value found at kwargs[element_name]."""
        value = kwargs.get(element_name)
        element = getattr(self, element_name)
        element.clear()
        element.send_keys(value)

    def hover(self, element):
        """Move the mouse over ``element`` and wait for AJAX and the loader to settle."""
        hov = ActionChains(self._driver).move_to_element(element)
        hov.perform()
        self.wait_for_loading()
        self.wait_for_loader_disappear()

    def get_login_status(self):
        """Return 'logged in' or 'logged out' based on the account menu contents."""
        try:
            self.account_options_btn.click()
            # is_displayed() already returns a bool; no need to compare to True
            return 'logged in' if self.logout_btn.is_displayed() else 'logged out'
        except NoSuchElementException:
            return 'logged out'

    def wait_for_loading(self, seconds=180):
        """Block until all jQuery AJAX requests have finished (up to ``seconds``)."""
        wait = WebDriverWait(self._driver, seconds)
        wait.until(lambda x: self._driver.execute_script('return jQuery.active == 0') is True)

    def replace_bad_elements(self, css_locator):
        """Remove every element matching the CSS locator from the DOM via jQuery."""
        self._driver.execute_script("$('{}').remove()".format(css_locator))

    def is_loader_displayed(self, *args):
        # *args lets WebDriverWait pass its driver argument through unchanged.
        return self._driver.find_element_by_xpath(self.loader_xpath).is_displayed()

    def wait_for_loader_disappear(self):
        """Wait (up to 500s) until the page loader overlay is gone."""
        WebDriverWait(self._driver, timeout=500).until_not(
            self.is_loader_displayed, "Timeout waiting for loader disappear")
#
# This file is only used in the Java (Jython) version
# It serves as an entry point.
#
import sys
import PyPlucker.Spider
from java.lang import Runtime
from java.util import Hashtable
import org.plkr.distiller.API
class InvokePluckerBuildFromJava (org.plkr.distiller.API.Invocation):
    # Jython bridge exposing plucker-build to Java callers through the
    # org.plkr.distiller.API.Invocation interface. The bare string literals
    # below are jythonc "@sig" signature annotations and must not be altered.

    def __init__(self):
        "@sig public InvokePluckerBuildFromJava()"
        pass

    def create_dict_from_hashtable (self, ht):
        # Convert a java.util.Hashtable into a plain Python dict with
        # stringified keys and values.
        dict = {}
        e = ht.keys()  # java.util.Enumeration over the table's keys
        while e.hasMoreElements():
            key = e.nextElement()
            value = ht.get(key)
            dict[str(key)] = str(value)
        return dict

    def invoke(self, args, os, inputstring, config, callback):
        "@sig public int invoke(java.lang.String[] args, java.io.OutputStream os, java.lang.String inputString, java.util.Hashtable config, org.plkr.distiller.API.Callback status)"
        # Build the argv vector plucker-build expects, stringifying Java args.
        varargs = ['plucker-build']
        if args:
            for arg in args:
                varargs.append(str(arg))
        if os:
            # Wrap the Java OutputStream as a Python file object for output.
            outputstream = org.python.core.PyFile(os, "<stream>", "wb")
        else:
            outputstream = None
        if config:
            configdict = self.create_dict_from_hashtable(config)
        else:
            configdict = None
        val = PyPlucker.Spider.realmain(varargs, outputstream, inputstring, configdict, callback)
        return val
if __name__ == '__main__':
    # Command-line entry point: run plucker-build with the given arguments
    # and propagate its exit status through the JVM runtime.
    theRuntime = Runtime.getRuntime()
    try:
        val = InvokePluckerBuildFromJava().invoke(sys.argv[1:], None, None, None, None)
        theRuntime.exit(val)
    except:
        # report any failure and exit with a non-zero status
        import traceback
        traceback.print_exc(None, sys.stderr)
        theRuntime.exit(1)
| arpruss/plucker | parser/python/InvokePluckerBuildFromJava.py | Python | gpl-2.0 | 1,742 | 0.004592 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-17 16:10
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: creates the CDN model, makes
    # NetworkIdentifiers.service optional, and links NetworkIdentifiers
    # to CDN via a many-to-many relation.

    dependencies = [
        ('overseas', '0013_auto_20160914_1706'),
    ]

    operations = [
        migrations.CreateModel(
            name='CDN',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('cdn_name', models.CharField(max_length=100)),
                ('active', models.BooleanField(default=True)),
            ],
        ),
        migrations.AlterField(
            model_name='networkidentifiers',
            name='service',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='overseas.Service'),
        ),
        migrations.AddField(
            model_name='networkidentifiers',
            name='cdn',
            # NOTE(review): null=True has no effect on ManyToManyField
            # (Django warns with fields.W340); harmless here, but could be
            # removed in a follow-up migration.
            field=models.ManyToManyField(blank=True, null=True, to='overseas.CDN'),
        ),
    ]
| nevermoreluo/privateoverseas | overseas/migrations/0014_auto_20160918_0010.py | Python | gpl-3.0 | 1,111 | 0.0027 |
import datetime as dt
import numpy as np
import pandas as pd
import thalesians.tsa.checks as checks
import thalesians.tsa.numpyutils as npu
import thalesians.tsa.processes as proc
import thalesians.tsa.randomness as rnd
def xtimes(start, stop=None, step=None):
    """
    Generator of points from ``start`` towards ``stop`` (exclusive),
    analogous to ``range`` but supporting floats, dates, times, datetimes,
    and a callable ``step`` re-evaluated at every point. If ``stop`` is
    None the generator is unbounded.
    """
    checks.check_not_none(start)
    if step is None:
        # infer a sensible default step from the operand types
        if isinstance(start, (dt.date, dt.time, dt.datetime)) or isinstance(stop, (dt.date, dt.time, dt.datetime)):
            step = dt.timedelta(days=1)
        elif isinstance(start, float) or isinstance(stop, float):
            step = 1.
        else:
            step = 1
    resultwrap = lambda x: x
    # dt.time does not support arithmetic, so promote it to a datetime on a
    # dummy date and strip the date back off when yielding
    if isinstance(start, dt.time):
        start = dt.datetime.combine(dt.datetime(1,1,1,0,0,0), start)
        resultwrap = lambda x: x.time()
    if isinstance(stop, dt.time):
        stop = dt.datetime.combine(dt.datetime(1,1,1,0,0,0), stop) if stop is not None else None
        resultwrap = lambda x: x.time()
    # a constant step is wrapped in a function so both cases look the same
    stepfunc = step if checks.is_callable(step) else lambda x: step
    s = stepfunc(start)
    checks.check(npu.sign(s) != 0, 'Step must be positive or negative, not zero')
    if stop is None:
        while True:
            yield resultwrap(start)
            start += s
            s = stepfunc(start)
    else:
        # iterate while start is still approaching stop; this single test
        # handles both positive and negative (possibly varying) steps
        while npu.sign(start - stop) == -npu.sign(s):
            yield resultwrap(start)
            start += s
            s = stepfunc(start)
def times(start, stop=None, step=None):
    """Eagerly evaluate :func:`xtimes` and return the points as a list."""
    return list(xtimes(start, stop, step))
class EulerMaruyama(object):
    """
    Iterator implementing the Euler-Maruyama scheme for simulating an Ito
    process: X_{t+dt} = X_t + drift*dt + diffusion . dW. Each ``next``
    yields a (time, value) pair, one per element of ``times``.
    """
    def __init__(self, process, initial_value=None, times=None, variates=None, time_unit=dt.timedelta(days=1), flatten=False):
        checks.check_instance(process, proc.ItoProcess)
        self.__process = process
        # the state is kept as a column vector; defaults to the zero vector
        self.__value = npu.to_ndim_2(initial_value, ndim_1_to_col=True, copy=True) if initial_value is not None else npu.col_of(process.process_dim, 0.)
        # default time grid: 0., 1., 2., ... (unbounded)
        self.__times = iter(times) if times is not None else xtimes(0., None, 1.)
        # default driving noise: i.i.d. standard multivariate normals
        self.__variates = variates if variates is not None else rnd.multivariate_normals(ndim=process.noise_dim)
        self._time = None
        # used to convert datetime-valued time deltas to float time units
        self._time_unit = time_unit
        self.__flatten = flatten

    def __next__(self):
        """
        Advance the scheme by one time step and return (time, value); the
        first call reports the initial state at the first time point.
        """
        if self._time is None:
            # first step: no propagation, just report the initial state
            self._time = next(self.__times)
        else:
            newtime = next(self.__times)
            time_delta = newtime - self._time
            if isinstance(time_delta, dt.timedelta):
                # express datetime differences in multiples of time_unit
                time_delta = time_delta.total_seconds() / self._time_unit.total_seconds()
            # NOTE(review): the result of the next expression is discarded;
            # it looks like dead code left over from a refactoring -- confirm
            # before removing.
            npu.col_of(self.__process.noise_dim, 0.)
            # dW ~ sqrt(dt) * N(0, I)
            variate_delta = np.sqrt(time_delta) * npu.to_ndim_2(next(self.__variates), ndim_1_to_col=True, copy=False)
            drift = npu.to_ndim_2(self.__process.drift(self._time, self.__value), ndim_1_to_col=True, copy=False)
            diffusion = npu.to_ndim_2(self.__process.diffusion(self._time, self.__value), ndim_1_to_col=True, copy=False)
            # Euler-Maruyama update of the state
            self.__value += drift * time_delta + diffusion.dot(variate_delta)
            self._time = newtime
        v = np.copy(self.__value)
        if self.__flatten: v = v.flatten()
        return self._time, v

    def __iter__(self):
        return self
def run(sim, nstep=None, last_time=None):
    """
    Drive a simulation iterator (e.g. EulerMaruyama) and collect its output
    into a pandas DataFrame indexed by time. At most one of ``nstep``
    (number of steps) and ``last_time`` (inclusive stopping time) may be
    given; with neither, the iterator is drained until exhaustion.
    """
    checks.check_at_most_one_not_none(nstep, last_time)
    ts, vs = [], []
    if nstep is not None:
        # fixed number of steps (or fewer if the iterator runs out)
        for _ in range(nstep):
            try:
                t, v = next(sim)
            except StopIteration: break
            ts.append(t)
            vs.append(v.flatten())
    elif last_time is not None:
        # run until the reported time reaches last_time (inclusive)
        while True:
            try:
                t, v = next(sim)
            except StopIteration: break
            ts.append(t)
            vs.append(v.flatten())
            if t >= last_time: break
    else:
        # no stopping criterion: consume the whole iterator
        for t, v in sim:
            ts.append(t)
            vs.append(v.flatten())
    return pd.DataFrame(data=vs, index=ts)
| thalesians/tsa | src/main/python/thalesians/tsa/simulation.py | Python | apache-2.0 | 4,012 | 0.008724 |
# -*- coding: utf-8 -*-
'''
Created on 05.09.2015
@author: derChris
'''
class ezDict(dict):
    """Dictionary that auto-creates nested ezDicts on missing keys and can
    recursively prune falsy entries via :meth:`reduce`."""

    def __missing__(self, key):
        # Auto-vivify: store and return a fresh nested ezDict for the key.
        child = ezDict()
        self[key] = child
        return child

    def reduce(self):
        """Recursively drop falsy values (empty dicts, '', 0, None) in place
        and return self for chaining."""
        for key in list(self.keys()):
            value = self[key]
            if isinstance(value, ezDict):
                value.reduce()
            if not value:
                del self[key]
        return self
if __name__ == '__main__':
    # quick demo: auto-vivified nesting, then pruning of the empty branch
    x = ezDict()
    x['heinz']['klaus'] = 'wolfgang'
    x['heinz']['juergen'] = 'stefan'
    x['stefanie']['ursula'] = {}
    print(x)
    # reduce() removes 'stefanie' because its only value is an empty dict
    print(x.reduce())
#!/usr/bin/env python
from setuptools import setup, find_packages
classifiers = [
    # Get more strings from
    # http://pypi.python.org/pypi?%3Aaction=list_classifiers
    "License :: OSI Approved :: MIT License",
    "Natural Language :: English",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 2",
    "Programming Language :: Python :: 2.6",
    "Programming Language :: Python :: 2.7",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.2",
    "Programming Language :: Python :: 3.3",
    "Programming Language :: Python :: 3.4",
    ]

# Packaging metadata for nanpy (control an Arduino board from Python).
setup(name="nanpy",
      version="0.9.4",
      description="Use your Arduino board with Python",
      license="MIT",
      author="Andrea Stagi",
      author_email="stagi.andrea@gmail.com",
      url="http://github.com/nanpy/nanpy",
      packages = find_packages(),
      keywords= "arduino library prototype",
      install_requires=[
          "pyserial",
      ],
      classifiers=classifiers,
      zip_safe = True)
| ryanvade/nanpy | setup.py | Python | mit | 1,020 | 0.017647 |
#!/usr/bin/env python
# Linux IEEE 802.15.4 userspace tools
#
# Copyright (C) 2008, 2009 Siemens AG
#
# Written-by: Dmitry Eremin-Solenikov
# Written-by: Sergey Lapin
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys,os,time
from termios import *
from test_DQ import *
if len(sys.argv) < 3:
    print "Bad arguments."
    print "Usage: %s tty channel" %(sys.argv[0])
    sys.exit(2)

# Open the transceiver on the given tty and switch it to receive mode on
# the requested channel.
cn = DQ(sys.argv[1])
print 'Result of close ' + hex(cn.close())
print 'Result of open ' + hex(cn.open())
print 'Result of set_channel' +hex(cn.set_channel(int(sys.argv[2])))
print 'Result of set_state' +hex(cn.set_state(RX_MODE))

# Receive frames forever until interrupted with Ctrl-C.
try:
    while 1:
        print 'Result of recv_block' +hex(cn.recv_block())
except KeyboardInterrupt:
    cn.close()
    sys.exit(2)

# NOTE(review): everything below is unreachable -- the loop above only exits
# through KeyboardInterrupt, whose handler calls sys.exit(2).
for i in range(1, 12):
    print 'Result of set_channel ' + hex(cn.set_channel(i))
    time.sleep(1)

m = 0
res = 5
# NOTE(review): this condition looks inverted; a bounded retry loop would be
# 'while res != 0 and m <= 60'. As written, once m exceeds 60 the loop can
# never terminate. Confirm intent before changing.
while res != 0 or m > 60:
    res = cn.set_state(RX_MODE)
    print "Got res %d" %(res)
    m = m + 1
    time.sleep(1)

if res == 5 or res == 8:
    print "Unable to set RX mode :("
    cn.close()
    sys.exit(2)

print 'Result of ed ' + hex(cn.ed()) + ' ' + hex(ord(cn.data))
print 'Result of close ' + hex(cn.close())
sys.exit(2)
#state = 0
#try:
# f.write(cmd_open)
#except IOError:
# print "Error on write"
# sys.exit(2)
#
#resp = get_response(f)
#print "got response %d" % (resp);
#sys.exit(2)
#
#try:
# state = 0
# while 1:
# if state == 0:
# f.write(cmd_open)
# state = 1
# val = f.read(1)
#except KeyboardInterrupt:
# f.close()
#
| tcheneau/linux-zigbee | test-serial/test_recv.py | Python | gpl-2.0 | 2,105 | 0.023278 |
# Copyright (c) 2013, GlaxoSmithKline Research & Development Ltd.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of GlaxoSmithKline Research & Development Ltd.
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Created by Jameed Hussain, July 2013
from __future__ import print_function
import sys
import os
import re
import sqlite3
import subprocess
from optparse import OptionParser
from indexing import cansmirk,heavy_atom_count
from rfrag import fragment_mol
def cmpd_not_in_db_mmp_query(in_smi,cmpd_id):
    """
    Find all MMPs between an input compound that is NOT in the db and the
    compounds that are. The input is fragmented on the fly and its contexts
    are matched against the pre-computed context table.
    @param in_smi: SMILES of the query compound
    @param cmpd_id: identifier to report for the query compound
    """
    query_contexts = set()

    # fragment the query compound and collect every context it can present
    cmpd_frags = fragment_mol(in_smi,cmpd_id)

    for row in cmpd_frags:
        row = row.rstrip()
        row_fields = re.split(',',row)

        # a double-cut fragmentation stores two context pieces joined by '.'
        if(row_fields[3].count(".") == 1):
            a,b = row_fields[3].split(".")
            query_contexts.add(a)
            query_contexts.add(b)
        else:
            query_contexts.add(row_fields[3])

    # NOTE(review): the IN-list is built by string interpolation; a SMILES
    # containing a quote would break the SQL. Consider parameterising.
    q_string = "','".join(query_contexts)
    q_string = "'%s'" % (q_string)

    query_sql = """
    select c.cmpd_id,
           c.core_smi,
           con.context_smi,
           con.context_size
    from core_table c, context_table con
    where c.context_id in (select context_id from context_table where context_smi in (%s))
    and c.context_id = con.context_id""" % (q_string)

    cursor.execute(query_sql)
    results = cursor.fetchall()

    cmpd_size = heavy_atom_count(in_smi)

    print_smallest_change_mmp(results,cmpd_id,cmpd_size)
def run_mmp_query(cmpd_id,cmpd_size):
    """
    Find all MMPs between a compound already in the db and the rest of the
    db, then print the smallest change for each partner compound.
    @param cmpd_id: id of the query compound (must exist in core_table)
    @param cmpd_size: heavy atom count of the query compound
    """
    # Parameterised query (qmark placeholder) instead of string
    # interpolation: avoids SQL injection and quoting problems.
    query_sql = """
    select c.cmpd_id,
           c.core_smi,
           con.context_smi,
           con.context_size
    from core_table c, context_table con
    where c.context_id in (select context_id from core_table where cmpd_id = ?)
    and c.context_id = con.context_id"""

    cursor.execute(query_sql, (cmpd_id,))
    results = cursor.fetchall()

    print_smallest_change_mmp(results,cmpd_id,cmpd_size)
def print_smallest_change_mmp(db_results,cmpd_id,query_size):
    """
    From the raw context-join results, keep for each partner compound the
    pairing with the largest shared context (i.e. the smallest change) and
    print the pairs that pass the size or ratio threshold.
    @param db_results: rows of (cmpd_id, core_smi, context_smi, context_size)
    @param cmpd_id: id of the query compound (excluded from its own matches)
    @param query_size: heavy atom count of the query compound
    """
    uniq_list={}

    for r in db_results:
        if(r[0] != cmpd_id):
            #print r
            #for each unique compound keep the largest one in common
            if(r[0] not in uniq_list):
                uniq_list[r[0]] = r
            elif(r[3] > uniq_list[r[0]][3] ):
                uniq_list[r[0]] = r

    for key, value in uniq_list.items():
        # size of change = query atoms not covered by the shared context
        size_of_change = query_size-value[3]
        #print "q_size: %s, Size od change: %s, Ratio: %s" % (query_size,size_of_change,float(size_of_change)/query_size)
        if(use_ratio):
            if(float(size_of_change)/query_size <= ratio):
                cursor.execute("SELECT smiles FROM cmpd_smisp WHERE cmpd_id = ?", (key, ))
                rsmi = cursor.fetchone()[0]
                # NOTE(review): this branch prints the global 'smi' while the
                # branch below prints 'search_string'; both are presumably set
                # by the main program outside this view -- confirm they are
                # consistent.
                print("%s,%s,%s,%s,%s,%s" % (smi,rsmi,id,value[0],value[1],value[2]))
        elif(size_of_change <= max_size):
            cursor.execute("SELECT smiles FROM cmpd_smisp WHERE cmpd_id = ?", (key, ))
            rsmi = cursor.fetchone()[0]
            print("%s,%s,%s,%s,%s,%s" % (search_string,rsmi,id,value[0],value[1],value[2]))
def run_subs_query(subs):
    """
    Find all MMPs in the db where the LHS of the transform matches the
    input substructure (attachment points denoted by [*], canonicalised).
    Matches are printed to stdout.
    @param subs: canonical SMILES of the query substructure
    """
    # Parameterised query (qmark placeholders) instead of string
    # interpolation: avoids SQL injection and quoting problems with the
    # substructure string and the size threshold.
    query_sql = """
    select lhs_smi.smiles,
           lhs.cmpd_id,
           lhs.core_smi,
           rhs_smi.smiles,
           rhs.cmpd_id,
           rhs.core_smi,
           context_table.context_smi,
           rhs_smi.cmpd_size-context_table.context_size
    from (select cmpd_id,core_smi,context_id from core_table where core_smi_ni = ?) lhs,
          core_table rhs,
          cmpd_smisp lhs_smi,
          cmpd_smisp rhs_smi,
          context_table
    where lhs.context_id = rhs.context_id
    and context_table.context_id = rhs.context_id
    and lhs_smi.cmpd_id = lhs.cmpd_id
    and rhs_smi.cmpd_id = rhs.cmpd_id
    and lhs.cmpd_id != rhs.cmpd_id
    and rhs_smi.cmpd_size-context_table.context_size <= ?"""

    cursor.execute(query_sql, (subs, max_size))
    results = cursor.fetchall()

    for r in results:
        #make sure it is not the same core on both sides
        if(r[2] != r[5]):
            # canonicalise the SMIRKS so equivalent transforms compare equal
            smirks,context = cansmirk(str(r[2]),str(r[5]),str(r[6]))
            if(have_id):
                print("%s,%s,%s,%s,%s,%s,%s,%s" % (subs,id,r[0],r[3],r[1],r[4],smirks,context))
            else:
                print("%s,%s,%s,%s,%s,%s,%s" % (subs,r[0],r[3],r[1],r[4],smirks,context))
def run_subs_smarts_query(subs_smarts):
    """
    Find all MMPs in the db where the LHS of the transform matches an input
    SMARTS (attachment points denoted by [#0]). Uses RDKit's SearchDb
    command-line tool to resolve the SMARTS to concrete cores first.
    @param subs_smarts: SMARTS pattern for the LHS cores
    """
    #set os enviroment for rdkit to use sqllite
    os.environ['RD_USESQLLITE'] = '1'

    temp_core_ni_file = 'temp_core_ni_file_%s' % (os.getpid())

    # NOTE(review): the SMARTS is interpolated into a shell=True command
    # string -- a malicious pattern could inject shell commands. shell=True
    # is needed for $RDBASE expansion and the redirect, but consider
    # quoting/escaping the pattern (e.g. shlex.quote).
    cmd = "python $RDBASE/Projects/DbCLI/SearchDb.py --dbDir=%s_smarts --smarts='%s' --silent >%s" % (pre,subs_smarts,temp_core_ni_file)
    subprocess.Popen(cmd, shell=True).wait()

    infile=open(temp_core_ni_file, 'r')

    # each returned row is a numberless core; find its MMPs via the shared
    # context join
    for row in infile:
        row = row.rstrip()
        # NOTE(review): SQL built by string interpolation; consider
        # parameterising like the cmpd_smisp queries elsewhere in this file.
        query_sql = """
        select lhs_smi.smiles,
               lhs.cmpd_id,
               lhs.core_smi,
               rhs_smi.smiles,
               rhs.cmpd_id,
               rhs.core_smi,
               context_table.context_smi,
               rhs_smi.cmpd_size-context_table.context_size
        from (select cmpd_id,core_smi,context_id from core_table where core_smi_ni = '%s') lhs,
              core_table rhs,
              cmpd_smisp lhs_smi,
              cmpd_smisp rhs_smi,
              context_table
        where lhs.context_id = rhs.context_id
        and context_table.context_id = rhs.context_id
        and lhs_smi.cmpd_id = lhs.cmpd_id
        and rhs_smi.cmpd_id = rhs.cmpd_id
        and lhs.cmpd_id != rhs.cmpd_id
        and rhs_smi.cmpd_size-context_table.context_size <= %s
        and lhs_smi.cmpd_size-context_table.context_size <= %s""" % (row,max_size,max_size)

        cursor.execute(query_sql)
        results = cursor.fetchall()

        for r in results:
            #cansmirk
            smirks,context = cansmirk(str(r[2]),str(r[5]),str(r[6]))
            if(have_id):
                print("%s,%s,%s,%s,%s,%s,%s" % (id,r[0],r[3],r[1],r[4],smirks,context))
            else:
                print("%s,%s,%s,%s,%s,%s" % (r[0],r[3],r[1],r[4],smirks,context))

    infile.close()
    #remove temporary files
    os.unlink(temp_core_ni_file)
def run_trans_smarts_query(transform):
    """
    Find all MMPs whose LHS and RHS cores match the two SMARTS of the input
    transform (LHS_SMARTS>>RHS_SMARTS). Can be slow for general patterns.
    @param transform: transform SMARTS, e.g. [#0]c1ccccc1>>[#0]c1ccncc1
    """
    lhs,rhs = transform.split(">>")

    matching_lhs = []
    matching_rhs = []

    #set os enviroment for rdkit to use sqllite
    os.environ['RD_USESQLLITE'] = '1'

    # NOTE(review): both commands interpolate a SMARTS into a shell=True
    # string -- shell injection risk; consider quoting (e.g. shlex.quote).
    cmd = "python $RDBASE/Projects/DbCLI/SearchDb.py --dbDir=%s_smarts --smarts='%s' --silent" % (pre,lhs)
    p1 = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    output = p1.communicate()[0].decode().rstrip()
    matching_lhs=output.split("\n")
    #sys.stderr.write("rhs: %s\n" % (len(matching_lhs)) )

    cmd = "python $RDBASE/Projects/DbCLI/SearchDb.py --dbDir=%s_smarts --smarts='%s' --silent" % (pre,rhs)
    p1 = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    output = p1.communicate()[0].decode().rstrip()
    matching_rhs=output.split("\n")
    #sys.stderr.write("rhs: %s\n" % (len(matching_rhs)) )

    #sys.stderr.write('SQLlite method\n')
    # build quoted IN-lists from the cores each SMARTS matched
    # NOTE(review): string-interpolated IN-lists; cores containing quotes
    # would break the SQL.
    lhs_q_string = "','".join(matching_lhs)
    lhs_q_string = "'%s'" % (lhs_q_string)

    rhs_q_string = "','".join(matching_rhs)
    rhs_q_string = "'%s'" % (rhs_q_string)

    query_sql = """
    select lhs_smi.smiles,
           lhs.cmpd_id,
           lhs.core_smi,
           rhs_smi.smiles,
           rhs.cmpd_id,
           rhs.core_smi,
           context_table.context_smi
    from (select cmpd_id,core_smi,context_id from core_table where core_smi_ni in (%s) ) lhs,
         (select cmpd_id,core_smi,context_id from core_table where core_smi_ni in (%s) ) rhs,
          cmpd_smisp lhs_smi,
          cmpd_smisp rhs_smi,
          context_table
    where lhs.context_id = rhs.context_id
    and context_table.context_id = rhs.context_id
    and lhs_smi.cmpd_id = lhs.cmpd_id
    and rhs_smi.cmpd_id = rhs.cmpd_id
    and lhs.cmpd_id != rhs.cmpd_id
    and rhs_smi.cmpd_size-context_table.context_size <= %s
    and lhs_smi.cmpd_size-context_table.context_size <= %s """ % (lhs_q_string,rhs_q_string,max_size,max_size)

    cursor.execute(query_sql)
    results = cursor.fetchall()

    for r in results:
        smirks,context = cansmirk(str(r[2]),str(r[5]),str(r[6]))
        if(have_id):
            print("%s,%s,%s,%s,%s,%s,%s,%s" % (transform,id,r[0],r[3],r[1],r[4],smirks,context))
        else:
            print("%s,%s,%s,%s,%s,%s,%s" % (transform,r[0],r[3],r[1],r[4],smirks,context))
def run_trans_query(transform):
    """
    Find all MMPs in the db that match the input (canonical) SMIRKS
    transform exactly. Matches are printed to stdout.
    @param transform: canonical SMIRKS, e.g. [*:1]F>>[*:1]Cl
    """
    lhs,rhs = transform.split(">>")

    # strip the attachment-point numbering so we can match against the
    # stored numberless cores (core_smi_ni)
    lhs_ni = remove_numbers(lhs)
    rhs_ni = remove_numbers(rhs)

    # Parameterised query (qmark placeholders) instead of string
    # interpolation: avoids SQL injection and quoting problems.
    query_sql = """
    select lhs_smi.smiles,
           lhs.cmpd_id,
           lhs.core_smi,
           rhs_smi.smiles,
           rhs.cmpd_id,
           rhs.core_smi,
           context_table.context_smi
    from (select cmpd_id,core_smi,context_id from core_table where core_smi_ni = ?) lhs,
         (select cmpd_id,core_smi,context_id from core_table where core_smi_ni = ?) rhs,
          cmpd_smisp lhs_smi,
          cmpd_smisp rhs_smi,
          context_table
    where lhs.context_id = rhs.context_id
    and context_table.context_id = rhs.context_id
    and lhs_smi.cmpd_id = lhs.cmpd_id
    and rhs_smi.cmpd_id = rhs.cmpd_id"""

    cursor.execute(query_sql, (lhs_ni, rhs_ni))
    results = cursor.fetchall()

    for r in results:
        smirks,context = cansmirk(str(r[2]),str(r[5]),str(r[6]))
        # numberless matching can pair wrong attachment orders, so keep
        # only exact SMIRKS matches
        if(smirks == transform):
            if(have_id):
                print("%s,%s,%s,%s,%s,%s,%s" % (id,r[0],r[3],r[1],r[4],smirks,context))
            else:
                print("%s,%s,%s,%s,%s,%s" % (r[0],r[3],r[1],r[4],smirks,context))
def remove_numbers(in_string):
    """Strip attachment-point numbering from a fragment SMILES:
    [*:1], [*:2] and [*:3] all become plain [*]."""
    # one character-class pattern replaces the three sequential re.sub calls
    return re.sub(r'\[\*:[123]\]', '[*]', in_string)
class MyParser(OptionParser):
    """OptionParser subclass that emits its description verbatim,
    preserving hand-formatted line breaks instead of re-wrapping them."""

    def format_description(self, formatter):
        # bypass optparse's default re-flowing of the description text
        return self.description
parser = MyParser(
description=
"""Program to search MMP db. The types of searching that can be performed are as
follows:
mmp: Find all MMPs of a input/query compound to the compounds in the db
subs: Find all MMPs in the db where the LHS of the transform matches an input
substructure. Make sure the attached points are donated by an asterisk and the
input substructure has been canonicalised (eg. [*]c1ccccc1).
trans: Find all MMPs that match the input transform/SMIRKS. Make sure the input
SMIRKS has been canonicalised using the cansmirk.py program.
subs_smarts: Find all MMPs in the db where the LHS of the transform matches an
input SMARTS. The attachment points in the SMARTS can be donated by [#0] (eg.
[#0]c1ccccc1).
trans_smarts: Find all MMPs that match the LHS and RHS SMARTS of the input
transform. The transform SMARTS are input as LHS_SMARTS>>RHS_SMARTS (eg.
[#0]c1ccccc1>>[#0]c1ccncc1). Note: This search can take a long time to run if a
very general SMARTS expression is used.
""")
parser.add_option('-t','--type',action='store', dest='type', type='string',
help='Type of search required. Options are: mmp, subs, trans, subs_smarts, trans_smarts')
parser.add_option('-m','--maxsize',action='store', dest='maxsize', type='int',
help='Maximum size of change (in heavy atoms) allowed in matched molecular pairs identified. DEFAULT=10. \
Note: This option overrides the ratio option if both are specified.')
parser.add_option('-r','--ratio',action='store', dest='ratio', type='float',
help='Only applicable with the mmp search type. Maximum ratio of change allowed in matched molecular pairs identified. The ratio is: size of change / \
size of cmpd (in terms of heavy atoms) for the QUERY MOLECULE. DEFAULT=0.3. Note: If this option is used with the maxsize option, the maxsize option will be used.')
parser.add_option('-p','--prefix',action='store', dest='prefix', type='string',
help='Prefix for the db file. DEFAULT=mmp')
#parse the command line options
(options, args) = parser.parse_args()
# Defaults. Note: max heavy atom count does not include the attachment
# points (*).
max_size = 10
ratio = 0.3
use_ratio = False
have_id = True
search_type = "mmp"
db_name = "mmp.db"
pre = "mmp"
if options.maxsize is not None:
    max_size = options.maxsize
elif options.ratio is not None:
    ratio = options.ratio
    if ratio >= 1:
        # Bug fix: the original print contained a %s placeholder but
        # never supplied the offending value.
        print("Ratio specified: %s. Ratio needs to be less than 1." % ratio)
        sys.exit(1)
    use_ratio = True
if options.type is not None:
    # Membership test replaces the original five-way or-chain.
    if options.type in ("mmp", "subs", "trans", "subs_smarts", "trans_smarts"):
        search_type = options.type
    else:
        print("Unrecognised search type. Please choose from: mmp, subs, trans, subs_smarts, trans_smarts")
        sys.exit(1)
else:
    print("Please specify search type. Please choose from: mmp, subs, trans, subs_smarts, trans_smarts")
    sys.exit(1)
if options.prefix is not None:
    pre = options.prefix
    db_name = "%s.db" % (pre)
#connect to db
con = sqlite3.connect(db_name)
cursor = con.cursor()
#these settings increase performance
cursor.execute('PRAGMA main.page_size = 4096;')
cursor.execute('PRAGMA main.cache_size=10000;')
cursor.execute('PRAGMA main.locking_mode=EXCLUSIVE;')
cursor.execute('PRAGMA main.synchronous=NORMAL;')
cursor.execute('PRAGMA main.journal_mode=WAL;')
# NOTE(review): this second cache_size overrides the 10000 set above;
# confirm which value is intended before removing either line.
cursor.execute('PRAGMA main.cache_size=5000;')
cursor.execute('PRAGMA main.temp_store = MEMORY;')
# Read the STDIN: one query per line, either "SMILES id" / "SMILES,id"
# or just the query string (SMILES, substructure, SMARTS or SMIRKS
# depending on search_type).
for line in sys.stdin:
    line = line.rstrip()
    # Raw string fix: '\s' in a plain literal is an invalid escape
    # (DeprecationWarning / SyntaxWarning on modern Pythons).
    line_fields = re.split(r'\s|,', line)
    if len(line_fields) == 1:
        # No separate id column: fall back to the query string itself.
        # NOTE(review): have_id is never reset to True for later lines;
        # assumes the input format is consistent for the whole stream.
        id = line_fields[0]
        have_id = False
    else:
        id = line_fields[1]
    search_string = line_fields[0]
    if search_type == "mmp":
        # Check whether the smiles is already in the database.
        cursor.execute("SELECT cmpd_id,cmpd_size FROM cmpd_smisp WHERE smiles = ?", (search_string, ))
        d_res = cursor.fetchone()
        if d_res:
            # Compound is in the db: query by its stored id.
            id_in_db, query_size = d_res
            run_mmp_query(id_in_db, query_size)
        else:
            # Not in the db: fragment the input compound on the fly.
            cmpd_not_in_db_mmp_query(search_string, id)
    # Substructure query against the LHS of the transforms.
    elif search_type == "subs":
        run_subs_query(search_string)
    elif search_type == "trans":
        run_trans_query(search_string)
    # SMARTS based queries.
    elif search_type == "subs_smarts":
        run_subs_smarts_query(search_string)
    elif search_type == "trans_smarts":
        run_trans_smarts_query(search_string)
# :coding: utf-8
import re
#: Regular Expression pattern for single line comments
_ONE_LINE_COMMENT_PATTERN = re.compile(r"(\n|^| )//.*?\n")
#: Regular Expression pattern for multi-line comments
_MULTI_LINES_COMMENT_PATTERN = re.compile(r"/\*.*?\*/", re.DOTALL)
#: Regular Expression pattern for nested element symbols
_NESTED_ELEMENT_PATTERN = re.compile(r"{[^{}]*}")
def filter_comments(
    content, filter_multiline_comment=True, keep_content_size=False
):
    """Return *content* with JavaScript comments removed.

    If *filter_multiline_comment* is False, only the ``//`` one-line
    comments are filtered out.

    If *keep_content_size* is True, each stripped comment is replaced by
    the same number of characters (spaces plus newlines), so every match
    position in the result maps back to the original content.

    .. note::

        Either way the filtered content keeps the same number of lines
        as the original content.

    .. seealso:: https://www.w3schools.com/js/js_comments.asp

    """
    def _strip(match):
        text = match.group()
        # The one-line pattern may capture the character preceding "//"
        # (a newline or a space); keep that character untouched.
        prefix = ""
        if not text.startswith("/"):
            prefix, text = text[0], text[1:]
        newlines = text.count("\n")
        padding = " " * (len(text) - newlines) if keep_content_size else ""
        return prefix + padding + "\n" * newlines

    result = _ONE_LINE_COMMENT_PATTERN.sub(_strip, content)
    if filter_multiline_comment:
        result = _MULTI_LINES_COMMENT_PATTERN.sub(_strip, result)
    return result
def collapse_all(content, filter_comment=False):
    """Return tuple of *content* with the top level elements only and dictionary
    containing the collapsed content associated with the *line number*.
    If *filter_comment* is set to True, all comment are removed from the content
    before collapsing the elements. The collapsed content dictionary preserve
    the comments.
    .. note::
        The content with collapsed elements keep the same number of
        lines as the original content.
    """
    # Keep the untouched text: collapsed bodies are extracted from it.
    _initial_content = content
    collapsed_content = {}
    if filter_comment:
        # Filter comment before collapsing elements to prevent comment analysis.
        # keep_content_size=True so match positions below still map into
        # _initial_content.
        content = filter_comments(content, keep_content_size=True)
    def _replace_element(element):
        """Replace matched *element* in content."""
        # Number of newlines spanned by the matched brace pair.
        count = element.group().count("\n")
        # Ensure that the replacement string keep the same length that
        # the original content to be able to use the match positions:
        # "<>" (2 chars) + buffer spaces + newlines == len(match).
        _buffer = len(element.group()) - count - 2
        if len(element.group()) > 2:
            # 1-based line number of the opening brace in the current text;
            # valid for _initial_content too because every replacement is
            # size-preserving.
            line_number = content[:element.start()].count("\n")+1
            collapsed_content[line_number] = (
                _initial_content[element.start():element.end()]
            )
        return "<>{buffer}{lines}".format(
            buffer=" " * _buffer,
            lines="\n" * count
        )
    # Repeatedly collapse innermost brace pairs until a fixed point is
    # reached, so nested bodies fold from the inside out.
    _content = None
    while _content != content:
        _content = content
        content = _NESTED_ELEMENT_PATTERN.sub(_replace_element, content)
    # Remove the space buffer before returning the content.
    content = re.sub(r"<> *", lambda x: "{}", content)
    return content, collapsed_content
def get_docstring(line_number, lines):
    """Return docstrings for an element at a specific *line_number*.
    Loop into the file *lines* in reverse, starting from the element's
    *line_number* in order to parse the docstring if available.
    The docstring must be in the form of::
        /**
         * Class doc.
         *
         * Detailed description.
         */
        class AwesomeClass {
            ...
        }
    Which will return the following result::
        "Class doc.\\n\\nDetailed description."
    The docstring can also fit on one line, in the form of::
        /** Class doc. */
        class AwesomeClass {
            ...
        }
    """
    # None until the closing "*/" is seen; then a list of lines collected
    # bottom-up.
    docstring = None
    for index in reversed(range(line_number-1)):
        line = lines[index].strip()
        if len(line) == 0 or line.startswith("//"):
            # Do not look for docstring when more than two blank lines precede
            # the element.
            # NOTE(review): index ranges over 0..line_number-2, so this
            # condition is always true and the `continue` below is
            # unreachable -- any blank or '//' line aborts the search
            # immediately, which contradicts the comment above. Confirm
            # the intended threshold against upstream.
            if index < line_number - 1:
                return
            continue
        # Start of the docstring (from the end)
        if docstring is None:
            # If the entire docstring fit in one line
            match = re.search("(?<=/\*\* ).*(?= \*/)", line)
            if match is not None:
                return match.group()
            # No docstring
            if not line.startswith("*/"):
                return
            docstring = []
        # Valid docstring line starts with a '*'
        elif re.search("^\*( *| +.+)$", line) is not None:
            # Drop the "* " prefix (or just "*" on an empty line).
            indentation = 2 if len(line) > 1 else 1
            docstring.append(line[indentation:].rstrip())
        # Beginning of valid docstrings starts with '/**'
        elif line.startswith("/**"):
            # Lines were collected bottom-up; reverse before joining.
            return "\n".join(docstring[::-1])
        # Error in the docstring
        else:
            return
# Copyright (c) 2014 Quobyte Inc.
# Copyright (c) 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the Quobyte driver module."""
import errno
import os
import six
import traceback
import mock
from oslo_concurrency import processutils as putils
from oslo_utils import imageutils
from oslo_utils import units
from cinder import context
from cinder import exception
from cinder.image import image_utils
from cinder import test
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
from cinder.volume import configuration as conf
from cinder.volume.drivers import quobyte
class FakeDb(object):
    """Minimal stand-in for the volume DB API handed to the driver."""

    msg = "Tests are broken: mock this out."

    def volume_get(self, *args, **kwargs):
        """Fail loudly: tests reaching the DB layer must mock it out."""
        raise Exception(self.msg)

    def snapshot_get_all_for_volume(self, *args, **kwargs):
        """Mock this if you want results from it."""
        return []
class QuobyteDriverTestCase(test.TestCase):
    """Test case for Quobyte driver."""
    # Fixture constants shared by the tests below.
    TEST_QUOBYTE_VOLUME = 'quobyte://quobyte-host/openstack-volumes'
    TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL = 'quobyte-host/openstack-volumes'
    TEST_SIZE_IN_GB = 1
    TEST_MNT_POINT = '/mnt/quobyte'
    TEST_MNT_POINT_BASE = '/mnt'
    TEST_FILE_NAME = 'test.txt'
    TEST_SHARES_CONFIG_FILE = '/etc/cinder/test-shares.conf'
    TEST_TMP_FILE = '/tmp/tempfile'
    VOLUME_UUID = 'abcdefab-cdef-abcd-efab-cdefabcdefab'
    SNAP_UUID = 'bacadaca-baca-daca-baca-dacadacadaca'
    SNAP_UUID_2 = 'bebedede-bebe-dede-bebe-dedebebedede'
    def setUp(self):
        """Create a QuobyteDriver wired to a mocked configuration/FakeDb."""
        super(QuobyteDriverTestCase, self).setUp()
        self._configuration = mock.Mock(conf.Configuration)
        self._configuration.append_config_values(mock.ANY)
        self._configuration.quobyte_volume_url = \
            self.TEST_QUOBYTE_VOLUME
        self._configuration.quobyte_client_cfg = None
        self._configuration.quobyte_sparsed_volumes = True
        self._configuration.quobyte_qcow2_volumes = False
        self._configuration.quobyte_mount_point_base = \
            self.TEST_MNT_POINT_BASE
        self._configuration.nas_secure_file_operations = "auto"
        self._configuration.nas_secure_file_permissions = "auto"
        self._driver =\
            quobyte.QuobyteDriver(configuration=self._configuration,
                                  db=FakeDb())
        self._driver.shares = {}
        self._driver.set_nas_security_options(is_new_cinder_install=False)
        self.context = context.get_admin_context()
def assertRaisesAndMessageMatches(
self, excClass, msg, callableObj, *args, **kwargs):
"""Ensure that the specified exception was raised. """
caught = False
try:
callableObj(*args, **kwargs)
except Exception as exc:
caught = True
self.assertIsInstance(exc, excClass,
'Wrong exception caught: %s Stacktrace: %s' %
(exc, traceback.format_exc()))
self.assertIn(msg, six.text_type(exc))
if not caught:
self.fail('Expected raised exception but nothing caught.')
    def test_local_path(self):
        """local_path common use case."""
        drv = self._driver
        vol_id = self.VOLUME_UUID
        volume = self._simple_volume(_name_id=vol_id)
        # Expected layout: <mount_point_base>/<hash of share>/volume-<id>.
        self.assertEqual(
            '/mnt/1331538734b757ed52d0e18c0a7210cd/volume-%s' % vol_id,
            drv.local_path(volume))
    def test_mount_quobyte_should_mount_correctly(self):
        """_mount_quobyte mkdirs, mounts and then verifies the mount."""
        with mock.patch.object(self._driver, '_execute') as mock_execute, \
                mock.patch('cinder.volume.drivers.quobyte.QuobyteDriver'
                           '.read_proc_mount') as mock_open:
            # Content of /proc/mount (not mounted yet).
            mock_open.return_value = six.StringIO(
                "/dev/sda5 / ext4 rw,relatime,data=ordered 0 0")
            self._driver._mount_quobyte(self.TEST_QUOBYTE_VOLUME,
                                        self.TEST_MNT_POINT)
            mkdir_call = mock.call('mkdir', '-p', self.TEST_MNT_POINT)
            mount_call = mock.call(
                'mount.quobyte', self.TEST_QUOBYTE_VOLUME,
                self.TEST_MNT_POINT, run_as_root=False)
            getfattr_call = mock.call(
                'getfattr', '-n', 'quobyte.info', self.TEST_MNT_POINT,
                run_as_root=False)
            mock_execute.assert_has_calls(
                [mkdir_call, mount_call, getfattr_call], any_order=False)
    def test_mount_quobyte_already_mounted_detected_seen_in_proc_mount(self):
        """An already mounted share is only verified, not mounted again."""
        with mock.patch.object(self._driver, '_execute') as mock_execute, \
                mock.patch('cinder.volume.drivers.quobyte.QuobyteDriver'
                           '.read_proc_mount') as mock_open:
            # Content of /proc/mount (already mounted).
            mock_open.return_value = six.StringIO(
                "quobyte@%s %s fuse rw,nosuid,nodev,noatime,user_id=1000"
                ",group_id=100,default_permissions,allow_other 0 0"
                % (self.TEST_QUOBYTE_VOLUME, self.TEST_MNT_POINT))
            self._driver._mount_quobyte(self.TEST_QUOBYTE_VOLUME,
                                        self.TEST_MNT_POINT)
            # Only the getfattr sanity check runs; no mkdir/mount calls.
            mock_execute.assert_called_once_with(
                'getfattr', '-n', 'quobyte.info', self.TEST_MNT_POINT,
                run_as_root=False)
    def test_mount_quobyte_should_suppress_and_log_already_mounted_error(self):
        """test_mount_quobyte_should_suppress_and_log_already_mounted_error
        Based on /proc/mount, the file system is not mounted yet. However,
        mount.quobyte returns with an 'already mounted' error. This is
        a last-resort safe-guard in case /proc/mount parsing was not
        successful.
        Because _mount_quobyte gets called with ensure=True, the error will
        be suppressed and logged instead.
        """
        with mock.patch.object(self._driver, '_execute') as mock_execute, \
                mock.patch('cinder.volume.drivers.quobyte.QuobyteDriver'
                           '.read_proc_mount') as mock_open, \
                mock.patch('cinder.volume.drivers.quobyte.LOG') as mock_LOG:
            # Content of /proc/mount (empty).
            mock_open.return_value = six.StringIO()
            # First call (mkdir) succeeds, second (mount) errors out.
            mock_execute.side_effect = [None, putils.ProcessExecutionError(
                stderr='is busy or already mounted')]
            self._driver._mount_quobyte(self.TEST_QUOBYTE_VOLUME,
                                        self.TEST_MNT_POINT,
                                        ensure=True)
            mkdir_call = mock.call('mkdir', '-p', self.TEST_MNT_POINT)
            mount_call = mock.call(
                'mount.quobyte', self.TEST_QUOBYTE_VOLUME,
                self.TEST_MNT_POINT, run_as_root=False)
            mock_execute.assert_has_calls([mkdir_call, mount_call],
                                          any_order=False)
            mock_LOG.warning.assert_called_once_with('%s is already mounted',
                                                     self.TEST_QUOBYTE_VOLUME)
    def test_mount_quobyte_should_reraise_already_mounted_error(self):
        """test_mount_quobyte_should_reraise_already_mounted_error
        Like test_mount_quobyte_should_suppress_and_log_already_mounted_error
        but with ensure=False.
        """
        with mock.patch.object(self._driver, '_execute') as mock_execute, \
                mock.patch('cinder.volume.drivers.quobyte.QuobyteDriver'
                           '.read_proc_mount') as mock_open:
            mock_open.return_value = six.StringIO()
            mock_execute.side_effect = [
                None,  # mkdir
                putils.ProcessExecutionError(  # mount
                    stderr='is busy or already mounted')]
            self.assertRaises(putils.ProcessExecutionError,
                              self._driver._mount_quobyte,
                              self.TEST_QUOBYTE_VOLUME,
                              self.TEST_MNT_POINT,
                              ensure=False)
            mkdir_call = mock.call('mkdir', '-p', self.TEST_MNT_POINT)
            mount_call = mock.call(
                'mount.quobyte', self.TEST_QUOBYTE_VOLUME,
                self.TEST_MNT_POINT, run_as_root=False)
            mock_execute.assert_has_calls([mkdir_call, mount_call],
                                          any_order=False)
    def test_get_hash_str(self):
        """_get_hash_str should calculate the correct value."""
        drv = self._driver
        self.assertEqual('1331538734b757ed52d0e18c0a7210cd',
                         drv._get_hash_str(self.TEST_QUOBYTE_VOLUME))
    def test_get_available_capacity_with_df(self):
        """_get_available_capacity should calculate correct value."""
        drv = self._driver
        # Fabricated `df --portability --block-size 1` output.
        df_total_size = 2620544
        df_avail = 1490560
        df_head = 'Filesystem 1K-blocks Used Available Use% Mounted on\n'
        df_data = 'quobyte@%s %d 996864 %d 41%% %s' % \
                  (self.TEST_QUOBYTE_VOLUME, df_total_size, df_avail,
                   self.TEST_MNT_POINT)
        df_output = df_head + df_data
        drv._get_mount_point_for_share = mock.Mock(return_value=self.
                                                   TEST_MNT_POINT)
        drv._execute = mock.Mock(return_value=(df_output, None))
        self.assertEqual((df_avail, df_total_size),
                         drv._get_available_capacity(self.TEST_QUOBYTE_VOLUME))
        (drv._get_mount_point_for_share.
            assert_called_once_with(self.TEST_QUOBYTE_VOLUME))
        (drv._execute.
            assert_called_once_with('df',
                                    '--portability',
                                    '--block-size',
                                    '1',
                                    self.TEST_MNT_POINT,
                                    run_as_root=self._driver._execute_as_root))
    def test_get_capacity_info(self):
        """_get_capacity_info derives used space from size and available."""
        with mock.patch.object(self._driver, '_get_available_capacity') \
                as mock_get_available_capacity:
            drv = self._driver
            df_size = 2620544
            df_avail = 1490560
            mock_get_available_capacity.return_value = (df_avail, df_size)
            size, available, used = drv._get_capacity_info(mock.ANY)
            mock_get_available_capacity.assert_called_once_with(mock.ANY)
            self.assertEqual(df_size, size)
            self.assertEqual(df_avail, available)
            self.assertEqual(size - available, used)
    def test_load_shares_config(self):
        """_load_shares_config takes the Volume URL and strips quobyte://."""
        drv = self._driver
        drv._load_shares_config()
        self.assertIn(self.TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL, drv.shares)
    def test_load_shares_config_without_protocol(self):
        """Same as test_load_shares_config, but URL is without quobyte://."""
        drv = self._driver
        drv.configuration.quobyte_volume_url = \
            self.TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL
        drv._load_shares_config()
        self.assertIn(self.TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL, drv.shares)
    def test_ensure_share_mounted(self):
        """_ensure_share_mounted simple use case."""
        with mock.patch.object(self._driver, '_get_mount_point_for_share') as \
                mock_get_mount_point, \
                mock.patch.object(self._driver, '_mount_quobyte') as \
                mock_mount:
            drv = self._driver
            drv._ensure_share_mounted(self.TEST_QUOBYTE_VOLUME)
            mock_get_mount_point.assert_called_once_with(
                self.TEST_QUOBYTE_VOLUME)
            mock_mount.assert_called_once_with(
                self.TEST_QUOBYTE_VOLUME,
                mock_get_mount_point.return_value,
                ensure=True)
    def test_ensure_shares_mounted_should_save_mounting_successfully(self):
        """_ensure_shares_mounted should save share if mounted with success."""
        with mock.patch.object(self._driver, '_ensure_share_mounted') \
                as mock_ensure_share_mounted:
            drv = self._driver
            drv._ensure_shares_mounted()
            mock_ensure_share_mounted.assert_called_once_with(
                self.TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL)
            self.assertIn(self.TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL,
                          drv._mounted_shares)
    def test_ensure_shares_mounted_should_not_save_mounting_with_error(self):
        """_ensure_shares_mounted should not save if mount raised an error."""
        with mock.patch.object(self._driver, '_ensure_share_mounted') \
                as mock_ensure_share_mounted:
            drv = self._driver
            mock_ensure_share_mounted.side_effect = Exception()
            drv._ensure_shares_mounted()
            mock_ensure_share_mounted.assert_called_once_with(
                self.TEST_QUOBYTE_VOLUME_WITHOUT_PROTOCOL)
            # The share stays configured but must not be listed as mounted.
            self.assertEqual(1, len(drv.shares))
            self.assertEqual(0, len(drv._mounted_shares))
    @mock.patch.object(quobyte.QuobyteDriver, "set_nas_security_options")
    def test_do_setup(self, qb_snso_mock):
        """do_setup runs successfully."""
        drv = self._driver
        drv.do_setup(mock.create_autospec(context.RequestContext))
        qb_snso_mock.assert_called_once_with(is_new_cinder_install=mock.ANY)
    def test_check_for_setup_error_throws_quobyte_volume_url_not_set(self):
        """check_for_setup_error throws if 'quobyte_volume_url' is not set."""
        drv = self._driver
        drv.configuration.quobyte_volume_url = None
        self.assertRaisesAndMessageMatches(exception.VolumeDriverException,
                                           'no Quobyte volume configured',
                                           drv.check_for_setup_error)
    def test_check_for_setup_error_throws_client_not_installed(self):
        """check_for_setup_error throws if client is not installed."""
        drv = self._driver
        # ENOENT from the probe is translated into a driver exception.
        drv._execute = mock.Mock(side_effect=OSError
                                 (errno.ENOENT, 'No such file or directory'))
        self.assertRaisesAndMessageMatches(exception.VolumeDriverException,
                                           'mount.quobyte is not installed',
                                           drv.check_for_setup_error)
        drv._execute.assert_called_once_with('mount.quobyte',
                                             check_exit_code=False,
                                             run_as_root=False)
    def test_check_for_setup_error_throws_client_not_executable(self):
        """check_for_setup_error throws if client cannot be executed."""
        drv = self._driver
        # Other OSErrors (here EPERM) are re-raised unchanged.
        drv._execute = mock.Mock(side_effect=OSError
                                 (errno.EPERM, 'Operation not permitted'))
        self.assertRaisesAndMessageMatches(OSError,
                                           'Operation not permitted',
                                           drv.check_for_setup_error)
        drv._execute.assert_called_once_with('mount.quobyte',
                                             check_exit_code=False,
                                             run_as_root=False)
    def test_find_share_should_throw_error_if_there_is_no_mounted_shares(self):
        """_find_share should throw error if there is no mounted share."""
        drv = self._driver
        drv._mounted_shares = []
        self.assertRaises(exception.NotFound,
                          drv._find_share,
                          self.TEST_SIZE_IN_GB)
    def test_find_share(self):
        """_find_share simple use case."""
        drv = self._driver
        drv._mounted_shares = [self.TEST_QUOBYTE_VOLUME]
        self.assertEqual(self.TEST_QUOBYTE_VOLUME,
                         drv._find_share(self.TEST_SIZE_IN_GB))
    def test_find_share_does_not_throw_error_if_there_isnt_enough_space(self):
        """_find_share intentionally does not throw when no space is left."""
        with mock.patch.object(self._driver, '_get_available_capacity') \
                as mock_get_available_capacity:
            drv = self._driver
            df_size = 2620544
            df_avail = 0
            mock_get_available_capacity.return_value = (df_avail, df_size)
            drv._mounted_shares = [self.TEST_QUOBYTE_VOLUME]
            self.assertEqual(self.TEST_QUOBYTE_VOLUME,
                             drv._find_share(self.TEST_SIZE_IN_GB))
            # The current implementation does not call _get_available_capacity.
            # Future ones might do and therefore we mocked it.
            self.assertGreaterEqual(mock_get_available_capacity.call_count, 0)
    def _simple_volume(self, **kwargs):
        """Return a fake volume object; *kwargs* override the defaults."""
        updates = {'id': self.VOLUME_UUID,
                   'provider_location': self.TEST_QUOBYTE_VOLUME,
                   'display_name': 'volume-%s' % self.VOLUME_UUID,
                   'size': 10,
                   'status': 'available'}
        updates.update(kwargs)
        # NOTE(review): 'display_name' is always present from the defaults
        # above, so this branch never fires; the intent was probably to
        # rebuild the name when a custom 'id' is passed -- confirm.
        if 'display_name' not in updates:
            updates['display_name'] = 'volume-%s' % updates['id']
        return fake_volume.fake_volume_obj(self.context, **updates)
    def test_create_sparsed_volume(self):
        """_do_create_volume creates a sparse file by default config."""
        drv = self._driver
        volume = self._simple_volume()
        drv._create_sparsed_file = mock.Mock()
        drv._set_rw_permissions_for_all = mock.Mock()
        drv._do_create_volume(volume)
        drv._create_sparsed_file.assert_called_once_with(mock.ANY, mock.ANY)
        drv._set_rw_permissions_for_all.assert_called_once_with(mock.ANY)
    def test_create_nonsparsed_volume(self):
        """_do_create_volume creates a regular file when sparse is off."""
        drv = self._driver
        volume = self._simple_volume()
        old_value = self._configuration.quobyte_sparsed_volumes
        self._configuration.quobyte_sparsed_volumes = False
        drv._create_regular_file = mock.Mock()
        drv._set_rw_permissions_for_all = mock.Mock()
        drv._do_create_volume(volume)
        drv._create_regular_file.assert_called_once_with(mock.ANY, mock.ANY)
        drv._set_rw_permissions_for_all.assert_called_once_with(mock.ANY)
        # Restore the shared configuration for the other tests.
        self._configuration.quobyte_sparsed_volumes = old_value
    def test_create_qcow2_volume(self):
        """_do_create_volume builds a qcow2 image when configured to."""
        drv = self._driver
        volume = self._simple_volume()
        old_value = self._configuration.quobyte_qcow2_volumes
        self._configuration.quobyte_qcow2_volumes = True
        drv._execute = mock.Mock()
        hashed = drv._get_hash_str(volume['provider_location'])
        path = '%s/%s/volume-%s' % (self.TEST_MNT_POINT_BASE,
                                    hashed,
                                    self.VOLUME_UUID)
        drv._do_create_volume(volume)
        assert_calls = [mock.call('qemu-img', 'create', '-f', 'qcow2',
                                  '-o', 'preallocation=metadata', path,
                                  str(volume['size'] * units.Gi),
                                  run_as_root=self._driver._execute_as_root),
                        mock.call('chmod', 'ugo+rw', path,
                                  run_as_root=self._driver._execute_as_root)]
        drv._execute.assert_has_calls(assert_calls)
        # Restore the shared configuration for the other tests.
        self._configuration.quobyte_qcow2_volumes = old_value
    def test_create_volume_should_ensure_quobyte_mounted(self):
        """create_volume ensures shares provided in config are mounted."""
        drv = self._driver
        drv.LOG = mock.Mock()
        drv._find_share = mock.Mock()
        drv._find_share.return_value = self.TEST_QUOBYTE_VOLUME
        drv._do_create_volume = mock.Mock()
        drv._ensure_shares_mounted = mock.Mock()
        volume = self._simple_volume(size=self.TEST_SIZE_IN_GB)
        drv.create_volume(volume)
        drv._find_share.assert_called_once_with(mock.ANY)
        drv._do_create_volume.assert_called_once_with(volume)
        drv._ensure_shares_mounted.assert_called_once_with()
    def test_create_volume_should_return_provider_location(self):
        """create_volume should return provider_location with found share."""
        drv = self._driver
        drv.LOG = mock.Mock()
        drv._ensure_shares_mounted = mock.Mock()
        drv._do_create_volume = mock.Mock()
        drv._find_share = mock.Mock(return_value=self.TEST_QUOBYTE_VOLUME)
        volume = self._simple_volume(size=self.TEST_SIZE_IN_GB)
        result = drv.create_volume(volume)
        # The share picked by _find_share becomes the provider_location.
        self.assertEqual(self.TEST_QUOBYTE_VOLUME, result['provider_location'])
        drv._do_create_volume.assert_called_once_with(volume)
        drv._ensure_shares_mounted.assert_called_once_with()
        drv._find_share.assert_called_once_with(self.TEST_SIZE_IN_GB)
    @mock.patch('oslo_utils.fileutils.delete_if_exists')
    def test_delete_volume(self, mock_delete_if_exists):
        """delete_volume removes the image file and its .info sidecar."""
        volume = self._simple_volume()
        volume_filename = 'volume-%s' % self.VOLUME_UUID
        volume_path = '%s/%s' % (self.TEST_MNT_POINT, volume_filename)
        info_file = volume_path + '.info'
        with mock.patch.object(self._driver, '_ensure_share_mounted') as \
                mock_ensure_share_mounted, \
                mock.patch.object(self._driver, '_local_volume_dir') as \
                mock_local_volume_dir, \
                mock.patch.object(self._driver,
                                  'get_active_image_from_info') as \
                mock_active_image_from_info, \
                mock.patch.object(self._driver, '_execute') as \
                mock_execute, \
                mock.patch.object(self._driver, '_local_path_volume') as \
                mock_local_path_volume, \
                mock.patch.object(self._driver, '_local_path_volume_info') as \
                mock_local_path_volume_info:
            mock_local_volume_dir.return_value = self.TEST_MNT_POINT
            mock_active_image_from_info.return_value = volume_filename
            mock_local_path_volume.return_value = volume_path
            mock_local_path_volume_info.return_value = info_file
            self._driver.delete_volume(volume)
            mock_ensure_share_mounted.assert_called_once_with(
                volume['provider_location'])
            mock_local_volume_dir.assert_called_once_with(volume)
            mock_active_image_from_info.assert_called_once_with(volume)
            mock_execute.assert_called_once_with('rm', '-f', volume_path,
                                                 run_as_root=
                                                 self._driver._execute_as_root)
            mock_local_path_volume_info.assert_called_once_with(volume)
            mock_local_path_volume.assert_called_once_with(volume)
            mock_delete_if_exists.assert_any_call(volume_path)
            mock_delete_if_exists.assert_any_call(info_file)
    def test_delete_should_ensure_share_mounted(self):
        """delete_volume should ensure that corresponding share is mounted."""
        drv = self._driver
        drv._execute = mock.Mock()
        volume = self._simple_volume(display_name='volume-123')
        drv._ensure_share_mounted = mock.Mock()
        drv.delete_volume(volume)
        (drv._ensure_share_mounted.
         assert_called_once_with(self.TEST_QUOBYTE_VOLUME))
        drv._execute.assert_called_once_with('rm', '-f',
                                             mock.ANY,
                                             run_as_root=False)
    def test_delete_should_not_delete_if_provider_location_not_provided(self):
        """delete_volume shouldn't delete if provider_location missed."""
        drv = self._driver
        drv._ensure_share_mounted = mock.Mock()
        drv._execute = mock.Mock()
        volume = self._simple_volume(display_name='volume-123',
                                     provider_location=None)
        drv.delete_volume(volume)
        # Without a provider_location there is nothing to mount or remove.
        drv._ensure_share_mounted.assert_not_called()
        drv._execute.assert_not_called()
def test_extend_volume(self):
drv = self._driver
volume = self._simple_volume()
volume_path = '%s/%s/volume-%s' % (self.TEST_MNT_POINT_BASE,
drv._get_hash_str(
self.TEST_QUOBYTE_VOLUME),
self.VOLUME_UUID)
qemu_img_info_output = """image: volume-%s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 473K
""" % self.VOLUME_UUID
img_info = imageutils.QemuImgInfo(qemu_img_info_output)
drv.get_active_image_from_info = mock.Mock(return_value=volume['name'])
image_utils.qemu_img_info = mock.Mock(return_value=img_info)
image_utils.resize_image = mock.Mock()
drv.extend_volume(volume, 3)
drv.get_active_image_from_info.assert_called_once_with(volume)
image_utils.qemu_img_info.assert_called_once_with(volume_path)
image_utils.resize_image.assert_called_once_with(volume_path, 3)
    def test_copy_volume_from_snapshot(self):
        """_copy_volume_from_snapshot converts the backing chain to raw."""
        drv = self._driver
        # lots of test vars to be prepared at first
        dest_volume = self._simple_volume(
            id='c1073000-0000-0000-0000-0000000c1073')
        src_volume = self._simple_volume()
        vol_dir = os.path.join(self.TEST_MNT_POINT_BASE,
                               drv._get_hash_str(self.TEST_QUOBYTE_VOLUME))
        src_vol_path = os.path.join(vol_dir, src_volume['name'])
        dest_vol_path = os.path.join(vol_dir, dest_volume['name'])
        info_path = os.path.join(vol_dir, src_volume['name']) + '.info'
        snapshot = fake_snapshot.fake_snapshot_obj(
            self.context,
            volume_name=src_volume.name,
            display_name='clone-snap-%s' % src_volume.id,
            size=src_volume.size,
            volume_size=src_volume.size,
            volume_id=src_volume.id,
            id=self.SNAP_UUID)
        snapshot.volume = src_volume
        snap_file = dest_volume['name'] + '.' + snapshot['id']
        snap_path = os.path.join(vol_dir, snap_file)
        size = dest_volume['size']
        qemu_img_output = """image: %s
        file format: raw
        virtual size: 1.0G (1073741824 bytes)
        disk size: 173K
        backing file: %s
        """ % (snap_file, src_volume['name'])
        img_info = imageutils.QemuImgInfo(qemu_img_output)
        # mocking and testing starts here
        # NOTE(review): assigning mocks onto image_utils replaces the real
        # functions for all later tests in this process; prefer
        # mock.patch.object so they are restored on exit.
        image_utils.convert_image = mock.Mock()
        drv._read_info_file = mock.Mock(return_value=
                                        {'active': snap_file,
                                         snapshot['id']: snap_file})
        image_utils.qemu_img_info = mock.Mock(return_value=img_info)
        drv._set_rw_permissions_for_all = mock.Mock()
        drv._copy_volume_from_snapshot(snapshot, dest_volume, size)
        drv._read_info_file.assert_called_once_with(info_path)
        image_utils.qemu_img_info.assert_called_once_with(snap_path)
        (image_utils.convert_image.
         assert_called_once_with(src_vol_path,
                                 dest_vol_path,
                                 'raw',
                                 run_as_root=self._driver._execute_as_root))
        drv._set_rw_permissions_for_all.assert_called_once_with(dest_vol_path)
    def test_create_volume_from_snapshot_status_not_available(self):
        """Expect an error when the snapshot's status is not 'available'."""
        drv = self._driver
        src_volume = self._simple_volume()
        snap_ref = fake_snapshot.fake_snapshot_obj(
            self.context,
            volume_name=src_volume.name,
            display_name='clone-snap-%s' % src_volume.id,
            volume_size=src_volume.size,
            volume_id=src_volume.id,
            id=self.SNAP_UUID,
            status='error')
        snap_ref.volume = src_volume
        new_volume = self._simple_volume(size=snap_ref.volume_size)
        self.assertRaises(exception.InvalidSnapshot,
                          drv.create_volume_from_snapshot,
                          new_volume,
                          snap_ref)
    def test_create_volume_from_snapshot(self):
        """create_volume_from_snapshot mounts, creates, then copies data."""
        drv = self._driver
        src_volume = self._simple_volume()
        snap_ref = fake_snapshot.fake_snapshot_obj(
            self.context,
            volume_name=src_volume.name,
            display_name='clone-snap-%s' % src_volume.id,
            volume_size=src_volume.size,
            volume_id=src_volume.id,
            id=self.SNAP_UUID,
            status='available')
        snap_ref.volume = src_volume
        new_volume = self._simple_volume(size=snap_ref.volume_size)
        drv._ensure_shares_mounted = mock.Mock()
        drv._find_share = mock.Mock(return_value=self.TEST_QUOBYTE_VOLUME)
        drv._do_create_volume = mock.Mock()
        drv._copy_volume_from_snapshot = mock.Mock()
        drv.create_volume_from_snapshot(new_volume, snap_ref)
        drv._ensure_shares_mounted.assert_called_once_with()
        drv._find_share.assert_called_once_with(new_volume['size'])
        drv._do_create_volume.assert_called_once_with(new_volume)
        (drv._copy_volume_from_snapshot.
         assert_called_once_with(snap_ref, new_volume, new_volume['size']))
def test_initialize_connection(self):
    """initialize_connection exposes format, driver type, name and mount base.

    Fix: the original rebound ``image_utils.qemu_img_info`` and
    ``drv.get_active_image_from_info`` to bare ``mock.Mock()`` objects
    without ever restoring them, leaking the mocks into every test that
    runs afterwards in the same process.  Patch within a context manager
    instead so the originals are restored automatically.
    """
    drv = self._driver
    volume = self._simple_volume()
    vol_dir = os.path.join(self.TEST_MNT_POINT_BASE,
                           drv._get_hash_str(self.TEST_QUOBYTE_VOLUME))
    vol_path = os.path.join(vol_dir, volume['name'])

    qemu_img_output = """image: %s
file format: raw
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
""" % volume['name']
    img_info = imageutils.QemuImgInfo(qemu_img_output)

    with mock.patch.object(drv, 'get_active_image_from_info',
                           return_value=volume['name']) as mock_get_active, \
            mock.patch.object(image_utils, 'qemu_img_info',
                              return_value=img_info) as mock_img_info:
        conn_info = drv.initialize_connection(volume, None)

        mock_get_active.assert_called_once_with(volume)
        mock_img_info.assert_called_once_with(vol_path)

    self.assertEqual('raw', conn_info['data']['format'])
    self.assertEqual('quobyte', conn_info['driver_volume_type'])
    self.assertEqual(volume['name'], conn_info['data']['name'])
    self.assertEqual(self.TEST_MNT_POINT_BASE,
                     conn_info['mount_point_base'])
def test_copy_volume_to_image_raw_image(self):
    """Upload a raw volume file; no conversion step is expected."""
    drv = self._driver
    volume = self._simple_volume()
    volume_path = '%s/%s' % (self.TEST_MNT_POINT, volume['name'])
    image_meta = {'id': '10958016-e196-42e3-9e7f-5d8927ae3099'}

    with mock.patch.object(drv, 'get_active_image_from_info') as \
            mock_get_active_image_from_info, \
            mock.patch.object(drv, '_local_volume_dir') as \
            mock_local_volume_dir, \
            mock.patch.object(image_utils, 'qemu_img_info') as \
            mock_qemu_img_info, \
            mock.patch.object(image_utils, 'upload_volume') as \
            mock_upload_volume, \
            mock.patch.object(image_utils, 'create_temporary_file') as \
            mock_create_temporary_file:
        mock_get_active_image_from_info.return_value = volume['name']
        mock_local_volume_dir.return_value = self.TEST_MNT_POINT
        mock_create_temporary_file.return_value = self.TEST_TMP_FILE

        # qemu-img reports the volume as raw, so no conversion is needed.
        qemu_img_output = """image: %s
file format: raw
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
""" % volume['name']
        img_info = imageutils.QemuImgInfo(qemu_img_output)
        mock_qemu_img_info.return_value = img_info

        # Raw images are uploaded straight from the volume path.
        upload_path = volume_path

        drv.copy_volume_to_image(mock.ANY, volume, mock.ANY, image_meta)

        mock_get_active_image_from_info.assert_called_once_with(volume)
        mock_local_volume_dir.assert_called_once_with(volume)
        mock_qemu_img_info.assert_called_once_with(volume_path)
        mock_upload_volume.assert_called_once_with(
            mock.ANY, mock.ANY, mock.ANY, upload_path)
        self.assertTrue(mock_create_temporary_file.called)
def test_copy_volume_to_image_qcow2_image(self):
    """Upload a qcow2 image file which has to be converted to raw first."""
    drv = self._driver
    volume = self._simple_volume()
    volume_path = '%s/%s' % (self.TEST_MNT_POINT, volume['name'])
    image_meta = {'id': '10958016-e196-42e3-9e7f-5d8927ae3099'}

    with mock.patch.object(drv, 'get_active_image_from_info') as \
            mock_get_active_image_from_info, \
            mock.patch.object(drv, '_local_volume_dir') as \
            mock_local_volume_dir, \
            mock.patch.object(image_utils, 'qemu_img_info') as \
            mock_qemu_img_info, \
            mock.patch.object(image_utils, 'convert_image') as \
            mock_convert_image, \
            mock.patch.object(image_utils, 'upload_volume') as \
            mock_upload_volume, \
            mock.patch.object(image_utils, 'create_temporary_file') as \
            mock_create_temporary_file:
        mock_get_active_image_from_info.return_value = volume['name']
        mock_local_volume_dir.return_value = self.TEST_MNT_POINT
        mock_create_temporary_file.return_value = self.TEST_TMP_FILE

        # qemu-img reports qcow2, so a conversion to raw must happen
        # before the upload.
        qemu_img_output = """image: %s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
""" % volume['name']
        img_info = imageutils.QemuImgInfo(qemu_img_output)
        mock_qemu_img_info.return_value = img_info

        # The converted copy is written to, and uploaded from, the
        # temporary file.
        upload_path = self.TEST_TMP_FILE

        drv.copy_volume_to_image(mock.ANY, volume, mock.ANY, image_meta)

        mock_get_active_image_from_info.assert_called_once_with(volume)
        mock_local_volume_dir.assert_called_with(volume)
        mock_qemu_img_info.assert_called_once_with(volume_path)
        mock_convert_image.assert_called_once_with(
            volume_path, upload_path, 'raw')
        mock_upload_volume.assert_called_once_with(
            mock.ANY, mock.ANY, mock.ANY, upload_path)
        self.assertTrue(mock_create_temporary_file.called)
def test_copy_volume_to_image_snapshot_exists(self):
    """Upload an active snapshot which has to be converted to raw first."""
    drv = self._driver
    volume = self._simple_volume()
    volume_path = '%s/volume-%s' % (self.TEST_MNT_POINT, self.VOLUME_UUID)
    volume_filename = 'volume-%s' % self.VOLUME_UUID
    image_meta = {'id': '10958016-e196-42e3-9e7f-5d8927ae3099'}

    with mock.patch.object(drv, 'get_active_image_from_info') as \
            mock_get_active_image_from_info, \
            mock.patch.object(drv, '_local_volume_dir') as \
            mock_local_volume_dir, \
            mock.patch.object(image_utils, 'qemu_img_info') as \
            mock_qemu_img_info, \
            mock.patch.object(image_utils, 'convert_image') as \
            mock_convert_image, \
            mock.patch.object(image_utils, 'upload_volume') as \
            mock_upload_volume, \
            mock.patch.object(image_utils, 'create_temporary_file') as \
            mock_create_temporary_file:
        mock_get_active_image_from_info.return_value = volume['name']
        mock_local_volume_dir.return_value = self.TEST_MNT_POINT
        mock_create_temporary_file.return_value = self.TEST_TMP_FILE

        # The active image is a qcow2 snapshot file backed by the base
        # volume file, so it must be converted before upload.
        qemu_img_output = """image: volume-%s.%s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
backing file: %s
""" % (self.VOLUME_UUID, self.SNAP_UUID, volume_filename)
        img_info = imageutils.QemuImgInfo(qemu_img_output)
        mock_qemu_img_info.return_value = img_info

        # The converted copy is uploaded from the temporary file.
        upload_path = self.TEST_TMP_FILE

        drv.copy_volume_to_image(mock.ANY, volume, mock.ANY, image_meta)

        mock_get_active_image_from_info.assert_called_once_with(volume)
        mock_local_volume_dir.assert_called_with(volume)
        mock_qemu_img_info.assert_called_once_with(volume_path)
        mock_convert_image.assert_called_once_with(
            volume_path, upload_path, 'raw')
        mock_upload_volume.assert_called_once_with(
            mock.ANY, mock.ANY, mock.ANY, upload_path)
        self.assertTrue(mock_create_temporary_file.called)
def test_set_nas_security_options_default(self):
    """By default both NAS security options are enabled ("true")."""
    conf = self._driver.configuration
    for value in (conf.nas_secure_file_operations,
                  conf.nas_secure_file_permissions):
        self.assertEqual("true", value)
    # With secure file operations enabled, root execution is not used.
    self.assertFalse(self._driver._execute_as_root)
def test_set_nas_security_options_insecure(self):
    """Disabling both NAS options forces command execution as root."""
    driver = self._driver
    cfg = driver.configuration
    cfg.nas_secure_file_operations = "false"
    cfg.nas_secure_file_permissions = "false"

    driver.set_nas_security_options(is_new_cinder_install=True)

    self.assertEqual("false", cfg.nas_secure_file_operations)
    self.assertEqual("false", cfg.nas_secure_file_permissions)
    self.assertTrue(driver._execute_as_root)
def test_set_nas_security_options_explicitly_secure(self):
    """Explicitly enabling both NAS options keeps root execution off."""
    driver = self._driver
    cfg = driver.configuration
    cfg.nas_secure_file_operations = "true"
    cfg.nas_secure_file_permissions = "true"

    driver.set_nas_security_options(is_new_cinder_install=True)

    self.assertEqual("true", cfg.nas_secure_file_operations)
    self.assertEqual("true", cfg.nas_secure_file_permissions)
    self.assertFalse(driver._execute_as_root)
| Hybrid-Cloud/cinder | cinder/tests/unit/volume/drivers/test_quobyte.py | Python | apache-2.0 | 38,830 | 0.000052 |
import sublime
import sublime_plugin
import re
import subprocess
from array import *
from .giterCommandThread import CommandThread
from .commandBuilders import buildCommand
from .jsonDecoderBuilder import JsonDecoderBuilder
from .sbtBuildFileEditor import SbtBuildFileEditor
from .logger import LoggerFacade
from .utils import EXECUTABLES
from .settings import SettingsManager
from .generatorFacadeExceptions import GeneratorFacadeInitializationError
from functools import *
class ScalaProjectGeneratorFacadeCommand(sublime_plugin.TextCommand):
    """Sublime Text command that drives giter8-based Scala project creation.

    Flow: pick an SBT template from a quick panel, enter the project path
    and the template properties via chained input panels, then run giter8,
    ensime configuration and sublime-project generation in background
    threads, showing an animated status message while each thread runs.
    """

    def __init__(self, k):
        sublime_plugin.TextCommand.__init__(self, k)
        # Label prepended to every template name shown in the quick panel.
        self.ProjectNamePrefix = "SBT Template: "
        # (key, default) pairs decoded from the selected template.
        self.templateDefaultProperties = []
        # (key, user_value) pairs collected from the input panels.
        self.templateUserProps = []
        self.selectedTemplateName = ''
        self.projectPath = ''
        self.ProjectBaseDir = ''
        # Index of the property currently being asked for.
        self.propertyIndex = 0

    def __initProjectGeneratorFacade(self):
        """Load settings, build the JSON decoder and the template list."""
        self.logger.info("Generator initialization started")
        self.settingsManager = SettingsManager(EXECUTABLES)
        self.settingsManager.create_executable_paths()
        self.jsonDataDecoder = JsonDecoderBuilder(
            self.settingsManager).createJsonDecoder()
        self.sbtTemplates = [
            self.ProjectNamePrefix + t for t in
            self.jsonDataDecoder.getProjectTemplatesNames()]

    def run(self, edit):
        """Entry point invoked by Sublime when the command is executed."""
        LoggerFacade.clear_log_file()
        self.logger = LoggerFacade.getLogger()
        self.logger.debug(
            '\n\n----- Scala Project Generator Facade has started -----\n\n')
        try:
            self.__initProjectGeneratorFacade()
            self.view.window().show_quick_panel(
                self.sbtTemplates, self.on_projectTemplateSelected)
        except GeneratorFacadeInitializationError as e:
            self.logger.error(e.message + e.causedBy)

    def on_projectTemplateSelected(self, user_input):
        """Quick-panel callback: remember the template, ask for the path."""
        # this if is only temporary workaround for Sublime 3 Beta API problem with
        # on_done event for show_quick_panel. The current Bug is that the event method is
        # called twice. First invocation returns -1, the other one is correct.
        if user_input == -1:
            return 0
        # Strip the display prefix to recover the raw template name.
        self.selectedTemplateName = (
            self.sbtTemplates[user_input])[len(self.ProjectNamePrefix):]
        self.logger.debug("Selected template: %s", self.selectedTemplateName)
        self.templateDefaultProperties = self.jsonDataDecoder.getTemplateDefaultProperties(
            self.selectedTemplateName)
        self.view.window().show_input_panel(
            "Project Path", '', self.on_projectPathEntered, None, None)

    def on_projectPathEntered(self, user_input):
        """Input-panel callback: store the path, ask for the first property."""
        self.projectPath = user_input
        item = self.templateDefaultProperties[self.propertyIndex]
        self.view.window().show_input_panel(
            item[0], item[1], self.on_propetySelected, None, None)

    def on_propetySelected(self, user_input):
        """Collect one property value; recurse until all are gathered,
        then kick off the giter8 thread chain."""
        prop = self.templateDefaultProperties[self.propertyIndex]
        if prop[0] == 'name':
            # The 'name' property determines the generated directory name.
            self._buildProjectBaseDir(user_input)
        self.templateUserProps.append((prop[0], user_input))
        self.propertyIndex += 1
        if self.propertyIndex < len(self.templateDefaultProperties):
            item = self.templateDefaultProperties[self.propertyIndex]
            self.view.window().show_input_panel(
                item[0], item[1], self.on_propetySelected, None, None)
        else:
            self.propertyIndex = 0
            self.gitterThread()

    def _buildProjectBaseDir(self, user_input):
        """Derive the project directory (giter8 style: lowercased,
        whitespace replaced by dashes) under the chosen path."""
        g8ProjectDirName = re.sub("\s+", '-', user_input).lower()
        self.ProjectBaseDir = self.projectPath + '/' + g8ProjectDirName
        self.logger.debug("Project Base Dir Path: %s", self.ProjectBaseDir)

    def handleThread(self, thread, timeout, key, message, handleLiveThread, nextStep, i=0, dir=1):
        """Poll a worker thread: keep the status animation running while it
        is alive, then clear the status and run the next pipeline step.

        NOTE(review): ``timeout`` is accepted but never used; polling is
        rescheduled with a hard-coded 100 below.
        """
        if thread.is_alive():
            handleLiveThread(key, message, partial(self.handleThread,
                             thread, 100, key, message, handleLiveThread, nextStep), i, dir)
        else:
            self.view.set_status(key, '')
            nextStep()

    def handleLiveThread(self, key, message, currentThread, i=0, dir=1):
        """Draw one frame of the bouncing '[ = ]' status animation and
        reschedule the poll via sublime's timer."""
        def animate(i, dir):
            # Position of the '=' within an 8-cell track.
            before = i % 8
            after = (7) - before
            if not after:
                dir = -1
            if not before:
                dir = 1
            i += 1
            self.view.set_status(
                key, message + ' [%s=%s]' % (' ' * before, ' ' * after))
            return (i, dir)
        a = animate(i, dir)
        sublime.set_timeout(lambda: currentThread(a[0], a[1]))

    def _prepareAndRunThread(self, commandName, path, isShellUsed, statusMessage, nextStep, additionalData=[]):
        """Build and start one external command thread, then watch it.

        NOTE(review): mutable default ``additionalData=[]`` — harmless as
        long as callees never append to it, but worth confirming.
        """
        command = buildCommand(commandName,
                               self.settingsManager.get_executables(), additionalData)
        thread = CommandThread(command, path, isShellUsed)
        thread.start()
        self.handleThread(
            thread, 100, commandName, statusMessage, self.handleLiveThread, nextStep)

    def gitterThread(self):
        """Step 1: run giter8 to generate the project skeleton."""
        self._prepareAndRunThread(
            'gitter', self.projectPath, False, 'Giter Template generation',
            self.ensimeThread, additionalData=[self.selectedTemplateName, self.templateUserProps])

    def ensimeThread(self):
        """Step 2: generate the ensime configuration."""
        # NOTE(review): "confiugration" typo in the user-visible status text.
        self._prepareAndRunThread(
            'ensime', self.ProjectBaseDir, True, "Ensime confiugration", self.genSublimeThread)

    def genSublimeThread(self):
        """Step 3: patch build.sbt, then generate the sublime project."""
        self.modifySbtBuildFile()
        self._prepareAndRunThread(
            'gen-sublime', self.ProjectBaseDir, True, "Gen Sublime", self.openProject)

    def openProject(self):
        """Step 4: open the generated project in Sublime Text."""
        self._execute_on_sublime_command_line(
            ['-a', self.ProjectBaseDir], self.settingsManager.get_executables())

    def _execute_on_sublime_command_line(self, args, execs):
        """Spawn the sublime executable with the given arguments."""
        args.insert(0, execs.SUBLIME[2]['executable_path'])
        return subprocess.Popen(args)

    def modifySbtBuildFile(self):
        """Append sublime-plugin settings to the generated build.sbt.

        NOTE(review): the file is opened without a ``with`` block; if an
        editor call raises, the handle leaks — consider a context manager.
        """
        sbtFile = open(self.ProjectBaseDir + "/build.sbt", "a")
        sbtFileEditor = SbtBuildFileEditor(sbtFile)
        sbtFileEditor.simpleTransformationBatch(
            [('sublimeExternalSourceDirectoryName',
              '"' + self._getSettingByKey('sublime_gen_external_source_dir') + '"'),
             ('sublimeTransitive',
              self._getSettingByKey('sublime_gen_transitiv'))])
        sbtFileEditor.transformUsingOtherKey(
            ('sublimeExternalSourceDirectoryParent',
             self._getSettingByKey('sublime_gen_extenal_source_dir_parent')))
        sbtFile.close()

    def _getSettingByKey(self, key):
        """Convenience accessor for plugin settings."""
        return self.settingsManager.get_setting(key)
| lgmerek/ScalaProjectGeneratorFacade | scalaProjectGeneratorFacade.py | Python | mit | 6,824 | 0.002638 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import swisseph as swe
import unittest
class TestSweLunEclipse(unittest.TestCase):
    """Regression tests for the swisseph lunar-eclipse search functions."""

    @classmethod
    def setUpClass(cls):
        # Use the default ephemeris search path (no argument).
        swe.set_ephe_path()

    def test_01(self):
        """Pin exact results of lun_eclipse_when/when_loc/how.

        Starting from JD 2454466.5 (2008-01-01), the next lunar eclipse
        and its local circumstances for a fixed geographic position are
        compared against previously recorded reference values.
        """
        jd = 2454466.5
        flags = swe.FLG_SWIEPH
        # (longitude, latitude, altitude in metres)
        geopos = (12.1, 49.0, 330)

        # Global search for the next lunar eclipse after jd.
        rflags, tret = swe.lun_eclipse_when(jd, flags, 0)
        self.assertEqual(rflags, 4)
        self.assertEqual(len(tret), 10)
        t1 = (2454517.6430690456, 0.0, 2454517.57172334, 2454517.7144189165,
              2454517.6258038115, 2454517.6603509136, 2454517.525389122,
              2454517.7608554545, 0.0, 0.0)
        for i in range(10):
            self.assertAlmostEqual(tret[i], t1[i])

        # Local search from the time of maximum eclipse found above.
        tjdut = tret[0]
        rflags, tret, attr = swe.lun_eclipse_when_loc(tjdut, geopos, flags)
        self.assertEqual(rflags, 29584)
        self.assertEqual(len(tret), 10)
        t1 = (2454695.3820517384, 0.0, 2454695.316710297, 2454695.447390333,
              0.0, 0.0, 2454695.2672055247, 2454695.496797575, 0.0, 0.0)
        for i in range(10):
            self.assertAlmostEqual(tret[i], t1[i])
        self.assertEqual(len(attr), 20)
        t1 = (0.8076127691060245, 1.8366497324296667, 0.0, 0.0,
              326.9885866287668, 21.362590458352507, 21.402251051495636,
              0.5301609960196174, 0.8076127691060245, 138.0, 28.0, 28.0,
              28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0)
        for i in range(20):
            self.assertAlmostEqual(attr[i], t1[i])

        # Eclipse attributes as seen from the same position at tjdut.
        rflags, attr = swe.lun_eclipse_how(tjdut, geopos, flags)
        self.assertEqual(rflags, 4)
        self.assertEqual(len(attr), 20)
        t1 = (1.1061093373639495, 2.145134309769692, 0.0, 0.0,
              73.8203145568749, 26.299290272560974, 26.330700027276947,
              0.3801625589840114, 1.1061093373639495, 133.0, 26.0, 0.0,
              0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
        for i in range(20):
            self.assertAlmostEqual(attr[i], t1[i])
# Allow running this module directly to execute the tests above.
if __name__ == '__main__':
    unittest.main()
# vi: sw=4 ts=4 et
| astrorigin/pyswisseph | tests/test_swe_lun_eclipse.py | Python | gpl-2.0 | 2,097 | 0.005246 |
"""Common test objects."""
import copy
from datetime import datetime
import json
from unittest.mock import ANY, patch
from homeassistant.components import mqtt
from homeassistant.components.mqtt import debug_info
from homeassistant.components.mqtt.const import MQTT_DISCONNECTED
from homeassistant.components.mqtt.mixins import MQTT_ATTRIBUTES_BLOCKED
from homeassistant.const import ATTR_ASSUMED_STATE, STATE_UNAVAILABLE
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.setup import async_setup_component
from tests.common import async_fire_mqtt_message, mock_registry
# Discovery device-info payload identified by an ``identifiers`` list.
DEFAULT_CONFIG_DEVICE_INFO_ID = dict(
    identifiers=["helloworld"],
    manufacturer="Whatever",
    name="Beer",
    model="Glass",
    sw_version="0.1-beta",
    suggested_area="default_area",
)
# Discovery device-info payload identified by a network-MAC connection
# instead of an ``identifiers`` list.
DEFAULT_CONFIG_DEVICE_INFO_MAC = dict(
    connections=[[dr.CONNECTION_NETWORK_MAC, "02:5b:26:a8:dc:12"]],
    manufacturer="Whatever",
    name="Beer",
    model="Glass",
    sw_version="0.1-beta",
    suggested_area="default_area",
)
async def help_test_availability_when_connection_lost(hass, mqtt_mock, domain, config):
    """Test availability after MQTT disconnection."""
    assert await async_setup_component(hass, domain, config)
    await hass.async_block_till_done()

    entity_id = f"{domain}.test"
    assert hass.states.get(entity_id).state != STATE_UNAVAILABLE

    # Simulate the broker connection dropping: the entity must go
    # unavailable.
    mqtt_mock.connected = False
    async_dispatcher_send(hass, MQTT_DISCONNECTED)
    await hass.async_block_till_done()

    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
async def help_test_availability_without_topic(hass, mqtt_mock, domain, config):
    """Test availability without defined availability topic."""
    # Sanity check: the supplied config really has no availability topic.
    assert "availability_topic" not in config[domain]

    assert await async_setup_component(hass, domain, config)
    await hass.async_block_till_done()

    # Without an availability topic the entity is available right away.
    assert hass.states.get(f"{domain}.test").state != STATE_UNAVAILABLE
async def help_test_default_availability_payload(
    hass,
    mqtt_mock,
    domain,
    config,
    no_assumed_state=False,
    state_topic=None,
    state_message=None,
):
    """Test availability by default payload with defined topic.

    This is a test helper for the MqttAvailability mixin.
    """
    # Inject an availability topic into a copy of the supplied config.
    config = copy.deepcopy(config)
    config[domain]["availability_topic"] = "availability-topic"
    assert await async_setup_component(hass, domain, config)
    await hass.async_block_till_done()

    entity_id = f"{domain}.test"
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE

    # The default "online" payload marks the entity available.
    async_fire_mqtt_message(hass, "availability-topic", "online")
    state = hass.states.get(entity_id)
    assert state.state != STATE_UNAVAILABLE
    if no_assumed_state:
        assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # The default "offline" payload makes it unavailable again.
    async_fire_mqtt_message(hass, "availability-topic", "offline")
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE

    if state_topic:
        # A state update while offline must not override availability.
        async_fire_mqtt_message(hass, state_topic, state_message)
        assert hass.states.get(entity_id).state == STATE_UNAVAILABLE

        async_fire_mqtt_message(hass, "availability-topic", "online")
        assert hass.states.get(entity_id).state != STATE_UNAVAILABLE
async def help_test_default_availability_list_payload(
    hass,
    mqtt_mock,
    domain,
    config,
    no_assumed_state=False,
    state_topic=None,
    state_message=None,
):
    """Test availability by default payload with defined topic.

    This is a test helper for the MqttAvailability mixin.
    Two availability topics are configured without an explicit
    availability_mode; the asserts below show the most recently received
    payload on either topic determines availability.
    """
    # Add availability settings to config
    config = copy.deepcopy(config)
    config[domain]["availability"] = [
        {"topic": "availability-topic1"},
        {"topic": "availability-topic2"},
    ]
    assert await async_setup_component(
        hass,
        domain,
        config,
    )
    await hass.async_block_till_done()

    # Before any availability message arrives the entity is unavailable.
    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    async_fire_mqtt_message(hass, "availability-topic1", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE
    if no_assumed_state:
        assert not state.attributes.get(ATTR_ASSUMED_STATE)

    async_fire_mqtt_message(hass, "availability-topic1", "offline")

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    # A message on the second topic is honoured just the same.
    async_fire_mqtt_message(hass, "availability-topic2", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE
    if no_assumed_state:
        assert not state.attributes.get(ATTR_ASSUMED_STATE)

    async_fire_mqtt_message(hass, "availability-topic2", "offline")

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    if state_topic:
        # A state update while offline must not override availability.
        async_fire_mqtt_message(hass, state_topic, state_message)

        state = hass.states.get(f"{domain}.test")
        assert state.state == STATE_UNAVAILABLE

        async_fire_mqtt_message(hass, "availability-topic1", "online")

        state = hass.states.get(f"{domain}.test")
        assert state.state != STATE_UNAVAILABLE
async def help_test_default_availability_list_payload_all(
    hass,
    mqtt_mock,
    domain,
    config,
    no_assumed_state=False,
    state_topic=None,
    state_message=None,
):
    """Test availability by default payload with defined topic.

    This is a test helper for the MqttAvailability mixin.
    With availability_mode "all" the entity is available only while
    *every* configured topic has last reported "online" (asserted by the
    transition sequence below).
    """
    # Add availability settings to config
    config = copy.deepcopy(config)
    config[domain]["availability_mode"] = "all"
    config[domain]["availability"] = [
        {"topic": "availability-topic1"},
        {"topic": "availability-topic2"},
    ]
    assert await async_setup_component(
        hass,
        domain,
        config,
    )
    await hass.async_block_till_done()

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    # Only one of the two topics online -> still unavailable.
    async_fire_mqtt_message(hass, "availability-topic1", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE
    if no_assumed_state:
        assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # Both topics online -> available.
    async_fire_mqtt_message(hass, "availability-topic2", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE

    # Either topic dropping offline makes the entity unavailable again.
    async_fire_mqtt_message(hass, "availability-topic2", "offline")

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE
    if no_assumed_state:
        assert not state.attributes.get(ATTR_ASSUMED_STATE)

    async_fire_mqtt_message(hass, "availability-topic2", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE

    async_fire_mqtt_message(hass, "availability-topic1", "offline")

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE
    if no_assumed_state:
        assert not state.attributes.get(ATTR_ASSUMED_STATE)

    async_fire_mqtt_message(hass, "availability-topic1", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE
async def help_test_default_availability_list_payload_any(
    hass,
    mqtt_mock,
    domain,
    config,
    no_assumed_state=False,
    state_topic=None,
    state_message=None,
):
    """Test availability by default payload with defined topic.

    This is a test helper for the MqttAvailability mixin.
    With availability_mode "any" the entity is available while *at least
    one* configured topic has last reported "online" (asserted by the
    transition sequence below).
    """
    # Add availability settings to config
    config = copy.deepcopy(config)
    config[domain]["availability_mode"] = "any"
    config[domain]["availability"] = [
        {"topic": "availability-topic1"},
        {"topic": "availability-topic2"},
    ]
    assert await async_setup_component(
        hass,
        domain,
        config,
    )
    await hass.async_block_till_done()

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    # A single topic online is enough for availability.
    async_fire_mqtt_message(hass, "availability-topic1", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE
    if no_assumed_state:
        assert not state.attributes.get(ATTR_ASSUMED_STATE)

    async_fire_mqtt_message(hass, "availability-topic2", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE

    # Topic 2 going offline does not matter while topic 1 is online.
    async_fire_mqtt_message(hass, "availability-topic2", "offline")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE
    if no_assumed_state:
        assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # Both topics offline -> unavailable.
    async_fire_mqtt_message(hass, "availability-topic1", "offline")

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    async_fire_mqtt_message(hass, "availability-topic1", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE
    if no_assumed_state:
        assert not state.attributes.get(ATTR_ASSUMED_STATE)
async def help_test_default_availability_list_single(
    hass,
    mqtt_mock,
    caplog,
    domain,
    config,
    no_assumed_state=False,
    state_topic=None,
    state_message=None,
):
    """Test availability list and availability_topic are mutually exclusive.

    This is a test helper for the MqttAvailability mixin.
    """
    # Configure both mutually exclusive options at the same time.
    config = copy.deepcopy(config)
    config[domain]["availability"] = [{"topic": "availability-topic1"}]
    config[domain]["availability_topic"] = "availability-topic"
    assert await async_setup_component(hass, domain, config)
    await hass.async_block_till_done()

    # The entity must not have been created ...
    assert hass.states.get(f"{domain}.test") is None
    # ... and the config-validation error must have been logged.
    assert (
        "Invalid config for [sensor.mqtt]: two or more values in the same group of exclusion 'availability'"
        in caplog.text
    )
async def help_test_custom_availability_payload(
    hass,
    mqtt_mock,
    domain,
    config,
    no_assumed_state=False,
    state_topic=None,
    state_message=None,
):
    """Test availability by custom payload with defined topic.

    This is a test helper for the MqttAvailability mixin.
    Custom payloads "good"/"nogood" replace the default "online"/"offline".
    """
    # Add availability settings to config
    config = copy.deepcopy(config)
    config[domain]["availability_topic"] = "availability-topic"
    config[domain]["payload_available"] = "good"
    config[domain]["payload_not_available"] = "nogood"
    assert await async_setup_component(
        hass,
        domain,
        config,
    )
    await hass.async_block_till_done()

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    # The custom "available" payload marks the entity available.
    async_fire_mqtt_message(hass, "availability-topic", "good")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE
    if no_assumed_state:
        assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # The custom "not available" payload makes it unavailable again.
    async_fire_mqtt_message(hass, "availability-topic", "nogood")

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    if state_topic:
        # A state update while offline must not override availability.
        async_fire_mqtt_message(hass, state_topic, state_message)

        state = hass.states.get(f"{domain}.test")
        assert state.state == STATE_UNAVAILABLE

        async_fire_mqtt_message(hass, "availability-topic", "good")

        state = hass.states.get(f"{domain}.test")
        assert state.state != STATE_UNAVAILABLE
async def help_test_discovery_update_availability(
    hass,
    mqtt_mock,
    domain,
    config,
    no_assumed_state=False,
    state_topic=None,
    state_message=None,
):
    """Test update of discovered MQTTAvailability.

    This is a test helper for the MQTTAvailability mixin.
    Checks that rediscovery unsubscribes from the old availability
    topic(s) and subscribes to the new one(s): first a single topic,
    then an availability list, then a different single topic.
    """
    # Add availability settings to config
    config1 = copy.deepcopy(config)
    config1[domain]["availability_topic"] = "availability-topic1"
    config2 = copy.deepcopy(config)
    config2[domain]["availability"] = [
        {"topic": "availability-topic2"},
        {"topic": "availability-topic3"},
    ]
    config3 = copy.deepcopy(config)
    config3[domain]["availability_topic"] = "availability-topic4"
    data1 = json.dumps(config1[domain])
    data2 = json.dumps(config2[domain])
    data3 = json.dumps(config3[domain])

    # Discover with a single availability topic.
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data1)
    await hass.async_block_till_done()

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    async_fire_mqtt_message(hass, "availability-topic1", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE

    async_fire_mqtt_message(hass, "availability-topic1", "offline")

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    # Change availability_topic
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data2)
    await hass.async_block_till_done()

    # Verify we are no longer subscribing to the old topic
    async_fire_mqtt_message(hass, "availability-topic1", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    # Verify we are subscribing to the new topic
    async_fire_mqtt_message(hass, "availability-topic2", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE

    # Verify we are subscribing to the new topic
    async_fire_mqtt_message(hass, "availability-topic3", "offline")

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    # Change availability_topic
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data3)
    await hass.async_block_till_done()

    # Verify we are no longer subscribing to the old topic
    async_fire_mqtt_message(hass, "availability-topic2", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    # Verify we are no longer subscribing to the old topic
    async_fire_mqtt_message(hass, "availability-topic3", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE

    # Verify we are subscribing to the new topic
    async_fire_mqtt_message(hass, "availability-topic4", "online")

    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE
async def help_test_setting_attribute_via_mqtt_json_message(
    hass, mqtt_mock, domain, config
):
    """Test the setting of attribute via MQTT with JSON payload.

    This is a test helper for the MqttAttributes mixin.
    """
    # Inject a JSON attributes topic into a copy of the supplied config.
    config = copy.deepcopy(config)
    config[domain]["json_attributes_topic"] = "attr-topic"
    assert await async_setup_component(hass, domain, config)
    await hass.async_block_till_done()

    # A JSON object on the attributes topic becomes entity attributes.
    async_fire_mqtt_message(hass, "attr-topic", '{ "val": "100" }')

    assert hass.states.get(f"{domain}.test").attributes.get("val") == "100"
async def help_test_setting_blocked_attribute_via_mqtt_json_message(
    hass, mqtt_mock, domain, config, extra_blocked_attributes
):
    """Test the setting of blocked attribute via MQTT with JSON payload.

    This is a test helper for the MqttAttributes mixin.
    """
    extra_blocked_attributes = extra_blocked_attributes or []

    # Set the entity up via MQTT discovery with an attributes topic.
    config = copy.deepcopy(config)
    config[domain]["json_attributes_topic"] = "attr-topic"
    data = json.dumps(config[domain])
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()

    val = "abc123"
    # Neither the generally blocked attributes nor the platform-specific
    # extras may ever appear on the entity state.
    for attr in [*MQTT_ATTRIBUTES_BLOCKED, *extra_blocked_attributes]:
        async_fire_mqtt_message(hass, "attr-topic", json.dumps({attr: val}))
        state = hass.states.get(f"{domain}.test")
        assert state.attributes.get(attr) != val
async def help_test_setting_attribute_with_template(hass, mqtt_mock, domain, config):
    """Test the setting of attribute via MQTT with JSON payload.

    This is a test helper for the MqttAttributes mixin.
    """
    config = copy.deepcopy(config)
    config[domain]["json_attributes_topic"] = "attr-topic"
    config[domain]["json_attributes_template"] = "{{ value_json['Timer1'] | tojson }}"
    assert await async_setup_component(hass, domain, config)
    await hass.async_block_till_done()

    # The template extracts only the "Timer1" sub-object from the payload.
    payload = json.dumps({"Timer1": {"Arm": 0, "Time": "22:18"}})
    async_fire_mqtt_message(hass, "attr-topic", payload)

    attributes = hass.states.get(f"{domain}.test").attributes
    assert attributes.get("Arm") == 0
    assert attributes.get("Time") == "22:18"
async def help_test_update_with_json_attrs_not_dict(
    hass, mqtt_mock, caplog, domain, config
):
    """Test attributes get extracted from a JSON result.

    This is a test helper for the MqttAttributes mixin.
    """
    config = copy.deepcopy(config)
    config[domain]["json_attributes_topic"] = "attr-topic"
    assert await async_setup_component(hass, domain, config)
    await hass.async_block_till_done()

    # A JSON list is valid JSON but not a dictionary: it must be rejected.
    async_fire_mqtt_message(hass, "attr-topic", '[ "list", "of", "things"]')

    assert hass.states.get(f"{domain}.test").attributes.get("val") is None
    assert "JSON result was not a dictionary" in caplog.text
async def help_test_update_with_json_attrs_bad_JSON(
    hass, mqtt_mock, caplog, domain, config
):
    """Test JSON validation of attributes.

    This is a test helper for the MqttAttributes mixin.
    """
    config = copy.deepcopy(config)
    config[domain]["json_attributes_topic"] = "attr-topic"
    assert await async_setup_component(hass, domain, config)
    await hass.async_block_till_done()

    # A payload that is not JSON at all is rejected and logged.
    async_fire_mqtt_message(hass, "attr-topic", "This is not JSON")

    assert hass.states.get(f"{domain}.test").attributes.get("val") is None
    assert "Erroneous JSON: This is not JSON" in caplog.text
async def help_test_discovery_update_attr(hass, mqtt_mock, caplog, domain, config):
    """Test update of discovered MQTTAttributes.

    This is a test helper for the MqttAttributes mixin.
    Rediscovery must unsubscribe from the old attributes topic and
    subscribe to the new one.
    """
    # Add JSON attributes settings to config
    config1 = copy.deepcopy(config)
    config1[domain]["json_attributes_topic"] = "attr-topic1"
    config2 = copy.deepcopy(config)
    config2[domain]["json_attributes_topic"] = "attr-topic2"
    data1 = json.dumps(config1[domain])
    data2 = json.dumps(config2[domain])

    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data1)
    await hass.async_block_till_done()
    async_fire_mqtt_message(hass, "attr-topic1", '{ "val": "100" }')
    state = hass.states.get(f"{domain}.test")
    assert state.attributes.get("val") == "100"

    # Change json_attributes_topic
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data2)
    await hass.async_block_till_done()

    # Verify we are no longer subscribing to the old topic
    async_fire_mqtt_message(hass, "attr-topic1", '{ "val": "50" }')
    state = hass.states.get(f"{domain}.test")
    assert state.attributes.get("val") == "100"

    # Verify we are subscribing to the new topic
    async_fire_mqtt_message(hass, "attr-topic2", '{ "val": "75" }')
    state = hass.states.get(f"{domain}.test")
    assert state.attributes.get("val") == "75"
async def help_test_unique_id(hass, mqtt_mock, domain, config):
    """Verify entities sharing a unique_id collapse into exactly one entity."""
    setup_ok = await async_setup_component(hass, domain, config)
    assert setup_ok
    await hass.async_block_till_done()
    entity_ids = hass.states.async_entity_ids(domain)
    assert len(entity_ids) == 1
async def help_test_discovery_removal(hass, mqtt_mock, caplog, domain, data):
    """Test removal of discovered component.

    Discovers an entity via an MQTT discovery payload, then publishes an
    empty payload on the same discovery topic and verifies the entity is
    removed from the state machine.

    This is a test helper for the MqttDiscoveryUpdate mixin.
    """
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    state = hass.states.get(f"{domain}.test")
    assert state is not None
    assert state.name == "test"
    # An empty discovery payload removes the discovered entity.
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", "")
    await hass.async_block_till_done()
    state = hass.states.get(f"{domain}.test")
    assert state is None
def _verify_mqtt_state_data(hass, domain, state_data):
    """Replay (mqtt_messages, expected_state, attributes) tuples and verify.

    For each tuple, fire all MQTT messages and then check the state and/or
    attributes of the `{domain}.beer` entity. A falsy expected_state or
    attributes entry skips that particular check. No-op when state_data is
    falsy.
    """
    if not state_data:
        return
    for (mqtt_messages, expected_state, attributes) in state_data:
        for (topic, data) in mqtt_messages:
            async_fire_mqtt_message(hass, topic, data)
        state = hass.states.get(f"{domain}.beer")
        if expected_state:
            assert state.state == expected_state
        if attributes:
            for (attr, value) in attributes:
                assert state.attributes.get(attr) == value


async def help_test_discovery_update(
    hass,
    mqtt_mock,
    caplog,
    domain,
    discovery_data1,
    discovery_data2,
    state_data1=None,
    state_data2=None,
):
    """Test update of discovered component.

    Discovers an entity named "Beer", optionally exercises it with
    state_data1, rediscovers it as "Milk" on the same discovery topic,
    optionally exercises it with state_data2, and verifies the update
    happened in place (no new `{domain}.milk` entity appears).

    This is a test helper for the MqttDiscoveryUpdate mixin.
    """
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", discovery_data1)
    await hass.async_block_till_done()
    state = hass.states.get(f"{domain}.beer")
    assert state is not None
    assert state.name == "Beer"
    _verify_mqtt_state_data(hass, domain, state_data1)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", discovery_data2)
    await hass.async_block_till_done()
    state = hass.states.get(f"{domain}.beer")
    assert state is not None
    assert state.name == "Milk"
    _verify_mqtt_state_data(hass, domain, state_data2)
    # The entity was updated in place; no second entity may be created.
    state = hass.states.get(f"{domain}.milk")
    assert state is None
async def help_test_discovery_update_unchanged(
    hass, mqtt_mock, caplog, domain, data1, discovery_update
):
    """Test update of discovered component without changes.

    Publishes the identical discovery payload twice and verifies that the
    (mocked) discovery-update callback is never invoked.

    This is a test helper for the MqttDiscoveryUpdate mixin.
    """
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data1)
    await hass.async_block_till_done()
    state = hass.states.get(f"{domain}.beer")
    assert state is not None
    assert state.name == "Beer"
    # Re-send the exact same payload; it must be treated as a no-op.
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data1)
    await hass.async_block_till_done()
    assert not discovery_update.called
async def help_test_discovery_broken(hass, mqtt_mock, caplog, domain, data1, data2):
    """Test handling of bad discovery message.

    Publishes a broken discovery payload (data1) which must not create an
    entity, then a valid payload (data2) on the same topic which must create
    the "Milk" entity, proving discovery recovered from the bad message.
    """
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data1)
    await hass.async_block_till_done()
    state = hass.states.get(f"{domain}.beer")
    assert state is None
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data2)
    await hass.async_block_till_done()
    state = hass.states.get(f"{domain}.milk")
    assert state is not None
    assert state.name == "Milk"
    state = hass.states.get(f"{domain}.beer")
    assert state is None
def _assert_default_device_info_fields(device):
    """Verify the device fields shared by the device-info discovery helpers.

    Expected values match DEFAULT_CONFIG_DEVICE_INFO_ID /
    DEFAULT_CONFIG_DEVICE_INFO_MAC, which only differ in how the device is
    identified (identifiers vs. connections).
    """
    assert device.manufacturer == "Whatever"
    assert device.name == "Beer"
    assert device.model == "Glass"
    assert device.sw_version == "0.1-beta"
    assert device.suggested_area == "default_area"


async def help_test_entity_device_info_with_identifier(hass, mqtt_mock, domain, config):
    """Test device registry integration for a device with identifiers.

    Discovers an entity carrying DEFAULT_CONFIG_DEVICE_INFO_ID and verifies
    a device identified by ("mqtt", "helloworld") is registered with the
    expected info fields.

    This is a test helper for the MqttDiscoveryUpdate mixin.
    """
    # Add device settings to config
    config = copy.deepcopy(config[domain])
    config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID)
    config["unique_id"] = "veryunique"
    registry = dr.async_get(hass)
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    device = registry.async_get_device({("mqtt", "helloworld")})
    assert device is not None
    assert device.identifiers == {("mqtt", "helloworld")}
    _assert_default_device_info_fields(device)


async def help_test_entity_device_info_with_connection(hass, mqtt_mock, domain, config):
    """Test device registry integration for a device with a MAC connection.

    Discovers an entity carrying DEFAULT_CONFIG_DEVICE_INFO_MAC and verifies
    a device identified by its network MAC connection is registered with the
    expected info fields.

    This is a test helper for the MqttDiscoveryUpdate mixin.
    """
    # Add device settings to config
    config = copy.deepcopy(config[domain])
    config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_MAC)
    config["unique_id"] = "veryunique"
    registry = dr.async_get(hass)
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    device = registry.async_get_device(
        set(), {(dr.CONNECTION_NETWORK_MAC, "02:5b:26:a8:dc:12")}
    )
    assert device is not None
    assert device.connections == {(dr.CONNECTION_NETWORK_MAC, "02:5b:26:a8:dc:12")}
    _assert_default_device_info_fields(device)
async def help_test_entity_device_info_remove(hass, mqtt_mock, domain, config):
    """Test device registry remove.

    Discovers an entity with device info, then clears the discovery topic
    and verifies both the entity-registry entry and the device-registry
    entry are removed.
    """
    # Add device settings to config
    config = copy.deepcopy(config[domain])
    config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID)
    config["unique_id"] = "veryunique"
    dev_registry = dr.async_get(hass)
    ent_registry = er.async_get(hass)
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    device = dev_registry.async_get_device({("mqtt", "helloworld")})
    assert device is not None
    assert ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, "veryunique")
    # Empty discovery payload removes the entity and its now-orphaned device.
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", "")
    await hass.async_block_till_done()
    device = dev_registry.async_get_device({("mqtt", "helloworld")})
    assert device is None
    assert not ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, "veryunique")
async def help_test_entity_device_info_update(hass, mqtt_mock, domain, config):
    """Test device registry update.

    Discovers an entity whose device is named "Beer", rediscovers it with
    the device renamed to "Milk", and verifies the registry entry is
    updated in place.

    This is a test helper for the MqttDiscoveryUpdate mixin.
    """
    # Add device settings to config
    config = copy.deepcopy(config[domain])
    config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID)
    config["unique_id"] = "veryunique"
    registry = dr.async_get(hass)
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    device = registry.async_get_device({("mqtt", "helloworld")})
    assert device is not None
    assert device.name == "Beer"
    # Rediscover with an updated device name.
    config["device"]["name"] = "Milk"
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    device = registry.async_get_device({("mqtt", "helloworld")})
    assert device is not None
    assert device.name == "Milk"
async def help_test_entity_id_update_subscriptions(
    hass, mqtt_mock, domain, config, topics=None
):
    """Test MQTT subscriptions are managed when entity_id is updated.

    Sets up an entity subscribed to `topics` (defaults to an availability
    topic plus a state topic), renames the entity in the entity registry,
    and verifies every topic is re-subscribed for the renamed entity.
    """
    # Add unique_id to config
    config = copy.deepcopy(config)
    config[domain]["unique_id"] = "TOTALLY_UNIQUE"
    if topics is None:
        # Add default topics to config
        config[domain]["availability_topic"] = "avty-topic"
        config[domain]["state_topic"] = "test-topic"
        topics = ["avty-topic", "test-topic"]
    assert len(topics) > 0
    registry = mock_registry(hass, {})
    assert await async_setup_component(
        hass,
        domain,
        config,
    )
    await hass.async_block_till_done()
    state = hass.states.get(f"{domain}.test")
    assert state is not None
    assert mqtt_mock.async_subscribe.call_count == len(topics)
    for topic in topics:
        mqtt_mock.async_subscribe.assert_any_call(topic, ANY, ANY, ANY)
    mqtt_mock.async_subscribe.reset_mock()
    # Renaming the entity must re-create every subscription under the new id.
    registry.async_update_entity(f"{domain}.test", new_entity_id=f"{domain}.milk")
    await hass.async_block_till_done()
    state = hass.states.get(f"{domain}.test")
    assert state is None
    state = hass.states.get(f"{domain}.milk")
    assert state is not None
    for topic in topics:
        mqtt_mock.async_subscribe.assert_any_call(topic, ANY, ANY, ANY)
async def help_test_entity_id_update_discovery_update(
    hass, mqtt_mock, domain, config, topic=None
):
    """Test MQTT discovery update after entity_id is updated.

    Discovers an entity, checks availability handling on `topic`, renames
    the entity in the entity registry, then sends an updated discovery
    payload pointing at a new availability topic and verifies the renamed
    entity follows the new topic.
    """
    # Add unique_id to config
    config = copy.deepcopy(config)
    config[domain]["unique_id"] = "TOTALLY_UNIQUE"
    if topic is None:
        # Add default topic to config
        config[domain]["availability_topic"] = "avty-topic"
        topic = "avty-topic"
    ent_registry = mock_registry(hass, {})
    data = json.dumps(config[domain])
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    async_fire_mqtt_message(hass, topic, "online")
    state = hass.states.get(f"{domain}.test")
    assert state.state != STATE_UNAVAILABLE
    async_fire_mqtt_message(hass, topic, "offline")
    state = hass.states.get(f"{domain}.test")
    assert state.state == STATE_UNAVAILABLE
    # Rename the entity, then rediscover with a changed availability topic.
    ent_registry.async_update_entity(f"{domain}.test", new_entity_id=f"{domain}.milk")
    await hass.async_block_till_done()
    config[domain]["availability_topic"] = f"{topic}_2"
    data = json.dumps(config[domain])
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    assert len(hass.states.async_entity_ids(domain)) == 1
    async_fire_mqtt_message(hass, f"{topic}_2", "online")
    state = hass.states.get(f"{domain}.milk")
    assert state.state != STATE_UNAVAILABLE
async def help_test_entity_debug_info(hass, mqtt_mock, domain, config):
    """Test debug_info.

    Discovers an entity and verifies the MQTT debug info for its device
    reports the discovery topic/payload and the single "test-topic"
    subscription, with no triggers.

    This is a test helper for MQTT debug_info.
    """
    # Add device settings to config
    config = copy.deepcopy(config[domain])
    config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID)
    config["unique_id"] = "veryunique"
    registry = dr.async_get(hass)
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    device = registry.async_get_device({("mqtt", "helloworld")})
    assert device is not None
    debug_info_data = await debug_info.info_for_device(hass, device.id)
    assert len(debug_info_data["entities"]) == 1
    assert (
        debug_info_data["entities"][0]["discovery_data"]["topic"]
        == f"homeassistant/{domain}/bla/config"
    )
    assert debug_info_data["entities"][0]["discovery_data"]["payload"] == config
    assert len(debug_info_data["entities"][0]["subscriptions"]) == 1
    assert {"topic": "test-topic", "messages": []} in debug_info_data["entities"][0][
        "subscriptions"
    ]
    assert len(debug_info_data["triggers"]) == 0
async def help_test_entity_debug_info_max_messages(hass, mqtt_mock, domain, config):
    """Test debug_info message overflow.

    Publishes STORED_MESSAGES + 1 messages on the subscribed topic and
    verifies the debug-info buffer keeps only the newest STORED_MESSAGES of
    them (message "0" is dropped).

    This is a test helper for MQTT debug_info.
    """
    # Add device settings to config
    config = copy.deepcopy(config[domain])
    config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID)
    config["unique_id"] = "veryunique"
    registry = dr.async_get(hass)
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    device = registry.async_get_device({("mqtt", "helloworld")})
    assert device is not None
    debug_info_data = await debug_info.info_for_device(hass, device.id)
    assert len(debug_info_data["entities"][0]["subscriptions"]) == 1
    assert {"topic": "test-topic", "messages": []} in debug_info_data["entities"][0][
        "subscriptions"
    ]
    start_dt = datetime(2019, 1, 1, 0, 0, 0)
    # Freeze time so all stored messages carry a deterministic timestamp.
    with patch("homeassistant.util.dt.utcnow") as dt_utcnow:
        dt_utcnow.return_value = start_dt
        for i in range(0, debug_info.STORED_MESSAGES + 1):
            async_fire_mqtt_message(hass, "test-topic", f"{i}")
    debug_info_data = await debug_info.info_for_device(hass, device.id)
    assert len(debug_info_data["entities"][0]["subscriptions"]) == 1
    assert (
        len(debug_info_data["entities"][0]["subscriptions"][0]["messages"])
        == debug_info.STORED_MESSAGES
    )
    # Oldest message ("0") was evicted; "1"..STORED_MESSAGES remain.
    messages = [
        {
            "payload": f"{i}",
            "qos": 0,
            "retain": False,
            "time": start_dt,
            "topic": "test-topic",
        }
        for i in range(1, debug_info.STORED_MESSAGES + 1)
    ]
    assert {"topic": "test-topic", "messages": messages} in debug_info_data["entities"][
        0
    ]["subscriptions"]
async def help_test_entity_debug_info_message(
    hass, mqtt_mock, domain, config, topic=None, payload=None
):
    """Test a received MQTT message is recorded in debug_info.

    Publishes `payload` (default "ON") on `topic` (default the entity's
    state topic) and verifies the message, with its timestamp, shows up in
    the subscription's debug info.

    This is a test helper for MQTT debug_info.
    """
    # Add device settings to config
    config = copy.deepcopy(config[domain])
    config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID)
    config["unique_id"] = "veryunique"
    if topic is None:
        # Add default topic to config
        config["state_topic"] = "state-topic"
        topic = "state-topic"
    if payload is None:
        payload = "ON"
    registry = dr.async_get(hass)
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    device = registry.async_get_device({("mqtt", "helloworld")})
    assert device is not None
    debug_info_data = await debug_info.info_for_device(hass, device.id)
    assert len(debug_info_data["entities"][0]["subscriptions"]) >= 1
    assert {"topic": topic, "messages": []} in debug_info_data["entities"][0][
        "subscriptions"
    ]
    start_dt = datetime(2019, 1, 1, 0, 0, 0)
    # Freeze time so the recorded message carries a deterministic timestamp.
    with patch("homeassistant.util.dt.utcnow") as dt_utcnow:
        dt_utcnow.return_value = start_dt
        async_fire_mqtt_message(hass, topic, payload)
    debug_info_data = await debug_info.info_for_device(hass, device.id)
    assert len(debug_info_data["entities"][0]["subscriptions"]) >= 1
    assert {
        "topic": topic,
        "messages": [
            {
                "payload": payload,
                "qos": 0,
                "retain": False,
                "time": start_dt,
                "topic": topic,
            }
        ],
    } in debug_info_data["entities"][0]["subscriptions"]
async def help_test_entity_debug_info_remove(hass, mqtt_mock, domain, config):
    """Test debug_info is purged when an entity is removed.

    Discovers an entity, verifies its debug info, then clears the discovery
    topic and verifies the debug info (entities list and internal
    bookkeeping) is removed.

    This is a test helper for MQTT debug_info.
    """
    # Add device settings to config
    config = copy.deepcopy(config[domain])
    config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID)
    config["unique_id"] = "veryunique"
    registry = dr.async_get(hass)
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    device = registry.async_get_device({("mqtt", "helloworld")})
    assert device is not None
    debug_info_data = await debug_info.info_for_device(hass, device.id)
    assert len(debug_info_data["entities"]) == 1
    assert (
        debug_info_data["entities"][0]["discovery_data"]["topic"]
        == f"homeassistant/{domain}/bla/config"
    )
    assert debug_info_data["entities"][0]["discovery_data"]["payload"] == config
    assert len(debug_info_data["entities"][0]["subscriptions"]) == 1
    assert {"topic": "test-topic", "messages": []} in debug_info_data["entities"][0][
        "subscriptions"
    ]
    assert len(debug_info_data["triggers"]) == 0
    assert debug_info_data["entities"][0]["entity_id"] == f"{domain}.test"
    entity_id = debug_info_data["entities"][0]["entity_id"]
    # Removing the entity must also drop its debug-info bookkeeping.
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", "")
    await hass.async_block_till_done()
    debug_info_data = await debug_info.info_for_device(hass, device.id)
    assert len(debug_info_data["entities"]) == 0
    assert len(debug_info_data["triggers"]) == 0
    assert entity_id not in hass.data[debug_info.DATA_MQTT_DEBUG_INFO]["entities"]
async def help_test_entity_debug_info_update_entity_id(hass, mqtt_mock, domain, config):
    """Test debug_info follows an entity_id rename.

    Discovers an entity, renames it in the entity registry, and verifies
    the debug info is re-keyed to the new entity_id while the old one is
    dropped from the internal bookkeeping.

    This is a test helper for MQTT debug_info.
    """
    # Add device settings to config
    config = copy.deepcopy(config[domain])
    config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID)
    config["unique_id"] = "veryunique"
    dev_registry = dr.async_get(hass)
    ent_registry = mock_registry(hass, {})
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data)
    await hass.async_block_till_done()
    device = dev_registry.async_get_device({("mqtt", "helloworld")})
    assert device is not None
    debug_info_data = await debug_info.info_for_device(hass, device.id)
    assert len(debug_info_data["entities"]) == 1
    assert (
        debug_info_data["entities"][0]["discovery_data"]["topic"]
        == f"homeassistant/{domain}/bla/config"
    )
    assert debug_info_data["entities"][0]["discovery_data"]["payload"] == config
    assert debug_info_data["entities"][0]["entity_id"] == f"{domain}.test"
    assert len(debug_info_data["entities"][0]["subscriptions"]) == 1
    assert {"topic": "test-topic", "messages": []} in debug_info_data["entities"][0][
        "subscriptions"
    ]
    assert len(debug_info_data["triggers"]) == 0
    ent_registry.async_update_entity(f"{domain}.test", new_entity_id=f"{domain}.milk")
    # Two settles: the rename schedules follow-up work that needs a second
    # pass to complete — NOTE(review): confirm both are still required.
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    debug_info_data = await debug_info.info_for_device(hass, device.id)
    assert len(debug_info_data["entities"]) == 1
    assert (
        debug_info_data["entities"][0]["discovery_data"]["topic"]
        == f"homeassistant/{domain}/bla/config"
    )
    assert debug_info_data["entities"][0]["discovery_data"]["payload"] == config
    assert debug_info_data["entities"][0]["entity_id"] == f"{domain}.milk"
    assert len(debug_info_data["entities"][0]["subscriptions"]) == 1
    assert {"topic": "test-topic", "messages": []} in debug_info_data["entities"][0][
        "subscriptions"
    ]
    assert len(debug_info_data["triggers"]) == 0
    assert (
        f"{domain}.test" not in hass.data[debug_info.DATA_MQTT_DEBUG_INFO]["entities"]
    )
async def help_test_entity_disabled_by_default(hass, mqtt_mock, domain, config):
    """Test discovery of an entity disabled by default.

    Discovers a disabled entity (registered but with no state), then an
    enabled entity on the same device, then removes the enabled one and
    verifies both registry entries and the device itself are removed.
    """
    # Add device settings to config
    config = copy.deepcopy(config[domain])
    config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID)
    config["enabled_by_default"] = False
    config["unique_id"] = "veryunique1"
    dev_registry = dr.async_get(hass)
    ent_registry = er.async_get(hass)
    # Discover a disabled entity
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla1/config", data)
    await hass.async_block_till_done()
    entity_id = ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, "veryunique1")
    assert not hass.states.get(entity_id)
    assert dev_registry.async_get_device({("mqtt", "helloworld")})
    # Discover an enabled entity, tied to the same device
    config["enabled_by_default"] = True
    config["unique_id"] = "veryunique2"
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla2/config", data)
    await hass.async_block_till_done()
    entity_id = ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, "veryunique2")
    assert hass.states.get(entity_id)
    # Remove the enabled entity, both entities and the device should be removed
    async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla2/config", "")
    await hass.async_block_till_done()
    assert not ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, "veryunique1")
    assert not ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, "veryunique2")
    assert not dev_registry.async_get_device({("mqtt", "helloworld")})
| sander76/home-assistant | tests/components/mqtt/test_common.py | Python | apache-2.0 | 41,174 | 0.00085 |
from builtins import str
import logging
import subprocess
from airflow.executors.base_executor import BaseExecutor
from airflow.utils import State
class SequentialExecutor(BaseExecutor):
    """
    This executor will only run one task instance at a time, can be used
    for debugging. It is also the only executor that can be used with sqlite
    since sqlite doesn't support multiple connections.

    Since we want airflow to work out of the box, it defaults to this
    SequentialExecutor alongside sqlite as you first install it.
    """

    def __init__(self):
        super(SequentialExecutor, self).__init__()
        # Queue of (key, command) tuples, drained on every sync().
        self.commands_to_run = []

    def execute_async(self, key, command, queue=None):
        """Queue a command; it is actually run synchronously in sync()."""
        self.commands_to_run.append((key, command,))

    def sync(self):
        """Run every queued command one at a time, recording task state."""
        for key, command in self.commands_to_run:
            logging.info("Executing command: %s", command)
            try:
                # check_call raises CalledProcessError on a non-zero exit
                # code, so failing commands are marked FAILED.  The previous
                # Popen/wait pair ignored the return code and reported
                # SUCCESS even when the task command failed.
                subprocess.check_call(command, shell=True)
            except Exception:
                self.change_state(key, State.FAILED)
                raise
            self.change_state(key, State.SUCCESS)
        self.commands_to_run = []

    def end(self):
        """Flush any queued commands before shutting down."""
        self.heartbeat()
| wangtuanjie/airflow | airflow/executors/sequential_executor.py | Python | apache-2.0 | 1,230 | 0 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""LSTM Block Cell ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
from tensorflow.contrib.rnn.ops import gen_lstm_ops
from tensorflow.contrib.util import loader
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.layers import base as base_layer
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.platform import resource_loader
# Load the compiled custom-op library that provides the LSTMBlockCell /
# BlockLSTM kernels invoked through gen_lstm_ops below.
_lstm_ops_so = loader.load_op_library(
    resource_loader.get_path_to_datafile("_lstm_ops.so"))
LayerRNNCell = rnn_cell_impl.LayerRNNCell  # pylint: disable=invalid-name
# pylint: disable=invalid-name
def _lstm_block_cell(x,
                     cs_prev,
                     h_prev,
                     w,
                     b,
                     wci=None,
                     wcf=None,
                     wco=None,
                     forget_bias=None,
                     cell_clip=None,
                     use_peephole=None,
                     name=None):
  r"""Computes the LSTM cell forward propagation for 1 time step.

  This implementation uses 1 weight matrix and 1 bias vector, and there's an
  optional peephole connection.

  This kernel op implements the following mathematical equations:

  ```python
  xh = [x, h_prev]
  [i, ci, f, o] = xh * w + b
  f = f + forget_bias

  if not use_peephole:
    wci = wcf = wco = 0

  i = sigmoid(cs_prev * wci + i)
  f = sigmoid(cs_prev * wcf + f)
  ci = tanh(ci)

  cs = ci .* i + cs_prev .* f
  cs = clip(cs, cell_clip)

  o = sigmoid(cs * wco + o)
  co = tanh(cs)
  h = co .* o
  ```

  Args:
    x: A `Tensor`. Must be one of the following types: `float32`.
      The input to the LSTM cell, shape (batch_size, num_inputs).
    cs_prev: A `Tensor`. Must have the same type as `x`.
      Value of the cell state at previous time step.
    h_prev: A `Tensor`. Must have the same type as `x`.
      Output of the previous cell at previous time step.
    w: A `Tensor`. Must have the same type as `x`. The weight matrix.
    b: A `Tensor`. Must have the same type as `x`. The bias vector.
    wci: A `Tensor`. Must have the same type as `x`.
      The weight matrix for input gate peephole connection.
    wcf: A `Tensor`. Must have the same type as `x`.
      The weight matrix for forget gate peephole connection.
    wco: A `Tensor`. Must have the same type as `x`.
      The weight matrix for output gate peephole connection.
    forget_bias: An optional `float`. Defaults to `1`. The forget gate bias.
    cell_clip: An optional `float`. Defaults to `-1` (no clipping).
      Value to clip the 'cs' value to. Disable by setting to negative value.
    use_peephole: An optional `bool`. Defaults to `False`.
      Whether to use peephole weights.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (i, cs, f, o, ci, co, h).
    i: A `Tensor`. Has the same type as `x`. The input gate.
    cs: A `Tensor`. Has the same type as `x`. The cell state before the tanh.
    f: A `Tensor`. Has the same type as `x`. The forget gate.
    o: A `Tensor`. Has the same type as `x`. The output gate.
    ci: A `Tensor`. Has the same type as `x`. The cell input.
    co: A `Tensor`. Has the same type as `x`. The cell after the tanh.
    h: A `Tensor`. Has the same type as `x`. The output h vector.

  Raises:
    ValueError: If cell_size is None.
  """
  if wci is None:
    cell_size = cs_prev.get_shape().with_rank(2).dims[1].value
    if cell_size is None:
      raise ValueError("cell_size from `cs_prev` should not be None.")
    # Use the dtype of the state instead of hard-coding float32 so the
    # peephole zeros always match the other op inputs; this mirrors the
    # default-fill behavior of _block_lstm below.
    wci = array_ops.constant(0, dtype=cs_prev.dtype, shape=[cell_size])
    wcf = wci
    wco = wci

  # pylint: disable=protected-access
  return gen_lstm_ops.lstm_block_cell(
      x=x,
      cs_prev=cs_prev,
      h_prev=h_prev,
      w=w,
      wci=wci,
      wcf=wcf,
      wco=wco,
      b=b,
      forget_bias=forget_bias,
      cell_clip=cell_clip if cell_clip is not None else -1,
      use_peephole=use_peephole,
      name=name)
  # pylint: enable=protected-access
def _block_lstm(seq_len_max,
                x,
                w,
                b,
                cs_prev=None,
                h_prev=None,
                wci=None,
                wcf=None,
                wco=None,
                forget_bias=None,
                cell_clip=None,
                use_peephole=None,
                name=None):
  r"""Fused multi-timestep LSTM forward pass over a list of input tensors.

  Args:
    seq_len_max: A `Tensor` of type `int64`.
    x: A list of at least 1 `Tensor` objects of the same type.
    w: A `Tensor`. Must have the same type as `x`.
    b: A `Tensor`. Must have the same type as `x`.
    cs_prev: A `Tensor`. Must have the same type as `x`.
    h_prev: A `Tensor`. Must have the same type as `x`.
    wci: A `Tensor`. Must have the same type as `x`.
    wcf: A `Tensor`. Must have the same type as `x`.
    wco: A `Tensor`. Must have the same type as `x`.
    forget_bias: An optional `float`. Defaults to `1`.
    cell_clip: An optional `float`. Defaults to `-1` (no clipping).
    use_peephole: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (i, cs, f, o, ci, co, h).
    i: A list with the same number of `Tensor` objects as `x` of `Tensor`
      objects of the same type as x.
    cs: A list with the same number of `Tensor` objects as `x` of `Tensor`
      objects of the same type as x.
    f: A list with the same number of `Tensor` objects as `x` of `Tensor`
      objects of the same type as x.
    o: A list with the same number of `Tensor` objects as `x` of `Tensor`
      objects of the same type as x.
    ci: A list with the same number of `Tensor` objects as `x` of `Tensor`
      objects of the same type as x.
    co: A list with the same number of `Tensor` objects as `x` of `Tensor`
      objects of the same type as x.
    h: A list with the same number of `Tensor` objects as `x` of `Tensor`
      objects of the same type as x.

  Raises:
    ValueError: If `b` does not have a valid shape.
  """
  dtype = x[0].dtype
  batch_size = x[0].get_shape().with_rank(2).dims[0].value
  cell_size4 = b.get_shape().with_rank(1).dims[0].value
  if cell_size4 is None:
    raise ValueError("`b` shape must not be None.")
  # Floor division keeps cell_size an int: this module imports
  # __future__.division, so `/` would produce a float, which is not a
  # valid shape dimension for the constants built below.
  cell_size = cell_size4 // 4

  zero_state = None
  if cs_prev is None or h_prev is None:
    zero_state = array_ops.constant(
        0, dtype=dtype, shape=[batch_size, cell_size])
  if cs_prev is None:
    cs_prev = zero_state
  if h_prev is None:
    h_prev = zero_state
  if wci is None:
    wci = array_ops.constant(0, dtype=dtype, shape=[cell_size])
    wcf = wci
    wco = wci

  # pylint: disable=protected-access
  i, cs, f, o, ci, co, h = gen_lstm_ops.block_lstm(
      seq_len_max=seq_len_max,
      x=array_ops.stack(x),
      cs_prev=cs_prev,
      h_prev=h_prev,
      w=w,
      wci=wci,
      wcf=wcf,
      wco=wco,
      b=b,
      forget_bias=forget_bias,
      cell_clip=cell_clip if cell_clip is not None else -1,
      name=name,
      use_peephole=use_peephole)

  return array_ops.unstack(i), array_ops.unstack(cs), array_ops.unstack(
      f), array_ops.unstack(o), array_ops.unstack(ci), array_ops.unstack(
          co), array_ops.unstack(h)
  # pylint: enable=protected-access
# pylint: enable=invalid-name
# Names of the first two outputs of the LSTMBlockCellGrad kernel
# (documentation aid; not referenced elsewhere in the visible code).
_lstm_block_cell_grad_outputs = ["cs_prev_grad", "dicfo"]
@ops.RegisterGradient("LSTMBlockCell")
def _LSTMBlockCellGrad(op, *grad):
  """Gradient for LSTMBlockCell.

  Args:
    op: The forward LSTMBlockCell op.
    *grad: Gradients w.r.t. each of the op's seven outputs; only the
      gradients w.r.t. `cs` and `h` are consumed here.

  Returns:
    Gradients w.r.t. each of the op's eight inputs
    (x, cs_prev, h_prev, w, wci, wcf, wco, b).

  Raises:
    ValueError: If the static input size or cell size cannot be inferred.
  """
  (x, cs_prev, h_prev, w, wci, wcf, wco, b) = op.inputs
  (i, cs, f, o, ci, co, _) = op.outputs
  (_, cs_grad, _, _, _, _, h_grad) = grad
  batch_size = x.get_shape().with_rank(2).dims[0].value
  if batch_size is None:
    # Dynamic batch dimension: -1 lets the slices below infer it at runtime.
    batch_size = -1
  input_size = x.get_shape().with_rank(2).dims[1].value
  if input_size is None:
    raise ValueError("input_size from `x` should not be None.")
  cell_size = cs_prev.get_shape().with_rank(2).dims[1].value
  if cell_size is None:
    raise ValueError("cell_size from `cs_prev` should not be None.")
  # The grad kernel returns the gradient w.r.t. the previous cell state
  # plus `dicfo`, the gradient w.r.t. the fused gate pre-activations
  # (the result of xh * w + b), and the peephole weight gradients.
  (cs_prev_grad, dicfo, wci_grad, wcf_grad,
   wco_grad) = gen_lstm_ops.lstm_block_cell_grad(
       x,
       cs_prev,
       h_prev,
       w,
       wci,
       wcf,
       wco,
       b,
       i,
       cs,
       f,
       o,
       ci,
       co,
       cs_grad,
       h_grad,
       use_peephole=op.get_attr("use_peephole"))
  # Backprop from dicfo to xh.
  xh_grad = math_ops.matmul(dicfo, w, transpose_b=True)
  # Split the concatenated xh gradient back into its x and h_prev parts.
  x_grad = array_ops.slice(xh_grad, (0, 0), (batch_size, input_size))
  x_grad.get_shape().merge_with(x.get_shape())
  h_prev_grad = array_ops.slice(xh_grad, (0, input_size),
                                (batch_size, cell_size))
  h_prev_grad.get_shape().merge_with(h_prev.get_shape())
  # Backprop from dicfo to w.
  xh = array_ops.concat([x, h_prev], 1)
  w_grad = math_ops.matmul(xh, dicfo, transpose_a=True)
  w_grad.get_shape().merge_with(w.get_shape())
  # Backprop from dicfo to b.
  b_grad = nn_ops.bias_add_grad(dicfo)
  b_grad.get_shape().merge_with(b.get_shape())
  return (x_grad, cs_prev_grad, h_prev_grad, w_grad, wci_grad, wcf_grad,
          wco_grad, b_grad)
@ops.RegisterGradient("BlockLSTM")
def _BlockLSTMGrad(op, *grad):
  """Gradient for BlockLSTM.

  Args:
    op: The forward BlockLSTM op.
    *grad: Gradients w.r.t. the op's seven outputs; only the gradients
      w.r.t. `cs` (grad[1]) and `h` (grad[6]) are consumed here.

  Returns:
    A list of gradients for the forward op's inputs, with `None` for
    seq_len_max (an int64 length input, not differentiable).
  """
  seq_len_max, x, cs_prev, h_prev, w, wci, wcf, wco, b = op.inputs
  i, cs, f, o, ci, co, h = op.outputs

  cs_grad = grad[1]
  h_grad = grad[6]

  # The fused backward kernel receives the saved forward activations and
  # produces gradients for all differentiable inputs in one call.
  (x_grad, cs_prev_grad, h_prev_grad, w_grad, wci_grad, wcf_grad, wco_grad,
   b_grad) = gen_lstm_ops.block_lstm_grad(
       seq_len_max,
       x,
       cs_prev,
       h_prev,
       w,
       wci,
       wcf,
       wco,
       b,
       i,
       cs,
       f,
       o,
       ci,
       co,
       h,
       cs_grad,
       h_grad,
       use_peephole=op.get_attr("use_peephole"))

  return [
      None, x_grad, cs_prev_grad, h_prev_grad, w_grad, wci_grad, wcf_grad,
      wco_grad, b_grad
  ]
class LSTMBlockCell(LayerRNNCell):
"""Basic LSTM recurrent network cell.
The implementation is based on: http://arxiv.org/abs/1409.2329.
We add `forget_bias` (default: 1) to the biases of the forget gate in order to
reduce the scale of forgetting in the beginning of the training.
Unlike `rnn_cell_impl.LSTMCell`, this is a monolithic op and should be much
faster. The weight and bias matrices should be compatible as long as the
variable scope matches.
"""
def __init__(self,
num_units,
forget_bias=1.0,
cell_clip=None,
use_peephole=False,
dtype=None,
reuse=None,
name="lstm_cell"):
"""Initialize the basic LSTM cell.
Args:
num_units: int, The number of units in the LSTM cell.
forget_bias: float, The bias added to forget gates (see above).
cell_clip: An optional `float`. Defaults to `-1` (no clipping).
use_peephole: Whether to use peephole connections or not.
dtype: the variable dtype of this layer. Default to tf.float32.
reuse: (optional) boolean describing whether to reuse variables in an
existing scope. If not `True`, and the existing scope already has the
given variables, an error is raised.
name: String, the name of the layer. Layers with the same name will
share weights, but to avoid mistakes we require reuse=True in such
cases. By default this is "lstm_cell", for variable-name compatibility
with `tf.nn.rnn_cell.LSTMCell`.
When restoring from CudnnLSTM-trained checkpoints, must use
CudnnCompatibleLSTMBlockCell instead.
"""
super(LSTMBlockCell, self).__init__(_reuse=reuse, dtype=dtype, name=name)
self._num_units = num_units
self._forget_bias = forget_bias
self._use_peephole = use_peephole
self._cell_clip = cell_clip if cell_clip is not None else -1
self._names = {
"W": "kernel",
"b": "bias",
"wci": "w_i_diag",
"wcf": "w_f_diag",
"wco": "w_o_diag",
"scope": "lstm_cell"
}
# Inputs must be 2-dimensional.
self.input_spec = base_layer.InputSpec(ndim=2)
@property
def state_size(self):
return rnn_cell_impl.LSTMStateTuple(self._num_units, self._num_units)
@property
def output_size(self):
return self._num_units
def build(self, inputs_shape):
if not inputs_shape.dims[1].value:
raise ValueError(
"Expecting inputs_shape[1] to be set: %s" % str(inputs_shape))
input_size = inputs_shape.dims[1].value
self._kernel = self.add_variable(
self._names["W"], [input_size + self._num_units, self._num_units * 4])
self._bias = self.add_variable(
self._names["b"], [self._num_units * 4],
initializer=init_ops.constant_initializer(0.0))
if self._use_peephole:
self._w_i_diag = self.add_variable(self._names["wci"], [self._num_units])
self._w_f_diag = self.add_variable(self._names["wcf"], [self._num_units])
self._w_o_diag = self.add_variable(self._names["wco"], [self._num_units])
self.built = True
  def call(self, inputs, state):
    """Long short-term memory cell (LSTM)."""
    if len(state) != 2:
      raise ValueError("Expecting state to be a tuple with length 2.")

    if self._use_peephole:
      wci = self._w_i_diag
      wcf = self._w_f_diag
      wco = self._w_o_diag
    else:
      # The fused op always expects peephole tensors; feed zeros when
      # peepholes are disabled.
      wci = wcf = wco = array_ops.zeros([self._num_units], dtype=self.dtype)

    (cs_prev, h_prev) = state
    # The op returns several intermediate gate activations; only the new
    # cell state (cs) and output (h) are needed here.
    (_, cs, _, _, _, _, h) = _lstm_block_cell(
        inputs,
        cs_prev,
        h_prev,
        self._kernel,
        self._bias,
        wci=wci,
        wcf=wcf,
        wco=wco,
        forget_bias=self._forget_bias,
        cell_clip=self._cell_clip,
        use_peephole=self._use_peephole)

    new_state = rnn_cell_impl.LSTMStateTuple(cs, h)
    return h, new_state
@six.add_metaclass(abc.ABCMeta)
class LSTMBlockWrapper(base_layer.Layer):
  """This is a helper class that provides housekeeping for LSTM cells.

  This may be useful for alternative LSTM and similar type of cells.
  The subclasses must implement `_call_cell` method and `num_units` property.
  """

  @abc.abstractproperty
  def num_units(self):
    """Number of units in this cell (output dimension)."""
    pass

  @abc.abstractmethod
  def _call_cell(self, inputs, initial_cell_state, initial_output, dtype,
                 sequence_length):
    """Run this LSTM on inputs, starting from the given state.

    This method must be implemented by subclasses and does the actual work
    of calling the cell.

    Args:
      inputs: `3-D` tensor with shape `[time_len, batch_size, input_size]`
      initial_cell_state: initial value for cell state, shape `[batch_size,
        self._num_units]`
      initial_output: initial value of cell output, shape `[batch_size,
        self._num_units]`
      dtype: The data type for the initial state and expected output.
      sequence_length: Specifies the length of each sequence in inputs. An int32
        or int64 vector (tensor) size [batch_size], values in [0, time_len) or
        None.

    Returns:
      A pair containing:

      - State: A `3-D` tensor of shape `[time_len, batch_size, output_size]`
      - Output: A `3-D` tensor of shape `[time_len, batch_size, output_size]`
    """
    pass

  def call(self, inputs, initial_state=None, dtype=None, sequence_length=None):
    """Run this LSTM on inputs, starting from the given state.

    Args:
      inputs: `3-D` tensor with shape `[time_len, batch_size, input_size]`.
      initial_state: a tuple `(initial_cell_state, initial_output)` with tensors
        of shape `[batch_size, self._num_units]`. If this is not provided, the
        cell is expected to create a zero initial state of type `dtype`.
      dtype: The data type for the initial state and expected output. Required
        if `initial_state` is not provided or RNN state has a heterogeneous
        dtype.
      sequence_length: Specifies the length of each sequence in inputs. An
        `int32` or `int64` vector (tensor) size `[batch_size]`, values in `[0,
        time_len).`
        Defaults to `time_len` for each element.

    Returns:
      A pair containing:

      - Output: A `3-D` tensor of shape `[time_len, batch_size, output_size]`
        or a list of time_len tensors of shape `[batch_size, output_size]`,
        to match the type of the `inputs`.
      - Final state: a tuple `(cell_state, output)` matching `initial_state`.

    Raises:
      ValueError: in case of shape mismatches
    """
    is_list = isinstance(inputs, list)
    if is_list:
      inputs = array_ops.stack(inputs)
    inputs_shape = inputs.get_shape().with_rank(3)
    if not inputs_shape[2]:
      raise ValueError("Expecting inputs_shape[2] to be set: %s" % inputs_shape)
    batch_size = inputs_shape.dims[1].value
    if batch_size is None:
      # Shape not known statically; fall back to the runtime shape tensor.
      batch_size = array_ops.shape(inputs)[1]
    time_len = inputs_shape.dims[0].value
    if time_len is None:
      time_len = array_ops.shape(inputs)[0]

    # Provide default values for initial_state and dtype
    if initial_state is None:
      if dtype is None:
        raise ValueError("Either initial_state or dtype needs to be specified")
      z = array_ops.zeros(
          array_ops.stack([batch_size, self.num_units]), dtype=dtype)
      initial_state = z, z
    else:
      if len(initial_state) != 2:
        raise ValueError(
            "Expecting initial_state to be a tuple with length 2 or None")
      if dtype is None:
        dtype = initial_state[0].dtype

    # create the actual cell
    if sequence_length is not None:
      sequence_length = ops.convert_to_tensor(sequence_length)
    initial_cell_state, initial_output = initial_state  # pylint: disable=unpacking-non-sequence
    cell_states, outputs = self._call_cell(
        inputs, initial_cell_state, initial_output, dtype, sequence_length)

    if sequence_length is not None:
      # Mask out the part beyond sequence_length.  The [time, batch] mask is
      # transposed to match the [time_len, batch_size, ...] layout and tiled
      # across the feature dimension.
      mask = array_ops.transpose(
          array_ops.sequence_mask(sequence_length, time_len, dtype=dtype),
          [1, 0])
      mask = array_ops.tile(
          array_ops.expand_dims(mask, [-1]), [1, 1, self.num_units])
      outputs *= mask
      # Prepend initial states to cell_states and outputs for indexing to work
      # correctly,since we want to access the last valid state at
      # sequence_length - 1, which can even be -1, corresponding to the
      # initial state.
      mod_cell_states = array_ops.concat(
          [array_ops.expand_dims(initial_cell_state, [0]), cell_states], 0)
      mod_outputs = array_ops.concat(
          [array_ops.expand_dims(initial_output, [0]), outputs], 0)
      final_cell_state = self._gather_states(mod_cell_states, sequence_length,
                                             batch_size)
      final_output = self._gather_states(mod_outputs, sequence_length,
                                         batch_size)
    else:
      # No sequence_lengths used: final state is the last state
      final_cell_state = cell_states[-1]
      final_output = outputs[-1]

    if is_list:
      # Input was a list, so return a list
      outputs = array_ops.unstack(outputs)

    final_state = rnn_cell_impl.LSTMStateTuple(final_cell_state, final_output)
    return outputs, final_state

  def _gather_states(self, data, indices, batch_size):
    """Produce `out`, s.t. out(i, j) = data(indices(i), i, j)."""
    return array_ops.gather_nd(
        data, array_ops.stack([indices, math_ops.range(batch_size)], axis=1))
class LSTMBlockFusedCell(LSTMBlockWrapper):
  """FusedRNNCell implementation of LSTM.

  This is an extremely efficient LSTM implementation, that uses a single TF op
  for the entire LSTM. It should be both faster and more memory-efficient than
  LSTMBlockCell defined above.

  The implementation is based on: http://arxiv.org/abs/1409.2329.

  We add forget_bias (default: 1) to the biases of the forget gate in order to
  reduce the scale of forgetting in the beginning of the training.

  The variable naming is consistent with `rnn_cell_impl.LSTMCell`.
  """

  def __init__(self,
               num_units,
               forget_bias=1.0,
               cell_clip=None,
               use_peephole=False,
               reuse=None,
               dtype=None,
               name="lstm_fused_cell"):
    """Initialize the LSTM cell.

    Args:
      num_units: int, The number of units in the LSTM cell.
      forget_bias: float, The bias added to forget gates (see above).
      cell_clip: clip the cell to this value. Defaults is no cell clipping.
      use_peephole: Whether to use peephole connections or not.
      reuse: (optional) boolean describing whether to reuse variables in an
        existing scope. If not `True`, and the existing scope already has the
        given variables, an error is raised.
      dtype: the dtype of variables of this layer.
      name: String, the name of the layer. Layers with the same name will
        share weights, but to avoid mistakes we require reuse=True in such
        cases. By default this is "lstm_cell", for variable-name compatibility
        with `tf.nn.rnn_cell.LSTMCell`.
    """
    super(LSTMBlockFusedCell, self).__init__(
        _reuse=reuse, name=name, dtype=dtype)
    self._num_units = num_units
    self._forget_bias = forget_bias
    # -1 signals "no clipping" to the underlying fused op.
    self._cell_clip = cell_clip if cell_clip is not None else -1
    self._use_peephole = use_peephole

    # Inputs must be 3-dimensional.
    self.input_spec = base_layer.InputSpec(ndim=3)

  @property
  def num_units(self):
    """Number of units in this cell (output dimension)."""
    return self._num_units

  def build(self, input_shape):
    """Create fused kernel/bias (and optional peephole) variables."""
    input_size = input_shape.dims[2].value
    self._kernel = self.add_variable(
        "kernel", [input_size + self._num_units, self._num_units * 4])
    self._bias = self.add_variable(
        "bias", [self._num_units * 4],
        initializer=init_ops.constant_initializer(0.0))
    if self._use_peephole:
      self._w_i_diag = self.add_variable("w_i_diag", [self._num_units])
      self._w_f_diag = self.add_variable("w_f_diag", [self._num_units])
      self._w_o_diag = self.add_variable("w_o_diag", [self._num_units])

    self.built = True

  def _call_cell(self,
                 inputs,
                 initial_cell_state=None,
                 initial_output=None,
                 dtype=None,
                 sequence_length=None):
    """Run this LSTM on inputs, starting from the given state.

    Args:
      inputs: `3-D` tensor with shape `[time_len, batch_size, input_size]`
      initial_cell_state: initial value for cell state, shape `[batch_size,
        self._num_units]`
      initial_output: initial value of cell output, shape `[batch_size,
        self._num_units]`
      dtype: The data type for the initial state and expected output.
      sequence_length: Specifies the length of each sequence in inputs. An
        `int32` or `int64` vector (tensor) size `[batch_size]`, values in `[0,
        time_len)` or None.

    Returns:
      A pair containing:

      - Cell state (cs): A `3-D` tensor of shape `[time_len, batch_size,
        output_size]`
      - Output (h): A `3-D` tensor of shape `[time_len, batch_size,
        output_size]`
    """

    inputs_shape = inputs.get_shape().with_rank(3)
    time_len = inputs_shape.dims[0].value
    if time_len is None:
      time_len = array_ops.shape(inputs)[0]

    if self._use_peephole:
      wci = self._w_i_diag
      wco = self._w_o_diag
      wcf = self._w_f_diag
    else:
      # The fused op always expects peephole tensors; feed zeros when unused.
      wci = wcf = wco = array_ops.zeros([self._num_units], dtype=dtype)

    if sequence_length is None:
      max_seq_len = math_ops.to_int64(time_len)
    else:
      # Let the op stop computing once every sequence in the batch is done.
      max_seq_len = math_ops.to_int64(math_ops.reduce_max(sequence_length))

    _, cs, _, _, _, _, h = gen_lstm_ops.block_lstm(
        seq_len_max=max_seq_len,
        x=inputs,
        cs_prev=initial_cell_state,
        h_prev=initial_output,
        w=self._kernel,
        wci=wci,
        wcf=wcf,
        wco=wco,
        b=self._bias,
        forget_bias=self._forget_bias,
        cell_clip=self._cell_clip,
        use_peephole=self._use_peephole)
    return cs, h
| seanli9jan/tensorflow | tensorflow/contrib/rnn/python/ops/lstm_ops.py | Python | apache-2.0 | 25,114 | 0.005017 |
# Copyright 2002, 2004 John T. Reese.
# email: jtr at ofb.net
#
# This file is part of Yammer.
#
# Yammer is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Yammer is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Yammer; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from WebKit.ExceptionHandler import ExceptionHandler, htTitle, singleton
from WebUtils.Funcs import urlEncode, htmlForDict
from WebUtils.HTMLForException import HTMLForException
import YammerUtils, traceback, string, os.path, sys
from types import DictType
from InstallSettings import settings
class TicketExceptionHandler(ExceptionHandler):
    """ExceptionHandler subclass that augments Webware's error pages with a
    pre-filled CVSTrac "new ticket" form so users can file an incident
    report, while simultaneously building a plain-text description of the
    failure (``self.bugdesc``) to seed the ticket body.

    Most ``write*`` methods come in three flavours: ``htmlWrite*`` appends
    to the HTML report, ``descWrite*`` appends to the plain-text bug
    description, and the unprefixed ``write*`` appends to both.

    NOTE(review): this is Python 2 code (``except Exception, e``,
    ``apply``, ``string.join``) and relies on ExceptionHandler internals
    such as ``self._tra``, ``self._exc`` and ``self._maxValueLength``.
    """

    def __init__(self, application, transaction, excInfo):
        ExceptionHandler.__init__(self, application, transaction, excInfo)

    def getGaleId(self):
        # Return the logged-in username from the session, or None when there
        # is no session / no user.  Used for the ticket's contact field.
        trans= self._tra
        if trans.hasSession():
            session= trans.session()
            if session.hasValue('username'):
                username= session.value('username')
                return username
        return None

    def publicErrorPage(self):
        # Error page shown to end users: generic message plus the
        # incident-report form produced by generateDebugInfo().
        html= '''<html>
<head>
<title>Error</title>
</head>
<body fgcolor=black bgcolor=white>
%s
<p> %s
''' % (htTitle('Error'), self.setting('UserErrorMessage'))
        debugInfo= self.generateDebugInfo()
        html += debugInfo[0]
        html += '</body></html>'
        return html

    def privateErrorPage(self):
        ''' Returns an HTML page intended for the developer with useful information such as the traceback. '''
        html = ['''
<html>
<head>
<title>Error</title>
</head>
<body fgcolor=black bgcolor=white>
%s
<p> %s''' % (htTitle('Error'), self.setting('UserErrorMessage'))]
        html.append(self.htmlDebugInfo())
        html.append('</body></html>')
        return string.join(html, '')

    def htmlDebugInfo(self):
        # Form plus debug report, separated by a horizontal rule.
        return string.join(self.generateDebugInfo(), '<hr>')

    def generateDebugInfo(self):
        ''' Return HTML-formatted debugging information about the current exception. '''
        # writeHTML() (inherited) drives the write* callbacks below, filling
        # both self.html and self.bugdesc as a side effect.
        self.html= []
        self.bugdesc= "(please click *Edit* and enter a brief description of " + \
            "what you were doing, here)\n\n====\n"
        self.reporttitle= 'unexpected error'
        self.writeHTML()
        html= ''.join(self.html)
        self.html= None
        contact= self.getGaleId()
        if contact:
            contact= 'gale ' + contact
        else:
            contact= ''
        version= YammerUtils.getVersionString()
        # Escape double quotes so the description survives the hidden input.
        desc= self.bugdesc.replace('"', '&quot;')
        title= self.reporttitle
        return ("""<form method="post" action="http://cvstrac.ofb.net/tktnew">
<input type="hidden" name="t" value="%(title)s">
<input type="hidden" name="w" value="jtr">
<input type="hidden" name="c" value="%(contact)s">
<input type="hidden" name="s" value="yammer.net">
<input type="hidden" name="v" value="%(version)s">
<input type="hidden" name="y" value="event">
<input type="hidden" name="r" value="3">
<input type="hidden" name="p" value="3">
<input type="hidden" name="d" value="%(desc)s">
You can file an incident report about this error. If you file
an incident report, relevant information about the problem will
be saved in the bug database and you will be given a chance to
type in extra information, such as a description of what you
were doing. Filling out an incident report is very helpful and
makes it much more likely that the developer will be able to fix
the problem. If you would like to file an incident report,
please click here:<p>
<input type="submit" name="submit" value="submit incident report">
""" % locals(), html)

    def htmlWrite(self, s):
        # HTML report only.
        ExceptionHandler.write(self, s)

    def descWrite(self, s):
        # Plain-text bug description only.
        self.bugdesc += str(s)

    def write(self, s):
        self.htmlWrite(s)
        self.descWrite(s)

    def htmlWriteln(self, s):
        ExceptionHandler.writeln(self, s)

    def descWriteln(self, s):
        self.bugdesc += str(s) + '\n\n'

    def writeln(self, s):
        self.htmlWriteln(s)
        self.descWriteln(s)

    def writeDict(self, d):
        # Render a dictionary to the HTML report and, key by sorted key, to
        # the plain-text description.
        self.htmlWriteln(htmlForDict(d, filterValueCallBack=self.filterDictValue,
            maxValueLength=self._maxValueLength))
        keys= d.keys()
        keys.sort()
        for key in keys:
            self.descWrite(self.descRepr(key) + ':')
            values= string.split(str(d[key]), '\n')
            self.descWriteln(values[0])
            for value in values[1:]:
                self.descWriteln(' ' + self.descRepr(value))

    def htmlWriteTable(self, listOfDicts, keys=None):
        """
        Writes a table whose contents are given by listOfDicts. The
        keys of each dictionary are expected to be the same. If the
        keys arg is None, the headings are taken in alphabetical order
        from the first dictionary. If listOfDicts is "false", nothing
        happens.
        The keys and values are already considered to be HTML.
        Caveat: There's no way to influence the formatting or to use
        column titles that are different than the keys.
        Note: Used by writeAttrs().
        """
        if not listOfDicts:
            return
        if keys is None:
            keys = listOfDicts[0].keys()
            keys.sort()
        wr = self.htmlWriteln
        wr('<table>\n<tr>')
        for key in keys:
            wr('<td bgcolor=#F0F0F0><b>%s</b></td>' % key)
        wr('</tr>\n')
        for row in listOfDicts:
            wr('<tr>')
            for key in keys:
                wr('<td bgcolor=#F0F0F0>%s</td>' % self.filterTableValue(row[key], key, row, listOfDicts))
            wr('</tr>\n')
        wr('</table>')

    def descWriteTable(self, listOfDicts, keys=None):
        # Plain-text counterpart of htmlWriteTable: one "keys:" header line
        # followed by one {value}-per-cell line per row.
        if not listOfDicts: return
        if keys is None:
            keys= listOfDicts[0].keys()
            keys.sort()
        wrp= self.descWrite
        wr= self.descWriteln
        wr('keys: ' + string.join(keys, ' '))
        for row in listOfDicts:
            for key in keys:
                wrp('{%s} ' % self.filterTableValue(row[key], key, row,
                    listOfDicts))
            wr('')

    def writeTable(self, listOfDicts, keys=None):
        self.htmlWriteTable(listOfDicts, keys)
        self.descWriteTable(listOfDicts, keys)

    def htmlWriteTraceback(self):
        self.htmlWriteTitle('Traceback')
        self.htmlWrite('<p> <i>%s</i>' % self.servletPathname())
        self.htmlWrite(HTMLForException(self._exc))

    def htmlWriteTitle(self, s):
        self.htmlWriteln(htTitle(s))

    def writeTitle(self, s):
        self.htmlWriteTitle(s)
        self.descWriteln('\n\n====\n\n' + s)

    def writeAttrs2(self, obj, attrNames, reprfunc, wrTableFunc):
        """
        Writes the attributes of the object as given by attrNames.
        Tries obj._name first, followed by obj.name(). Is resilient
        regarding exceptions so as not to spoil the exception report.
        """
        rows = []
        for name in attrNames:
            value = getattr(obj, '_'+name, singleton) # go for data attribute
            try:
                if value is singleton:
                    value = getattr(obj, name, singleton) # go for method
                    if value is singleton:
                        value = '(could not find attribute or method)'
                    else:
                        try:
                            if callable(value):
                                value = value()
                        except Exception, e:
                            value = '(exception during method call: %s: %s)' % (e.__class__.__name__, e)
                        value = reprfunc(value)
                else:
                    value = reprfunc(value)
            except Exception, e:
                value = '(exception during value processing: %s: %s)' % (e.__class__.__name__, e)
            rows.append({'attr': name, 'value': value})
        wrTableFunc(rows, ('attr', 'value'))

    def writeAttrs(self, obj, attrNames):
        # Emit the same attribute table twice: once HTML, once plain text.
        self.writeAttrs2(obj, attrNames, self.repr, self.htmlWriteTable)
        self.writeAttrs2(obj, attrNames, self.descRepr, self.descWriteTable)

    def descRepr(self, x):
        # repr() variant for the plain-text description: recurses into
        # dicts, truncates long values, and wiki-quotes '_' / '*' so
        # CVSTrac markup does not mangle them.
        if type(x) is DictType:
            reps= []
            for k in x.keys():
                reps.append(self.descRepr(k) + ': ' + self.descRepr(x[k]))
            return '{' + string.join(reps, ', ') + '}'
        else:
            rep = repr(x)
            if self._maxValueLength and len(rep) > self._maxValueLength:
                rep = rep[:self._maxValueLength] + '...'
            if rep.find('_') >= 0 or rep.find('*') >= 0:
                rep= '{quote: ' + rep + '}'
            return rep

    def writeTraceback(self):
        # Emit the traceback to both outputs and derive a one-line report
        # title ("incident in module.function: last traceback line").
        self.htmlWriteTraceback()
        self.descWriteln('Traceback:')
        self.descWriteln('_%s_' % self.servletPathname())
        excInfo= self._exc
        out = apply(traceback.format_exception, excInfo)
        (filename, function)= (None, None)
        for line in out:
            if string.find(line, 'File ') != -1:
                # Parse 'File "<name>", line N, in <func>' traceback lines.
                i1= line.find('"')
                if i1 > 0:
                    i2= line.find('"', i1+1)
                    if i2 > 0:
                        filename= line[i1+1:i2]
                        funcsep= ' in '
                        i3= line.find(funcsep, i2 + 1)
                        if i3 > 0:
                            i4= line.find('\n', i3 + 1)
                            function= line[i3 + len(funcsep):i4]
            for l in line.strip().split('\n'):
                self.descWriteln(l)
        if len(out) > 1 and filename is not None and function is not None:
            finalline= out[-1].strip()
            sys.stdout.flush()
            filename= os.path.splitext(os.path.basename(filename))[0]
            if filename.startswith('_'):
                # Webware servlet modules look like '_dir_Page'; keep the
                # last two underscore-separated parts as 'dir.Page'.
                fnparts= filename.split('_')
                filename= '%s.%s' % (fnparts[-2], fnparts[-1])
            self.reporttitle= "%-70.70s" % \
                ("incident in %s.%s: %s" % (filename, function, finalline))
| nandub/yammer | lib/TicketExceptionHandler.py | Python | gpl-2.0 | 9,659 | 0.013562 |
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from taskflow.utils import reflection
class Flow(six.with_metaclass(abc.ABCMeta)):
    """Abstract base for every flow implementation.

    A flow is a structure describing how a set of tasks (and nested
    subflows) relate to one another; the concrete subclass decides the
    exact relationship semantics, while this base only contributes a
    human-readable (not necessarily unique) name and the common
    interface.

    NOTE(harlowja): if a flow is placed in another flow as a subflow, a
    desired way to compose flows together, then it is valid and permissible
    that during execution the subflow & parent flow may be flattened into a
    new flow. Since a flow is just a 'structuring' concept this is typically
    a behavior that should not be worried about (as it is not visible to the
    user), but it is worth mentioning here.

    Subclasses must provide: ``add``, ``__len__``, ``requires`` and
    ``provides``.
    """

    def __init__(self, name):
        # Any stringifiable value is acceptable; the name is informational.
        self._name = str(name)

    @property
    def name(self):
        """A non-unique name for this flow (human readable)"""
        return self._name

    @abc.abstractmethod
    def __len__(self):
        """Returns how many items are in this flow."""

    def __str__(self):
        # Format: "<ClassName>: <name>; <item count>"
        return "%s: %s; %s" % (reflection.get_class_name(self),
                               self.name, len(self))

    @abc.abstractmethod
    def add(self, *items):
        """Adds a given item/items to this flow."""

    @abc.abstractproperty
    def requires(self):
        """Browse argument requirement names this flow requires to run."""

    @abc.abstractproperty
    def provides(self):
        """Browse argument names provided by the flow."""
| ntt-sic/taskflow | taskflow/flow.py | Python | apache-2.0 | 2,637 | 0 |
import os
import atexit
import string
import importlib
import threading
import socket
from time import sleep
def BYTE(message):
    """Encode *message* as a CRLF-terminated UTF-8 line for the IRC socket."""
    line = "%s\r\n" % message
    return line.encode("UTF-8")
class UserInput(threading.Thread):
    """Daemon thread that reads operator commands from stdin and drives the
    IRC bot (``self.parent``).

    Recognized slash-commands are /reload, /quit, /join, /leave and /help;
    any other input is sent as a PRIVMSG to the currently focused channel.
    """

    isRunning = False
    parent = None

    def __init__(self, bot):
        """Bind to the owning bot and immediately start the input loop."""
        super().__init__()
        self.parent = bot
        self.setDaemon(True)
        self.isRunning = False
        self.start()

    def createMessage(self, message):
        """Join the already-split command words back into one string.

        BUG FIX (idiom): the original hand-rolled loop that appended a
        space after every word except the last is exactly ``" ".join``.
        """
        return " ".join(message)

    def run(self):
        """Read one line per iteration until /quit or a fatal error."""
        self.isRunning = True
        while (self.isRunning):
            try:
                message = input()
                message = message.split(" ")
                if (message[0] != ""):
                    if (message[0] == "/r" or message[0] == "/reload"):
                        self.parent.reloadAll()
                    elif (message[0] == "/q" or message[0] == "/quit"):
                        print("Quitting.")
                        self.parent.quit()
                        self.isRunning = False
                    elif (message[0] == "/j" or message[0] == "/join"):
                        # Exactly one channel argument is required.
                        if (len(message) != 2):
                            print("Incorrect usage.")
                        else:
                            self.parent.switch(message[1])
                    elif (message[0] == "/l" or message[0] == "/leave"):
                        if (len(message) >= 2):
                            if (len(message) > 2):
                                # Leave several channels, then refocus.
                                for i in range(1, len(message)):
                                    self.parent.leave(message[i], False)
                                if (len(self.parent.channels) > 0):
                                    self.parent.focusedChannel = self.parent.channels[0]
                                    print("Left channels. Focusing on %s" % self.parent.focusedChannel)
                                else:
                                    print("No channels left.")
                            else:
                                self.parent.leave(message[1], False)
                                if (len(self.parent.channels) > 0):
                                    self.parent.focusedChannel = self.parent.channels[0]
                                    print("Left %s. Focusing on %s" % (message[1], self.parent.focusedChannel))
                                else:
                                    print("No channels left.")
                        else:
                            print("Incorrect usage.")
                    elif (message[0] == "/?" or message[0] == "/help"):
                        print("1. Type anything to chat with others in %s." % self.parent.focusedChannel)
                        print("2. /? or /help -- Bring up the bot commands.")
                        print("3. /j or /join -- Join a new channel. Channel focus will switch over.")
                        print("4. /l or /leave -- Leave channel. Channel focus will change.")
                        print("5. /r or /reload -- Reload all plugins. (Hotswapping is supported.)")
                        print("6. /q or /quit -- Quit the bot.")
                    else:
                        self.parent.s.send(BYTE("PRIVMSG %s :%s" % (self.parent.focusedChannel, self.createMessage(message))))
            except OSError as sockError:
                # BUG FIX: the original caught WindowsError, which is a
                # NameError on non-Windows platforms (it is an alias of
                # OSError on Windows, so OSError is a superset there), and
                # called socket.close(socket.SHUT_RDWR) -- close() takes no
                # argument; shutdown() is the call that does.
                print(sockError)
                if (self.parent.s != None):
                    try:
                        self.parent.s.shutdown(socket.SHUT_RDWR)
                    except OSError:
                        pass  # Socket may already be disconnected.
                    self.parent.s.close()
                    self.parent.s = None
                self.parent.connect()
            except Exception as error:
                print(error)
| tommai78101/IRCBot | UserInput.py | Python | mit | 2,759 | 0.029358 |
"""Selectors for Home Assistant."""
from __future__ import annotations
from collections.abc import Callable
from typing import Any, cast
import voluptuous as vol
from homeassistant.const import CONF_MODE, CONF_UNIT_OF_MEASUREMENT
from homeassistant.util import decorator
SELECTORS = decorator.Registry()
def validate_selector(config: Any) -> dict:
    """Validate a selector."""
    if not isinstance(config, dict):
        raise vol.Invalid("Expected a dictionary")

    if len(config) != 1:
        raise vol.Invalid(f"Only one type can be specified. Found {', '.join(config)}")

    # The single key of the mapping names the selector type.
    (selector_type,) = config

    selector_class = SELECTORS.get(selector_type)
    if selector_class is None:
        raise vol.Invalid(f"Unknown selector type {selector_type} found")

    selector_config = config[selector_type]

    # Selectors can be empty
    if selector_config is None:
        return {selector_type: {}}

    return {
        selector_type: cast(dict, selector_class.CONFIG_SCHEMA(selector_config))
    }
class Selector:
    """Base class for selectors."""

    # Each registered subclass supplies a voluptuous schema that validates
    # the options allowed for that selector type.
    CONFIG_SCHEMA: Callable
@SELECTORS.register("entity")
class EntitySelector(Selector):
    """Selector of a single entity."""

    # All filters are optional; an empty config offers every entity.
    CONFIG_SCHEMA = vol.Schema(
        {
            # Integration that provided the entity
            vol.Optional("integration"): str,
            # Domain the entity belongs to
            vol.Optional("domain"): str,
            # Device class of the entity
            vol.Optional("device_class"): str,
        }
    )
@SELECTORS.register("device")
class DeviceSelector(Selector):
    """Selector of a single device."""

    # All filters are optional; an empty config offers every device.
    CONFIG_SCHEMA = vol.Schema(
        {
            # Integration linked to it with a config entry
            vol.Optional("integration"): str,
            # Manufacturer of device
            vol.Optional("manufacturer"): str,
            # Model of device
            vol.Optional("model"): str,
            # Device has to contain entities matching this selector
            vol.Optional("entity"): EntitySelector.CONFIG_SCHEMA,
        }
    )
@SELECTORS.register("area")
class AreaSelector(Selector):
    """Selector of a single area."""

    # Optional entity/device sub-filters narrow which areas qualify.
    CONFIG_SCHEMA = vol.Schema(
        {
            vol.Optional("entity"): vol.Schema(
                {
                    vol.Optional("domain"): str,
                    vol.Optional("device_class"): str,
                    vol.Optional("integration"): str,
                }
            ),
            vol.Optional("device"): vol.Schema(
                {
                    vol.Optional("integration"): str,
                    vol.Optional("manufacturer"): str,
                    vol.Optional("model"): str,
                }
            ),
        }
    )
@SELECTORS.register("number")
class NumberSelector(Selector):
    """Selector of a numeric value."""

    # min/max are mandatory; step must be at least 0.001.
    CONFIG_SCHEMA = vol.Schema(
        {
            vol.Required("min"): vol.Coerce(float),
            vol.Required("max"): vol.Coerce(float),
            vol.Optional("step", default=1): vol.All(
                vol.Coerce(float), vol.Range(min=1e-3)
            ),
            vol.Optional(CONF_UNIT_OF_MEASUREMENT): str,
            # Frontend rendering: free-form "box" or a "slider" widget.
            vol.Optional(CONF_MODE, default="slider"): vol.In(["box", "slider"]),
        }
    )
@SELECTORS.register("addon")
class AddonSelector(Selector):
    """Selector of a add-on."""

    # No options supported yet.
    CONFIG_SCHEMA = vol.Schema({})
@SELECTORS.register("boolean")
class BooleanSelector(Selector):
    """Selector of a boolean value."""

    # No options supported.
    CONFIG_SCHEMA = vol.Schema({})
@SELECTORS.register("time")
class TimeSelector(Selector):
    """Selector of a time value."""

    # No options supported.
    CONFIG_SCHEMA = vol.Schema({})
@SELECTORS.register("target")
class TargetSelector(Selector):
    """Selector of a target value (area ID, device ID, entity ID etc).

    Value should follow cv.ENTITY_SERVICE_FIELDS format.
    """

    # Same optional entity/device sub-filters as AreaSelector.
    CONFIG_SCHEMA = vol.Schema(
        {
            vol.Optional("entity"): vol.Schema(
                {
                    vol.Optional("domain"): str,
                    vol.Optional("device_class"): str,
                    vol.Optional("integration"): str,
                }
            ),
            vol.Optional("device"): vol.Schema(
                {
                    vol.Optional("integration"): str,
                    vol.Optional("manufacturer"): str,
                    vol.Optional("model"): str,
                }
            ),
        }
    )
@SELECTORS.register("action")
class ActionSelector(Selector):
    """Selector of an action sequence (script syntax)."""

    # No options supported.
    CONFIG_SCHEMA = vol.Schema({})
@SELECTORS.register("object")
class ObjectSelector(Selector):
    """Selector for an arbitrary object."""

    # No options supported.
    CONFIG_SCHEMA = vol.Schema({})
@SELECTORS.register("text")
class StringSelector(Selector):
    """Selector for a multi-line text string."""

    # multiline switches the frontend between a single-line input and a textarea.
    CONFIG_SCHEMA = vol.Schema({vol.Optional("multiline", default=False): bool})
@SELECTORS.register("select")
class SelectSelector(Selector):
    """Selector for an single-choice input select."""

    # At least one option string is required.
    CONFIG_SCHEMA = vol.Schema(
        {vol.Required("options"): vol.All([str], vol.Length(min=1))}
    )
| jawilson/home-assistant | homeassistant/helpers/selector.py | Python | apache-2.0 | 5,138 | 0.000779 |
# -*- coding: utf-8 -*-
from datetime import date, timedelta
from subprocess import check_output
from celery import task
from django.template.loader import render_to_string
from django.db.models import Sum
from django.conf import settings
from django.contrib.auth.models import User
from django.core.mail import send_mail
from django.utils.translation import ugettext as _
from .models import Holiday, Project, Task
def get_fortune():
    """Return a random fortune from the system ``fortune`` binary as text.

    BUG FIX: on Python 3 ``check_output`` returns ``bytes``; embedding that
    in a mail template rendered as ``b'...'`` noise.  Decode to ``str``
    (replacing undecodable bytes so a weird fortune cannot crash the task).
    """
    fortune = check_output(['/usr/games/fortune'])
    return fortune.decode('utf-8', errors='replace')
def verify_yesterday_tasks(user):
    """Return True when *user* logged at least one task yesterday.

    Registered holidays and weekends count as free days and always
    return True.
    """
    yesterday = date.today() - timedelta(days=1)
    was_free_day = (Holiday.objects.filter(date=yesterday).exists()
                    or yesterday.weekday() in [5, 6])
    if was_free_day:
        return True
    return Task.objects.filter(date=yesterday, owner=user).exists()
@task()
def disable_overdue_projects():
    """Deactivate every active project whose due date has passed."""
    today = date.today()
    overdue = Project.objects.filter(is_active=True, due_date__lt=today).all()
    # Saved one-by-one rather than via a bulk update() so any per-model
    # save() logic still runs for each project.
    for project in overdue:
        project.is_active = False
        project.save()
@task()
def weekly_irregular_users():
    """Sends a weekly hall of shame email to admin users."""
    subject = "Weekly hall of shame"
    # Active users that are not excluded via the alert blacklist.
    watched = [u for u in User.objects.filter(is_active=True).all()
               if u.username not in settings.ALERT_USERS_BLACKLIST]
    users_few_days = [u for u in watched if u.total_days_last_week() < 5]
    users_few_hours = [u for u in watched if u.avg_hours_last_week() < 7]
    data = {
        "users_few_days": users_few_days,
        "users_few_hours": users_few_hours
    }
    text_body = render_to_string(
        'mails/weekly_shame_mail.txt', data)
    to_mail = [settings.ADMIN_USERS_EMAIL]
    print(text_body)
    send_mail(
        subject, text_body, settings.DEFAULT_FROM_EMAIL, to_mail)
@task()
def weekly_summary_user(user):
    """Mail *user* a summary of the tasks logged during the last week.

    Nothing is sent when the user has no tasks for the period.
    """
    subject = "Resumen semanal de tareas"
    weekly_tasks = user.last_week_tasks()
    if not weekly_tasks:
        return
    data = {
        "username": user.username,
        "week_days": user.total_days_last_week(),
        "total_hours": user.total_hours_last_week(),
        "avg_hours": user.avg_hours_last_week(),
        "last_task": user.get_last_task(),
        "weekly_tasks": weekly_tasks
    }
    text_body = render_to_string(
        'mails/weekly_tasks.txt', data)
    to_mail = [user.email]
    print(text_body)
    send_mail(
        subject, text_body, settings.DEFAULT_FROM_EMAIL, to_mail)
@task()
def send_alert_to_user(user):
    """Mail *user* a reminder that no task was created yesterday."""
    subject = "No creaste tareas en Relojito ayer"
    data = {
        "username": user.username,
        "project_url": settings.SITE_URL,
        "last_task": user.get_last_task(),
        "fortune": get_fortune()
    }
    text_body = render_to_string(
        'mails/no_tasks_yesterday.txt', data)
    to_mail = [user.email]
    print(text_body)
    send_mail(
        subject, text_body, settings.DEFAULT_FROM_EMAIL, to_mail)
@task()
def mail_alert_no_created_task():
    """Sends an alert if a user didn't create any tasks the
    day before."""
    for user in User.objects.filter(is_active=True).all():
        # Skip users without email and those on the alert blacklist.
        if not user.email or user.username in settings.ALERT_USERS_BLACKLIST:
            continue
        if not verify_yesterday_tasks(user):
            send_alert_to_user(user)
@task()
def mail_weekly_summary():
    """Sends a weekly summary to all users."""
    for user in User.objects.filter(is_active=True).all():
        # Skip users without email and those on the alert blacklist.
        if not user.email or user.username in settings.ALERT_USERS_BLACKLIST:
            continue
        weekly_summary_user(user)
@task()
def mail_new_year_greeting():
    """Sends a happy new year greeting."""
    users = User.objects.filter(is_active=True).all()
    for user in users:
        if user.email and user.username not in settings.ALERT_USERS_BLACKLIST:
            # NOTE(review): this reuses the "created no task yesterday" guard
            # from mail_alert_no_created_task -- it looks like copy-paste;
            # confirm the greeting really should skip users who logged tasks.
            if not verify_yesterday_tasks(user):
                # Lifetime stats: all tasks/projects, plus total hours logged.
                taskset = user.get_tasks()
                projects = user.get_projects()
                tx = taskset.aggregate(Sum('total_hours'))
                total_hours = tx['total_hours__sum']
                subject = _(u"Feliz año nuevo de parte de Relojito")
                body = _(u"""Hola %(username)s, Relojito te cuenta que hasta ahora completaste %(total_tareas)s tareas,
para un total de %(total_proyectos)s proyectos. En total, cargaste %(total_horas)s horas.\n
Más allá de las estadísticas, Relojito te desea un excelente comienzo de año!""") % {'total_tareas': len(taskset),
                                                                                     'username': user.first_name,
                                                                                     'total_proyectos': len(projects),
                                                                                     'total_horas': total_hours
                                                                                     }
                to_mail = []
                to_mail.append(user.email)
                print(user.username, subject, body)
                send_mail(subject, body, settings.DEFAULT_FROM_EMAIL, to_mail)
@task()
def mail_alert_new_collaborator(instance):
    """Notify a user that they were added as a collaborator on a project."""
    project_name = instance.project.name
    # NOTE(review): get_absolute_url is not called here; unless it is a
    # property on Project this concatenates a bound method object and would
    # raise TypeError -- verify against the model definition.
    project_url = settings.SITE_URL + instance.project.get_absolute_url
    subject = _(u'You are now a collaborator in %(project_name)s') % {
        'project_name': project_name}
    body = _(u"""Hi, you've been added as a colaborator in %(project_name)s.\n\n
Check the details at %(project_url)s.\n\n Bye!""") % {'project_name': project_name,
                                                      'project_url': project_url}
    to_mail = []
    to_mail.append(instance.user.email)
    send_mail(subject, body, settings.DEFAULT_FROM_EMAIL, to_mail)
| MSA-Argentina/relojito_project | relojito/app/tasks.py | Python | mit | 6,517 | 0.001536 |
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# NOTE(review): patterns() is deprecated since Django 1.8; a plain list of
# url() entries is the modern form.
urlpatterns = patterns('',
    # Examples:
    url(r'^$', 'brbappl.views.index', name='index'),
    url(r'^done$', 'brbappl.views.done', name='done'),
    url(r'^participate$', 'brbappl.views.participate', name='participate'),
    # NOTE(review): this unanchored r'^admin' pattern already matches every
    # /admin... request, so the duplicate r'^admin/' entry below is dead.
    url(r'^admin', include(admin.site.urls)),
    # Catch-all: any single word segment is treated as a contestant name.
    url(r'^(?P<contestant>\w+)$', 'brbappl.views.questionnaire', name='questions'),
    # url(r'^poolgame/', include('poolgame.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
)
| thijsdezoete/BrBaFinals | poolgame/poolgame/urls.py | Python | gpl-2.0 | 816 | 0.003676 |
del x  # unbind the name x; raises NameError if x is not defined
| pyta-uoft/pyta | examples/ending_locations/del_name.py | Python | gpl-3.0 | 6 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Ref: http://doc.qt.io/qt-5/modelview.html#2-1-a-read-only-table
import sys
from PyQt5.QtCore import Qt, QAbstractTableModel, QVariant
from PyQt5.QtWidgets import QApplication, QTableView
class MyData:
    """Tiny in-memory table: a fixed 3x2 grid of string cells."""

    def __init__(self):
        self._num_rows = 3
        self._num_columns = 2
        # One independent list per row (rows must not alias each other);
        # the cells themselves are immutable strings, so '*' is safe.
        self._data = [["hello"] * self._num_columns
                      for _ in range(self._num_rows)]

    def get_num_rows(self):
        """Number of rows in the table."""
        return self._num_rows

    def get_num_columns(self):
        """Number of columns in the table."""
        return self._num_columns

    def get_data(self, row_index, column_index):
        """Return the cell at (row_index, column_index)."""
        return self._data[row_index][column_index]

    def set_data(self, row_index, column_index, value):
        """Overwrite the cell at (row_index, column_index) with *value*."""
        self._data[row_index][column_index] = value
###############################################################################
class MyModel(QAbstractTableModel):
    """Editable Qt table model backed by a MyData instance."""

    def __init__(self, data, parent=None):
        super().__init__(parent)
        # DON'T call this attribute "data": QAbstractItemModel already has a
        # data(index, role) method with that name!
        self._data = data

    def rowCount(self, parent):
        return self._data.get_num_rows()

    def columnCount(self, parent):
        return self._data.get_num_columns()

    def data(self, index, role):
        # Only the display role is served; everything else gets an
        # invalid QVariant.
        if role != Qt.DisplayRole:
            return QVariant()
        return self._data.get_data(index.row(), index.column())

    def setData(self, index, value, role):
        if role == Qt.EditRole:
            try:
                self._data.set_data(index.row(), index.column(), value)
                # Necessary e.g. to dynamically update a QSortFilterProxyModel.
                self.dataChanged.emit(index, index, [Qt.EditRole])
            except Exception as exc:
                print(exc)
                return False
        return True

    def flags(self, index):
        return Qt.ItemIsSelectable | Qt.ItemIsEditable | Qt.ItemIsEnabled
def changedCallback():
    # Slot wired to dataChanged/rowsInserted/rowsRemoved in __main__; just logs.
    print("changed")
if __name__ == '__main__':
    app = QApplication(sys.argv)
    data = MyData()
    table_view = QTableView()
    my_model = MyModel(data)
    # Log every model mutation signal to stdout via changedCallback.
    my_model.dataChanged.connect(changedCallback)
    my_model.rowsInserted.connect(changedCallback)
    my_model.rowsRemoved.connect(changedCallback)
    table_view.setModel(my_model)
    table_view.show()
    # The mainloop of the application. The event handling starts from this point.
    # The exec_() method has an underscore. It is because the exec is a Python keyword. And thus, exec_() was used instead.
    exit_code = app.exec_()
    # The sys.exit() method ensures a clean exit.
    # The environment will be informed, how the application ended.
    sys.exit(exit_code)
| jeremiedecock/snippets | python/pyqt/pyqt5/widget_QTableView_edit_print_signal_when_data_changed.py | Python | mit | 2,784 | 0.002514 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import copy
import locale
import logging
import re
import reportlab
import openerp.tools as tools
from openerp.tools.safe_eval import safe_eval
from openerp.tools.misc import ustr
_logger = logging.getLogger(__name__)
_regex = re.compile('\[\[(.+?)\]\]')
def str2xml(s):
    """XML-escape ``&``, ``<`` and ``>`` in *s*; None/empty becomes ''."""
    escaped = s if s else ''
    # '&' must be escaped first so the entities below are not re-escaped.
    for raw, entity in (('&', '&amp;'), ('<', '&lt;'), ('>', '&gt;')):
        escaped = escaped.replace(raw, entity)
    return escaped
def xml2str(s):
    """Reverse of str2xml: unescape ``&amp;``, ``&lt;``, ``&gt;``; None becomes ''."""
    text = s if s else ''
    # Same replacement order as the original: '&amp;' is unescaped first.
    for entity, raw in (('&amp;', '&'), ('&lt;', '<'), ('&gt;', '>')):
        text = text.replace(entity, raw)
    return text
def _child_get(node, self=None, tagname=None):
    """Yield *node*'s children, honouring the dynamic RML attributes
    ``rml_loop``, ``rml_except`` and ``rml_tag`` (each evaluated against
    ``self.localcontext``), optionally filtered by *tagname*.

    Python 2 module (``except Exception, e`` syntax).
    """
    for n in node:
        # rml_loop: re-yield this child once per context produced by the
        # evaluated expression, merging each context into localcontext.
        if self and self.localcontext and n.get('rml_loop'):
            for ctx in safe_eval(n.get('rml_loop'),{}, self.localcontext):
                self.localcontext.update(ctx)
                if (tagname is None) or (n.tag==tagname):
                    # rml_except acts as a guard: any failure skips this pass.
                    if n.get('rml_except', False):
                        try:
                            safe_eval(n.get('rml_except'), {}, self.localcontext)
                        except GeneratorExit:
                            continue
                        except Exception, e:
                            _logger.info('rml_except: "%s"', n.get('rml_except',''), exc_info=True)
                            continue
                    # rml_tag rewrites the child's tag/attributes dynamically.
                    if n.get('rml_tag'):
                        try:
                            (tag,attr) = safe_eval(n.get('rml_tag'),{}, self.localcontext)
                            n2 = copy.deepcopy(n)
                            n2.tag = tag
                            n2.attrib.update(attr)
                            yield n2
                        except GeneratorExit:
                            yield n
                        except Exception, e:
                            _logger.info('rml_tag: "%s"', n.get('rml_tag',''), exc_info=True)
                            yield n
                    else:
                        yield n
            continue
        # Non-loop path: rml_except guard -- any failure skips the child.
        if self and self.localcontext and n.get('rml_except'):
            try:
                safe_eval(n.get('rml_except'), {}, self.localcontext)
            except GeneratorExit:
                continue
            except Exception, e:
                _logger.info('rml_except: "%s"', n.get('rml_except',''), exc_info=True)
                continue
        # Non-loop rml_tag: on success also clears the tagname filter so the
        # trailing yield below fires unconditionally.
        if self and self.localcontext and n.get('rml_tag'):
            try:
                (tag,attr) = safe_eval(n.get('rml_tag'),{}, self.localcontext)
                n2 = copy.deepcopy(n)
                n2.tag = tag
                n2.attrib.update(attr or {})
                yield n2
                tagname = ''
            except GeneratorExit:
                pass
            except Exception, e:
                _logger.info('rml_tag: "%s"', n.get('rml_tag',''), exc_info=True)
                pass
        # Default path: yield the child when it passes the tagname filter.
        if (tagname is None) or (n.tag==tagname):
            yield n
def _process_text(self, txt):
    """Translate ``txt`` according to the language in the local context,
    replace dynamic ``[[expr]]`` with their real value, then escape
    the result for XML.

    :param str txt: original text to translate (must NOT be XML-escaped)
    :return: translated text, with dynamic expressions evaluated and
             with special XML characters escaped (``&,<,>``).
    """
    if not self.localcontext:
        return str2xml(txt)
    if not txt:
        return ''
    result = ''
    # split() on the capturing regex alternates literal text segments with
    # the bodies of [[expr]] placeholders.
    sps = _regex.split(txt)
    while sps:
        # This is a simple text to translate
        to_translate = tools.ustr(sps.pop(0))
        result += tools.ustr(self.localcontext.get('translate', lambda x:x)(to_translate))
        if sps:
            txt = None
            try:
                expr = sps.pop(0)
                txt = safe_eval(expr, self.localcontext)
                if txt and isinstance(txt, basestring):
                    txt = tools.ustr(txt)
            except Exception:
                # Evaluation failures are logged and the placeholder dropped.
                _logger.info("Failed to evaluate expression [[ %s ]] with context %r while rendering report, ignored.", expr, self.localcontext)
            if isinstance(txt, basestring):
                result += txt
            elif txt and (txt is not None) and (txt is not False):
                result += ustr(txt)
    return str2xml(result)
def text_get(node):
    # Concatenate the (unicode-coerced) .text of every child of *node*.
    return ''.join([ustr(n.text) for n in node])
units = [
(re.compile('^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),
(re.compile('^(-?[0-9\.]+)\s*cm$'), reportlab.lib.units.cm),
(re.compile('^(-?[0-9\.]+)\s*mm$'), reportlab.lib.units.mm),
(re.compile('^(-?[0-9\.]+)\s*$'), 1)
]
def unit_get(size):
    """Convert a size string ('2cm', '10 mm', '1.5in', '12') to points
    using the module-level ``units`` table; returns False for empty or
    unparsable input."""
    global units
    if size:
        if size.find('.') == -1:
            # No '.' present: the string may use the locale's decimal
            # separator (e.g. ','); normalise it to '.' before matching.
            decimal_point = '.'
            try:
                decimal_point = locale.nl_langinfo(locale.RADIXCHAR)
            except Exception:
                decimal_point = locale.localeconv()['decimal_point']
            size = size.replace(decimal_point, '.')
        for unit in units:
            res = unit[0].search(size, 0)
            if res:
                return unit[1]*float(res.group(1))
    return False
def tuple_int_get(node, attr_name, default=None):
    """Parse a comma-separated node attribute into ints, or *default* when
    the attribute is absent/empty.  (Python 2 module: ``map`` returns a
    list here.)"""
    if not node.get(attr_name):
        return default
    return map(int, node.get(attr_name).split(','))
def bool_get(value):
    """Interpret an RML boolean attribute: true for "1" or (any-case) "yes".

    Robustness fix: coerce *value* to str before .lower(), so None or other
    non-string input returns False instead of raising AttributeError (the
    old code only crashed when str(value) != "1").
    """
    value = str(value)
    return value == "1" or value.lower() == 'yes'
def attr_get(node, attrs, dict=None):
    """Extract node attributes: names in *attrs* are parsed as units; names
    in *dict* (name -> 'str'|'bool'|'int'|'unit'|'float') are converted to
    that type.  Missing attributes are silently skipped.

    NOTE(review): the ``dict`` parameter shadows the builtin, but it is a
    keyword-visible name and cannot be renamed without breaking callers.
    """
    if dict is None:
        dict = {}
    res = {}
    for name in attrs:
        if node.get(name):
            res[name] = unit_get(node.get(name))
    for key in dict:
        if node.get(key):
            if dict[key]=='str':
                res[key] = tools.ustr(node.get(key))
            elif dict[key]=='bool':
                res[key] = bool_get(node.get(key))
            elif dict[key]=='int':
                res[key] = int(node.get(key))
            elif dict[key]=='unit':
                res[key] = unit_get(node.get(key))
            elif dict[key] == 'float' :
                res[key] = float(node.get(key))
    return res
| ayepezv/GAD_ERP | openerp/report/render/rml2pdf/utils.py | Python | gpl-3.0 | 6,163 | 0.008924 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    # Adds a required (non-null, unique) user FK to AuthSettings.
    # default=1 backfills existing rows with user pk 1;
    # preserve_default=False then drops that default from the schema.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('arm_settings', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='authsettings',
            name='user_id',
            field=models.ForeignKey(related_name='user_id', default=1, to=settings.AUTH_USER_MODEL, unique=True),
            preserve_default=False,
        ),
    ]
| shwetams/arm-samples-py | arm_basic_samples/arm_settings/migrations/0002_authsettings_user_id.py | Python | mit | 600 | 0.001667 |
#!/usr/bin/env python
import sys
import argparse
def less(a, b):
    """Comparator used when lower objective values are better (minimisation)."""
    return a < b
def greater(a, b):
    """Comparator used when higher objective values are better (maximisation)."""
    return a > b
better = less
def dominated(x, y):
    """Return True if point *x* is dominated by *y* under the module-level
    ``better`` comparator (y no worse everywhere, strictly better somewhere);
    None on length mismatch.  Python 2 module (print statement below).

    NOTE(review): the local variable shadows the function name -- confusing
    but harmless, since the function does not recurse.
    """
    if len(x) != len(y):
        print "Error: size mismatch!"
        return None
    dominated = False
    for i,j in zip(x,y):
        # x is strictly better in some coordinate -> y cannot dominate it.
        if better(i, j):
            return False
        # y strictly better in some coordinate.
        if better(j, i):
            dominated = True
    return dominated
def dominates(x, y):
    """Return True if *x* dominates *y* (mirror of ``dominated``)."""
    return dominated(y, x)
# Usage:
# echo '1,0,1[;0,0,0...]' | ./domination.py [-h] -t {less,greater} -a {dominated,dominates} '0,1,0[;1,1,1...]'
# Reading the input (accepts either '1,0,0;0,0,0;1,1,1' or '1,0,0;0,0,0\n1,1,1', for instance)
tmp = [i.split(';') for i in sys.stdin.read().splitlines()]
points_dataset = []
for i in tmp:
    for j in i:
        if len(j) == 0: continue
        points_dataset.append([float(k) for k in j.split(',')])
#print points_dataset
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--type", required=True, choices=['less','greater'], help="Comparison type: less or greater")
parser.add_argument("-a", "--action", required=True, choices=['dominated','dominates'], help="Action type: dominated or dominates")
parser.add_argument("point", help="the point to compare against the dataset of points; format: 'x1,...,xN'")
args = parser.parse_args()
# Select the global "better" comparator (minimisation vs maximisation).
if args.type=='less':
    better = less
elif args.type=='greater':
    better = greater
if len(args.point.split(';')) > 1:
    raise Exception("Only one point is accepted! For instance: domination.py '0,1,0'")
point = [float(i) for i in args.point.split(',')]
result = None
# NOTE(review): 'exit' shadows the builtin; kept for byte-compatibility.
# Exit status 0 when the relation held for at least one dataset point.
exit = 1 # Either the point does not dominate a single one or it isn't dominated by one of them
if args.action=='dominated':
    for y in points_dataset:
        result = dominated(point,y)
        if result: exit = 0
        print "Is", point, "dominated by", y, "? ->", result
elif args.action=='dominates':
    for y in points_dataset:
        result = dominates(point,y)
        if result: exit = 0
        print "Does", point, "dominate", y, "? ->", result
sys.exit(exit)
| daaugusto/ppi | script/domination-many.py | Python | gpl-3.0 | 2,043 | 0.026921 |
import sys; sys.path.append("../") # noqa
import unittest
import copy
import pymongo
import datetime
import bson
from baemo.connection import Connections
from baemo.delimited import DelimitedDict
from baemo.references import References
from baemo.projection import Projection
from baemo.entity import Entity
from baemo.exceptions import ModelTargetNotSet
from baemo.exceptions import ModelNotUpdated
from baemo.exceptions import ModelNotFound
from baemo.exceptions import ModelNotDeleted
from baemo.exceptions import ProjectionTypeMismatch
from baemo.exceptions import DereferenceError
class TestModel(unittest.TestCase):
    def setUp(self):
        # Rebind module-level globals so each test gets a fresh Entity pair
        # bound to a per-test-method collection (avoids cross-test pollution).
        global connection_name, collection_name, TestModel
        connection_name = "baemo"
        collection_name = "{}_{}".format(self.__class__.__name__, self._testMethodName)
        # connect=False defers the actual MongoDB connection until first use.
        connection = pymongo.MongoClient(connect=False)[connection_name]
        Connections.set(connection_name, connection)
        TestModel, TestCollection = Entity("TestModel", {
            "connection": connection_name,
            "collection": collection_name
        })
    def tearDown(self):
        # Drop the per-test collection created in setUp.
        global connection_name, collection_name
        Connections.get(connection_name).drop_collection(collection_name)
# __init__
def test___init____no_params(self):
m = TestModel()
self.assertEqual(m.id_attribute, "_id")
self.assertEqual(type(m.collection), str)
self.assertEqual(type(m.target), DelimitedDict)
self.assertEqual(type(m.attributes), DelimitedDict)
self.assertEqual(type(m.references), References)
self.assertEqual(type(m.find_projection), Projection)
self.assertEqual(type(m.get_projection), Projection)
self.assertEqual(m._delete, False)
self.assertEqual(type(m.original), DelimitedDict)
self.assertEqual(type(m.updates), DelimitedDict)
def test___init____dict_target_param(self):
m = TestModel({"k": "v"})
self.assertEqual(m.target.get(), {"k": "v"})
def test___init____target_param(self):
m = TestModel("value")
self.assertEqual(m.target.get(), {"_id": "value"})
# __copy__
def test___copy__(self):
m1 = TestModel({"k": "v"})
m2 = copy.copy(m1)
self.assertIsNot(m1, m2)
self.assertEqual(m1.attributes, m2.attributes)
m1.attributes["k"] = "bar"
self.assertEqual(m1.attributes, m2.attributes)
# __deepcopy__
def test___deepcopy__(self):
m1 = TestModel({"k": "v"})
m2 = copy.deepcopy(m1)
self.assertIsNot(m1, m2)
self.assertEqual(m1.attributes, m2.attributes)
m1.attributes["k"] = "bar"
self.assertNotEqual(m1.attributes, m2.attributes)
# __eq__
def test___eq____same_attributes__returns_True(self):
m1 = TestModel()
m1.attributes({"k": "v"})
m2 = TestModel()
m2.attributes({"k": "v"})
self.assertTrue(m1 == m2)
def test___eq____different_attributes__returns_False(self):
m1 = TestModel()
m1.attributes({"foo": "bar"})
m2 = TestModel()
m2.attributes({"baz": "qux"})
self.assertFalse(m1 == m2)
def test___eq____different_types__returns_False(self):
m1 = TestModel()
m1.attributes({"k": "v"})
m2 = object()
self.assertFalse(m1 == m2)
# __ne__
def test___ne____same_attributes__returns_False(self):
m1 = TestModel()
m1.attributes({"k": "v"})
m2 = TestModel()
m2.attributes({"k": "v"})
self.assertFalse(m1 != m2)
def test___ne____different_attributes__returns_True(self):
m1 = TestModel()
m1.attributes({"foo": "bar"})
m2 = TestModel()
m2.attributes({"baz": "qux"})
self.assertTrue(m1 != m2)
def test___ne____different_types__returns_True(self):
m1 = TestModel({"foo": "bar"})
m2 = object()
self.assertTrue(m1 != m2)
# set_target
def test_set_target__dict_param(self):
m = TestModel()
m.set_target({"k": "v"})
self.assertEqual(m.target.get(), {"k": "v"})
def test_set_target__string_param(self):
m = TestModel()
m.set_target("foo")
self.assertEqual(m.target.get(), {"_id": "foo"})
# get_target
def test_get_target__target_not_set__returns_None(self):
m = TestModel()
self.assertEqual(m.get_target(), None)
def test_get_target__target_set__returns_dict(self):
m = TestModel()
m.target = DelimitedDict({"k": "v"})
self.assertEqual(m.get_target(), {"k": "v"})
def test_get_id__id_not_set__returns_None(self):
m = TestModel()
self.assertEqual(m.get_id(), None)
def test_get_id__id_set__returns_id_type(self):
m = TestModel()
m.target = DelimitedDict({"_id": "foo"})
self.assertEqual(m.get_id(), "foo")
# find
def test_find(self):
original = TestModel()
original.attributes({"k": "v"})
original_id = original.save().get(original.id_attribute)
m = TestModel()
m.target({original.id_attribute: original_id})
m.find()
self.assertIn("k", m.attributes)
self.assertEqual(m.attributes["k"], "v")
def test_find__raises_ModelTargetNotSet(self):
m = TestModel()
with self.assertRaises(ModelTargetNotSet):
m.find()
def test_find__default_find_projection(self):
global connection_name, collection_name
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"find_projection": {
"k1": 0
}
})
original = TestModel()
original.attributes({"k1": "v", "k2": "v", "k3": "v"})
original_id = original.save().attributes[TestModel().id_attribute]
m = TestModel()
m.target({original.id_attribute: original_id})
m.find()
self.assertEqual(m.attributes.get(), {
TestModel.id_attribute: original_id,
"k2": "v",
"k3": "v"
})
def test_find__projection_param(self):
original = TestModel()
original.attributes({"k1": "v", "k2": "v", "k3": "v"})
original_id = original.save().attributes[TestModel.id_attribute]
m = TestModel()
m.target({original.id_attribute: original_id})
m.find(projection={"k1": 0})
self.assertEqual(m.attributes.get(), {
original.id_attribute: original_id,
"k2": "v",
"k3": "v"
})
def test_find__default_find_projection__projection_param(self):
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"find_projection": {
"k1": 0
}
})
original = TestModel()
original.attributes({"k1": "v", "k2": "v", "k3": "v"})
original_id = original.save().attributes[TestModel.id_attribute]
m = TestModel()
m.target({original.id_attribute: original_id})
m.find(projection={"k3": 0}, default=True)
self.assertEqual(m.attributes.get(), {
original.id_attribute: original_id, "k2": "v"
})
self.tearDown()
def test_find__pre_find_hook(self):
class ModelAbstract(object):
def pre_find_hook(self):
self.target({"k": "v"})
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"find_projection": {
"k1": 0
},
"bases": ModelAbstract
})
m = TestModel()
m.target({"foo": "baz"})
try:
m.find()
except:
pass
self.assertEqual(m.target.get(), {"k": "v"})
def test_find__post_find_hook(self):
class ModelAbstract(object):
def post_find_hook(self):
self.target({"k": "v"})
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"bases": ModelAbstract
})
m = TestModel()
m.set("foor", "bar")
m.save()
copy = TestModel(m.get("_id"))
copy.find()
self.assertEqual(copy.target.get(), {"k": "v"})
# ref
def test_ref__no_params(self):
value = "v"
m = TestModel()
m.attributes({"k": value})
self.assertIs(m.ref()["k"], value)
def test_ref__string_param(self):
value = "v"
m = TestModel()
m.attributes({"k": value})
self.assertIs(m.ref("k"), value)
def test_ref__delimited_string_param(self):
value = "v"
m = TestModel()
m.attributes({"k1": {"k2": {"k3": value}}})
self.assertIs(m.ref("k1.k2.k3"), value)
def test_ref__handle_dereference_error(self):
m = TestModel()
m.attributes({"k": DereferenceError()})
self.assertIsInstance(m.ref("k"), DereferenceError)
def test_ref__nested_entity(self):
value = "v"
child = TestModel()
child.attributes({"k": value})
parent = TestModel()
parent.attributes({"child": child})
self.assertIs(child.ref("k"), value)
self.assertIs(parent.ref("child.k"), value)
def test_ref__string_key_True_create_params__creates_missing_attributes(self):
m = TestModel()
m.ref("k", create=True)
self.assertEqual(m.attributes.get(), {"k": {}})
def test_ref__delimited_string_key_True_create_params__creates_missing_keys(self):
m = TestModel()
m.ref("k1.k2.k3", create=True)
self.assertEqual(m.attributes.get(), {
"k1": {
"k2": {
"k3": {}
}
}
})
# has
def test_has__string_param__key_exists__returns_True(self):
m = TestModel()
m.attributes({"k": "v"})
self.assertTrue(m.has("k"))
def test_has__string_param__key_does_not_exist__returns_False(self):
m = TestModel()
m.attributes({"k": "v"})
self.assertFalse(m.has("foo"))
def test_has__delimited_string_param__key_exists__returns_True(self):
m = TestModel()
m.attributes({"k1": {"k2": {"k3": "v"}}})
self.assertTrue(m.has("k1.k2.k3"))
def test_has__delimited_string_param__key_does_not_exist__returns_False(self):
m = TestModel()
m.attributes({"k1": {"k2": {"k3": "v"}}})
self.assertFalse(m.has("k1.k2.foo"))
def test_has__nested_entity__key_exists__returns_True(self):
child = TestModel()
child.attributes({"k": "v"})
parent = TestModel()
parent.attributes({"child": child})
self.assertTrue(parent.has("child.k"))
def test_has__nested_entity__key_does_not_exist__returns_False(self):
child = TestModel()
child.attributes({"k": "v"})
parent = TestModel()
parent.attributes({"child": child})
self.assertFalse(parent.has("child.foo"))
def test_has__delimited_string_param__dereference_error__returns_False(self):
m = TestModel()
m.attributes({"k": DereferenceError()})
self.assertFalse(m.has("k.foo"))
# get
def test_get__no_params__returns_value_copy(self):
value = {"k1": "v", "k2": "v", "k3": "v"}
m = TestModel()
m.attributes(value)
self.assertEqual(m.get(), {"k1": "v", "k2": "v", "k3": "v"})
self.assertIsNot(m.get(), value)
def test_get__exclusive_default_get_projection(self):
TestModel, TestCollection = Entity("Test", {
"get_projection": {
"k1": 0
}
})
m = TestModel()
m.attributes({"k1": "v", "k2": "v"})
self.assertEqual(m.get(), {
"k2": "v"
})
def test_get__exclusive_default_get_projection__projection_param(self):
TestModel, TestCollection = Entity("Test", {
"get_projection": {
"k1": 0
}
})
m = TestModel()
m.attributes({"k1": "v", "k2": "v"})
self.assertEqual(m.get(projection={"k3": 0}), {
"k2": "v"
})
def test_get__inclusive_default_get_projection(self):
TestModel, TestCollection = Entity("Test", {
"get_projection": {
"k1": 1
}
})
m = TestModel()
m.attributes({"k1": "v", "k2": "v"})
self.assertEqual(m.get(), {
"k1": "v"
})
def test_get__inclusive_default_get_projection__projection_param(self):
TestModel, TestCollection = Entity("Test", {
"get_projection": {
"k1": 1
}
})
m = TestModel()
m.attributes({"k1": "v", "k2": "v", "k3": "v"})
self.assertEqual(m.get(projection={"k3": 1}), {
"k1": "v",
"k3": "v"
})
def test_get__nested_projection_param(self):
m = TestModel()
m.attributes({
"k1": {
"k2": "v",
"k3": "v",
"k4": "v"
}
})
self.assertEqual(m.get(projection={"k1": {"k2": 0}}), {
"k1": {
"k3": "v",
"k4": "v"
}
})
def test_get__string_param__returns_value(self):
m = TestModel()
m.attributes({"k": "v"})
self.assertEqual(m.get("k"), "v")
def test_get__string_and_default_params__returns_default_value(self):
m = TestModel()
self.assertEqual(m.get("k", "Default"), "Default")
def test_get__delimited_string__returns_value(self):
m = TestModel()
m.attributes({"k1": {"k2": {"k3": "v"}}})
self.assertEqual(m.get("k1.k2.k3"), "v")
def test_get__delimited_string__projection_param(self):
m = TestModel()
m.attributes({"k1": {"k2": {"k3": {"k4": "v", "k5": "v", "k6": "v"}}}})
self.assertEqual(
m.get("k1.k2.k3", projection={"k1.k2.k3.k5": 0}),
{"k4": "v", "k6": "v"}
)
def test_get__nested_entity__no_params(self):
child = TestModel()
child.attributes({"k": "v"})
parent = TestModel()
parent.attributes({"child": child})
self.assertEqual(parent.get(), {"child": {"k": "v"}})
def test_get__nested_entity__delimited_string_param(self):
child = TestModel()
child.attributes({"k": "v"})
parent = TestModel()
parent.attributes({"child": child})
self.assertEqual(parent.get("child.k"), "v")
def test_get__nested_entity__delimited_string_and_projection_params(self):
child = TestModel()
child.attributes({"k1": "v", "k2": "v", "k3": "v"})
parent = TestModel()
parent.attributes({"child": child})
self.assertEqual(parent.get("child", projection={"child.k1": 0}), {
"k2": "v",
"k3": "v"
})
def test_get__DereferenceError(self):
child = TestModel()
child.attributes({"k": DereferenceError(data={"k": "v"})})
parent = TestModel()
parent.attributes({"child": child})
self.assertEqual(parent.get("child.k"), {
"message": "Dereference error",
"data": {
"k": "v"
}
})
# generate_id
def test_generate_id(self):
self.assertIsInstance(TestModel().generate_id(), TestModel.id_type)
# set
def test_set__string_param(self):
m = TestModel()
m.set("k", "v")
self.assertEqual(m.attributes.get(), {"k": "v"})
def test_set__dict_param(self):
m = TestModel()
m.set({"k": "v"})
self.assertEqual(m.attributes.get(), {"k": "v"})
def test_set__nested_dict_param(self):
m = TestModel()
m.set("k", {"foo": {"bar": {"baz": "qux"}}})
self.assertEqual(m.attributes.get(), {"k": {"foo": {"bar": {"baz": "qux"}}}})
def test_set__delimited_string_param(self):
m = TestModel()
m.set("k1.k2.k3", "v")
self.assertEqual(m.attributes.get(), {"k1": {"k2": {"k3": "v"}}})
def test_set__nested_entity(self):
child = TestModel()
child.attributes({"k": "v"})
parent = TestModel()
parent.attributes({"child": child})
parent.set("child.k", "foo")
self.assertEqual(child.attributes.get(), {"k": "foo"})
def test_set__DereferenceError(self):
m = TestModel()
m.attributes({"k": DereferenceError()})
m.set("k", "v")
self.assertEqual(m.get(), {"k": "v"})
def test_set__False_create_param__raises_KeyError(self):
m = TestModel()
with self.assertRaises(KeyError):
m.set("k", "v", create=False)
def test_set__False_create_Eparam_raises_TypeError(self):
m = TestModel()
m.attributes({"k1": {"k2": "v"}})
with self.assertRaises(TypeError):
m.set("k1.k2.k3", "v", create=False)
def test_set__False_create_param_DereferenceError__raisesTypeError(self):
m = TestModel()
m.attributes({"k1": DereferenceError()})
with self.assertRaises(TypeError):
m.set("k1.k2", "v", create=False)
# unset
def test_unset__string_param(self):
m = TestModel()
m.attributes({"k1": "v", "k2": "v", "k3": "v"})
m.unset("k1")
self.assertEqual(m.attributes.get(), {"k2": "v", "k3": "v"})
def test_unset__delimited_string_param(self):
m = TestModel()
m.attributes({"k1": {"k2": "v"}, "k3": "v"})
m.unset("k1.k2")
self.assertEqual(m.attributes.get(), {"k1": {}, "k3": "v"})
def test_unset__nested_entity(self):
child = TestModel()
child.attributes({"k1": "v", "k2": "v", "k3": "v"})
parent = TestModel()
parent.attributes({"child": child})
parent.unset("child.k2")
self.assertEqual(child.attributes.get(), {"k1": "v", "k3": "v"})
def test_unset__True_force_param__no_exception_raised(self):
m = TestModel()
m.original({"k": "v"})
try:
m.unset("k", force=True)
except Exception:
self.fail("exception raised")
def test_unset__string_param__raises_KeyError(self):
m = TestModel()
m.original({"k": "v"}) # force state
with self.assertRaises(KeyError):
m.unset("k")
def test_unset__delimited_string_param__raises_TypeError(self):
m = TestModel()
m.attributes({"k1": "v"})
m.original({"k1": "v"}) # force state
with self.assertRaises(TypeError):
m.unset("k1.k2.k3")
def test_unset__DereferenceError__raises_TypeError(self):
m = TestModel()
m.attributes({"k1": DereferenceError()})
m.original({"k1": "v"}) # force state
with self.assertRaises(TypeError):
m.unset("k1.k2.k3")
# unset_many
def test_unset_many(self):
m = TestModel()
m.attributes({"k1": "v", "k2": "v", "k3": "v"})
m.unset_many(["k1", "k2"])
self.assertEqual(m.attributes.get(), {"k3": "v"})
# push
def test_push(self):
m = TestModel()
m.attributes({"k": []})
m.push("k", "v")
self.assertEqual(m.get(), {"k": ["v"]})
def test_push__create_container(self):
m = TestModel()
m.push("k", "v")
self.assertEqual(m.get(), {"k": ["v"]})
def test_push__handle_existing_values(self):
m = TestModel()
m.attributes({"k": "foo"})
m.push("k", "bar")
self.assertEqual(m.get(), {"k": ["foo", "bar"]})
def test_push__dot_notation_string_param(self):
m = TestModel()
m.attributes({"k1": {"k2": {"k3": ["foo"]}}})
m.push("k1.k2.k3", "bar")
self.assertEqual(m.attributes.get(), {"k1": {"k2": {"k3": ["foo", "bar"]}}})
def test_push__False_create_param__raises_KeyError(self):
m = TestModel()
with self.assertRaises(KeyError):
m.push("k1.k2.k3", "v", create=False)
def test_push__False_create_param__raises_TypeError(self):
m = TestModel()
m.attributes({"k1": {"k2": "v"}})
with self.assertRaises(TypeError):
m.push("k1.k2.k3", "v", create=False)
def test_push__nested_entity(self):
child = TestModel()
child.attributes({"k": ["foo"]})
parent = TestModel()
parent.attributes({"child": child})
parent.push("child.k", "bar")
self.assertEqual(child.attributes.get(), {"k": ["foo", "bar"]})
# push_many
def test_push_many(self):
m = TestModel()
m.push_many("k", ["v1", "v2", "v3"])
self.assertEqual(m.attributes.get(), {"k": ["v1", "v2", "v3"]})
# pull
def test_pull__string_param(self):
m = TestModel()
m.attributes({"k": ["v1", "v2", "v3"]})
m.pull("k", "v2")
self.assertEqual(m.attributes.get(), {"k": ["v1", "v3"]})
def test_pull__delimited_string_param(self):
m = TestModel()
m.attributes({"k1": {"k2": {"k3": ["foo", "bar"]}}})
m.pull("k1.k2.k3", "foo")
self.assertEqual(m.attributes.get(), {"k1": {"k2": {"k3": ["bar"]}}})
def test_pull__missing_key__raises_KeyError(self):
m = TestModel()
m.attributes({"foo": "bar"})
m.original(m.attributes)
with self.assertRaises(KeyError):
m.pull("k", "v")
def test_pull__True_force_param__KeyError_not_raised(self):
m = TestModel()
m.attributes({"foo": "bar"})
m.original(m.attributes) # force state
try:
m.pull("k", "v", force=True)
except Exception:
self.fail("exception raised")
def test_pull__incorrect_type__raises_TypeError(self):
m = TestModel()
m.attributes({"k": "v"})
m.original(m.attributes) # force state
with self.assertRaises(TypeError):
m.pull("k", "v")
def test_pull__delimited_string_param__incorrect_type__raises_TypeError(self):
m = TestModel()
m.attributes({"k1": {"k2": "v"}})
m.original(m.attributes) # force state
with self.assertRaises(TypeError):
m.pull("k1.k2.k3", "v")
def test_pull__True_force_param__TypeError_not_raised(self):
m = TestModel()
m.attributes({"k": "v"})
try:
m.pull("k", "v", force=True)
except Exception:
self.fail("exception raised")
def test_pull__missing_value__raises_ValueError(self):
m = TestModel()
m.attributes({"k": ["foo"]})
m.original(m.attributes) # force state
with self.assertRaises(ValueError):
m.pull("k", "bar")
def test_pull__True_force_param__ValueError_not_raised(self):
m = TestModel()
m.attributes({"k": ["foo"]})
try:
m.pull("k", "bar", force=True)
except Exception:
self.fail("exception raised")
def test_pull__nested_entity(self):
child = TestModel()
child.attributes({"k": ["foo", "bar"]})
parent = TestModel()
parent.attributes({"child": child})
parent.pull("child.k", "bar")
self.assertEqual(child.attributes.get(), {"k": ["foo"]})
# pull_many
def test_pull_many(self):
m = TestModel()
m.attributes({"k": ["v1", "v2", "v3"]})
m.pull_many("k", ["v1", "v3"])
self.assertEqual(m.attributes.get(), {"k": ["v2"]})
# delete
def test_delete(self):
m = TestModel()
self.assertFalse(m._delete)
m.delete()
self.assertTrue(m._delete)
def test_delete__cascade(self):
p = TestModel()
c = TestModel()
p.set("c", c)
self.assertFalse(p._delete)
self.assertFalse(c._delete)
p.delete(cascade=True)
self.assertTrue(p._delete)
self.assertTrue(c._delete)
# reset
def test_reset(self):
m = TestModel()
m.attributes({"k": "v"})
m.target({"k": "v"})
m.delete()
self.assertEqual(bool(m.target), True)
self.assertEqual(bool(m.attributes), True)
self.assertEqual(m._delete, True)
m.reset()
self.assertEqual(bool(m.target), False)
self.assertEqual(bool(m.updates), False)
self.assertEqual(bool(m.attributes), False)
self.assertEqual(m._delete, False)
# record_update
def test_record_update__set__string(self):
m = TestModel()
self.assertEqual(m.original.get(), {})
m.set("k", "v")
self.assertEqual(m.updates.get(), {"$set": {"k": "v"}})
def test_record_update__set__delimited_string(self):
m = TestModel()
m.set("k1.k2.k3", "v")
self.assertEqual(m.updates.get(), {
"$set": {"k1": {"k2": {"k3": "v"}}}
})
def test_record_update__set__False_record_param(self):
m = TestModel()
m.set("k", "v", record=False)
self.assertEqual(m.attributes.get(), {"k": "v"})
self.assertEqual(m.updates.get(), {})
def test_record_update__set__original_not_set(self):
m = TestModel()
self.assertEqual(m.original.get(), {})
m.set("k", "v")
self.assertEqual(m.updates.get(), {"$set": {"k": "v"}})
def test_record_update__set__original_set(self):
m = TestModel()
m.original({"k": "v"})
self.assertEqual(m.original.get(), {"k": "v"})
m.set("k", "v")
self.assertEqual(m.updates.get(), {})
m.set("k", "foo")
self.assertEqual(m.updates.get(), {"$set": {"k": "foo"}})
m.set("k", "v")
self.assertEqual(m.updates.get(), {})
def test_record_update__unset__string(self):
m = TestModel()
m.attributes({"k": "v"})
m.unset("k")
self.assertEqual(m.updates.get(), {"$unset": {"k": ""}})
def test_record_update__unset__delimited_string(self):
m = TestModel()
m.attributes({"k1": {"k2": {"k3": "v"}}})
m.unset("k1.k2.k3")
self.assertEqual(m.updates.get(), {
"$unset": {"k1": {"k2": {"k3": ""}}}
})
def test_record_update__unset__False_record_param(self):
m = TestModel()
m.attributes({"k": "v"})
m.unset("k", record=False)
self.assertEqual(m.attributes.get(), {})
self.assertEqual(m.updates.get(), {})
def test_record_update__unset__original_not_set(self):
m = TestModel()
m.unset("k")
self.assertEqual(m.updates.get(), {"$unset": {"k": ""}})
def test_record_update__unset__original_set(self):
m = TestModel()
m.attributes({"k": "v"})
m.original(m.attributes)
with self.assertRaises(KeyError):
m.unset("foo")
def test_record_update__unset__True_force_param(self):
m = TestModel()
m.attributes({"k": "v"})
m.unset("foo", force=True)
self.assertEqual(m.updates.get(), {"$unset": {"foo": ""}})
def test_record_update__push__string(self):
m = TestModel()
m.push("k", "v")
self.assertEqual(m.updates.get(), {"$push": {"k": "v"}})
def test_record_update__push__dict(self):
m = TestModel()
m.set("k", [{"k": "v"}])
m.save()
m.push("k", {"k": "v"})
self.assertEqual(m.updates.get(), {"$push": {"k": {"k": "v"}}})
def test_record_update__push__delimited_string(self):
m = TestModel()
m.push("k1.k2.k3", "v")
self.assertEqual(m.updates.get(), {
"$push": {"k1": {"k2": {"k3": "v"}}}
})
def test_record_update__push__False_record_param(self):
m = TestModel()
m.push("k", "v", record=False)
self.assertEqual(m.attributes.get(), {"k": ["v"]})
self.assertEqual(m.updates.get(), {})
def test_record_update__push__set_iterator(self):
m = TestModel()
m.push("k", "v1")
self.assertEqual(m.updates.get(), {"$push": {"k": "v1"}})
m.push("k", "v2")
self.assertEqual(m.updates.get(), {
"$push": {"k": {"$each": ["v1", "v2"]}}
})
def test_record_update__push__intersect_pull(self):
m = TestModel()
m.attributes({"k": ["v1", "v2", "v3"]})
m.pull("k", "v1")
m.pull("k", "v2")
m.pull("k", "v3")
self.assertEqual(m.updates.get(), {
"$pull": {"k": {"$in": ["v1", "v2", "v3"]}}
})
m.push("k", "v2")
self.assertEqual(m.updates.get(), {
"$push": {"k": "v2"},
"$pull": {"k": {"$in": ["v1", "v3"]}}
})
def test_record_update__push__intersect_pull_remove_iterator(self):
m = TestModel()
m.attributes({"k": ["v1", "v2"]})
m.pull("k", "v1")
m.pull("k", "v2")
self.assertEqual(m.updates.get(), {
"$pull": {"k": {"$in": ["v1", "v2"]}}
})
m.push("k", "v2")
self.assertEqual(m.updates.get(), {
"$push": {"k": "v2"},
"$pull": {"k": "v1"}
})
def test_record_update__push__intersect_pull_remove_operator(self):
m = TestModel()
m.attributes({"k": ["v1"]})
m.pull("k", "v1")
self.assertEqual(m.updates.get(), {"$pull": {"k": "v1"}})
m.push("k", "v1")
self.assertEqual(m.updates.get(), {"$push": {"k": "v1"}})
def test_record_update__pull__string(self):
m = TestModel()
m.attributes({"k": ["v"]})
m.pull("k", "v")
self.assertEqual(m.updates.get(), {"$pull": {"k": "v"}})
def test_record_update__pull__delimited_string(self):
m = TestModel()
m.attributes({"k1": {"k2": {"k3": ["v"]}}})
m.pull("k1.k2.k3", "v")
self.assertEqual(m.updates.get(), {
"$pull": {"k1": {"k2": {"k3": "v"}}}
})
def test_record_update__pull__False_record_param(self):
m = TestModel()
m.attributes({"k": ["v"]})
m.pull("k", "v", record=False)
self.assertEqual(m.attributes.get(), {"k": []})
self.assertEqual(m.updates.get(), {})
    def test_record_update__pull__set_iterator(self):
        # NOTE(review): despite the name, this body only exercises push() and
        # duplicates test_record_update__push__set_iterator verbatim; pull()
        # is never called. Looks like a copy/paste slip -- confirm whether a
        # pull-based "$in" iterator assertion was intended here.
        m = TestModel()
        m.push("k", "v1")
        self.assertEqual(m.updates.get(), {"$push": {"k": "v1"}})
        m.push("k", "v2")
        self.assertEqual(m.updates.get(), {
            "$push": {"k": {"$each": ["v1", "v2"]}}
        })
def test_record_update__pull__intersect_push(self):
m = TestModel()
m.attributes({"k": ["v1", "v2", "v3"]})
m.pull("k", "v1")
m.pull("k", "v2")
m.pull("k", "v3")
self.assertEqual(m.updates.get(), {
"$pull": {"k": {"$in": ["v1", "v2", "v3"]}}
})
m.push("k", "v2")
self.assertEqual(m.updates.get(), {
"$push": {"k": "v2"},
"$pull": {"k": {"$in": ["v1", "v3"]}}
})
def test_record_update__pull__intersect_push_remove_iterator(self):
m = TestModel()
m.attributes({"k": ["v1", "v2"]})
m.pull("k", "v1")
m.pull("k", "v2")
self.assertEqual(m.updates.get(), {
"$pull": {"k": {"$in": ["v1", "v2"]}}
})
m.push("k", "v2")
self.assertEqual(m.updates.get(), {
"$push": {"k": "v2"},
"$pull": {"k": "v1"}
})
def test_record_update__pull__inersect_push_remove_operator(self):
m = TestModel()
m.attributes({"k": ["v1"]})
m.pull("k", "v1")
self.assertEqual(m.updates.get(), {"$pull": {"k": "v1"}})
m.push("k", "v1")
self.assertEqual(m.updates.get(), {"$push": {"k": "v1"}})
# save
def test_save__insert(self):
m = TestModel()
m.attributes({"k": "v"})
m.save()
find_result = Connections.get(
m.connection,
m.collection
).find_one()
self.assertEqual(find_result, m.attributes.get())
def test_save__insert__protected_post_insert_hook(self):
m = TestModel()
m.attributes({"k": "v"})
m.save()
self.assertIn(m.id_attribute, m.attributes)
self.assertEqual(
type(m.attributes[m.id_attribute]),
bson.objectid.ObjectId
)
self.assertEqual(m.attributes, m.original)
self.assertEqual(m.updates.get(), {})
self.assertEqual(
{m.id_attribute: m.attributes[m.id_attribute]},
m.target.get()
)
def test_save__insert__target_set__updates_empty(self):
m = TestModel()
m.attributes({"k": "v"})
m.save()
self.assertEqual(
{m.id_attribute: m.attributes[m.id_attribute]},
m.target.get()
)
self.assertEqual(m.updates.get(), {})
try:
m.save()
except Exception:
self.fail("exception raised")
def test_save__update(self):
original = TestModel()
original.attributes({"k1": "v"})
original_id = original.save().get(original.id_attribute)
m = TestModel()
m.set_target(original_id)
m.find()
m.set("k2", "v")
m.save()
copy = TestModel()
copy.set_target(original_id)
copy.find()
self.assertEqual(m.attributes, copy.attributes)
def test_save__update__push_pull_iterators(self):
m = TestModel()
m.save()
m.set("k1.k2.k3", "v")
m.push_many("k", ["v", "v", "v"])
m.save()
copy = TestModel(m.get_target()).find()
self.assertEqual(m.attributes, copy.attributes)
def test_save__update__without_find(self):
original = TestModel()
original.set("k1", "v")
original_id = original.save().get(original.id_attribute)
m = TestModel()
m.set_target(original_id)
m.set("k2", "v")
m.save()
copy = TestModel(original_id)
copy.find()
self.assertEqual(copy.get(), {
original.id_attribute: original_id,
"k1": "v",
"k2": "v"
})
def test_save__update__protected_post_update_hook(self):
m = TestModel()
m.set("k", "v")
self.assertEqual(m.target.get(), {})
self.assertEqual(m.original.get(), {})
self.assertEqual(m.updates.get(), {"$set": {"k": "v"}})
m.save()
self.assertEqual(
type(m.target[m.id_attribute]),
bson.objectid.ObjectId
)
self.assertEqual(
m.original.get(),
{
m.id_attribute: m.attributes[m.id_attribute],
"k": "v"
}
)
self.assertEqual(m.updates.get(), {})
def test_save__update__raises_ModelNotUpdated(self):
m = TestModel()
m.set_target(bson.objectid.ObjectId())
m.set("k", "v")
m.original({"k": "v"})
with self.assertRaises(ModelNotUpdated):
m.save()
def test_save__delete(self):
m = TestModel()
m.save()
m.delete()
m.save()
copy = TestModel()
copy.set_target(m.get_target())
with self.assertRaises(ModelNotFound):
copy.find()
def test_save__delete__raises_ModelNotDeleted(self):
m = TestModel()
m.set("k", "v")
m.save()
m.delete()
self.assertEqual(m._delete, True)
m.save()
copy = TestModel()
copy.set_target(m.get_target())
copy.delete()
with self.assertRaises(ModelNotDeleted):
copy.save()
def test_save__delete__raises_ModelTargetNotSet(self):
m = TestModel()
m.save()
m.target = {}
m.delete()
with self.assertRaises(ModelTargetNotSet):
m.save()
def test_save__insert__pre_insert_hook(self):
global connection_name, collection_name
class ModelAbstract(object):
def pre_insert_hook(self):
self.set("created", datetime.datetime.today())
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"bases": ModelAbstract
})
m = TestModel()
m.save()
self.assertIn("created", m.attributes.get())
self.assertEqual(datetime.datetime, type(m.attributes["created"]))
def test_save__insert__post_insert_hook(self):
global connection_name, collection_name
class ModelAbstract(object):
def post_insert_hook(self):
self.set_baz()
def set_baz(self):
baz = "{} {}".format(self.get("foo"), self.get("bar"))
self.set("baz", baz)
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"bases": ModelAbstract
})
m = TestModel()
m.set("foo", "Foo")
m.set("bar", "Bar")
m.save()
self.assertEqual(m.get("baz"), "Foo Bar")
def test_save__update__pre_update_hook(self):
global connection_name, collection_name
class ModelAbstract(object):
def pre_update_hook(self):
self.set("updated", datetime.datetime.today())
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"bases": ModelAbstract
})
m = TestModel()
m.save()
self.assertNotIn("updated", m.attributes)
m.set("k", "v")
m.save()
self.assertIn("updated", m.attributes)
self.assertEqual(datetime.datetime, type(m.attributes["updated"]))
def test_save__update__post_update_hook(self):
global connection_name, collection_name
class ModelAbstract(object):
def post_update_hook(self):
self.set_baz()
def set_baz(self):
baz = "{} {}".format(self.get("foo"), self.get("bar"))
self.set("baz", baz)
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"bases": ModelAbstract
})
m = TestModel()
m.save()
self.assertNotIn("baz", m.attributes)
m.set("foo", "Foo")
m.set("bar", "Bar")
m.save()
self.assertEqual(m.get("baz"), "Foo Bar")
def test_save__delete__post_delete_hook(self):
global connection_name, collection_name
class ModelAbstract(object):
def pre_delete_hook(self):
m = TestModel()
m.set_target(self.get_target())
m.find()
self.set("d", m.get())
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"bases": ModelAbstract
})
m = TestModel()
m.set("k", "v")
m.save()
self.assertNotIn("d", m.attributes)
m.delete()
m.save()
self.assertIn("d", m.attributes)
self.assertEqual(m.get("d"), {
m.id_attribute: m.attributes[m.id_attribute],
"k": "v"
})
copy = TestModel(m.attributes[m.id_attribute])
with self.assertRaises(ModelNotFound):
copy.find()
def test_save__deleete__post_delete_hook(self):
global connection_name, collection_name
class ModelAbstract(object):
def post_delete_hook(self):
m = TestModel()
m.set(m.id_attribute, self.get_id())
m.save()
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"bases": ModelAbstract
})
m = TestModel()
m.set("k1", "v")
m.set("k2", "v")
m.set("k3", "v")
m.save()
m.delete()
m.save()
copy = TestModel(m.get(m.id_attribute))
copy.find()
self.assertEqual(copy.attributes.get(), {
copy.id_attribute: m.get(copy.id_attribute)
})
# dereference_entities
def test_dereference_entities__local_one(self):
global connection_name, collection_name
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"references": {
"r": {
"entity": "Test",
"type": "local_one",
"foreign_key": "_id"
}
}
})
original = TestModel()
original.set("k", "v")
original.save()
m = TestModel()
m.set("k", "v")
m.set("r", original.get_id())
m.save()
copy = TestModel(m.get_id()).find(projection={"r": 2})
self.assertEqual(type(copy.attributes["r"]), TestModel)
self.assertEqual(copy.get("r._id"), original.get("_id"))
def test_dereference_entities__local_one__with_projection(self):
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"references": {
"r": {
"entity": "Test",
"type": "local_one",
"foreign_key": "_id"
}
}
})
original = TestModel()
original.set("k", "v")
original.save()
m = TestModel()
m.set("k", "v")
m.set("r", original.get_id())
m.save()
copy = TestModel(m.get_id()).find(projection={"r": {"k": 0}})
self.assertEqual(type(copy.attributes["r"]), TestModel)
self.assertEqual(copy.get("r._id"), original.get("_id"))
self.assertEqual(copy.get("r"), {"_id": original.get("_id")})
def test_dereference_entities__local_one__delimited_string_key(self):
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"references": {
"r1.r2.r3": {
"entity": "Test",
"type": "local_one",
}
}
})
original = TestModel()
original.set("k", "v")
original.save()
m = TestModel()
m.set("k", "v")
m.set("r1.r2.r3", original.get_id())
m.save()
copy = TestModel(m.get_id()).find(projection={"r1.r2.r3": 2})
self.assertEqual(type(copy.attributes["r1.r2.r3"]), TestModel)
self.assertEqual(copy.get("r1.r2.r3._id"), original.get("_id"))
# def test_dereference_entities__many_to_one_local(self):
# TestModel, TestCollection = Entity("Test", {
# "connection": connection_name,
# "collection": collection_name,
# "references": {
# "r": {
# "entity": "Test",
# "type": "many_to_one",
# }
# }
# })
#
# original = TestModel()
# original.set("k", "v")
# original.save()
#
# m = TestModel()
# m.set("k", "v")
# m.set("r", original.get_id())
# m.save()
#
# copy = TestModel(m.get_id()).find(projection={"r": 2})
#
# self.assertEqual(type(copy.attributes["r"]), TestModel)
# self.assertEqual(copy.get("r._id"), original.get("_id"))
def test_dereference_entities__foreign_one(self):
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"references": {
"r": {
"entity": "Test",
"type": "foreign_one",
"foreign_key": "r"
}
}
})
original = TestModel()
original.set("k", "v")
original.save()
m = TestModel()
m.set("k", "v")
m.set("r", original.get_id())
m.save()
copy = TestModel(original.get_id()).find(projection={"r": 2})
self.assertEqual(type(copy.attributes["r"]), TestModel)
self.assertEqual(copy.get("r._id"), m.get("_id"))
# def test_dereference_entities__one_to_one_foreign(self):
# TestModel, TestCollction = Entity("Test", {
# "connection": connection_name,
# "collection": collection_name,
# "references": {
# "r": {
# "entity": "Test",
# "type": "one_to_one",
# "foreign_key": "r"
# }
# }
# })
#
# original = TestModel()
# original.set("k", "v")
# original.save()
#
# m = TestModel()
# m.set("k1", "v")
# m.set("k2", "v")
# m.set("k3", "v")
# m.set("r", original.get_id())
# m.save()
#
# copy = TestModel(original.get_id()).find(projection={
# "r": {"k2": 1}
# })
#
# # assert resolved relationship
# self.assertEqual(type(copy.attributes["r"]), TestModel)
# self.assertEqual(copy.get("r._id"), m.get("_id"))
# self.assertEqual(copy.get("r"), {
# "_id": m.get("_id"),
# "k2": "v"
# })
#
# def test_dereference_entities__many_to_one_foreign(self):
# TestModel, TestCollection = Entity("Test", {
# "connection": connection_name,
# "collection": collection_name,
# "references": {
# "r": {
# "entity": "Test",
# "type": "many_to_one",
# "foreign_key": "r"
# }
# }
# })
#
# original = TestModel()
# original.set("k", "v")
# original.save()
#
# m = TestModel()
# m.set("k", "v")
# m.set("r", original.get_id())
# m.save()
#
# copy = TestModel(original.get_id()).find(projection={"r": 2})
#
# self.assertEqual(type(copy.attributes["r"]), TestModel)
# self.assertEqual(copy.get("r._id"), m.get("_id"))
def test_dereference_entities__local_one__returns_DereferenceError(self):
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"references": {
"r": {
"entity": "Test",
"type": "local_one",
}
}
})
original = TestModel()
original.save()
m = TestModel()
m.set("r", original.get_id())
m.save()
original.delete()
original.save()
copy = TestModel(m.get_id()).find(projection={"r": 2})
self.assertEqual(
type(copy.attributes["r"]),
DereferenceError
)
    def test_dereference_entities__foreign_one__returns_DereferenceError(self):
        # NOTE(review): the name promises a DereferenceError, but the final
        # assertion checks that "r" resolves to None when no foreign document
        # points back at this one. Either the name or the assertion looks
        # stale -- confirm intended behavior (compare with the local_one
        # variant above, which does assert DereferenceError).
        TestModel, TestCollection = Entity("Test", {
            "connection": connection_name,
            "collection": collection_name,
            "references": {
                "r": {
                    "entity": "Test",
                    "type": "foreign_one",
                    "foreign_key": "r"
                }
            }
        })
        original = TestModel()
        original.save()
        m = TestModel(original.get("_id"))
        m.find(projection={"r": 2})
        self.assertEqual(m.get("r"), None)
# reference entities
def test_reference_entities(self):
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"references": {
"r": {
"entity": "Test",
"type": "local_one",
}
}
})
child = TestModel()
child.save()
parent = TestModel()
parent.set("r", child)
parent.save()
copy = TestModel(parent.get_id()).find()
self.assertEqual(type(copy.attributes["r"]), bson.objectid.ObjectId)
self.assertEqual(copy.get("r"), child.get(child.id_attribute))
def test_reference_entities__foreign_key(self):
TestModel, TestCollection = Entity("Test", {
"connection": connection_name,
"collection": collection_name,
"references": {
"foo": {
"entity": "Test",
"type": "local_one",
"foreign_key": "bar"
}
}
})
child = TestModel()
child.set("bar", "something")
child.save()
parent = TestModel()
parent.set("foo", child)
parent.save()
if __name__ == "__main__":
unittest.main()
| chrisantonellis/pymongo_basemodel | test/test_model.py | Python | mit | 49,483 | 0.000364 |
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import uuid
from buildbot.changes.filter import ChangeFilter
from buildbot.scheduler import Dependent
from buildbot.scheduler import Nightly
from buildbot.scheduler import Periodic
from buildbot.schedulers.basic import SingleBranchScheduler as Scheduler
from buildbot.scheduler import Triggerable
from master import slaves_list
from master.url_poller import URLPoller
def GetGSUtilUrl(gs_bucket, root_folder):
  """Return the gs:// URL for root_folder inside the gs_bucket bucket."""
  return 'gs://{}/{}'.format(gs_bucket, root_folder)
class Helper(object):
  """Declarative collector for a BuildBot master configuration.

  Builders, factories and schedulers are registered via the methods below
  and wired into the master config dict by Update().
  """

  def __init__(self, defaults):
    # 'defaults' supplies fallback values (e.g. a default builder category).
    self._defaults = defaults
    self._builders = []
    self._factories = {}
    self._schedulers = {}

  def Builder(self, name, factory, gatekeeper=None, scheduler=None,
              builddir=None, auto_reboot=True, notify_on_missing=False,
              slavebuilddir=None, category=None):
    # 'scheduler' is a '|'-separated list of scheduler names this builder is
    # attached to; 'gatekeeper' is a '|'-separated list of gatekeeper
    # categories merged into the builder's category by Update().
    category = category or self._defaults.get('category')
    self._builders.append({'name': name,
                           'factory': factory,
                           'gatekeeper': gatekeeper,
                           'schedulers': scheduler.split('|'),
                           'builddir': builddir,
                           'category': category,
                           'auto_reboot': auto_reboot,
                           'notify_on_missing': notify_on_missing,
                           'slavebuilddir': slavebuilddir})

  def Hourly(self, name, branch, hour='*'):
    """Helper method for the Nightly scheduler."""
    if name in self._schedulers:
      raise ValueError('Scheduler %s already exists' % name)
    self._schedulers[name] = {'type': 'Nightly',
                              'builders': [],
                              'branch': branch,
                              'hour': hour}

  def Periodic(self, name, periodicBuildTimer):
    """Helper method for the Periodic scheduler."""
    if name in self._schedulers:
      raise ValueError('Scheduler %s already exists' % name)
    self._schedulers[name] = {'type': 'Periodic',
                              'builders': [],
                              'periodicBuildTimer': periodicBuildTimer}

  def Dependent(self, name, parent):
    # 'parent' names another scheduler; it must be processed by Update()
    # before the dependent pass so its 'instance' key exists.
    if name in self._schedulers:
      raise ValueError('Scheduler %s already exists' % name)
    self._schedulers[name] = {'type': 'Dependent',
                              'parent': parent,
                              'builders': []}

  def Triggerable(self, name):
    if name in self._schedulers:
      raise ValueError('Scheduler %s already exists' % name)
    self._schedulers[name] = {'type': 'Triggerable',
                              'builders': []}

  def Factory(self, name, factory):
    if name in self._factories:
      raise ValueError('Factory %s already exists' % name)
    self._factories[name] = factory

  def Scheduler(self, name, branch, treeStableTimer=60, categories=None):
    if name in self._schedulers:
      raise ValueError('Scheduler %s already exists' % name)
    self._schedulers[name] = {'type': 'Scheduler',
                              'branch': branch,
                              'treeStableTimer': treeStableTimer,
                              'builders': [],
                              'categories': categories}

  def URLScheduler(self, name, url, pollInterval=300, include_revision=False):
    # NOTE: unlike the other registrars, this one does not guard against a
    # duplicate scheduler name.
    self._schedulers[name] = {'type': 'URLScheduler',
                              'url': url,
                              'builders': [],
                              'include_revision': include_revision,
                              'pollInterval': pollInterval}

  def Update(self, c):
    """Apply everything registered so far to the master config dict 'c'.

    Mutates c['builders'], c['schedulers'] and (for URL schedulers)
    c['change_source'] in place.
    """
    # Schedulers named 'global_*' are shared across helpers and must not
    # collide with the locally registered ones.
    global_schedulers = dict((s.name, s) for s in c['schedulers']
                             if s.name.startswith('global_'))
    assert not set(global_schedulers) & set(self._schedulers)
    for builder in self._builders:
      # Update the schedulers with the builder.
      schedulers = builder['schedulers']
      if schedulers:
        for scheduler in schedulers:
          if scheduler in global_schedulers:
            global_schedulers[scheduler].builderNames.append(builder['name'])
          else:
            self._schedulers[scheduler]['builders'].append(builder['name'])

      # Construct the category.
      categories = []
      if builder.get('category', None):
        categories.append(builder['category'])
      if builder.get('gatekeeper', None):
        categories.extend(builder['gatekeeper'].split('|'))
      category = '|'.join(categories)

      # Append the builder to the list.
      new_builder = {'name': builder['name'],
                     'factory': self._factories[builder['factory']],
                     'category': category,
                     'auto_reboot': builder['auto_reboot']}
      if builder['builddir']:
        new_builder['builddir'] = builder['builddir']
      if builder['slavebuilddir']:
        new_builder['slavebuilddir'] = builder['slavebuilddir']
      c['builders'].append(new_builder)

    # Process the main schedulers.
    for s_name in self._schedulers:
      scheduler = self._schedulers[s_name]
      if scheduler['type'] == 'Scheduler':
        instance = Scheduler(name=s_name,
                             branch=scheduler['branch'],
                             treeStableTimer=scheduler['treeStableTimer'],
                             builderNames=scheduler['builders'],
                             categories=scheduler['categories'])
        scheduler['instance'] = instance
        c['schedulers'].append(instance)
      elif scheduler['type'] == 'URLScheduler':
        # A unique category ties the poller's changes to this scheduler.
        ident = str(uuid.uuid4())
        c['change_source'].append(
          URLPoller(changeurl=scheduler['url'],
                    pollInterval=scheduler['pollInterval'],
                    category=ident,
                    include_revision=scheduler['include_revision']))
        instance = Scheduler(name=s_name,
                             change_filter=ChangeFilter(category=ident),
                             builderNames=scheduler['builders'])
        scheduler['instance'] = instance
        c['schedulers'].append(instance)

    # Process the dependent schedulers.
    for s_name in self._schedulers:
      scheduler = self._schedulers[s_name]
      if scheduler['type'] == 'Dependent':
        c['schedulers'].append(
          Dependent(s_name,
                    self._schedulers[scheduler['parent']]['instance'],
                    scheduler['builders']))

    # Process the triggerable schedulers.
    for s_name in self._schedulers:
      scheduler = self._schedulers[s_name]
      if scheduler['type'] == 'Triggerable':
        c['schedulers'].append(Triggerable(s_name,
                                           scheduler['builders']))

    # Process the periodic schedulers.
    for s_name in self._schedulers:
      scheduler = self._schedulers[s_name]
      if scheduler['type'] == 'Periodic':
        c['schedulers'].append(
          Periodic(s_name,
                   periodicBuildTimer=scheduler['periodicBuildTimer'],
                   builderNames=scheduler['builders']))

    # Process the nightly schedulers.
    for s_name in self._schedulers:
      scheduler = self._schedulers[s_name]
      if scheduler['type'] == 'Nightly':
        c['schedulers'].append(Nightly(s_name,
                                       branch=scheduler['branch'],
                                       hour=scheduler['hour'],
                                       builderNames=scheduler['builders']))
| eunchong/build | scripts/master/master_config.py | Python | bsd-3-clause | 7,620 | 0.005906 |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import optparse
from telemetry.core import camel_case
class ArgumentHandlerMixIn(object):
  """A structured way to handle command-line arguments.

  Subclasses register their flags in AddCommandLineArgs and validate/store
  them in ProcessCommandLineArgs. Each class thereby encapsulates its own
  arguments instead of threading an args object through the whole program.
  """

  @classmethod
  def AddCommandLineArgs(cls, parser):
    """Override to register custom command-line arguments on parser."""

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    """Override to validate and store parsed command-line arguments.

    The parser is passed in so implementations can call parser.error().
    """
class Command(ArgumentHandlerMixIn):
  """An abstraction for things that run from the command-line."""

  @classmethod
  def Name(cls):
    # Command-line name: the class name converted to snake_case.
    return camel_case.ToUnderscore(cls.__name__)

  @classmethod
  def Description(cls):
    # First line of the class docstring, or '' when there is none.
    if cls.__doc__:
      return cls.__doc__.splitlines()[0]
    else:
      return ''

  def Run(self, args):
    # Subclasses must implement; expected to return an integer exit code
    # (see main(), which caps the return value at 255).
    raise NotImplementedError()

  @classmethod
  def main(cls, args=None):
    """Main method to run this command as a standalone script."""
    parser = argparse.ArgumentParser()
    cls.AddCommandLineArgs(parser)
    args = parser.parse_args(args=args)
    cls.ProcessCommandLineArgs(parser, args)
    # Shells truncate exit codes above 255, so cap the result.
    return min(cls().Run(args), 255)
# TODO: Convert everything to argparse.
class OptparseCommand(Command):
  """Command variant built on optparse instead of argparse."""

  # Usage string appended after the command name in the parser's usage line.
  usage = ''

  @classmethod
  def CreateParser(cls):
    # '%%prog' escapes to a literal '%prog', which optparse expands to the
    # program name at display time.
    return optparse.OptionParser('%%prog %s %s' % (cls.Name(), cls.usage),
                                 description=cls.Description())

  def Run(self, args):
    raise NotImplementedError()

  @classmethod
  def main(cls, args=None):
    """Main method to run this command as a standalone script."""
    parser = cls.CreateParser()
    cls.AddCommandLineArgs(parser)
    options, args = parser.parse_args(args=args)
    # Leftover positional arguments are attached to the options object.
    options.positional_args = args
    cls.ProcessCommandLineArgs(parser, options)
    return min(cls().Run(options), 255)
class SubcommandCommand(Command):
  """Combines Commands into one big command with sub-commands.

  E.g. "svn checkout", "svn update", and "svn commit" are separate sub-commands.

  Example usage:
    class MyCommand(command_line.SubcommandCommand):
      commands = (Help, List, Run)

    if __name__ == '__main__':
      sys.exit(MyCommand.main())
  """

  # Tuple of Command subclasses; override in subclasses.
  commands = ()

  @classmethod
  def AddCommandLineArgs(cls, parser):
    # One argparse sub-parser per sub-command, named after the command;
    # the chosen command class is stored on args.command by set_defaults.
    subparsers = parser.add_subparsers()
    for command in cls.commands:
      subparser = subparsers.add_parser(
          command.Name(), help=command.Description())
      subparser.set_defaults(command=command)
      command.AddCommandLineArgs(subparser)

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    # Delegate validation to the selected sub-command.
    args.command.ProcessCommandLineArgs(parser, args)

  def Run(self, args):
    # Instantiate and run the selected sub-command.
    return args.command().Run(args)
| Jonekee/chromium.src | tools/telemetry/telemetry/core/command_line.py | Python | bsd-3-clause | 3,100 | 0.010645 |
import os
from nose.tools import assert_equals, assert_items_equal
from rsk_mind.dataset import PandasDataset
from rsk_mind.transformer import *
import pandas as pd
class CustomTransformer(Transformer):
    """Fixture transformer: negates a1/a2 and derives f1 from both."""

    class Feats():
        # Declares which input columns are transformed and which composite
        # features are derived from them.
        a1 = Feat()
        a2 = Feat()
        f1 = CompositeFeat(['a1', 'a2'])

    def get_a1(self, feat):
        # a1 expands to two output columns: the negated value and a
        # constant label column 'fa'.
        return [-float(feat), 'fa']

    def get_a2(self, feat):
        return [-float(feat)]

    def get_f1(self, a1, a2):
        # Composite feature: sum of the raw (untransformed) a1 and a2.
        return [float(a1) + float(a2)]
class TestPandasDataset:
    """Tests for PandasDataset over a chunked pandas CSV reader."""

    def __init__(self):
        # NOTE(review): the fixture is built in __init__ rather than a
        # setup method -- this relies on the test runner instantiating the
        # class per test so each test gets a fresh reader; confirm.
        self.path = os.path.join(os.getcwd(), 'tests/files/in.csv')
        self.reader = pd.read_table(self.path, sep=',', chunksize=1000)

    def tearDown(self):
        # delete variables to release memory
        del self.path
        del self.reader

    def test_init(self):
        # A new dataset starts with no header/rows/transformer set.
        _dataset = PandasDataset(self.reader)
        assert_equals(_dataset.reader, self.reader)
        assert_equals(_dataset.header, None)
        assert_equals(_dataset.rows, None)
        assert_items_equal(_dataset.transformed_rows, [])
        assert_equals(_dataset.transformer, None)
        assert_equals(_dataset.transformed_header, None)

    def test_setTransformer(self):
        _dataset = PandasDataset(self.reader)
        _transformer = CustomTransformer()
        _dataset.setTransformer(_transformer)
        assert_equals(_dataset.transformer, _transformer)

    def test_applyTransformations(self):
        # With CustomTransformer: a1 splits into two columns (a1_0, a1_1),
        # a1/a2 are negated, and f1 = a1 + a2 is appended.
        _dataset = PandasDataset(self.reader)
        _transformer = CustomTransformer()
        _dataset.setTransformer(_transformer)
        _header = ['a1_0', 'a1_1', 'a2', 'f1', 'a3', 'a4', 'target']
        _rows = [[-0.0, 'fa', -0.0, 0.0, 0, 0, 1], [-1.0, 'fa', -1.0, 2.0, 0, 1, 0], [-1.0, 'fa', -0.0, 1.0, 0, 1, 0]]
        _dataset.applyTransformations()
        assert_equals(_dataset.transformed_header, _header)
        assert_items_equal(_dataset.transformed_rows, _rows)
        assert_equals(_dataset.transformer, _transformer)

    def test_applyTransformations_Without_Transformer(self):
        # Without a transformer the data passes through unchanged.
        _dataset = PandasDataset(self.reader)
        _expected_header = ['a1', 'a2', 'a3', 'a4', 'target']
        _expected_rows = [[0, 0, 0, 0, 1], [1, 1, 0, 1, 0], [1, 0, 0, 1, 0]]
        _dataset.applyTransformations()
        assert_equals(_dataset.transformed_header, _expected_header)
        assert_items_equal(_dataset.transformed_rows, _expected_rows)
        assert_equals(_dataset.transformer, None)
| rsk-mind/rsk-mind-framework | tests/dataset/test_dataset_pandas.py | Python | mit | 2,486 | 0.000402 |
# -*- coding: utf-8 -*-
# Copyright 2014 Foxdog Studios
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
from ddp.messages.client import MethodMessage
from ddp.messages.client import MethodMessageParser
class MethodMessageParserTestCase(unittest.TestCase):
    """Unit tests for MethodMessageParser."""

    def setUp(self):
        self.parser = MethodMessageParser()

    def test_parse(self):
        # Renamed local from 'id' to 'msg_id' so the builtin id() is not
        # shadowed; the dict key sent over the wire stays 'id'.
        msg_id = 'id'
        method = 'method'
        params = [True, 1.0]
        message = self.parser.parse({'msg': 'method', 'id': msg_id,
                                     'method': method, 'params': params})
        self.assertEqual(message, MethodMessage(msg_id, method, params))
| foxdog-studios/pyddp | tests/messages/client/test_method_message_parser.py | Python | apache-2.0 | 1,258 | 0.000795 |
# Copyright 2016-2020 The GPflow Contributors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
import numpy as np
import tensorflow as tf
import gpflow
from .. import posteriors
from ..base import InputData, MeanAndVariance, Parameter, RegressionData
from ..conditionals import conditional
from ..config import default_float, default_jitter
from ..kernels import Kernel
from ..kullback_leiblers import gauss_kl
from ..likelihoods import Likelihood
from ..mean_functions import MeanFunction
from ..utilities import is_variable, triangular, triangular_size
from .model import GPModel
from .training_mixins import InternalDataTrainingLossMixin
from .util import data_input_to_tensor
class VGP_deprecated(GPModel, InternalDataTrainingLossMixin):
    r"""
    This method approximates the Gaussian process posterior using a multivariate Gaussian.

    The idea is that the posterior over the function-value vector F is
    approximated by a Gaussian, and the KL divergence is minimised between
    the approximation and the posterior.

    This implementation is equivalent to SVGP with X=Z, but is more efficient.
    The whitened representation is used to aid optimization.

    The posterior approximation is

    .. math::

       q(\mathbf f) = N(\mathbf f \,|\, \boldsymbol \mu, \boldsymbol \Sigma)

    """

    def __init__(
        self,
        data: RegressionData,
        kernel: Kernel,
        likelihood: Likelihood,
        mean_function: Optional[MeanFunction] = None,
        num_latent_gps: Optional[int] = None,
    ):
        """
        data = (X, Y) contains the input points [N, D] and the observations [N, P]
        kernel, likelihood, mean_function are appropriate GPflow objects
        """
        if num_latent_gps is None:
            num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
        super().__init__(kernel, likelihood, mean_function, num_latent_gps)

        self.data = data_input_to_tensor(data)
        X_data, _Y_data = self.data

        # N may be statically unknown (None); in that case the unconstrained
        # q_sqrt shape is left dynamic too.
        static_num_data = X_data.shape[0]
        if static_num_data is None:
            q_sqrt_unconstrained_shape = (self.num_latent_gps, None)
        else:
            q_sqrt_unconstrained_shape = (self.num_latent_gps, triangular_size(static_num_data))
        self.num_data = Parameter(tf.shape(X_data)[0], shape=[], dtype=tf.int32, trainable=False)
        # Many functions below don't like `Parameter`s:
        dynamic_num_data = tf.convert_to_tensor(self.num_data)

        # Variational mean, [N, P], initialised at zero (whitened space).
        self.q_mu = Parameter(
            tf.zeros((dynamic_num_data, self.num_latent_gps)),
            shape=(static_num_data, num_latent_gps),
        )
        # Variational Cholesky factors, [P, N, N], initialised to identity.
        q_sqrt = tf.eye(dynamic_num_data, batch_shape=[self.num_latent_gps])
        self.q_sqrt = Parameter(
            q_sqrt,
            transform=triangular(),
            unconstrained_shape=q_sqrt_unconstrained_shape,
            constrained_shape=(num_latent_gps, static_num_data, static_num_data),
        )

    # type-ignore is because of changed method signature:
    def maximum_log_likelihood_objective(self) -> tf.Tensor:  # type: ignore
        return self.elbo()

    def elbo(self) -> tf.Tensor:
        r"""
        This method computes the variational lower bound on the likelihood,
        which is:

            E_{q(F)} [ \log p(Y|F) ] - KL[ q(F) || p(F)]

        with

            q(\mathbf f) = N(\mathbf f \,|\, \boldsymbol \mu, \boldsymbol \Sigma)

        """
        X_data, Y_data = self.data
        num_data = tf.convert_to_tensor(self.num_data)

        # Get prior KL.
        KL = gauss_kl(self.q_mu, self.q_sqrt)

        # Get conditionals
        # Jitter on the diagonal keeps the Cholesky numerically stable.
        K = self.kernel(X_data) + tf.eye(num_data, dtype=default_float()) * default_jitter()
        L = tf.linalg.cholesky(K)
        # Un-whiten: f = L @ q_mu + mean(X).
        fmean = tf.linalg.matmul(L, self.q_mu) + self.mean_function(X_data)  # [NN, ND] -> ND
        q_sqrt_dnn = tf.linalg.band_part(self.q_sqrt, -1, 0)  # [D, N, N]
        L_tiled = tf.tile(tf.expand_dims(L, 0), tf.stack([self.num_latent_gps, 1, 1]))
        LTA = tf.linalg.matmul(L_tiled, q_sqrt_dnn)  # [D, N, N]
        # Marginal variances: row-wise squared norms of L @ q_sqrt.
        fvar = tf.reduce_sum(tf.square(LTA), 2)

        fvar = tf.transpose(fvar)

        # Get variational expectations.
        var_exp = self.likelihood.variational_expectations(fmean, fvar, Y_data)

        return tf.reduce_sum(var_exp) - KL

    def predict_f(
        self, Xnew: InputData, full_cov: bool = False, full_output_cov: bool = False
    ) -> MeanAndVariance:
        """Predict the latent function mean and (co)variance at Xnew.

        Conditions on the whitened variational parameters (white=True
        matches the whitened q_mu/q_sqrt representation).
        NOTE(review): full_output_cov is accepted but not forwarded to
        conditional() -- confirm this is intentional.
        """
        X_data, _Y_data = self.data
        mu, var = conditional(
            Xnew,
            X_data,
            self.kernel,
            self.q_mu,
            q_sqrt=self.q_sqrt,
            full_cov=full_cov,
            white=True,
        )
        return mu + self.mean_function(Xnew), var
class VGP_with_posterior(VGP_deprecated):
    """
    This is an implementation of VGP that provides a posterior() method that
    enables caching for faster subsequent predictions.
    """
    def posterior(
        self,
        precompute_cache: posteriors.PrecomputeCacheType = posteriors.PrecomputeCacheType.TENSOR,
    ) -> posteriors.VGPPosterior:
        """
        Create the Posterior object which contains precomputed matrices for
        faster prediction.
        precompute_cache has three settings:
        - `PrecomputeCacheType.TENSOR` (or `"tensor"`): Precomputes the cached
          quantities and stores them as tensors (which allows differentiating
          through the prediction). This is the default.
        - `PrecomputeCacheType.VARIABLE` (or `"variable"`): Precomputes the cached
          quantities and stores them as variables, which allows for updating
          their values without changing the compute graph (relevant for AOT
          compilation).
        - `PrecomputeCacheType.NOCACHE` (or `"nocache"` or `None`): Avoids
          immediate cache computation. This is useful for avoiding extraneous
          computations when you only want to call the posterior's
          `fused_predict_f` method.
        """
        X_data, _Y_data = self.data
        return posteriors.VGPPosterior(
            self.kernel,
            X_data,
            self.q_mu,
            self.q_sqrt,
            mean_function=self.mean_function,
            precompute_cache=precompute_cache,
        )
    def predict_f(
        self, Xnew: InputData, full_cov: bool = False, full_output_cov: bool = False
    ) -> MeanAndVariance:
        """
        For backwards compatibility, VGP's predict_f uses the fused (no-cache)
        computation, which is more efficient during training.
        For faster (cached) prediction, predict directly from the posterior object, i.e.,:
            model.posterior().predict_f(Xnew, ...)
        """
        # NOCACHE: nothing is precomputed, so each call recomputes from scratch.
        return self.posterior(posteriors.PrecomputeCacheType.NOCACHE).fused_predict_f(
            Xnew, full_cov=full_cov, full_output_cov=full_output_cov
        )
class VGP(VGP_with_posterior):
    """Public VGP model: VGP_deprecated behaviour plus posterior() caching."""
    # subclassed to ensure __class__ == "VGP"
    pass
def update_vgp_data(vgp: VGP_deprecated, new_data: RegressionData) -> None:
    """
    Set the data on the given VGP model, and update its variational parameters.
    As opposed to many of the other models the VGP has internal parameters whose shape depends on
    the shape of the data. This function updates the internal data of the given vgp, and updates
    the variational parameters to fit.
    This function requires that the input :param:`vgp` was created with :class:`tf.Variable`s for
    :param:`data`.
    """
    old_X_data, old_Y_data = vgp.data
    assert is_variable(old_X_data) and is_variable(
        old_Y_data
    ), "update_vgp_data requires the model to have been created with variable data."
    new_X_data, new_Y_data = new_data
    new_num_data = tf.shape(new_X_data)[0]
    # Posterior over f at the *new* inputs, under the current parameters.
    f_mu, f_cov = vgp.predict_f(new_X_data, full_cov=True)  # [N, L], [L, N, N]
    # This model is hard-coded to use the whitened representation, i.e. q_mu and q_sqrt
    # parametrize q(v), and u = f(X) = L v, where L = cholesky(K(X, X)) Hence we need to
    # back-transform from f_mu and f_cov to obtain the updated new_q_mu and new_q_sqrt:
    Knn = vgp.kernel(new_X_data, full_cov=True)  # [N, N]
    jitter_mat = default_jitter() * tf.eye(new_num_data, dtype=Knn.dtype)
    Lnn = tf.linalg.cholesky(Knn + jitter_mat)  # [N, N]
    new_q_mu = tf.linalg.triangular_solve(Lnn, f_mu)  # [N, L]
    tmp = tf.linalg.triangular_solve(Lnn[None], f_cov)  # [L, N, N], L⁻¹ f_cov
    S_v = tf.linalg.triangular_solve(Lnn[None], tf.linalg.matrix_transpose(tmp))  # [L, N, N]
    new_q_sqrt = tf.linalg.cholesky(S_v + jitter_mat)  # [L, N, N]
    # In-place updates: only valid because data/num_data/q_* are tf.Variables,
    # so the compute graph is preserved.
    old_X_data.assign(new_X_data)
    old_Y_data.assign(new_Y_data)
    vgp.num_data.assign(new_num_data)
    vgp.q_mu.assign(new_q_mu)
    vgp.q_sqrt.assign(new_q_sqrt)
class VGPOpperArchambeau(GPModel, InternalDataTrainingLossMixin):
    r"""
    This method approximates the Gaussian process posterior using a multivariate Gaussian.
    The key reference is:
    ::
      @article{Opper:2009,
          title = {The Variational Gaussian Approximation Revisited},
          author = {Opper, Manfred and Archambeau, Cedric},
          journal = {Neural Comput.},
          year = {2009},
          pages = {786--792},
      }
    The idea is that the posterior over the function-value vector F is
    approximated by a Gaussian, and the KL divergence is minimised between
    the approximation and the posterior. It turns out that the optimal
    posterior precision shares off-diagonal elements with the prior, so
    only the diagonal elements of the precision need be adjusted.
    The posterior approximation is
    .. math::
       q(\mathbf f) = N(\mathbf f \,|\, \mathbf K \boldsymbol \alpha,
                         [\mathbf K^{-1} + \textrm{diag}(\boldsymbol \lambda))^2]^{-1})
    This approach has only 2ND parameters, rather than the N + N^2 of vgp,
    but the optimization is non-convex and in practice may cause difficulty.
    """
    def __init__(
        self,
        data: RegressionData,
        kernel: Kernel,
        likelihood: Likelihood,
        mean_function: Optional[MeanFunction] = None,
        num_latent_gps: Optional[int] = None,
    ):
        """
        data = (X, Y) contains the input points [N, D] and the observations [N, P]
        kernel, likelihood, mean_function are appropriate GPflow objects
        """
        if num_latent_gps is None:
            num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
        super().__init__(kernel, likelihood, mean_function, num_latent_gps)
        self.data = data_input_to_tensor(data)
        X_data, _Y_data = self.data
        # NOTE: unlike VGP_deprecated this model requires a statically known N.
        self.num_data = X_data.shape[0]
        # Variational parameters alpha (natural-mean-like) and lambda
        # (diagonal precision adjustment, constrained positive), both [N, R].
        self.q_alpha = Parameter(np.zeros((self.num_data, self.num_latent_gps)))
        self.q_lambda = Parameter(
            np.ones((self.num_data, self.num_latent_gps)), transform=gpflow.utilities.positive()
        )
    # type-ignore is because of changed method signature:
    def maximum_log_likelihood_objective(self) -> tf.Tensor:  # type: ignore
        """Training objective: the evidence lower bound (ELBO)."""
        return self.elbo()
    def elbo(self) -> tf.Tensor:
        r"""
        q_alpha, q_lambda are variational parameters, size [N, R]
        This method computes the variational lower bound on the likelihood,
        which is:
            E_{q(F)} [ \log p(Y|F) ] - KL[ q(F) || p(F)]
        with
            q(f) = N(f | K alpha + mean, [K^-1 + diag(square(lambda))]^-1) .
        """
        X_data, Y_data = self.data
        K = self.kernel(X_data)
        K_alpha = tf.linalg.matmul(K, self.q_alpha)
        f_mean = K_alpha + self.mean_function(X_data)
        # compute the variance for each of the outputs
        I = tf.tile(
            tf.eye(self.num_data, dtype=default_float())[None, ...], [self.num_latent_gps, 1, 1]
        )
        # A = I + diag(lambda) K diag(lambda), batched over the R latent GPs.
        A = (
            I
            + tf.transpose(self.q_lambda)[:, None, ...]
            * tf.transpose(self.q_lambda)[:, :, None, ...]
            * K
        )
        L = tf.linalg.cholesky(A)
        Li = tf.linalg.triangular_solve(L, I)
        tmp = Li / tf.transpose(self.q_lambda)[:, None, ...]
        f_var = 1.0 / tf.square(self.q_lambda) - tf.transpose(tf.reduce_sum(tf.square(tmp), 1))
        # some statistics about A are used in the KL
        A_logdet = 2.0 * tf.reduce_sum(tf.math.log(tf.linalg.diag_part(L)))
        trAi = tf.reduce_sum(tf.square(Li))
        # Closed-form KL between the structured Gaussian q(f) and the GP prior.
        KL = 0.5 * (
            A_logdet
            + trAi
            - self.num_data * self.num_latent_gps
            + tf.reduce_sum(K_alpha * self.q_alpha)
        )
        v_exp = self.likelihood.variational_expectations(f_mean, f_var, Y_data)
        return tf.reduce_sum(v_exp) - KL
    def predict_f(
        self, Xnew: InputData, full_cov: bool = False, full_output_cov: bool = False
    ) -> MeanAndVariance:
        r"""
        The posterior variance of F is given by
            q(f) = N(f | K alpha + mean, [K^-1 + diag(lambda**2)]^-1)
        Here we project this to F*, the values of the GP at Xnew which is given
        by
           q(F*) = N ( F* | K_{*F} alpha + mean, K_{**} - K_{*f}[K_{ff} +
                                           diag(lambda**-2)]^-1 K_{f*} )
        Note: This model currently does not allow full output covariances
        """
        if full_output_cov:
            raise NotImplementedError
        X_data, _ = self.data
        # compute kernel things
        Kx = self.kernel(X_data, Xnew)
        K = self.kernel(X_data)
        # predictive mean
        f_mean = tf.linalg.matmul(Kx, self.q_alpha, transpose_a=True) + self.mean_function(Xnew)
        # predictive var
        A = K + tf.linalg.diag(tf.transpose(1.0 / tf.square(self.q_lambda)))
        L = tf.linalg.cholesky(A)
        Kx_tiled = tf.tile(Kx[None, ...], [self.num_latent_gps, 1, 1])
        LiKx = tf.linalg.triangular_solve(L, Kx_tiled)
        if full_cov:
            f_var = self.kernel(Xnew) - tf.linalg.matmul(LiKx, LiKx, transpose_a=True)
        else:
            f_var = self.kernel(Xnew, full_cov=False) - tf.reduce_sum(tf.square(LiKx), axis=1)
        return f_mean, tf.transpose(f_var)
| GPflow/GPflow | gpflow/models/vgp.py | Python | apache-2.0 | 14,665 | 0.002524 |
# Entry point: pull everything from the board-revision-1 module and start it.
# NOTE(review): star-import assumes brv1 defines main(); main() runs
# unconditionally on import (no __main__ guard) -- confirm that is intended.
from brv1 import *
main()
| bveina/TempSensorNode | main.py | Python | mit | 26 | 0 |
#!/usr/bin/env python
# Copyright 2015 The ChEMBL group.
# Author: Nathan Dedman <ndedman@ebi.ac.uk>
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of an S3-like storage server, using Pymongo, MongoDB and Tornado.
Useful to test features that will eventually run on S3, or if you want to
run something locally that was once running on S3.
We don't support all the features of S3, but it does work with the
standard S3 client for the most basic semantics. To use the standard
S3 client with this module:
c = S3.AWSAuthConnection("", "", server="localhost", port=8888,
is_secure=False)
c.create_bucket("mybucket")
c.put("mybucket", "mykey", "a value")
print c.get("mybucket", "mykey").body
Use s3cmd command line tool:
s3cmd mb s3://wibble
s3cmd put mytestfile.txt s3://wibble
s3cmd rb s3://wibble --force --recursive
"""
import bisect
import datetime
import hashlib
import os
import os.path
import urllib
import logging
import glob
import getpass
import re
from tornado import escape
from tornado import httpserver
from tornado import ioloop
from tornado import web
from pymongo import MongoClient
from pymongo import ASCENDING
import bson
from bson.binary import Binary
from tornado.log import enable_pretty_logging
def start(port, debug=False):
    """Launch the pymongo-backed S3 server on *port*.

    In debug mode a single process listens with pretty request logging;
    otherwise the server forks one worker process per CPU core.
    """
    server = httpserver.HTTPServer(mongoS3(debug))
    if debug:
        enable_pretty_logging()
        server.listen(port)
    else:
        # bind + start(0): fork one worker per available CPU.
        server.bind(port)
        server.start(0)
    ioloop.IOLoop.current().start()
class mongoS3(web.Application):
    """Implementation of an S3-like storage server based on MongoDB using PyMongo
    * Added compatibility with the s3cmd command line utility
    * File names of arbitrary length are supported (stored as meta data)
    * Multipart upload supported
    """
    def __init__(self, debug=False):
        # Route order matters: object routes (two capture groups) must be
        # matched before the bucket route; the absolute-URI routes at the end
        # catch s3cmd-style virtual-host requests.
        web.Application.__init__(self, [
            (r"/", RootHandler),
            (r"/([^/]+)/(.+)", ObjectHandler),
            (r"/([^/]+)/", BucketHandler),
            (r"/ping",StatusHandler),
            (r'/(favicon.ico)', web.StaticFileHandler, {"path": ""}),
            # s3cmd
            ('http://s3.amazonaws.com/', s3cmdlHandler),
            (r"(http://.+.s3.amazonaws.com.*)", s3cmdlHandler),
        ],debug=debug)
        # Lazy connect the client
        # Buckets live as collections in the "S3" database; per-bucket
        # creation timestamps live in the parallel "metadata" database.
        self.client = MongoClient(connect=False)
        self.S3 = self.client.S3
        self.metadata = self.client.metadata
class StatusHandler(web.RequestHandler):
    """Health-check endpoint: GET /ping answers 'pong' plus a UTC timestamp."""
    # BUG FIX: ("GET") is just the string "GET", not a one-element tuple.
    # It only worked by accident because `"GET" in "GET"` is True.
    SUPPORTED_METHODS = ("GET",)
    # Send a simple 'PONG' to show we're alive!
    def get(self):
        self.set_header('Content-Type', 'application/json')
        # BUG FIX: the key advertises UTC, but datetime.now() returned the
        # server's *local* time; use utcnow() so the value matches the label.
        self.finish({'response': 'pong', 'UTC': datetime.datetime.utcnow().isoformat()})
class BaseRequestHandler(web.RequestHandler):
    """Shared plumbing for the S3 handlers: bucket lookup, S3-style XML
    rendering and S3-style XML error responses."""
    SUPPORTED_METHODS = ("PUT", "GET", "DELETE", "HEAD","POST","OPTIONS")
    def _get_bucket_names(self):
        """Return the bucket (collection) names currently in the S3 database."""
        return self.application.S3.collection_names(include_system_collections=False)
    def render_xml(self, value,**kwargs):
        """Serialize a single-rooted dict to an S3-style XML document.

        ``value`` must be ``{root_tag: payload}``.  An optional ``code``
        keyword sets the HTTP status (used by ``_error``).
        """
        assert isinstance(value, dict) and len(value) == 1
        self.set_header("Content-Type", "application/xml; charset=UTF-8")
        name = value.keys()[0]
        parts = []
        parts.append('<' + escape.utf8(name) +' xmlns="http://s3.amazonaws.com/doc/2006-03-01/">')
        # The process owner doubles as the bucket/object owner identity.
        parts.append('<Owner><ID>'+getpass.getuser()+'</ID><DisplayName>'+getpass.getuser()+'</DisplayName></Owner>')
        self._render_parts(value.values()[0], parts)
        parts.append('</' + escape.utf8(name) + '>')
        if 'code' in kwargs.keys():
            self.set_status(kwargs['code'])
        self.finish('<?xml version="1.0" encoding="UTF-8"?>' +
                    ''.join(parts))
    def _render_parts(self, value, parts=None):
        """Recursively append the XML fragments for ``value`` to ``parts``.

        Strings are escaped, numbers stringified, datetimes ISO-formatted and
        dicts expanded into nested tags (lists repeat the enclosing tag).
        """
        # BUG FIX: the default was the mutable literal ``parts=[]`` which is
        # shared across calls; use the None-sentinel idiom instead.
        if parts is None:
            parts = []
        if isinstance(value, (unicode, bytes)):
            parts.append(escape.xhtml_escape(value))
        elif isinstance(value, int) or isinstance(value, long):
            parts.append(str(value))
        elif isinstance(value, datetime.datetime):
            parts.append(value.strftime("%Y-%m-%dT%H:%M:%S.000Z"))
        elif isinstance(value, dict):
            for name, subvalue in value.iteritems():
                if not isinstance(subvalue, list):
                    subvalue = [subvalue]
                for subsubvalue in subvalue:
                    parts.append('<' + escape.utf8(name) + '>')
                    self._render_parts(subsubvalue, parts)
                    parts.append('</' + escape.utf8(name) + '>')
        else:
            # BUG FIX: the message was passed as a second Exception arg
            # instead of being %-formatted into the string.
            raise Exception("Unknown S3 value type %r" % (value,))
    def _error(self,**kwargs):
        """Emit an error: S3-style XML for s3cmd requests, plain HTTPError otherwise.

        Expects ``code`` (HTTP status) and ``s3code`` (short key such as
        'NSB'/'NSK'/'BNE'/'BAE') keyword arguments; the lookup tables used
        depend on whether the concrete handler is for buckets or objects.
        """
        bucket_name = object_name = None
        if hasattr(self,'bucket_name'):
            bucket_name = self.bucket_name
        if hasattr(self,'object_name'):
            object_name = self.object_name
        s3errorcodes_bucket = {'NSK':'NoSuchKey','NSB':'NoSuchBucket','BNE':'BucketNotEmpty',"BAE":"BucketAlreadyExists"}
        s3errorcodes_object = {'NSB':'NoSuchBucket','NSK':'NoSuchKey'}
        errormessage_object = {404:'The specified key does not exist.'}
        errormessage_bucket = {404:{'NSB':'The specified bucket does not exist.'},409:{'BNE':'The bucket you tried to delete is not empty.','BAE':'The requested bucket name is not available. Please select a different name and try again.'}}
        if self.__class__.__name__== 'BucketHandler':
            s3errorcodes = s3errorcodes_bucket
            errormessage = errormessage_bucket
            bucket_name = self.bucket_name
            object_name = None
        if self.__class__.__name__== 'ObjectHandler':
            s3errorcodes = s3errorcodes_object
            errormessage = errormessage_object
        if hasattr(self,'s3cmd'):
            returnDict = {'Error':{}}
            errorDict = returnDict['Error']
            errorDict['Code'] = s3errorcodes[kwargs['s3code']]
            if self.__class__.__name__ == 'BucketHandler':
                errorDict['Message'] = errormessage[kwargs['code']][kwargs['s3code']]
            else:
                errorDict['Message'] = errormessage[kwargs['code']]
            errorDict['Resource'] = '/%s/%s' % (bucket_name,object_name)
            self.render_xml(returnDict,code=kwargs['code'])
        else:
            raise web.HTTPError(kwargs['code'])
class s3cmdlHandler(web.RequestHandler):
    """Dispatcher for s3cmd-style virtual-host URLs.

    s3cmd issues requests against ``http://<bucket>.s3.amazonaws.com/<key>``;
    this handler parses bucket/object out of the absolute URI and rebinds the
    request (by swapping ``self.__class__``) to the matching Root/Bucket/Object
    handler, so the verb methods of that handler serve the request.
    """
    def prepare(self):
        # Handle s3 urls here.  The s3cmd flag makes _error emit XML bodies.
        self.s3cmd = True
        if self.application.settings['debug']:
            print("%s %s" % (self.__class__.__name__, self.request.method))
        s3match = re.match('(?:http://)(.+)(?:.s3.amazonaws.com\/)(.*)', self.request.uri)
        self.prefix = self.get_argument("prefix", u"")
        self.delimiter = self.get_argument("delimiter", u"")
        self.partNumber = self.get_argument("partNumber", u"")
        self.uploadId = self.get_argument("uploadId", u"")
        try:
            bucket_name = s3match.group(1)
        except AttributeError:  # s3match is None: not a virtual-host URL
            bucket_name = False
        try:
            if s3match.group(2).startswith('?'):
                # BUG FIX: this previously read the undefined local name
                # ``prefix`` (a NameError swallowed by a bare except); the
                # query-string prefix stored above is what was intended.
                object_name = self.prefix
            else:
                object_name = s3match.group(2)
        except AttributeError:
            object_name = False
        if object_name:
            # BUG FIX: these were ``self.uploads``/``self.delete``; assigning
            # ``self.delete = True`` shadowed the handler's delete() method
            # and broke DELETE dispatch.  The flags were write-only, so the
            # rename is safe.
            if '?uploads' in object_name:
                self.is_multipart_upload = True
            if '?delete' in object_name:
                self.is_multi_delete = True
            object_name = object_name.split('?')[0]
        # Rebind to the concrete handler for the remainder of the request.
        if self.request.uri == 'http://s3.amazonaws.com/':
            self.__class__ = RootHandler
        if bucket_name and not object_name:
            self.__class__ = BucketHandler
            self.bucket_name = bucket_name
        if bucket_name and object_name:
            self.__class__ = ObjectHandler
            self.bucket_name = bucket_name
            self.object_name = object_name
class RootHandler(BaseRequestHandler):
    """GET / -- S3 ListAllMyBuckets: every bucket with its creation date."""
    def get(self):
        buckets = []
        bucket_names = self._get_bucket_names()
        for bucket_name in bucket_names:
            # Each bucket has exactly one metadata document holding 'created'.
            # NOTE: cursor.next() is Python-2 style; py3 would need next(cursor).
            bucket_meta = self.application.metadata[bucket_name].find()
            buckets.append({
                "Name": bucket_name,
                "CreationDate":bucket_meta.next()['created'],
            })
        self.render_xml({"ListAllMyBucketsResult": {
            "Buckets": {"Bucket": buckets},
        }})
class BucketHandler(BaseRequestHandler):
    """Bucket-level operations: list (GET), create (PUT), delete (DELETE/POST),
    existence check (HEAD).  ``self.bucket_name`` is set when dispatched via
    s3cmdlHandler; otherwise the name comes from the URL capture group."""
    def _get_bucket_cursor(self,bucket_name):
        # The bucket is simply the MongoDB collection of the same name.
        return self.application.S3[bucket_name]
    def _remove_bucket(self,bucket_name):
        # Drop both the object collection and its metadata twin.
        self.application.S3[bucket_name].drop()
        self.application.metadata[bucket_name].drop()
    def get(self, bucket_name):
        if hasattr(self,'bucket_name'):
            bucket_name = self.bucket_name
        prefix = self.get_argument("prefix", u"")
        marker = self.get_argument("marker", u"")
        max_keys = int(self.get_argument("max-keys", 50000))
        terse = int(self.get_argument("terse", 0))
        if bucket_name not in self._get_bucket_names():
            self._error(code=404,s3code='NSB')
            return
        objects = []
        contents = []
        # Exclude multipart part-documents from the listing.
        # NOTE(review): the projection {'partNumber': None} looks like it was
        # meant to be a *filter* on documents without partNumber -- verify.
        for bucket_object in self._get_bucket_cursor(bucket_name).find({'partNumber': None}):
            objects.append(bucket_object)
        start_pos = 0
        # To do:
        # Fix bisection by dict lookup
        # (bisect over a list of dicts against a string marker/prefix is
        # broken as written -- kept verbatim, see TODO above.)
        if marker:
            start_pos = bisect.bisect_right(objects, marker, start_pos)
        if prefix:
            start_pos = bisect.bisect_left(objects, prefix, start_pos)
        truncated = False
        for _object in objects[start_pos:]:
            if not _object['object_name'].startswith(prefix):
                break
            if len(contents) >= max_keys:
                # More matching keys remain than max-keys allows.
                truncated = True
                break
            c = {"Key": _object['object_name'],"ETag":_object['md5']}
            if not terse:
                c.update({
                    "LastModified":_object['added'],
                    "Size":_object['size'],
                })
            contents.append(c)
            marker = _object['object_name']
        self.render_xml({"ListBucketResult": {
            "Name": bucket_name,
            "Prefix": prefix,
            "Marker": marker,
            "MaxKeys": max_keys,
            "IsTruncated": truncated,
            "Contents": contents
        }})
    def put(self, bucket_name):
        # Create bucket and metadata
        if hasattr(self,'bucket_name'):
            bucket_name = self.bucket_name
        if bucket_name in self._get_bucket_names():
            # 409 BucketAlreadyExists
            self._error(code=409,s3code='BAE')
            return
        self.application.S3.create_collection(bucket_name)
        self.application.metadata[bucket_name].insert({"created":datetime.datetime.utcnow()})
        # Index part numbers so multipart reads/writes stay cheap.
        self.application.S3[bucket_name].ensure_index([("partNumber",ASCENDING)])
        self.finish()
    def delete(self, bucket_name):
        if hasattr(self,'bucket_name'):
            bucket_name = self.bucket_name
        if bucket_name not in self._get_bucket_names():
            self._error(code=404,s3code='NSB')
            return
        if self.application.S3[bucket_name].count() > 0:
            # S3 refuses to delete non-empty buckets: 409 BucketNotEmpty.
            self._error(code=409,s3code='BNE')
            return
        self._remove_bucket(bucket_name)
        self.set_status(204)
        self.finish()
    def post(self, bucket_name):
        # s3cmd "rb --force" path: removes the bucket without the empty check.
        if hasattr(self,'bucket_name'):
            bucket_name = self.bucket_name
        if bucket_name not in self._get_bucket_names():
            self._error(code=404,s3code='NSB')
            return
        self._remove_bucket(bucket_name)
        self.set_status(204)
        self.finish()
    def head(self,bucket_name):
        # Existence check only; body-less per HTTP HEAD semantics.
        if hasattr(self,'bucket_name'):
            bucket_name = self.bucket_name
        if bucket_name not in self._get_bucket_names():
            self._error(code=404,s3code='NSB')
            return
        self.set_header('Date', '"%s"' % datetime.datetime.utcnow())
        self.finish()
class ObjectHandler(BaseRequestHandler):
    """Object-level operations: download (GET), upload (PUT, including
    multipart parts), multipart initiate/complete (POST), delete (DELETE)
    and metadata check (HEAD).  Object bytes are stored as BSON Binary in
    the bucket's collection."""
    def _object_md5(self,bucket_object):
        # Hex md5 of the raw object bytes; doubles as the S3 ETag value.
        object_md5 = hashlib.md5()
        object_md5.update(bucket_object)
        return object_md5.hexdigest()
    def _get_bucket_object(self,**kwargs):
        """Fetch one object document by ``_id`` or ``object_name`` within
        ``bucket_name`` (all passed as keyword arguments)."""
        if '_id' in kwargs.keys():
            object_id = kwargs['_id']
            object_field = '_id'
        if 'object_name' in kwargs.keys():
            object_id = kwargs['object_name']
            object_field = 'object_name'
        if 'bucket_name' in kwargs.keys():
            bucket_name = kwargs['bucket_name']
        # NOTE(review): second argument is a projection; {'partNumber': None}
        # looks intended to exclude part-documents -- verify against pymongo.
        return self.application.S3[bucket_name].find_one({object_field:object_id},{'partNumber': None})
    def get(self,*args):
        if hasattr(self,'bucket_name') and hasattr(self,'object_name'):
            bucket_name = self.bucket_name
            object_name = self.object_name
        else:
            bucket_name,object_name = args
        prefix = self.get_argument("prefix", u"")
        marker = self.get_argument("marker", u"")
        acl = self.get_argument("acl", u"")
        object_name = urllib.unquote(object_name)
        if bucket_name not in self._get_bucket_names():
            self._error(code=404,s3code='NSB')
            return
        bucket_object = self._get_bucket_object(bucket_name=bucket_name,object_name=object_name)
        if bucket_object:
            self.set_header("Content-Type", "application/unknown")
            self.set_header('etag', '"%s"' % bucket_object['md5'])
            self.set_header("Last-Modified", bucket_object['added'])
            if 'multipart' in bucket_object.keys():
                # Multipart object: stream each stored part in sequence.
                print "MULTIPART"
                self.set_header("Content-Length",bucket_object['size'])
                for parts in self.application.S3[bucket_name].find({'object_name':object_name},{'partNumber': {'$exists':'true'}}):
                    print parts['partNumber']
                    self.write(parts['object'])
                    self.flush()
                self.finish()
            else:
                self.finish(bucket_object['object'])
        else:
            self._error(code=404,s3code='NSK')
            return
    def put(self, *args):
        if self.bucket_name and self.object_name:
            bucket_name = self.bucket_name
            object_name = self.object_name
        else:
            bucket_name,object_name = args
        original_name = urllib.unquote(object_name)
        if bucket_name not in self._get_bucket_names():
            self._error(code=404,s3code='NSB')
            return
        # Insert object and then calculate computed md5 of stored object, size, then update and return
        # If the object already exists, delete contents and add updated timestamp and update
        existance = self.application.S3[bucket_name].find({"object_name":original_name})
        if existance.count() > 0 and self.partNumber == None:
            # Overwrite path: replace bytes/md5/size on the existing document.
            existance_id = existance.next()['_id']
            update_object = Binary(self.request.body)
            object_size = update_object.__len__()
            object_md5 = self._object_md5(update_object)
            self.application.S3[bucket_name].update({"_id":existance_id},{'$set': {'object':update_object,'md5':object_md5,'updated':datetime.datetime.utcnow(),'size':object_size}})
            self.set_header('etag', '"%s"' % object_md5)
            self.finish()
            return
        # Fresh insert: a part-document when partNumber is set, else a whole object.
        if self.partNumber:
            tobeinserted = {'object_name':original_name,'object':Binary(self.request.body),'partNumber':self.partNumber}
        else:
            tobeinserted = {'object_name':original_name,'object':Binary(self.request.body)}
        inserted_object_id = self.application.S3[bucket_name].insert_one(tobeinserted).inserted_id
        inserted_object = self._get_bucket_object(bucket_name=bucket_name,_id=inserted_object_id)
        object_size = inserted_object['object'].__len__()
        object_md5 = self._object_md5(inserted_object['object'])
        self.application.S3[bucket_name].update({'_id':inserted_object_id},{'$set': {'md5':object_md5,'updated':datetime.datetime.utcnow(),'added':datetime.datetime.utcnow(),'size':object_size}})
        self.set_header('etag', '"%s"' % object_md5)
        self.finish()
    def post(self, *args):
        # Add entry into bucket and flag as multipart upload
        if self.bucket_name and self.object_name:
            bucket_name = self.bucket_name
            object_name = self.object_name
        else:
            bucket_name,object_name = args
        if bucket_name not in self._get_bucket_names():
            self._error(code=404,s3code='NSB')
            return
        original_name = urllib.unquote(object_name)
        bucket_object = Binary(self.request.body)
        object_size = bucket_object.__len__()
        object_md5 = self._object_md5(bucket_object)
        if self.uploadId:
            # We have a multipart upload, so iterate over the parts to generate the md5 hash and calculate size
            # This is the last call made after the mutlipart upload with the uploadId
            mupmd5 = hashlib.md5()
            mupsize = 0
            for mup in self.application.S3[bucket_name].find({'object_name':object_name}):
                mupmd5.update(mup['object'])
                mupsize += mup['size']
            # Marker document carrying the aggregate md5/size; its 'multipart'
            # flag makes GET stream the individual parts.
            self.application.S3[bucket_name].insert_one({'object_name':object_name,'object':bucket_object,'multipart':True,'md5':mupmd5.hexdigest(),'size':mupsize,'added':datetime.datetime.utcnow(),'updated':datetime.datetime.utcnow(),})
        self.render_xml({"InitiateMultipartUploadResult": {
            "Bucket": bucket_name,
            "Prefix": self.prefix,
            "Key":object_name,
            "UploadId":object_name
        }})
    def delete(self, *args):
        if self.bucket_name and self.object_name:
            bucket_name = self.bucket_name
            object_name = self.object_name
        else:
            bucket_name,object_name = args
        original_name = urllib.unquote(object_name)
        bucket_object = self._get_bucket_object(bucket_name=bucket_name,object_name=object_name)
        if bucket_object:
            # 204 No Content on successful delete, matching S3.
            self.set_status(204)
            self.application.S3[bucket_name].remove({"_id":bucket_object['_id']})
            self.finish()
        else:
            self._error(code=404,s3code='NSK')
            return
    def head(self, *args):
        if hasattr(self,'bucket_name') and hasattr(self,'object_name'):
            bucket_name = self.bucket_name
            object_name = self.object_name
        else:
            bucket_name,object_name = args
        object_name = urllib.unquote(object_name)
        bucket_object = self._get_bucket_object(bucket_name=bucket_name,object_name=object_name)
        if bucket_object:
            # Metadata only: etag + last-modified, no body.
            self.set_header('etag', '"%s"' % bucket_object['md5'])
            self.set_header('last-modified', '"%s"' % bucket_object['updated'])
            self.finish()
        else:
            self._error(code=404,s3code='NSK')
            return
if __name__ == "__main__":
    # Run a single server (debug off => one worker per CPU) on port 8080.
    start(8080,debug=False)
| chembl/the-S3-amongos | the-S3-amongos.py | Python | apache-2.0 | 21,343 | 0.014431 |
# -*- coding: utf-8 -*-
# (c) Nelen & Schuurmans. GPL licensed, see LICENSE.rst.
"""
Period looper for atomic scripts.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
import datetime
import math
import re
from raster_store import utils
# period parsing
# period parsing
PERIOD = re.compile('([0-9]{1,2})([mhdw])')
UNITS = {'m': 'minutes', 'h': 'hours', 'd': 'days', 'w': 'weeks'}


def parse(text):
    """
    Return a (start, stop) tuple of datetimes.

    text can be:
        start/stop: 2003/2004
        start: 2003 - now
        period: 2d - now
    """
    if '/' in text:
        # BUG FIX: ``map`` returns a lazy iterator on Python 3, which breaks
        # callers that index the result (e.g. Period); return a real tuple.
        return tuple(utils.parse_datetime(part) for part in text.split('/'))
    now = datetime.datetime.utcnow()
    match = PERIOD.match(text)
    if match:
        # Relative period such as "2d": the window ending now.
        value, unit = match.groups()
        delta = datetime.timedelta(**{UNITS[unit]: int(value)})
        return now - delta, now
    # Bare start datetime: the window from start until now.
    return utils.parse_datetime(text), now
class Period(object):
    """Iterate the 5-minute-aligned datetimes spanned by a textual period."""

    def __init__(self, text):
        raw_start, raw_stop = parse(text)

        # loop resolution
        self.step = datetime.timedelta(minutes=5)

        # Snap both endpoints onto the 5-minute grid anchored at 2000-01-01:
        # the start is rounded up, the stop rounded down, so every yielded
        # datetime lies inside the requested period.
        epoch = datetime.datetime(2000, 1, 1)
        seconds = self.step.total_seconds()
        start_offset = seconds * math.ceil(
            (raw_start - epoch).total_seconds() / seconds)
        stop_offset = seconds * math.floor(
            (raw_stop - epoch).total_seconds() / seconds)
        self.start = epoch + datetime.timedelta(seconds=start_offset)
        self.stop = epoch + datetime.timedelta(seconds=stop_offset)

    def __iter__(self):
        """Yield datetimes from start to stop inclusive, one step apart."""
        current = self.start
        while current <= self.stop:
            yield current
            current += self.step

    def __repr__(self):
        return '{} - {}'.format(self.start, self.stop)
| tomvansteijn/openradar | openradar/periods.py | Python | gpl-3.0 | 1,780 | 0 |
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import traceback
import mock
from oslo_utils import fixture as utils_fixture
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import timeutils
import six
from nova.db import api as db
from nova.objects import instance_action
from nova import test
from nova.tests.unit.objects import test_objects
# Frozen timestamp shared by the fixtures below; microseconds are stripped so
# comparisons against DB-rounded datetimes succeed.
NOW = timeutils.utcnow().replace(microsecond=0)
# Canned instance-action DB row returned by the mocked db.api calls.
fake_action = {
    'created_at': NOW,
    'deleted_at': None,
    'updated_at': None,
    'deleted': False,
    'id': 123,
    'action': 'fake-action',
    'instance_uuid': uuids.instance,
    'request_id': 'fake-request',
    'user_id': 'fake-user',
    'project_id': 'fake-project',
    'start_time': NOW,
    'finish_time': None,
    'message': 'foo',
}
# Matching instance-action-event DB row (kept alongside fake_action;
# not referenced by the tests visible in this chunk).
fake_event = {
    'created_at': NOW,
    'deleted_at': None,
    'updated_at': None,
    'deleted': False,
    'id': 123,
    'event': 'fake-event',
    'action_id': 123,
    'start_time': NOW,
    'finish_time': None,
    'result': 'fake-result',
    'traceback': 'fake-tb',
    'host': 'fake-host',
}
class _TestInstanceActionObject(object):
@mock.patch.object(db, 'action_get_by_request_id')
def test_get_by_request_id(self, mock_get):
context = self.context
mock_get.return_value = fake_action
action = instance_action.InstanceAction.get_by_request_id(
context, 'fake-uuid', 'fake-request')
self.compare_obj(action, fake_action)
mock_get.assert_called_once_with(context,
'fake-uuid', 'fake-request')
def test_pack_action_start(self):
values = instance_action.InstanceAction.pack_action_start(
self.context, 'fake-uuid', 'fake-action')
self.assertEqual(values['request_id'], self.context.request_id)
self.assertEqual(values['user_id'], self.context.user_id)
self.assertEqual(values['project_id'], self.context.project_id)
self.assertEqual(values['instance_uuid'], 'fake-uuid')
self.assertEqual(values['action'], 'fake-action')
self.assertEqual(values['start_time'].replace(tzinfo=None),
self.context.timestamp)
def test_pack_action_finish(self):
self.useFixture(utils_fixture.TimeFixture(NOW))
values = instance_action.InstanceAction.pack_action_finish(
self.context, 'fake-uuid')
self.assertEqual(values['request_id'], self.context.request_id)
self.assertEqual(values['instance_uuid'], 'fake-uuid')
self.assertEqual(values['finish_time'].replace(tzinfo=None), NOW)
@mock.patch.object(db, 'action_start')
def test_action_start(self, mock_start):
test_class = instance_action.InstanceAction
expected_packed_values = test_class.pack_action_start(
self.context, 'fake-uuid', 'fake-action')
mock_start.return_value = fake_action
action = instance_action.InstanceAction.action_start(
self.context, 'fake-uuid', 'fake-action', want_result=True)
mock_start.assert_called_once_with(self.context,
expected_packed_values)
self.compare_obj(action, fake_action)
@mock.patch.object(db, 'action_start')
def test_action_start_no_result(self, mock_start):
test_class = instance_action.InstanceAction
expected_packed_values = test_class.pack_action_start(
self.context, 'fake-uuid', 'fake-action')
mock_start.return_value = fake_action
action = instance_action.InstanceAction.action_start(
self.context, 'fake-uuid', 'fake-action', want_result=False)
mock_start.assert_called_once_with(self.context,
expected_packed_values)
self.assertIsNone(action)
@mock.patch.object(db, 'action_finish')
def test_action_finish(self, mock_finish):
self.useFixture(utils_fixture.TimeFixture(NOW))
test_class = instance_action.InstanceAction
expected_packed_values = test_class.pack_action_finish(
self.context, 'fake-uuid')
mock_finish.return_value = fake_action
action = instance_action.InstanceAction.action_finish(
self.context, 'fake-uuid', want_result=True)
mock_finish.assert_called_once_with(self.context,
expected_packed_values)
self.compare_obj(action, fake_action)
@mock.patch.object(db, 'action_finish')
def test_action_finish_no_result(self, mock_finish):
self.useFixture(utils_fixture.TimeFixture(NOW))
test_class = instance_action.InstanceAction
expected_packed_values = test_class.pack_action_finish(
self.context, 'fake-uuid')
mock_finish.return_value = fake_action
action = instance_action.InstanceAction.action_finish(
self.context, 'fake-uuid', want_result=False)
mock_finish.assert_called_once_with(self.context,
expected_packed_values)
self.assertIsNone(action)
    @mock.patch.object(db, 'action_finish')
    @mock.patch.object(db, 'action_start')
    def test_finish(self, mock_start, mock_finish):
        # Freeze time so the finish/updated timestamps are deterministic.
        self.useFixture(utils_fixture.TimeFixture(NOW))
        # Payload db.action_start should receive: identity fields come from
        # the request context, timestamps from the context's timestamp.
        expected_packed_action_start = {
            'request_id': self.context.request_id,
            'user_id': self.context.user_id,
            'project_id': self.context.project_id,
            'instance_uuid': uuids.instance,
            'action': 'fake-action',
            'start_time': self.context.timestamp,
            'updated_at': self.context.timestamp,
        }
        # Payload db.action_finish should receive: finish/updated use the
        # frozen wall-clock time (NOW), not the context timestamp.
        expected_packed_action_finish = {
            'request_id': self.context.request_id,
            'instance_uuid': uuids.instance,
            'finish_time': NOW,
            'updated_at': NOW,
        }
        mock_start.return_value = fake_action
        mock_finish.return_value = fake_action
        action = instance_action.InstanceAction.action_start(
            self.context, uuids.instance, 'fake-action')
        action.finish()
        mock_start.assert_called_once_with(self.context,
                                           expected_packed_action_start)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_action_finish)
        self.compare_obj(action, fake_action)
@mock.patch.object(db, 'actions_get')
def test_get_list(self, mock_get):
fake_actions = [dict(fake_action, id=1234),
dict(fake_action, id=5678)]
mock_get.return_value = fake_actions
obj_list = instance_action.InstanceActionList.get_by_instance_uuid(
self.context, 'fake-uuid')
for index, action in enumerate(obj_list):
self.compare_obj(action, fake_actions[index])
mock_get.assert_called_once_with(self.context, 'fake-uuid', None,
None, None)
class TestInstanceActionObject(test_objects._LocalTest,
                               _TestInstanceActionObject):
    # Runs the shared InstanceAction tests against in-process objects.
    pass
class TestRemoteInstanceActionObject(test_objects._RemoteTest,
                                     _TestInstanceActionObject):
    # Runs the shared InstanceAction tests through the conductor RPC path.
    pass
class _TestInstanceActionEventObject(object):
    """Shared tests for InstanceActionEvent; mixed into local/remote cases."""

    @mock.patch.object(db, 'action_event_get_by_id')
    def test_get_by_id(self, mock_get):
        mock_get.return_value = fake_event
        event = instance_action.InstanceActionEvent.get_by_id(
            self.context, 'fake-action-id', 'fake-event-id')
        self.compare_obj(event, fake_event)
        mock_get.assert_called_once_with(self.context,
                                         'fake-action-id', 'fake-event-id')

    @mock.patch.object(db, 'action_event_start')
    def test_event_start(self, mock_start):
        # Freeze time so the packed payload is deterministic.
        self.useFixture(utils_fixture.TimeFixture(NOW))
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_start(
            self.context, 'fake-uuid', 'fake-event')
        mock_start.return_value = fake_event
        event = instance_action.InstanceActionEvent.event_start(
            self.context, 'fake-uuid', 'fake-event', want_result=True)
        mock_start.assert_called_once_with(self.context,
                                           expected_packed_values)
        self.compare_obj(event, fake_event)

    @mock.patch.object(db, 'action_event_start')
    def test_event_start_no_result(self, mock_start):
        # want_result=False: the DB call is made but nothing is returned.
        self.useFixture(utils_fixture.TimeFixture(NOW))
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_start(
            self.context, 'fake-uuid', 'fake-event')
        mock_start.return_value = fake_event
        event = instance_action.InstanceActionEvent.event_start(
            self.context, 'fake-uuid', 'fake-event', want_result=False)
        mock_start.assert_called_once_with(self.context,
                                           expected_packed_values)
        self.assertIsNone(event)

    @mock.patch.object(db, 'action_event_finish')
    def test_event_finish(self, mock_finish):
        self.useFixture(utils_fixture.TimeFixture(NOW))
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_finish(
            self.context, 'fake-uuid', 'fake-event')
        # finish_time is stamped with the frozen wall-clock time.
        expected_packed_values['finish_time'] = NOW
        mock_finish.return_value = fake_event
        event = instance_action.InstanceActionEvent.event_finish(
            self.context, 'fake-uuid', 'fake-event', want_result=True)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.compare_obj(event, fake_event)

    @mock.patch.object(db, 'action_event_finish')
    def test_event_finish_no_result(self, mock_finish):
        self.useFixture(utils_fixture.TimeFixture(NOW))
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_finish(
            self.context, 'fake-uuid', 'fake-event')
        expected_packed_values['finish_time'] = NOW
        mock_finish.return_value = fake_event
        event = instance_action.InstanceActionEvent.event_finish(
            self.context, 'fake-uuid', 'fake-event', want_result=False)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.assertIsNone(event)

    @mock.patch.object(traceback, 'format_tb')
    @mock.patch.object(db, 'action_event_finish')
    def test_event_finish_with_failure(self, mock_finish, mock_tb):
        # exc_tb is already a string here, so format_tb is never needed.
        self.useFixture(utils_fixture.TimeFixture(NOW))
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_finish(
            self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb')
        expected_packed_values['finish_time'] = NOW
        mock_finish.return_value = fake_event
        event = test_class.event_finish_with_failure(
            self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb',
            want_result=True)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.compare_obj(event, fake_event)

    @mock.patch.object(traceback, 'format_tb')
    @mock.patch.object(db, 'action_event_finish')
    def test_event_finish_with_failure_legacy(self, mock_finish, mock_tb):
        # Tests that exc_tb is serialized when it's not a string type.
        mock_tb.return_value = 'fake-tb'
        self.useFixture(utils_fixture.TimeFixture(NOW))
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_finish(
            self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb')
        expected_packed_values['finish_time'] = NOW
        mock_finish.return_value = fake_event
        fake_tb = mock.sentinel.fake_tb
        event = test_class.event_finish_with_failure(
            self.context, 'fake-uuid', 'fake-event', exc_val='val',
            exc_tb=fake_tb, want_result=True)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.compare_obj(event, fake_event)
        mock_tb.assert_called_once_with(fake_tb)

    @mock.patch.object(db, 'action_event_finish')
    def test_event_finish_with_failure_legacy_unicode(self, mock_finish):
        # Tests that traceback.format_tb is not called when exc_tb is unicode.
        self.useFixture(utils_fixture.TimeFixture(NOW))
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_finish(
            self.context, 'fake-uuid', 'fake-event', 'val',
            six.text_type('fake-tb'))
        expected_packed_values['finish_time'] = NOW
        mock_finish.return_value = fake_event
        event = test_class.event_finish_with_failure(
            self.context, 'fake-uuid', 'fake-event', exc_val='val',
            exc_tb=six.text_type('fake-tb'), want_result=True)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.compare_obj(event, fake_event)

    @mock.patch.object(traceback, 'format_tb')
    @mock.patch.object(db, 'action_event_finish')
    def test_event_finish_with_failure_no_result(self, mock_finish, mock_tb):
        # Tests that traceback.format_tb is not called when exc_tb is a str
        # and want_result is False, so no event should come back.
        mock_tb.return_value = 'fake-tb'
        self.useFixture(utils_fixture.TimeFixture(NOW))
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_finish(
            self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb')
        expected_packed_values['finish_time'] = NOW
        mock_finish.return_value = fake_event
        event = test_class.event_finish_with_failure(
            self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb',
            want_result=False)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.assertIsNone(event)
        self.assertFalse(mock_tb.called)

    @mock.patch.object(db, 'action_events_get')
    def test_get_by_action(self, mock_get):
        fake_events = [dict(fake_event, id=1234),
                       dict(fake_event, id=5678)]
        mock_get.return_value = fake_events
        obj_list = instance_action.InstanceActionEventList.get_by_action(
            self.context, 'fake-action-id')
        for index, event in enumerate(obj_list):
            self.compare_obj(event, fake_events[index])
        mock_get.assert_called_once_with(self.context, 'fake-action-id')

    @mock.patch('nova.objects.instance_action.InstanceActionEvent.'
                'pack_action_event_finish')
    @mock.patch('traceback.format_tb')
    def test_event_finish_with_failure_serialized(self, mock_format,
                                                  mock_pack):
        # Even when packing raises, exc_val/exc_tb must have been
        # serialized (str()/format_tb) before the pack call.
        mock_format.return_value = 'traceback'
        mock_pack.side_effect = test.TestingException
        self.assertRaises(
            test.TestingException,
            instance_action.InstanceActionEvent.event_finish_with_failure,
            self.context, 'fake-uuid', 'fake-event',
            exc_val=mock.sentinel.exc_val,
            exc_tb=mock.sentinel.exc_tb)
        mock_pack.assert_called_once_with(self.context, 'fake-uuid',
                                          'fake-event',
                                          exc_val=str(mock.sentinel.exc_val),
                                          exc_tb='traceback')
        mock_format.assert_called_once_with(mock.sentinel.exc_tb)
class TestInstanceActionEventObject(test_objects._LocalTest,
                                    _TestInstanceActionEventObject):
    # Runs the shared InstanceActionEvent tests against in-process objects.
    pass
class TestRemoteInstanceActionEventObject(test_objects._RemoteTest,
                                          _TestInstanceActionEventObject):
    # Runs the shared InstanceActionEvent tests through the conductor RPC path.
    pass
| mikalstill/nova | nova/tests/unit/objects/test_instance_action.py | Python | apache-2.0 | 16,824 | 0.000178 |
"""
Low level *Skype for Linux* interface implemented using *XWindows messaging*.
Uses direct *Xlib* calls through *ctypes* module.
This module handles the options that you can pass to `Skype.__init__`
for Linux machines when the transport is set to *X11*.
No further options are currently supported.
Warning PyGTK framework users
=============================
The multithreaded architecture of Skype4Py requires a special treatment
if the Xlib transport is combined with PyGTK GUI framework.
The following code has to be called at the top of your script, before
PyGTK is even imported.
.. python::
from Skype4Py.api.posix_x11 import threads_init
threads_init()
This function enables multithreading support in Xlib and GDK. If not done
here, this is enabled for Xlib library when the `Skype` object is instantiated.
If your script imports the PyGTK module, doing this so late may lead to a
segmentation fault when the GUI is shown on the screen.
A remedy is to enable the multithreading support before PyGTK is imported
by calling the ``threads_init`` function.
"""
__docformat__ = 'restructuredtext en'
import sys
import threading
import os
from ctypes import *
from ctypes.util import find_library
import time
import logging
from Skype4Py.api import Command, SkypeAPIBase, \
timeout2float, finalize_opts
from Skype4Py.enums import *
from Skype4Py.errors import SkypeAPIError
__all__ = ['SkypeAPI', 'threads_init']
# The Xlib Programming Manual:
# ============================
# http://tronche.com/gui/x/xlib/
# some Xlib constants (values from <X11/X.h>)
PropertyChangeMask = 0x400000
PropertyNotify = 28
ClientMessage = 33
PropertyNewValue = 0
PropertyDelete = 1

# some Xlib types, expressed as ctypes aliases
c_ulong_p = POINTER(c_ulong)
DisplayP = c_void_p
Atom = c_ulong
AtomP = c_ulong_p
XID = c_ulong
Window = XID
Bool = c_int
Status = c_int
Time = c_ulong
c_int_p = POINTER(c_int)

# should the structures be aligned to 8 bytes?
# (True on LP64 platforms, where long is 8 bytes but int is 4)
align = (sizeof(c_long) == 8 and sizeof(c_int) == 4)
# some Xlib structures
class XClientMessageEvent(Structure):
    # Mirrors Xlib's XClientMessageEvent; the pad fields reproduce the
    # compiler padding present on LP64 platforms so the layout matches C.
    if align:
        _fields_ = [('type', c_int),
                    ('pad0', c_int),
                    ('serial', c_ulong),
                    ('send_event', Bool),
                    ('pad1', c_int),
                    ('display', DisplayP),
                    ('window', Window),
                    ('message_type', Atom),
                    ('format', c_int),
                    ('pad2', c_int),
                    ('data', c_char * 20)]
    else:
        _fields_ = [('type', c_int),
                    ('serial', c_ulong),
                    ('send_event', Bool),
                    ('display', DisplayP),
                    ('window', Window),
                    ('message_type', Atom),
                    ('format', c_int),
                    ('data', c_char * 20)]
class XPropertyEvent(Structure):
    # Mirrors Xlib's XPropertyEvent; pad fields match LP64 compiler padding.
    if align:
        _fields_ = [('type', c_int),
                    ('pad0', c_int),
                    ('serial', c_ulong),
                    ('send_event', Bool),
                    ('pad1', c_int),
                    ('display', DisplayP),
                    ('window', Window),
                    ('atom', Atom),
                    ('time', Time),
                    ('state', c_int),
                    ('pad2', c_int)]
    else:
        _fields_ = [('type', c_int),
                    ('serial', c_ulong),
                    ('send_event', Bool),
                    ('display', DisplayP),
                    ('window', Window),
                    ('atom', Atom),
                    ('time', Time),
                    ('state', c_int)]
class XErrorEvent(Structure):
    # Mirrors Xlib's XErrorEvent; pad field matches LP64 compiler padding.
    if align:
        _fields_ = [('type', c_int),
                    ('pad0', c_int),
                    ('display', DisplayP),
                    ('resourceid', XID),
                    ('serial', c_ulong),
                    ('error_code', c_ubyte),
                    ('request_code', c_ubyte),
                    ('minor_code', c_ubyte)]
    else:
        _fields_ = [('type', c_int),
                    ('display', DisplayP),
                    ('resourceid', XID),
                    ('serial', c_ulong),
                    ('error_code', c_ubyte),
                    ('request_code', c_ubyte),
                    ('minor_code', c_ubyte)]
class XEvent(Union):
    # Mirrors Xlib's XEvent union; only the members used by this module are
    # mapped, plus the canonical 24-long padding that fixes the union size.
    #
    # The original code branched on `align` here, but both branches declared
    # the exact same field list (unions carry no inter-member padding), so
    # the switch was redundant and has been removed.
    _fields_ = [('type', c_int),
                ('xclient', XClientMessageEvent),
                ('xproperty', XPropertyEvent),
                ('xerror', XErrorEvent),
                ('pad', c_long * 24)]
XEventP = POINTER(XEvent)
if getattr(sys, 'skype4py_setup', False):
    # we get here if we're building docs; to let the module import without
    # exceptions, we emulate the X11 library using a class:
    class X(object):
        # Absorbs any attribute access, assignment or call without effect.
        def __getattr__(self, name):
            return self
        def __setattr__(self, name, value):
            pass
        def __call__(self, *args, **kwargs):
            pass
    x11 = X()
else:
    # load X11 library (Xlib)
    libpath = find_library('X11')
    if not libpath:
        raise ImportError('Could not find X11 library')
    x11 = cdll.LoadLibrary(libpath)
    del libpath

# setup Xlib function prototypes; declaring argtypes/restype lets ctypes
# validate the arguments and convert return values correctly on 64-bit
# platforms (where the default c_int return type would truncate pointers)
x11.XCloseDisplay.argtypes = (DisplayP,)
x11.XCloseDisplay.restype = None
x11.XCreateSimpleWindow.argtypes = (DisplayP, Window, c_int, c_int, c_uint,
        c_uint, c_uint, c_ulong, c_ulong)
x11.XCreateSimpleWindow.restype = Window
x11.XDefaultRootWindow.argtypes = (DisplayP,)
x11.XDefaultRootWindow.restype = Window
x11.XDeleteProperty.argtypes = (DisplayP, Window, Atom)
x11.XDeleteProperty.restype = None
x11.XDestroyWindow.argtypes = (DisplayP, Window)
x11.XDestroyWindow.restype = None
x11.XFree.argtypes = (c_void_p,)
x11.XFree.restype = None
x11.XGetAtomName.argtypes = (DisplayP, Atom)
x11.XGetAtomName.restype = c_void_p
x11.XGetErrorText.argtypes = (DisplayP, c_int, c_char_p, c_int)
x11.XGetErrorText.restype = None
x11.XGetWindowProperty.argtypes = (DisplayP, Window, Atom, c_long, c_long, Bool,
        Atom, AtomP, c_int_p, c_ulong_p, c_ulong_p, POINTER(POINTER(Window)))
x11.XGetWindowProperty.restype = c_int
x11.XInitThreads.argtypes = ()
x11.XInitThreads.restype = Status
x11.XInternAtom.argtypes = (DisplayP, c_char_p, Bool)
x11.XInternAtom.restype = Atom
x11.XNextEvent.argtypes = (DisplayP, XEventP)
x11.XNextEvent.restype = None
x11.XOpenDisplay.argtypes = (c_char_p,)
x11.XOpenDisplay.restype = DisplayP
x11.XPending.argtypes = (DisplayP,)
x11.XPending.restype = c_int
x11.XSelectInput.argtypes = (DisplayP, Window, c_long)
x11.XSelectInput.restype = None
x11.XSendEvent.argtypes = (DisplayP, Window, Bool, c_long, XEventP)
x11.XSendEvent.restype = Status
x11.XLockDisplay.argtypes = (DisplayP,)
x11.XLockDisplay.restype = None
x11.XUnlockDisplay.argtypes = (DisplayP,)
x11.XUnlockDisplay.restype = None
def threads_init(gtk=True):
    """Enables multithreading support in Xlib and PyGTK.
    See the module docstring for more info.
    :Parameters:
      gtk : bool
        May be set to False to skip the PyGTK module.
    """
    # enable X11 multithreading
    x11.XInitThreads()
    if gtk:
        # deferred import so the module itself works without PyGTK installed
        from gtk.gdk import threads_init
        threads_init()
class SkypeAPI(SkypeAPIBase):
    """Skype API backend using X11 ClientMessage events as the transport.

    A dedicated thread (the `run` method) pumps the X event queue; commands
    are sent to the Skype client window as a sequence of ClientMessage
    events carrying at most 20 bytes of UTF-8 data each.
    """

    def __init__(self, opts):
        self.logger = logging.getLogger('Skype4Py.api.posix_x11.SkypeAPI')
        SkypeAPIBase.__init__(self)
        finalize_opts(opts)
        # initialize threads if not done already by the user
        threads_init(gtk=False)
        # init Xlib display
        self.disp = x11.XOpenDisplay(None)
        if not self.disp:
            raise SkypeAPIError('Could not open XDisplay')
        self.win_root = x11.XDefaultRootWindow(self.disp)
        self.win_self = x11.XCreateSimpleWindow(self.disp, self.win_root,
            100, 100, 100, 100, 1, 0, 0)
        # watch the root window so we notice Skype (de)registering its
        # '_SKYPE_INSTANCE' property
        x11.XSelectInput(self.disp, self.win_root, PropertyChangeMask)
        self.win_skype = self.get_skype()
        ctrl = 'SKYPECONTROLAPI_MESSAGE'
        self.atom_msg = x11.XInternAtom(self.disp, ctrl, False)
        self.atom_msg_begin = x11.XInternAtom(self.disp, ctrl + '_BEGIN', False)
        self.loop_event = threading.Event()
        self.loop_timeout = 0.0001
        self.loop_break = False

    def __del__(self):
        # x11 may already be gone during interpreter shutdown
        if x11:
            if hasattr(self, 'disp'):
                if hasattr(self, 'win_self'):
                    x11.XDestroyWindow(self.disp, self.win_self)
                x11.XCloseDisplay(self.disp)

    def run(self):
        """Event loop thread: receives Skype messages and watches attachment."""
        self.logger.info('thread started')
        # main loop
        event = XEvent()
        data = ''
        while not self.loop_break and x11:
            while x11.XPending(self.disp):
                self.loop_timeout = 0.0001
                x11.XNextEvent(self.disp, byref(event))
                # events we get here are already prefiltered by the predicate function
                if event.type == ClientMessage:
                    if event.xclient.format == 8:
                        if event.xclient.message_type == self.atom_msg_begin:
                            data = str(event.xclient.data)
                        elif event.xclient.message_type == self.atom_msg:
                            if data != '':
                                data += str(event.xclient.data)
                            else:
                                self.logger.warning('Middle of Skype X11 message received with no beginning!')
                        else:
                            continue
                        # a chunk shorter than 20 bytes marks the end of a message
                        if len(event.xclient.data) != 20 and data:
                            self.notify(data.decode('utf-8'))
                            data = ''
                elif event.type == PropertyNotify:
                    namep = x11.XGetAtomName(self.disp, event.xproperty.atom)
                    is_inst = (c_char_p(namep).value == '_SKYPE_INSTANCE')
                    x11.XFree(namep)
                    if is_inst:
                        if event.xproperty.state == PropertyNewValue:
                            self.win_skype = self.get_skype()
                            # changing attachment status can cause an event handler to be fired, in
                            # turn it could try to call Attach() and doing this immediately seems to
                            # confuse Skype (command '#0 NAME xxx' returns '#0 CONNSTATUS OFFLINE' :D);
                            # to fix this, we give Skype some time to initialize itself
                            time.sleep(1.0)
                            self.set_attachment_status(apiAttachAvailable)
                        elif event.xproperty.state == PropertyDelete:
                            self.win_skype = None
                            self.set_attachment_status(apiAttachNotAvailable)
            # exponential back-off of the polling interval, reset on activity
            self.loop_event.wait(self.loop_timeout)
            if self.loop_event.isSet():
                self.loop_timeout = 0.0001
            elif self.loop_timeout < 1.0:
                self.loop_timeout *= 2
            self.loop_event.clear()
        self.logger.info('thread finished')

    def get_skype(self):
        """Returns Skype window ID or None if Skype not running."""
        skype_inst = x11.XInternAtom(self.disp, '_SKYPE_INSTANCE', True)
        if not skype_inst:
            return
        type_ret = Atom()
        format_ret = c_int()
        nitems_ret = c_ulong()
        bytes_after_ret = c_ulong()
        winp = pointer(Window())
        fail = x11.XGetWindowProperty(self.disp, self.win_root, skype_inst,
                            0, 1, False, 33, byref(type_ret), byref(format_ret),
                            byref(nitems_ret), byref(bytes_after_ret), byref(winp))
        if not fail and format_ret.value == 32 and nitems_ret.value == 1:
            return winp.contents.value

    def close(self):
        """Stop the event loop thread and release the base class resources."""
        self.loop_break = True
        self.loop_event.set()
        while self.isAlive():
            time.sleep(0.01)
        SkypeAPIBase.close(self)

    def set_friendly_name(self, friendly_name):
        SkypeAPIBase.set_friendly_name(self, friendly_name)
        if self.attachment_status == apiAttachSuccess:
            # reattach with the new name
            self.set_attachment_status(apiAttachUnknown)
            self.attach()

    def attach(self, timeout, wait=True):
        """Attach to a running Skype client, negotiating name and protocol."""
        if self.attachment_status == apiAttachSuccess:
            return
        self.acquire()
        try:
            if not self.isAlive():
                try:
                    self.start()
                except AssertionError:
                    raise SkypeAPIError('Skype API closed')
            try:
                self.wait = True
                t = threading.Timer(timeout2float(timeout), lambda: setattr(self, 'wait', False))
                if wait:
                    t.start()
                while self.wait:
                    self.win_skype = self.get_skype()
                    if self.win_skype is not None:
                        break
                    else:
                        time.sleep(1.0)
                else:
                    raise SkypeAPIError('Skype attach timeout')
            finally:
                t.cancel()
            command = Command('NAME %s' % self.friendly_name, '', True, timeout)
            self.release()
            try:
                self.send_command(command, True)
            finally:
                self.acquire()
            if command.Reply != 'OK':
                self.win_skype = None
                self.set_attachment_status(apiAttachRefused)
                return
            self.set_attachment_status(apiAttachSuccess)
        finally:
            self.release()
        command = Command('PROTOCOL %s' % self.protocol, Blocking=True)
        self.send_command(command, True)
        self.protocol = int(command.Reply.rsplit(None, 1)[-1])

    def is_running(self):
        return (self.get_skype() is not None)

    def startup(self, minimized, nosplash):
        # options are not supported as of Skype 1.4 Beta for Linux
        if not self.is_running():
            if os.fork() == 0: # we're the child
                os.setsid()
                # BUGFIX: exec requires argv[0] in addition to the executable
                # name; exec'ing with an empty argument list is invalid (and
                # raises ValueError on modern Pythons).
                os.execlp('skype', 'skype')

    def shutdown(self):
        from signal import SIGINT
        fh = os.popen('ps -o %p --no-heading -C skype')
        pid = fh.readline().strip()
        fh.close()
        if pid:
            os.kill(int(pid), SIGINT)
            # Skype sometimes doesn't delete the '_SKYPE_INSTANCE' property
            skype_inst = x11.XInternAtom(self.disp, '_SKYPE_INSTANCE', True)
            if skype_inst:
                x11.XDeleteProperty(self.disp, self.win_root, skype_inst)
            self.win_skype = None
            self.set_attachment_status(apiAttachNotAvailable)

    def send_command(self, command, force=False):
        """Send a command to Skype, attaching first if necessary."""
        if self.attachment_status != apiAttachSuccess and not force:
            self.attach(command.Timeout)
        self.push_command(command)
        self.notifier.sending_command(command)
        cmd = u'#%d %s' % (command.Id, command.Command)
        self.logger.debug('sending %s', repr(cmd))
        if command.Blocking:
            command._event = bevent = threading.Event()
        else:
            command._timer = timer = threading.Timer(command.timeout2float(), self.pop_command, (command.Id,))
        event = XEvent()
        event.xclient.type = ClientMessage
        event.xclient.display = self.disp
        event.xclient.window = self.win_self
        event.xclient.message_type = self.atom_msg_begin
        event.xclient.format = 8
        # send the command in 20-byte chunks; the first chunk uses the
        # '_BEGIN' atom, the rest the plain message atom
        cmd = cmd.encode('utf-8') + '\x00'
        for i in xrange(0, len(cmd), 20):
            event.xclient.data = cmd[i:i + 20]
            x11.XSendEvent(self.disp, self.win_skype, False, 0, byref(event))
            event.xclient.message_type = self.atom_msg
        self.loop_event.set()
        if command.Blocking:
            bevent.wait(command.timeout2float())
            if not bevent.isSet():
                raise SkypeAPIError('Skype command timeout')
        else:
            timer.start()

    def notify(self, cmd):
        self.logger.debug('received %s', repr(cmd))
        # Called by main loop for all received Skype commands.
        if cmd.startswith(u'#'):
            p = cmd.find(u' ')
            command = self.pop_command(int(cmd[1:p]))
            if command is not None:
                command.Reply = cmd[p + 1:]
                if command.Blocking:
                    command._event.set()
                else:
                    command._timer.cancel()
                self.notifier.reply_received(command)
            else:
                self.notifier.notification_received(cmd[p + 1:])
        else:
            self.notifier.notification_received(cmd)
| jpablobr/emacs.d | vendor/misc/emacs-skype/build/Skype4Py/Skype4Py/api/posix_x11.py | Python | gpl-3.0 | 17,077 | 0.001581 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2017 Prof. William H. Green (whgreen@mit.edu),
# Prof. Richard H. West (r.west@neu.edu) and the RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
"""
This module contains classes and functions that are used by multiple modules
in this subpackage.
"""
import itertools
import logging
import warnings
from rmgpy.data.base import LogicNode
from rmgpy.reaction import Reaction
from rmgpy.molecule import Group, Molecule
from rmgpy.species import Species
from rmgpy.exceptions import DatabaseError, KineticsError
################################################################################
def saveEntry(f, entry):
    """
    Save an `entry` in the kinetics database by writing a string to
    the given file object `f`.
    """
    from rmgpy.cantherm.output import prettify

    def sortEfficiencies(efficiencies0):
        # Normalize efficiency keys to SMILES strings and return the
        # (smiles, efficiency) pairs sorted by SMILES for stable output.
        efficiencies = {}
        for mol, eff in efficiencies0.iteritems():
            if isinstance(mol, str):
                # already in SMILES string format
                smiles = mol
            else:
                smiles = mol.toSMILES()
            efficiencies[smiles] = eff
        keys = efficiencies.keys()
        keys.sort()
        return [(key, efficiencies[key]) for key in keys]

    f.write('entry(\n')
    f.write('    index = {0:d},\n'.format(entry.index))
    if entry.label != '':
        f.write('    label = "{0}",\n'.format(entry.label))

    # Entries for kinetic rules, libraries, training reactions
    # and depositories will have a Reaction object for its item
    if isinstance(entry.item, Reaction):
        # Write out additional data if depository or library
        # kinetic rules would have a Group object for its reactants instead of Species
        if isinstance(entry.item.reactants[0], Species):
            # Add degeneracy if the reaction is coming from a depository or kinetics library
            f.write('    degeneracy = {0:.1f},\n'.format(entry.item.degeneracy))
        if entry.item.duplicate:
            f.write('    duplicate = {0!r},\n'.format(entry.item.duplicate))
        if not entry.item.reversible:
            f.write('    reversible = {0!r},\n'.format(entry.item.reversible))
    # Entries for groups with have a group or logicNode for its item
    elif isinstance(entry.item, Group):
        f.write('    group = \n')
        f.write('"""\n')
        f.write(entry.item.toAdjacencyList())
        f.write('""",\n')
    elif isinstance(entry.item, LogicNode):
        f.write('    group = "{0}",\n'.format(entry.item))
    else:
        raise DatabaseError("Encountered unexpected item of type {0} while saving database.".format(entry.item.__class__))

    # Write kinetics
    if isinstance(entry.data, str):
        f.write('    kinetics = "{0}",\n'.format(entry.data))
    elif entry.data is not None:
        # Temporarily replace the efficiencies dict with a sorted one so
        # repr() output is deterministic; restore it afterwards.
        efficiencies = None
        if hasattr(entry.data, 'efficiencies'):
            efficiencies = entry.data.efficiencies
            entry.data.efficiencies = dict(sortEfficiencies(entry.data.efficiencies))
        kinetics = prettify(repr(entry.data))
        kinetics = '    kinetics = {0},\n'.format(kinetics.replace('\n', '\n    '))
        f.write(kinetics)
        if hasattr(entry.data, 'efficiencies'):
            entry.data.efficiencies = efficiencies
    else:
        f.write('    kinetics = None,\n')

    # Write reference; the last line of the pretty repr is replaced by the
    # closing '    ),' so the reference nests inside the entry() call.
    if entry.reference is not None:
        reference = entry.reference.toPrettyRepr()
        lines = reference.splitlines()
        f.write('    reference = {0}\n'.format(lines[0]))
        for line in lines[1:-1]:
            f.write('    {0}\n'.format(line))
        # BUGFIX: dropped a no-op .format(lines[0]) call on a string with no
        # placeholders; the written text is unchanged.
        f.write('    ),\n')

    if entry.referenceType != "":
        f.write('    referenceType = "{0}",\n'.format(entry.referenceType))
    if entry.rank is not None:
        f.write('    rank = {0},\n'.format(entry.rank))

    if entry.shortDesc.strip() !='':
        f.write('    shortDesc = u"""')
        try:
            f.write(entry.shortDesc.encode('utf-8'))
        except Exception:
            # BUGFIX: was a bare `except:` which also swallowed SystemExit
            # and KeyboardInterrupt; fall back to an ASCII-only description.
            f.write(entry.shortDesc.strip().encode('ascii', 'ignore')+ "\n")
        f.write('""",\n')
    if entry.longDesc.strip() !='':
        f.write('    longDesc = \n')
        f.write('u"""\n')
        try:
            f.write(entry.longDesc.strip().encode('utf-8') + "\n")
        except Exception:
            # BUGFIX: was a bare `except:`; see shortDesc above.
            f.write(entry.longDesc.strip().encode('ascii', 'ignore')+ "\n")
        f.write('""",\n')

    f.write(')\n\n')
def filter_reactions(reactants, products, reactionList):
    """
    Remove any reactions from the given `reactionList` whose reactants do
    not involve all the given `reactants` or whose products do not involve
    all the given `products`. This method checks both forward and reverse
    directions, and only filters out reactions that don't match either.
    reactants and products can be either molecule or species objects
    """
    warnings.warn("The filter_reactions method is no longer used and may be removed in a future version.", DeprecationWarning)

    # Convert from molecules to species and generate resonance isomers.
    reactants = ensure_species(reactants, resonance=True)
    products = ensure_species(products, resonance=True)

    # Iterate over a copy so reactions can be removed from the result list
    # while the original list is traversed.
    reactions = reactionList[:]

    for reaction in reactionList:
        # Forward direction
        # Each required reactant/product must be matched one-to-one: a
        # matched entry is removed so duplicates must be matched separately.
        reactants0 = [r for r in reaction.reactants]
        for reactant in reactants:
            for reactant0 in reactants0:
                if reactant.isIsomorphic(reactant0):
                    reactants0.remove(reactant0)
                    break
        products0 = [p for p in reaction.products]
        for product in products:
            for product0 in products0:
                if product.isIsomorphic(product0):
                    products0.remove(product0)
                    break
        # Forward match only when every required species was consumed.
        forward = not (len(reactants0) != 0 or len(products0) != 0)

        # Reverse direction: compare the given reactants against the
        # reaction's products and vice versa.
        reactants0 = [r for r in reaction.products]
        for reactant in reactants:
            for reactant0 in reactants0:
                if reactant.isIsomorphic(reactant0):
                    reactants0.remove(reactant0)
                    break
        products0 = [p for p in reaction.reactants]
        for product in products:
            for product0 in products0:
                if product.isIsomorphic(product0):
                    products0.remove(product0)
                    break
        reverse = not (len(reactants0) != 0 or len(products0) != 0)

        # Keep the reaction if it matches in either direction.
        if not forward and not reverse:
            reactions.remove(reaction)

    return reactions
def ensure_species(input_list, resonance=False, keepIsomorphic=False):
    """
    Given an input list of molecules or species, return a list with only
    species objects.
    """
    species_list = []
    for entry in input_list:
        # Wrap bare molecules; pass species through unchanged.
        if isinstance(entry, Species):
            species = entry
        elif isinstance(entry, Molecule):
            species = Species(molecule=[entry])
        else:
            raise TypeError('Only Molecule or Species objects can be handled.')
        if resonance:
            species.generate_resonance_structures(keepIsomorphic=keepIsomorphic)
        species_list.append(species)
    return species_list
def generate_molecule_combos(input_species):
    """
    Generate combinations of molecules from the given species objects.

    For a single species, returns one 1-tuple per resonance structure;
    for two species, returns the cross product of their structures.
    Raises ValueError for any other number of species.
    """
    count = len(input_species)
    if count == 1:
        return [(mol,) for mol in input_species[0].molecule]
    if count == 2:
        return itertools.product(input_species[0].molecule,
                                 input_species[1].molecule)
    raise ValueError('Reaction generation can be done for 1 or 2 species, not {0}.'.format(count))
def ensure_independent_atom_ids(input_species, resonance=True):
    """
    Given a list or tuple of :class:`Species` objects, ensure that atom ids are
    independent across all of the species. Optionally, the `resonance` argument
    can be set to False to not generate resonance structures.

    Modifies the input species in place, nothing is returned.
    """
    def _ids_are_unique():
        # Every atom across every species must carry a distinct id.
        total_atoms = 0
        all_ids = []
        for spc in input_species:
            atoms = spc.molecule[0].atoms
            total_atoms += len(atoms)
            all_ids.extend(atom.id for atom in atoms)
        return len(set(all_ids)) == total_atoms

    if not _ids_are_unique():
        # Collisions found: reassign ids and rebuild resonance structures.
        logging.debug('identical atom ids found between species. regenerating')
        for spc in input_species:
            mol = spc.molecule[0]
            mol.assignAtomIDs()
            spc.molecule = [mol]
            # Remake resonance structures with new labels
            if resonance:
                spc.generate_resonance_structures(keepIsomorphic=True)
    elif resonance:
        # IDs are already independent, generate resonance structures if needed
        for spc in input_species:
            spc.generate_resonance_structures(keepIsomorphic=True)
def find_degenerate_reactions(rxnList, same_reactants=None, kinetics_database=None, kinetics_family=None):
    """
    Given a list of Reaction objects with Molecule objects, this method
    removes degenerate reactions and increments the degeneracy of the
    surviving reaction objects. For multiple transition states, reactions
    are kept as separate entries marked as duplicates.

    Args:
        rxnList: reactions to collapse; the Reaction objects in it are
            modified in place (degeneracy, duplicate flags, species
            conversion), but the list itself is not mutated.
        same_reactants: flag passed through to
            reduce_same_reactant_degeneracy() for forward reactions.
        kinetics_database / kinetics_family: used to look up the family of
            backward reactions so their degeneracy can be recomputed.

    Returns the collapsed list of reactions (note: despite older comments,
    this function DOES return a new list rather than mutating rxnList).

    This algorithm used to exist in family.__generateReactions, but was moved
    here because it didn't have any family dependence.
    """
    # We want to sort all the reactions into sublists composed of isomorphic reactions
    # with degenerate transition states
    rxnSorted = []
    for rxn0 in rxnList:
        # find resonance structures for rxn0
        ensure_species_in_reaction(rxn0)
        if len(rxnSorted) == 0:
            # This is the first reaction, so create a new sublist
            rxnSorted.append([rxn0])
        else:
            # Loop through each sublist, which represents a unique reaction
            for rxnList1 in rxnSorted:
                # Try to determine if the current rxn0 is identical or isomorphic to any reactions in the sublist
                isomorphic = False
                identical = False
                sameTemplate = False
                for rxn in rxnList1:
                    isomorphic = rxn0.isIsomorphic(rxn, checkIdentical=False, checkTemplateRxnProducts=True)
                    if not isomorphic:
                        identical = False
                    else:
                        identical = rxn0.isIsomorphic(rxn, checkIdentical=True, checkTemplateRxnProducts=True)
                    sameTemplate = frozenset(rxn.template) == frozenset(rxn0.template)
                    if not isomorphic:
                        # a different product was found, go to next list
                        break
                    elif not sameTemplate:
                        # a different transition state was found, mark as duplicate and
                        # go to the next sublist
                        rxn.duplicate = True
                        rxn0.duplicate = True
                        break
                    elif identical:
                        # An exact copy of rxn0 is already in our list, so we can move on to the next rxn
                        break
                    else:  # sameTemplate and isomorphic but not identical
                        # This is the right sublist for rxn0, but continue to see if there is an identical rxn
                        continue
                else:
                    # We did not break, so this is the right sublist, but there is no identical reaction
                    # This means that we should add rxn0 to the sublist as a degenerate rxn
                    rxnList1.append(rxn0)
                if isomorphic and sameTemplate:
                    # We already found the right sublist, so we can move on to the next rxn
                    break
            else:
                # We did not break, which means that there was no isomorphic sublist, so create a new one
                rxnSorted.append([rxn0])

    # Note: this rebinds the local name only; the caller's list is untouched.
    rxnList = []
    for rxnList1 in rxnSorted:
        # Collapse our sorted reaction list by taking one reaction from each sublist
        rxn = rxnList1[0]
        # The degeneracy of each reaction is the number of reactions that were in the sublist
        rxn.degeneracy = sum([reaction0.degeneracy for reaction0 in rxnList1])
        rxnList.append(rxn)

    for rxn in rxnList:
        if rxn.isForward:
            reduce_same_reactant_degeneracy(rxn, same_reactants)
        else:
            # fix the degeneracy of (not ownReverse) reactions found in the backwards direction
            try:
                family = kinetics_family or kinetics_database.families[rxn.family]
            except AttributeError:
                # Neither a family nor a database was supplied; fall back on
                # the global kinetics database.
                from rmgpy.data.rmg import getDB
                family = getDB('kinetics').families[rxn.family]
            if not family.ownReverse:
                rxn.degeneracy = family.calculateDegeneracy(rxn)

    return rxnList
def ensure_species_in_reaction(reaction):
    """
    Convert a reaction holding Molecule objects into one holding Species
    objects, in place. Resonance structures are generated for the species
    on the product side of the net reaction, and ``reaction.pairs`` is
    remapped onto the new Species objects. The reverse reaction, when
    present, is converted recursively.
    """
    # Already converted? Species objects mean there is nothing to do.
    if isinstance(reaction.reactants[0], Species):
        return None

    # Resonance structures are only needed for the newly formed side.
    if reaction.isForward:
        reaction.reactants = ensure_species(reaction.reactants, resonance=False)
        reaction.products = ensure_species(reaction.products, resonance=True, keepIsomorphic=True)
    else:
        reaction.reactants = ensure_species(reaction.reactants, resonance=True, keepIsomorphic=True)
        reaction.products = ensure_species(reaction.products, resonance=False)

    # Rebuild reaction.pairs so each entry points at the new Species objects.
    remapped_pairs = []
    for reactant, product in reaction.pairs:
        pair = []
        matched_reactant = next(
            (spc for spc in reaction.reactants if spc.isIsomorphic(reactant)), None)
        if matched_reactant is not None:
            pair.append(matched_reactant)
        matched_product = next(
            (spc for spc in reaction.products if spc.isIsomorphic(product)), None)
        if matched_product is not None:
            pair.append(matched_product)
        remapped_pairs.append(pair)
    reaction.pairs = remapped_pairs

    # TemplateReaction objects carry a reverse reaction; plain Reactions don't.
    try:
        ensure_species_in_reaction(reaction.reverse)
    except AttributeError:
        pass
def reduce_same_reactant_degeneracy(reaction, same_reactants=None):
    """
    This method reduces the degeneracy of reactions with identical reactants,
    since translational component of the transition states are already taken
    into account (so swapping the same reactant is not valid)

    This comes from work by Bishop and Laidler in 1965
    """
    # Only bimolecular reactions can have a swappable reactant pair.
    if len(reaction.reactants) != 2:
        return
    # Trust the caller-provided flag first; only pay for the isomorphism
    # check when the flag does not already settle the question.
    reactants_identical = (
        (reaction.isForward and same_reactants) or
        reaction.reactants[0].isIsomorphic(reaction.reactants[1])
    )
    if reactants_identical:
        reaction.degeneracy *= 0.5
        logging.debug('Degeneracy of reaction {} was decreased by 50% to {} since the reactants are identical'.format(reaction, reaction.degeneracy))
| Molecular-Image-Recognition/Molecular-Image-Recognition | code/rmgpy/data/kinetics/common.py | Python | mit | 16,818 | 0.003687 |
class Clock:
    """A 24-hour clock whose hours and minutes roll over correctly."""

    def __init__(self, hour, minute):
        self.hour = hour
        self.minute = minute
        self.cleanup()

    def __repr__(self):
        return f'Clock({self.hour}, {self.minute})'

    def __str__(self):
        # Zero-padded HH:MM display.
        return f'{self.hour:02d}:{self.minute:02d}'

    def __eq__(self, other):
        # Normalized state makes the repr a canonical form for comparison.
        return repr(self) == repr(other)

    def __add__(self, minutes):
        self.minute = self.minute + minutes
        return self.cleanup()

    def __sub__(self, minutes):
        self.minute = self.minute - minutes
        return self.cleanup()

    def cleanup(self):
        # divmod floors toward negative infinity, so negative minute totals
        # borrow from the hour correctly.
        carried_hours, self.minute = divmod(self.minute, 60)
        self.hour = (self.hour + carried_hours) % 24
        return self
| exercism/python | exercises/practice/clock/.meta/example.py | Python | mit | 752 | 0 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: replaces ``Company.image_url`` with two
    separate ``banner_url`` and ``logo_url`` fields."""

    def forwards(self, orm):
        # Deleting field 'Company.image_url'
        db.delete_column(u'techpong_company', 'image_url')

        # Adding field 'Company.banner_url'
        db.add_column(u'techpong_company', 'banner_url',
                      self.gf('django.db.models.fields.URLField')(default='', max_length=255, blank=True),
                      keep_default=False)

        # Adding field 'Company.logo_url'
        db.add_column(u'techpong_company', 'logo_url',
                      self.gf('django.db.models.fields.URLField')(default='', max_length=255, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Adding field 'Company.image_url'
        # NOTE(review): default=1 for a URLField looks odd but matches the
        # auto-generated migration; it is only used to backfill existing rows.
        db.add_column(u'techpong_company', 'image_url',
                      self.gf('django.db.models.fields.URLField')(default=1, max_length=255),
                      keep_default=False)

        # Deleting field 'Company.banner_url'
        db.delete_column(u'techpong_company', 'banner_url')

        # Deleting field 'Company.logo_url'
        db.delete_column(u'techpong_company', 'logo_url')

    # Frozen ORM state captured by South when this migration was generated.
    # Do not edit by hand; it must reflect the models as of this migration.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'techpong.company': {
            'Meta': {'object_name': 'Company'},
            'banner_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'joined_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'logo_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'short_name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'techpong.match': {
            'Meta': {'ordering': "['-played_time']", 'object_name': 'Match'},
            'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['techpong.Company']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'loser': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'match_loser'", 'to': u"orm['techpong.Player']"}),
            'loser_rating_after': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'loser_rating_before': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'match_quality': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'played_time': ('django.db.models.fields.DateTimeField', [], {}),
            'winner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'match_winner'", 'to': u"orm['techpong.Player']"}),
            'winner_rating_after': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'winner_rating_before': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
        },
        u'techpong.player': {
            'Meta': {'object_name': 'Player'},
            'cached_rating_changes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'cached_results': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['techpong.Company']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'rating': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
        },
        u'techpong.round': {
            'Meta': {'object_name': 'Round'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'match': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['techpong.Match']"}),
            'player1_score': ('django.db.models.fields.IntegerField', [], {}),
            'player2_score': ('django.db.models.fields.IntegerField', [], {}),
            'round_number': ('django.db.models.fields.PositiveSmallIntegerField', [], {})
        },
        u'techpong.userprofile': {
            'Meta': {'object_name': 'UserProfile'},
            'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['techpong.Company']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        }
    }

    complete_apps = ['techpong']
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Odoo addon manifest for the PosBox Homepage module.
{
    'name': 'PosBox Homepage',
    'version': '1.0',
    'category': 'Point of Sale',
    # Menu ordering weight within the category.
    'sequence': 6,
    'website': 'https://www.odoo.com/page/point-of-sale',
    'summary': 'A homepage for the PosBox',
    'description': """
PosBox Homepage
===============
This module overrides openerp web interface to display a simple
Homepage that explains what's the posbox and show the status,
and where to find documentation.
If you activate this module, you won't be able to access the
regular openerp interface anymore.
""",
    'depends': ['hw_proxy'],
    # Marked not installable: intended for the PosBox image, not regular servers.
    'installable': False,
    'auto_install': False,
}
| vileopratama/vitech | src/addons/hw_posbox_homepage/__openerp__.py | Python | mit | 714 | 0.002801 |
"""
Database models for the LTI provider feature.
This app uses migrations. If you make changes to this model, be sure to create
an appropriate migration file and check it in at the same time as your model
changes. To do that,
1. Go to the edx-platform dir
2. ./manage.py lms schemamigration lti_provider --auto "description" --settings=devstack
"""
from django.contrib.auth.models import User
from django.db import models
import logging
from xmodule_django.models import CourseKeyField, UsageKeyField
from provider.utils import short_token, long_token
log = logging.getLogger("edx.lti_provider")
class LtiConsumer(models.Model):
    """
    Database model representing an LTI consumer. This model stores the consumer
    specific settings, such as the OAuth key/secret pair and any LTI fields
    that must be persisted.
    """
    # Human-readable unique name for distinguishing consumers.
    consumer_name = models.CharField(max_length=255, unique=True)
    # OAuth credentials; both default to random tokens. The key is indexed
    # since get_or_supplement() looks consumers up by key.
    consumer_key = models.CharField(max_length=32, unique=True, db_index=True, default=short_token)
    consumer_secret = models.CharField(max_length=32, unique=True, default=short_token)
    # Optional LTI instance GUID; may be back-filled by get_or_supplement()
    # once a launch finally supplies it, hence null/blank allowed.
    instance_guid = models.CharField(max_length=255, blank=True, null=True, unique=True)

    @staticmethod
    def get_or_supplement(instance_guid, consumer_key):
        """
        The instance_guid is the best way to uniquely identify an LTI consumer.
        However according to the LTI spec, the instance_guid field is optional
        and so cannot be relied upon to be present.

        This method first attempts to find an LtiConsumer by instance_guid.
        Failing that, it tries to find a record with a matching consumer_key.
        This can be the case if the LtiConsumer record was created as the result
        of an LTI launch with no instance_guid.

        If the instance_guid is now present, the LtiConsumer model will be
        supplemented with the instance_guid, to more concretely identify the
        consumer.

        In practice, nearly all major LTI consumers provide an instance_guid, so
        the fallback mechanism of matching by consumer key should be rarely
        required.

        Raises LtiConsumer.DoesNotExist if neither lookup finds a record.
        """
        consumer = None
        if instance_guid:
            try:
                consumer = LtiConsumer.objects.get(instance_guid=instance_guid)
            except LtiConsumer.DoesNotExist:
                # The consumer may not exist, or its record may not have a guid
                pass

        # Search by consumer key instead of instance_guid. If there is no
        # consumer with a matching key, the LTI launch does not have permission
        # to access the content.
        if not consumer:
            consumer = LtiConsumer.objects.get(
                consumer_key=consumer_key,
            )

        # Add the instance_guid field to the model if it's not there already.
        if instance_guid and not consumer.instance_guid:
            consumer.instance_guid = instance_guid
            consumer.save()
        return consumer
class OutcomeService(models.Model):
    """
    Model for a single outcome service associated with an LTI consumer. Note
    that a given consumer may have more than one outcome service URL over its
    lifetime, so we need to store the outcome service separately from the
    LtiConsumer model.

    An outcome service can be identified in two ways, depending on the
    information provided by an LTI launch. The ideal way to identify the service
    is by instance_guid, which should uniquely identify a consumer. However that
    field is optional in the LTI launch, and so if it is missing we can fall
    back on the consumer key (which should be created uniquely for each consumer
    although we don't have a technical way to guarantee that).

    Some LTI-specified fields use the prefix lis_; this refers to the IMS
    Learning Information Services standard from which LTI inherits some
    properties
    """
    # Unique URL identifying this outcome service endpoint.
    lis_outcome_service_url = models.CharField(max_length=255, unique=True)
    # Consumer that owns the endpoint; a consumer may own several over time.
    lti_consumer = models.ForeignKey(LtiConsumer)
class GradedAssignment(models.Model):
    """
    Model representing a single launch of a graded assignment by an individual
    user. There will be a row created here only if the LTI consumer may require
    a result to be returned from the LTI launch (determined by the presence of
    the lis_result_sourcedid parameter in the launch POST). There will be only
    one row created for a given usage/consumer combination; repeated launches of
    the same content by the same user from the same LTI consumer will not add
    new rows to the table.

    Some LTI-specified fields use the prefix lis_; this refers to the IMS
    Learning Information Services standard from which LTI inherits some
    properties
    """
    user = models.ForeignKey(User, db_index=True)
    # Course and content block this launch targeted.
    course_key = CourseKeyField(max_length=255, db_index=True)
    usage_key = UsageKeyField(max_length=255, db_index=True)
    outcome_service = models.ForeignKey(OutcomeService)
    # Consumer-supplied identifier for posting grades back; unique per
    # outcome service (see Meta below).
    lis_result_sourcedid = models.CharField(max_length=255, db_index=True)
    # Starts at zero; presumably incremented per outcome message — confirm
    # against the outcome service code before relying on its semantics.
    version_number = models.IntegerField(default=0)

    class Meta(object):
        unique_together = ('outcome_service', 'lis_result_sourcedid')
class LtiUser(models.Model):
    """
    Model mapping the identity of an LTI user to an account on the edX platform.
    The LTI user_id field is guaranteed to be unique per LTI consumer (per
    to the LTI spec), so we guarantee a unique mapping from LTI to edX account
    by using the lti_consumer/lti_user_id tuple.
    """
    lti_consumer = models.ForeignKey(LtiConsumer)
    # Opaque user identifier supplied by the consumer; only unique per consumer.
    lti_user_id = models.CharField(max_length=255)
    # One edX account maps to at most one LTI identity (unique=True).
    edx_user = models.ForeignKey(User, unique=True)

    class Meta(object):
        unique_together = ('lti_consumer', 'lti_user_id')
| JCBarahona/edX | lms/djangoapps/lti_provider/models.py | Python | agpl-3.0 | 5,760 | 0.00191 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import duration_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
# proto-plus module registry: declares the protobuf package and the message
# classes defined below. The manifest names must match the class names.
__protobuf__ = proto.module(
    package="google.cloud.gaming.v1",
    manifest={
        "OperationMetadata",
        "OperationStatus",
        "LabelSelector",
        "RealmSelector",
        "Schedule",
        "SpecSource",
        "TargetDetails",
        "TargetState",
        "DeployedFleetDetails",
    },
)
# proto-plus message; field numbers must stay in sync with the
# google.cloud.gaming.v1 protobuf schema.
class OperationMetadata(proto.Message):
    r"""Represents the metadata of the long-running operation.

    Attributes:
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The time the operation was
            created.
        end_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The time the operation finished
            running.
        target (str):
            Output only. Server-defined resource path for
            the target of the operation.
        verb (str):
            Output only. Name of the verb executed by the
            operation.
        status_message (str):
            Output only. Human-readable status of the
            operation, if any.
        requested_cancellation (bool):
            Output only. Identifies whether the user has requested
            cancellation of the operation. Operations that have
            successfully been cancelled have [Operation.error][] value
            with a [google.rpc.Status.code][google.rpc.Status.code] of
            1, corresponding to ``Code.CANCELLED``.
        api_version (str):
            Output only. API version used to start the
            operation.
        unreachable (Sequence[str]):
            Output only. List of Locations that could not
            be reached.
        operation_status (Sequence[google.cloud.gaming_v1.types.OperationMetadata.OperationStatusEntry]):
            Output only. Operation status for Game
            Services API operations. Operation status is in
            the form of key-value pairs where keys are
            resource IDs and the values show the status of
            the operation. In case of failures, the value
            includes an error code and error message.
    """

    create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,)
    end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)
    target = proto.Field(proto.STRING, number=3,)
    verb = proto.Field(proto.STRING, number=4,)
    status_message = proto.Field(proto.STRING, number=5,)
    requested_cancellation = proto.Field(proto.BOOL, number=6,)
    api_version = proto.Field(proto.STRING, number=7,)
    unreachable = proto.RepeatedField(proto.STRING, number=8,)
    # Map of resource ID -> per-resource OperationStatus.
    operation_status = proto.MapField(
        proto.STRING, proto.MESSAGE, number=9, message="OperationStatus",
    )
# proto-plus message; field numbers must stay in sync with the
# google.cloud.gaming.v1 protobuf schema.
class OperationStatus(proto.Message):
    r"""Per-resource status of a long-running operation.

    Attributes:
        done (bool):
            Output only. Whether the operation is done or
            still in progress.
        error_code (google.cloud.gaming_v1.types.OperationStatus.ErrorCode):
            The error code in case of failures.
        error_message (str):
            The human-readable error message.
    """

    class ErrorCode(proto.Enum):
        r"""Error categories reported for failed operations."""
        ERROR_CODE_UNSPECIFIED = 0
        INTERNAL_ERROR = 1
        PERMISSION_DENIED = 2
        CLUSTER_CONNECTION = 3

    done = proto.Field(proto.BOOL, number=1,)
    error_code = proto.Field(proto.ENUM, number=2, enum=ErrorCode,)
    error_message = proto.Field(proto.STRING, number=3,)
# proto-plus message; field numbers must stay in sync with the
# google.cloud.gaming.v1 protobuf schema.
class LabelSelector(proto.Message):
    r"""The label selector, used to group labels on the resources.

    Attributes:
        labels (Sequence[google.cloud.gaming_v1.types.LabelSelector.LabelsEntry]):
            Resource labels for this selector.
    """

    labels = proto.MapField(proto.STRING, proto.STRING, number=1,)
# proto-plus message; field numbers must stay in sync with the
# google.cloud.gaming.v1 protobuf schema.
class RealmSelector(proto.Message):
    r"""The realm selector, used to match realm resources.

    Attributes:
        realms (Sequence[str]):
            List of realms to match.
    """

    realms = proto.RepeatedField(proto.STRING, number=1,)
# proto-plus message; field numbers must stay in sync with the
# google.cloud.gaming.v1 protobuf schema.
class Schedule(proto.Message):
    r"""The schedule of a recurring or one time event. The event's time span
    is specified by start_time and end_time. If the scheduled event's
    timespan is larger than the cron_spec + cron_job_duration, the event
    will be recurring. If only cron_spec + cron_job_duration are
    specified, the event is effective starting at the local time
    specified by cron_spec, and is recurring.

    ::

       start_time|-------[cron job]-------[cron job]-------[cron job]---|end_time
       cron job: cron spec start time + duration

    Attributes:
        start_time (google.protobuf.timestamp_pb2.Timestamp):
            The start time of the event.
        end_time (google.protobuf.timestamp_pb2.Timestamp):
            The end time of the event.
        cron_job_duration (google.protobuf.duration_pb2.Duration):
            The duration for the cron job event. The
            duration of the event is effective after the
            cron job's start time.
        cron_spec (str):
            The cron definition of the scheduled event.
            See https://en.wikipedia.org/wiki/Cron. Cron
            spec specifies the local time as defined by the
            realm.
    """

    start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,)
    end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)
    cron_job_duration = proto.Field(
        proto.MESSAGE, number=3, message=duration_pb2.Duration,
    )
    cron_spec = proto.Field(proto.STRING, number=4,)
# proto-plus message; field numbers must stay in sync with the
# google.cloud.gaming.v1 protobuf schema.
class SpecSource(proto.Message):
    r"""Encapsulates Agones fleet spec and Agones autoscaler spec
    sources.

    Attributes:
        game_server_config_name (str):
            The game server config resource. Uses the form:
            ``projects/{project}/locations/{location}/gameServerDeployments/{deployment_id}/configs/{config_id}``.
        name (str):
            The name of the Agones leet config or Agones
            scaling config used to derive the Agones fleet
            or Agones autoscaler spec.
    """

    game_server_config_name = proto.Field(proto.STRING, number=1,)
    name = proto.Field(proto.STRING, number=2,)
# proto-plus message; field numbers must stay in sync with the
# google.cloud.gaming.v1 protobuf schema.
class TargetDetails(proto.Message):
    r"""Details about the Agones resources.

    Attributes:
        game_server_cluster_name (str):
            The game server cluster name. Uses the form:
            ``projects/{project}/locations/{location}/realms/{realm}/gameServerClusters/{cluster}``.
        game_server_deployment_name (str):
            The game server deployment name. Uses the form:
            ``projects/{project}/locations/{location}/gameServerDeployments/{deployment_id}``.
        fleet_details (Sequence[google.cloud.gaming_v1.types.TargetDetails.TargetFleetDetails]):
            Agones fleet details for game server clusters
            and game server deployments.
    """

    class TargetFleetDetails(proto.Message):
        r"""Details of the target Agones fleet.

        Attributes:
            fleet (google.cloud.gaming_v1.types.TargetDetails.TargetFleetDetails.TargetFleet):
                Reference to target Agones fleet.
            autoscaler (google.cloud.gaming_v1.types.TargetDetails.TargetFleetDetails.TargetFleetAutoscaler):
                Reference to target Agones fleet autoscaling
                policy.
        """

        class TargetFleet(proto.Message):
            r"""Target Agones fleet specification.

            Attributes:
                name (str):
                    The name of the Agones fleet.
                spec_source (google.cloud.gaming_v1.types.SpecSource):
                    Encapsulates the source of the Agones fleet
                    spec. The Agones fleet spec source.
            """

            name = proto.Field(proto.STRING, number=1,)
            spec_source = proto.Field(proto.MESSAGE, number=2, message="SpecSource",)

        class TargetFleetAutoscaler(proto.Message):
            r"""Target Agones autoscaler policy reference.

            Attributes:
                name (str):
                    The name of the Agones autoscaler.
                spec_source (google.cloud.gaming_v1.types.SpecSource):
                    Encapsulates the source of the Agones fleet
                    spec. Details about the Agones autoscaler spec.
            """

            name = proto.Field(proto.STRING, number=1,)
            spec_source = proto.Field(proto.MESSAGE, number=2, message="SpecSource",)

        fleet = proto.Field(
            proto.MESSAGE,
            number=1,
            message="TargetDetails.TargetFleetDetails.TargetFleet",
        )
        autoscaler = proto.Field(
            proto.MESSAGE,
            number=2,
            message="TargetDetails.TargetFleetDetails.TargetFleetAutoscaler",
        )

    game_server_cluster_name = proto.Field(proto.STRING, number=1,)
    game_server_deployment_name = proto.Field(proto.STRING, number=2,)
    fleet_details = proto.RepeatedField(
        proto.MESSAGE, number=3, message=TargetFleetDetails,
    )
# proto-plus message; field numbers must stay in sync with the
# google.cloud.gaming.v1 protobuf schema.
class TargetState(proto.Message):
    r"""Encapsulates the Target state.

    Attributes:
        details (Sequence[google.cloud.gaming_v1.types.TargetDetails]):
            Details about Agones fleets.
    """

    details = proto.RepeatedField(proto.MESSAGE, number=1, message="TargetDetails",)
# proto-plus message; field numbers must stay in sync with the
# google.cloud.gaming.v1 protobuf schema.
class DeployedFleetDetails(proto.Message):
    r"""Details of the deployed Agones fleet.

    Attributes:
        deployed_fleet (google.cloud.gaming_v1.types.DeployedFleetDetails.DeployedFleet):
            Information about the Agones fleet.
        deployed_autoscaler (google.cloud.gaming_v1.types.DeployedFleetDetails.DeployedFleetAutoscaler):
            Information about the Agones autoscaler for
            that fleet.
    """

    class DeployedFleet(proto.Message):
        r"""Agones fleet specification and details.

        Attributes:
            fleet (str):
                The name of the Agones fleet.
            fleet_spec (str):
                The fleet spec retrieved from the Agones
                fleet.
            spec_source (google.cloud.gaming_v1.types.SpecSource):
                The source spec that is used to create the
                Agones fleet. The GameServerConfig resource may
                no longer exist in the system.
            status (google.cloud.gaming_v1.types.DeployedFleetDetails.DeployedFleet.DeployedFleetStatus):
                The current status of the Agones fleet.
                Includes count of game servers in various
                states.
        """

        class DeployedFleetStatus(proto.Message):
            r"""DeployedFleetStatus has details about the Agones fleets such
            as how many are running, how many allocated, and so on.

            Attributes:
                ready_replicas (int):
                    The number of GameServer replicas in the
                    READY state in this fleet.
                allocated_replicas (int):
                    The number of GameServer replicas in the
                    ALLOCATED state in this fleet.
                reserved_replicas (int):
                    The number of GameServer replicas in the
                    RESERVED state in this fleet. Reserved instances
                    won't be deleted on scale down, but won't cause
                    an autoscaler to scale up.
                replicas (int):
                    The total number of current GameServer
                    replicas in this fleet.
            """

            ready_replicas = proto.Field(proto.INT64, number=1,)
            allocated_replicas = proto.Field(proto.INT64, number=2,)
            reserved_replicas = proto.Field(proto.INT64, number=3,)
            replicas = proto.Field(proto.INT64, number=4,)

        fleet = proto.Field(proto.STRING, number=1,)
        fleet_spec = proto.Field(proto.STRING, number=2,)
        spec_source = proto.Field(proto.MESSAGE, number=3, message="SpecSource",)
        status = proto.Field(
            proto.MESSAGE,
            number=5,
            message="DeployedFleetDetails.DeployedFleet.DeployedFleetStatus",
        )

    class DeployedFleetAutoscaler(proto.Message):
        r"""Details about the Agones autoscaler.

        Attributes:
            autoscaler (str):
                The name of the Agones autoscaler.
            spec_source (google.cloud.gaming_v1.types.SpecSource):
                The source spec that is used to create the
                autoscaler. The GameServerConfig resource may no
                longer exist in the system.
            fleet_autoscaler_spec (str):
                The autoscaler spec retrieved from Agones.
        """

        autoscaler = proto.Field(proto.STRING, number=1,)
        spec_source = proto.Field(proto.MESSAGE, number=4, message="SpecSource",)
        fleet_autoscaler_spec = proto.Field(proto.STRING, number=3,)

    deployed_fleet = proto.Field(proto.MESSAGE, number=1, message=DeployedFleet,)
    deployed_autoscaler = proto.Field(
        proto.MESSAGE, number=2, message=DeployedFleetAutoscaler,
    )
# Public API of this module: exactly the message names declared in the manifest.
__all__ = tuple(sorted(__protobuf__.manifest))
| googleapis/python-game-servers | google/cloud/gaming_v1/types/common.py | Python | apache-2.0 | 13,962 | 0.001361 |
from flask import render_template
from app import app
@app.route('/')
@app.route('/mainpage')
def slpage():
    """Render the main landing page for both '/' and '/mainpage'."""
    page = render_template('mainpage.html')
    return page
@app.errorhandler(404)
def page_not_found(error):
    """Serve the custom 404 page together with the 404 status code."""
    body = render_template('page_not_found.html')
    return body, 404
| jvahala/brew-thing | app/views.py | Python | apache-2.0 | 274 | 0 |
from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.db.models import AutoField
from django.forms import ValidationError, ModelForm
from django.http import HttpResponseRedirect
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User as AuthUser
from mezzanine.conf import settings
from mezzanine.core.forms import DynamicInlineAdminForm
from mezzanine.core.models import (Orderable, SitePermission,
CONTENT_STATUS_PUBLISHED)
from mezzanine.utils.urls import admin_url
from mezzanine.utils.models import get_user_model
# ``BaseTranslationModelAdmin`` is defined one of two ways at import time so
# that the admin classes below can subclass it unconditionally, regardless of
# whether django-modeltranslation is enabled.
if settings.USE_MODELTRANSLATION:
    # NOTE(review): SortedDict / activate / get_language appear unused in this
    # section; they may be used further down the module — confirm before removing.
    from django.utils.datastructures import SortedDict
    from django.utils.translation import activate, get_language
    from modeltranslation.admin import (TranslationAdmin,
                                        TranslationInlineModelAdmin)

    class BaseTranslationModelAdmin(TranslationAdmin):
        """
        Mimic modeltranslation's TabbedTranslationAdmin but uses a
        custom tabbed_translation_fields.js
        """
        class Media:
            js = (
                "modeltranslation/js/force_jquery.js",
                "mezzanine/js/%s" % settings.JQUERY_UI_FILENAME,
                "mezzanine/js/admin/tabbed_translation_fields.js",
            )
            css = {
                "all": ("mezzanine/css/admin/tabbed_translation_fields.css",),
            }

else:
    class BaseTranslationModelAdmin(admin.ModelAdmin):
        """
        Abstract class used to handle the switch between translation
        and no-translation class logic. We define the basic structure
        for the Media class so we can extend it consistently regardless
        of whether or not modeltranslation is used.
        """
        class Media:
            css = {"all": ()}
def getInlineBaseClass(cls):
    """
    Return *cls* mixed with modeltranslation's inline admin support when
    ``USE_MODELTRANSLATION`` is enabled, otherwise return *cls* unchanged.
    Used to build the tabular/stacked dynamic inline base classes below.
    """
    if settings.USE_MODELTRANSLATION:
        class InlineBase(TranslationInlineModelAdmin, cls):
            """
            Abstract class that mimics django-modeltranslation's
            Translation{Tabular,Stacked}Inline. Used as a placeholder
            for future improvement.
            """
            pass
        return InlineBase
    return cls

# Swappable user model (may be a custom model instead of auth.User).
User = get_user_model()
class DisplayableAdminForm(ModelForm):
    """
    Admin form for ``Displayable`` subclasses that refuses to save an
    item as published while its content is empty.
    """

    def clean_content(self):
        """Require a non-empty content field for published items."""
        cleaned = self.cleaned_data
        body = cleaned.get("content")
        if not body and cleaned.get("status") == CONTENT_STATUS_PUBLISHED:
            message = _("This field is required if status "
                        "is set to published.")
            raise ValidationError(message)
        return body
class DisplayableAdmin(BaseTranslationModelAdmin):
    """
    Admin class for subclasses of the abstract ``Displayable`` model.
    """

    list_display = ("title", "status", "admin_link")
    list_display_links = ("title",)
    list_editable = ("status",)
    list_filter = ("status", "keywords__keyword")
    date_hierarchy = "publish_date"
    radio_fields = {"status": admin.HORIZONTAL}
    fieldsets = (
        (None, {
            "fields": ["title", "status", ("publish_date", "expiry_date")],
        }),
        (_("Meta data"), {
            "fields": ["_meta_title", "slug",
                       ("description", "gen_description"),
                       "keywords", "in_sitemap"],
            "classes": ("collapse-closed",)
        }),
    )

    form = DisplayableAdminForm

    def __init__(self, *args, **kwargs):
        """
        Extend ``search_fields`` with the searchable fields declared by
        the model's manager, de-duplicating against the class defaults.
        """
        super(DisplayableAdmin, self).__init__(*args, **kwargs)
        try:
            self.search_fields = list(set(list(self.search_fields) + list(
                self.model.objects.get_search_fields().keys())))
        except AttributeError:
            # Manager doesn't provide get_search_fields(); keep defaults.
            pass
class BaseDynamicInlineAdmin(object):
    """
    Admin inline that uses JS to inject an "Add another" link which
    when clicked, dynamically reveals another fieldset. Also handles
    adding the ``_order`` field and its widget for models that
    subclass ``Orderable``.
    """

    form = DynamicInlineAdminForm
    extra = 20

    def __init__(self, *args, **kwargs):
        """
        For ``Orderable`` models, normalise ``self.fields`` to a list of
        field names with ``_order`` forced to the end.
        """
        super(BaseDynamicInlineAdmin, self).__init__(*args, **kwargs)
        if issubclass(self.model, Orderable):
            exclude = self.exclude or []
            fields = self.fields
            if fields:
                # ``fields`` was set explicitly and already contains field
                # *names*. The previous code pushed these strings through
                # the attribute-based filter below, which raised
                # AttributeError (strings have no ``editable``/``name``).
                fields = [f for f in fields if f not in exclude]
            else:
                # Derive editable, non-auto field names from the model.
                fields = [f.name for f in self.model._meta.fields
                          if f.editable and f.name not in exclude and
                          not isinstance(f, AutoField)]
            # Ensure the ordering field renders last.
            if "_order" in fields:
                fields.remove("_order")
            fields.append("_order")
            self.fields = fields
# Tabular flavour of the dynamic "Add another" inline.
class TabularDynamicInlineAdmin(BaseDynamicInlineAdmin, getInlineBaseClass(admin.TabularInline)):
    template = "admin/includes/dynamic_inline_tabular.html"
# Stacked flavour of the dynamic "Add another" inline; requires Grappelli.
class StackedDynamicInlineAdmin(BaseDynamicInlineAdmin, getInlineBaseClass(admin.StackedInline)):
    template = "admin/includes/dynamic_inline_stacked.html"

    def __init__(self, *args, **kwargs):
        """
        Stacked dynamic inlines won't work without grappelli
        installed, as the JavaScript in dynamic_inline.js isn't
        able to target each of the inlines to set the value of
        the order field.
        """
        # Fail fast at admin registration time rather than at render time.
        grappelli_name = getattr(settings, "PACKAGE_NAME_GRAPPELLI")
        if grappelli_name not in settings.INSTALLED_APPS:
            error = "StackedDynamicInlineAdmin requires Grappelli installed."
            raise Exception(error)
        super(StackedDynamicInlineAdmin, self).__init__(*args, **kwargs)
class OwnableAdmin(admin.ModelAdmin):
    """
    Admin class for models that subclass the abstract ``Ownable``
    model. Handles limiting the change list to objects owned by the
    logged in user, as well as setting the owner of newly created
    objects to the logged in user.

    Remember that this will include the ``user`` field in the required
    fields for the admin change form which may not be desirable. The
    best approach to solve this is to define a ``fieldsets`` attribute
    that excludes the ``user`` field or simple add ``user`` to your
    admin excludes: ``exclude = ('user',)``
    """

    def save_form(self, request, form, change):
        """
        Set the object's owner as the logged in user.
        """
        obj = form.save(commit=False)
        # Only assign ownership the first time; never reassign on edit.
        if obj.user_id is None:
            obj.user = request.user
        return super(OwnableAdmin, self).save_form(request, form, change)

    def queryset(self, request):
        """
        Filter the change list by currently logged in user if not a
        superuser. We also skip filtering if the model for this admin
        class has been added to the sequence in the setting
        ``OWNABLE_MODELS_ALL_EDITABLE``, which contains models in the
        format ``app_label.object_name``, and allows models subclassing
        ``Ownable`` to be excluded from filtering, eg: ownership should
        not imply permission to edit.
        """
        # Compare lower-cased "app_label.ObjectName" against the setting.
        opts = self.model._meta
        model_name = ("%s.%s" % (opts.app_label, opts.object_name)).lower()
        models_all_editable = settings.OWNABLE_MODELS_ALL_EDITABLE
        models_all_editable = [m.lower() for m in models_all_editable]
        qs = super(OwnableAdmin, self).queryset(request)
        if request.user.is_superuser or model_name in models_all_editable:
            return qs
        return qs.filter(user__id=request.user.id)
class SingletonAdmin(admin.ModelAdmin):
    """
    Admin class for models that should only contain a single instance
    in the database. Redirect all views to the change view when the
    instance exists, and to the add view when it doesn't.
    """

    def handle_save(self, request, response):
        """
        Handles redirect back to the dashboard when save is clicked
        (eg not save and continue editing), by checking for a redirect
        response, which only occurs if the form is valid.
        """
        form_valid = isinstance(response, HttpResponseRedirect)
        if request.POST.get("_save") and form_valid:
            return redirect("admin:index")
        return response

    def add_view(self, *args, **kwargs):
        """
        Redirect to the change view if the singleton instance exists.
        """
        try:
            singleton = self.model.objects.get()
        except (self.model.DoesNotExist, self.model.MultipleObjectsReturned):
            # Zero (or several) instances - fall through to the normal add
            # view, flagging the template via the ``singleton`` context var.
            kwargs.setdefault("extra_context", {})
            kwargs["extra_context"]["singleton"] = True
            response = super(SingletonAdmin, self).add_view(*args, **kwargs)
            return self.handle_save(args[0], response)
        return redirect(admin_url(self.model, "change", singleton.id))

    def changelist_view(self, *args, **kwargs):
        """
        Redirect to the add view if no records exist or the change
        view if the singleton instance exists.
        """
        try:
            singleton = self.model.objects.get()
        except self.model.MultipleObjectsReturned:
            # More than one record: show the regular change list.
            return super(SingletonAdmin, self).changelist_view(*args, **kwargs)
        except self.model.DoesNotExist:
            return redirect(admin_url(self.model, "add"))
        return redirect(admin_url(self.model, "change", singleton.id))

    def change_view(self, *args, **kwargs):
        """
        If only the singleton instance exists, pass ``True`` for
        ``singleton`` into the template which will use CSS to hide
        the "save and add another" button.
        """
        kwargs.setdefault("extra_context", {})
        kwargs["extra_context"]["singleton"] = self.model.objects.count() == 1
        response = super(SingletonAdmin, self).change_view(*args, **kwargs)
        return self.handle_save(args[0], response)
###########################################
# Site Permissions Inlines for User Admin #
###########################################
class SitePermissionInline(admin.TabularInline):
    # Inline editor for the per-site permissions attached to a user.
    model = SitePermission
    max_num = 1
    can_delete = False


class SitePermissionUserAdmin(UserAdmin):
    # Standard Django UserAdmin extended with the site-permission inline.
    inlines = [SitePermissionInline]

# only register if User hasn't been overridden
if User == AuthUser:
    admin.site.unregister(User)
    admin.site.register(User, SitePermissionUserAdmin)
| dekomote/mezzanine-modeltranslation-backport | mezzanine/core/admin.py | Python | bsd-2-clause | 10,434 | 0.000479 |
class Solution(object):
    def removeDuplicates(self, nums):
        """
        Remove extras so each value appears at most twice in the sorted
        list *nums*, shrinking it in place, and return the new length.

        Fixes two defects in the original: a single-element list returned
        0 (while leaving the list non-empty), and repeated ``del nums[pos]``
        made it O(n^2).  This version is O(n).

        :type nums: List[int]
        :rtype: int
        """
        keep = 0  # next write index; nums[:keep] is the kept prefix
        for value in nums:
            # Keep the value unless it would be a third consecutive copy.
            if keep < 2 or value != nums[keep - 2]:
                nums[keep] = value
                keep += 1
        # Physically truncate, matching the original's del-based behaviour
        # so len(nums) equals the returned length.
        del nums[keep:]
        return len(nums)
print Solution().removeDuplicates([1,1,1,1,1,2,3,3,4,4,5]) | xingjian-f/Leetcode-solution | 80. Remove Duplicates from Sorted Array II.py | Python | mit | 605 | 0.07438 |
from main import *
def palindrome_chain_length(n):
    """Return how many reverse-then-add steps turn *n* into a palindrome."""
    steps = 0
    mirrored = reverse_order(n)
    while not is_palindrome(n, mirrored):
        n += mirrored
        steps += 1
        mirrored = reverse_order(n)
    return steps
def reverse_order(n):
    """
    Return the integer whose decimal digits are those of *n* reversed.

    E.g. reverse_order(87) == 78.  Leading zeros in the reversal vanish,
    so reverse_order(100) == 1.
    """
    # str(n)[::-1] already yields the reversed digit string; the original
    # list()/join() round-trip over it was redundant.
    return int(str(n)[::-1])
def is_palindrome(n, reversed_n):
    """True when *n* equals its digit-reversal *reversed_n*."""
    # The comparison already yields a bool; the original
    # "True if ... else False" ternary was redundant.
    return n == reversed_n
# Smoke tests, executed on import via the Codewars `test` helper from main.
test.assert_equals(reverse_order(87), 78)
test.assert_equals(is_palindrome(87, reverse_order(87)), False)
test.assert_equals(is_palindrome(5, reverse_order(5)), True)
test.assert_equals(palindrome_chain_length(87), 4)
# Copyright (C) 2011-2016 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This file is part of the GDB testsuite. It tests python Finish
# Breakpoints.
class ExceptionFinishBreakpoint(gdb.FinishBreakpoint):
    """
    Internal, silent FinishBreakpoint used by the test to exercise GDB's
    Python finish-breakpoint API when frames unwind via exceptions.
    """
    def __init__(self, frame):
        # internal=1 keeps this breakpoint out of the user's breakpoint list.
        gdb.FinishBreakpoint.__init__ (self, frame, internal=1)
        self.silent = True
        print ("init ExceptionFinishBreakpoint")

    def stop(self):
        # Returning True tells GDB to actually stop execution here.
        print ("stopped at ExceptionFinishBreakpoint")
        return True

    def out_of_scope(self):
        # Called by GDB when the watched frame is popped without the
        # breakpoint being hit (e.g. unwound by an exception).
        print ("exception did not finish ...")

# Marker the test harness looks for to confirm the script loaded.
print ("Python script imported")
| freak97/binutils | gdb/testsuite/gdb.python/py-finish-breakpoint2.py | Python | gpl-2.0 | 1,221 | 0.006552 |
import json
import argparse
import logging
import glob

# Logging Information: DEBUG and above goes both to a log file and stderr.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(levelname)s: %(message)s')
fh = logging.FileHandler('test_hashes.log')
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
logger.addHandler(fh)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
logger.addHandler(ch)

parser = argparse.ArgumentParser()
parser.add_argument("hash_num", help="file that we want to verify")
args = parser.parse_args()

hashes = set()
hash_num = args.hash_num
# Build the zero-padded archive name once instead of repeating
# "VirusShare_00" + str(hash_num).zfill(3) at every use site.
basename = "VirusShare_00" + str(hash_num).zfill(3)

logger.info("Verifying consistency for " + basename)

logger.debug("Generating hashes from ../hashes/" + basename + ".md5")
# The first six lines of the .md5 file are header text, so skip them.
with open("../hashes/" + basename + ".md5", 'r') as file:
    for line in file.readlines()[6:]:
        hashes.add(line.strip())

for filename in glob.glob("../analyses/" + basename + ".*"):
    logger.debug("Removing hashes from " + filename)
    with open(filename, 'r') as file:
        for line in file.readlines():
            # A KeyError here means an analysis md5 is absent from the
            # hashfile, which is itself an inconsistency worth crashing on.
            hashes.remove(json.loads(line.strip())["md5"])

if len(hashes) == 0:
    logger.info(basename + ".ldjson is consistent with hashfile")
else:
    logger.error(basename + ".ldjson is inconsistent with hashfile")
# streamclone.py - producing and consuming streaming repository data
#
# Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
import struct
import time
from .i18n import _
from . import (
branchmap,
error,
store,
util,
)
def canperformstreamclone(pullop, bailifbundle2supported=False):
    """Whether it is possible to perform a streaming clone as part of pull.

    ``bailifbundle2supported`` will cause the function to return False if
    bundle2 stream clones are supported. It should only be called by the
    legacy stream clone code path.

    Returns a tuple of (supported, requirements). ``supported`` is True if
    streaming clone is supported and False otherwise. ``requirements`` is
    a set of repo requirements from the remote, or ``None`` if stream clone
    isn't supported.
    """
    repo = pullop.repo
    remote = pullop.remote

    bundle2supported = False
    if pullop.canusebundle2:
        if 'v1' in pullop.remotebundle2caps.get('stream', []):
            bundle2supported = True
        # else
            # Server doesn't support bundle2 stream clone or doesn't support
            # the versions we support. Fall back and possibly allow legacy.

    # Ensures legacy code path uses available bundle2.
    if bailifbundle2supported and bundle2supported:
        return False, None
    # Ensures bundle2 doesn't try to do a stream clone if it isn't supported.
    # (Intentionally disabled; kept for documentation of the symmetry.)
    #elif not bailifbundle2supported and not bundle2supported:
    #    return False, None

    # Streaming clone only works on empty repositories.
    if len(repo):
        return False, None

    # Streaming clone only works if all data is being requested.
    if pullop.heads:
        return False, None

    streamrequested = pullop.streamclonerequested

    # If we don't have a preference, let the server decide for us. This
    # likely only comes into play in LANs.
    if streamrequested is None:
        # The server can advertise whether to prefer streaming clone.
        streamrequested = remote.capable('stream-preferred')

    if not streamrequested:
        return False, None

    # In order for stream clone to work, the client has to support all the
    # requirements advertised by the server.
    #
    # The server advertises its requirements via the "stream" and "streamreqs"
    # capability. "stream" (a value-less capability) is advertised if and only
    # if the only requirement is "revlogv1." Else, the "streamreqs" capability
    # is advertised and contains a comma-delimited list of requirements.
    requirements = set()
    if remote.capable('stream'):
        requirements.add('revlogv1')
    else:
        streamreqs = remote.capable('streamreqs')
        # This is weird and shouldn't happen with modern servers.
        if not streamreqs:
            return False, None

        streamreqs = set(streamreqs.split(','))

        # Server requires something we don't support. Bail.
        if streamreqs - repo.supportedformats:
            return False, None

        requirements = streamreqs

    return True, requirements
def maybeperformlegacystreamclone(pullop):
    """Possibly perform a legacy stream clone operation.

    Legacy stream clones are performed as part of pull but before all other
    operations.

    A legacy stream clone will not be performed if a bundle2 stream clone is
    supported.
    """
    supported, requirements = canperformstreamclone(pullop)

    if not supported:
        return

    repo = pullop.repo
    remote = pullop.remote

    # Save remote branchmap. We will use it later to speed up branchcache
    # creation.
    rbranchmap = None
    if remote.capable('branchmap'):
        rbranchmap = remote.branchmap()

    repo.ui.status(_('streaming all changes\n'))

    fp = remote.stream_out()
    l = fp.readline()
    try:
        resp = int(l)
    except ValueError:
        raise error.ResponseError(
            _('unexpected response from remote server:'), l)
    # Server status line: 0 == OK, 1 == forbidden, 2 == lock failure.
    if resp == 1:
        raise error.Abort(_('operation forbidden by server'))
    elif resp == 2:
        raise error.Abort(_('locking the remote repository failed'))
    elif resp != 0:
        raise error.Abort(_('the server sent an unknown error code'))

    # Second line is "<filecount> <bytecount>".
    l = fp.readline()
    try:
        filecount, bytecount = map(int, l.split(' ', 1))
    except (ValueError, TypeError):
        raise error.ResponseError(
            _('unexpected response from remote server:'), l)

    with repo.lock():
        consumev1(repo, fp, filecount, bytecount)

        # new requirements = old non-format requirements +
        #                    new format-related remote requirements
        # requirements from the streamed-in repository
        repo.requirements = requirements | (
                repo.requirements - repo.supportedformats)
        repo._applyopenerreqs()
        repo._writerequirements()

        if rbranchmap:
            branchmap.replacecache(repo, rbranchmap)

        repo.invalidate()
def allowservergeneration(ui):
    """Whether streaming clones are allowed from the server."""
    # untrusted=True: honor the setting even from untrusted repo configs.
    return ui.configbool('server', 'uncompressed', True, untrusted=True)
# This is its own function so extensions can override it.
def _walkstreamfiles(repo):
    # Yields (unencoded name, encoded name, size) triples for store files.
    return repo.store.walk()
def generatev1(repo):
    """Emit content for version 1 of a streaming clone.

    This returns a 3-tuple of (file count, byte size, data iterator).

    The data iterator consists of N entries for each file being transferred.
    Each file entry starts as a line with the file name and integer size
    delimited by a null byte.

    The raw file data follows. Following the raw file data is the next file
    entry, or EOF.

    When used on the wire protocol, an additional line indicating protocol
    success will be prepended to the stream. This function is not responsible
    for adding it.

    This function will obtain a repository lock to ensure a consistent view of
    the store is captured. It therefore may raise LockError.
    """
    entries = []
    total_bytes = 0
    # Get consistent snapshot of repo, lock during scan.
    with repo.lock():
        repo.ui.debug('scanning\n')
        for name, ename, size in _walkstreamfiles(repo):
            if size:
                entries.append((name, size))
                total_bytes += size

    repo.ui.debug('%d files, %d bytes to transfer\n' %
                  (len(entries), total_bytes))

    svfs = repo.svfs
    oldaudit = svfs.mustaudit
    debugflag = repo.ui.debugflag
    # Disable path auditing while streaming; restored in the finally below.
    svfs.mustaudit = False

    def emitrevlogdata():
        try:
            for name, size in entries:
                if debugflag:
                    repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
                # partially encode name over the wire for backwards compat
                yield '%s\0%d\n' % (store.encodedir(name), size)
                # Small files are read in one call; larger ones are chunked.
                if size <= 65536:
                    with svfs(name, 'rb') as fp:
                        yield fp.read(size)
                else:
                    for chunk in util.filechunkiter(svfs(name), limit=size):
                        yield chunk
        finally:
            svfs.mustaudit = oldaudit

    return len(entries), total_bytes, emitrevlogdata()
def generatev1wireproto(repo):
    """Emit content for version 1 of streaming clone suitable for the wire.

    This is the data output from ``generatev1()`` with a header line
    indicating file count and byte size.
    """
    filecount, bytecount, it = generatev1(repo)
    # Wire format prefixes the payload with "<filecount> <bytecount>\n".
    yield '%d %d\n' % (filecount, bytecount)
    for chunk in it:
        yield chunk
def generatebundlev1(repo, compression='UN'):
    """Emit content for version 1 of a stream clone bundle.

    The first 4 bytes of the output ("HGS1") denote this as stream clone
    bundle version 1.

    The next 2 bytes indicate the compression type. Only "UN" is currently
    supported.

    The next 16 bytes are two 64-bit big endian unsigned integers indicating
    file count and byte count, respectively.

    The next 2 bytes is a 16-bit big endian unsigned short declaring the length
    of the requirements string, including a trailing \0. The following N bytes
    are the requirements string, which is ASCII containing a comma-delimited
    list of repo requirements that are needed to support the data.

    The remaining content is the output of ``generatev1()`` (which may be
    compressed in the future).

    Returns a tuple of (requirements, data generator).
    """
    if compression != 'UN':
        raise ValueError('we do not support the compression argument yet')

    requirements = repo.requirements & repo.supportedformats
    requires = ','.join(sorted(requirements))

    def gen():
        yield 'HGS1'
        yield compression

        filecount, bytecount, it = generatev1(repo)
        repo.ui.status(_('writing %d bytes for %d files\n') %
                       (bytecount, filecount))

        # Header: counts, then the NUL-terminated requirements string.
        yield struct.pack('>QQ', filecount, bytecount)
        yield struct.pack('>H', len(requires) + 1)
        yield requires + '\0'

        # This is where we'll add compression in the future.
        assert compression == 'UN'

        seen = 0
        repo.ui.progress(_('bundle'), 0, total=bytecount, unit=_('bytes'))

        for chunk in it:
            seen += len(chunk)
            repo.ui.progress(_('bundle'), seen, total=bytecount,
                             unit=_('bytes'))
            yield chunk

        repo.ui.progress(_('bundle'), None)

    return requirements, gen()
def consumev1(repo, fp, filecount, bytecount):
    """Apply the contents from version 1 of a streaming clone file handle.

    This takes the output from "streamout" and applies it to the specified
    repository.

    Like "streamout," the status line added by the wire protocol is not handled
    by this function.
    """
    with repo.lock():
        repo.ui.status(_('%d files to transfer, %s of data\n') %
                       (filecount, util.bytecount(bytecount)))
        handled_bytes = 0
        repo.ui.progress(_('clone'), 0, total=bytecount, unit=_('bytes'))
        start = time.time()

        with repo.transaction('clone'):
            with repo.svfs.backgroundclosing(repo.ui, expectedcount=filecount):
                for i in xrange(filecount):
                    # XXX doesn't support '\n' or '\r' in filenames
                    l = fp.readline()
                    try:
                        name, size = l.split('\0', 1)
                        size = int(size)
                    except (ValueError, TypeError):
                        raise error.ResponseError(
                            _('unexpected response from remote server:'), l)
                    if repo.ui.debugflag:
                        repo.ui.debug('adding %s (%s)\n' %
                                      (name, util.bytecount(size)))
                    # for backwards compat, name was partially encoded
                    path = store.decodedir(name)
                    with repo.svfs(path, 'w', backgroundclose=True) as ofp:
                        for chunk in util.filechunkiter(fp, limit=size):
                            handled_bytes += len(chunk)
                            repo.ui.progress(_('clone'), handled_bytes,
                                             total=bytecount, unit=_('bytes'))
                            ofp.write(chunk)

        # Writing straight to files circumvented the inmemory caches
        repo.invalidate()

        # Guard against a zero/negative elapsed time before computing the
        # transfer-rate report below.
        elapsed = time.time() - start
        if elapsed <= 0:
            elapsed = 0.001
        repo.ui.progress(_('clone'), None)
        repo.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                       (util.bytecount(bytecount), elapsed,
                        util.bytecount(bytecount / elapsed)))
def readbundle1header(fp):
    # Parse the header of a v1 stream clone bundle from *fp*, which must be
    # positioned at the 2-byte compression marker (after the "HGS1" magic).
    # Returns a (filecount, bytecount, requirements) tuple.
    compression = fp.read(2)
    if compression != 'UN':
        raise error.Abort(_('only uncompressed stream clone bundles are '
                            'supported; got %s') % compression)

    # Two 64-bit big-endian unsigned ints: file count, then byte count.
    filecount, bytecount = struct.unpack('>QQ', fp.read(16))

    # 16-bit big-endian length of the NUL-terminated requirements string.
    requireslen = struct.unpack('>H', fp.read(2))[0]
    requires = fp.read(requireslen)

    if not requires.endswith('\0'):
        raise error.Abort(_('malformed stream clone bundle: '
                            'requirements not properly encoded'))

    requirements = set(requires.rstrip('\0').split(','))

    return filecount, bytecount, requirements
def applybundlev1(repo, fp):
    """Apply the content from a stream clone bundle version 1.

    We assume the 4 byte header has been read and validated and the file handle
    is at the 2 byte compression identifier.
    """
    if len(repo):
        raise error.Abort(_('cannot apply stream clone bundle on non-empty '
                            'repo'))

    # Bail before writing anything if the bundle needs unsupported formats.
    filecount, bytecount, requirements = readbundle1header(fp)
    missingreqs = requirements - repo.supportedformats
    if missingreqs:
        raise error.Abort(_('unable to apply stream clone: '
                            'unsupported format: %s') %
                          ', '.join(sorted(missingreqs)))

    consumev1(repo, fp, filecount, bytecount)
class streamcloneapplier(object):
    """Class to manage applying streaming clone bundles.

    We need to wrap ``applybundlev1()`` in a dedicated type to enable bundle
    readers to perform bundle type-specific functionality.
    """
    def __init__(self, fh):
        # fh: file handle positioned at the compression identifier.
        self._fh = fh

    def apply(self, repo):
        # Delegate to the module-level v1 applier.
        return applybundlev1(repo, self._fh)
| dscho/hg | mercurial/streamclone.py | Python | gpl-2.0 | 13,689 | 0.001315 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v9.resources.types import keyword_plan_campaign
from google.protobuf import field_mask_pb2 # type: ignore
from google.rpc import status_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v9.services",
marshal="google.ads.googleads.v9",
manifest={
"GetKeywordPlanCampaignRequest",
"MutateKeywordPlanCampaignsRequest",
"KeywordPlanCampaignOperation",
"MutateKeywordPlanCampaignsResponse",
"MutateKeywordPlanCampaignResult",
},
)
class GetKeywordPlanCampaignRequest(proto.Message):
    r"""Request message for
    [KeywordPlanCampaignService.GetKeywordPlanCampaign][google.ads.googleads.v9.services.KeywordPlanCampaignService.GetKeywordPlanCampaign].

    Attributes:
        resource_name (str):
            Required. The resource name of the Keyword
            Plan campaign to fetch.
    """

    # NOTE(review): proto-plus message, presumably generated from the API
    # schema; field numbers must stay in sync with the .proto definition.
    resource_name = proto.Field(proto.STRING, number=1,)
class MutateKeywordPlanCampaignsRequest(proto.Message):
    r"""Request message for
    [KeywordPlanCampaignService.MutateKeywordPlanCampaigns][google.ads.googleads.v9.services.KeywordPlanCampaignService.MutateKeywordPlanCampaigns].

    Attributes:
        customer_id (str):
            Required. The ID of the customer whose
            Keyword Plan campaigns are being modified.
        operations (Sequence[google.ads.googleads.v9.services.types.KeywordPlanCampaignOperation]):
            Required. The list of operations to perform
            on individual Keyword Plan campaigns.
        partial_failure (bool):
            If true, successful operations will be
            carried out and invalid operations will return
            errors. If false, all operations will be carried
            out in one transaction if and only if they are
            all valid. Default is false.
        validate_only (bool):
            If true, the request is validated but not
            executed. Only errors are returned, not results.
    """

    # NOTE(review): presumably generated; keep field numbers aligned with
    # the .proto schema.
    customer_id = proto.Field(proto.STRING, number=1,)
    operations = proto.RepeatedField(
        proto.MESSAGE, number=2, message="KeywordPlanCampaignOperation",
    )
    partial_failure = proto.Field(proto.BOOL, number=3,)
    validate_only = proto.Field(proto.BOOL, number=4,)
class KeywordPlanCampaignOperation(proto.Message):
    r"""A single operation (create, update, remove) on a Keyword Plan
    campaign.

    This message has `oneof`_ fields (mutually exclusive fields).
    For each oneof, at most one member field can be set at the same time.
    Setting any member of the oneof automatically clears all other
    members.

    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        update_mask (google.protobuf.field_mask_pb2.FieldMask):
            The FieldMask that determines which resource
            fields are modified in an update.
        create (google.ads.googleads.v9.resources.types.KeywordPlanCampaign):
            Create operation: No resource name is
            expected for the new Keyword Plan campaign.

            This field is a member of `oneof`_ ``operation``.
        update (google.ads.googleads.v9.resources.types.KeywordPlanCampaign):
            Update operation: The Keyword Plan campaign
            is expected to have a valid resource name.

            This field is a member of `oneof`_ ``operation``.
        remove (str):
            Remove operation: A resource name for the removed Keyword
            Plan campaign is expected, in this format:

            ``customers/{customer_id}/keywordPlanCampaigns/{keywordPlan_campaign_id}``

            This field is a member of `oneof`_ ``operation``.
    """

    update_mask = proto.Field(
        proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask,
    )
    # create/update/remove share the "operation" oneof: setting one clears
    # the others.
    create = proto.Field(
        proto.MESSAGE,
        number=1,
        oneof="operation",
        message=keyword_plan_campaign.KeywordPlanCampaign,
    )
    update = proto.Field(
        proto.MESSAGE,
        number=2,
        oneof="operation",
        message=keyword_plan_campaign.KeywordPlanCampaign,
    )
    remove = proto.Field(proto.STRING, number=3, oneof="operation",)
class MutateKeywordPlanCampaignsResponse(proto.Message):
    r"""Response message for a Keyword Plan campaign mutate.

    Attributes:
        partial_failure_error (google.rpc.status_pb2.Status):
            Errors that pertain to operation failures in the partial
            failure mode. Returned only when partial_failure = true and
            all errors occur inside the operations. If any errors occur
            outside the operations (e.g. auth errors), we return an RPC
            level error.
        results (Sequence[google.ads.googleads.v9.services.types.MutateKeywordPlanCampaignResult]):
            All results for the mutate.
    """

    # NOTE(review): presumably generated; field numbers match the schema.
    partial_failure_error = proto.Field(
        proto.MESSAGE, number=3, message=status_pb2.Status,
    )
    results = proto.RepeatedField(
        proto.MESSAGE, number=2, message="MutateKeywordPlanCampaignResult",
    )
class MutateKeywordPlanCampaignResult(proto.Message):
    r"""The result for the Keyword Plan campaign mutate.

    Attributes:
        resource_name (str):
            Returned for successful operations.
    """

    resource_name = proto.Field(proto.STRING, number=1,)
__all__ = tuple(sorted(__protobuf__.manifest))
| googleads/google-ads-python | google/ads/googleads/v9/services/types/keyword_plan_campaign_service.py | Python | apache-2.0 | 6,083 | 0.000493 |
#
# Copyright 2011 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import zope
import oz.Fedora
import oz.TDL
import subprocess
import os
import re
import guestfs
import string
import libxml2
import traceback
import ConfigParser
import boto.ec2
import sys
from time import *
from tempfile import *
from imgfac.ApplicationConfiguration import ApplicationConfiguration
from imgfac.ImageFactoryException import ImageFactoryException
from imgfac.ReservationManager import ReservationManager
from boto.s3.connection import S3Connection
from boto.s3.connection import Location
from boto.exception import *
from boto.ec2.blockdevicemapping import EBSBlockDeviceType, BlockDeviceMapping
from imgfac.CloudDelegate import CloudDelegate
# Boto is very verbose - shut it up
logging.getLogger('boto').setLevel(logging.INFO)
def subprocess_check_output(*popenargs, **kwargs):
    """Run a command, capturing stdout with stderr merged into it.

    Returns a (stdout, stderr, returncode) tuple; stderr is always None
    because it is redirected into stdout.  Raises ImageFactoryException
    on a non-zero exit status, including the captured output.
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    process = subprocess.Popen(stdout=subprocess.PIPE, stderr=subprocess.STDOUT, *popenargs, **kwargs)
    stdout, stderr = process.communicate()
    retcode = process.poll()
    if retcode:
        # The command may be a list of argv strings or (with shell=True) a
        # single string; the old ' '.join(*popenargs) scattered spaces
        # between the characters of a string command.
        command = popenargs[0]
        if not isinstance(command, str):
            command = ' '.join(command)
        # Report stdout here: stderr is always None since it was merged
        # into stdout above (the old code formatted the useless None).
        raise ImageFactoryException("'%s' failed(%d): %s" % (command, retcode, stdout))
    return (stdout, stderr, retcode)
class EC2Cloud(object):
zope.interface.implements(CloudDelegate)
    def activity(self, activity):
        """Log a one-line progress message and expose it as status detail.

        Requires ``self.active_image`` to be set (done in
        builder_did_create_target_image()).
        """
        # Simple helper function
        # Activity should be a one line human-readable string indicating the task in progress
        # We log it at DEBUG and also set it as the status_detail on our active image
        self.log.debug(activity)
        self.active_image.status_detail['activity'] = activity
    def __init__(self):
        """Load application/Oz configuration and the known EC2 JEOS AMIs."""
        # Note that we are now missing ( template, target, config_block = None):
        super(EC2Cloud, self).__init__()
        self.log = logging.getLogger('%s.%s' % (__name__, self.__class__.__name__))
        config_obj = ApplicationConfiguration()
        self.app_config = config_obj.configuration
        # Inherit Oz behaviour from its config file, but force our own
        # output directory from the application config.
        self.oz_config = ConfigParser.SafeConfigParser()
        self.oz_config.read("/etc/oz/oz.cfg")
        self.oz_config.set('paths', 'output_dir', self.app_config["imgdir"])

        if "ec2" in config_obj.jeos_images:
            self.ec2_jeos_amis = config_obj.jeos_images['ec2']
        else:
            # Without JEOS AMIs only full builds are possible.
            self.log.warning("No JEOS amis defined for ec2.  Snapshot builds will not be possible.")
            self.ec2_jeos_amis = {}
    def builder_should_create_target_image(self, builder, target, image_id, template, parameters):
        """CloudDelegate hook: always opt in to creating an EC2 target image."""
        self.log.info('builder_should_create_target_image() called on EC2Cloud plugin - returning True')
        return True
    def builder_will_create_target_image(self, builder, target, image_id, template, parameters):
        """CloudDelegate hook: no pre-creation work is needed for EC2."""
        # Nothing really to do here
        pass
    def builder_did_create_target_image(self, builder, target, image_id, template, parameters):
        """Delegate hook run after the OS plugin has produced a base image.

        Transforms the raw KVM-compatible image provided by the OS plugin
        into an EC2-ready target image in three steps: strip host-specific
        identifiers (modify_oz_filesystem), flatten the disk into a single
        partition (ec2_copy_filesystem), then apply EC2-specific tweaks
        (ec2_modify_filesystem).  Sets self.status to COMPLETED on success
        or FAILED (and re-raises) on any error.
        """
        self.log.info('builder_did_create_target_image() called in EC2Cloud plugin')
        # The bulk of what is done here is EC2 specific
        # There are OS conditionals thrown in at the moment
        # For now we are putting everything into the EC2 Cloud plugin
        # TODO: Revisit this, and the plugin interface, to see if there are ways to
        # make the separation cleaner
        # This lets our logging helper know what image is being operated on
        self.builder = builder
        self.active_image = self.builder.target_image
        try:
            # TODO: More convenience vars - revisit
            self.template = template
            self.target = target
            self.tdlobj = oz.TDL.TDL(xmlstring=self.template.xml, rootpw_required=True)
            self._get_os_helper()
            # Add in target specific content
            # NOTE(review): add_target_content() is defined outside this chunk - presumably
            # it merges EC2-specific packages/files into the TDL; confirm in full source
            self.add_target_content()
            # TODO: This is a convenience variable for refactoring - rename
            self.new_image_id = builder.target_image.identifier
            # This lets our logging helper know what image is being operated on
            self.activity("Initializing Oz environment")
            # Create a name combining the TDL name and the UUID for use when tagging EC2 AMIs
            self.longname = self.tdlobj.name + "-" + self.new_image_id
            # Oz assumes unique names - TDL built for multiple backends guarantees they are not unique
            # We don't really care about the name so just force uniqueness
            self.tdlobj.name = "factory-build-" + self.new_image_id
            # populate a config object to pass to OZ; this allows us to specify our
            # own output dir but inherit other Oz behavior
            self.oz_config = ConfigParser.SafeConfigParser()
            self.oz_config.read("/etc/oz/oz.cfg")
            self.oz_config.set('paths', 'output_dir', self.app_config["imgdir"])
            # make this a property to enable quick cleanup on abort
            self.instance = None
            # OK great, we now have a customized KVM image
            # Now we do some target specific transformation
            # None of these things actually require anything other than the TDL object
            # and the original disk image
            # At this point our builder has a target_image and a base_image
            # OS plugin has already provided the initial file for us to work with
            # which we can currently assume is a raw KVM compatible image
            self.image = builder.target_image.data
            self.modify_oz_filesystem()
            self.ec2_copy_filesystem()
            self.ec2_modify_filesystem()
        except:
            self.log_exc()
            self.status="FAILED"
            raise
        self.percent_complete=100
        self.status="COMPLETED"
def _get_os_helper(self):
# For now we are adopting a 'mini-plugin' approach to OS specific code within the EC2 plugin
# In theory, this could live in the OS plugin - however, the code in question is very tightly
# related to the EC2 plugin, so it probably should stay here
try:
# Change RHEL-6 to RHEL6, etc.
os_name = self.tdlobj.distro.translate(None, '-')
class_name = "%s_ec2_Helper" % (os_name)
module_name = "imagefactory_plugins.EC2Cloud.EC2CloudOSHelpers"
__import__(module_name)
os_helper_class = getattr(sys.modules[module_name], class_name)
self.os_helper = os_helper_class(self)
except:
self.log_exc()
raise ImageFactoryException("Unable to create EC2 OS helper object for distro (%s) in TDL" % (self.tdlobj.distro) )
def push_image_to_provider(self, builder, provider, credentials, target, target_image, parameters):
self.log.info('push_image_to_provider() called in EC2Cloud')
self.builder = builder
self.active_image = self.builder.provider_image
# TODO: This is a convenience variable for refactoring - rename
self.new_image_id = builder.provider_image.identifier
self.tdlobj = oz.TDL.TDL(xmlstring=builder.target_image.template, rootpw_required=True)
self._get_os_helper()
self.push_image_upload(target_image, provider, credentials)
def delete_from_provider(self, builder, provider, credentials, target, parameters):
self.log.debug("Deleting AMI (%s)" % (self.builder.provider_image.identifier_on_provider))
self.activity("Preparing EC2 region details")
region=provider
region_conf=self.ec2_region_details[region]
boto_loc = region_conf['boto_loc']
if region != "ec2-us-east-1":
s3_url = "http://s3-%s.amazonaws.com/" % (region_conf['host'])
else:
# Note to Amazon - would it be that hard to have s3-us-east-1.amazonaws.com?
s3_url = "http://s3.amazonaws.com/"
self.ec2_decode_credentials(credentials)
ec2region = boto.ec2.get_region(boto_loc, aws_access_key_id=self.ec2_access_key, aws_secret_access_key=self.ec2_secret_key)
conn = ec2region.connect(aws_access_key_id=self.ec2_access_key, aws_secret_access_key=self.ec2_secret_key)
amis = conn.get_all_images([ self.builder.provider_image.identifier_on_provider ])
if len(amis) == 0:
raise ImageFactoryException("Unable to find AMI (%s) - cannot delete it" % (self.builder.provider_image.identifier_on_provider))
if len(amis) > 1:
raise ImageFactoryException("AMI lookup during delete returned more than one result - this should never happen - aborting")
if ami.root_device_type == "ebs":
self.log.debug("This is an EBS AMI")
# Disect the block device mapping to identify the snapshots
bd_map = ami.block_device_mapping
self.log.debug("De-registering AMI")
ami.deregister()
self.log.debug("Deleting EBS snapshots associated with AMI")
for bd in bd_map:
self.log.debug("Deleting bd snapshot (%s) for bd (%s)" % (bd_map[bd].snapshot_id, bd))
conn.delete_snapshot(bd_map[bd].snapshot_id)
else:
self.log.debug("This is an S3 AMI")
s3_conn = boto.s3.connection.S3Connection(aws_access_key_id=self.ec2_access_key, aws_secret_access_key=self.ec2_secret_key, host=s3_url)
# Disect the location to get the bucket and key for the manifest
(bucket, key) = split(ami.location, '/', 1)
self.log.debug("Retrieving S3 AMI manifest from bucket (%s) at key (%s)" % (bucket, key))
bucket = s3_conn.get_bucket(bucket)
key_obj = bucket.get_key(key)
manifest = key_obj.get_contents_as_string()
# It is possible that the key has a path-like structure"
# The XML contains only filenames - not path components
# so extract any "directory" type stuff here
keyprefix = ""
keysplit = rsplit(key,"/",1)
if len(keysplit) == 2:
keyprefix="%s/" % (keysplit[0])
self.log.debug("Deleting S3 image disk chunks")
man_etree = ElementTree.fromstring(manifest)
for part in man_etree.find("image").find("parts").findall("part"):
filename = part.find("filename").text
fullname = "%s%s" % (keyprefix, filename)
part_key_obj = bucket.get_key(fullname)
self.log.debug("Deleting %s" % (fullname))
part_key_obj.delete()
self.log.debug("Deleting manifest object %s" % (key))
key_obj.delete()
self.log.debug("de-registering the AMI itself")
ami.deregister()
def log_exc(self):
self.log.debug("Exception caught in ImageFactory")
self.log.debug(traceback.format_exc())
self.active_image.status_detail['error'] = traceback.format_exc()
    def modify_oz_filesystem(self):
        """Strip host-specific identifiers from the Oz-built image and tag it
        with cloud metadata.

        Mounts the image via libguestfs, writes /etc/sysconfig/cloud-info
        with the build target, removes the persistent udev net rules and the
        HWADDR binding so the image boots cleanly on any (cloud) hardware.

        NOTE(review): assumes the Oz default LVM layout with root on
        /dev/VolGroup00/LogVol00 and /boot on sda1/sda2 - confirm this holds
        for all supported guests.
        """
        self.activity("Removing unique identifiers from image - Adding cloud information")
        self.log.debug("init guestfs")
        g = guestfs.GuestFS ()

        self.log.debug("add input image")
        g.add_drive (self.image)

        self.log.debug("launch guestfs")
        g.launch ()

        g.mount_options("", "/dev/VolGroup00/LogVol00", "/")
        # F16 and upwards end up with boot on sda2 due to GRUB changes
        if (self.tdlobj.distro == 'Fedora') and (int(self.tdlobj.update) >= 16):
            g.mount_options("", "/dev/sda2", "/boot")
        else:
            g.mount_options("", "/dev/sda1", "/boot")

        self.log.info("Creating cloud-info file indicating target (%s)" % (self.target))
        tmpl = 'CLOUD_TYPE="%s"\n' % (self.target)
        g.write("/etc/sysconfig/cloud-info", tmpl)

        # In the cloud context we currently never need or want persistent net device names
        # This is known to break networking in RHEL/VMWare and could potentially do so elsewhere
        # Just delete the file to be safe
        if g.is_file("/etc/udev/rules.d/70-persistent-net.rules"):
            g.rm("/etc/udev/rules.d/70-persistent-net.rules")

        # Also clear out the MAC address this image was bound to.
        # Second argument is 0 - means don't save a backup - this confuses network init
        g.aug_init("/", 0)
        if g.aug_rm("/files/etc/sysconfig/network-scripts/ifcfg-eth0/HWADDR"):
            self.log.debug("Removed HWADDR from image's /etc/sysconfig/network-scripts/ifcfg-eth0")
            g.aug_save()
        else:
            self.log.debug("Failed to remove HWADDR from image's /etc/sysconfig/network-scripts/ifcfg-eth0")
        g.aug_close()

        g.sync ()
        g.umount_all ()
def ec2_copy_filesystem(self):
self.activity("Copying image contents to single flat partition for EC2")
target_image=self.image + ".tmp"
self.log.debug("init guestfs")
g = guestfs.GuestFS ()
self.log.debug("add input image")
g.add_drive (self.image)
self.log.debug("creat target image")
f = open (target_image, "w")
# TODO: Can this be larger, smaller - should it be?
f.truncate (10000 * 1024 * 1024)
f.close ()
g.add_drive(target_image)
self.log.debug("creat tmp image")
# We need a small FS to mount target and dest on - make image file for it
# TODO: Use Marek's create mount point trick instead of a temp file
tmp_image_file = "/tmp/tmp-img-" + self.new_image_id
f = open (tmp_image_file, "w")
f.truncate (10 * 1024 * 1024)
f.close
g.add_drive(tmp_image_file)
self.log.debug("launch guestfs")
g.launch ()
# TODO: Re-enable this?
# Do inspection here, as libguestfs prefers we do it before mounting anything
#inspection = g.inspect_os()
# This assumes, I think reasonably, only one OS on the disk image provided by Oz
#rootdev = inspection[0]
# At this point sda is original image - sdb is blank target - sdc is small helper
self.log.info("Making filesystems for EC2 transform")
# TODO: Make different FS types depending on the type of the original root fs
g.mkfs ("ext3", "/dev/sdb")
g.set_e2label ("/dev/sdb", "/")
g.mkfs ("ext3", "/dev/sdc")
self.log.info("Done")
g.mount_options ("", "/dev/sdc", "/")
g.mkdir("/in")
g.mkdir("/out")
# Yes, this looks odd but it is the easiest way to use cp_a from guestfs
# because we cannot use wildcards directly with guestfs
g.mkdir("/out/in")
g.mount_ro ("/dev/VolGroup00/LogVol00", "/in")
# F16 and upwards end up with boot on sda2 due to GRUB changes
if (self.tdlobj.distro == 'Fedora') and (int(self.tdlobj.update) >= 16):
g.mount_ro ("/dev/sda2", "/in/boot")
else:
g.mount_ro ("/dev/sda1", "/in/boot")
g.mount_options ("", "/dev/sdb", "/out/in")
self.log.info("Copying image contents to EC2 flat filesystem")
g.cp_a("/in/", "/out")
self.log.info("Done")
g.sync ()
g.umount_all ()
os.unlink(tmp_image_file)
self.log.debug("Copy complete - removing old image and replacing with new flat filesystem image")
os.unlink(self.image)
os.rename(target_image, self.image)
def ec2_modify_filesystem(self):
# Modifications
# Many of these are more or less directly ported from BoxGrinder
# Boxgrinder is written and maintained by Marek Goldmann and can be found at:
# http://boxgrinder.org/
# TODO: This would be safer and more robust if done within the running modified
# guest - in this would require tighter Oz integration
self.activity("Modifying flat filesystem with EC2 specific changes")
g = guestfs.GuestFS ()
g.add_drive(self.image)
g.launch ()
# Do inspection here, as libguestfs prefers we do it before mounting anything
# This should always be /dev/vda or /dev/sda but we do it anyway to be safe
osroot = g.inspect_os()[0]
# eg "fedora"
distro = g.inspect_get_distro(osroot)
arch = g.inspect_get_arch(osroot)
major_version = g.inspect_get_major_version(osroot)
minor_version = g.inspect_get_minor_version(osroot)
self.log.debug("distro: %s - arch: %s - major: %s - minor %s" % (distro, arch, major_version, minor_version))
g.mount_options ("", osroot, "/")
self.log.info("Modifying flat FS contents to be EC2 compatible")
self.log.info("Disabling SELINUX")
tmpl = '# Factory Disabled SELINUX - sorry\nSELINUX=permissive\nSELINUXTYPE=targeted\n'
g.write("/etc/sysconfig/selinux", tmpl)
# Make a /data directory for 64 bit hosts
# Ephemeral devs come pre-formatted from AWS - weird
if arch == "x86_64":
self.log.info("Making data directory")
g.mkdir("/data")
# BG - Upload one of two templated fstabs
# Input - root device name
# TODO: Match OS default behavior and/or what is found in the existing image
self.log.info("Modifying and uploading fstab")
# Make arch conditional
if arch == "x86_64":
tmpl=self.fstab_64bit
else:
tmpl=self.fstab_32bit
g.write("/etc/fstab", tmpl)
# BG - Enable networking
# Upload a known good ifcfg-eth0 and then chkconfig on networking
self.log.info("Enabling networking and uploading ifcfg-eth0")
g.sh("/sbin/chkconfig network on")
g.write("/etc/sysconfig/network-scripts/ifcfg-eth0", self.ifcfg_eth0)
# Disable first boot - this slows things down otherwise
if g.is_file("/etc/init.d/firstboot"):
g.sh("/sbin/chkconfig firstboot off")
# Ensure a sensible runlevel on systemd systems (>=F15)
# Oz/Anaconda hand us a graphical runlevel
if g.is_symlink("/etc/systemd/system/default.target"):
g.rm("/etc/systemd/system/default.target")
g.ln_s("/lib/systemd/system/multi-user.target","/etc/systemd/system/default.target")
# BG - Upload rc.local extra content
# Again, this uses a static copy - this bit is where the ssh key is downloaded
# TODO: Is this where we inject puppet?
# TODO - Possibly modify the key injection from rc_local to be only non-root
# and add a special user to sudoers - this is what BG has evolved to do
self.log.info("Updating rc.local for key injection")
g.write("/tmp/rc.local", self.rc_local)
# Starting with F16, rc.local doesn't exist by default
if not g.exists("/etc/rc.d/rc.local"):
g.sh("echo \#\!/bin/bash > /etc/rc.d/rc.local")
g.sh("chmod a+x /etc/rc.d/rc.local")
g.sh("cat /tmp/rc.local >> /etc/rc.d/rc.local")
g.rm("/tmp/rc.local")
# Don't ever allow password logins to EC2 sshd
g.aug_init("/", 0)
g.aug_set("/files/etc/ssh/sshd_config/PermitRootLogin", "without-password")
g.aug_save()
g.aug_close()
self.log.debug("Disabled root loging with password in /etc/ssh/sshd_config")
# Install menu list
# Derive the kernel version from the last element of ls /lib/modules and some
# other magic - look at linux_helper for details
# Look at /lib/modules and assume that the last kernel listed is the version we use
self.log.info("Modifying and updating menu.lst")
kernel_versions = g.ls("/lib/modules")
kernel_version = None
if (distro == "rhel") and (major_version == 5):
xenre = re.compile("xen$")
for kern in kernel_versions:
if xenre.search(kern):
kernel_version = kern
elif (len(kernel_versions) > 1) and (arch == "i386") and (distro == "fedora") and (int(major_version) <=13):
paere = re.compile("PAE$")
for kern in kernel_versions:
if paere.search(kern):
kernel_version = kern
else:
kernel_version = kernel_versions[len(kernel_versions)-1]
if not kernel_version:
self.log.debug("Unable to extract correct kernel version from: %s" % (str(kernel_versions)))
raise ImageFactoryException("Unable to extract kernel version")
self.log.debug("Using kernel version: %s" % (kernel_version))
# We could deduce this from version but it's easy to inspect
bootramfs = int(g.sh("ls -1 /boot | grep initramfs | wc -l"))
ramfs_prefix = "initramfs" if bootramfs > 0 else "initrd"
name="Image Factory EC2 boot - kernel: " + kernel_version
if (distro == "rhel") and (major_version == 5):
g.sh("/sbin/mkinitrd -f -v --preload xenblk --preload xennet /boot/initrd-%s.img %s" % (kernel_version))
kernel_options = ""
if (distro == "fedora") and (str(major_version) == "16"):
self.log.debug("Adding idle=halt option for Fedora 16 on EC2")
kernel_options += "idle=halt "
tmpl = self.menu_lst
tmpl = string.replace(tmpl, "#KERNEL_OPTIONS#", kernel_options)
tmpl = string.replace(tmpl, "#KERNEL_VERSION#", kernel_version)
tmpl = string.replace(tmpl, "#KERNEL_IMAGE_NAME#", ramfs_prefix)
tmpl = string.replace(tmpl, "#TITLE#", name)
g.write("/boot/grub/menu.lst", tmpl)
# EC2 Xen nosegneg bug
# This fixes issues with Fedora >=14 on EC2: https://bugzilla.redhat.com/show_bug.cgi?id=651861#c39
if (arch == "i386") and (distro == "fedora") and (int(major_version) >= 14):
self.log.info("Fixing Xen EC2 bug")
g.sh("echo \"hwcap 1 nosegneg\" > /etc/ld.so.conf.d/libc6-xen.conf")
g.sh("/sbin/ldconfig")
self.log.info("Done with EC2 filesystem modifications")
g.sync ()
g.umount_all ()
# TODO: Based on architecture associate one of two XML blocks that contain the correct
# regional AKIs for pvgrub
def wait_for_ec2_ssh_access(self, guestaddr):
self.activity("Waiting for SSH access to EC2 instance")
for i in range(300):
if i % 10 == 0:
self.log.debug("Waiting for EC2 ssh access: %d/300" % (i))
try:
stdout, stderr, retcode = self.guest.guest_execute_command(guestaddr, "/bin/true", timeout = 10)
break
except:
pass
sleep(1)
if i == 299:
raise ImageFactoryException("Unable to gain ssh access after 300 seconds - aborting")
def wait_for_ec2_instance_start(self, instance):
self.activity("Waiting for EC2 instance to become active")
for i in range(300):
if i % 10 == 0:
self.log.debug("Waiting for EC2 instance to start: %d/300" % (i))
try:
instance.update()
except EC2ResponseError, e:
# We occasionally get errors when querying an instance that has just started - ignore them and hope for the best
self.log.warning("EC2ResponseError encountered when querying EC2 instance (%s) - trying to continue" % (instance.id), exc_info = True)
except:
self.log.error("Exception encountered when updating status of instance (%s)" % (instance.id), exc_info = True)
self.status="FAILED"
try:
self.terminate_instance(instance)
except:
log.warning("WARNING: Instance (%s) failed to start and will not terminate - it may still be running" % (instance.id), exc_info = True)
raise ImageFactoryException("Instance (%s) failed to fully start or terminate - it may still be running" % (instance.id))
raise ImageFactoryException("Exception encountered when waiting for instance (%s) to start" % (instance.id))
if instance.state == u'running':
break
sleep(1)
if instance.state != u'running':
self.status="FAILED"
try:
self.terminate_instance(instance)
except:
log.warning("WARNING: Instance (%s) failed to start and will not terminate - it may still be running" % (instance.id), exc_info = True)
raise ImageFactoryException("Instance (%s) failed to fully start or terminate - it may still be running" % (instance.id))
raise ImageFactoryException("Instance failed to start after 300 seconds - stopping")
def terminate_instance(self, instance):
# boto 1.9 claims a terminate() method but does not implement it
# boto 2.0 throws an exception if you attempt to stop() an S3 backed instance
# introspect here and do the best we can
if "terminate" in dir(instance):
instance.terminate()
else:
instance.stop()
def snapshot_image_on_provider(self, builder, provider, credentials, target, template, parameters):
self.log.info('snapshot_image_on_provider() called in EC2Cloud')
self.builder = builder
self.active_image = self.builder.provider_image
# TODO: This is a convenience variable for refactoring - rename
self.new_image_id = builder.provider_image.identifier
# Template must be defined for snapshots
self.tdlobj = oz.TDL.TDL(xmlstring=str(template), rootpw_required=True)
self._get_os_helper()
self.os_helper.init_guest()
def replace(item):
if item in [self.ec2_access_key, self.ec2_secret_key]:
return "REDACTED"
return item
self.log.debug("Being asked to push for provider %s" % (provider))
self.log.debug("distro: %s - update: %s - arch: %s" % (self.tdlobj.distro, self.tdlobj.update, self.tdlobj.arch))
self.ec2_decode_credentials(credentials)
self.log.debug("acting as EC2 user: %s" % (str(self.ec2_user_id)))
self.status="PUSHING"
self.percent_complete=0
self.activity("Preparing EC2 region details")
region=provider
# These are the region details for the TARGET region for our new AMI
region_conf=self.ec2_region_details[region]
aki = region_conf[self.tdlobj.arch]
boto_loc = region_conf['boto_loc']
if region != "ec2-us-east-1":
upload_url = "http://s3-%s.amazonaws.com/" % (region_conf['host'])
else:
# Note to Amazon - would it be that hard to have s3-us-east-1.amazonaws.com?
upload_url = "http://s3.amazonaws.com/"
register_url = "http://ec2.%s.amazonaws.com/" % (region_conf['host'])
ami_id = "none"
build_region = provider
try:
ami_id = self.ec2_jeos_amis[provider][self.tdlobj.distro][self.tdlobj.update][self.tdlobj.arch]
except KeyError:
pass
if ami_id == "none":
try:
# Fallback to modification on us-east and upload cross-region
ami_id = self.ec2_jeos_amis['ec2-us-east-1'][self.tdlobj.distro][self.tdlobj.update][self.tdlobj.arch]
build_region = 'ec2-us-east-1'
self.log.info("WARNING: Building in ec2-us-east-1 for upload to %s" % (provider))
self.log.info(" This may be a bit slow - ask the Factory team to create a region-local JEOS")
except KeyError:
pass
if ami_id == "none":
self.status="FAILED"
raise ImageFactoryException("No available JEOS for desired OS, verison combination")
# These are the region details for the region we are building in (which may be different from the target)
build_region_conf = self.ec2_region_details[build_region]
# Note that this connection may be to a region other than the target
self.activity("Preparing EC2 JEOS AMI details")
ec2region = boto.ec2.get_region(build_region_conf['host'], aws_access_key_id=self.ec2_access_key, aws_secret_access_key=self.ec2_secret_key)
conn = ec2region.connect(aws_access_key_id=self.ec2_access_key, aws_secret_access_key=self.ec2_secret_key)
# Verify that AMI actually exists - err out if not
# Extract AMI type - "ebs" or "instance-store" (S3)
# If build_region != provider (meaning we are not building in our target region)
# if type == ebs throw an error - EBS builds must be in the target region/provider
amis = conn.get_all_images([ ami_id ])
ami = amis[0]
if (build_region != provider) and (ami.root_device_type == "ebs"):
self.log.error("EBS JEOS image exists in us-east-1 but not in target region (%s)" % (provider))
raise ImageFactoryException("No EBS JEOS image for region (%s) - aborting" % (provider))
instance_type=self.app_config.get('ec2-64bit-util','m1.large')
if self.tdlobj.arch == "i386":
instance_type=self.app_config.get('ec2-32bit-util','m1.small')
# Create a use-once SSH-able security group
self.activity("Creating EC2 security group for SSH access to utility image")
factory_security_group_name = "imagefactory-%s" % (self.new_image_id, )
factory_security_group_desc = "Temporary ImageFactory generated security group with SSH access"
self.log.debug("Creating temporary security group (%s)" % (factory_security_group_name))
factory_security_group = conn.create_security_group(factory_security_group_name, factory_security_group_desc)
factory_security_group.authorize('tcp', 22, 22, '0.0.0.0/0')
# Create a use-once SSH key
self.activity("Creating EC2 SSH key pair")
key_name = "fac-tmp-key-%s" % (self.new_image_id)
key = conn.create_key_pair(key_name)
# Shove into a named temp file
key_file_object = NamedTemporaryFile()
key_file_object.write(key.material)
key_file_object.flush()
key_file=key_file_object.name
# Now launch it
self.activity("Launching EC2 JEOS image")
self.log.debug("Starting ami %s with instance_type %s" % (ami_id, instance_type))
reservation = conn.run_instances(ami_id, instance_type=instance_type, key_name=key_name, security_groups = [ factory_security_group_name ])
if len(reservation.instances) != 1:
self.status="FAILED"
raise ImageFactoryException("run_instances did not result in the expected single instance - stopping")
self.instance = reservation.instances[0]
self.wait_for_ec2_instance_start(self.instance)
# From this point on we must be sure to terminate the instance when we are done
# so wrap in a try/finally
# Accidentally running a 64 bit instance doing nothing costs 56 USD week
try:
guestaddr = self.instance.public_dns_name
self.guest.sshprivkey = key_file
# Ugly ATM because failed access always triggers an exception
self.wait_for_ec2_ssh_access(guestaddr)
# There are a handful of additional boot tasks after SSH starts running
# Give them an additional 20 seconds for good measure
self.log.debug("Waiting 20 seconds for remaining boot tasks")
sleep(20)
self.activity("Customizing running EC2 JEOS instance")
self.log.debug("Stopping cron and killing any updatedb process that may be running")
# updatedb interacts poorly with the bundle step - make sure it isn't running
self.guest.guest_execute_command(guestaddr, "/sbin/service crond stop")
self.guest.guest_execute_command(guestaddr, "killall -9 updatedb || /bin/true")
self.log.debug("Done")
if ami.root_device_type == "instance-store":
# Different OSes need different steps here
# Only needed for S3 images
self.install_euca_tools(guestaddr)
# Not all JEOS images contain this - redoing it if already present is harmless
self.log.info("Creating cloud-info file indicating target (%s)" % (self.target))
self.guest.guest_execute_command(guestaddr, 'echo CLOUD_TYPE=\\\"%s\\\" > /etc/sysconfig/cloud-info' % (self.target))
self.log.debug("Customizing guest: %s" % (guestaddr))
self.guest.mkdir_p(self.guest.icicle_tmp)
self.guest.do_customize(guestaddr)
self.log.debug("Customization step complete")
self.log.debug("Generating ICICLE from customized guest")
self.output_descriptor = self.guest.do_icicle(guestaddr)
self.log.debug("ICICLE generation complete")
self.log.debug("Re-de-activate firstboot just in case it has been revived during customize")
self.guest.guest_execute_command(guestaddr, "[ -f /etc/init.d/firstboot ] && /sbin/chkconfig firstboot off || /bin/true")
self.log.debug("De-activation complete")
new_ami_id = None
image_name = str(self.longname)
image_desc = "%s - %s" % (asctime(localtime()), self.tdlobj.description)
if ami.root_device_type == "instance-store":
# This is an S3 image so we snapshot to another S3 image using euca-bundle-vol and
# associated tools
ec2cert = "/etc/pki/imagefactory/cert-ec2.pem"
# This is needed for uploading and registration
# Note that it is excluded from the final image
self.activity("Uploading certificate material for bundling of instance")
self.guest.guest_live_upload(guestaddr, self.ec2_cert_file, "/tmp")
self.guest.guest_live_upload(guestaddr, self.ec2_key_file, "/tmp")
self.guest.guest_live_upload(guestaddr, ec2cert, "/tmp")
self.log.debug("Cert upload complete")
# Some local variables to make the calls below look a little cleaner
ec2_uid = self.ec2_user_id
arch = self.tdlobj.arch
# AKI is set above
uuid = self.new_image_id
# We exclude /mnt /tmp and /root/.ssh to avoid embedding our utility key into the image
command = "euca-bundle-vol -c /tmp/%s -k /tmp/%s -u %s -e /mnt,/tmp,/root/.ssh --arch %s -d /mnt/bundles --kernel %s -p %s -s 10240 --ec2cert /tmp/cert-ec2.pem --fstab /etc/fstab -v /" % (os.path.basename(self.ec2_cert_file), os.path.basename(self.ec2_key_file), ec2_uid, arch, aki, uuid)
self.activity("Bundling remote instance in-place")
self.log.debug("Executing bundle vol command: %s" % (command))
stdout, stderr, retcode = self.guest.guest_execute_command(guestaddr, command)
self.log.debug("Bundle output: %s" % (stdout))
# Now, ensure we have an appropriate bucket to receive this image
# TODO: This is another copy - make it a function soon please
bucket= "imagefactory-" + region + "-" + self.ec2_user_id
self.activity("Preparing S3 destination for image bundle")
sconn = S3Connection(self.ec2_access_key, self.ec2_secret_key)
try:
sconn.create_bucket(bucket, location=boto_loc)
except S3CreateError as buckerr:
if buckerr.error_code == "BucketAlreadyOwnedByYou":
# Expected behavior after first push - not an error
pass
else:
raise
# TODO: End of copy
# TODO: We cannot timeout on any of the three commands below - can we fix that?
manifest = "/mnt/bundles/%s.manifest.xml" % (uuid)
# Unfortunately, for some OS versions we need to correct the manifest
self.correct_remote_manifest(guestaddr, manifest)
command = ['euca-upload-bundle', '-b', bucket, '-m', manifest,
'--ec2cert', '/tmp/cert-ec2.pem',
'-a', self.ec2_access_key, '-s', self.ec2_secret_key,
'-U', upload_url]
command_log = map(replace, command)
self.activity("Uploading bundle to S3")
self.log.debug("Executing upload bundle command: %s" % (command_log))
stdout, stderr, retcode = self.guest.guest_execute_command(guestaddr, ' '.join(command))
self.log.debug("Upload output: %s" % (stdout))
manifest_s3_loc = "%s/%s.manifest.xml" % (bucket, uuid)
command = ['euca-register', '-U', register_url,
'-A', self.ec2_access_key, '-S', self.ec2_secret_key, '-a', self.tdlobj.arch,
#'-n', image_name, '-d', image_desc,
manifest_s3_loc]
command_log = map(replace, command)
self.activity("Registering bundle as a new AMI")
self.log.debug("Executing register command: %s" % (command_log))
stdout, stderr, retcode = self.guest.guest_execute_command(guestaddr,
' '.join(command))
self.log.debug("Register output: %s" % (stdout))
m = re.match(".*(ami-[a-fA-F0-9]+)", stdout)
new_ami_id = m.group(1)
self.log.debug("Extracted AMI ID: %s " % (new_ami_id))
### End S3 snapshot code
else:
self.activity("Preparing image for an EBS snapshot")
self.log.debug("Performing image prep tasks for EBS backed images")
self.ebs_pre_shapshot_tasks(guestaddr)
self.activity("Requesting EBS snapshot creation by EC2")
self.log.debug("Creating a new EBS backed image from our running EBS instance")
new_ami_id = conn.create_image(self.instance.id, image_name, image_desc)
self.log.debug("EUCA creat_image call returned AMI ID: %s" % (new_ami_id))
self.activity("Waiting for newly generated AMI to become available")
# As with launching an instance we have seen occasional issues when trying to query this AMI right
# away - give it a moment to settle
sleep(10)
new_amis = conn.get_all_images([ new_ami_id ])
new_ami = new_amis[0]
timeout = 120
interval = 10
for i in range(timeout):
new_ami.update()
if new_ami.state == "available":
break
elif new_ami.state == "failed":
raise ImageFactoryException("Amazon reports EBS image creation failed")
self.log.debug("AMI status (%s) - waiting for 'available' - [%d of %d seconds elapsed]" % (new_ami.state, i * interval, timeout * interval))
sleep(interval)
if not new_ami_id:
raise ImageFactoryException("Failed to produce an AMI ID")
# This replaces our Warehouse calls
self.builder.provider_image.icicle = self.output_descriptor
self.builder.provider_image.identifier_on_provider = new_ami_id
self.builder.provider_account_identifier = self.ec2_access_key
finally:
self.activity("Terminating EC2 instance and deleting security group and SSH key")
self.terminate_instance(self.instance)
key_file_object.close()
conn.delete_key_pair(key_name)
try:
timeout = 60
interval = 5
for i in range(timeout):
self.instance.update()
if(self.instance.state == "terminated"):
factory_security_group.delete()
self.log.debug("Removed temporary security group (%s)" % (factory_security_group_name))
break
elif(i < timeout):
self.log.debug("Instance status (%s) - waiting for 'terminated'. [%d of %d seconds elapsed]" % (self.instance.state, i * interval, timeout * interval))
sleep(interval)
else:
raise Exception("Timeout waiting for instance to terminate.")
except Exception, e:
self.log.debug("Unable to delete temporary security group (%s) due to exception: %s" % (factory_security_group_name, e))
self.log.debug("Fedora_ec2_Builder instance %s pushed image with uuid %s to provider_image UUID (%s)" % (id(self), target_image_id, self.new_image_id))
self.percent_complete=100
self.status="COMPLETED"
def push_image_upload(self, target_image_id, provider, credentials):
self.status="PUSHING"
self.percent_complete=0
try:
if self.app_config["ec2_ami_type"] == "s3":
self.ec2_push_image_upload(target_image_id, provider,
credentials)
elif self.app_config["ec2_ami_type"] == "ebs":
self.ec2_push_image_upload_ebs(target_image_id, provider,
credentials)
else:
raise ImageFactoryException("Invalid or unspecified EC2 AMI type in config file")
except:
self.log_exc()
self.status="FAILED"
raise
self.status="COMPLETED"
def _ec2_get_xml_node(self, doc, credtype):
nodes = doc.xpathEval("//provider_credentials/ec2_credentials/%s" % (credtype))
if len(nodes) < 1:
raise ImageFactoryException("No EC2 %s available" % (credtype))
return nodes[0].content
def ec2_decode_credentials(self, credentials):
self.activity("Preparing EC2 credentials")
doc = libxml2.parseDoc(credentials)
self.ec2_user_id = self._ec2_get_xml_node(doc, "account_number")
self.ec2_access_key = self._ec2_get_xml_node(doc, "access_key")
self.provider_account_identifier = self.ec2_access_key
self.ec2_secret_key = self._ec2_get_xml_node(doc, "secret_access_key")
# Support both "key" and "x509_private" as element names
ec2_key_node = doc.xpathEval("//provider_credentials/ec2_credentials/key")
if not ec2_key_node:
ec2_key_node = doc.xpathEval("//provider_credentials/ec2_credentials/x509_private")
if not ec2_key_node:
raise ImageFactoryException("No x509 private key found in ec2 credentials")
ec2_key=ec2_key_node[0].content
# Support both "certificate" and "x509_public" as element names
ec2_cert_node = doc.xpathEval("//provider_credentials/ec2_credentials/certificate")
if not ec2_cert_node:
ec2_cert_node = doc.xpathEval("//provider_credentials/ec2_credentials/x509_public")
if not ec2_cert_node:
raise ImageFactoryException("No x509 public certificate found in ec2 credentials")
ec2_cert = ec2_cert_node[0].content
doc.freeDoc()
# Shove certs into named temporary files
self.ec2_cert_file_object = NamedTemporaryFile()
self.ec2_cert_file_object.write(ec2_cert)
self.ec2_cert_file_object.flush()
self.ec2_cert_file=self.ec2_cert_file_object.name
self.ec2_key_file_object = NamedTemporaryFile()
self.ec2_key_file_object.write(ec2_key)
self.ec2_key_file_object.flush()
self.ec2_key_file=self.ec2_key_file_object.name
    def ec2_push_image_upload_ebs(self, target_image_id, provider, credentials):
        """Push a target image to EC2 as an EBS-backed AMI.

        Strategy: compress the image locally (once, under a named lock),
        boot a utility instance from a known Fedora 16 i386 JEOS AMI in the
        destination region, upload the compressed image over SSH, decompress
        it onto a freshly created 10 GiB EBS volume, snapshot the volume and
        register the snapshot as a new AMI.  The resulting AMI id is stored
        on self.builder.provider_image.
        """
        # TODO: Merge with ec2_push_image_upload and/or factor out duplication
        # In this case we actually do need an Oz object to manipulate a remote guest
        self.os_helper.init_guest()
        self.ec2_decode_credentials(credentials)
        # We don't need the x509 material here so close the temp files right away
        # TODO: Mod the decode to selectively create the files in the first place
        # This is silly and messy
        self.ec2_cert_file_object.close()
        self.ec2_key_file_object.close()
        # Image is always here and it is the target_image datafile
        input_image = self.builder.target_image.data
        input_image_compressed = input_image + ".gz"
        input_image_compressed_name = os.path.basename(input_image_compressed)
        compress_complete_marker = input_image_compressed + "-factory-compressed"
        # We are guaranteed to hit this from multiple builders looking at the same image
        # Grab a named lock based on the file name
        # If the file is not present this guarantees that only one thread will compress
        # NOTE: It is important to grab the lock before we even look for the file
        # TODO: Switched this to use shell callouts because of a 64 bit bug - fix that
        res_mgr = ReservationManager()
        res_mgr.get_named_lock(input_image_compressed)
        try:
            if not os.path.isfile(input_image_compressed) or not os.path.isfile(compress_complete_marker):
                self.activity("Compressing image file for upload to EC2")
                self.log.debug("No compressed version of image file found - compressing now")
                compress_command = 'gzip -c %s > %s' % (input_image, input_image_compressed)
                self.log.debug("Compressing image file with external gzip cmd: %s" % (compress_command))
                result = subprocess.call(compress_command, shell = True)
                if result:
                    raise ImageFactoryException("Compression of image failed")
                self.log.debug("Compression complete")
                # Mark completion with an empty file
                # Without this we might use a partially compressed file that resulted from a crash or termination
                subprocess.call("touch %s" % (compress_complete_marker), shell = True)
        finally:
            res_mgr.release_named_lock(input_image_compressed)
        self.activity("Preparing EC2 region details")
        region=provider
        region_conf=self.ec2_region_details[region]
        # Region- and arch-specific pvgrub kernel image id
        aki = region_conf[self.tdlobj.arch]
        # Use our F16 - 32 bit JEOS image as the utility image for uploading to the EBS volume
        try:
            ami_id = self.ec2_jeos_amis[provider]['Fedora']['16']['i386']
        except KeyError:
            raise ImageFactoryException("No Fedora 16 i386 JEOS/utility image in region (%s) - aborting", (provider))
        # i386
        instance_type=self.app_config.get('ec2-32bit-util','m1.small')
        self.activity("Initializing connection to ec2 region (%s)" % region_conf['host'])
        ec2region = boto.ec2.get_region(region_conf['host'], aws_access_key_id=self.ec2_access_key, aws_secret_access_key=self.ec2_secret_key)
        conn = ec2region.connect(aws_access_key_id=self.ec2_access_key, aws_secret_access_key=self.ec2_secret_key)
        # Create security group
        self.activity("Creating EC2 security group for SSH access to utility image")
        factory_security_group_name = "imagefactory-%s" % (str(self.new_image_id))
        factory_security_group_desc = "Temporary ImageFactory generated security group with SSH access"
        self.log.debug("Creating temporary security group (%s)" % (factory_security_group_name))
        factory_security_group = conn.create_security_group(factory_security_group_name, factory_security_group_desc)
        factory_security_group.authorize('tcp', 22, 22, '0.0.0.0/0')
        # Create a use-once SSH key
        self.activity("Creating SSH key pair for image upload")
        key_name = "fac-tmp-key-%s" % (self.new_image_id)
        key = conn.create_key_pair(key_name)
        # Shove into a named temp file
        key_file_object = NamedTemporaryFile()
        key_file_object.write(key.material)
        key_file_object.flush()
        key_file=key_file_object.name
        # Now launch it
        self.activity("Launching EC2 utility image")
        reservation = conn.run_instances(ami_id, instance_type=instance_type, key_name=key_name, security_groups = [ factory_security_group_name ])
        if len(reservation.instances) != 1:
            self.status="FAILED"
            raise ImageFactoryException("run_instances did not result in the expected single instance - stopping")
        self.instance = reservation.instances[0]
        self.wait_for_ec2_instance_start(self.instance)
        # From this point on we must be sure to terminate the instance when we are done
        # so wrap in a try/finally
        # Accidentally running a 64 bit instance doing nothing costs 56 USD week
        volume = None
        try:
            guestaddr = self.instance.public_dns_name
            self.guest.sshprivkey = key_file
            # Ugly ATM because failed access always triggers an exception
            self.wait_for_ec2_ssh_access(guestaddr)
            # There are a handful of additional boot tasks after SSH starts running
            # Give them an additional 20 seconds for good measure
            self.log.debug("Waiting 20 seconds for remaining boot tasks")
            sleep(20)
            self.activity("Creating 10 GiB volume in (%s) to hold new image" % (self.instance.placement))
            volume = conn.create_volume(10, self.instance.placement)
            # Do the upload before testing to see if the volume has completed
            # to get a bit of parallel work
            self.activity("Uploading compressed image file")
            self.guest.guest_live_upload(guestaddr, input_image_compressed, "/mnt")
            # Don't burden API users with the step-by-step details here
            self.activity("Preparing EC2 volume to receive new image")
            # Volumes can sometimes take a very long time to create
            # Wait up to 10 minutes for now (plus the time taken for the upload above)
            self.log.debug("Waiting up to 600 seconds for volume (%s) to become available" % (volume.id))
            retcode = 1
            for i in range(60):
                volume.update()
                if volume.status == "available":
                    retcode = 0
                    break
                self.log.debug("Volume status (%s) - waiting for 'available': %d/600" % (volume.status, i*10))
                sleep(10)
            if retcode:
                raise ImageFactoryException("Unable to create target volume for EBS AMI - aborting")
            # Volume is now available
            # Attach it
            conn.attach_volume(volume.id, self.instance.id, "/dev/sdh")
            self.log.debug("Waiting up to 120 seconds for volume (%s) to become in-use" % (volume.id))
            retcode = 1
            for i in range(12):
                volume.update()
                vs = volume.attachment_state()
                if vs == "attached":
                    retcode = 0
                    break
                self.log.debug("Volume status (%s) - waiting for 'attached': %d/120" % (vs, i*10))
                sleep(10)
            if retcode:
                raise ImageFactoryException("Unable to attach volume (%s) to instance (%s) aborting" % (volume.id, self.instance.id))
            # TODO: This may not be necessary but it helped with some funnies observed during testing
            # At some point run a bunch of builds without the delay to see if it breaks anything
            self.log.debug("Waiting 20 seconds for EBS attachment to stabilize")
            sleep(20)
            # Decompress image into new EBS volume
            self.activity("Decompressing image into new volume")
            command = "gzip -dc /mnt/%s | dd of=/dev/xvdh bs=4k\n" % (input_image_compressed_name)
            self.log.debug("Decompressing image file into EBS device via command: %s" % (command))
            self.guest.guest_execute_command(guestaddr, command)
            # Sync before snapshot
            self.guest.guest_execute_command(guestaddr, "sync")
            # Snapshot EBS volume
            self.activity("Taking EC2 snapshot of new volume")
            self.log.debug("Taking snapshot of volume (%s)" % (volume.id))
            snapshot = conn.create_snapshot(volume.id, 'Image Factory Snapshot for provider image %s' % self.new_image_id)
            # This can take a _long_ time - wait up to 20 minutes
            self.log.debug("Waiting up to 1200 seconds for snapshot (%s) to become completed" % (snapshot.id))
            retcode = 1
            for i in range(120):
                snapshot.update()
                if snapshot.status == "completed":
                    retcode = 0
                    break
                self.log.debug("Snapshot progress(%s) - status (%s) - waiting for 'completed': %d/1200" % (str(snapshot.progress), snapshot.status, i*10))
                sleep(10)
            if retcode:
                raise ImageFactoryException("Unable to snapshot volume (%s) - aborting" % (volume.id))
            # register against snapshot
            self.activity("Registering snapshot as a new AMI")
            self.log.debug("Registering snapshot (%s) as new EBS AMI" % (snapshot.id))
            ebs = EBSBlockDeviceType()
            ebs.snapshot_id = snapshot.id
            # Root volume goes away with the instance it backs
            ebs.delete_on_termination = True
            block_map = BlockDeviceMapping()
            block_map['/dev/sda1'] = ebs
            # The ephemeral mappings are automatic with S3 images
            # For EBS images we need to make them explicit
            # These settings are required to make the same fstab work on both S3 and EBS images
            e0 = EBSBlockDeviceType()
            e0.ephemeral_name = 'ephemeral0'
            e1 = EBSBlockDeviceType()
            e1.ephemeral_name = 'ephemeral1'
            if self.tdlobj.arch == "i386":
                block_map['/dev/sda2'] = e0
                block_map['/dev/sda3'] = e1
            else:
                block_map['/dev/sdb'] = e0
                block_map['/dev/sdc'] = e1
            result = conn.register_image(name='ImageFactory created AMI - %s' % (self.new_image_id),
                                         description='ImageFactory created AMI - %s' % (self.new_image_id),
                                         architecture=self.tdlobj.arch, kernel_id=aki,
                                         root_device_name='/dev/sda1', block_device_map=block_map)
            ami_id = str(result)
            self.log.debug("Extracted AMI ID: %s " % (ami_id))
        except:
            self.log.debug("EBS image upload failed on exception")
            #DANGER!!! Uncomment at your own risk!
            #This is for deep debugging of the EBS utility instance - don't forget to shut it down manually
            #self.log.debug("EBS image upload failed on exception", exc_info = True)
            #self.log.debug("Waiting more or less forever to allow inspection of the instance")
            #self.log.debug("run this: ssh -i %s root@%s" % (key_file, self.instance.public_dns_name))
            #sleep(999999)
            raise
        finally:
            self.activity("Terminating EC2 instance and deleting temp security group and volume")
            self.terminate_instance(self.instance)
            key_file_object.close()
            conn.delete_key_pair(key_name)
            self.log.debug("Waiting up to 240 seconds for instance (%s) to shut down" % (self.instance.id))
            retcode = 1
            for i in range(24):
                self.instance.update()
                if self.instance.state == "terminated":
                    retcode = 0
                    break
                self.log.debug("Instance status (%s) - waiting for 'terminated': %d/240" % (self.instance.state, i*10))
                sleep(10)
            if retcode:
                self.log.warning("Instance (%s) failed to terminate - Unable to delete volume (%s) or delete factory temp security group" % (self.instance.id, volume.id))
            else:
                self.log.debug("Deleting temporary security group")
                factory_security_group.delete()
                if volume:
                    self.log.debug("Deleting EBS volume (%s)" % (volume.id))
                    volume.delete()
        # TODO: Add back-reference to ICICLE from base image object
        # This replaces our warehouse calls
        self.builder.provider_image.identifier_on_provider=ami_id
        self.builder.provider_image.provider_account_identifier=self.ec2_access_key
        self.log.debug("Fedora_ec2_Builder instance %s pushed image with uuid %s to provider_image UUID (%s)" % (id(self), target_image_id, self.new_image_id))
        self.percent_complete=100
    def ec2_push_image_upload(self, target_image_id, provider, credentials):
        """Push a target image to EC2 as an S3-backed (instance-store) AMI.

        Bundles the image locally with euca-bundle-image, uploads the bundle
        into a region-specific S3 bucket with euca-upload-bundle, then
        registers it as an AMI with euca-register.  The resulting AMI id is
        stored on self.builder.provider_image.
        """
        # Used to redact credentials in the command lines we log below
        def replace(item):
            if item in [self.ec2_access_key, self.ec2_secret_key]:
                return "REDACTED"
            return item
        # Image is always here and it is the target_image datafile
        input_image = self.builder.target_image.data
        input_image_name = os.path.basename(input_image)
        self.ec2_decode_credentials(credentials)
        bundle_destination=self.app_config['imgdir']
        self.activity("Preparing EC2 region details and connection")
        region=provider
        region_conf=self.ec2_region_details[region]
        # Region- and arch-specific pvgrub kernel image id
        aki = region_conf[self.tdlobj.arch]
        boto_loc = region_conf['boto_loc']
        if region != "ec2-us-east-1":
            upload_url = "http://s3-%s.amazonaws.com/" % (region_conf['host'])
        else:
            # Note to Amazon - would it be that hard to have s3-us-east-1.amazonaws.com?
            upload_url = "http://s3.amazonaws.com/"
        register_url = "http://ec2.%s.amazonaws.com/" % (region_conf['host'])
        bucket= "imagefactory-" + region + "-" + self.ec2_user_id
        # Euca does not support specifying region for bucket
        # (Region URL is not sufficient)
        # See: https://bugs.launchpad.net/euca2ools/+bug/704658
        # What we end up having to do is manually create a bucket in the right region
        # then explicitly point to that region URL when doing the image upload
        # We CANNOT let euca create the bucket when uploading or it will end up in us-east-1
        conn = S3Connection(self.ec2_access_key, self.ec2_secret_key)
        try:
            conn.create_bucket(bucket, location=boto_loc)
        except S3CreateError as buckerr:
            # if the bucket already exists, it is not an error
            if buckerr.error_code != "BucketAlreadyOwnedByYou":
                raise
        # TODO: Make configurable?
        ec2_service_cert = "/etc/pki/imagefactory/cert-ec2.pem"
        bundle_command = [ "euca-bundle-image", "-i", input_image,
                           "--kernel", aki, "-d", bundle_destination,
                           "-a", self.ec2_access_key, "-s", self.ec2_secret_key,
                           "-c", self.ec2_cert_file, "-k", self.ec2_key_file,
                           "-u", self.ec2_user_id, "-r", self.tdlobj.arch,
                           "--ec2cert", ec2_service_cert ]
        bundle_command_log = map(replace, bundle_command)
        self.activity("Bundling image locally")
        self.log.debug("Executing bundle command: %s " % (bundle_command_log))
        bundle_output = subprocess_check_output(bundle_command)
        self.log.debug("Bundle command complete")
        self.log.debug("Bundle command output: %s " % (str(bundle_output)))
        self.percent_complete=40
        manifest = bundle_destination + "/" + input_image_name + ".manifest.xml"
        upload_command = [ "euca-upload-bundle", "-b", bucket, "-m", manifest,
                           "--ec2cert", ec2_service_cert,
                           "-a", self.ec2_access_key, "-s", self.ec2_secret_key,
                           "-U" , upload_url ]
        upload_command_log = map(replace, upload_command)
        self.activity("Uploading image to EC2")
        self.log.debug("Executing upload command: %s " % (upload_command_log))
        upload_output = subprocess_check_output(upload_command)
        self.log.debug("Upload command output: %s " % (str(upload_output)))
        self.percent_complete=90
        s3_path = bucket + "/" + input_image_name + ".manifest.xml"
        register_env = { 'EC2_URL':register_url }
        register_command = [ "euca-register" , "-A", self.ec2_access_key,
                             "-S", self.ec2_secret_key, "-a", self.tdlobj.arch, s3_path ]
        register_command_log = map(replace, register_command)
        self.activity("Registering image")
        self.log.debug("Executing register command: %s with environment %s " % (register_command_log, repr(register_env)))
        register_output = subprocess_check_output(register_command, env=register_env)
        self.log.debug("Register command output: %s " % (str(register_output)))
        # Pull the new AMI id out of euca-register's stdout
        m = re.match(".*(ami-[a-fA-F0-9]+)", register_output[0])
        ami_id = m.group(1)
        self.log.debug("Extracted AMI ID: %s " % (ami_id))
        # TODO: This should be in a finally statement that rethrows exceptions
        self.ec2_cert_file_object.close()
        self.ec2_key_file_object.close()
        self.status = "PUSHING"
        # TODO: Generate and store ICICLE
        # This replaces our warehouse calls
        self.builder.provider_image.identifier_on_provider = ami_id
        self.builder.provider_image.provider_account_identifier = self.ec2_access_key
        self.log.debug("Fedora_ec2_Builder instance %s pushed image with uuid %s to provider_image UUID (%s)" % (id(self), target_image_id, self.new_image_id))
        self.percent_complete=100
def abort(self):
# TODO: Make this progressively more robust
# In the near term, the most important thing we can do is terminate any EC2 instance we may be using
if self.instance:
instance_id = self.instance.id
try:
self.terminate_instance(self.instance)
except Exception, e:
self.log.warning("Warning, encountered - Instance %s may not be terminated ******** " % (instance_id))
self.log.exception(e)
# This file content is tightly bound up with our mod code above
# I've inserted it as class variables for convenience
rc_local="""# We have seen timing issues with curl commands - try several times
for t in 1 2 3 4 5 6 7 8 9 10; do
echo "Try number $t" >> /tmp/ec2-keypull.stderr
curl -o /tmp/my-key http://169.254.169.254/2009-04-04/meta-data/public-keys/0/openssh-key 2>> /tmp/ec2-keypull.stderr
[ -f /tmp/my-key ] && break
sleep 10
done
if ! [ -f /tmp/my-key ]; then
echo "Failed to retrieve SSH key after 10 tries and 100 seconds" > /dev/hvc0
exit 1
fi
dd if=/dev/urandom count=50 2>/dev/null|md5sum|awk '{ print $1 }'|passwd --stdin root >/dev/null
if [ ! -d /root/.ssh ] ; then
mkdir /root/.ssh
chmod 700 /root/.ssh
fi
cat /tmp/my-key >> /root/.ssh/authorized_keys
chmod 600 /root/.ssh/authorized_keys
for home in `find /home/* -maxdepth 0 -type d 2>/dev/null | tr '\\n' ' '`; do
user=`echo $home | awk -F '/' '{ print $3 }'`
if [ ! -d $home/.ssh ] ; then
mkdir -p $home/.ssh
chmod 700 $home/.ssh
chown $user $home/.ssh
fi
cat /tmp/my-key >> $home/.ssh/authorized_keys
chmod 600 $home/.ssh/authorized_keys
chown $user $home/.ssh/authorized_keys
done
rm /tmp/my-key
"""
ifcfg_eth0="""DEVICE=eth0
BOOTPROTO=dhcp
ONBOOT=yes
TYPE=Ethernet
USERCTL=yes
PEERDNS=yes
IPV6INIT=no
"""
menu_lst="""default=0
timeout=0
title #TITLE#
root (hd0)
kernel /boot/vmlinuz-#KERNEL_VERSION# ro root=LABEL=/ rd_NO_PLYMOUTH #KERNEL_OPTIONS#
initrd /boot/#KERNEL_IMAGE_NAME#-#KERNEL_VERSION#.img
"""
fstab_32bit="""LABEL=/ / ext3 defaults 1 1
/dev/xvda2 /mnt ext3 defaults,nofail 1 2
/dev/xvda3 swap swap defaults,nofail 0 0
none /dev/pts devpts gid=5,mode=620 0 0
none /dev/shm tmpfs defaults 0 0
none /proc proc defaults 0 0
none /sys sysfs defaults 0 0
"""
fstab_64bit="""LABEL=/ / ext3 defaults 1 1
/dev/xvdb /mnt ext3 defaults,nofail 0 0
/dev/xvdc /data ext3 defaults,nofail 0 0
none /dev/pts devpts gid=5,mode=620 0 0
none /dev/shm tmpfs defaults 0 0
none /proc proc defaults 0 0
none /sys sysfs defaults 0 0
"""
############ BEGIN CONFIG-LIKE class variables ###########################
##########################################################################
# Perhaps there is a better way to do this but this works for now
# TODO: Ideally we should use boto "Location" references when possible - 1.9 contains only DEFAULT and EU
# The rest are hard coded strings for now.
ec2_region_details={
'ec2-us-east-1': { 'boto_loc': Location.DEFAULT, 'host':'us-east-1', 'i386': 'aki-805ea7e9', 'x86_64': 'aki-825ea7eb' },
'ec2-us-west-1': { 'boto_loc': 'us-west-1', 'host':'us-west-1', 'i386': 'aki-83396bc6', 'x86_64': 'aki-8d396bc8' },
'ec2-us-west-2': { 'boto_loc': 'us-west-2', 'host':'us-west-2', 'i386': 'aki-c2e26ff2', 'x86_64': 'aki-98e26fa8' },
'ec2-ap-southeast-1': { 'boto_loc': 'ap-southeast-1', 'host':'ap-southeast-1', 'i386': 'aki-a4225af6', 'x86_64': 'aki-aa225af8' },
'ec2-ap-northeast-1': { 'boto_loc': 'ap-northeast-1', 'host':'ap-northeast-1', 'i386': 'aki-ec5df7ed', 'x86_64': 'aki-ee5df7ef' },
'ec2-sa-east-1': { 'boto_loc': 'sa-east-1', 'host':'sa-east-1', 'i386': 'aki-bc3ce3a1', 'x86_64': 'aki-cc3ce3d1' },
'ec2-eu-west-1': { 'boto_loc': Location.EU, 'host':'eu-west-1', 'i386': 'aki-64695810', 'x86_64': 'aki-62695816' } }
# July 13 - new approach - generic JEOS AMIs for Fedora - no userdata and no euca-tools
# ad-hoc ssh keys replace userdata - runtime install of euca tools for bundling
# v0.6 of F14 and F15 - dropped F13 for now - also include official public RHEL hourly AMIs for RHEL6
# Sept 1 - 2011 - updated us-west Fedora JEOSes to 0.6
# Sept 30 - 2011 - Moved out of here entirely to ApplicationConfiguration
# ec2_jeos_amis = <not here anymore>
def add_target_content(self):
"""Merge in target specific package and repo content.
TDL object must already exist as self.tdlobj"""
doc = None
# TODONOW: Fix
# if self.config_block:
import os.path
if None:
doc = libxml2.parseDoc(self.config_block)
elif os.path.isfile("/etc/imagefactory/target_content.xml"):
doc = libxml2.parseFile("/etc/imagefactory/target_content.xml")
else:
self.log.debug("Found neither a call-time config nor a config file - doing nothing")
return
# Purely to make the xpath statements below a tiny bit shorter
target = self.target
os=self.tdlobj.distro
version=self.tdlobj.update
arch=self.tdlobj.arch
# We go from most to least specific in this order:
# arch -> version -> os-> target
# Note that at the moment we even allow an include statment that covers absolutely everything.
# That is, one that doesn't even specify a target - this is to support a very simple call-time syntax
include = doc.xpathEval("/template_includes/include[@target='%s' and @os='%s' and @version='%s' and @arch='%s']" %
(target, os, version, arch))
if len(include) == 0:
include = doc.xpathEval("/template_includes/include[@target='%s' and @os='%s' and @version='%s' and not(@arch)]" %
(target, os, version))
if len(include) == 0:
include = doc.xpathEval("/template_includes/include[@target='%s' and @os='%s' and not(@version) and not(@arch)]" %
(target, os))
if len(include) == 0:
include = doc.xpathEval("/template_includes/include[@target='%s' and not(@os) and not(@version) and not(@arch)]" %
(target))
if len(include) == 0:
include = doc.xpathEval("/template_includes/include[not(@target) and not(@os) and not(@version) and not(@arch)]")
if len(include) == 0:
self.log.debug("cannot find a config section that matches our build details - doing nothing")
return
# OK - We have at least one config block that matches our build - take the first one, merge it and be done
# TODO: Merge all of them? Err out if there is more than one? Warn?
include = include[0]
packages = include.xpathEval("packages")
if len(packages) > 0:
self.tdlobj.merge_packages(str(packages[0]))
repositories = include.xpathEval("repositories")
if len(repositories) > 0:
self.tdlobj.merge_repositories(str(repositories[0]))
| henrysher/imagefactory | imagefactory-plugins/EC2Cloud/EC2Cloud.py | Python | apache-2.0 | 70,484 | 0.006626 |
"""
Pure SciPy implementation of Locally Optimal Block Preconditioned Conjugate
Gradient Method (LOBPCG), see
http://www-math.cudenver.edu/~aknyazev/software/BLOPEX/
License: BSD
Authors: Robert Cimrman, Andrew Knyazev
Examples in tests directory contributed by Nils Wagner.
"""
from __future__ import division, print_function, absolute_import
import sys
import numpy as np
from numpy.testing import assert_allclose
from scipy._lib.six import xrange
from scipy.linalg import inv, eigh, cho_factor, cho_solve, cholesky
from scipy.sparse.linalg import aslinearoperator, LinearOperator
__all__ = ['lobpcg']
@np.deprecate(new_name='eigh')
def symeig(mtxA, mtxB=None, select=None):
    """Deprecated backwards-compatibility wrapper around scipy.linalg.eigh.

    ``mtxB`` maps to eigh's ``b`` and ``select`` to ``eigvals``; np.deprecate
    issues a DeprecationWarning on every call.
    """
    return eigh(mtxA, b=mtxB, eigvals=select)
def pause():
    """Block until the user presses <Enter> (interactive debugging aid)."""
    # Used only when verbosity level > 10.
    # NOTE(review): under Python 2, input() EVALUATES the typed text as an
    # expression (raw_input would be safer) -- confirm intended interpreters.
    input()
def save(ar, fileName):
    """Write the array *ar* to *fileName* as text with 8-digit precision.

    Debug helper; used only when verbosity level > 10.
    """
    from numpy import savetxt
    # BUG FIX: np.savetxt has no ``precision`` keyword, so the original call
    # ``savetxt(fileName, ar, precision=8)`` raised TypeError.  The intended
    # 8-digit precision is expressed through the ``fmt`` argument instead.
    savetxt(fileName, ar, fmt='%.8e')
def _assert_symmetric(M, rtol=1e-5, atol=1e-8):
assert_allclose(M.T, M, rtol=rtol, atol=atol)
##
# 21.05.2007, c
def as2d(ar):
"""
If the input array is 2D return it, if it is 1D, append a dimension,
making it a column vector.
"""
if ar.ndim == 2:
return ar
else: # Assume 1!
aux = np.array(ar, copy=False)
aux.shape = (ar.shape[0], 1)
return aux
def _makeOperator(operatorInput, expectedShape):
"""Takes a dense numpy array or a sparse matrix or
a function and makes an operator performing matrix * blockvector
products.
Examples
--------
>>> A = _makeOperator( arrayA, (n, n) )
>>> vectorB = A( vectorX )
"""
if operatorInput is None:
def ident(x):
return x
operator = LinearOperator(expectedShape, ident, matmat=ident)
else:
operator = aslinearoperator(operatorInput)
if operator.shape != expectedShape:
raise ValueError('operator has invalid shape')
return operator
def _applyConstraints(blockVectorV, factYBY, blockVectorBY, blockVectorY):
"""Changes blockVectorV in place."""
gramYBV = np.dot(blockVectorBY.T, blockVectorV)
tmp = cho_solve(factYBY, gramYBV)
blockVectorV -= np.dot(blockVectorY, tmp)
def _b_orthonormalize(B, blockVectorV, blockVectorBV=None, retInvR=False):
    """B-orthonormalize the columns of blockVectorV.

    Computes the Cholesky factor R of V^T B V and returns V R^{-1} (whose
    columns are B-orthonormal) together with B V R^{-1} and, when retInvR
    is True, R^{-1} itself.

    Raises a LinAlgError (from cholesky) when V^T B V is not positive
    definite, e.g. when the columns of V are B-linearly dependent.
    """
    if blockVectorBV is None:
        if B is not None:
            blockVectorBV = B(blockVectorV)
        else:
            blockVectorBV = blockVectorV  # Shared data!!!
    gramVBV = np.dot(blockVectorV.T, blockVectorBV)
    gramVBV = cholesky(gramVBV)
    gramVBV = inv(gramVBV, overwrite_a=True)
    # gramVBV is now R^{-1}.
    blockVectorV = np.dot(blockVectorV, gramVBV)
    if B is not None:
        blockVectorBV = np.dot(blockVectorBV, gramVBV)
    # NOTE(review): when B is None, the returned blockVectorBV is still the
    # ORIGINAL input array (the pre-orthonormalization V), not the updated
    # blockVectorV -- callers appear to rely on this aliasing; confirm
    # before changing.
    if retInvR:
        return blockVectorV, blockVectorBV, gramVBV
    else:
        return blockVectorV, blockVectorBV
def lobpcg(A, X,
B=None, M=None, Y=None,
tol=None, maxiter=20,
largest=True, verbosityLevel=0,
retLambdaHistory=False, retResidualNormsHistory=False):
"""Locally Optimal Block Preconditioned Conjugate Gradient Method (LOBPCG)
LOBPCG is a preconditioned eigensolver for large symmetric positive
definite (SPD) generalized eigenproblems.
Parameters
----------
A : {sparse matrix, dense matrix, LinearOperator}
The symmetric linear operator of the problem, usually a
sparse matrix. Often called the "stiffness matrix".
X : array_like
Initial approximation to the k eigenvectors. If A has
shape=(n,n) then X should have shape shape=(n,k).
B : {dense matrix, sparse matrix, LinearOperator}, optional
the right hand side operator in a generalized eigenproblem.
by default, B = Identity
often called the "mass matrix"
M : {dense matrix, sparse matrix, LinearOperator}, optional
preconditioner to A; by default M = Identity
M should approximate the inverse of A
Y : array_like, optional
n-by-sizeY matrix of constraints, sizeY < n
The iterations will be performed in the B-orthogonal complement
of the column-space of Y. Y must be full rank.
Returns
-------
w : array
Array of k eigenvalues
v : array
An array of k eigenvectors. V has the same shape as X.
Other Parameters
----------------
tol : scalar, optional
Solver tolerance (stopping criterion)
by default: tol=n*sqrt(eps)
maxiter : integer, optional
maximum number of iterations
by default: maxiter=min(n,20)
largest : bool, optional
when True, solve for the largest eigenvalues, otherwise the smallest
verbosityLevel : integer, optional
controls solver output. default: verbosityLevel = 0.
retLambdaHistory : boolean, optional
whether to return eigenvalue history
retResidualNormsHistory : boolean, optional
whether to return history of residual norms
Examples
--------
Solve A x = lambda B x with constraints and preconditioning.
>>> from scipy.sparse import spdiags, issparse
>>> from scipy.sparse.linalg import lobpcg, LinearOperator
>>> n = 100
>>> vals = [np.arange(n, dtype=np.float64) + 1]
>>> A = spdiags(vals, 0, n, n)
>>> A.toarray()
array([[ 1., 0., 0., ..., 0., 0., 0.],
[ 0., 2., 0., ..., 0., 0., 0.],
[ 0., 0., 3., ..., 0., 0., 0.],
...,
[ 0., 0., 0., ..., 98., 0., 0.],
[ 0., 0., 0., ..., 0., 99., 0.],
[ 0., 0., 0., ..., 0., 0., 100.]])
Constraints.
>>> Y = np.eye(n, 3)
Initial guess for eigenvectors, should have linearly independent
columns. Column dimension = number of requested eigenvalues.
>>> X = np.random.rand(n, 3)
Preconditioner -- inverse of A (as an abstract linear operator).
>>> invA = spdiags([1./vals[0]], 0, n, n)
>>> def precond( x ):
... return invA * x
>>> M = LinearOperator(matvec=precond, shape=(n, n), dtype=float)
Here, ``invA`` could of course have been used directly as a preconditioner.
Let us then solve the problem:
>>> eigs, vecs = lobpcg(A, X, Y=Y, M=M, tol=1e-4, maxiter=40, largest=False)
>>> eigs
array([ 4., 5., 6.])
Note that the vectors passed in Y are the eigenvectors of the 3 smallest
eigenvalues. The results returned are orthogonal to those.
Notes
-----
If both retLambdaHistory and retResidualNormsHistory are True,
the return tuple has the following format
(lambda, V, lambda history, residual norms history).
In the following ``n`` denotes the matrix size and ``m`` the number
of required eigenvalues (smallest or largest).
The LOBPCG code internally solves eigenproblems of the size 3``m`` on every
iteration by calling the "standard" dense eigensolver, so if ``m`` is not
small enough compared to ``n``, it does not make sense to call the LOBPCG
code, but rather one should use the "standard" eigensolver,
e.g. numpy or scipy function in this case.
If one calls the LOBPCG algorithm for 5``m``>``n``,
it will most likely break internally, so the code tries to call the standard
function instead.
It is not that n should be large for the LOBPCG to work, but rather the
ratio ``n``/``m`` should be large. It you call the LOBPCG code with ``m``=1
and ``n``=10, it should work, though ``n`` is small. The method is intended
for extremely large ``n``/``m``, see e.g., reference [28] in
http://arxiv.org/abs/0705.2626
The convergence speed depends basically on two factors:
1. How well relatively separated the seeking eigenvalues are
from the rest of the eigenvalues.
One can try to vary ``m`` to make this better.
2. How well conditioned the problem is. This can be changed by using proper
preconditioning. For example, a rod vibration test problem (under tests
directory) is ill-conditioned for large ``n``, so convergence will be
slow, unless efficient preconditioning is used.
For this specific problem, a good simple preconditioner function would
be a linear solve for A, which is easy to code since A is tridiagonal.
*Acknowledgements*
lobpcg.py code was written by Robert Cimrman.
Many thanks belong to Andrew Knyazev, the author of the algorithm,
for lots of advice and support.
References
----------
.. [1] A. V. Knyazev (2001),
Toward the Optimal Preconditioned Eigensolver: Locally Optimal
Block Preconditioned Conjugate Gradient Method.
SIAM Journal on Scientific Computing 23, no. 2,
pp. 517-541. http://dx.doi.org/10.1137/S1064827500366124
.. [2] A. V. Knyazev, I. Lashuk, M. E. Argentati, and E. Ovchinnikov (2007),
Block Locally Optimal Preconditioned Eigenvalue Xolvers (BLOPEX)
in hypre and PETSc. http://arxiv.org/abs/0705.2626
.. [3] A. V. Knyazev's C and MATLAB implementations:
http://www-math.cudenver.edu/~aknyazev/software/BLOPEX/
"""
blockVectorX = X
blockVectorY = Y
residualTolerance = tol
maxIterations = maxiter
if blockVectorY is not None:
sizeY = blockVectorY.shape[1]
else:
sizeY = 0
# Block size.
if len(blockVectorX.shape) != 2:
raise ValueError('expected rank-2 array for argument X')
n, sizeX = blockVectorX.shape
if sizeX > n:
raise ValueError('X column dimension exceeds the row dimension')
A = _makeOperator(A, (n,n))
B = _makeOperator(B, (n,n))
M = _makeOperator(M, (n,n))
if (n - sizeY) < (5 * sizeX):
# warn('The problem size is small compared to the block size.' \
# ' Using dense eigensolver instead of LOBPCG.')
if blockVectorY is not None:
raise NotImplementedError('The dense eigensolver '
'does not support constraints.')
# Define the closed range of indices of eigenvalues to return.
if largest:
eigvals = (n - sizeX, n-1)
else:
eigvals = (0, sizeX-1)
A_dense = A(np.eye(n))
B_dense = None if B is None else B(np.eye(n))
return eigh(A_dense, B_dense, eigvals=eigvals, check_finite=False)
if residualTolerance is None:
residualTolerance = np.sqrt(1e-15) * n
maxIterations = min(n, maxIterations)
if verbosityLevel:
aux = "Solving "
if B is None:
aux += "standard"
else:
aux += "generalized"
aux += " eigenvalue problem with"
if M is None:
aux += "out"
aux += " preconditioning\n\n"
aux += "matrix size %d\n" % n
aux += "block size %d\n\n" % sizeX
if blockVectorY is None:
aux += "No constraints\n\n"
else:
if sizeY > 1:
aux += "%d constraints\n\n" % sizeY
else:
aux += "%d constraint\n\n" % sizeY
print(aux)
##
# Apply constraints to X.
if blockVectorY is not None:
if B is not None:
blockVectorBY = B(blockVectorY)
else:
blockVectorBY = blockVectorY
# gramYBY is a dense array.
gramYBY = np.dot(blockVectorY.T, blockVectorBY)
try:
# gramYBY is a Cholesky factor from now on...
gramYBY = cho_factor(gramYBY)
except:
raise ValueError('cannot handle linearly dependent constraints')
_applyConstraints(blockVectorX, gramYBY, blockVectorBY, blockVectorY)
##
# B-orthonormalize X.
blockVectorX, blockVectorBX = _b_orthonormalize(B, blockVectorX)
##
# Compute the initial Ritz vectors: solve the eigenproblem.
blockVectorAX = A(blockVectorX)
gramXAX = np.dot(blockVectorX.T, blockVectorAX)
_lambda, eigBlockVector = eigh(gramXAX, check_finite=False)
ii = np.argsort(_lambda)[:sizeX]
if largest:
ii = ii[::-1]
_lambda = _lambda[ii]
eigBlockVector = np.asarray(eigBlockVector[:,ii])
blockVectorX = np.dot(blockVectorX, eigBlockVector)
blockVectorAX = np.dot(blockVectorAX, eigBlockVector)
if B is not None:
blockVectorBX = np.dot(blockVectorBX, eigBlockVector)
##
# Active index set.
activeMask = np.ones((sizeX,), dtype=bool)
lambdaHistory = [_lambda]
residualNormsHistory = []
previousBlockSize = sizeX
ident = np.eye(sizeX, dtype=A.dtype)
ident0 = np.eye(sizeX, dtype=A.dtype)
##
# Main iteration loop.
blockVectorP = None # set during iteration
blockVectorAP = None
blockVectorBP = None
for iterationNumber in xrange(maxIterations):
if verbosityLevel > 0:
print('iteration %d' % iterationNumber)
aux = blockVectorBX * _lambda[np.newaxis,:]
blockVectorR = blockVectorAX - aux
aux = np.sum(blockVectorR.conjugate() * blockVectorR, 0)
residualNorms = np.sqrt(aux)
residualNormsHistory.append(residualNorms)
ii = np.where(residualNorms > residualTolerance, True, False)
activeMask = activeMask & ii
if verbosityLevel > 2:
print(activeMask)
currentBlockSize = activeMask.sum()
if currentBlockSize != previousBlockSize:
previousBlockSize = currentBlockSize
ident = np.eye(currentBlockSize, dtype=A.dtype)
if currentBlockSize == 0:
break
if verbosityLevel > 0:
print('current block size:', currentBlockSize)
print('eigenvalue:', _lambda)
print('residual norms:', residualNorms)
if verbosityLevel > 10:
print(eigBlockVector)
activeBlockVectorR = as2d(blockVectorR[:,activeMask])
if iterationNumber > 0:
activeBlockVectorP = as2d(blockVectorP[:,activeMask])
activeBlockVectorAP = as2d(blockVectorAP[:,activeMask])
activeBlockVectorBP = as2d(blockVectorBP[:,activeMask])
if M is not None:
# Apply preconditioner T to the active residuals.
activeBlockVectorR = M(activeBlockVectorR)
##
# Apply constraints to the preconditioned residuals.
if blockVectorY is not None:
_applyConstraints(activeBlockVectorR,
gramYBY, blockVectorBY, blockVectorY)
##
# B-orthonormalize the preconditioned residuals.
aux = _b_orthonormalize(B, activeBlockVectorR)
activeBlockVectorR, activeBlockVectorBR = aux
activeBlockVectorAR = A(activeBlockVectorR)
if iterationNumber > 0:
aux = _b_orthonormalize(B, activeBlockVectorP,
activeBlockVectorBP, retInvR=True)
activeBlockVectorP, activeBlockVectorBP, invR = aux
activeBlockVectorAP = np.dot(activeBlockVectorAP, invR)
##
# Perform the Rayleigh Ritz Procedure:
# Compute symmetric Gram matrices:
xaw = np.dot(blockVectorX.T, activeBlockVectorAR)
waw = np.dot(activeBlockVectorR.T, activeBlockVectorAR)
xbw = np.dot(blockVectorX.T, activeBlockVectorBR)
if iterationNumber > 0:
xap = np.dot(blockVectorX.T, activeBlockVectorAP)
wap = np.dot(activeBlockVectorR.T, activeBlockVectorAP)
pap = np.dot(activeBlockVectorP.T, activeBlockVectorAP)
xbp = np.dot(blockVectorX.T, activeBlockVectorBP)
wbp = np.dot(activeBlockVectorR.T, activeBlockVectorBP)
gramA = np.bmat([[np.diag(_lambda), xaw, xap],
[xaw.T, waw, wap],
[xap.T, wap.T, pap]])
gramB = np.bmat([[ident0, xbw, xbp],
[xbw.T, ident, wbp],
[xbp.T, wbp.T, ident]])
else:
gramA = np.bmat([[np.diag(_lambda), xaw],
[xaw.T, waw]])
gramB = np.bmat([[ident0, xbw],
[xbw.T, ident]])
_assert_symmetric(gramA)
_assert_symmetric(gramB)
if verbosityLevel > 10:
save(gramA, 'gramA')
save(gramB, 'gramB')
# Solve the generalized eigenvalue problem.
_lambda, eigBlockVector = eigh(gramA, gramB, check_finite=False)
ii = np.argsort(_lambda)[:sizeX]
if largest:
ii = ii[::-1]
if verbosityLevel > 10:
print(ii)
_lambda = _lambda[ii].astype(np.float64)
eigBlockVector = np.asarray(eigBlockVector[:,ii].astype(np.float64))
lambdaHistory.append(_lambda)
if verbosityLevel > 10:
print('lambda:', _lambda)
## # Normalize eigenvectors!
## aux = np.sum( eigBlockVector.conjugate() * eigBlockVector, 0 )
## eigVecNorms = np.sqrt( aux )
## eigBlockVector = eigBlockVector / eigVecNorms[np.newaxis,:]
# eigBlockVector, aux = _b_orthonormalize( B, eigBlockVector )
if verbosityLevel > 10:
print(eigBlockVector)
pause()
##
# Compute Ritz vectors.
if iterationNumber > 0:
eigBlockVectorX = eigBlockVector[:sizeX]
eigBlockVectorR = eigBlockVector[sizeX:sizeX+currentBlockSize]
eigBlockVectorP = eigBlockVector[sizeX+currentBlockSize:]
pp = np.dot(activeBlockVectorR, eigBlockVectorR)
pp += np.dot(activeBlockVectorP, eigBlockVectorP)
app = np.dot(activeBlockVectorAR, eigBlockVectorR)
app += np.dot(activeBlockVectorAP, eigBlockVectorP)
bpp = np.dot(activeBlockVectorBR, eigBlockVectorR)
bpp += np.dot(activeBlockVectorBP, eigBlockVectorP)
else:
eigBlockVectorX = eigBlockVector[:sizeX]
eigBlockVectorR = eigBlockVector[sizeX:]
pp = np.dot(activeBlockVectorR, eigBlockVectorR)
app = np.dot(activeBlockVectorAR, eigBlockVectorR)
bpp = np.dot(activeBlockVectorBR, eigBlockVectorR)
if verbosityLevel > 10:
print(pp)
print(app)
print(bpp)
pause()
blockVectorX = np.dot(blockVectorX, eigBlockVectorX) + pp
blockVectorAX = np.dot(blockVectorAX, eigBlockVectorX) + app
blockVectorBX = np.dot(blockVectorBX, eigBlockVectorX) + bpp
blockVectorP, blockVectorAP, blockVectorBP = pp, app, bpp
aux = blockVectorBX * _lambda[np.newaxis,:]
blockVectorR = blockVectorAX - aux
aux = np.sum(blockVectorR.conjugate() * blockVectorR, 0)
residualNorms = np.sqrt(aux)
if verbosityLevel > 0:
print('final eigenvalue:', _lambda)
print('final residual norms:', residualNorms)
if retLambdaHistory:
if retResidualNormsHistory:
return _lambda, blockVectorX, lambdaHistory, residualNormsHistory
else:
return _lambda, blockVectorX, lambdaHistory
else:
if retResidualNormsHistory:
return _lambda, blockVectorX, residualNormsHistory
else:
return _lambda, blockVectorX
| jlcarmic/producthunt_simulator | venv/lib/python2.7/site-packages/scipy/sparse/linalg/eigen/lobpcg/lobpcg.py | Python | mit | 19,348 | 0.001551 |
"""
Tests for Calendar Sync views.
"""
import ddt
from django.test import TestCase
from django.urls import reverse
from openedx.features.calendar_sync.api import SUBSCRIBE, UNSUBSCRIBE
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
# Password assigned to every factory-created user in these tests.
TEST_PASSWORD = 'test'
@ddt.ddt
class TestCalendarSyncView(SharedModuleStoreTestCase, TestCase):
    """Tests for the calendar sync view."""
    @classmethod
    def setUpClass(cls):
        """ Set up any course data """
        super(TestCalendarSyncView, cls).setUpClass()
        cls.course = CourseFactory.create()
    def setUp(self):
        # Log a fresh enrolled user in and build the course-scoped sync URL
        # used by every test case below.
        super(TestCalendarSyncView, self).setUp()  # lint-amnesty, pylint: disable=super-with-arguments
        self.user = self.create_user_for_course(self.course)
        self.client.login(username=self.user.username, password=TEST_PASSWORD)
        self.calendar_sync_url = reverse('openedx.calendar_sync', args=[self.course.id])
    @ddt.data(
        # Redirect on successful subscribe
        [{'tool_data': "{{'toggle_data': '{}'}}".format(SUBSCRIBE)}, 302, ''],
        # Redirect on successful unsubscribe
        [{'tool_data': "{{'toggle_data': '{}'}}".format(UNSUBSCRIBE)}, 302, ''],
        # 422 on unknown toggle_data
        [{'tool_data': "{{'toggle_data': '{}'}}".format('gibberish')}, 422,
         'Toggle data was not provided or had unknown value.'],
        # 422 on no toggle_data
        [{'tool_data': "{{'random_data': '{}'}}".format('gibberish')}, 422,
         'Toggle data was not provided or had unknown value.'],
        # 422 on no tool_data
        [{'nonsense': "{{'random_data': '{}'}}".format('gibberish')}, 422, 'Tool data was not provided.'],
    )
    @ddt.unpack
    def test_course_dates_fragment(self, data, expected_status_code, contained_text):
        """POST each payload to the sync endpoint and verify both the HTTP
        status code and (for error cases) the explanatory message body."""
        response = self.client.post(self.calendar_sync_url, data)
        assert response.status_code == expected_status_code
        assert contained_text in str(response.content)
| stvstnfrd/edx-platform | openedx/features/calendar_sync/tests/test_views.py | Python | agpl-3.0 | 2,051 | 0.002438 |
# constants extracted from
# https://github.com/jrowberg/i2cdevlib/blob/master/Arduino/MPU6050/MPU6050.h
# I2C slave addresses: the AD0 pin selects between the low/high address.
MPU6050_ADDRESS_AD0_LOW = 0x68
MPU6050_ADDRESS_AD0_HIGH = 0x69
MPU6050_DEFAULT_ADDRESS = MPU6050_ADDRESS_AD0_LOW
# Register addresses (RA_*), in register-map order.
MPU6050_RA_XG_OFFS_TC = 0x00
MPU6050_RA_YG_OFFS_TC = 0x01
MPU6050_RA_ZG_OFFS_TC = 0x02
MPU6050_RA_X_FINE_GAIN = 0x03
MPU6050_RA_Y_FINE_GAIN = 0x04
MPU6050_RA_Z_FINE_GAIN = 0x05
MPU6050_RA_XA_OFFS_H = 0x06
MPU6050_RA_XA_OFFS_L_TC = 0x07
MPU6050_RA_YA_OFFS_H = 0x08
MPU6050_RA_YA_OFFS_L_TC = 0x09
MPU6050_RA_ZA_OFFS_H = 0x0A
MPU6050_RA_ZA_OFFS_L_TC = 0x0B
MPU6050_RA_SELF_TEST_X = 0x0D
MPU6050_RA_SELF_TEST_Y = 0x0E
MPU6050_RA_SELF_TEST_Z = 0x0F
MPU6050_RA_SELF_TEST_A = 0x10
MPU6050_RA_XG_OFFS_USRH = 0x13
MPU6050_RA_XG_OFFS_USRL = 0x14
MPU6050_RA_YG_OFFS_USRH = 0x15
MPU6050_RA_YG_OFFS_USRL = 0x16
MPU6050_RA_ZG_OFFS_USRH = 0x17
MPU6050_RA_ZG_OFFS_USRL = 0x18
MPU6050_RA_SMPLRT_DIV = 0x19
MPU6050_RA_CONFIG = 0x1A
MPU6050_RA_GYRO_CONFIG = 0x1B
MPU6050_RA_ACCEL_CONFIG = 0x1C
MPU6050_RA_FF_THR = 0x1D
MPU6050_RA_FF_DUR = 0x1E
MPU6050_RA_MOT_THR = 0x1F
MPU6050_RA_MOT_DUR = 0x20
MPU6050_RA_ZRMOT_THR = 0x21
MPU6050_RA_ZRMOT_DUR = 0x22
MPU6050_RA_FIFO_EN = 0x23
MPU6050_RA_I2C_MST_CTRL = 0x24
MPU6050_RA_I2C_SLV0_ADDR = 0x25
MPU6050_RA_I2C_SLV0_REG = 0x26
MPU6050_RA_I2C_SLV0_CTRL = 0x27
MPU6050_RA_I2C_SLV1_ADDR = 0x28
MPU6050_RA_I2C_SLV1_REG = 0x29
MPU6050_RA_I2C_SLV1_CTRL = 0x2A
MPU6050_RA_I2C_SLV2_ADDR = 0x2B
MPU6050_RA_I2C_SLV2_REG = 0x2C
MPU6050_RA_I2C_SLV2_CTRL = 0x2D
MPU6050_RA_I2C_SLV3_ADDR = 0x2E
MPU6050_RA_I2C_SLV3_REG = 0x2F
MPU6050_RA_I2C_SLV3_CTRL = 0x30
MPU6050_RA_I2C_SLV4_ADDR = 0x31
MPU6050_RA_I2C_SLV4_REG = 0x32
MPU6050_RA_I2C_SLV4_DO = 0x33
MPU6050_RA_I2C_SLV4_CTRL = 0x34
MPU6050_RA_I2C_SLV4_DI = 0x35
MPU6050_RA_I2C_MST_STATUS = 0x36
MPU6050_RA_INT_PIN_CFG = 0x37
MPU6050_RA_INT_ENABLE = 0x38
MPU6050_RA_DMP_INT_STATUS = 0x39
MPU6050_RA_INT_STATUS = 0x3A
MPU6050_RA_ACCEL_XOUT_H = 0x3B
MPU6050_RA_ACCEL_XOUT_L = 0x3C
MPU6050_RA_ACCEL_YOUT_H = 0x3D
MPU6050_RA_ACCEL_YOUT_L = 0x3E
MPU6050_RA_ACCEL_ZOUT_H = 0x3F
MPU6050_RA_ACCEL_ZOUT_L = 0x40
MPU6050_RA_TEMP_OUT_H = 0x41
MPU6050_RA_TEMP_OUT_L = 0x42
MPU6050_RA_GYRO_XOUT_H = 0x43
MPU6050_RA_GYRO_XOUT_L = 0x44
MPU6050_RA_GYRO_YOUT_H = 0x45
MPU6050_RA_GYRO_YOUT_L = 0x46
MPU6050_RA_GYRO_ZOUT_H = 0x47
MPU6050_RA_GYRO_ZOUT_L = 0x48
MPU6050_RA_EXT_SENS_DATA_00 = 0x49
MPU6050_RA_EXT_SENS_DATA_01 = 0x4A
MPU6050_RA_EXT_SENS_DATA_02 = 0x4B
MPU6050_RA_EXT_SENS_DATA_03 = 0x4C
MPU6050_RA_EXT_SENS_DATA_04 = 0x4D
MPU6050_RA_EXT_SENS_DATA_05 = 0x4E
MPU6050_RA_EXT_SENS_DATA_06 = 0x4F
MPU6050_RA_EXT_SENS_DATA_07 = 0x50
MPU6050_RA_EXT_SENS_DATA_08 = 0x51
MPU6050_RA_EXT_SENS_DATA_09 = 0x52
MPU6050_RA_EXT_SENS_DATA_10 = 0x53
MPU6050_RA_EXT_SENS_DATA_11 = 0x54
MPU6050_RA_EXT_SENS_DATA_12 = 0x55
MPU6050_RA_EXT_SENS_DATA_13 = 0x56
MPU6050_RA_EXT_SENS_DATA_14 = 0x57
MPU6050_RA_EXT_SENS_DATA_15 = 0x58
MPU6050_RA_EXT_SENS_DATA_16 = 0x59
MPU6050_RA_EXT_SENS_DATA_17 = 0x5A
MPU6050_RA_EXT_SENS_DATA_18 = 0x5B
MPU6050_RA_EXT_SENS_DATA_19 = 0x5C
MPU6050_RA_EXT_SENS_DATA_20 = 0x5D
MPU6050_RA_EXT_SENS_DATA_21 = 0x5E
MPU6050_RA_EXT_SENS_DATA_22 = 0x5F
MPU6050_RA_EXT_SENS_DATA_23 = 0x60
MPU6050_RA_MOT_DETECT_STATUS = 0x61
MPU6050_RA_I2C_SLV0_DO = 0x63
MPU6050_RA_I2C_SLV1_DO = 0x64
MPU6050_RA_I2C_SLV2_DO = 0x65
MPU6050_RA_I2C_SLV3_DO = 0x66
MPU6050_RA_I2C_MST_DELAY_CTRL = 0x67
MPU6050_RA_SIGNAL_PATH_RESET = 0x68
MPU6050_RA_MOT_DETECT_CTRL = 0x69
MPU6050_RA_USER_CTRL = 0x6A
MPU6050_RA_PWR_MGMT_1 = 0x6B
MPU6050_RA_PWR_MGMT_2 = 0x6C
MPU6050_RA_BANK_SEL = 0x6D
MPU6050_RA_MEM_START_ADDR = 0x6E
MPU6050_RA_MEM_R_W = 0x6F
MPU6050_RA_DMP_CFG_1 = 0x70
MPU6050_RA_DMP_CFG_2 = 0x71
MPU6050_RA_FIFO_COUNTH = 0x72
MPU6050_RA_FIFO_COUNTL = 0x73
MPU6050_RA_FIFO_R_W = 0x74
MPU6050_RA_WHO_AM_I = 0x75
# Bit positions (_BIT) and field widths (_LENGTH) within individual
# registers; values name the MSB of the field, as in i2cdevlib.
MPU6050_SELF_TEST_XA_1_BIT = 0x07
MPU6050_SELF_TEST_XA_1_LENGTH = 0x03
MPU6050_SELF_TEST_XA_2_BIT = 0x05
MPU6050_SELF_TEST_XA_2_LENGTH = 0x02
MPU6050_SELF_TEST_YA_1_BIT = 0x07
MPU6050_SELF_TEST_YA_1_LENGTH = 0x03
MPU6050_SELF_TEST_YA_2_BIT = 0x03
MPU6050_SELF_TEST_YA_2_LENGTH = 0x02
MPU6050_SELF_TEST_ZA_1_BIT = 0x07
MPU6050_SELF_TEST_ZA_1_LENGTH = 0x03
MPU6050_SELF_TEST_ZA_2_BIT = 0x01
MPU6050_SELF_TEST_ZA_2_LENGTH = 0x02
MPU6050_SELF_TEST_XG_1_BIT = 0x04
MPU6050_SELF_TEST_XG_1_LENGTH = 0x05
MPU6050_SELF_TEST_YG_1_BIT = 0x04
MPU6050_SELF_TEST_YG_1_LENGTH = 0x05
MPU6050_SELF_TEST_ZG_1_BIT = 0x04
MPU6050_SELF_TEST_ZG_1_LENGTH = 0x05
MPU6050_TC_PWR_MODE_BIT = 7
MPU6050_TC_OFFSET_BIT = 6
MPU6050_TC_OFFSET_LENGTH = 6
MPU6050_TC_OTP_BNK_VLD_BIT = 0
MPU6050_VDDIO_LEVEL_VLOGIC = 0
MPU6050_VDDIO_LEVEL_VDD = 1
MPU6050_CFG_EXT_SYNC_SET_BIT = 5
MPU6050_CFG_EXT_SYNC_SET_LENGTH = 3
MPU6050_CFG_DLPF_CFG_BIT = 2
MPU6050_CFG_DLPF_CFG_LENGTH = 3
# Enumerated values written into the bit fields above.
MPU6050_EXT_SYNC_DISABLED = 0x0
MPU6050_EXT_SYNC_TEMP_OUT_L = 0x1
MPU6050_EXT_SYNC_GYRO_XOUT_L = 0x2
MPU6050_EXT_SYNC_GYRO_YOUT_L = 0x3
MPU6050_EXT_SYNC_GYRO_ZOUT_L = 0x4
MPU6050_EXT_SYNC_ACCEL_XOUT_L = 0x5
MPU6050_EXT_SYNC_ACCEL_YOUT_L = 0x6
MPU6050_EXT_SYNC_ACCEL_ZOUT_L = 0x7
MPU6050_DLPF_BW_256 = 0x00
MPU6050_DLPF_BW_188 = 0x01
MPU6050_DLPF_BW_98 = 0x02
MPU6050_DLPF_BW_42 = 0x03
MPU6050_DLPF_BW_20 = 0x04
MPU6050_DLPF_BW_10 = 0x05
MPU6050_DLPF_BW_5 = 0x06
MPU6050_GCONFIG_FS_SEL_BIT = 4
MPU6050_GCONFIG_FS_SEL_LENGTH = 2
MPU6050_GYRO_FS_250 = 0x00
MPU6050_GYRO_FS_500 = 0x01
MPU6050_GYRO_FS_1000 = 0x02
MPU6050_GYRO_FS_2000 = 0x03
MPU6050_ACONFIG_XA_ST_BIT = 7
MPU6050_ACONFIG_YA_ST_BIT = 6
MPU6050_ACONFIG_ZA_ST_BIT = 5
MPU6050_ACONFIG_AFS_SEL_BIT = 4
MPU6050_ACONFIG_AFS_SEL_LENGTH = 2
MPU6050_ACONFIG_ACCEL_HPF_BIT = 2
MPU6050_ACONFIG_ACCEL_HPF_LENGTH = 3
MPU6050_ACCEL_FS_2 = 0x00
MPU6050_ACCEL_FS_4 = 0x01
MPU6050_ACCEL_FS_8 = 0x02
MPU6050_ACCEL_FS_16 = 0x03
MPU6050_DHPF_RESET = 0x00
MPU6050_DHPF_5 = 0x01
MPU6050_DHPF_2P5 = 0x02
MPU6050_DHPF_1P25 = 0x03
MPU6050_DHPF_0P63 = 0x04
MPU6050_DHPF_HOLD = 0x07
MPU6050_TEMP_FIFO_EN_BIT = 7
MPU6050_XG_FIFO_EN_BIT = 6
MPU6050_YG_FIFO_EN_BIT = 5
MPU6050_ZG_FIFO_EN_BIT = 4
MPU6050_ACCEL_FIFO_EN_BIT = 3
MPU6050_SLV2_FIFO_EN_BIT = 2
MPU6050_SLV1_FIFO_EN_BIT = 1
MPU6050_SLV0_FIFO_EN_BIT = 0
MPU6050_MULT_MST_EN_BIT = 7
MPU6050_WAIT_FOR_ES_BIT = 6
MPU6050_SLV_3_FIFO_EN_BIT = 5
MPU6050_I2C_MST_P_NSR_BIT = 4
MPU6050_I2C_MST_CLK_BIT = 3
MPU6050_I2C_MST_CLK_LENGTH = 4
MPU6050_CLOCK_DIV_348 = 0x0
MPU6050_CLOCK_DIV_333 = 0x1
MPU6050_CLOCK_DIV_320 = 0x2
MPU6050_CLOCK_DIV_308 = 0x3
MPU6050_CLOCK_DIV_296 = 0x4
MPU6050_CLOCK_DIV_286 = 0x5
MPU6050_CLOCK_DIV_276 = 0x6
MPU6050_CLOCK_DIV_267 = 0x7
MPU6050_CLOCK_DIV_258 = 0x8
MPU6050_CLOCK_DIV_500 = 0x9
MPU6050_CLOCK_DIV_471 = 0xA
MPU6050_CLOCK_DIV_444 = 0xB
MPU6050_CLOCK_DIV_421 = 0xC
MPU6050_CLOCK_DIV_400 = 0xD
MPU6050_CLOCK_DIV_381 = 0xE
MPU6050_CLOCK_DIV_364 = 0xF
MPU6050_I2C_SLV_RW_BIT = 7
MPU6050_I2C_SLV_ADDR_BIT = 6
MPU6050_I2C_SLV_ADDR_LENGTH = 7
MPU6050_I2C_SLV_EN_BIT = 7
MPU6050_I2C_SLV_BYTE_SW_BIT = 6
MPU6050_I2C_SLV_REG_DIS_BIT = 5
MPU6050_I2C_SLV_GRP_BIT = 4
MPU6050_I2C_SLV_LEN_BIT = 3
MPU6050_I2C_SLV_LEN_LENGTH = 4
MPU6050_I2C_SLV4_RW_BIT = 7
MPU6050_I2C_SLV4_ADDR_BIT = 6
MPU6050_I2C_SLV4_ADDR_LENGTH = 7
MPU6050_I2C_SLV4_EN_BIT = 7
MPU6050_I2C_SLV4_INT_EN_BIT = 6
MPU6050_I2C_SLV4_REG_DIS_BIT = 5
MPU6050_I2C_SLV4_MST_DLY_BIT = 4
MPU6050_I2C_SLV4_MST_DLY_LENGTH = 5
MPU6050_MST_PASS_THROUGH_BIT = 7
MPU6050_MST_I2C_SLV4_DONE_BIT = 6
MPU6050_MST_I2C_LOST_ARB_BIT = 5
MPU6050_MST_I2C_SLV4_NACK_BIT = 4
MPU6050_MST_I2C_SLV3_NACK_BIT = 3
MPU6050_MST_I2C_SLV2_NACK_BIT = 2
MPU6050_MST_I2C_SLV1_NACK_BIT = 1
MPU6050_MST_I2C_SLV0_NACK_BIT = 0
MPU6050_INTCFG_INT_LEVEL_BIT = 7
MPU6050_INTCFG_INT_OPEN_BIT = 6
MPU6050_INTCFG_LATCH_INT_EN_BIT = 5
MPU6050_INTCFG_INT_RD_CLEAR_BIT = 4
MPU6050_INTCFG_FSYNC_INT_LEVEL_BIT = 3
MPU6050_INTCFG_FSYNC_INT_EN_BIT = 2
MPU6050_INTCFG_I2C_BYPASS_EN_BIT = 1
MPU6050_INTCFG_CLKOUT_EN_BIT = 0
MPU6050_INTMODE_ACTIVEHIGH = 0x00
MPU6050_INTMODE_ACTIVELOW = 0x01
MPU6050_INTDRV_PUSHPULL = 0x00
MPU6050_INTDRV_OPENDRAIN = 0x01
MPU6050_INTLATCH_50USPULSE = 0x00
MPU6050_INTLATCH_WAITCLEAR = 0x01
MPU6050_INTCLEAR_STATUSREAD = 0x00
MPU6050_INTCLEAR_ANYREAD = 0x01
MPU6050_INTERRUPT_FF_BIT = 7
MPU6050_INTERRUPT_MOT_BIT = 6
MPU6050_INTERRUPT_ZMOT_BIT = 5
MPU6050_INTERRUPT_FIFO_OFLOW_BIT = 4
MPU6050_INTERRUPT_I2C_MST_INT_BIT = 3
MPU6050_INTERRUPT_PLL_RDY_INT_BIT = 2
MPU6050_INTERRUPT_DMP_INT_BIT = 1
MPU6050_INTERRUPT_DATA_RDY_BIT = 0
MPU6050_DMPINT_5_BIT = 5
MPU6050_DMPINT_4_BIT = 4
MPU6050_DMPINT_3_BIT = 3
MPU6050_DMPINT_2_BIT = 2
MPU6050_DMPINT_1_BIT = 1
MPU6050_DMPINT_0_BIT = 0
MPU6050_MOTION_MOT_XNEG_BIT = 7
MPU6050_MOTION_MOT_XPOS_BIT = 6
MPU6050_MOTION_MOT_YNEG_BIT = 5
MPU6050_MOTION_MOT_YPOS_BIT = 4
MPU6050_MOTION_MOT_ZNEG_BIT = 3
MPU6050_MOTION_MOT_ZPOS_BIT = 2
MPU6050_MOTION_MOT_ZRMOT_BIT = 0
MPU6050_DELAYCTRL_DELAY_ES_SHADOW_BIT = 7
MPU6050_DELAYCTRL_I2C_SLV4_DLY_EN_BIT = 4
MPU6050_DELAYCTRL_I2C_SLV3_DLY_EN_BIT = 3
MPU6050_DELAYCTRL_I2C_SLV2_DLY_EN_BIT = 2
MPU6050_DELAYCTRL_I2C_SLV1_DLY_EN_BIT = 1
MPU6050_DELAYCTRL_I2C_SLV0_DLY_EN_BIT = 0
MPU6050_PATHRESET_GYRO_RESET_BIT = 2
MPU6050_PATHRESET_ACCEL_RESET_BIT = 1
MPU6050_PATHRESET_TEMP_RESET_BIT = 0
MPU6050_DETECT_ACCEL_ON_DELAY_BIT = 5
MPU6050_DETECT_ACCEL_ON_DELAY_LENGTH = 2
MPU6050_DETECT_FF_COUNT_BIT = 3
MPU6050_DETECT_FF_COUNT_LENGTH = 2
MPU6050_DETECT_MOT_COUNT_BIT = 1
MPU6050_DETECT_MOT_COUNT_LENGTH = 2
MPU6050_DETECT_DECREMENT_RESET = 0x0
MPU6050_DETECT_DECREMENT_1 = 0x1
MPU6050_DETECT_DECREMENT_2 = 0x2
MPU6050_DETECT_DECREMENT_4 = 0x3
MPU6050_USERCTRL_DMP_EN_BIT = 7
MPU6050_USERCTRL_FIFO_EN_BIT = 6
MPU6050_USERCTRL_I2C_MST_EN_BIT = 5
MPU6050_USERCTRL_I2C_IF_DIS_BIT = 4
MPU6050_USERCTRL_DMP_RESET_BIT = 3
MPU6050_USERCTRL_FIFO_RESET_BIT = 2
MPU6050_USERCTRL_I2C_MST_RESET_BIT = 1
MPU6050_USERCTRL_SIG_COND_RESET_BIT = 0
MPU6050_PWR1_DEVICE_RESET_BIT = 7
MPU6050_PWR1_SLEEP_BIT = 6
MPU6050_PWR1_CYCLE_BIT = 5
MPU6050_PWR1_TEMP_DIS_BIT = 3
MPU6050_PWR1_CLKSEL_BIT = 2
MPU6050_PWR1_CLKSEL_LENGTH = 3
MPU6050_CLOCK_INTERNAL = 0x00
MPU6050_CLOCK_PLL_XGYRO = 0x01
MPU6050_CLOCK_PLL_YGYRO = 0x02
MPU6050_CLOCK_PLL_ZGYRO = 0x03
MPU6050_CLOCK_PLL_EXT32K = 0x04
MPU6050_CLOCK_PLL_EXT19M = 0x05
MPU6050_CLOCK_KEEP_RESET = 0x07
MPU6050_PWR2_LP_WAKE_CTRL_BIT = 7
MPU6050_PWR2_LP_WAKE_CTRL_LENGTH = 2
MPU6050_PWR2_STBY_XA_BIT = 5
MPU6050_PWR2_STBY_YA_BIT = 4
MPU6050_PWR2_STBY_ZA_BIT = 3
MPU6050_PWR2_STBY_XG_BIT = 2
MPU6050_PWR2_STBY_YG_BIT = 1
MPU6050_PWR2_STBY_ZG_BIT = 0
MPU6050_WAKE_FREQ_1P25 = 0x0
MPU6050_WAKE_FREQ_2P5 = 0x1
MPU6050_WAKE_FREQ_5 = 0x2
MPU6050_WAKE_FREQ_10 = 0x3
MPU6050_BANKSEL_PRFTCH_EN_BIT = 6
MPU6050_BANKSEL_CFG_USER_BANK_BIT = 5
MPU6050_BANKSEL_MEM_SEL_BIT = 4
MPU6050_BANKSEL_MEM_SEL_LENGTH = 5
MPU6050_WHO_AM_I_BIT = 6
MPU6050_WHO_AM_I_LENGTH = 6
# DMP (Digital Motion Processor) memory layout.
MPU6050_DMP_MEMORY_BANKS = 8
MPU6050_DMP_MEMORY_BANK_SIZE = 256
MPU6050_DMP_MEMORY_CHUNK_SIZE = 16
# Project-specific wiring: GPIO pins used for the sensor, and the I2C
# address this board responds on.
MPU_SCL_PIN = 13
MPU_SDA_PIN = 12
MPU_DATA_RDY_PIN = 14
MPU_ADDR = MPU6050_DEFAULT_ADDRESS
# -*- coding: utf-8 -*-
"""
The rrule module offers a small, complete, and very fast, implementation of
the recurrence rules documented in the
`iCalendar RFC <http://www.ietf.org/rfc/rfc2445.txt>`_,
including support for caching of results.
"""
import itertools
import datetime
import calendar
import sys
from six import advance_iterator, integer_types
from six.moves import _thread
__all__ = ["rrule", "rruleset", "rrulestr",
           "YEARLY", "MONTHLY", "WEEKLY", "DAILY",
           "HOURLY", "MINUTELY", "SECONDLY",
           "MO", "TU", "WE", "TH", "FR", "SA", "SU"]
# Every mask is 7 days longer to handle cross-year weekly periods.
# M366MASK[i] is the month number (1-12) of day-of-year i in a leap year.
M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 +
                 [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7)
M365MASK = list(M366MASK)
M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32))
# MDAY366MASK[i] is the (positive) day-of-month of day-of-year i (leap year).
MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
MDAY365MASK = list(MDAY366MASK)
M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0))
# NMDAY366MASK[i] is the negative (from-month-end) day-of-month of day i.
NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
NMDAY365MASK = list(NMDAY366MASK)
# Cumulative day counts at the start of each month (leap / non-leap).
M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366)
M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365)
WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55
# Remove the leap-day entries to derive the non-leap masks, and discard
# the scratch per-month lists.
del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31]
MDAY365MASK = tuple(MDAY365MASK)
M365MASK = tuple(M365MASK)
# Frequency constants, ordered from longest to shortest period.
(YEARLY,
 MONTHLY,
 WEEKLY,
 DAILY,
 HOURLY,
 MINUTELY,
 SECONDLY) = list(range(7))
# Imported on demand.
easter = None
parser = None
class weekday(object):
    """A weekday, optionally qualified by an occurrence index ``n``.

    For example ``FR(+1)`` means "the first Friday" within the enclosing
    frequency period, and ``FR(-1)`` the last one.  The module-level
    singletons MO..SU are instances of this class with ``n is None``.
    """

    __slots__ = ["weekday", "n"]

    def __init__(self, weekday, n=None):
        # n == 0 is meaningless ("the zeroth occurrence"); reject it early.
        if n == 0:
            raise ValueError("Can't create weekday with n == 0")
        self.weekday = weekday
        self.n = n

    def __call__(self, n):
        """Return a weekday with the same day but occurrence index *n*.

        Returns ``self`` unchanged when *n* already matches, so the
        MO..SU singletons are reused for the common unqualified case.
        """
        if n == self.n:
            return self
        else:
            return self.__class__(self.weekday, n)

    def __eq__(self, other):
        try:
            if self.weekday != other.weekday or self.n != other.n:
                return False
        except AttributeError:
            # *other* is not weekday-like; never equal.
            return False
        return True

    def __hash__(self):
        # Defining __eq__ alone sets __hash__ to None on Python 3, which
        # would make instances unusable as dict keys or set members.
        # Hash on the same fields __eq__ compares, keeping the
        # equal-implies-same-hash invariant.
        return hash((self.weekday, self.n))

    def __repr__(self):
        s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
        if not self.n:
            return s
        else:
            return "%s(%+d)" % (s, self.n)
# Singleton unqualified weekday instances, plus the complete ordered tuple.
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple([weekday(x) for x in range(7)])
class rrulebase(object):
    """Shared machinery for recurrence iterables (rrule, rruleset).

    Subclasses must provide an ``_iter()`` generator yielding occurrences
    in ascending order, and are expected to set ``self._len`` once the
    generator is exhausted.  This base class layers optional caching and
    the query methods count/before/after/between on top of it.
    """

    def __init__(self, cache=False):
        if cache:
            # The cache is filled lazily by _iter_cached(); the lock
            # serializes concurrent iterators extending it.
            self._cache = []
            self._cache_lock = _thread.allocate_lock()
            self._cache_gen = self._iter()
            self._cache_complete = False
        else:
            self._cache = None
            self._cache_complete = False
        self._len = None

    def __iter__(self):
        if self._cache_complete:
            return iter(self._cache)
        elif self._cache is None:
            return self._iter()
        else:
            return self._iter_cached()

    def _iter_cached(self):
        """Yield occurrences, extending the shared cache as needed."""
        i = 0
        gen = self._cache_gen
        cache = self._cache
        acquire = self._cache_lock.acquire
        release = self._cache_lock.release
        while gen:
            if i == len(cache):
                acquire()
                try:
                    # Another iterator may have finished the generator
                    # while we waited for the lock.
                    if self._cache_complete:
                        break
                    try:
                        # Extend the cache in chunks of 10 to amortize
                        # locking overhead.
                        for _ in range(10):
                            cache.append(next(gen))
                    except StopIteration:
                        self._cache_gen = gen = None
                        self._cache_complete = True
                        break
                finally:
                    # BUGFIX: the original released the lock only on the
                    # successful path; both break paths above used to
                    # leave it held forever, deadlocking any other
                    # cached iterator.
                    release()
            yield cache[i]
            i += 1
        # Generator exhausted (by us or a concurrent iterator): drain the
        # remainder of the now-complete cache.  _iter() is expected to
        # have set self._len by this point.
        while i < self._len:
            yield cache[i]
            i += 1

    def __getitem__(self, item):
        if self._cache_complete:
            return self._cache[item]
        elif isinstance(item, slice):
            if item.step and item.step < 0:
                # Negative steps need the full materialized sequence.
                return list(iter(self))[item]
            else:
                return list(itertools.islice(self,
                                             item.start or 0,
                                             item.stop or sys.maxsize,
                                             item.step or 1))
        elif item >= 0:
            # Walk forward item+1 occurrences without materializing all.
            gen = iter(self)
            try:
                for i in range(item+1):
                    res = next(gen)
            except StopIteration:
                raise IndexError
            return res
        else:
            # Negative index: the length is unknown up front, so the
            # whole sequence must be materialized.
            return list(iter(self))[item]

    def __contains__(self, item):
        if self._cache_complete:
            return item in self._cache
        else:
            # Occurrences come out in ascending order, so stop as soon
            # as we pass *item*.
            for i in self:
                if i == item:
                    return True
                elif i > item:
                    return False
            return False

    # __len__() introduces a large performance penality.
    def count(self):
        """ Returns the number of recurrences in this set. It will have to
        go through the whole recurrence, if this hasn't been done before. """
        if self._len is None:
            for x in self:
                pass
        return self._len

    def before(self, dt, inc=False):
        """ Returns the last recurrence before the given datetime instance. The
        inc keyword defines what happens if dt is an occurrence. With
        inc=True, if dt itself is an occurrence, it will be returned. """
        if self._cache_complete:
            gen = self._cache
        else:
            gen = self
        last = None
        if inc:
            for i in gen:
                if i > dt:
                    break
                last = i
        else:
            for i in gen:
                if i >= dt:
                    break
                last = i
        return last

    def after(self, dt, inc=False):
        """ Returns the first recurrence after the given datetime instance. The
        inc keyword defines what happens if dt is an occurrence. With
        inc=True, if dt itself is an occurrence, it will be returned. """
        if self._cache_complete:
            gen = self._cache
        else:
            gen = self
        if inc:
            for i in gen:
                if i >= dt:
                    return i
        else:
            for i in gen:
                if i > dt:
                    return i
        return None

    def between(self, after, before, inc=False):
        """ Returns all the occurrences of the rrule between after and before.
        The inc keyword defines what happens if after and/or before are
        themselves occurrences. With inc=True, they will be included in the
        list, if they are found in the recurrence set. """
        if self._cache_complete:
            gen = self._cache
        else:
            gen = self
        started = False
        result = []
        if inc:
            for i in gen:
                if i > before:
                    break
                elif not started:
                    if i >= after:
                        started = True
                        result.append(i)
                else:
                    result.append(i)
        else:
            for i in gen:
                if i >= before:
                    break
                elif not started:
                    if i > after:
                        started = True
                        result.append(i)
                else:
                    result.append(i)
        return result
class rrule(rrulebase):
"""
That's the base of the rrule operation. It accepts all the keywords
defined in the RFC as its constructor parameters (except byday,
which was renamed to byweekday) and more. The constructor prototype is::
rrule(freq)
Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY,
or SECONDLY.
Additionally, it supports the following keyword arguments:
:param cache:
If given, it must be a boolean value specifying to enable or disable
caching of results. If you will use the same rrule instance multiple
times, enabling caching will improve the performance considerably.
:param dtstart:
The recurrence start. Besides being the base for the recurrence,
missing parameters in the final recurrence instances will also be
extracted from this date. If not given, datetime.now() will be used
instead.
:param interval:
The interval between each freq iteration. For example, when using
YEARLY, an interval of 2 means once every two years, but with HOURLY,
it means once every two hours. The default interval is 1.
:param wkst:
The week start day. Must be one of the MO, TU, WE constants, or an
integer, specifying the first day of the week. This will affect
recurrences based on weekly periods. The default week start is got
from calendar.firstweekday(), and may be modified by
calendar.setfirstweekday().
:param count:
How many occurrences will be generated.
:param until:
If given, this must be a datetime instance, that will specify the
limit of the recurrence. If a recurrence instance happens to be the
same as the datetime instance given in the until keyword, this will
be the last occurrence.
:param bysetpos:
If given, it must be either an integer, or a sequence of integers,
positive or negative. Each given integer will specify an occurrence
number, corresponding to the nth occurrence of the rule inside the
frequency period. For example, a bysetpos of -1 if combined with a
MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will
result in the last work day of every month.
:param bymonth:
If given, it must be either an integer, or a sequence of integers,
meaning the months to apply the recurrence to.
:param bymonthday:
If given, it must be either an integer, or a sequence of integers,
meaning the month days to apply the recurrence to.
:param byyearday:
If given, it must be either an integer, or a sequence of integers,
meaning the year days to apply the recurrence to.
:param byweekno:
If given, it must be either an integer, or a sequence of integers,
meaning the week numbers to apply the recurrence to. Week numbers
have the meaning described in ISO8601, that is, the first week of
the year is that containing at least four days of the new year.
:param byweekday:
If given, it must be either an integer (0 == MO), a sequence of
integers, one of the weekday constants (MO, TU, etc), or a sequence
of these constants. When given, these variables will define the
weekdays where the recurrence will be applied. It's also possible to
use an argument n for the weekday instances, which will mean the nth
occurrence of this weekday in the period. For example, with MONTHLY,
or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the
first friday of the month where the recurrence happens. Notice that in
the RFC documentation, this is specified as BYDAY, but was renamed to
avoid the ambiguity of that keyword.
:param byhour:
If given, it must be either an integer, or a sequence of integers,
meaning the hours to apply the recurrence to.
:param byminute:
If given, it must be either an integer, or a sequence of integers,
meaning the minutes to apply the recurrence to.
:param bysecond:
If given, it must be either an integer, or a sequence of integers,
meaning the seconds to apply the recurrence to.
:param byeaster:
If given, it must be either an integer, or a sequence of integers,
positive or negative. Each integer will define an offset from the
Easter Sunday. Passing the offset 0 to byeaster will yield the Easter
Sunday itself. This is an extension to the RFC specification.
"""
    def __init__(self, freq, dtstart=None,
                 interval=1, wkst=None, count=None, until=None, bysetpos=None,
                 bymonth=None, bymonthday=None, byyearday=None, byeaster=None,
                 byweekno=None, byweekday=None,
                 byhour=None, byminute=None, bysecond=None,
                 cache=False):
        """Validate and normalize all recurrence arguments.

        Every BYxxx argument is canonicalized to a tuple (or None when
        absent), defaults are derived from ``dtstart`` per RFC 2445, and for
        daily-or-slower frequencies the per-day time set is precomputed.
        """
        super(rrule, self).__init__(cache)
        # ``easter`` is imported lazily below, only when byeaster is used.
        global easter
        if not dtstart:
            # Default the series start to "now", truncated to whole seconds
            # so generated occurrences compare cleanly.
            dtstart = datetime.datetime.now().replace(microsecond=0)
        elif not isinstance(dtstart, datetime.datetime):
            # Promote a plain date to a datetime at midnight.
            dtstart = datetime.datetime.fromordinal(dtstart.toordinal())
        else:
            dtstart = dtstart.replace(microsecond=0)
        self._dtstart = dtstart
        self._tzinfo = dtstart.tzinfo
        self._freq = freq
        self._interval = interval
        self._count = count
        # ``until`` may also be a plain date; promote it the same way.
        if until and not isinstance(until, datetime.datetime):
            until = datetime.datetime.fromordinal(until.toordinal())
        self._until = until
        if wkst is None:
            # Week start defaults to the calendar module's first weekday.
            self._wkst = calendar.firstweekday()
        elif isinstance(wkst, integer_types):
            self._wkst = wkst
        else:
            # A weekday constant such as MO/TU carries a .weekday attribute.
            self._wkst = wkst.weekday
        # bysetpos: occurrence positions within one recurrence period.
        if bysetpos is None:
            self._bysetpos = None
        elif isinstance(bysetpos, integer_types):
            if bysetpos == 0 or not (-366 <= bysetpos <= 366):
                raise ValueError("bysetpos must be between 1 and 366, "
                                 "or between -366 and -1")
            self._bysetpos = (bysetpos,)
        else:
            self._bysetpos = tuple(bysetpos)
            for pos in self._bysetpos:
                if pos == 0 or not (-366 <= pos <= 366):
                    raise ValueError("bysetpos must be between 1 and 366, "
                                     "or between -366 and -1")
        # When no day-level BYxxx rule was given, derive one from dtstart so
        # the recurrence anchors on the start date (RFC 2445 behaviour).
        if not (byweekno or byyearday or bymonthday or
                byweekday is not None or byeaster is not None):
            if freq == YEARLY:
                if not bymonth:
                    bymonth = dtstart.month
                bymonthday = dtstart.day
            elif freq == MONTHLY:
                bymonthday = dtstart.day
            elif freq == WEEKLY:
                byweekday = dtstart.weekday()
        # bymonth
        if not bymonth:
            self._bymonth = None
        elif isinstance(bymonth, integer_types):
            self._bymonth = (bymonth,)
        else:
            self._bymonth = tuple(bymonth)
        # byyearday
        if not byyearday:
            self._byyearday = None
        elif isinstance(byyearday, integer_types):
            self._byyearday = (byyearday,)
        else:
            self._byyearday = tuple(byyearday)
        # byeaster (extension to the RFC): lazily pull in dateutil.easter.
        if byeaster is not None:
            if not easter:
                from dateutil import easter
            if isinstance(byeaster, integer_types):
                self._byeaster = (byeaster,)
            else:
                self._byeaster = tuple(byeaster)
        else:
            self._byeaster = None
        # bymonthday: positive values and negative (from-month-end) values
        # are kept in separate tuples.
        if not bymonthday:
            self._bymonthday = ()
            self._bynmonthday = ()
        elif isinstance(bymonthday, integer_types):
            if bymonthday < 0:
                self._bynmonthday = (bymonthday,)
                self._bymonthday = ()
            else:
                self._bymonthday = (bymonthday,)
                self._bynmonthday = ()
        else:
            self._bymonthday = tuple([x for x in bymonthday if x > 0])
            self._bynmonthday = tuple([x for x in bymonthday if x < 0])
        # byweekno
        if byweekno is None:
            self._byweekno = None
        elif isinstance(byweekno, integer_types):
            self._byweekno = (byweekno,)
        else:
            self._byweekno = tuple(byweekno)
        # byweekday / bynweekday: plain weekdays vs "nth weekday of period"
        # (e.g. FR(+1)); the latter only makes sense for MONTHLY or slower.
        if byweekday is None:
            self._byweekday = None
            self._bynweekday = None
        elif isinstance(byweekday, integer_types):
            self._byweekday = (byweekday,)
            self._bynweekday = None
        elif hasattr(byweekday, "n"):
            if not byweekday.n or freq > MONTHLY:
                self._byweekday = (byweekday.weekday,)
                self._bynweekday = None
            else:
                self._bynweekday = ((byweekday.weekday, byweekday.n),)
                self._byweekday = None
        else:
            self._byweekday = []
            self._bynweekday = []
            for wday in byweekday:
                if isinstance(wday, integer_types):
                    self._byweekday.append(wday)
                elif not wday.n or freq > MONTHLY:
                    self._byweekday.append(wday.weekday)
                else:
                    self._bynweekday.append((wday.weekday, wday.n))
            self._byweekday = tuple(self._byweekday)
            self._bynweekday = tuple(self._bynweekday)
            if not self._byweekday:
                self._byweekday = None
            elif not self._bynweekday:
                self._bynweekday = None
        # byhour: defaults to dtstart's hour for daily-or-slower rules.
        if byhour is None:
            if freq < HOURLY:
                self._byhour = (dtstart.hour,)
            else:
                self._byhour = None
        elif isinstance(byhour, integer_types):
            self._byhour = (byhour,)
        else:
            self._byhour = tuple(byhour)
        # byminute: same defaulting, one frequency level down.
        if byminute is None:
            if freq < MINUTELY:
                self._byminute = (dtstart.minute,)
            else:
                self._byminute = None
        elif isinstance(byminute, integer_types):
            self._byminute = (byminute,)
        else:
            self._byminute = tuple(byminute)
        # bysecond: same defaulting, one more level down.
        if bysecond is None:
            if freq < SECONDLY:
                self._bysecond = (dtstart.second,)
            else:
                self._bysecond = None
        elif isinstance(bysecond, integer_types):
            self._bysecond = (bysecond,)
        else:
            self._bysecond = tuple(bysecond)
        # For daily-or-slower rules the set of times-of-day is fixed, so it
        # can be computed once here; sub-daily rules build it per iteration.
        if self._freq >= HOURLY:
            self._timeset = None
        else:
            self._timeset = []
            for hour in self._byhour:
                for minute in self._byminute:
                    for second in self._bysecond:
                        self._timeset.append(
                            datetime.time(hour, minute, second,
                                          tzinfo=self._tzinfo))
            self._timeset.sort()
            self._timeset = tuple(self._timeset)
    def _iter(self):
        """Generate the recurrence's datetimes in ascending order.

        One loop iteration covers one period of ``freq``: a candidate day
        set is built, filtered against every BYxxx mask, combined with the
        time set, and the surviving datetimes are yielded (honouring
        ``count``/``until``), after which the date is advanced by
        ``interval``.
        """
        year, month, day, hour, minute, second, weekday, yearday, _ = \
            self._dtstart.timetuple()
        # Some local variables to speed things up a bit
        freq = self._freq
        interval = self._interval
        wkst = self._wkst
        until = self._until
        bymonth = self._bymonth
        byweekno = self._byweekno
        byyearday = self._byyearday
        byweekday = self._byweekday
        byeaster = self._byeaster
        bymonthday = self._bymonthday
        bynmonthday = self._bynmonthday
        bysetpos = self._bysetpos
        byhour = self._byhour
        byminute = self._byminute
        bysecond = self._bysecond
        ii = _iterinfo(self)
        ii.rebuild(year, month)
        # Pick the day-set builder matching the frequency.
        getdayset = {YEARLY: ii.ydayset,
                     MONTHLY: ii.mdayset,
                     WEEKLY: ii.wdayset,
                     DAILY: ii.ddayset,
                     HOURLY: ii.ddayset,
                     MINUTELY: ii.ddayset,
                     SECONDLY: ii.ddayset}[freq]
        if freq < HOURLY:
            # Daily or slower: the time-of-day set was precomputed.
            timeset = self._timeset
        else:
            gettimeset = {HOURLY: ii.htimeset,
                          MINUTELY: ii.mtimeset,
                          SECONDLY: ii.stimeset}[freq]
            if ((freq >= HOURLY and
                 self._byhour and hour not in self._byhour) or
                (freq >= MINUTELY and
                 self._byminute and minute not in self._byminute) or
                (freq >= SECONDLY and
                 self._bysecond and second not in self._bysecond)):
                timeset = ()
            else:
                timeset = gettimeset(hour, minute, second)
        total = 0
        count = self._count
        while True:
            # Get dayset with the right frequency
            dayset, start, end = getdayset(year, month, day)
            # Do the "hard" work ;-)
            # Null out every candidate day rejected by any BYxxx mask.
            filtered = False
            for i in dayset[start:end]:
                if ((bymonth and ii.mmask[i] not in bymonth) or
                    (byweekno and not ii.wnomask[i]) or
                    (byweekday and ii.wdaymask[i] not in byweekday) or
                    (ii.nwdaymask and not ii.nwdaymask[i]) or
                    (byeaster and not ii.eastermask[i]) or
                    ((bymonthday or bynmonthday) and
                     ii.mdaymask[i] not in bymonthday and
                     ii.nmdaymask[i] not in bynmonthday) or
                    (byyearday and
                     ((i < ii.yearlen and i+1 not in byyearday and
                       -ii.yearlen+i not in byyearday) or
                      (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and
                       -ii.nextyearlen+i-ii.yearlen not in byyearday)))):
                    dayset[i] = None
                    filtered = True
            # Output results
            if bysetpos and timeset:
                # BYSETPOS selects the nth occurrence within this period.
                poslist = []
                for pos in bysetpos:
                    if pos < 0:
                        daypos, timepos = divmod(pos, len(timeset))
                    else:
                        daypos, timepos = divmod(pos-1, len(timeset))
                    try:
                        i = [x for x in dayset[start:end]
                             if x is not None][daypos]
                        time = timeset[timepos]
                    except IndexError:
                        pass
                    else:
                        date = datetime.date.fromordinal(ii.yearordinal+i)
                        res = datetime.datetime.combine(date, time)
                        if res not in poslist:
                            poslist.append(res)
                poslist.sort()
                for res in poslist:
                    if until and res > until:
                        self._len = total
                        return
                    elif res >= self._dtstart:
                        total += 1
                        yield res
                        if count:
                            count -= 1
                            if not count:
                                self._len = total
                                return
            else:
                for i in dayset[start:end]:
                    if i is not None:
                        date = datetime.date.fromordinal(ii.yearordinal+i)
                        for time in timeset:
                            res = datetime.datetime.combine(date, time)
                            if until and res > until:
                                self._len = total
                                return
                            elif res >= self._dtstart:
                                total += 1
                                yield res
                                if count:
                                    count -= 1
                                    if not count:
                                        self._len = total
                                        return
            # Handle frequency and interval
            fixday = False
            if freq == YEARLY:
                year += interval
                if year > datetime.MAXYEAR:
                    self._len = total
                    return
                ii.rebuild(year, month)
            elif freq == MONTHLY:
                month += interval
                if month > 12:
                    div, mod = divmod(month, 12)
                    month = mod
                    year += div
                    if month == 0:
                        month = 12
                        year -= 1
                    if year > datetime.MAXYEAR:
                        self._len = total
                        return
                ii.rebuild(year, month)
            elif freq == WEEKLY:
                # Advance to the next week start, then let fixday normalize.
                if wkst > weekday:
                    day += -(weekday+1+(6-wkst))+self._interval*7
                else:
                    day += -(weekday-wkst)+self._interval*7
                weekday = wkst
                fixday = True
            elif freq == DAILY:
                day += interval
                fixday = True
            elif freq == HOURLY:
                if filtered:
                    # Jump to one iteration before next day
                    hour += ((23-hour)//interval)*interval
                while True:
                    hour += interval
                    div, mod = divmod(hour, 24)
                    if div:
                        hour = mod
                        day += div
                        fixday = True
                    if not byhour or hour in byhour:
                        break
                timeset = gettimeset(hour, minute, second)
            elif freq == MINUTELY:
                if filtered:
                    # Jump to one iteration before next day
                    minute += ((1439-(hour*60+minute))//interval)*interval
                while True:
                    minute += interval
                    div, mod = divmod(minute, 60)
                    if div:
                        minute = mod
                        hour += div
                        div, mod = divmod(hour, 24)
                        if div:
                            hour = mod
                            day += div
                            fixday = True
                            filtered = False
                    if ((not byhour or hour in byhour) and
                        (not byminute or minute in byminute)):
                        break
                timeset = gettimeset(hour, minute, second)
            elif freq == SECONDLY:
                if filtered:
                    # Jump to one iteration before next day
                    second += (((86399-(hour*3600+minute*60+second))
                                // interval)*interval)
                while True:
                    second += self._interval
                    div, mod = divmod(second, 60)
                    if div:
                        second = mod
                        minute += div
                        div, mod = divmod(minute, 60)
                        if div:
                            minute = mod
                            hour += div
                            div, mod = divmod(hour, 24)
                            if div:
                                hour = mod
                                day += div
                                fixday = True
                    if ((not byhour or hour in byhour) and
                        (not byminute or minute in byminute) and
                        (not bysecond or second in bysecond)):
                        break
                timeset = gettimeset(hour, minute, second)
            # Normalize any day overflow into month/year increments.
            if fixday and day > 28:
                daysinmonth = calendar.monthrange(year, month)[1]
                if day > daysinmonth:
                    while day > daysinmonth:
                        day -= daysinmonth
                        month += 1
                        if month == 13:
                            month = 1
                            year += 1
                            if year > datetime.MAXYEAR:
                                self._len = total
                                return
                        daysinmonth = calendar.monthrange(year, month)[1]
                    ii.rebuild(year, month)
class _iterinfo(object):
    """Per-(year, month) precomputed masks used by :meth:`rrule._iter`.

    ``rebuild`` fills day-indexed lookup tables (month number, month day,
    negative month day, weekday, week number, nth-weekday, easter offset)
    so the iterator can filter candidate days with plain indexing. Masks
    are 7 days longer than the year to tolerate cross-year weekly periods.
    """
    __slots__ = ["rrule", "lastyear", "lastmonth",
                 "yearlen", "nextyearlen", "yearordinal", "yearweekday",
                 "mmask", "mrange", "mdaymask", "nmdaymask",
                 "wdaymask", "wnomask", "nwdaymask", "eastermask"]
    def __init__(self, rrule):
        for attr in self.__slots__:
            setattr(self, attr, None)
        self.rrule = rrule
    def rebuild(self, year, month):
        """Recompute the masks for ``year``/``month``; cheap when unchanged."""
        # Every mask is 7 days longer to handle cross-year weekly periods.
        rr = self.rrule
        if year != self.lastyear:
            self.yearlen = 365+calendar.isleap(year)
            self.nextyearlen = 365+calendar.isleap(year+1)
            firstyday = datetime.date(year, 1, 1)
            self.yearordinal = firstyday.toordinal()
            self.yearweekday = firstyday.weekday()
            wday = datetime.date(year, 1, 1).weekday()
            # Choose the 365- or 366-day precomputed mask constants.
            if self.yearlen == 365:
                self.mmask = M365MASK
                self.mdaymask = MDAY365MASK
                self.nmdaymask = NMDAY365MASK
                self.wdaymask = WDAYMASK[wday:]
                self.mrange = M365RANGE
            else:
                self.mmask = M366MASK
                self.mdaymask = MDAY366MASK
                self.nmdaymask = NMDAY366MASK
                self.wdaymask = WDAYMASK[wday:]
                self.mrange = M366RANGE
            if not rr._byweekno:
                self.wnomask = None
            else:
                # Mark every day that belongs to a requested ISO week number.
                self.wnomask = [0]*(self.yearlen+7)
                # no1wkst = firstwkst = self.wdaymask.index(rr._wkst)
                no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7
                if no1wkst >= 4:
                    no1wkst = 0
                    # Number of days in the year, plus the days we got
                    # from last year.
                    wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7
                else:
                    # Number of days in the year, minus the days we
                    # left in last year.
                    wyearlen = self.yearlen-no1wkst
                div, mod = divmod(wyearlen, 7)
                numweeks = div+mod//4
                for n in rr._byweekno:
                    if n < 0:
                        n += numweeks+1
                    if not (0 < n <= numweeks):
                        continue
                    if n > 1:
                        i = no1wkst+(n-1)*7
                        if no1wkst != firstwkst:
                            i -= 7-firstwkst
                    else:
                        i = no1wkst
                    for j in range(7):
                        self.wnomask[i] = 1
                        i += 1
                        if self.wdaymask[i] == rr._wkst:
                            break
                if 1 in rr._byweekno:
                    # Check week number 1 of next year as well
                    # TODO: Check -numweeks for next year.
                    i = no1wkst+numweeks*7
                    if no1wkst != firstwkst:
                        i -= 7-firstwkst
                    if i < self.yearlen:
                        # If week starts in next year, we
                        # don't care about it.
                        for j in range(7):
                            self.wnomask[i] = 1
                            i += 1
                            if self.wdaymask[i] == rr._wkst:
                                break
                if no1wkst:
                    # Check last week number of last year as
                    # well. If no1wkst is 0, either the year
                    # started on week start, or week number 1
                    # got days from last year, so there are no
                    # days from last year's last week number in
                    # this year.
                    if -1 not in rr._byweekno:
                        lyearweekday = datetime.date(year-1, 1, 1).weekday()
                        lno1wkst = (7-lyearweekday+rr._wkst) % 7
                        lyearlen = 365+calendar.isleap(year-1)
                        if lno1wkst >= 4:
                            lno1wkst = 0
                            lnumweeks = 52+(lyearlen +
                                            (lyearweekday-rr._wkst) % 7) % 7//4
                        else:
                            lnumweeks = 52+(self.yearlen-no1wkst) % 7//4
                    else:
                        lnumweeks = -1
                    if lnumweeks in rr._byweekno:
                        for i in range(no1wkst):
                            self.wnomask[i] = 1
        if (rr._bynweekday and (month != self.lastmonth or
                                year != self.lastyear)):
            # Mark the nth-weekday-of-period days (e.g. FR(+1), MO(-2)).
            ranges = []
            if rr._freq == YEARLY:
                if rr._bymonth:
                    for month in rr._bymonth:
                        ranges.append(self.mrange[month-1:month+1])
                else:
                    ranges = [(0, self.yearlen)]
            elif rr._freq == MONTHLY:
                ranges = [self.mrange[month-1:month+1]]
            if ranges:
                # Weekly frequency won't get here, so we may not
                # care about cross-year weekly periods.
                self.nwdaymask = [0]*self.yearlen
                for first, last in ranges:
                    last -= 1
                    for wday, n in rr._bynweekday:
                        if n < 0:
                            i = last+(n+1)*7
                            i -= (self.wdaymask[i]-wday) % 7
                        else:
                            i = first+(n-1)*7
                            i += (7-self.wdaymask[i]+wday) % 7
                        if first <= i <= last:
                            self.nwdaymask[i] = 1
        if rr._byeaster:
            self.eastermask = [0]*(self.yearlen+7)
            eyday = easter.easter(year).toordinal()-self.yearordinal
            for offset in rr._byeaster:
                self.eastermask[eyday+offset] = 1
        self.lastyear = year
        self.lastmonth = month
    def ydayset(self, year, month, day):
        """Candidate days for a YEARLY period: the whole year."""
        return list(range(self.yearlen)), 0, self.yearlen
    def mdayset(self, year, month, day):
        """Candidate days for a MONTHLY period: that month's slice."""
        set = [None]*self.yearlen
        start, end = self.mrange[month-1:month+1]
        for i in range(start, end):
            set[i] = i
        return set, start, end
    def wdayset(self, year, month, day):
        """Candidate days for a WEEKLY period starting at the given date."""
        # We need to handle cross-year weeks here.
        set = [None]*(self.yearlen+7)
        i = datetime.date(year, month, day).toordinal()-self.yearordinal
        start = i
        for j in range(7):
            set[i] = i
            i += 1
            # if (not (0 <= i < self.yearlen) or
            #     self.wdaymask[i] == self.rrule._wkst):
            # This will cross the year boundary, if necessary.
            if self.wdaymask[i] == self.rrule._wkst:
                break
        return set, start, i
    def ddayset(self, year, month, day):
        """Candidate days for DAILY and sub-daily periods: one day."""
        set = [None]*self.yearlen
        i = datetime.date(year, month, day).toordinal()-self.yearordinal
        set[i] = i
        return set, i, i+1
    def htimeset(self, hour, minute, second):
        """Sorted times for an HOURLY period at the given hour."""
        set = []
        rr = self.rrule
        for minute in rr._byminute:
            for second in rr._bysecond:
                set.append(datetime.time(hour, minute, second,
                                         tzinfo=rr._tzinfo))
        set.sort()
        return set
    def mtimeset(self, hour, minute, second):
        """Sorted times for a MINUTELY period at the given hour/minute."""
        set = []
        rr = self.rrule
        for second in rr._bysecond:
            set.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo))
        set.sort()
        return set
    def stimeset(self, hour, minute, second):
        """Single time for a SECONDLY period."""
        return (datetime.time(hour, minute, second,
                              tzinfo=self.rrule._tzinfo),)
class rruleset(rrulebase):
    """ The rruleset type allows more complex recurrence setups, mixing
    multiple rules, dates, exclusion rules, and exclusion dates. The type
    constructor takes the following keyword arguments:
    :param cache: If True, caching of results will be enabled, improving
                  performance of multiple queries considerably. """
    class _genitem(object):
        # Wraps one generator in a comparable handle holding its current
        # datetime (`dt`); used to merge-sort several sorted streams.
        def __init__(self, genlist, gen):
            try:
                self.dt = advance_iterator(gen)
                genlist.append(self)
            except StopIteration:
                # Exhausted immediately: never added to the merge list.
                pass
            self.genlist = genlist
            self.gen = gen
        def __next__(self):
            try:
                self.dt = advance_iterator(self.gen)
            except StopIteration:
                # Drop ourselves from the merge list once exhausted.
                self.genlist.remove(self)
        next = __next__
        def __lt__(self, other):
            return self.dt < other.dt
        def __gt__(self, other):
            return self.dt > other.dt
        def __eq__(self, other):
            return self.dt == other.dt
        def __ne__(self, other):
            return self.dt != other.dt
    def __init__(self, cache=False):
        super(rruleset, self).__init__(cache)
        self._rrule = []
        self._rdate = []
        self._exrule = []
        self._exdate = []
    def rrule(self, rrule):
        """ Include the given :py:class:`rrule` instance in the recurrence set
        generation. """
        self._rrule.append(rrule)
    def rdate(self, rdate):
        """ Include the given :py:class:`datetime` instance in the recurrence
        set generation. """
        self._rdate.append(rdate)
    def exrule(self, exrule):
        """ Include the given rrule instance in the recurrence set exclusion
        list. Dates which are part of the given recurrence rules will not
        be generated, even if some inclusive rrule or rdate matches them.
        """
        self._exrule.append(exrule)
    def exdate(self, exdate):
        """ Include the given datetime instance in the recurrence set
        exclusion list. Dates included that way will not be generated,
        even if some inclusive rrule or rdate matches them. """
        self._exdate.append(exdate)
    def _iter(self):
        """Merge all inclusion streams in ascending order, skipping any
        datetime also produced by an exclusion stream, and de-duplicating."""
        rlist = []
        self._rdate.sort()
        self._genitem(rlist, iter(self._rdate))
        for gen in [iter(x) for x in self._rrule]:
            self._genitem(rlist, gen)
        rlist.sort()
        exlist = []
        self._exdate.sort()
        self._genitem(exlist, iter(self._exdate))
        for gen in [iter(x) for x in self._exrule]:
            self._genitem(exlist, gen)
        exlist.sort()
        lastdt = None
        total = 0
        while rlist:
            ritem = rlist[0]
            if not lastdt or lastdt != ritem.dt:
                # Advance the exclusion merge up to the candidate, then
                # yield only if no exclusion matches it exactly.
                while exlist and exlist[0] < ritem:
                    advance_iterator(exlist[0])
                    exlist.sort()
                if not exlist or ritem != exlist[0]:
                    total += 1
                    yield ritem.dt
                lastdt = ritem.dt
            advance_iterator(ritem)
            rlist.sort()
        self._len = total
class _rrulestr(object):
    """Parse RFC 2445 recurrence text into :class:`rrule`/:class:`rruleset`.

    A single RRULE line yields an :class:`rrule`; multiple lines (or any
    RDATE/EXRULE/EXDATE content, or ``forceset=True``) yield an
    :class:`rruleset`. Exposed at module level as the ``rrulestr`` callable.
    """
    _freq_map = {"YEARLY": YEARLY,
                 "MONTHLY": MONTHLY,
                 "WEEKLY": WEEKLY,
                 "DAILY": DAILY,
                 "HOURLY": HOURLY,
                 "MINUTELY": MINUTELY,
                 "SECONDLY": SECONDLY}
    _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3,
                    "FR": 4, "SA": 5, "SU": 6}
    def _handle_int(self, rrkwargs, name, value, **kwargs):
        # Generic handler: a single integer-valued RRULE part.
        rrkwargs[name.lower()] = int(value)
    def _handle_int_list(self, rrkwargs, name, value, **kwargs):
        # Generic handler: a comma-separated list of integers.
        rrkwargs[name.lower()] = [int(x) for x in value.split(',')]
    _handle_INTERVAL = _handle_int
    _handle_COUNT = _handle_int
    _handle_BYSETPOS = _handle_int_list
    _handle_BYMONTH = _handle_int_list
    _handle_BYMONTHDAY = _handle_int_list
    _handle_BYYEARDAY = _handle_int_list
    _handle_BYEASTER = _handle_int_list
    _handle_BYWEEKNO = _handle_int_list
    _handle_BYHOUR = _handle_int_list
    _handle_BYMINUTE = _handle_int_list
    _handle_BYSECOND = _handle_int_list
    def _handle_FREQ(self, rrkwargs, name, value, **kwargs):
        rrkwargs["freq"] = self._freq_map[value]
    def _handle_UNTIL(self, rrkwargs, name, value, **kwargs):
        # Lazy import: dateutil.parser is only needed when UNTIL appears.
        global parser
        if not parser:
            from dateutil import parser
        try:
            rrkwargs["until"] = parser.parse(value,
                                             ignoretz=kwargs.get("ignoretz"),
                                             tzinfos=kwargs.get("tzinfos"))
        except ValueError:
            raise ValueError("invalid until date")
    def _handle_WKST(self, rrkwargs, name, value, **kwargs):
        rrkwargs["wkst"] = self._weekday_map[value]
    def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs):
        # Accept entries like "MO" or "+1FR"/"-2SU" (nth weekday of period).
        weekday_objs = []
        for wday in value.split(','):
            # Split an optional signed ordinal prefix from the weekday code.
            for i in range(len(wday)):
                if wday[i] not in '+-0123456789':
                    break
            n = wday[:i] or None
            w = wday[i:]
            if n:
                n = int(n)
            weekday_objs.append(weekdays[self._weekday_map[w]](n))
        rrkwargs["byweekday"] = weekday_objs
    _handle_BYDAY = _handle_BYWEEKDAY
    def _parse_rfc_rrule(self, line,
                         dtstart=None,
                         cache=False,
                         ignoretz=False,
                         tzinfos=None):
        """Parse one RRULE property line (with or without the "RRULE:"
        prefix) into an :class:`rrule` instance."""
        if line.find(':') != -1:
            name, value = line.split(':')
            if name != "RRULE":
                raise ValueError("unknown parameter name")
        else:
            value = line
        rrkwargs = {}
        # Dispatch each NAME=VALUE pair to its _handle_<NAME> method.
        for pair in value.split(';'):
            name, value = pair.split('=')
            name = name.upper()
            value = value.upper()
            try:
                getattr(self, "_handle_"+name)(rrkwargs, name, value,
                                               ignoretz=ignoretz,
                                               tzinfos=tzinfos)
            except AttributeError:
                raise ValueError("unknown parameter '%s'" % name)
            except (KeyError, ValueError):
                raise ValueError("invalid '%s': %s" % (name, value))
        return rrkwargs and rrule(dtstart=dtstart, cache=cache, **rrkwargs) \
            or rrule(dtstart=dtstart, cache=cache, **rrkwargs)
    def _parse_rfc(self, s,
                   dtstart=None,
                   cache=False,
                   unfold=False,
                   forceset=False,
                   compatible=False,
                   ignoretz=False,
                   tzinfos=None):
        """Parse a full RFC 2445 fragment (RRULE/RDATE/EXRULE/EXDATE and an
        optional DTSTART), returning an rrule or an rruleset as needed."""
        global parser
        if compatible:
            forceset = True
            unfold = True
        s = s.upper()
        if not s.strip():
            raise ValueError("empty string")
        if unfold:
            # RFC 2445 line folding: a leading space continues the previous
            # line; reassemble the logical lines first.
            lines = s.splitlines()
            i = 0
            while i < len(lines):
                line = lines[i].rstrip()
                if not line:
                    del lines[i]
                elif i > 0 and line[0] == " ":
                    lines[i-1] += line[1:]
                    del lines[i]
                else:
                    i += 1
        else:
            lines = s.split()
        # Fast path: a single bare RRULE (no property set required).
        if (not forceset and len(lines) == 1 and (s.find(':') == -1 or
                                                  s.startswith('RRULE:'))):
            return self._parse_rfc_rrule(lines[0], cache=cache,
                                         dtstart=dtstart, ignoretz=ignoretz,
                                         tzinfos=tzinfos)
        else:
            rrulevals = []
            rdatevals = []
            exrulevals = []
            exdatevals = []
            for line in lines:
                if not line:
                    continue
                if line.find(':') == -1:
                    name = "RRULE"
                    value = line
                else:
                    name, value = line.split(':', 1)
                parms = name.split(';')
                if not parms:
                    raise ValueError("empty property name")
                name = parms[0]
                parms = parms[1:]
                if name == "RRULE":
                    for parm in parms:
                        raise ValueError("unsupported RRULE parm: "+parm)
                    rrulevals.append(value)
                elif name == "RDATE":
                    for parm in parms:
                        if parm != "VALUE=DATE-TIME":
                            raise ValueError("unsupported RDATE parm: "+parm)
                    rdatevals.append(value)
                elif name == "EXRULE":
                    for parm in parms:
                        raise ValueError("unsupported EXRULE parm: "+parm)
                    exrulevals.append(value)
                elif name == "EXDATE":
                    for parm in parms:
                        if parm != "VALUE=DATE-TIME":
                            # BUG FIX: this error previously said "RDATE".
                            raise ValueError("unsupported EXDATE parm: "+parm)
                    exdatevals.append(value)
                elif name == "DTSTART":
                    for parm in parms:
                        raise ValueError("unsupported DTSTART parm: "+parm)
                    if not parser:
                        from dateutil import parser
                    dtstart = parser.parse(value, ignoretz=ignoretz,
                                           tzinfos=tzinfos)
                else:
                    raise ValueError("unsupported property: "+name)
            if (forceset or len(rrulevals) > 1 or rdatevals
                    or exrulevals or exdatevals):
                if not parser and (rdatevals or exdatevals):
                    from dateutil import parser
                # Renamed from ``set`` to avoid shadowing the builtin.
                rset = rruleset(cache=cache)
                for value in rrulevals:
                    rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart,
                                                     ignoretz=ignoretz,
                                                     tzinfos=tzinfos))
                for value in rdatevals:
                    for datestr in value.split(','):
                        rset.rdate(parser.parse(datestr,
                                                ignoretz=ignoretz,
                                                tzinfos=tzinfos))
                for value in exrulevals:
                    rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart,
                                                      ignoretz=ignoretz,
                                                      tzinfos=tzinfos))
                for value in exdatevals:
                    for datestr in value.split(','):
                        rset.exdate(parser.parse(datestr,
                                                 ignoretz=ignoretz,
                                                 tzinfos=tzinfos))
                if compatible and dtstart:
                    rset.rdate(dtstart)
                return rset
            else:
                return self._parse_rfc_rrule(rrulevals[0],
                                             dtstart=dtstart,
                                             cache=cache,
                                             ignoretz=ignoretz,
                                             tzinfos=tzinfos)
    def __call__(self, s, **kwargs):
        return self._parse_rfc(s, **kwargs)
# Public module-level entry point: parse an RFC 2445 recurrence string into
# an rrule/rruleset through this shared _rrulestr instance.
rrulestr = _rrulestr()
# vim:ts=4:sw=4:et
| drpaneas/linuxed.gr | lib/python2.7/site-packages/dateutil/rrule.py | Python | mit | 47,634 | 0.000084 |
import calendar
# One-slot mutable holder for the CouchDB database handle; slot 0 stays None
# until application start-up assigns the live database into it.
db = [None]
def get_month_transaction_days(acc, year, month):
    """Return the days of ``month``/``year`` on which account ``acc`` has
    transactions, per the ``bank/transaction_days`` CouchDB view.

    The view is keyed by [account_id, year, month, day]; grouping at level 4
    yields one row per distinct day, and we extract that final day component.
    """
    # BUG FIX: calendar.monthrange() returns a (first_weekday, days_in_month)
    # tuple; the original passed the whole tuple as the endkey's day slot,
    # so only index [1] -- the number of days -- is the correct upper bound.
    monthdays = calendar.monthrange(year, month)[1]
    result = db[0].view('bank/transaction_days', startkey=[acc._id, year, month, 1],
        endkey=[acc._id, year, month, monthdays], group=True, group_level=4).all()
    return [r['key'][-1] for r in result]
def get_what_choice():
    """Return the distinct 'what' values recorded by the bank/what_choice view."""
    rows = db[0].view('bank/what_choice', group=True)
    return [row['key'] for row in rows]
def get_who_choice():
    """Return the distinct 'who' values recorded by the bank/who_choice view."""
    rows = db[0].view('bank/who_choice', group=True)
    return [row['key'] for row in rows]
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
冒泡排序(bubble sort):每个回合都从第一个元素开始和它后面的元素比较,
如果比它后面的元素更大的话就交换,一直重复,直到这个元素到了它能到达的位置。
每次遍历都将剩下的元素中最大的那个放到了序列的“最后”(除去了前面已经排好的那些元素)。
注意检测是否已经完成了排序,如果已完成就可以退出了。时间复杂度O(n2)
'''
def short_bubble_sort(a_list):
    """Sort ``a_list`` in place with bubble sort, stopping early.

    Each pass bubbles the largest remaining element to the end of the
    unsorted prefix; if a full pass makes no swap the list is already
    sorted and the function returns immediately. O(n^2) worst case.
    """
    unsorted_len = len(a_list) - 1
    swapped = True
    while unsorted_len > 0 and swapped:
        swapped = False
        for idx in range(unsorted_len):
            if a_list[idx] > a_list[idx + 1]:
                a_list[idx], a_list[idx + 1] = a_list[idx + 1], a_list[idx]
                swapped = True
        unsorted_len -= 1
# Demo: sort a sample list in place and print the result.
if __name__ == '__main__':
    a_list = [20, 40, 50, 22, 100, 90]
    short_bubble_sort(a_list)
    print(a_list)  # [20, 22, 40, 50, 90, 100]
| Lucky0604/algorithms | sort/bubble-sort.py | Python | mit | 1,125 | 0.002361 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import requests
class Brain():
    """Minimal interface for a reply-generating "brain".

    Subclasses override :meth:`think` to inspect the user session and
    produce a textual answer; this base implementation replies with nothing.
    """

    def think(self, userSession):
        """Return a reply for ``userSession``; the base class yields None."""
        return None
class WikiBrain(Brain):
    # Answers a user by looking up the first noun-like token of their last
    # utterance on Chinese Wikipedia and returning the article's intro text.
    maxN = ''
    maxX = ''
    # MediaWiki API query returning only the XML intro extract for a title.
    wikiAPI = u'https://zh.wikipedia.org/w/api.php?uselang=zh_tw&action=query&prop=extracts&format=xml&exintro=&titles='
    def load(self, info):
        # Placeholder hook for loading knowledge; currently only logs.
        print("load information")
    def think(self, userSession):
        # Scan the tagged words of the last utterance; for the first token
        # tagged 'n' (noun) or 'x', return its Wikipedia intro, or the
        # default "I don't understand" reply when the lookup is empty.
        # NOTE(review): assumes userSession["lastWords"] holds objects with
        # .word/.flag attributes (jieba posseg pairs) -- confirm with caller.
        # NOTE(review): word_n/word_x are populated nowhere and never read.
        word_n = {}
        word_x = {}
        result = u'不懂你的意思'
        for word in userSession["lastWords"]:
            # print(word)
            if word.flag == 'n' or word.flag =='x':
                wikiResult = self.findWiki(word)
                if wikiResult == '':
                    return result
                else:
                    return wikiResult
            else:
                pass
        return result
    def findWiki(self, word):
        # Fetch the article for this token and strip it down to plain text.
        # print(word)
        r = requests.get( self.wikiAPI+word.word )
        # print(r.encoding)
        #print(dir(r))
        return self.getExtract(r.text)
    def getExtract(self, wikiApiRes):
        # Pull the <extract> element out of the raw XML response and crudely
        # strip markup. The replace() order matters: '<' is removed before
        # the remaining 'p>'/'b>' fragments, so tags vanish in two steps.
        # Returns '' when the response carries no extract (missing article).
        if wikiApiRes.count('<extract')==0 :
            return ""
        result = wikiApiRes.split('<extract')[1].split('</extract>')[0]
        result = result.replace('xml:space="preserve">','')
        result = result.replace('<','')
        result = result.replace('p>','')
        result = result.replace('/b>','')
        result = result.replace('b>','')
        result = result.replace('/p>','')
        result = result.replace('>','')
        result = result.replace('br>','')
        return result
| taosheng/jarvis | socialBrainTest/python/brain.py | Python | apache-2.0 | 1,645 | 0.015309 |
# CMF/Plone skin script: compute a catalog index value ('BM' or None) for
# saved PloneFormGen entries that mention Brugia malayi.
# NOTE(review): `context` is bound by the Zope publisher; assumes
# FormSaveData2ContentEntry exposes getValue() for the named form fields --
# confirm against the form definition. Top-level `return` is legal only in
# Script (Python) objects, not plain modules.
if hasattr(context, 'portal_type') and context.portal_type == 'FormSaveData2ContentEntry':
    index = None
    # Entry references B. malayi either in its own field (any value other
    # than the literal string 'None') or via the infected-mosquito field.
    if context.getValue('brugia-malayi') != 'None' or ('B. malayi' in context.getValue('a-aegypti-infected-with-filariae')):
        if context.getValue('brugia-malayi') == '':
            # Empty field: treat as no usable value.
            index = None
        else:
            index = 'BM'
        return index
    else:
        return None
| uwosh/uwosh.filariasis | uwosh/filariasis/skins/uwosh.filariasis/indexByBMalayi.py | Python | gpl-2.0 | 393 | 0.007634 |
# Problem 19: Counting Sundays
# https://projecteuler.net/problem=19
def is_leapyear(year):
    """Return 1 if ``year`` is a Gregorian leap year, else 0.

    Kept as an int (not bool) so callers can add it straight into day counts.
    """
    return int(year % 4 == 0 and (year % 100 != 0 or year % 400 == 0))
# Days per month in a non-leap year, January..December; February's leap-day
# adjustment is applied by days_of_month().
month = [31, 28, 31, 30, 31, 30,
        31, 31, 30, 31, 30, 31]
def days_of_month(m, y):
    """Number of days in month ``m`` (1-12) of year ``y``; February gains a
    day in leap years."""
    leap_extra = is_leapyear(y) if m == 2 else 0
    return month[m - 1] + leap_extra
def days_of_year(y):
    """Total number of days in year ``y`` (365, plus one in leap years)."""
    return is_leapyear(y) + sum(month)
# date 1 Jan 1900 represented as (1, 1, 1900)
# 1 Jan 1900 was Monday, days is 1
# 7 Jan 1900 was Sunday, days is 7
def date_to_days(date):
    """Convert a (day, month, year) tuple to a running day count where
    1 Jan 1900 is day 1 (a Monday), so Sundays land on multiples of 7."""
    dy, mn, yr = date
    whole_years = sum(days_of_year(y) for y in range(1900, yr))
    whole_months = sum(days_of_month(m, yr) for m in range(1, mn))
    return whole_years + whole_months + dy
def is_sunday(days):
    """True when the running day count lands on a Sunday (day 7, 14, ...)."""
    return not days % 7
def cs():
    """Count the first-of-month Sundays during 1901..2000 (Euler #19)."""
    first_days = (date_to_days((1, m, y))
                  for y in range(1901, 2001)
                  for m in range(1, 13))
    return sum(1 for d in first_days if is_sunday(d))
#
def test():
    """Placeholder self-test hook; this solution ships no automated checks."""
    return 'No test'
def main():
    """Entry point: return the puzzle answer."""
    return cs()
# CLI: "python p019.py test" runs the (placeholder) self-test; any other
# invocation prints the puzzle answer.
if __name__ == '__main__':
    import sys
    if len(sys.argv) >= 2 and sys.argv[1] == 'test':
        print(test())
    else:
        print(main())
| yehnan/project_euler_python | p019.py | Python | mit | 1,291 | 0.015492 |
from tasty.types import conversions
from tasty.types import *
from tasty.types.driver import TestDriver
# Declared tasty protocol parameters (input bitlengths / dimension); the
# TestDriver records results when the protocol is executed by the framework.
__params__ = {'la': 32, 'lb': 32, 'da': 10}
driver = TestDriver()
def protocol(client, server, params):
    # Two-party setup phase: the server garbles two empty 764-bit unsigned
    # placeholders, ships both garbled values to the client, and the client
    # multiplies them and decodes the 1528-bit product back into a plain
    # Unsigned value.
    # NOTE(review): the 764/1528 bitlens are hard-coded and `params`
    # ('la'/'lb'/'da') is never consulted here -- confirm that is intended
    # for this generated setup script.
    server.ga = Garbled(val=Unsigned(bitlen=764, dim=[1], signed=False, passive=True, empty=True), signed=False, bitlen=764, dim=[1])
    server.gb = Garbled(val=Unsigned(bitlen=764, dim=[1], signed=False, passive=True, empty=True), signed=False, bitlen=764, dim=[1])
    conversions.Garbled_Garbled_send(server.ga, client.ga, 764, [1], False)
    conversions.Garbled_Garbled_send(server.gb, client.gb, 764, [1], False)
    client.gc = client.ga * client.gb
    client.c = Unsigned(val=client.gc, passive=True, signed=False, bitlen=1528, dim=[1])
| tastyproject/tasty | tasty/tests/functional/protocols/mul/garbled_server_server_client/protocol_setup_server.py | Python | gpl-3.0 | 756 | 0.005291 |
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Basic client for Google Cloud Speech API."""
import os
from google.cloud.client import Client as BaseClient
from google.cloud.environment_vars import DISABLE_GRPC
from google.cloud.speech._gax import GAPICSpeechAPI
from google.cloud.speech._http import HTTPSpeechAPI
from google.cloud.speech.sample import Sample
# Default transport choice: use gRPC (GAX) unless the
# GOOGLE_CLOUD_DISABLE_GRPC environment variable is set to a truthy string.
_USE_GAX = not os.getenv(DISABLE_GRPC, False)
class Client(BaseClient):
    """Client bundling the configuration needed for Speech API requests.

    :type credentials: :class:`~google.auth.credentials.Credentials`
    :param credentials: (Optional) OAuth2 credentials for this client. When
                        omitted (and no ``http`` object is given), they are
                        inferred from the environment.

    :type http: :class:`~httplib2.Http`
    :param http: (Optional) object with a ``request()`` method compatible
                 with :meth:`~httplib2.Http.request`, used to make requests.
                 When omitted, one is created bound to ``credentials``.

    :type use_gax: bool
    :param use_gax: (Optional) whether to use the gRPC (GAX) transport or
                    HTTP. When unset, falls back to the
                    ``GOOGLE_CLOUD_DISABLE_GRPC`` environment variable.
    """

    SCOPE = ('https://www.googleapis.com/auth/cloud-platform',)
    """The scopes required for authenticating as an API consumer."""

    _speech_api = None

    def __init__(self, credentials=None, http=None, use_gax=None):
        super(Client, self).__init__(credentials=credentials, http=http)
        # Resolve the transport choice once, honouring the env default.
        self._use_gax = _USE_GAX if use_gax is None else use_gax

    def sample(self, content=None, source_uri=None, stream=None, encoding=None,
               sample_rate=None):
        """Factory: build a Sample to use when making recognize requests.

        :type content: bytes
        :param content: (Optional) bytes containing audio data.

        :type source_uri: str
        :param source_uri: (Optional) URI of a file holding audio bytes, as
                           specified in RecognitionConfig. Only Google Cloud
                           Storage URIs of the form
                           ``gs://bucket_name/object_name`` are supported.

        :type stream: file
        :param stream: (Optional) file-like object to stream.

        :type encoding: str
        :param encoding: encoding of the audio data sent in all
                         RecognitionAudio messages; one of
                         :attr:`~.Encoding.LINEAR16`,
                         :attr:`~.Encoding.FLAC`, :attr:`~.Encoding.MULAW`,
                         :attr:`~.Encoding.AMR`, :attr:`~.Encoding.AMR_WB`.

        :type sample_rate: int
        :param sample_rate: sample rate in Hertz of the audio sent in all
                            requests; valid values are 8000-48000. 16000 Hz
                            is optimal -- otherwise use the source's native
                            rate rather than re-sampling.

        :rtype: :class:`~google.cloud.speech.sample.Sample`
        :returns: Instance of ``Sample``.
        """
        return Sample(content=content, source_uri=source_uri, stream=stream,
                      encoding=encoding, sample_rate=sample_rate, client=self)

    @property
    def speech_api(self):
        """Helper for speech-related API calls (created lazily, then cached)."""
        if self._speech_api is None:
            api_factory = GAPICSpeechAPI if self._use_gax else HTTPSpeechAPI
            self._speech_api = api_factory(self)
        return self._speech_api
| daspecster/google-cloud-python | speech/google/cloud/speech/client.py | Python | apache-2.0 | 4,677 | 0 |
import asyncio
import os
import time
import peru.cache
from shared import assert_contents, create_dir, make_synchronous, PeruTest
class CacheTest(PeruTest):
    @make_synchronous
    def setUp(self):
        # Fresh cache rooted in a temp dir, plus a small on-disk tree that
        # is imported once and re-exported by the individual tests.
        self.cache = yield from peru.cache.Cache(create_dir())
        self.content = {
            'a': 'foo',
            'b/c': 'bar',
            'b/d': 'baz',
        }
        self.content_dir = create_dir(self.content)
        self.content_tree = yield from self.cache.import_tree(self.content_dir)
    @make_synchronous
    def test_basic_export(self):
        # Exporting the imported tree into an empty dir reproduces it.
        export_dir = create_dir()
        yield from self.cache.export_tree(self.content_tree, export_dir)
        assert_contents(export_dir, self.content)
    @make_synchronous
    def test_export_force_with_preexisting_files(self):
        # Create a working tree with a conflicting file.
        dirty_content = {'a': 'junk'}
        export_dir = create_dir(dirty_content)
        # Export should fail by default.
        with self.assertRaises(peru.cache.DirtyWorkingCopyError):
            yield from self.cache.export_tree(self.content_tree, export_dir)
        assert_contents(export_dir, dirty_content)
        # But it should suceed with the force flag.
        yield from self.cache.export_tree(
            self.content_tree, export_dir, force=True)
        assert_contents(export_dir, self.content)
@make_synchronous
def test_export_force_with_changed_files(self):
export_dir = create_dir()
yield from self.cache.export_tree(self.content_tree, export_dir)
# If we dirty a file, a resync should fail.
with open(os.path.join(export_dir, 'a'), 'w') as f:
f.write('dirty')
with self.assertRaises(peru.cache.DirtyWorkingCopyError):
yield from self.cache.export_tree(
self.content_tree, export_dir, previous_tree=self.content_tree)
# But it should succeed with the --force flag.
yield from self.cache.export_tree(
self.content_tree, export_dir, force=True,
previous_tree=self.content_tree)
assert_contents(export_dir, self.content)
@make_synchronous
def test_multiple_imports(self):
new_content = {'fee/fi': 'fo fum'}
new_tree = yield from self.cache.import_tree(create_dir(new_content))
export_dir = create_dir()
yield from self.cache.export_tree(new_tree, export_dir)
assert_contents(export_dir, new_content)
@make_synchronous
def test_import_with_gitignore(self):
# Make sure our git imports don't get confused by .gitignore files.
new_content = {'fee/fi': 'fo fum', '.gitignore': 'fee/'}
new_tree = yield from self.cache.import_tree(create_dir(new_content))
export_dir = create_dir()
yield from self.cache.export_tree(new_tree, export_dir)
assert_contents(export_dir, new_content)
@make_synchronous
def test_import_with_files(self):
all_content = {'foo': '',
'bar': '',
'baz/bing': ''}
test_dir = create_dir(all_content)
tree = yield from self.cache.import_tree(
test_dir, picks=['foo', 'baz'])
expected_content = {'foo': '',
'baz/bing': ''}
out_dir = create_dir()
yield from self.cache.export_tree(tree, out_dir)
assert_contents(out_dir, expected_content)
@make_synchronous
def test_export_with_existing_files(self):
# Create a dir with an existing file that doesn't conflict.
more_content = {'untracked': 'stuff'}
export_dir = create_dir(more_content)
yield from self.cache.export_tree(self.content_tree, export_dir)
expected_content = self.content.copy()
expected_content.update(more_content)
assert_contents(export_dir, expected_content)
# But if we try to export twice, the export_dir will now have
# conflicting files, and export_tree() should throw.
with self.assertRaises(peru.cache.DirtyWorkingCopyError):
yield from self.cache.export_tree(self.content_tree, export_dir)
# By default, git's checkout safety doesn't protect files that are
# .gitignore'd. Make sure we still throw the right errors in the
# presence of a .gitignore file.
with open(os.path.join(export_dir, '.gitignore'), 'w') as f:
f.write('*\n') # .gitignore everything
with self.assertRaises(peru.cache.DirtyWorkingCopyError):
yield from self.cache.export_tree(self.content_tree, export_dir)
@make_synchronous
def test_previous_tree(self):
export_dir = create_dir(self.content)
# Create some new content.
new_content = self.content.copy()
new_content['a'] += ' different'
new_content['newfile'] = 'newfile stuff'
new_dir = create_dir(new_content)
new_tree = yield from self.cache.import_tree(new_dir)
# Now use cache.export_tree to move from the original content to the
# different content.
yield from self.cache.export_tree(
new_tree, export_dir, previous_tree=self.content_tree)
assert_contents(export_dir, new_content)
# Now do the same thing again, but use a dirty working copy. This
# should cause an error.
dirty_content = self.content.copy()
dirty_content['a'] += ' dirty'
dirty_dir = create_dir(dirty_content)
with self.assertRaises(peru.cache.DirtyWorkingCopyError):
yield from self.cache.export_tree(
new_tree, dirty_dir, previous_tree=self.content_tree)
# But if the file is simply missing, it should work.
os.remove(os.path.join(dirty_dir, 'a'))
yield from self.cache.export_tree(
new_tree, dirty_dir, previous_tree=self.content_tree)
assert_contents(dirty_dir, new_content)
# Make sure we get an error even if the dirty file is unchanged between
# the previous tree and the new one.
no_conflict_dirty_content = self.content.copy()
no_conflict_dirty_content['b/c'] += ' dirty'
no_conflict_dirty_dir = create_dir(no_conflict_dirty_content)
with self.assertRaises(peru.cache.DirtyWorkingCopyError):
yield from self.cache.export_tree(new_tree, no_conflict_dirty_dir,
previous_tree=self.content_tree)
@make_synchronous
def test_missing_files_in_previous_tree(self):
'''Export should allow missing files, and it should recreate them.'''
export_dir = create_dir()
# Nothing in content_tree exists yet, so this export should be the same
# as if previous_tree wasn't specified.
yield from self.cache.export_tree(
self.content_tree, export_dir, previous_tree=self.content_tree)
assert_contents(export_dir, self.content)
# Make sure the same applies with just a single missing file.
os.remove(os.path.join(export_dir, 'a'))
yield from self.cache.export_tree(
self.content_tree, export_dir, previous_tree=self.content_tree)
assert_contents(export_dir, self.content)
@make_synchronous
def test_merge_trees(self):
merged_tree = yield from self.cache.merge_trees(
self.content_tree, self.content_tree, 'subdir')
expected_content = dict(self.content)
for path, content in self.content.items():
expected_content[os.path.join('subdir', path)] = content
export_dir = create_dir()
yield from self.cache.export_tree(merged_tree, export_dir)
assert_contents(export_dir, expected_content)
with self.assertRaises(peru.cache.MergeConflictError):
# subdir/ is already populated, so this merge should throw.
yield from self.cache.merge_trees(
merged_tree, self.content_tree, 'subdir')
@make_synchronous
def test_merge_with_deep_prefix(self):
'''This test was inspired by a bug on Windows where we would give git a
backslash-separated merge prefix, even though git demands forward slash
as a path separator.'''
content = {'file': 'stuff'}
content_dir = create_dir(content)
tree = yield from self.cache.import_tree(content_dir)
prefixed_tree = yield from self.cache.merge_trees(None, tree, 'a/b/')
export_dir = create_dir()
yield from self.cache.export_tree(prefixed_tree, export_dir)
assert_contents(export_dir, {'a/b/file': 'stuff'})
@make_synchronous
def test_read_file(self):
a_content = yield from self.cache.read_file(self.content_tree, 'a')
bc_content = yield from self.cache.read_file(self.content_tree, 'b/c')
self.assertEqual(b'foo', a_content)
self.assertEqual(b'bar', bc_content)
with self.assertRaises(FileNotFoundError):
yield from self.cache.read_file(self.content_tree, 'nonexistent')
with self.assertRaises(IsADirectoryError):
yield from self.cache.read_file(self.content_tree, 'b')
# A helper method for several tests below below.
@asyncio.coroutine
def do_excludes_and_files_test(self, excludes, picks, expected):
tree = yield from self.cache.import_tree(
self.content_dir, excludes=excludes, picks=picks)
out_dir = create_dir()
yield from self.cache.export_tree(tree, out_dir)
assert_contents(out_dir, expected)
@make_synchronous
def test_import_with_specific_file(self):
yield from self.do_excludes_and_files_test(
excludes=[], picks=['a'], expected={'a': 'foo'})
@make_synchronous
def test_import_with_specific_dir(self):
yield from self.do_excludes_and_files_test(
excludes=[], picks=['b'], expected={'b/c': 'bar', 'b/d': 'baz'})
@make_synchronous
def test_import_with_excluded_file(self):
yield from self.do_excludes_and_files_test(
excludes=['a'], picks=[], expected={'b/c': 'bar', 'b/d': 'baz'})
@make_synchronous
def test_import_with_excluded_dir(self):
yield from self.do_excludes_and_files_test(
excludes=['b'], picks=[], expected={'a': 'foo'})
@make_synchronous
def test_import_with_excludes_and_files(self):
yield from self.do_excludes_and_files_test(
excludes=['b/c'], picks=['b'], expected={'b/d': 'baz'})
@make_synchronous
def test_ls_tree(self):
# Use the recursive case to get valid entries for each file. We could
# hardcode these, but it would be messy and annoying to maintain.
entries = yield from self.cache.ls_tree(
self.content_tree, recursive=True)
assert entries.keys() == {'a', 'b', 'b/c', 'b/d'}
assert (entries['a'].type == entries['b/c'].type ==
entries['b/d'].type == peru.cache.BLOB_TYPE)
assert entries['b'].type == peru.cache.TREE_TYPE
# Check the non-recursive, non-path case.
self.assertDictEqual(
{'a': entries['a'], 'b': entries['b']},
(yield from self.cache.ls_tree(self.content_tree)))
# Check the single file case, and make sure paths are normalized.
self.assertDictEqual(
{'b/c': entries['b/c']},
(yield from self.cache.ls_tree(self.content_tree, 'b/c//./')))
# Check the single dir case. (Trailing slash shouldn't matter, because
# we nomalize it, but git will do the wrong thing if we forget
# normalization.)
self.assertDictEqual(
{'b': entries['b']},
(yield from self.cache.ls_tree(self.content_tree, 'b/')))
# Check the recursive dir case.
self.assertDictEqual(
{'b': entries['b'], 'b/c': entries['b/c'], 'b/d': entries['b/d']},
(yield from self.cache.ls_tree(
self.content_tree, 'b', recursive=True)))
# Make sure that we don't skip over a target file in recursive mode.
self.assertDictEqual(
{'b/c': entries['b/c']},
(yield from self.cache.ls_tree(
self.content_tree, 'b/c', recursive=True)))
@make_synchronous
def test_modify_tree(self):
base_dir = create_dir({'a': 'foo', 'b/c': 'bar'})
base_tree = yield from self.cache.import_tree(base_dir)
entries = yield from self.cache.ls_tree(base_tree, recursive=True)
cases = []
# Test regular deletions.
cases.append(({'a': None},
{'b/c': 'bar'}))
cases.append(({'a//./': None}, # Paths should get normalized.
{'b/c': 'bar'}))
cases.append(({'b': None},
{'a': 'foo'}))
cases.append(({'b/c': None},
{'a': 'foo'}))
cases.append(({'x/y/z': None},
{'a': 'foo', 'b/c': 'bar'}))
cases.append(({'b/x': None},
{'a': 'foo', 'b/c': 'bar'}))
# Test the case where we try to delete below a file.
cases.append(({'a/x': None},
{'a': 'foo', 'b/c': 'bar'}))
# Test insertions.
cases.append(({'b': entries['a']},
{'a': 'foo', 'b': 'foo'}))
cases.append(({'x': entries['a']},
{'a': 'foo', 'x': 'foo', 'b/c': 'bar'}))
cases.append(({'x': entries['b']},
{'a': 'foo', 'b/c': 'bar', 'x/c': 'bar'}))
cases.append(({'d/e/f': entries['a']},
{'a': 'foo', 'b/c': 'bar', 'd/e/f': 'foo'}))
cases.append(({'d/e/f': entries['b']},
{'a': 'foo', 'b/c': 'bar', 'd/e/f/c': 'bar'}))
for modifications, result in cases:
modified_tree = yield from self.cache.modify_tree(
base_tree, modifications)
modified_dir = create_dir()
yield from self.cache.export_tree(modified_tree, modified_dir)
error_msg = ('modify_tree failed to give result {} '
'for modifications {}'.format(
repr(result), repr(modifications)))
assert_contents(modified_dir, result, message=error_msg)
@make_synchronous
def test_git_attributes(self):
# Setting the 'text' attribute when files contain Windows-style
# newlines makes them appear dirty, which leads to errors where the
# cache thinks its own checked out files are dirty. (I don't honestly
# understand all the details.) The cache's git calls will read
# .gitattributes in the sync dir, so we need to set our own attributes
# in the $GIT_DIR to override. Everything in this test has to be done
# in binary mode or it will all get muddled up when we actually run it
# on Windows.
windows_content = {'file': b'windows newline\r\n'}
gitattributes_content = {'.gitattributes': b'* text'}
both_content = windows_content.copy()
both_content.update(gitattributes_content)
windows_dir = create_dir(windows_content)
tree = yield from self.cache.import_tree(windows_dir)
out_dir = create_dir(gitattributes_content)
# This export fails without the fix mentioned above.
yield from self.cache.export_tree(tree, out_dir)
assert_contents(out_dir, both_content, binary=True)
@make_synchronous
def test_touched_file(self):
# Bumping the mtime on a file makes it appear dirty to `git
# diff-files`. However, when the index is refreshed with `git
# update-index`, the dirtiness should go away. This test guarantees
# that we do that refresh, both with and without a cached index file.
# Note that because the index file only has an mtime resolution of 1
# second, we have to artificially inflate the mtime to guarantee that
# the file will actually appear dirty.
export_dir = create_dir()
a_path = os.path.join(export_dir, 'a')
t = time.time()
def bump_mtime_one_minute():
nonlocal t
t += 60 # Add a whole minute to the mtime we set.
os.utime(a_path, (t, t))
# Do the first export.
yield from self.cache.export_tree(self.content_tree, export_dir)
# Touch a and rerun the export with no cached index.
bump_mtime_one_minute()
yield from self.cache.export_tree(
self.content_tree, export_dir, previous_tree=self.content_tree)
# Create a cached index file.
index_dir = create_dir()
index_file = os.path.join(index_dir, 'test_index_file')
yield from self.cache.export_tree(
self.content_tree, export_dir, previous_tree=self.content_tree,
previous_index_file=index_file)
# Finally, touch a again and rerun the export using the cached index.
bump_mtime_one_minute()
yield from self.cache.export_tree(
self.content_tree, export_dir, previous_tree=self.content_tree,
previous_index_file=index_file)
@make_synchronous
def test_import_ignores_dotperu(self):
# We have a security problem similar to git's if we allow '.peru'
# directories in the trees we write to disk. (See
# https://github.com/blog/1938-vulnerability-announced-update-your-git-clients.)
# We need to check that *all* '.peru' dirs are ignored in imported
# trees, including inside of nested subdirectories. And as in the git
# issue, we need to do this in a case-insensitive way.
content = {
'foo': 'bar',
'.peru/foo1': 'bar',
'.PERU/foo2': 'bar',
'.pErU/foo3': 'bar',
'dir/foo': 'bar',
'dir/.peru/foo1': 'bar',
'dir/.PERU/foo2': 'bar',
'dir/.peRU/foo3': 'bar',
}
tree = yield from self.cache.import_tree(create_dir(content))
entries = yield from self.cache.ls_tree(tree, recursive=True)
self.assertEqual({'foo', 'dir', 'dir/foo'}, entries.keys(),
"Expected all of the .peru dirs to be omitted.")
@make_synchronous
def test_validate_capitalizations(self):
# Assert that the set of capitalizations is the right size, both before
# and after deduplication.
self.assertEqual(len(peru.cache.DOTPERU_CAPITALIZATIONS), 16)
self.assertEqual(len(set(peru.cache.DOTPERU_CAPITALIZATIONS)), 16)
# Check that every capitalization actually spells ".peru".
for capitalization in peru.cache.DOTPERU_CAPITALIZATIONS:
self.assertEqual(capitalization.lower(), ".peru")
| oconnor663/peru | tests/test_cache.py | Python | mit | 18,779 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.