repo_name stringlengths 7 65 | path stringlengths 5 185 | copies stringlengths 1 4 | size stringlengths 4 6 | content stringlengths 977 990k | license stringclasses 14 values | hash stringlengths 32 32 | line_mean float64 7.18 99.4 | line_max int64 31 999 | alpha_frac float64 0.25 0.95 | ratio float64 1.5 7.84 | autogenerated bool 1 class | config_or_test bool 2 classes | has_no_keywords bool 2 classes | has_few_assignments bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
brython-dev/brython | www/src/Lib/unittest/signals.py | 1020 | 2403 | import signal
import weakref
from functools import wraps
__unittest = True
class _InterruptHandler(object):
def __init__(self, default_handler):
self.called = False
self.original_handler = default_handler
if isinstance(default_handler, int):
if default_handler == signal.SIG_DFL:
# Pretend it's signal.default_int_handler instead.
default_handler = signal.default_int_handler
elif default_handler == signal.SIG_IGN:
# Not quite the same thing as SIG_IGN, but the closest we
# can make it: do nothing.
def default_handler(unused_signum, unused_frame):
pass
else:
raise TypeError("expected SIGINT signal handler to be "
"signal.SIG_IGN, signal.SIG_DFL, or a "
"callable object")
self.default_handler = default_handler
def __call__(self, signum, frame):
installed_handler = signal.getsignal(signal.SIGINT)
if installed_handler is not self:
# if we aren't the installed handler, then delegate immediately
# to the default handler
self.default_handler(signum, frame)
if self.called:
self.default_handler(signum, frame)
self.called = True
for result in _results.keys():
result.stop()
# Registry of active TestResult objects.  Weak keys, so a result that the
# test machinery drops can be garbage-collected without deregistration.
_results = weakref.WeakKeyDictionary()


def registerResult(result):
    # Track *result* so a Ctrl-C can call its stop() method.
    _results[result] = 1


def removeResult(result):
    # Forget *result*; return True if it had been registered.
    return bool(_results.pop(result, None))


# The single installed _InterruptHandler instance (None until installed).
_interrupt_handler = None


def installHandler():
    """Install a SIGINT handler that interrupts the current test run."""
    global _interrupt_handler
    if _interrupt_handler is None:
        # Remember whatever handler was installed before us so it can
        # be restored by removeHandler().
        default_handler = signal.getsignal(signal.SIGINT)
        _interrupt_handler = _InterruptHandler(default_handler)
        signal.signal(signal.SIGINT, _interrupt_handler)


def removeHandler(method=None):
    """Restore the original SIGINT handler; also usable as a decorator.

    With *method* given, return a wrapper that removes the handler for
    the duration of the call and reinstates the handler that was active
    when the wrapper started.
    """
    if method is not None:
        @wraps(method)
        def inner(*args, **kwargs):
            initial = signal.getsignal(signal.SIGINT)
            removeHandler()
            try:
                return method(*args, **kwargs)
            finally:
                # Restore whatever was installed before the call.
                signal.signal(signal.SIGINT, initial)
        return inner

    global _interrupt_handler
    if _interrupt_handler is not None:
        signal.signal(signal.SIGINT, _interrupt_handler.original_handler)
| bsd-3-clause | a92ca70bf3fc4d747f97f7537938c45b | 32.84507 | 75 | 0.605909 | 4.47486 | false | false | false | false |
brython-dev/brython | www/src/Lib/random.py | 1 | 31991 | """Random variable generators.
bytes
-----
uniform bytes (values between 0 and 255)
integers
--------
uniform within range
sequences
---------
pick random element
pick random sample
pick weighted random sample
generate random permutation
distributions on the real line:
------------------------------
uniform
triangular
normal (Gaussian)
lognormal
negative exponential
gamma
beta
pareto
Weibull
distributions on the circle (angles 0 to 2pi)
---------------------------------------------
circular uniform
von Mises
General notes on the underlying Mersenne Twister core generator:
* The period is 2**19937-1.
* It is one of the most extensively tested generators in existence.
* The random() method is implemented in C, executes in a single Python step,
and is, therefore, threadsafe.
"""
# Translated by Guido van Rossum from C source provided by
# Adrian Baddeley. Adapted by Raymond Hettinger for use with
# the Mersenne Twister and os.urandom() core generators.
from warnings import warn as _warn
from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil
from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
from math import tau as TWOPI, floor as _floor, isfinite as _isfinite
from os import urandom as _urandom
from _collections_abc import Set as _Set, Sequence as _Sequence
from operator import index as _index
from itertools import accumulate as _accumulate, repeat as _repeat
from bisect import bisect as _bisect
import os as _os
import _random
try:
# hashlib is pretty heavy to load, try lean internal module first
from _sha512 import sha512 as _sha512
except ImportError:
# fallback to official implementation
from hashlib import sha512 as _sha512
__all__ = [
"Random",
"SystemRandom",
"betavariate",
"choice",
"choices",
"expovariate",
"gammavariate",
"gauss",
"getrandbits",
"getstate",
"lognormvariate",
"normalvariate",
"paretovariate",
"randbytes",
"randint",
"random",
"randrange",
"sample",
"seed",
"setstate",
"shuffle",
"triangular",
"uniform",
"vonmisesvariate",
"weibullvariate",
]
NV_MAGICCONST = 4 * _exp(-0.5) / _sqrt(2.0)
LOG4 = _log(4.0)
SG_MAGICCONST = 1.0 + _log(4.5)
BPF = 53 # Number of bits in a float
RECIP_BPF = 2 ** -BPF
_ONE = 1
class Random(_random.Random):
    """Random number generator base class used by bound module functions.

    Used to instantiate instances of Random to get generators that don't
    share state.

    Class Random can also be subclassed if you want to use a different basic
    generator of your own devising: in that case, override the following
    methods:  random(), seed(), getstate(), and setstate().
    Optionally, implement a getrandbits() method so that randrange()
    can cover arbitrarily large ranges.

    """

    VERSION = 3     # used by getstate/setstate

    def __init__(self, x=None):
        """Initialize an instance.

        Optional argument x controls seeding, as for Random.seed().
        """

        self.seed(x)
        # Cached second normal deviate for gauss(); None means "none cached".
        self.gauss_next = None
    def seed(self, a=None, version=2):
        """Initialize internal state from a seed.

        The only supported seed types are None, int, float,
        str, bytes, and bytearray.

        None or no argument seeds from current time or from an operating
        system specific randomness source if available.

        If *a* is an int, all bits are used.

        For version 2 (the default), all of the bits are used if *a* is a str,
        bytes, or bytearray.  For version 1 (provided for reproducing random
        sequences from older versions of Python), the algorithm for str and
        bytes generates a narrower range of seeds.

        """

        if version == 1 and isinstance(a, (str, bytes)):
            # Legacy (pre-3.2) string hashing, kept for reproducibility.
            a = a.decode('latin-1') if isinstance(a, bytes) else a
            x = ord(a[0]) << 7 if a else 0
            for c in map(ord, a):
                x = ((1000003 * x) ^ c) & 0xFFFFFFFFFFFFFFFF
            x ^= len(a)
            a = -2 if x == -1 else x

        elif version == 2 and isinstance(a, (str, bytes, bytearray)):
            if isinstance(a, str):
                a = a.encode()
            # Append a SHA-512 digest so all input bits influence the seed.
            a = int.from_bytes(a + _sha512(a).digest())

        elif not isinstance(a, (type(None), int, float, str, bytes, bytearray)):
            raise TypeError('The only supported seed types are: None,\n'
                            'int, float, str, bytes, and bytearray.')

        super().seed(a)
        # The cached gauss() value was produced by the old state; drop it.
        self.gauss_next = None

    def getstate(self):
        """Return internal state; can be passed to setstate() later."""
        return self.VERSION, super().getstate(), self.gauss_next

    def setstate(self, state):
        """Restore internal state from object returned by getstate()."""
        version = state[0]
        if version == 3:
            version, internalstate, self.gauss_next = state
            super().setstate(internalstate)
        elif version == 2:
            version, internalstate, self.gauss_next = state
            # In version 2, the state was saved as signed ints, which causes
            #   inconsistencies between 32/64-bit systems. The state is
            #   really unsigned 32-bit ints, so we convert negative ints from
            #   version 2 to positive longs for version 3.
            try:
                internalstate = tuple(x % (2 ** 32) for x in internalstate)
            except ValueError as e:
                raise TypeError from e
            super().setstate(internalstate)
        else:
            raise ValueError("state with version %s passed to "
                             "Random.setstate() of version %s" %
                             (version, self.VERSION))
## -------------------------------------------------------
## ---- Methods below this point do not need to be overridden or extended
## ---- when subclassing for the purpose of using a different core generator.
## -------------------- pickle support -------------------
# Issue 17489: Since __reduce__ was defined to fix #759889 this is no
# longer called; we leave it here because it has been here since random was
# rewritten back in 2001 and why risk breaking something.
    # Issue 17489: Since __reduce__ was defined to fix #759889 this is no
    # longer called; we leave it here because it has been here since random was
    # rewritten back in 2001 and why risk breaking something.
    def __getstate__(self):  # for pickle
        return self.getstate()

    def __setstate__(self, state):  # for pickle
        self.setstate(state)

    def __reduce__(self):
        # Pickle as (class, no ctor args, state) so subclasses round-trip.
        return self.__class__, (), self.getstate()

    def __init_subclass__(cls, /, **kwargs):
        """Control how subclasses generate random integers.

        The algorithm a subclass can use depends on the random() and/or
        getrandbits() implementation available to it and determines
        whether it can generate random integers from arbitrarily large
        ranges.
        """

        # Walk the MRO: the first class that defines _randbelow,
        # getrandbits, or random decides which strategy the subclass uses.
        for c in cls.__mro__:
            if '_randbelow' in c.__dict__:
                # just inherit it
                break
            if 'getrandbits' in c.__dict__:
                cls._randbelow = cls._randbelow_with_getrandbits
                break
            if 'random' in c.__dict__:
                cls._randbelow = cls._randbelow_without_getrandbits
                break
def _randbelow_with_getrandbits(self, n):
"Return a random int in the range [0,n). Defined for n > 0."
getrandbits = self.getrandbits
k = n.bit_length() # don't use (n-1) here because n can be 1
r = getrandbits(k) # 0 <= r < 2**k
while r >= n:
r = getrandbits(k)
return r
    def _randbelow_without_getrandbits(self, n, maxsize=1<<BPF):
        """Return a random int in the range [0,n).  Defined for n > 0.

        The implementation does not use getrandbits, but only random.
        """

        random = self.random
        if n >= maxsize:
            # random() only has BPF bits of entropy; beyond maxsize the
            # result cannot be uniform, so warn and do the best we can.
            _warn("Underlying random() generator does not supply \n"
                  "enough bits to choose from a population range this large.\n"
                  "To remove the range limitation, add a getrandbits() method.")
            return _floor(random() * n)
        rem = maxsize % n
        limit = (maxsize - rem) / maxsize   # int(limit * maxsize) % n == 0
        # Reject draws in the final partial bucket to keep the result uniform.
        r = random()
        while r >= limit:
            r = random()
        return _floor(r * maxsize) % n

    # Default strategy; __init_subclass__ may rebind this for subclasses.
    _randbelow = _randbelow_with_getrandbits
## --------------------------------------------------------
## ---- Methods below this point generate custom distributions
## ---- based on the methods defined above. They do not
## ---- directly touch the underlying generator and only
## ---- access randomness through the methods: random(),
## ---- getrandbits(), or _randbelow().
## -------------------- bytes methods ---------------------
def randbytes(self, n):
"""Generate n random bytes."""
return self.getrandbits(n * 8).to_bytes(n, 'little')
## -------------------- integer methods -------------------
    def randrange(self, start, stop=None, step=_ONE):
        """Choose a random item from range(start, stop[, step]).

        This fixes the problem with randint() which includes the
        endpoint; in Python this is usually not what you want.

        """

        # This code is a bit messy to make it fast for the
        # common case while still doing adequate error checking.
        try:
            istart = _index(start)
        except TypeError:
            # Non-integer start: deprecated; truncation must be exact.
            istart = int(start)
            if istart != start:
                _warn('randrange() will raise TypeError in the future',
                      DeprecationWarning, 2)
                raise ValueError("non-integer arg 1 for randrange()")
            _warn('non-integer arguments to randrange() have been deprecated '
                  'since Python 3.10 and will be removed in a subsequent '
                  'version',
                  DeprecationWarning, 2)
        if stop is None:
            # We don't check for "step != 1" because it hasn't been
            # type checked and converted to an integer yet.
            if step is not _ONE:
                raise TypeError('Missing a non-None stop argument')
            if istart > 0:
                return self._randbelow(istart)
            raise ValueError("empty range for randrange()")

        # stop argument supplied.
        try:
            istop = _index(stop)
        except TypeError:
            istop = int(stop)
            if istop != stop:
                _warn('randrange() will raise TypeError in the future',
                      DeprecationWarning, 2)
                raise ValueError("non-integer stop for randrange()")
            _warn('non-integer arguments to randrange() have been deprecated '
                  'since Python 3.10 and will be removed in a subsequent '
                  'version',
                  DeprecationWarning, 2)
        width = istop - istart
        try:
            istep = _index(step)
        except TypeError:
            istep = int(step)
            if istep != step:
                _warn('randrange() will raise TypeError in the future',
                      DeprecationWarning, 2)
                raise ValueError("non-integer step for randrange()")
            _warn('non-integer arguments to randrange() have been deprecated '
                  'since Python 3.10 and will be removed in a subsequent '
                  'version',
                  DeprecationWarning, 2)
        # Fast path.
        if istep == 1:
            if width > 0:
                return istart + self._randbelow(width)
            raise ValueError("empty range for randrange() (%d, %d, %d)" % (istart, istop, width))

        # Non-unit step argument supplied.
        if istep > 0:
            # Number of reachable values, rounding the division up.
            n = (width + istep - 1) // istep
        elif istep < 0:
            n = (width + istep + 1) // istep
        else:
            raise ValueError("zero step for randrange()")
        if n <= 0:
            raise ValueError("empty range for randrange()")
        return istart + istep * self._randbelow(n)
def randint(self, a, b):
"""Return random integer in range [a, b], including both end points.
"""
return self.randrange(a, b+1)
## -------------------- sequence methods -------------------
def choice(self, seq):
"""Choose a random element from a non-empty sequence."""
if not seq:
raise IndexError('Cannot choose from an empty sequence')
return seq[self._randbelow(len(seq))]
def shuffle(self, x):
"""Shuffle list x in place, and return None."""
randbelow = self._randbelow
for i in reversed(range(1, len(x))):
# pick an element in x[:i+1] with which to exchange x[i]
j = randbelow(i + 1)
x[i], x[j] = x[j], x[i]
    def sample(self, population, k, *, counts=None):
        """Chooses k unique random elements from a population sequence.

        Returns a new list containing elements from the population while
        leaving the original population unchanged.  The resulting list is
        in selection order so that all sub-slices will also be valid random
        samples.  This allows raffle winners (the sample) to be partitioned
        into grand prize and second place winners (the subslices).

        Members of the population need not be hashable or unique.  If the
        population contains repeats, then each occurrence is a possible
        selection in the sample.

        Repeated elements can be specified one at a time or with the optional
        counts parameter.  For example:

            sample(['red', 'blue'], counts=[4, 2], k=5)

        is equivalent to:

            sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5)

        To choose a sample from a range of integers, use range() for the
        population argument.  This is especially fast and space efficient
        for sampling from a large population:

            sample(range(10000000), 60)

        """

        # Sampling without replacement entails tracking either potential
        # selections (the pool) in a list or previous selections in a set.

        # When the number of selections is small compared to the
        # population, then tracking selections is efficient, requiring
        # only a small set and an occasional reselection.  For
        # a larger number of selections, the pool tracking method is
        # preferred since the list takes less space than the
        # set and it doesn't suffer from frequent reselections.

        # The number of calls to _randbelow() is kept at or near k, the
        # theoretical minimum.  This is important because running time
        # is dominated by _randbelow() and because it extracts the
        # least entropy from the underlying random number generators.

        # Memory requirements are kept to the smaller of a k-length
        # set or an n-length list.

        # There are other sampling algorithms that do not require
        # auxiliary memory, but they were rejected because they made
        # too many calls to _randbelow(), making them slower and
        # causing them to eat more entropy than necessary.

        if not isinstance(population, _Sequence):
            raise TypeError("Population must be a sequence. "
                            "For dicts or sets, use sorted(d).")
        n = len(population)
        if counts is not None:
            # Sample flat indexes over the expanded population, then map
            # each index back to its element with a bisect over the
            # cumulative counts (avoids materializing the expansion).
            cum_counts = list(_accumulate(counts))
            if len(cum_counts) != n:
                raise ValueError('The number of counts does not match the population')
            total = cum_counts.pop()
            if not isinstance(total, int):
                raise TypeError('Counts must be integers')
            if total <= 0:
                raise ValueError('Total of counts must be greater than zero')
            selections = self.sample(range(total), k=k)
            bisect = _bisect
            return [population[bisect(cum_counts, s)] for s in selections]
        randbelow = self._randbelow
        if not 0 <= k <= n:
            raise ValueError("Sample larger than population or is negative")
        result = [None] * k
        setsize = 21        # size of a small set minus size of an empty list
        if k > 5:
            setsize += 4 ** _ceil(_log(k * 3, 4))  # table size for big sets
        if n <= setsize:
            # An n-length list is smaller than a k-length set.
            # Invariant:  non-selected at pool[0 : n-i]
            pool = list(population)
            for i in range(k):
                j = randbelow(n - i)
                result[i] = pool[j]
                pool[j] = pool[n - i - 1]  # move non-selected item into vacancy
        else:
            selected = set()
            selected_add = selected.add
            for i in range(k):
                j = randbelow(n)
                while j in selected:
                    j = randbelow(n)
                selected_add(j)
                result[i] = population[j]
        return result

    def choices(self, population, weights=None, *, cum_weights=None, k=1):
        """Return a k sized list of population elements chosen with replacement.

        If the relative weights or cumulative weights are not specified,
        the selections are made with equal probability.

        """
        random = self.random
        n = len(population)
        if cum_weights is None:
            if weights is None:
                floor = _floor
                n += 0.0    # convert to float for a small speed improvement
                return [population[floor(random() * n)] for i in _repeat(None, k)]
            try:
                cum_weights = list(_accumulate(weights))
            except TypeError:
                # A bare int here is almost always a misplaced k argument.
                if not isinstance(weights, int):
                    raise
                k = weights
                raise TypeError(
                    f'The number of choices must be a keyword argument: {k=}'
                ) from None
        elif weights is not None:
            raise TypeError('Cannot specify both weights and cumulative weights')
        if len(cum_weights) != n:
            raise ValueError('The number of weights does not match the population')
        total = cum_weights[-1] + 0.0   # convert to float
        if total <= 0.0:
            raise ValueError('Total of weights must be greater than zero')
        if not _isfinite(total):
            raise ValueError('Total of weights must be finite')
        bisect = _bisect
        hi = n - 1
        return [population[bisect(cum_weights, random() * total, 0, hi)]
                for i in _repeat(None, k)]
## -------------------- real-valued distributions -------------------
def uniform(self, a, b):
"Get a random number in the range [a, b) or [a, b] depending on rounding."
return a + (b - a) * self.random()
    def triangular(self, low=0.0, high=1.0, mode=None):
        """Triangular distribution.

        Continuous distribution bounded by given lower and upper limits,
        and having a given mode value in-between.

        http://en.wikipedia.org/wiki/Triangular_distribution

        """
        u = self.random()
        try:
            c = 0.5 if mode is None else (mode - low) / (high - low)
        except ZeroDivisionError:
            # Degenerate case low == high: the distribution is a point mass.
            return low
        if u > c:
            # Mirror the draw into the other half of the triangle.
            u = 1.0 - u
            c = 1.0 - c
            low, high = high, low
        return low + (high - low) * _sqrt(u * c)

    def normalvariate(self, mu=0.0, sigma=1.0):
        """Normal distribution.

        mu is the mean, and sigma is the standard deviation.

        """
        # Uses Kinderman and Monahan method. Reference: Kinderman,
        # A.J. and Monahan, J.F., "Computer generation of random
        # variables using the ratio of uniform deviates", ACM Trans
        # Math Software, 3, (1977), pp257-260.

        random = self.random
        while True:
            u1 = random()
            u2 = 1.0 - random()
            z = NV_MAGICCONST * (u1 - 0.5) / u2
            zz = z * z / 4.0
            # Accept z when it falls under the normal density envelope.
            if zz <= -_log(u2):
                break
        return mu + z * sigma

    def gauss(self, mu=0.0, sigma=1.0):
        """Gaussian distribution.

        mu is the mean, and sigma is the standard deviation.  This is
        slightly faster than the normalvariate() function.

        Not thread-safe without a lock around calls.

        """
        # When x and y are two variables from [0, 1), uniformly
        # distributed, then
        #
        #    cos(2*pi*x)*sqrt(-2*log(1-y))
        #    sin(2*pi*x)*sqrt(-2*log(1-y))
        #
        # are two *independent* variables with normal distribution
        # (mu = 0, sigma = 1).
        # (Lambert Meertens)
        # (corrected version; bug discovered by Mike Miller, fixed by LM)

        # Multithreading note: When two threads call this function
        # simultaneously, it is possible that they will receive the
        # same return value.  The window is very small though.  To
        # avoid this, you have to use a lock around all calls.  (I
        # didn't want to slow this down in the serial case by using a
        # lock here.)

        random = self.random
        z = self.gauss_next
        self.gauss_next = None
        if z is None:
            # Box-Muller produces deviates in pairs: return one now and
            # cache the other in gauss_next for the next call.
            x2pi = random() * TWOPI
            g2rad = _sqrt(-2.0 * _log(1.0 - random()))
            z = _cos(x2pi) * g2rad
            self.gauss_next = _sin(x2pi) * g2rad

        return mu + z * sigma
def lognormvariate(self, mu, sigma):
"""Log normal distribution.
If you take the natural logarithm of this distribution, you'll get a
normal distribution with mean mu and standard deviation sigma.
mu can have any value, and sigma must be greater than zero.
"""
return _exp(self.normalvariate(mu, sigma))
def expovariate(self, lambd):
"""Exponential distribution.
lambd is 1.0 divided by the desired mean. It should be
nonzero. (The parameter would be called "lambda", but that is
a reserved word in Python.) Returned values range from 0 to
positive infinity if lambd is positive, and from negative
infinity to 0 if lambd is negative.
"""
# lambd: rate lambd = 1/mean
# ('lambda' is a Python reserved word)
# we use 1-random() instead of random() to preclude the
# possibility of taking the log of zero.
return -_log(1.0 - self.random()) / lambd
    def vonmisesvariate(self, mu, kappa):
        """Circular data distribution.

        mu is the mean angle, expressed in radians between 0 and 2*pi, and
        kappa is the concentration parameter, which must be greater than or
        equal to zero.  If kappa is equal to zero, this distribution reduces
        to a uniform random angle over the range 0 to 2*pi.

        """
        # Based upon an algorithm published in: Fisher, N.I.,
        # "Statistical Analysis of Circular Data", Cambridge
        # University Press, 1993.

        # Thanks to Magnus Kessler for a correction to the
        # implementation of step 4.

        random = self.random
        if kappa <= 1e-6:
            # Near-zero concentration: effectively uniform on the circle.
            return TWOPI * random()

        s = 0.5 / kappa
        r = s + _sqrt(1.0 + s * s)

        while True:
            u1 = random()
            z = _cos(_pi * u1)

            d = z / (r + z)
            u2 = random()
            # Acceptance test (two envelopes; second avoids an exp when possible).
            if u2 < 1.0 - d * d or u2 <= (1.0 - d) * _exp(d):
                break

        q = 1.0 / r
        f = (q + z) / (1.0 + q * z)
        u3 = random()
        if u3 > 0.5:
            theta = (mu + _acos(f)) % TWOPI
        else:
            theta = (mu - _acos(f)) % TWOPI

        return theta

    def gammavariate(self, alpha, beta):
        """Gamma distribution.  Not the gamma function!

        Conditions on the parameters are alpha > 0 and beta > 0.

        The probability distribution function is:

                    x ** (alpha - 1) * math.exp(-x / beta)
          pdf(x) =  --------------------------------------
                      math.gamma(alpha) * beta ** alpha

        """
        # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2

        # Warning: a few older sources define the gamma distribution in terms
        # of alpha > -1.0
        if alpha <= 0.0 or beta <= 0.0:
            raise ValueError('gammavariate: alpha and beta must be > 0.0')

        random = self.random
        if alpha > 1.0:

            # Uses R.C.H. Cheng, "The generation of Gamma
            # variables with non-integral shape parameters",
            # Applied Statistics, (1977), 26, No. 1, p71-74

            ainv = _sqrt(2.0 * alpha - 1.0)
            bbb = alpha - LOG4
            ccc = alpha + ainv

            while True:
                u1 = random()
                # Reject extreme draws where the transform is unstable.
                if not 1e-7 < u1 < 0.9999999:
                    continue
                u2 = 1.0 - random()
                v = _log(u1 / (1.0 - u1)) / ainv
                x = alpha * _exp(v)
                z = u1 * u1 * u2
                r = bbb + ccc * v - x
                if r + SG_MAGICCONST - 4.5 * z >= 0.0 or r >= _log(z):
                    return x * beta

        elif alpha == 1.0:
            # expovariate(1/beta)
            return -_log(1.0 - random()) * beta

        else:
            # alpha is between 0 and 1 (exclusive)
            # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle
            while True:
                u = random()
                b = (_e + alpha) / _e
                p = b * u
                if p <= 1.0:
                    x = p ** (1.0 / alpha)
                else:
                    x = -_log((b - p) / alpha)
                u1 = random()
                if p > 1.0:
                    if u1 <= x ** (alpha - 1.0):
                        break
                elif u1 <= _exp(-x):
                    break
            return x * beta

    def betavariate(self, alpha, beta):
        """Beta distribution.

        Conditions on the parameters are alpha > 0 and beta > 0.
        Returned values range between 0 and 1.

        """
        ## See
        ## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html
        ## for Ivan Frohne's insightful analysis of why the original implementation:
        ##
        ##    def betavariate(self, alpha, beta):
        ##        # Discrete Event Simulation in C, pp 87-88.
        ##
        ##        y = self.expovariate(alpha)
        ##        z = self.expovariate(1.0/beta)
        ##        return z/(y+z)
        ##
        ## was dead wrong, and how it probably got that way.

        # This version due to Janne Sinkkonen, and matches all the std
        # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution").
        y = self.gammavariate(alpha, 1.0)
        if y:
            return y / (y + self.gammavariate(beta, 1.0))
        return 0.0
def paretovariate(self, alpha):
"""Pareto distribution. alpha is the shape parameter."""
# Jain, pg. 495
u = 1.0 - self.random()
return u ** (-1.0 / alpha)
def weibullvariate(self, alpha, beta):
"""Weibull distribution.
alpha is the scale parameter and beta is the shape parameter.
"""
# Jain, pg. 499; bug fix courtesy Bill Arms
u = 1.0 - self.random()
return alpha * (-_log(u)) ** (1.0 / beta)
## ------------------------------------------------------------------
## --------------- Operating System Random Source ------------------
class SystemRandom(Random):
    """Alternate random number generator using sources provided
    by the operating system (such as /dev/urandom on Unix or
    CryptGenRandom on Windows).

     Not available on all systems (see os.urandom() for details).

    """

    def random(self):
        """Get the next random number in the range [0.0, 1.0)."""
        # 7 bytes = 56 bits; shift off 3 to keep the 53 (BPF) bits that
        # fit in a float mantissa, then scale into [0.0, 1.0).
        return (int.from_bytes(_urandom(7)) >> 3) * RECIP_BPF

    def getrandbits(self, k):
        """getrandbits(k) -> x.  Generates an int with k random bits."""
        if k < 0:
            raise ValueError('number of bits must be non-negative')
        numbytes = (k + 7) // 8                       # bits / 8 and rounded up
        x = int.from_bytes(_urandom(numbytes))
        return x >> (numbytes * 8 - k)                # trim excess bits

    def randbytes(self, n):
        """Generate n random bytes."""
        # os.urandom(n) fails with ValueError for n < 0
        # and returns an empty bytes string for n == 0.
        return _urandom(n)

    def seed(self, *args, **kwds):
        "Stub method.  Not used for a system random number generator."
        return None

    def _notimplemented(self, *args, **kwds):
        "Method should not be called for a system random number generator."
        raise NotImplementedError('System entropy source does not have state.')
    # OS entropy has no readable/settable state.
    getstate = setstate = _notimplemented
# ----------------------------------------------------------------------
# Create one instance, seeded from current time, and export its methods
# as module-level functions. The functions share state across all uses
# (both in the user's code and in the Python libraries), but that's fine
# for most programs and is easier for the casual user than making them
# instantiate their own Random() instance.
# Shared module-level generator; the functions below are its bound methods,
# so all module-level callers share one stream of pseudo-random numbers.
_inst = Random()
seed = _inst.seed
random = _inst.random
uniform = _inst.uniform
triangular = _inst.triangular
randint = _inst.randint
choice = _inst.choice
randrange = _inst.randrange
sample = _inst.sample
shuffle = _inst.shuffle
choices = _inst.choices
normalvariate = _inst.normalvariate
lognormvariate = _inst.lognormvariate
expovariate = _inst.expovariate
vonmisesvariate = _inst.vonmisesvariate
gammavariate = _inst.gammavariate
gauss = _inst.gauss
betavariate = _inst.betavariate
paretovariate = _inst.paretovariate
weibullvariate = _inst.weibullvariate
getstate = _inst.getstate
setstate = _inst.setstate
getrandbits = _inst.getrandbits
randbytes = _inst.randbytes
## ------------------------------------------------------
## ----------------- test program -----------------------
def _test_generator(n, func, args):
from statistics import stdev, fmean as mean
from time import perf_counter
t0 = perf_counter()
data = [func(*args) for i in _repeat(None, n)]
t1 = perf_counter()
xbar = mean(data)
sigma = stdev(data, xbar)
low = min(data)
high = max(data)
print(f'{t1 - t0:.3f} sec, {n} times {func.__name__}')
print('avg %g, stddev %g, min %g, max %g\n' % (xbar, sigma, low, high))
def _test(N=2000):
    """Smoke-test every exported distribution with a spread of parameters."""
    _test_generator(N, random, ())
    _test_generator(N, normalvariate, (0.0, 1.0))
    _test_generator(N, lognormvariate, (0.0, 1.0))
    _test_generator(N, vonmisesvariate, (0.0, 1.0))
    # gammavariate exercises all three internal branches (alpha <1, ==1, >1).
    _test_generator(N, gammavariate, (0.01, 1.0))
    _test_generator(N, gammavariate, (0.1, 1.0))
    _test_generator(N, gammavariate, (0.1, 2.0))
    _test_generator(N, gammavariate, (0.5, 1.0))
    _test_generator(N, gammavariate, (0.9, 1.0))
    _test_generator(N, gammavariate, (1.0, 1.0))
    _test_generator(N, gammavariate, (2.0, 1.0))
    _test_generator(N, gammavariate, (20.0, 1.0))
    _test_generator(N, gammavariate, (200.0, 1.0))
    _test_generator(N, gauss, (0.0, 1.0))
    _test_generator(N, betavariate, (3.0, 3.0))
    _test_generator(N, triangular, (0.0, 1.0, 1.0 / 3.0))
## ------------------------------------------------------
## ------------------ fork support ---------------------
if hasattr(_os, "fork"):
    # Reseed the shared instance in the child after fork() so parent and
    # child do not continue producing identical random streams.
    _os.register_at_fork(after_in_child=_inst.seed)


if __name__ == '__main__':
    _test()
| bsd-3-clause | 86ea304009192008826100b3e765bec2 | 34.506104 | 97 | 0.556313 | 4.121489 | false | false | false | false |
brython-dev/brython | www/src/Lib/encodings/bz2_codec.py | 214 | 2249 | """Python 'bz2_codec' Codec - bz2 compression encoding.
This codec de/encodes from bytes to bytes and is therefore usable with
bytes.transform() and bytes.untransform().
Adapted by Raymond Hettinger from zlib_codec.py which was written
by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import bz2 # this codec needs the optional bz2 module !
### Codec APIs
def bz2_encode(input, errors='strict'):
    """Compress *input* with bz2; return (output bytes, length consumed)."""
    # Only 'strict' makes sense for a lossless transform codec.
    assert errors == 'strict'
    compressed = bz2.compress(input)
    return (compressed, len(input))
def bz2_decode(input, errors='strict'):
    """Decompress bz2 *input*; return (output bytes, length consumed)."""
    # Only 'strict' makes sense for a lossless transform codec.
    assert errors == 'strict'
    decompressed = bz2.decompress(input)
    return (decompressed, len(input))
class Codec(codecs.Codec):
    # Stateless codec: delegates to the module-level helper functions.
    def encode(self, input, errors='strict'):
        return bz2_encode(input, errors)
    def decode(self, input, errors='strict'):
        return bz2_decode(input, errors)

class IncrementalEncoder(codecs.IncrementalEncoder):
    def __init__(self, errors='strict'):
        assert errors == 'strict'
        self.errors = errors
        # One compressor per stream; recreated by reset().
        self.compressobj = bz2.BZ2Compressor()

    def encode(self, input, final=False):
        if final:
            # Emit the remaining buffered data; the compressor is spent
            # after flush() and must be reset before reuse.
            c = self.compressobj.compress(input)
            return c + self.compressobj.flush()
        else:
            return self.compressobj.compress(input)

    def reset(self):
        self.compressobj = bz2.BZ2Compressor()

class IncrementalDecoder(codecs.IncrementalDecoder):
    def __init__(self, errors='strict'):
        assert errors == 'strict'
        self.errors = errors
        self.decompressobj = bz2.BZ2Decompressor()

    def decode(self, input, final=False):
        try:
            return self.decompressobj.decompress(input)
        except EOFError:
            # Input past the end of the bz2 stream yields nothing.
            return ''

    def reset(self):
        self.decompressobj = bz2.BZ2Decompressor()

class StreamWriter(Codec, codecs.StreamWriter):
    charbuffertype = bytes  # bytes-to-bytes transform, not text

class StreamReader(Codec, codecs.StreamReader):
    charbuffertype = bytes

### encodings module API

def getregentry():
    # Registration record consumed by the encodings package.
    return codecs.CodecInfo(
        name="bz2",
        encode=bz2_encode,
        decode=bz2_decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
        _is_text_encoding=False,
    )
| bsd-3-clause | 0ce8cc38e6e2b3664d7c420e34449339 | 27.833333 | 70 | 0.666963 | 4.037702 | false | false | false | false |
brython-dev/brython | www/src/Lib/encodings/utf_16_le.py | 860 | 1037 | """ Python 'utf-16-le' Codec
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
# The C-implemented codec functions do all the work.
encode = codecs.utf_16_le_encode

def decode(input, errors='strict'):
    # Third argument (final=True): treat the input as complete, so a
    # trailing incomplete sequence is an error rather than buffered.
    return codecs.utf_16_le_decode(input, errors, True)

class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        return codecs.utf_16_le_encode(input, self.errors)[0]

class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
    # BufferedIncrementalDecoder handles partial input; this supplies
    # the low-level decode step.
    _buffer_decode = codecs.utf_16_le_decode

class StreamWriter(codecs.StreamWriter):
    encode = codecs.utf_16_le_encode

class StreamReader(codecs.StreamReader):
    decode = codecs.utf_16_le_decode

### encodings module API

def getregentry():
    # Registration record consumed by the encodings package.
    return codecs.CodecInfo(
        name='utf-16-le',
        encode=encode,
        decode=decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
| bsd-3-clause | 1f99020a31cb295af4eed457ab212635 | 23.690476 | 61 | 0.712633 | 3.898496 | false | false | false | false |
brython-dev/brython | www/src/Lib/encodings/iso8859_5.py | 35 | 13322 | """ Python Character Mapping Codec iso8859_5 generated from 'MAPPINGS/ISO8859/8859-5.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless ISO-8859-5 codec backed by the module-level charmap tables."""
    def encode(self,input,errors='strict'):
        # charmap_encode returns (encoded bytes, length consumed).
        return codecs.charmap_encode(input,errors,encoding_table)
    def decode(self,input,errors='strict'):
        # charmap_decode returns (decoded str, length consumed).
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Stateless: every call maps the whole chunk through the charmap.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Stateless: single-byte charset, so no partial sequences to buffer.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # encode() is inherited from Codec; no extra stream state needed.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # decode() is inherited from Codec; no extra stream state needed.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo used to register the 'iso8859-5' codec."""
    return codecs.CodecInfo(
        name='iso8859-5',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
# The ISO-8859-5 mapping (MAPPINGS/ISO8859/8859-5.TXT) is almost entirely
# regular: bytes 0x00-0xA0 decode to the identical code points, and bytes
# 0xA1-0xFF decode to the Cyrillic block at a fixed offset of +0x0360
# (0xA1 -> U+0401 ... 0xFF -> U+045F), with exactly three exceptions:
#   0xAD -> U+00AD SOFT HYPHEN
#   0xF0 -> U+2116 NUMERO SIGN
#   0xFD -> U+00A7 SECTION SIGN
# Build the 256-entry decoding string from that rule instead of spelling
# out every entry; the resulting string is identical to the generated one.
_chars = [chr(byte) for byte in range(0xA1)]
_chars += [chr(byte + 0x0360) for byte in range(0xA1, 0x100)]
_chars[0xAD] = '\xad'    # SOFT HYPHEN
_chars[0xF0] = '\u2116'  # NUMERO SIGN
_chars[0xFD] = '\xa7'    # SECTION SIGN
decoding_table = ''.join(_chars)
del _chars

### Encoding table
encoding_table = codecs.charmap_build(decoding_table)
| bsd-3-clause | 595f11b8482fe8853230d5fdd1ad2b70 | 41.394137 | 107 | 0.51764 | 3.093823 | false | false | false | false |
brython-dev/brython | scripts/make_stdlib_static.py | 1 | 2896 | # Create file stdlib_paths.js : static mapping between module names and paths
# in the standard library
import os
import git
# Walk www/src/Lib (Python stdlib) and www/src/libs (JS modules) and emit
# stdlib_paths.js: a static JS mapping from module name to module kind.
libfolder = os.path.join(os.path.dirname(os.getcwd()), 'www', 'src')

simple_javascript_template_string = """;(function($B){\n
$B.stdlib = {}
"""

with open(os.path.join(libfolder, 'stdlib_paths.js'), 'w') as out:
    out.write(simple_javascript_template_string)
    pylist = []
    # 'browser' is a built-in package with no directory under Lib/.
    pkglist = ['browser']
    pypath = os.path.join(libfolder, 'Lib')
    for dirpath, dirnames, filenames in os.walk(pypath):
        if os.path.exists(os.path.join(dirpath,'__init__.py')):
            # package
            filenames = []
            path = dirpath[len(pypath)+len(os.sep):].split(os.sep)
            pkglist.append('.'.join(path))
        elif not dirnames:
            filenames = []
        for filename in filenames:
            mod_name, ext = os.path.splitext(filename)
            if ext != '.py':
                continue
            path = dirpath[len(pypath)+len(os.sep):].split(os.sep)+[mod_name]
            if not path[0]:
                path = path[1:]
            mod_name = '.'.join(path).lstrip('.')
            if filename == '__init__.py':
                mod_name = '.'.join(path[:-1]).lstrip('.')
            mod_path = 'Lib/'+'/'.join(path)
            # Only ship files that are tracked by git.
            if not git.in_index(mod_path):
                print(mod_path, 'not in index')
                continue
            if filename == '__init__.py':
                pkglist.append(mod_name)
            else:
                pylist.append(mod_name)
    pylist.sort()
    out.write("var pylist = ['%s']\n" % "','".join(pylist))
    pkglist.sort()
    out.write(
        "for(var i = 0; i < pylist.length; i++)" +
        "{$B.stdlib[pylist[i]] = ['py']}\n\n")
    jspath = os.path.join(libfolder, 'libs')
    jslist = []
    for dirpath, dirnames, filenames in os.walk(jspath):
        for filename in filenames:
            mod_name, ext = os.path.splitext(filename)
            # NOTE(review): this slices dirpath by len(pypath) ('Lib'), not
            # len(jspath) ('libs'); it only yields the right relative path
            # because len('libs') == len('Lib') + len(os.sep) and the empty
            # leading component is stripped below — confirm and consider
            # using jspath here for clarity.
            path = dirpath[len(pypath)+len(os.sep):].split(os.sep)+[mod_name]
            if not path[0]:
                path = path[1:]
            mod_path = 'libs/'+'/'.join(path)
            if ext != '.js':
                continue
            if not git.in_index(mod_path):
                print(mod_path, 'not in index')
                continue
            mod_name = os.path.splitext(filename)[0]
            jslist.append(mod_name)
    jslist.sort()
    out.write("var js = ['%s']\n" % "','".join(jslist))
    out.write("for(var i = 0; i < js.length; i++)" +
        "{$B.stdlib[js[i]] = ['js']}\n\n""")
    out.write("var pkglist = ['%s']\n" % "','".join(pkglist))
    out.write("for(var i = 0; i < pkglist.length; i++)" +
        "{$B.stdlib[pkglist[i]] = ['py', true]}\n")
    out.write('$B.stdlib_module_names = Object.keys($B.stdlib)\n')
    out.write('})(__BRYTHON__)')

print('static stdlib mapping ok')
brython-dev/brython | www/src/Lib/test/test_codecencodings_cn.py | 11 | 3950 | #
# test_codecencodings_cn.py
# Codec encoding tests for PRC encodings.
#
from test import multibytecodec_support
import unittest
class Test_GB2312(multibytecodec_support.TestBase, unittest.TestCase):
    # Each codectests entry is (input, error handler, expected output);
    # an expected value of None means the conversion must raise.
    encoding = 'gb2312'
    tstring = multibytecodec_support.load_teststring('gb2312')
    codectests = (
        # invalid bytes
        (b"abc\x81\x81\xc1\xc4", "strict", None),
        (b"abc\xc8", "strict", None),
        (b"abc\x81\x81\xc1\xc4", "replace", "abc\ufffd\ufffd\u804a"),
        (b"abc\x81\x81\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\u804a\ufffd"),
        (b"abc\x81\x81\xc1\xc4", "ignore", "abc\u804a"),
        (b"\xc1\x64", "strict", None),
    )
class Test_GBK(multibytecodec_support.TestBase, unittest.TestCase):
    # Each codectests entry is (input, error handler, expected output);
    # an expected value of None means the conversion must raise.
    encoding = 'gbk'
    tstring = multibytecodec_support.load_teststring('gbk')
    codectests = (
        # invalid bytes
        (b"abc\x80\x80\xc1\xc4", "strict", None),
        (b"abc\xc8", "strict", None),
        (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\ufffd\u804a"),
        (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\u804a\ufffd"),
        (b"abc\x80\x80\xc1\xc4", "ignore", "abc\u804a"),
        (b"\x83\x34\x83\x31", "strict", None),
        ("\u30fb", "strict", None),
    )
class Test_GB18030(multibytecodec_support.TestBase, unittest.TestCase):
    # Each codectests entry is (input, error handler, expected output);
    # an expected value of None means the conversion must raise.
    encoding = 'gb18030'
    tstring = multibytecodec_support.load_teststring('gb18030')
    codectests = (
        # invalid bytes
        (b"abc\x80\x80\xc1\xc4", "strict", None),
        (b"abc\xc8", "strict", None),
        (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\ufffd\u804a"),
        (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\u804a\ufffd"),
        (b"abc\x80\x80\xc1\xc4", "ignore", "abc\u804a"),
        (b"abc\x84\x39\x84\x39\xc1\xc4", "replace", "abc\ufffd9\ufffd9\u804a"),
        ("\u30fb", "strict", b"\x819\xa79"),
        (b"abc\x84\x32\x80\x80def", "replace", 'abc\ufffd2\ufffd\ufffddef'),
        (b"abc\x81\x30\x81\x30def", "strict", 'abc\x80def'),
        (b"abc\x86\x30\x81\x30def", "replace", 'abc\ufffd0\ufffd0def'),
        # issue29990
        (b"\xff\x30\x81\x30", "strict", None),
        (b"\x81\x30\xff\x30", "strict", None),
        (b"abc\x81\x39\xff\x39\xc1\xc4", "replace", "abc\ufffd\x39\ufffd\x39\u804a"),
        (b"abc\xab\x36\xff\x30def", "replace", 'abc\ufffd\x36\ufffd\x30def'),
        (b"abc\xbf\x38\xff\x32\xc1\xc4", "ignore", "abc\x38\x32\u804a"),
    )
    # GB18030 can represent all of ISO 10646 / Unicode.
    has_iso10646 = True
class Test_HZ(multibytecodec_support.TestBase, unittest.TestCase):
    # Each codectests entry is (input, error handler, expected output);
    # an expected value of None means the conversion must raise.
    encoding = 'hz'
    tstring = multibytecodec_support.load_teststring('hz')
    codectests = (
        # test '~\n' (3 lines)
        (b'This sentence is in ASCII.\n'
         b'The next sentence is in GB.~{<:Ky2;S{#,~}~\n'
         b'~{NpJ)l6HK!#~}Bye.\n',
         'strict',
         'This sentence is in ASCII.\n'
         'The next sentence is in GB.'
         '\u5df1\u6240\u4e0d\u6b32\uff0c\u52ff\u65bd\u65bc\u4eba\u3002'
         'Bye.\n'),
        # test '~\n' (4 lines)
        (b'This sentence is in ASCII.\n'
         b'The next sentence is in GB.~\n'
         b'~{<:Ky2;S{#,NpJ)l6HK!#~}~\n'
         b'Bye.\n',
         'strict',
         'This sentence is in ASCII.\n'
         'The next sentence is in GB.'
         '\u5df1\u6240\u4e0d\u6b32\uff0c\u52ff\u65bd\u65bc\u4eba\u3002'
         'Bye.\n'),
        # invalid bytes
        (b'ab~cd', 'replace', 'ab\uFFFDcd'),
        (b'ab\xffcd', 'replace', 'ab\uFFFDcd'),
        (b'ab~{\x81\x81\x41\x44~}cd', 'replace', 'ab\uFFFD\uFFFD\u804Acd'),
        (b'ab~{\x41\x44~}cd', 'replace', 'ab\u804Acd'),
        (b"ab~{\x79\x79\x41\x44~}cd", "replace", "ab\ufffd\ufffd\u804acd"),
        # issue 30003
        ('ab~cd', 'strict', b'ab~~cd'), # escape ~
        (b'~{Dc~~:C~}', 'strict', None), # ~~ only in ASCII mode
        (b'~{Dc~\n:C~}', 'strict', None), # ~\n only in ASCII mode
    )
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
| bsd-3-clause | 32f557f3411f708a934629689d86d1aa | 40.145833 | 85 | 0.567089 | 2.474937 | false | true | false | false |
brython-dev/brython | www/src/Lib/_struct.py | 1 | 15809 | #
# This module is a pure Python version of pypy.module.struct.
# It is only imported if the vastly faster pypy.module.struct is not
# compiled in. For now we keep this version for reference and
# because pypy.module.struct is not ootype-backend-friendly yet.
#
# this module 'borrowed' from
# https://bitbucket.org/pypy/pypy/src/18626459a9b2/lib_pypy/_struct.py?at=py3k-listview_str
# with many bug fixes
"""Functions to convert between Python values and C structs.
Python strings are used to hold the data representing the C struct
and also as format strings to describe the layout of data in the C struct.
The optional first format char indicates byte order, size and alignment:
@: native order, size & alignment (default)
=: native order, std. size & alignment
<: little-endian, std. size & alignment
>: big-endian, std. size & alignment
!: same as >
The remaining chars indicate types of args and must match exactly;
these can be preceded by a decimal repeat count:
x: pad byte (no data);
c:char;
b:signed byte;
B:unsigned byte;
h:short;
H:unsigned short;
i:int;
I:unsigned int;
l:long;
L:unsigned long;
f:float;
d:double.
Special cases (preceding decimal count indicates length):
s:string (array of char); p: pascal string (with count byte).
Special case (only available in native format):
P:an integer type that is wide enough to hold a pointer.
Special case (not in native mode unless 'long long' in platform C):
q:long long;
Q:unsigned long long
Whitespace between formats is ignored.
The variable struct.error is an exception raised on errors."""
import math
import re
import sys
# TODO: XXX Find a way to get information on native sizes and alignments
class StructError(Exception):
    """Raised for errors in struct format strings or packed data."""
    pass

# Alias matching the C struct module's public exception name.
error = StructError
def _normalize(fmt):
"""Check if there are illegal whitespaces (between a count and its format)
and remove other whitespaces."""
if re.search(r"\d\s+", fmt):
raise StructError("bad char in struct format")
return fmt.replace(" ", "")
def unpack_int(data, index, size, le):
    """Read *size* bytes of *data* at *index* as an unsigned integer.

    *le* is the byte order of the encoded value: 'little' or 'big'.
    """
    return int.from_bytes(data[index:index + size], le)

def unpack_signed_int(data, index, size, le):
    """Read *size* bytes of *data* at *index* as a two's-complement
    signed integer in byte order *le* ('little' or 'big')."""
    return int.from_bytes(data[index:index + size], le, signed=True)
# Portable ways to obtain inf/nan without relying on math-module helpers.
INFINITY = 1e200 * 1e200
NAN = INFINITY / INFINITY

# Byte-order flags accepted by float_pack/float_unpack's *order* parameter.
BIG_ENDIAN = 0
LITTLE_ENDIAN = 1
def unpack_char(data, index, size, le):
    # 'c' format: the value is the raw byte slice itself; *le* is unused.
    return data[index:index + size]
def pack_int(number, size, le):
    """Serialize *number* into *size* bytes with byte order *le*
    ('little' or 'big').

    High bits beyond *size* bytes are silently discarded; negative values
    come out in two's-complement form (callers range-check beforehand).
    """
    octets = [(number >> (8 * shift)) & 0xFF for shift in range(size)]
    if le == 'big':
        octets.reverse()
    return bytes(octets)

def pack_signed_int(number, size, le):
    """Range-check *number* as a signed *size*-byte value, then pack it."""
    if not isinstance(number, int):
        raise StructError("argument for i,I,l,L,q,Q,h,H must be integer")
    limit = 1 << (8 * size - 1)
    if not -limit <= number <= limit - 1:
        raise OverflowError("Number:%i too large to convert" % number)
    return pack_int(number, size, le)

def pack_unsigned_int(number, size, le):
    """Range-check *number* as an unsigned *size*-byte value, then pack it."""
    if not isinstance(number, int):
        raise StructError("argument for i,I,l,L,q,Q,h,H must be integer")
    if number < 0:
        raise TypeError("can't convert negative long to unsigned")
    if number > (1 << (8 * size)) - 1:
        raise OverflowError("Number:%i too large to convert" % number)
    return pack_int(number, size, le)
def pack_char(char, size, le):
    # 'c' format: *char* is already a length-1 bytes object; *size* and
    # *le* are accepted only for signature uniformity with the other packers.
    return bytes(char)
def isinf(x):
    """Return True only for +/-infinity: halving an infinity is a no-op,
    while every nonzero finite float changes when halved."""
    return x != 0.0 and x * 0.5 == x
def isnan(v):
    """Return True iff *v* is a NaN.

    NaN is the only floating-point value that compares unequal to itself,
    so a plain self-comparison suffices.  The previous implementation also
    tested ``v == 1.0 and v == 2.0``, which can never be true — dead code
    removed.
    """
    return v != v
def pack_float(x, size, le):
    """Pack float *x* into *size* bytes (4 or 8) with byte order *le*
    ('little' or 'big'), using float_pack for the IEEE 754 bit pattern."""
    unsigned = float_pack(x, size)
    octets = [(unsigned >> (8 * shift)) & 0xFF for shift in range(size)]
    if le == "big":
        octets.reverse()
    return bytes(octets)
def unpack_float(data, index, size, le):
    """Unpack a float of *size* bytes (4 or 8) starting at *index*,
    with byte order *le* ('little' or 'big')."""
    octets = list(data[index:index + size])
    if le == "big":
        octets.reverse()
    unsigned = 0
    for pos, byte in enumerate(octets):
        unsigned |= byte << (8 * pos)
    return float_unpack(unsigned, size, le)
def round_to_nearest(x):
    """Round a nonnegative finite float to the nearest int, ties to even.

    Unlike Python 2's round(): the result is an int and halves round to
    the nearest even integer.  Results are wrong for negative *x*; callers
    only pass nonnegative values.
    """
    whole = int(x)
    remainder = x - whole
    if remainder > 0.5:
        return whole + 1
    if remainder == 0.5 and whole % 2 == 1:
        return whole + 1
    return whole
def float_unpack(Q, size, order=LITTLE_ENDIAN):
    """Convert a 32-bit or 64-bit unsigned integer holding an IEEE 754
    bit pattern (as produced by float_pack) into a Python float.

    *order* is accepted for API symmetry but does not affect the result.
    """
    if size == 8:
        MIN_EXP = -1021   # = sys.float_info.min_exp
        MAX_EXP = 1024    # = sys.float_info.max_exp
        MANT_DIG = 53     # = sys.float_info.mant_dig
        BITS = 64
    elif size == 4:
        MIN_EXP = -125    # C's FLT_MIN_EXP
        MAX_EXP = 128     # FLT_MAX_EXP
        MANT_DIG = 24     # FLT_MANT_DIG
        BITS = 32
    else:
        raise ValueError("invalid size value")
    if Q >> BITS:
        raise ValueError("input out of range")
    # Split the pattern into sign, biased exponent and mantissa fields.
    sign = Q >> (BITS - 1)
    exp_field = (Q >> (MANT_DIG - 1)) & ((1 << (BITS - MANT_DIG)) - 1)
    mant = Q & ((1 << (MANT_DIG - 1)) - 1)
    if exp_field == MAX_EXP - MIN_EXP + 2:
        # All-ones exponent: NaN if any mantissa bit is set, else infinity.
        value = float('nan') if mant else float('inf')
    elif exp_field == 0:
        # Zero exponent: subnormal number (or zero when mant == 0).
        value = math.ldexp(float(mant), MIN_EXP - MANT_DIG)
    else:
        # Normal number: restore the implicit leading mantissa bit.
        value = math.ldexp(float(mant | (1 << (MANT_DIG - 1))),
                           exp_field + MIN_EXP - MANT_DIG - 1)
    return -value if sign else value
def float_pack(x, size, order=LITTLE_ENDIAN):
    """Convert a Python float x into a 64-bit unsigned integer
    with the same byte representation.

    size selects the target format: 8 -> IEEE 754 binary64, 4 -> binary32.
    Raises OverflowError when x is finite but too large for the format.
    The *order* argument is unused here; byte order is applied by callers.
    """
    if size == 8:
        MIN_EXP = -1021 # = sys.float_info.min_exp
        MAX_EXP = 1024 # = sys.float_info.max_exp
        MANT_DIG = 53 # = sys.float_info.mant_dig
        BITS = 64
    elif size == 4:
        MIN_EXP = -125 # C's FLT_MIN_EXP
        MAX_EXP = 128 # FLT_MAX_EXP
        MANT_DIG = 24 # FLT_MANT_DIG
        BITS = 32
    else:
        raise ValueError("invalid size value")
    # Sign via copysign so that -0.0 keeps its sign bit.
    sign = math.copysign(1.0, x) < 0.0
    if math.isinf(x):
        mant = 0
        exp = MAX_EXP - MIN_EXP + 2
    elif math.isnan(x):
        mant = 1 << (MANT_DIG-2) # other values possible
        exp = MAX_EXP - MIN_EXP + 2
    elif x == 0.0:
        mant = 0
        exp = 0
    else:
        m, e = math.frexp(abs(x))  # abs(x) == m * 2**e
        exp = e - (MIN_EXP - 1)
        if exp > 0:
            # Normal case.
            mant = round_to_nearest(m * (1 << MANT_DIG))
            mant -= 1 << MANT_DIG - 1
        else:
            # Subnormal case.
            if exp + MANT_DIG - 1 >= 0:
                mant = round_to_nearest(m * (1 << exp + MANT_DIG - 1))
            else:
                mant = 0
            exp = 0
        # Special case: rounding produced a MANT_DIG-bit mantissa.
        assert 0 <= mant <= 1 << MANT_DIG - 1
        if mant == 1 << MANT_DIG - 1:
            mant = 0
            exp += 1
        # Raise on overflow (in some circumstances, may want to return
        # infinity instead).
        if exp >= MAX_EXP - MIN_EXP + 2:
            raise OverflowError("float too large to pack in this format")
    # check constraints
    assert 0 <= mant < 1 << MANT_DIG - 1
    assert 0 <= exp <= MAX_EXP - MIN_EXP + 2
    assert 0 <= sign <= 1
    return ((sign << BITS - 1) | (exp << MANT_DIG - 1)) | mant
# Per-format-character metadata: struct size in bytes, alignment (always 0
# in this implementation), and the pack/unpack helper callables.  'x', 's'
# and 'p' are handled specially inside pack()/unpack(), hence None helpers.
big_endian_format = {
    'x': {'size': 1, 'alignment': 0, 'pack': None, 'unpack': None},
    'b': {'size': 1, 'alignment': 0, 'pack': pack_signed_int, 'unpack': unpack_signed_int},
    'B': {'size': 1, 'alignment': 0, 'pack': pack_unsigned_int, 'unpack': unpack_int},
    'c': {'size': 1, 'alignment': 0, 'pack': pack_char, 'unpack': unpack_char},
    's': {'size': 1, 'alignment': 0, 'pack': None, 'unpack': None},
    'p': {'size': 1, 'alignment': 0, 'pack': None, 'unpack': None},
    'h': {'size': 2, 'alignment': 0, 'pack': pack_signed_int, 'unpack': unpack_signed_int},
    'H': {'size': 2, 'alignment': 0, 'pack': pack_unsigned_int, 'unpack': unpack_int},
    'i': {'size': 4, 'alignment': 0, 'pack': pack_signed_int, 'unpack': unpack_signed_int},
    'I': {'size': 4, 'alignment': 0, 'pack': pack_unsigned_int, 'unpack': unpack_int},
    'l': {'size': 4, 'alignment': 0, 'pack': pack_signed_int, 'unpack': unpack_signed_int},
    'L': {'size': 4, 'alignment': 0, 'pack': pack_unsigned_int, 'unpack': unpack_int},
    'q': {'size': 8, 'alignment': 0, 'pack': pack_signed_int, 'unpack': unpack_signed_int},
    'Q': {'size': 8, 'alignment': 0, 'pack': pack_unsigned_int, 'unpack': unpack_int},
    'f': {'size': 4, 'alignment': 0, 'pack': pack_float, 'unpack': unpack_float},
    'd': {'size': 8, 'alignment': 0, 'pack': pack_float, 'unpack': unpack_float},
    'P': {'size': 8, 'alignment': 0, 'pack': pack_unsigned_int, 'unpack': unpack_int}
    }
# All byte-order modes share the same size table; only endianness differs.
default = big_endian_format

# Maps the optional leading mode character of a format string to
# (format table, endianness string for int packing helpers).
formatmode = { '<' : (default, 'little'),
               '>' : (default, 'big'),
               '!' : (default, 'big'),
               '=' : (default, sys.byteorder),
               '@' : (default, sys.byteorder)
             }
def _getmode(fmt):
    """Interpret the optional leading mode character of *fmt*.

    Returns (format table, endianness, index of first format char,
    native-alignment flag).  Only '@' (and the absence of any prefix,
    which implies '@') requests native alignment.
    """
    if fmt and fmt[0] in formatmode:
        formatdef, endianness = formatmode[fmt[0]]
        return formatdef, endianness, 1, fmt[0] == '@'
    # Empty string or unrecognized first character: native mode, and the
    # whole string is parsed as format characters.
    formatdef, endianness = formatmode['@']
    return formatdef, endianness, 0, True
def _getnum(fmt, i):
num = None
cur = fmt[i]
while ('0'<= cur ) and ( cur <= '9'):
if num == None:
num = int(cur)
else:
num = 10 * num + int(cur)
i += 1
cur = fmt[i]
return num, i
def calcsize(fmt):
    """calcsize(fmt) -> int
    Return size of C struct described by format string fmt.
    See struct.__doc__ for more on format strings."""
    if isinstance(fmt, bytes):
        fmt = fmt.decode("ascii")
    fmt = _normalize(fmt)
    formatdef, endianness, i, alignment = _getmode(fmt)
    num = 0
    result = 0
    while i < len(fmt):
        num, i = _getnum(fmt,i)
        cur = fmt[i]
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError("%s is not a valid format" % cur)
        if num != None :
            result += num * format['size']
        else:
            # if formatdef is native, alignment is native, so we count a
            # number of padding bytes until result is a multiple of size
            # NOTE(review): when result is already a multiple of size this
            # adds a full extra size of padding; pack() and unpack() share
            # the same quirk, so the three stay mutually consistent —
            # change all three together or none.
            if alignment and result:
                result += format['size'] - result % format['size']
            result += format['size']
            num = 0
        i += 1
    return result
def pack(fmt, *args):
    """pack(fmt, v1, v2, ...) -> string
    Return string containing values v1, v2, ... packed according to fmt.
    See struct.__doc__ for more on format strings."""
    fmt = _normalize(fmt)
    formatdef, endianness, i, alignment = _getmode(fmt)
    args = list(args)
    n_args = len(args)  # (unused; kept for reference)
    result = []  # list of bytes chunks, joined at the end
    while i < len(fmt):
        num, i = _getnum(fmt, i)
        cur = fmt[i]
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError("%s is not a valid format" % cur)
        # num_s preserves the raw count; num becomes the effective repeat.
        if num == None :
            num_s = 0
            num = 1
        else:
            num_s = num
        if cur == 'x':
            # Pad bytes: consume no argument, emit num zero bytes.
            result += [b'\0' * num]
        elif cur == 's':
            # Fixed-length string: truncate or NUL-pad to exactly num bytes.
            if isinstance(args[0], bytes):
                padding = num - len(args[0])
                result += [args[0][:num] + b'\0' * padding]
                args.pop(0)
            else:
                raise StructError("arg for string format not a string")
        elif cur == 'p':
            # Pascal string: leading length byte, then at most num-1 bytes.
            if isinstance(args[0], bytes):
                padding = num - len(args[0]) - 1
                if padding > 0:
                    result += [bytes([len(args[0])]) + args[0][:num-1] +
                               b'\0'*padding]
                else:
                    # String fills (or overflows) the field; length byte is
                    # capped at 255.
                    if num < 255:
                        result += [bytes([num-1]) + args[0][:num - 1]]
                    else:
                        result += [bytes([255]) + args[0][:num - 1]]
                args.pop(0)
            else:
                raise StructError("arg for string format not a string")
        else:
            if len(args) < num:
                raise StructError("insufficient arguments to pack")
            # NOTE(review): len(result) counts list CHUNKS, not bytes
            # written, so this native-alignment padding is wrong whenever
            # an earlier item packed a multi-byte value; calcsize() and
            # unpack() compute padding from byte offsets instead.  Confirm
            # before relying on '@'-mode alignment, and fix all three
            # functions together if this is changed.
            if len(result) and alignment:
                # pad with 0 until position is a multiple of size
                padding = format['size'] - len(result) % format['size']
                result += [bytes([0])] * padding
            for var in args[:num]:
                result += [format['pack'](var, format['size'], endianness)]
            args = args[num:]
            num = None
        i += 1
    if len(args) != 0:
        raise StructError("too many arguments for pack format")
    return b''.join(result)
def unpack(fmt, data):
    """unpack(fmt, string) -> (v1, v2, ...)
    Unpack the string, containing packed C structure data, according
    to fmt. Requires len(string)==calcsize(fmt).
    See struct.__doc__ for more on format strings."""
    fmt = _normalize(fmt)
    formatdef, endianness, i, alignment = _getmode(fmt)
    j = 0  # byte offset into data
    num = 0
    result = []
    length = calcsize(fmt)
    if length != len (data):
        raise StructError("unpack str size does not match format")
    while i < len(fmt):
        num, i = _getnum(fmt, i)
        cur = fmt[i]
        i += 1
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError("%s is not a valid format" % cur)
        # NOTE(review): 'not num' also catches an explicit 0 count, so
        # '0h' would be treated like '1h' here; in practice calcsize()
        # returns 0 for '0h' and the length check above raises first.
        if not num :
            num = 1
        if cur == 'x':
            # Pad bytes: skip without producing a value.
            j += num
        elif cur == 's':
            result.append(data[j:j + num])
            j += num
        elif cur == 'p':
            # Pascal string: first byte is the length, capped at num-1.
            n = data[j]
            if n >= num:
                n = num - 1
            result.append(data[j + 1:j + n + 1])
            j += num
        else:
            # skip padding bytes until we get at a multiple of size
            # NOTE(review): like calcsize()/pack(), this adds a full
            # 'size' of padding even when j is already aligned — the quirk
            # is shared, so round-trips stay consistent; change all three
            # together or none.
            if j > 0 and alignment:
                padding = format['size'] - j % format['size']
                j += padding
            for n in range(num):
                result += [format['unpack'](data, j, format['size'],
                                            endianness)]
                j += format['size']
    return tuple(result)
def pack_into(fmt, buf, offset, *args):
    """Pack *args* according to *fmt* and write the bytes into *buf*
    starting at *offset*.  *buf* must support slice assignment
    (e.g. bytearray, memoryview)."""
    data = pack(fmt, *args)
    buf[offset:offset + len(data)] = data
def unpack_from(fmt, buf, offset=0):
    """Unpack according to *fmt* from *buf* starting at *offset*.

    Unlike unpack(), *buf* may be longer than the format requires.
    """
    size = calcsize(fmt)
    data = buf[offset:offset + size]
    if len(data) != size:
        raise error("unpack_from requires a buffer of at least %d bytes"
                    % (size,))
    return unpack(fmt, data)
def _clearcache():
    "Clear the internal cache."
    # No cache in this implementation
class Struct:
    """Compiled-format object mirroring the C struct.Struct API.

    Stores the format string and exposes pack/unpack helpers bound to it.
    """
    def __init__(self, fmt):
        self.format = fmt
        # The C implementation exposes Struct.size (the number of bytes
        # the format describes); provide it for API parity.  This also
        # validates the format string eagerly, as the C version does.
        self.size = calcsize(fmt)
    def pack(self, *args):
        """Pack the given values according to the stored format."""
        return pack(self.format, *args)
    def pack_into(self, *args):
        """pack_into(buf, offset, v1, ...): pack into a mutable buffer."""
        return pack_into(self.format, *args)
    def unpack(self, *args):
        """Unpack bytes according to the stored format."""
        return unpack(self.format, *args)
    def unpack_from(self, *args):
        """unpack_from(buf, offset=0): unpack from a buffer slice."""
        return unpack_from(self.format, *args)
if __name__ == '__main__':
    # Quick smoke test: pack/unpack an unsigned byte plus a float and
    # report the packed bytes and the computed struct size.
    t = pack('Bf', 1, 2)
    print(t, len(t))
    print(unpack('Bf', t))
    print(calcsize('Bf'))
| bsd-3-clause | 63293654e15a63fda3e6f021a53e91c6 | 31.197556 | 91 | 0.54722 | 3.480625 | false | false | false | false |
brython-dev/brython | www/src/Lib/_codecs_jp.py | 1 | 1174 | from encoding_cp932 import encoding_table, decoding_table # JS module in libs
### Codec APIs
class Codec:
    """cp932 codec built on the JS-provided mapping tables.

    encoding_table maps a code point to a one- or two-byte cp932 code;
    decoding_table maps a lead byte to -1 (start of a double-byte
    sequence) or directly to a code point, and maps the combined
    256*lead+trail value of double-byte sequences to a code point.
    """
    def encode(self, input, errors='strict'):
        b = []
        for pos, car in enumerate(input):
            cp = ord(car)
            try:
                code = encoding_table[cp]
                high = ((code >> 8) & 0xff)
                low = code & 0xff
                # Single-byte codes have high == 0; emit the lead byte
                # only for double-byte codes.
                if high:
                    b.append(high)
                b.append(low)
            except IndexError:
                # NOTE(review): UnicodeEncodeError normally requires
                # (encoding, object, start, end, reason); this single-arg
                # call relies on Brython-specific behavior — confirm.
                raise UnicodeEncodeError(pos)
        return [bytes(b), len(input)]
    def decode(self, input, errors='strict'):
        i = 0
        string = ''
        while i < len(input):
            dec = decoding_table[input[i]]
            if dec == -1:
                # Lead byte of a double-byte sequence: look up the pair
                # and consume the extra trail byte.
                b = 256 * input[i] + input[i + 1]
                try:
                    dec = decoding_table[b]
                    string += chr(dec)
                    i += 1
                except IndexError:
                    # NOTE(review): single-argument UnicodeDecodeError —
                    # see the matching note in encode().
                    raise UnicodeDecodeError(i)
            else:
                string += chr(dec)
            i += 1
        return [string, len(input)]
def getcodec(*args,**kw):
    # The codecs machinery passes the encoding name along; a single
    # stateless Codec class serves every request.
    return Codec
| bsd-3-clause | c57e21f6d122d829413702e11608589c | 27.634146 | 77 | 0.441227 | 4.463878 | false | false | false | false |
brython-dev/brython | www/src/Lib/test/test_urllibnet.py | 4 | 9547 | import unittest
from test import support
from test.support import os_helper
from test.support import socket_helper
import contextlib
import socket
import urllib.parse
import urllib.request
import os
import email.message
import time
support.requires('network')
class URLTimeoutTest(unittest.TestCase):
    # XXX this test doesn't seem to test anything useful.

    def setUp(self):
        # Bound every socket operation so a stalled network cannot hang CI.
        socket.setdefaulttimeout(support.INTERNET_TIMEOUT)

    def tearDown(self):
        # Restore the process-wide default (no timeout).
        socket.setdefaulttimeout(None)

    def testURLread(self):
        # clear _opener global variable
        self.addCleanup(urllib.request.urlcleanup)
        domain = urllib.parse.urlparse(support.TEST_HTTP_URL).netloc
        with socket_helper.transient_internet(domain):
            f = urllib.request.urlopen(support.TEST_HTTP_URL)
            f.read()
class urlopenNetworkTests(unittest.TestCase):
    """Tests urllib.request.urlopen using the network.

    These tests are not exhaustive.  Assuming that testing using files does a
    good job overall of some of the basic interface features.  There are no
    tests exercising the optional 'data' and 'proxies' arguments.  No tests
    for transparent redirection have been written.

    setUp is not used for always constructing a connection to
    http://www.pythontest.net/ since there a few tests that don't use that address
    and making a connection is expensive enough to warrant minimizing unneeded
    connections.
    """

    # Shared target URL for most of the tests below.
    url = 'http://www.pythontest.net/'

    def setUp(self):
        # clear _opener global variable
        self.addCleanup(urllib.request.urlcleanup)

    @contextlib.contextmanager
    def urlopen(self, *args, **kwargs):
        # Helper: open args[0] inside transient_internet() so network
        # flakiness is reported appropriately, and always close the response.
        resource = args[0]
        with socket_helper.transient_internet(resource):
            r = urllib.request.urlopen(*args, **kwargs)
            try:
                yield r
            finally:
                r.close()

    def test_basic(self):
        # Simple test expected to pass.
        with self.urlopen(self.url) as open_url:
            for attr in ("read", "readline", "readlines", "fileno", "close",
                         "info", "geturl"):
                self.assertTrue(hasattr(open_url, attr), "object returned from "
                                "urlopen lacks the %s attribute" % attr)
            self.assertTrue(open_url.read(), "calling 'read' failed")

    def test_readlines(self):
        # Test both readline and readlines.
        with self.urlopen(self.url) as open_url:
            self.assertIsInstance(open_url.readline(), bytes,
                                  "readline did not return a string")
            self.assertIsInstance(open_url.readlines(), list,
                                  "readlines did not return a list")

    def test_info(self):
        # Test 'info'.
        with self.urlopen(self.url) as open_url:
            info_obj = open_url.info()
            self.assertIsInstance(info_obj, email.message.Message,
                                  "object returned by 'info' is not an "
                                  "instance of email.message.Message")
            self.assertEqual(info_obj.get_content_subtype(), "html")

    def test_geturl(self):
        # Make sure same URL as opened is returned by geturl.
        with self.urlopen(self.url) as open_url:
            gotten_url = open_url.geturl()
            self.assertEqual(gotten_url, self.url)

    def test_getcode(self):
        # test getcode() with the fancy opener to get 404 error codes
        URL = self.url + "XXXinvalidXXX"
        with socket_helper.transient_internet(URL):
            # FancyURLopener is deprecated; the warning is expected here.
            with self.assertWarns(DeprecationWarning):
                open_url = urllib.request.FancyURLopener().open(URL)
            try:
                code = open_url.getcode()
            finally:
                open_url.close()
            self.assertEqual(code, 404)

    def test_bad_address(self):
        # Make sure proper exception is raised when connecting to a bogus
        # address.

        # Given that both VeriSign and various ISPs have in
        # the past or are presently hijacking various invalid
        # domain name requests in an attempt to boost traffic
        # to their own sites, finding a domain name to use
        # for this test is difficult.  RFC2606 leads one to
        # believe that '.invalid' should work, but experience
        # seemed to indicate otherwise.  Single character
        # TLDs are likely to remain invalid, so this seems to
        # be the best choice. The trailing '.' prevents a
        # related problem: The normal DNS resolver appends
        # the domain names from the search path if there is
        # no '.' the end and, and if one of those domains
        # implements a '*' rule a result is returned.
        # However, none of this will prevent the test from
        # failing if the ISP hijacks all invalid domain
        # requests.  The real solution would be to be able to
        # parameterize the framework with a mock resolver.
        bogus_domain = "sadflkjsasf.i.nvali.d."
        try:
            socket.gethostbyname(bogus_domain)
        except OSError:
            # socket.gaierror is too narrow, since getaddrinfo() may also
            # fail with EAI_SYSTEM and ETIMEDOUT (seen on Ubuntu 13.04),
            # i.e. Python's TimeoutError.
            pass
        else:
            # This happens with some overzealous DNS providers such as OpenDNS
            self.skipTest("%r should not resolve for test to work" % bogus_domain)
        failure_explanation = ('opening an invalid URL did not raise OSError; '
                               'can be caused by a broken DNS server '
                               '(e.g. returns 404 or hijacks page)')
        with self.assertRaises(OSError, msg=failure_explanation):
            urllib.request.urlopen("http://{}/".format(bogus_domain))
class urlretrieveNetworkTests(unittest.TestCase):
    """Tests urllib.request.urlretrieve using the network."""

    def setUp(self):
        # remove temporary files created by urlretrieve()
        self.addCleanup(urllib.request.urlcleanup)

    @contextlib.contextmanager
    def urlretrieve(self, *args, **kwargs):
        # Helper: run urlretrieve inside transient_internet() and always
        # delete the downloaded temporary file afterwards.
        resource = args[0]
        with socket_helper.transient_internet(resource):
            file_location, info = urllib.request.urlretrieve(*args, **kwargs)
            try:
                yield file_location, info
            finally:
                os_helper.unlink(file_location)

    def test_basic(self):
        # Test basic functionality.
        with self.urlretrieve(self.logo) as (file_location, info):
            self.assertTrue(os.path.exists(file_location), "file location returned by"
                            " urlretrieve is not a valid path")
            with open(file_location, 'rb') as f:
                self.assertTrue(f.read(), "reading from the file location returned"
                                " by urlretrieve failed")

    def test_specified_path(self):
        # Make sure that specifying the location of the file to write to works.
        with self.urlretrieve(self.logo,
                              os_helper.TESTFN) as (file_location, info):
            self.assertEqual(file_location, os_helper.TESTFN)
            self.assertTrue(os.path.exists(file_location))
            with open(file_location, 'rb') as f:
                self.assertTrue(f.read(), "reading from temporary file failed")

    def test_header(self):
        # Make sure header returned as 2nd value from urlretrieve is good.
        with self.urlretrieve(self.logo) as (file_location, info):
            self.assertIsInstance(info, email.message.Message,
                                  "info is not an instance of email.message.Message")

    # Class attribute used by all the tests above and below (defined
    # mid-class in the original layout).  NOTE(review): despite the name,
    # it points at the site root, not an image -- the tests only need any
    # retrievable resource.
    logo = "http://www.pythontest.net/"

    def test_data_header(self):
        with self.urlretrieve(self.logo) as (file_location, fileheaders):
            datevalue = fileheaders.get('Date')
            dateformat = '%a, %d %b %Y %H:%M:%S GMT'
            try:
                time.strptime(datevalue, dateformat)
            except ValueError:
                self.fail('Date value not in %r format' % dateformat)

    def test_reporthook(self):
        records = []

        def recording_reporthook(blocks, block_size, total_size):
            # Capture every progress callback for later inspection.
            records.append((blocks, block_size, total_size))

        with self.urlretrieve(self.logo, reporthook=recording_reporthook) as (
                file_location, fileheaders):
            expected_size = int(fileheaders['Content-Length'])

        records_repr = repr(records)  # For use in error messages.
        self.assertGreater(len(records), 1, msg="There should always be two "
                           "calls; the first one before the transfer starts.")
        self.assertEqual(records[0][0], 0)
        self.assertGreater(records[0][1], 0,
                           msg="block size can't be 0 in %s" % records_repr)
        self.assertEqual(records[0][2], expected_size)
        self.assertEqual(records[-1][2], expected_size)

        block_sizes = {block_size for _, block_size, _ in records}
        self.assertEqual({records[0][1]}, block_sizes,
                         msg="block sizes in %s must be equal" % records_repr)
        self.assertGreaterEqual(records[-1][0]*records[0][1], expected_size,
                                msg="number of blocks * block size must be"
                                " >= total size in %s" % records_repr)
if __name__ == "__main__":
    # Run all the network test classes when executed as a script.
    unittest.main()
"""
Basic statistics module.
This module provides functions for calculating statistics of data, including
averages, variance, and standard deviation.
Calculating averages
--------------------
================== ==================================================
Function Description
================== ==================================================
mean Arithmetic mean (average) of data.
fmean Fast, floating point arithmetic mean.
geometric_mean Geometric mean of data.
harmonic_mean Harmonic mean of data.
median Median (middle value) of data.
median_low Low median of data.
median_high High median of data.
median_grouped Median, or 50th percentile, of grouped data.
mode Mode (most common value) of data.
multimode List of modes (most common values of data).
quantiles Divide data into intervals with equal probability.
================== ==================================================
Calculate the arithmetic mean ("the average") of data:
>>> mean([-1.0, 2.5, 3.25, 5.75])
2.625
Calculate the standard median of discrete data:
>>> median([2, 3, 4, 5])
3.5
Calculate the median, or 50th percentile, of data grouped into class intervals
centred on the data values provided. E.g. if your data points are rounded to
the nearest whole number:
>>> median_grouped([2, 2, 3, 3, 3, 4]) #doctest: +ELLIPSIS
2.8333333333...
This should be interpreted in this way: you have two data points in the class
interval 1.5-2.5, three data points in the class interval 2.5-3.5, and one in
the class interval 3.5-4.5. The median of these data points is 2.8333...
Calculating variability or spread
---------------------------------
================== =============================================
Function Description
================== =============================================
pvariance Population variance of data.
variance Sample variance of data.
pstdev Population standard deviation of data.
stdev Sample standard deviation of data.
================== =============================================
Calculate the standard deviation of sample data:
>>> stdev([2.5, 3.25, 5.5, 11.25, 11.75]) #doctest: +ELLIPSIS
4.38961843444...
If you have previously calculated the mean, you can pass it as the optional
second argument to the four "spread" functions to avoid recalculating it:
>>> data = [1, 2, 2, 4, 4, 4, 5, 6]
>>> mu = mean(data)
>>> pvariance(data, mu)
2.5
Statistics for relations between two inputs
-------------------------------------------
================== ====================================================
Function Description
================== ====================================================
covariance Sample covariance for two variables.
correlation Pearson's correlation coefficient for two variables.
linear_regression Intercept and slope for simple linear regression.
================== ====================================================
Calculate covariance, Pearson's correlation, and simple linear regression
for two inputs:
>>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> y = [1, 2, 3, 1, 2, 3, 1, 2, 3]
>>> covariance(x, y)
0.75
>>> correlation(x, y) #doctest: +ELLIPSIS
0.31622776601...
>>> linear_regression(x, y) #doctest:
LinearRegression(slope=0.1, intercept=1.5)
Exceptions
----------
A single exception is defined: StatisticsError is a subclass of ValueError.
"""
# Public API exported by ``from statistics import *`` (alphabetical order).
__all__ = [
    'NormalDist',
    'StatisticsError',
    'correlation',
    'covariance',
    'fmean',
    'geometric_mean',
    'harmonic_mean',
    'linear_regression',
    'mean',
    'median',
    'median_grouped',
    'median_high',
    'median_low',
    'mode',
    'multimode',
    'pstdev',
    'pvariance',
    'quantiles',
    'stdev',
    'variance',
]
import math
import numbers
import random
import sys
from fractions import Fraction
from decimal import Decimal
from itertools import groupby, repeat
from bisect import bisect_left, bisect_right
from math import hypot, sqrt, fabs, exp, erf, tau, log, fsum
from functools import reduce
from operator import mul
from collections import Counter, namedtuple, defaultdict
# sqrt(2), precomputed once for reuse elsewhere in the module.
_SQRT2 = sqrt(2.0)
# === Exceptions ===
class StatisticsError(ValueError):
    """Raised by statistics functions when given invalid input."""
    pass
# === Private utilities ===
def _sum(data):
    """_sum(data) -> (type, sum, count)

    Add up *data* exactly, returning the total as a Fraction along with
    the common type the caller should convert back to and the number of
    items seen.  Mixing types (other than with int) raises TypeError via
    _coerce().  A non-finite value (NAN or INF) short-circuits the exact
    arithmetic and is returned as-is.

    >>> _sum([3, 2.25, 4.5, -0.5, 0.25])
    (<class 'float'>, Fraction(19, 2), 5)
    """
    n_items = 0
    seen_types = set()
    note_type = seen_types.add
    # Map each denominator to the running sum of numerators over it; this
    # keeps all arithmetic in exact integers until the very end.  A key of
    # None marks a non-finite value (see _exact_ratio).
    by_denominator = {}
    lookup = by_denominator.get
    for typ, group in groupby(data, type):
        note_type(typ)
        for num, den in map(_exact_ratio, group):
            n_items += 1
            by_denominator[den] = lookup(den, 0) + num
    if None in by_denominator:
        # A NAN or INF was seen; every finite partial sum is irrelevant.
        total = by_denominator[None]
        assert not _isfinite(total)
    else:
        # Combine the exact per-denominator partials with builtin sum.
        total = sum(Fraction(num, den) for den, num in by_denominator.items())
    T = reduce(_coerce, seen_types, int)  # or raise TypeError
    return (T, total, n_items)
def _ss(data, c=None):
    """Return the exact mean and sum of square deviations of sequence data.

    Calculations are done in a single pass, allowing the input to be an iterator.

    If given *c* is used the mean; otherwise, it is calculated from the data.
    Use the *c* argument with care, as it can lead to garbage results.
    """
    if c is not None:
        # Caller supplied the mean: a single pass over squared deviations.
        T, ssd, count = _sum((d := x - c) * d for x in data)
        return (T, ssd, c, count)
    count = 0
    types = set()
    types_add = types.add
    # Exact running sums of x and x**2, keyed by denominator (None marks a
    # non-finite value, see _exact_ratio).
    sx_partials = defaultdict(int)
    sxx_partials = defaultdict(int)
    for typ, values in groupby(data, type):
        types_add(typ)
        for n, d in map(_exact_ratio, values):
            count += 1
            sx_partials[d] += n
            sxx_partials[d] += n * n
    if not count:
        ssd = c = Fraction(0)
    elif None in sx_partials:
        # The sum will be a NAN or INF. We can ignore all the finite
        # partials, and just look at this special one.
        ssd = c = sx_partials[None]
        assert not _isfinite(ssd)
    else:
        sx = sum(Fraction(n, d) for d, n in sx_partials.items())
        sxx = sum(Fraction(n, d*d) for d, n in sxx_partials.items())
        # This formula has poor numeric properties for floats,
        # but with fractions it is exact.
        ssd = (count * sxx - sx * sx) / count
        c = sx / count
    T = reduce(_coerce, types, int)  # or raise TypeError
    return (T, ssd, c, count)
def _isfinite(x):
try:
return x.is_finite() # Likely a Decimal.
except AttributeError:
return math.isfinite(x) # Coerces to float first.
def _coerce(T, S):
"""Coerce types T and S to a common type, or raise TypeError.
Coercion rules are currently an implementation detail. See the CoerceTest
test class in test_statistics for details.
"""
# See http://bugs.python.org/issue24068.
assert T is not bool, "initial type T is bool"
# If the types are the same, no need to coerce anything. Put this
# first, so that the usual case (no coercion needed) happens as soon
# as possible.
if T is S: return T
# Mixed int & other coerce to the other type.
if S is int or S is bool: return T
if T is int: return S
# If one is a (strict) subclass of the other, coerce to the subclass.
if issubclass(S, T): return S
if issubclass(T, S): return T
# Ints coerce to the other type.
if issubclass(T, int): return S
if issubclass(S, int): return T
# Mixed fraction & float coerces to float (or float subclass).
if issubclass(T, Fraction) and issubclass(S, float):
return S
if issubclass(T, float) and issubclass(S, Fraction):
return T
# Any other combination is disallowed.
msg = "don't know how to coerce %s and %s"
raise TypeError(msg % (T.__name__, S.__name__))
def _exact_ratio(x):
"""Return Real number x to exact (numerator, denominator) pair.
>>> _exact_ratio(0.25)
(1, 4)
x is expected to be an int, Fraction, Decimal or float.
"""
# XXX We should revisit whether using fractions to accumulate exact
# ratios is the right way to go.
# The integer ratios for binary floats can have numerators or
# denominators with over 300 decimal digits. The problem is more
# acute with decimal floats where the default decimal context
# supports a huge range of exponents from Emin=-999999 to
# Emax=999999. When expanded with as_integer_ratio(), numbers like
# Decimal('3.14E+5000') and Decimal('3.14E-5000') have large
# numerators or denominators that will slow computation.
# When the integer ratios are accumulated as fractions, the size
# grows to cover the full range from the smallest magnitude to the
# largest. For example, Fraction(3.14E+300) + Fraction(3.14E-300),
# has a 616 digit numerator. Likewise,
# Fraction(Decimal('3.14E+5000')) + Fraction(Decimal('3.14E-5000'))
# has 10,003 digit numerator.
# This doesn't seem to have been problem in practice, but it is a
# potential pitfall.
try:
return x.as_integer_ratio()
except AttributeError:
pass
except (OverflowError, ValueError):
# float NAN or INF.
assert not _isfinite(x)
return (x, None)
try:
# x may be an Integral ABC.
return (x.numerator, x.denominator)
except AttributeError:
msg = f"can't convert type '{type(x).__name__}' to numerator/denominator"
raise TypeError(msg)
def _convert(value, T):
"""Convert value to given numeric type T."""
if type(value) is T:
# This covers the cases where T is Fraction, or where value is
# a NAN or INF (Decimal or float).
return value
if issubclass(T, int) and value.denominator != 1:
T = float
try:
# FIXME: what do we do if this overflows?
return T(value)
except TypeError:
if issubclass(T, Decimal):
return T(value.numerator) / T(value.denominator)
else:
raise
def _fail_neg(values, errmsg='negative value'):
"""Iterate over values, failing if any are less than zero."""
for x in values:
if x < 0:
raise StatisticsError(errmsg)
yield x
def _integer_sqrt_of_frac_rto(n: int, m: int) -> int:
"""Square root of n/m, rounded to the nearest integer using round-to-odd."""
# Reference: https://www.lri.fr/~melquion/doc/05-imacs17_1-expose.pdf
a = math.isqrt(n // m)
return a | (a*a*m != n)
# For 53 bit precision floats, the bit width used in
# _float_sqrt_of_frac() is 109.  The extra bits beyond twice the float
# mantissa width let the round-to-odd integer square root be converted
# to a correctly rounded float (see https://bugs.python.org/msg407078).
_sqrt_bit_width: int = 2 * sys.float_info.mant_dig + 3
def _float_sqrt_of_frac(n: int, m: int) -> float:
    """Square root of n/m as a float, correctly rounded."""
    # See principle and proof sketch at: https://bugs.python.org/msg407078
    # Scale the fraction so the integer square root carries enough bits,
    # then undo the scaling; the final division converts to float.
    shift = (n.bit_length() - m.bit_length() - _sqrt_bit_width) // 2
    if shift >= 0:
        return (_integer_sqrt_of_frac_rto(n, m << 2 * shift) << shift) / 1
    return _integer_sqrt_of_frac_rto(n << -2 * shift, m) / (1 << -shift)
def _decimal_sqrt_of_frac(n: int, m: int) -> Decimal:
"""Square root of n/m as a Decimal, correctly rounded."""
# Premise: For decimal, computing (n/m).sqrt() can be off
# by 1 ulp from the correctly rounded result.
# Method: Check the result, moving up or down a step if needed.
if n <= 0:
if not n:
return Decimal('0.0')
n, m = -n, -m
root = (Decimal(n) / Decimal(m)).sqrt()
nr, dr = root.as_integer_ratio()
plus = root.next_plus()
np, dp = plus.as_integer_ratio()
# test: n / m > ((root + plus) / 2) ** 2
if 4 * n * (dr*dp)**2 > m * (dr*np + dp*nr)**2:
return plus
minus = root.next_minus()
nm, dm = minus.as_integer_ratio()
# test: n / m < ((root + minus) / 2) ** 2
if 4 * n * (dr*dm)**2 < m * (dr*nm + dm*nr)**2:
return minus
return root
# === Measures of central tendency (averages) ===
def mean(data):
    """Return the sample arithmetic mean of data.

    >>> mean([1, 2, 3, 4, 4])
    2.8

    Fractions and Decimals are supported and preserved in the result.
    Raises StatisticsError if ``data`` is empty.
    """
    item_type, total, count = _sum(data)
    if count < 1:
        raise StatisticsError('mean requires at least one data point')
    return _convert(total / count, item_type)
def fmean(data, weights=None):
    """Convert data to floats and compute the arithmetic mean.

    This runs faster than the mean() function and it always returns a float.
    An optional *weights* sequence computes a weighted average instead.
    If the input dataset is empty, it raises a StatisticsError.

    >>> fmean([3.5, 4.0, 5.25])
    4.25
    """
    try:
        n = len(data)
    except TypeError:
        # The input is an iterator with no __len__(); wrap it so the item
        # count is captured as it is consumed.
        n = 0
        def counted(iterable):
            nonlocal n
            for n, item in enumerate(iterable, start=1):
                yield item
        data = counted(data)
    if weights is None:
        total = fsum(data)
        if not n:
            raise StatisticsError('fmean requires at least one data point')
        return total / n
    try:
        num_weights = len(weights)
    except TypeError:
        weights = list(weights)
        num_weights = len(weights)
    # Consume data first so the counting wrapper (if any) has updated n.
    weighted_total = fsum(map(mul, data, weights))
    if n != num_weights:
        raise StatisticsError('data and weights must be the same length')
    weight_sum = fsum(weights)
    if not weight_sum:
        raise StatisticsError('sum of weights must be non-zero')
    return weighted_total / weight_sum
def geometric_mean(data):
    """Convert data to floats and compute the geometric mean.

    Raises a StatisticsError if the input dataset is empty,
    if it contains a zero, or if it contains a negative value.

    No special efforts are made to achieve exact results.
    (However, this may change in the future.)

    >>> round(geometric_mean([54, 24, 36]), 9)
    36.0
    """
    # exp(mean(log(x))): log() raises ValueError for zero or negative
    # inputs, and fmean() raises StatisticsError (a ValueError subclass)
    # on an empty dataset -- both funnel into the same error below.
    try:
        log_mean = fmean(map(log, data))
    except ValueError:
        raise StatisticsError('geometric mean requires a non-empty dataset '
                              'containing positive numbers') from None
    return exp(log_mean)
def harmonic_mean(data, weights=None):
    """Return the harmonic mean of data.

    The harmonic mean is the reciprocal of the arithmetic mean of the
    reciprocals of the data.  It can be used for averaging ratios or
    rates, for example speeds.

    Suppose a car travels 40 km/hr for 5 km and then speeds-up to
    60 km/hr for another 5 km. What is the average speed?

    >>> harmonic_mean([40, 60])
    48.0

    Suppose a car travels 40 km/hr for 5 km, and when traffic clears,
    speeds-up to 60 km/hr for the remaining 30 km of the journey. What
    is the average speed?

    >>> harmonic_mean([40, 60], weights=[5, 30])
    56.0

    If ``data`` is empty, or any element is less than zero,
    ``harmonic_mean`` will raise ``StatisticsError``.
    """
    if iter(data) is data:
        # *data* is an iterator: materialize it, since len() and possibly
        # a second pass are needed below.
        data = list(data)
    errmsg = 'harmonic mean does not support negative values'
    n = len(data)
    if n < 1:
        raise StatisticsError('harmonic_mean requires at least one data point')
    elif n == 1 and weights is None:
        # Fast path: the harmonic mean of a single value is that value.
        x = data[0]
        if isinstance(x, (numbers.Real, Decimal)):
            if x < 0:
                raise StatisticsError(errmsg)
            return x
        else:
            raise TypeError('unsupported type')
    if weights is None:
        # Unweighted: every point counts once.
        weights = repeat(1, n)
        sum_weights = n
    else:
        if iter(weights) is weights:
            weights = list(weights)
        if len(weights) != n:
            raise StatisticsError('Number of weights does not match data size')
        # _fail_neg rejects negative weights while _sum totals them exactly.
        _, sum_weights, _ = _sum(w for w in _fail_neg(weights, errmsg))
    try:
        data = _fail_neg(data, errmsg)
        # A zero weight contributes 0 regardless of its data point.
        T, total, count = _sum(w / x if w else 0 for w, x in zip(weights, data))
    except ZeroDivisionError:
        # A zero data point with a non-zero weight: the harmonic mean is 0.
        return 0
    if total <= 0:
        raise StatisticsError('Weighted sum must be positive')
    return _convert(sum_weights / total, T)
# FIXME: investigate ways to calculate medians without sorting? Quickselect?
def median(data):
    """Return the median (middle value) of numeric data.

    For an odd number of data points the middle point is returned; for an
    even number the result is interpolated as the mean of the two middle
    values:

    >>> median([1, 3, 5])
    3
    >>> median([1, 3, 5, 7])
    4.0
    """
    values = sorted(data)
    n = len(values)
    if not n:
        raise StatisticsError("no median for empty data")
    mid = n // 2
    if n & 1:
        return values[mid]
    return (values[mid - 1] + values[mid]) / 2
def median_low(data):
    """Return the low median of numeric data.

    The middle value for an odd number of points; the smaller of the two
    middle values for an even number:

    >>> median_low([1, 3, 5])
    3
    >>> median_low([1, 3, 5, 7])
    3
    """
    values = sorted(data)
    n = len(values)
    if not n:
        raise StatisticsError("no median for empty data")
    mid = n // 2
    return values[mid] if n & 1 else values[mid - 1]
def median_high(data):
    """Return the high median of data.

    The middle value for an odd number of points; the larger of the two
    middle values for an even number:

    >>> median_high([1, 3, 5])
    3
    >>> median_high([1, 3, 5, 7])
    5
    """
    values = sorted(data)
    if not values:
        raise StatisticsError("no median for empty data")
    return values[len(values) // 2]
def median_grouped(data, interval=1.0):
    """Estimate the median of numeric data binned around the midpoints
    of consecutive, fixed-width intervals.

    Each value in *data* must be exactly the midpoint of its bin, and
    *interval* is the width of every bin.  At least one value must be
    present.  The caller is responsible for ensuring the data points are
    separated by exact multiples of *interval*; this precondition is not
    checked.  Inputs may be any numeric type coercible to float.

    >>> round(median_grouped([2, 2, 3, 3, 3, 4]), 4)
    2.8333
    """
    data = sorted(data)
    n = len(data)
    if not n:
        raise StatisticsError("no median for empty data")
    # The value at the midpoint identifies the median's class interval.
    midpoint_value = data[n // 2]
    # Locate the run of equal values with two O(log n) bisections:
    # every occurrence of midpoint_value lies in data[lo:hi].
    lo = bisect_left(data, midpoint_value)
    hi = bisect_right(data, midpoint_value, lo=lo)
    # Coerce to floats, raising a TypeError if not possible.
    try:
        interval = float(interval)
        midpoint_value = float(midpoint_value)
    except ValueError:
        raise TypeError(f'Value cannot be converted to a float')
    # Standard interpolation formula for the median of grouped data:
    # https://www.cuemath.com/data/median-of-grouped-data/
    lower_limit = midpoint_value - interval / 2.0  # start of the median bin
    cumulative = lo       # frequency below the median interval
    frequency = hi - lo   # frequency within the median interval
    return lower_limit + interval * (n / 2 - cumulative) / frequency
def mode(data):
    """Return the single most common data point from discrete or nominal data.

    >>> mode([1, 1, 2, 3, 3, 3, 3, 4])
    3

    Nominal (non-numeric) data works too, and ties go to the first mode
    encountered:

    >>> mode(['red', 'red', 'green', 'blue', 'blue'])
    'red'

    Raises StatisticsError if *data* is empty.
    """
    # most_common(1) returns [] for an empty Counter, hence the IndexError.
    try:
        return Counter(iter(data)).most_common(1)[0][0]
    except IndexError:
        raise StatisticsError('no mode for empty data') from None
def multimode(data):
    """Return a list of the most frequently occurring values.

    Returns several values when they tie for the highest count, and an
    empty list when *data* is empty.

    >>> multimode('aabbbbccddddeeffffgg')
    ['b', 'd', 'f']
    >>> multimode('')
    []
    """
    tallies = Counter(iter(data))
    if not tallies:
        return []
    top = max(tallies.values())
    return [value for value, tally in tallies.items() if tally == top]
# Notes on methods for computing quantiles
# ----------------------------------------
#
# There is no one perfect way to compute quantiles. Here we offer
# two methods that serve common needs. Most other packages
# surveyed offered at least one or both of these two, making them
# "standard" in the sense of "widely-adopted and reproducible".
# They are also easy to explain, easy to compute manually, and have
# straight-forward interpretations that aren't surprising.
# The default method is known as "R6", "PERCENTILE.EXC", or "expected
# value of rank order statistics". The alternative method is known as
# "R7", "PERCENTILE.INC", or "mode of rank order statistics".
# For sample data where there is a positive probability for values
# beyond the range of the data, the R6 exclusive method is a
# reasonable choice. Consider a random sample of nine values from a
# population with a uniform distribution from 0.0 to 1.0. The
# distribution of the third ranked sample point is described by
# betavariate(alpha=3, beta=7) which has mode=0.250, median=0.286, and
# mean=0.300. Only the latter (which corresponds with R6) gives the
# desired cut point with 30% of the population falling below that
# value, making it comparable to a result from an inv_cdf() function.
# The R6 exclusive method is also idempotent.
# For describing population data where the end points are known to
# be included in the data, the R7 inclusive method is a reasonable
# choice. Instead of the mean, it uses the mode of the beta
# distribution for the interior points. Per Hyndman & Fan, "One nice
# property is that the vertices of Q7(p) divide the range into n - 1
# intervals, and exactly 100p% of the intervals lie to the left of
# Q7(p) and 100(1 - p)% of the intervals lie to the right of Q7(p)."
# If needed, other methods could be added. However, for now, the
# position is that fewer options make for easier choices and that
# external packages can be used for anything more advanced.
def quantiles(data, *, n=4, method='exclusive'):
    """Divide *data* into *n* continuous intervals with equal probability.

    Returns a list of (n - 1) cut points separating the intervals:
    n=4 gives quartiles (the default), n=10 deciles and n=100
    percentiles.  Cut points are linearly interpolated between data
    points.

    The default 'exclusive' method treats *data* as a sample that may
    contain values more extreme than those seen.  With
    method='inclusive', *data* is treated as full population data whose
    minimum and maximum are the 0th and 100th percentiles.
    """
    if n < 1:
        raise StatisticsError('n must be at least 1')
    data = sorted(data)
    ld = len(data)
    if ld < 2:
        raise StatisticsError('must have at least two data points')
    if method == 'inclusive':
        m = ld - 1
        cut_points = []
        for i in range(1, n):
            j, delta = divmod(i * m, n)
            cut_points.append((data[j] * (n - delta) + data[j + 1] * delta) / n)
        return cut_points
    if method == 'exclusive':
        m = ld + 1
        cut_points = []
        for i in range(1, n):
            j = i * m // n                 # rescale i to m/n
            j = min(max(j, 1), ld - 1)     # clamp to 1 .. ld-1
            delta = i * m - j * n          # exact integer math
            cut_points.append((data[j - 1] * (n - delta) + data[j] * delta) / n)
        return cut_points
    raise ValueError(f'Unknown method: {method!r}')
# === Measures of spread ===
# See http://mathworld.wolfram.com/Variance.html
# http://mathworld.wolfram.com/SampleVariance.html
def variance(data, xbar=None):
    """Return the sample variance of data.

    *data* must be an iterable of at least two Real-valued numbers.  If
    *xbar* is given it must be the mean of the data; when omitted the
    mean is computed automatically.  No check is made that *xbar* really
    is the mean -- arbitrary values give garbage results.

    Use this function when your data is a sample from a population; for
    the whole population see ``pvariance``.  Decimals and Fractions are
    supported and preserved in the result.

    >>> variance([2.75, 1.75, 1.25, 0.25, 0.5, 1.25, 3.5])
    1.3720238095238095
    """
    item_type, ssd, _, count = _ss(data, xbar)
    if count < 2:
        raise StatisticsError('variance requires at least two data points')
    # Bessel's correction: divide by n - 1 for a sample.
    return _convert(ssd / (count - 1), item_type)
def pvariance(data, mu=None):
    """Return the population variance of ``data``.

    *data* must be an iterable of at least one Real-valued number.  If
    *mu* is given it must be the mean of the data; when omitted the mean
    is computed automatically.

    Use this function when *data* is the entire population; to estimate
    the variance from a sample, ``variance`` is usually a better choice.
    Decimals and Fractions are supported and preserved in the result.

    >>> pvariance([0.0, 0.25, 0.25, 1.25, 1.5, 1.75, 2.75, 3.25])
    1.25
    """
    item_type, ssd, _, count = _ss(data, mu)
    if count < 1:
        raise StatisticsError('pvariance requires at least one data point')
    return _convert(ssd / count, item_type)
def stdev(data, xbar=None):
    """Return the square root of the sample variance.

    See ``variance`` for arguments and other details.

    >>> stdev([1.5, 2.5, 2.5, 2.75, 3.25, 4.75])
    1.0810874155219827
    """
    T, total_ss, _center, count = _ss(data, xbar)
    if count < 2:
        raise StatisticsError('stdev requires at least two data points')
    frac = total_ss / (count - 1)
    # Decimal data gets an exact-Decimal square root; everything else is
    # produced through a correctly-rounded float square root of the fraction.
    if issubclass(T, Decimal):
        return _decimal_sqrt_of_frac(frac.numerator, frac.denominator)
    return _float_sqrt_of_frac(frac.numerator, frac.denominator)
def pstdev(data, mu=None):
    """Return the square root of the population variance.

    See ``pvariance`` for arguments and other details.

    >>> pstdev([1.5, 2.5, 2.5, 2.75, 3.25, 4.75])
    0.986893273527251
    """
    T, total_ss, _center, count = _ss(data, mu)
    if count < 1:
        raise StatisticsError('pstdev requires at least one data point')
    frac = total_ss / count
    # Decimal data gets an exact-Decimal square root; everything else is
    # produced through a correctly-rounded float square root of the fraction.
    if issubclass(T, Decimal):
        return _decimal_sqrt_of_frac(frac.numerator, frac.denominator)
    return _float_sqrt_of_frac(frac.numerator, frac.denominator)
def _mean_stdev(data):
    """In one pass, compute the mean and sample standard deviation as floats."""
    T, total_ss, center, count = _ss(data)
    if count < 2:
        raise StatisticsError('stdev requires at least two data points')
    frac = total_ss / (count - 1)
    try:
        sd = _float_sqrt_of_frac(frac.numerator, frac.denominator)
    except AttributeError:
        # frac has no numerator/denominator (NaNs or Infs leaked through
        # the sums); degrade gracefully instead of raising.
        return float(center), float(center) / float(total_ss)
    return float(center), sd
# === Statistics for relations between two inputs ===
# See https://en.wikipedia.org/wiki/Covariance
# https://en.wikipedia.org/wiki/Pearson_correlation_coefficient
# https://en.wikipedia.org/wiki/Simple_linear_regression
def covariance(x, y, /):
    """Covariance

    Measure the joint variability of the paired inputs *x* and *y*, using
    the unbiased sample estimator (division by n - 1).  Both inputs must
    contain the same number of points, and at least two of them.

    >>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    >>> y = [1, 2, 3, 1, 2, 3, 1, 2, 3]
    >>> covariance(x, y)
    0.75
    >>> z = [9, 8, 7, 6, 5, 4, 3, 2, 1]
    >>> covariance(x, z)
    -7.5
    >>> covariance(z, x)
    -7.5
    """
    n = len(x)
    if len(y) != n:
        raise StatisticsError('covariance requires that both inputs have same number of data points')
    if n < 2:
        raise StatisticsError('covariance requires at least two data points')
    mean_x = fsum(x) / n
    mean_y = fsum(y) / n
    # fsum keeps the accumulated rounding error of the cross products small.
    cross = fsum((a - mean_x) * (b - mean_y) for a, b in zip(x, y))
    return cross / (n - 1)
def correlation(x, y, /):
    """Pearson's correlation coefficient

    Compute Pearson's *r* for the paired inputs *x* and *y*.  The result
    lies between -1 and +1: +1 indicates a perfect increasing linear
    relationship, -1 a perfect decreasing one, and 0 no linear
    relationship at all.

    >>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    >>> y = [9, 8, 7, 6, 5, 4, 3, 2, 1]
    >>> correlation(x, x)
    1.0
    >>> correlation(x, y)
    -1.0
    """
    n = len(x)
    if len(y) != n:
        raise StatisticsError('correlation requires that both inputs have same number of data points')
    if n < 2:
        raise StatisticsError('correlation requires at least two data points')
    mean_x = fsum(x) / n
    mean_y = fsum(y) / n
    # Center both inputs once, then form the three sums of products.
    dx = [a - mean_x for a in x]
    dy = [b - mean_y for b in y]
    sxy = fsum(a * b for a, b in zip(dx, dy))
    sxx = fsum(a * a for a in dx)
    syy = fsum(b * b for b in dy)
    try:
        return sxy / sqrt(sxx * syy)
    except ZeroDivisionError:
        raise StatisticsError('at least one of the inputs is constant')
LinearRegression = namedtuple('LinearRegression', ('slope', 'intercept'))


def linear_regression(x, y, /, *, proportional=False):
    """Slope and intercept for simple linear regression.

    Estimate the parameters of

        y = slope * x + intercept + noise

    by ordinary least squares, where *x* is the independent variable and
    *y* the dependent variable.  The noise term is the variability the
    linear model leaves unexplained.  The result is returned as a
    ``LinearRegression`` named tuple.

    If *proportional* is true, *x* and *y* are assumed to be directly
    proportional: the line is forced through the origin, the model
    simplifies to ``y = slope * x + noise``, and *intercept* is 0.0.
    """
    n = len(x)
    if len(y) != n:
        raise StatisticsError('linear regression requires that both inputs have same number of data points')
    if n < 2:
        raise StatisticsError('linear regression requires at least two data points')
    if proportional:
        # Fit through the origin: raw (uncentered) sums of products.
        sxy = fsum(a * b for a, b in zip(x, y))
        sxx = fsum(a * a for a in x)
    else:
        mean_x = fsum(x) / n
        mean_y = fsum(y) / n
        sxy = fsum((a - mean_x) * (b - mean_y) for a, b in zip(x, y))
        sxx = fsum((d := a - mean_x) * d for a in x)
    try:
        slope = sxy / sxx  # equivalent to: covariance(x, y) / variance(x)
    except ZeroDivisionError:
        raise StatisticsError('x is constant')
    intercept = 0.0 if proportional else mean_y - slope * mean_x
    return LinearRegression(slope=slope, intercept=intercept)
## Normal Distribution #####################################################
def _normal_dist_inv_cdf(p, mu, sigma):
    """Inverse CDF (quantile function) of the normal distribution.

    Map a probability *p* in (0.0, 1.0) to the x such that
    P(X <= x) == p for X ~ N(mu, sigma).  Argument validation is the
    caller's responsibility (see NormalDist.inv_cdf).
    """
    # There is no closed-form solution to the inverse CDF for the normal
    # distribution, so we use a rational approximation instead:
    # Wichura, M.J. (1988). "Algorithm AS241: The Percentage Points of the
    # Normal Distribution". Applied Statistics. Blackwell Publishing. 37
    # (3): 477–484. doi:10.2307/2347330. JSTOR 2347330.
    q = p - 0.5
    if fabs(q) <= 0.425:
        # Central region, 0.075 <= p <= 0.925: degree-7 rational polynomial
        # in r, evaluated with Horner's scheme.
        r = 0.180625 - q * q
        # Hash sum: 55.88319_28806_14901_4439
        num = (((((((2.50908_09287_30122_6727e+3 * r +
                     3.34305_75583_58812_8105e+4) * r +
                     6.72657_70927_00870_0853e+4) * r +
                     4.59219_53931_54987_1457e+4) * r +
                     1.37316_93765_50946_1125e+4) * r +
                     1.97159_09503_06551_4427e+3) * r +
                     1.33141_66789_17843_7745e+2) * r +
                     3.38713_28727_96366_6080e+0) * q
        den = (((((((5.22649_52788_52854_5610e+3 * r +
                     2.87290_85735_72194_2674e+4) * r +
                     3.93078_95800_09271_0610e+4) * r +
                     2.12137_94301_58659_5867e+4) * r +
                     5.39419_60214_24751_1077e+3) * r +
                     6.87187_00749_20579_0830e+2) * r +
                     4.23133_30701_60091_1252e+1) * r +
                     1.0)
        x = num / den
        return mu + (x * sigma)
    # Tail regions: switch variables to r = sqrt(-log(min(p, 1 - p))).
    r = p if q <= 0.0 else 1.0 - p
    r = sqrt(-log(r))
    if r <= 5.0:
        # Near tail: r <= 5.0, i.e. min(p, 1-p) down to about exp(-25).
        r = r - 1.6
        # Hash sum: 49.33206_50330_16102_89036
        num = (((((((7.74545_01427_83414_07640e-4 * r +
                     2.27238_44989_26918_45833e-2) * r +
                     2.41780_72517_74506_11770e-1) * r +
                     1.27045_82524_52368_38258e+0) * r +
                     3.64784_83247_63204_60504e+0) * r +
                     5.76949_72214_60691_40550e+0) * r +
                     4.63033_78461_56545_29590e+0) * r +
                     1.42343_71107_49683_57734e+0)
        den = (((((((1.05075_00716_44416_84324e-9 * r +
                     5.47593_80849_95344_94600e-4) * r +
                     1.51986_66563_61645_71966e-2) * r +
                     1.48103_97642_74800_74590e-1) * r +
                     6.89767_33498_51000_04550e-1) * r +
                     1.67638_48301_83803_84940e+0) * r +
                     2.05319_16266_37758_82187e+0) * r +
                     1.0)
    else:
        # Far tail: r > 5.0, extreme probabilities.
        r = r - 5.0
        # Hash sum: 47.52583_31754_92896_71629
        num = (((((((2.01033_43992_92288_13265e-7 * r +
                     2.71155_55687_43487_57815e-5) * r +
                     1.24266_09473_88078_43860e-3) * r +
                     2.65321_89526_57612_30930e-2) * r +
                     2.96560_57182_85048_91230e-1) * r +
                     1.78482_65399_17291_33580e+0) * r +
                     5.46378_49111_64114_36990e+0) * r +
                     6.65790_46435_01103_77720e+0)
        den = (((((((2.04426_31033_89939_78564e-15 * r +
                     1.42151_17583_16445_88870e-7) * r +
                     1.84631_83175_10054_68180e-5) * r +
                     7.86869_13114_56132_59100e-4) * r +
                     1.48753_61290_85061_48525e-2) * r +
                     1.36929_88092_27358_05310e-1) * r +
                     5.99832_20655_58879_37690e-1) * r +
                     1.0)
    x = num / den
    if q < 0.0:
        # The tail polynomials produce the upper-tail quantile; mirror it
        # for probabilities below one half.
        x = -x
    return mu + (x * sigma)
# If available, use C implementation
try:
from _statistics import _normal_dist_inv_cdf
except ImportError:
pass
class NormalDist:
    "Normal distribution of a random variable"
    # https://en.wikipedia.org/wiki/Normal_distribution
    # https://en.wikipedia.org/wiki/Variance#Properties
    # Dict-valued __slots__: suppresses the per-instance __dict__ while the
    # dict values serve as docstrings for the two slot descriptors.
    __slots__ = {
        '_mu': 'Arithmetic mean of a normal distribution',
        '_sigma': 'Standard deviation of a normal distribution',
    }
def __init__(self, mu=0.0, sigma=1.0):
"NormalDist where mu is the mean and sigma is the standard deviation."
if sigma < 0.0:
raise StatisticsError('sigma must be non-negative')
self._mu = float(mu)
self._sigma = float(sigma)
@classmethod
def from_samples(cls, data):
"Make a normal distribution instance from sample data."
return cls(*_mean_stdev(data))
def samples(self, n, *, seed=None):
"Generate *n* samples for a given mean and standard deviation."
gauss = random.gauss if seed is None else random.Random(seed).gauss
mu, sigma = self._mu, self._sigma
return [gauss(mu, sigma) for i in range(n)]
def pdf(self, x):
"Probability density function. P(x <= X < x+dx) / dx"
variance = self._sigma * self._sigma
if not variance:
raise StatisticsError('pdf() not defined when sigma is zero')
diff = x - self._mu
return exp(diff * diff / (-2.0 * variance)) / sqrt(tau * variance)
def cdf(self, x):
"Cumulative distribution function. P(X <= x)"
if not self._sigma:
raise StatisticsError('cdf() not defined when sigma is zero')
return 0.5 * (1.0 + erf((x - self._mu) / (self._sigma * _SQRT2)))
    def inv_cdf(self, p):
        """Inverse cumulative distribution function.  x : P(X <= x) = p

        Finds the value of the random variable such that the probability of
        the variable being less than or equal to that value equals the given
        probability.

        This function is also called the percent point function or quantile
        function.

        Raises StatisticsError if p is outside the open interval (0, 1)
        or if sigma is not strictly positive.
        """
        if p <= 0.0 or p >= 1.0:
            raise StatisticsError('p must be in the range 0.0 < p < 1.0')
        # NOTE(review): the message below says 'cdf()' even though it is
        # raised from inv_cdf -- looks like legacy wording; confirm before
        # changing the user-visible string.
        if self._sigma <= 0.0:
            raise StatisticsError('cdf() not defined when sigma at or below zero')
        return _normal_dist_inv_cdf(p, self._mu, self._sigma)
def quantiles(self, n=4):
"""Divide into *n* continuous intervals with equal probability.
Returns a list of (n - 1) cut points separating the intervals.
Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles.
Set *n* to 100 for percentiles which gives the 99 cuts points that
separate the normal distribution in to 100 equal sized groups.
"""
return [self.inv_cdf(i / n) for i in range(1, n)]
    def overlap(self, other):
        """Compute the overlapping coefficient (OVL) between two normal distributions.

        Measures the agreement between two normal probability distributions.
        Returns a value between 0.0 and 1.0 giving the overlapping area in
        the two underlying probability density functions.

        >>> N1 = NormalDist(2.4, 1.6)
        >>> N2 = NormalDist(3.2, 2.0)
        >>> N1.overlap(N2)
        0.8035050657330205
        """
        # See: "The overlapping coefficient as a measure of agreement between
        # probability distributions and point estimation of the overlap of two
        # normal densities" -- Henry F. Inman and Edwin L. Bradley Jr
        # http://dx.doi.org/10.1080/03610928908830127
        if not isinstance(other, NormalDist):
            raise TypeError('Expected another NormalDist instance')
        X, Y = self, other
        if (Y._sigma, Y._mu) < (X._sigma, X._mu):   # sort to assure commutativity
            X, Y = Y, X
        X_var, Y_var = X.variance, Y.variance
        if not X_var or not Y_var:
            raise StatisticsError('overlap() not defined when sigma is zero')
        dv = Y_var - X_var
        dm = fabs(Y._mu - X._mu)
        if not dv:
            # Equal variances: the two pdfs cross exactly once, midway
            # between the means, and the OVL reduces to a single erf term.
            return 1.0 - erf(dm / (2.0 * X._sigma * _SQRT2))
        # Unequal variances: the pdfs cross at two points x1 and x2 (the
        # roots of f_X(x) == f_Y(x)); the OVL is built from the CDF
        # differences at those crossings.
        a = X._mu * Y_var - Y._mu * X_var
        b = X._sigma * Y._sigma * sqrt(dm * dm + dv * log(Y_var / X_var))
        x1 = (a + b) / dv
        x2 = (a - b) / dv
        return 1.0 - (fabs(Y.cdf(x1) - X.cdf(x1)) + fabs(Y.cdf(x2) - X.cdf(x2)))
def zscore(self, x):
"""Compute the Standard Score. (x - mean) / stdev
Describes *x* in terms of the number of standard deviations
above or below the mean of the normal distribution.
"""
# https://www.statisticshowto.com/probability-and-statistics/z-score/
if not self._sigma:
raise StatisticsError('zscore() not defined when sigma is zero')
return (x - self._mu) / self._sigma
    # For a normal distribution the mean, median, and mode all coincide at
    # mu, so the three central-tendency properties below return self._mu.
    @property
    def mean(self):
        "Arithmetic mean of the normal distribution."
        return self._mu
    @property
    def median(self):
        "Return the median of the normal distribution"
        return self._mu
    @property
    def mode(self):
        """Return the mode of the normal distribution

        The mode is the value x at which the probability density
        function (pdf) takes its maximum value.
        """
        return self._mu
    @property
    def stdev(self):
        "Standard deviation of the normal distribution."
        return self._sigma
    @property
    def variance(self):
        "Square of the standard deviation."
        return self._sigma * self._sigma
def __add__(x1, x2):
"""Add a constant or another NormalDist instance.
If *other* is a constant, translate mu by the constant,
leaving sigma unchanged.
If *other* is a NormalDist, add both the means and the variances.
Mathematically, this works only if the two distributions are
independent or if they are jointly normally distributed.
"""
if isinstance(x2, NormalDist):
return NormalDist(x1._mu + x2._mu, hypot(x1._sigma, x2._sigma))
return NormalDist(x1._mu + x2, x1._sigma)
def __sub__(x1, x2):
"""Subtract a constant or another NormalDist instance.
If *other* is a constant, translate by the constant mu,
leaving sigma unchanged.
If *other* is a NormalDist, subtract the means and add the variances.
Mathematically, this works only if the two distributions are
independent or if they are jointly normally distributed.
"""
if isinstance(x2, NormalDist):
return NormalDist(x1._mu - x2._mu, hypot(x1._sigma, x2._sigma))
return NormalDist(x1._mu - x2, x1._sigma)
def __mul__(x1, x2):
"""Multiply both mu and sigma by a constant.
Used for rescaling, perhaps to change measurement units.
Sigma is scaled with the absolute value of the constant.
"""
return NormalDist(x1._mu * x2, x1._sigma * fabs(x2))
def __truediv__(x1, x2):
"""Divide both mu and sigma by a constant.
Used for rescaling, perhaps to change measurement units.
Sigma is scaled with the absolute value of the constant.
"""
return NormalDist(x1._mu / x2, x1._sigma / fabs(x2))
def __pos__(x1):
"Return a copy of the instance."
return NormalDist(x1._mu, x1._sigma)
def __neg__(x1):
"Negates mu while keeping sigma the same."
return NormalDist(-x1._mu, x1._sigma)
    # Addition is commutative, so the reflected form reuses __add__.
    __radd__ = __add__
    def __rsub__(x1, x2):
        "Subtract a NormalDist from a constant or another NormalDist."
        # x2 - x1 is computed as -(x1 - x2); __sub__ already handles both
        # constant and NormalDist operands.
        return -(x1 - x2)
    # Scaling by a constant is commutative as well.
    __rmul__ = __mul__
def __eq__(x1, x2):
"Two NormalDist objects are equal if their mu and sigma are both equal."
if not isinstance(x2, NormalDist):
return NotImplemented
return x1._mu == x2._mu and x1._sigma == x2._sigma
def __hash__(self):
"NormalDist objects hash equal if their mu and sigma are both equal."
return hash((self._mu, self._sigma))
def __repr__(self):
return f'{type(self).__name__}(mu={self._mu!r}, sigma={self._sigma!r})'
| bsd-3-clause | 0cc8fd8c2e7ad7dab523ef239ba91a11 | 33.366329 | 108 | 0.597776 | 3.601348 | false | false | false | false |
brython-dev/brython | www/src/Lib/email/message.py | 3 | 47951 | # Copyright (C) 2001-2007 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org
"""Basic message object for the email package object model."""
__all__ = ['Message', 'EmailMessage']
import binascii
import re
import quopri
from io import BytesIO, StringIO
# Intrapackage imports
from email import utils
from email import errors
from email._policybase import Policy, compat32
from email import charset as _charset
from email._encoded_words import decode_b
Charset = _charset.Charset
SEMISPACE = '; '
# Regular expression that matches `special' characters in parameters, the
# existence of which force quoting of the parameter value.
tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
def _splitparam(param):
# Split header parameters. BAW: this may be too simple. It isn't
# strictly RFC 2045 (section 5.1) compliant, but it catches most headers
# found in the wild. We may eventually need a full fledged parser.
# RDM: we might have a Header here; for now just stringify it.
a, sep, b = str(param).partition(';')
if not sep:
return a.strip(), None
return a.strip(), b.strip()
def _formatparam(param, value=None, quote=True):
"""Convenience function to format and return a key=value pair.
This will quote the value if needed or if quote is true. If value is a
three tuple (charset, language, value), it will be encoded according
to RFC2231 rules. If it contains non-ascii characters it will likewise
be encoded according to RFC2231 rules, using the utf-8 charset and
a null language.
"""
if value is not None and len(value) > 0:
# A tuple is used for RFC 2231 encoded parameter values where items
# are (charset, language, value). charset is a string, not a Charset
# instance. RFC 2231 encoded values are never quoted, per RFC.
if isinstance(value, tuple):
# Encode as per RFC 2231
param += '*'
value = utils.encode_rfc2231(value[2], value[0], value[1])
return '%s=%s' % (param, value)
else:
try:
value.encode('ascii')
except UnicodeEncodeError:
param += '*'
value = utils.encode_rfc2231(value, 'utf-8', '')
return '%s=%s' % (param, value)
# BAW: Please check this. I think that if quote is set it should
# force quoting even if not necessary.
if quote or tspecials.search(value):
return '%s="%s"' % (param, utils.quote(value))
else:
return '%s=%s' % (param, value)
else:
return param
def _parseparam(s):
# RDM This might be a Header, so for now stringify it.
s = ';' + str(s)
plist = []
while s[:1] == ';':
s = s[1:]
end = s.find(';')
while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
end = s.find(';', end + 1)
if end < 0:
end = len(s)
f = s[:end]
if '=' in f:
i = f.index('=')
f = f[:i].strip().lower() + '=' + f[i+1:].strip()
plist.append(f.strip())
s = s[end:]
return plist
def _unquotevalue(value):
# This is different than utils.collapse_rfc2231_value() because it doesn't
# try to convert the value to a unicode. Message.get_param() and
# Message.get_params() are both currently defined to return the tuple in
# the face of RFC 2231 parameters.
if isinstance(value, tuple):
return value[0], value[1], utils.unquote(value[2])
else:
return utils.unquote(value)
def _decode_uu(encoded):
"""Decode uuencoded data."""
decoded_lines = []
encoded_lines_iter = iter(encoded.splitlines())
for line in encoded_lines_iter:
if line.startswith(b"begin "):
mode, _, path = line.removeprefix(b"begin ").partition(b" ")
try:
int(mode, base=8)
except ValueError:
continue
else:
break
else:
raise ValueError("`begin` line not found")
for line in encoded_lines_iter:
if not line:
raise ValueError("Truncated input")
elif line.strip(b' \t\r\n\f') == b'end':
break
try:
decoded_line = binascii.a2b_uu(line)
except binascii.Error:
# Workaround for broken uuencoders by /Fredrik Lundh
nbytes = (((line[0]-32) & 63) * 4 + 5) // 3
decoded_line = binascii.a2b_uu(line[:nbytes])
decoded_lines.append(decoded_line)
return b''.join(decoded_lines)
class Message:
"""Basic message object.
A message object is defined as something that has a bunch of RFC 2822
headers and a payload. It may optionally have an envelope header
(a.k.a. Unix-From or From_ header). If the message is a container (i.e. a
multipart or a message/rfc822), then the payload is a list of Message
objects, otherwise it is a string.
Message objects implement part of the `mapping' interface, which assumes
there is exactly one occurrence of the header per message. Some headers
do in fact appear multiple times (e.g. Received) and for those headers,
you must use the explicit API to set or get all the headers. Not all of
the mapping methods are implemented.
"""
def __init__(self, policy=compat32):
self.policy = policy
self._headers = []
self._unixfrom = None
self._payload = None
self._charset = None
# Defaults for multipart messages
self.preamble = self.epilogue = None
self.defects = []
# Default content type
self._default_type = 'text/plain'
def __str__(self):
"""Return the entire formatted message as a string.
"""
return self.as_string()
def as_string(self, unixfrom=False, maxheaderlen=0, policy=None):
"""Return the entire formatted message as a string.
Optional 'unixfrom', when true, means include the Unix From_ envelope
header. For backward compatibility reasons, if maxheaderlen is
not specified it defaults to 0, so you must override it explicitly
if you want a different maxheaderlen. 'policy' is passed to the
Generator instance used to serialize the message; if it is not
specified the policy associated with the message instance is used.
If the message object contains binary data that is not encoded
according to RFC standards, the non-compliant data will be replaced by
unicode "unknown character" code points.
"""
from email.generator import Generator
policy = self.policy if policy is None else policy
fp = StringIO()
g = Generator(fp,
mangle_from_=False,
maxheaderlen=maxheaderlen,
policy=policy)
g.flatten(self, unixfrom=unixfrom)
return fp.getvalue()
def __bytes__(self):
"""Return the entire formatted message as a bytes object.
"""
return self.as_bytes()
def as_bytes(self, unixfrom=False, policy=None):
"""Return the entire formatted message as a bytes object.
Optional 'unixfrom', when true, means include the Unix From_ envelope
header. 'policy' is passed to the BytesGenerator instance used to
serialize the message; if not specified the policy associated with
the message instance is used.
"""
from email.generator import BytesGenerator
policy = self.policy if policy is None else policy
fp = BytesIO()
g = BytesGenerator(fp, mangle_from_=False, policy=policy)
g.flatten(self, unixfrom=unixfrom)
return fp.getvalue()
def is_multipart(self):
"""Return True if the message consists of multiple parts."""
return isinstance(self._payload, list)
#
# Unix From_ line
#
def set_unixfrom(self, unixfrom):
self._unixfrom = unixfrom
def get_unixfrom(self):
return self._unixfrom
#
# Payload manipulation.
#
def attach(self, payload):
"""Add the given payload to the current payload.
The current payload will always be a list of objects after this method
is called. If you want to set the payload to a scalar object, use
set_payload() instead.
"""
if self._payload is None:
self._payload = [payload]
else:
try:
self._payload.append(payload)
except AttributeError:
raise TypeError("Attach is not valid on a message with a"
" non-multipart payload")
def get_payload(self, i=None, decode=False):
"""Return a reference to the payload.
The payload will either be a list object or a string. If you mutate
the list object, you modify the message's payload in place. Optional
i returns that index into the payload.
Optional decode is a flag indicating whether the payload should be
decoded or not, according to the Content-Transfer-Encoding header
(default is False).
When True and the message is not a multipart, the payload will be
decoded if this header's value is `quoted-printable' or `base64'. If
some other encoding is used, or the header is missing, or if the
payload has bogus data (i.e. bogus base64 or uuencoded data), the
payload is returned as-is.
If the message is a multipart and the decode flag is True, then None
is returned.
"""
# Here is the logic table for this code, based on the email5.0.0 code:
# i decode is_multipart result
# ------ ------ ------------ ------------------------------
# None True True None
# i True True None
# None False True _payload (a list)
# i False True _payload element i (a Message)
# i False False error (not a list)
# i True False error (not a list)
# None False False _payload
# None True False _payload decoded (bytes)
# Note that Barry planned to factor out the 'decode' case, but that
# isn't so easy now that we handle the 8 bit data, which needs to be
# converted in both the decode and non-decode path.
if self.is_multipart():
if decode:
return None
if i is None:
return self._payload
else:
return self._payload[i]
# For backward compatibility, Use isinstance and this error message
# instead of the more logical is_multipart test.
if i is not None and not isinstance(self._payload, list):
raise TypeError('Expected list, got %s' % type(self._payload))
payload = self._payload
# cte might be a Header, so for now stringify it.
cte = str(self.get('content-transfer-encoding', '')).lower()
# payload may be bytes here.
if isinstance(payload, str):
if utils._has_surrogates(payload):
bpayload = payload.encode('ascii', 'surrogateescape')
if not decode:
try:
payload = bpayload.decode(self.get_param('charset', 'ascii'), 'replace')
except LookupError:
payload = bpayload.decode('ascii', 'replace')
elif decode:
try:
bpayload = payload.encode('ascii')
except UnicodeError:
# This won't happen for RFC compliant messages (messages
# containing only ASCII code points in the unicode input).
# If it does happen, turn the string into bytes in a way
# guaranteed not to fail.
bpayload = payload.encode('raw-unicode-escape')
if not decode:
return payload
if cte == 'quoted-printable':
return quopri.decodestring(bpayload)
elif cte == 'base64':
# XXX: this is a bit of a hack; decode_b should probably be factored
# out somewhere, but I haven't figured out where yet.
value, defects = decode_b(b''.join(bpayload.splitlines()))
for defect in defects:
self.policy.handle_defect(self, defect)
return value
elif cte in ('x-uuencode', 'uuencode', 'uue', 'x-uue'):
try:
return _decode_uu(bpayload)
except ValueError:
# Some decoding problem.
return bpayload
if isinstance(payload, str):
return bpayload
return payload
def set_payload(self, payload, charset=None):
"""Set the payload to the given value.
Optional charset sets the message's default character set. See
set_charset() for details.
"""
if hasattr(payload, 'encode'):
if charset is None:
self._payload = payload
return
if not isinstance(charset, Charset):
charset = Charset(charset)
payload = payload.encode(charset.output_charset)
if hasattr(payload, 'decode'):
self._payload = payload.decode('ascii', 'surrogateescape')
else:
self._payload = payload
if charset is not None:
self.set_charset(charset)
def set_charset(self, charset):
"""Set the charset of the payload to a given character set.
charset can be a Charset instance, a string naming a character set, or
None. If it is a string it will be converted to a Charset instance.
If charset is None, the charset parameter will be removed from the
Content-Type field. Anything else will generate a TypeError.
The message will be assumed to be of type text/* encoded with
charset.input_charset. It will be converted to charset.output_charset
and encoded properly, if needed, when generating the plain text
representation of the message. MIME headers (MIME-Version,
Content-Type, Content-Transfer-Encoding) will be added as needed.
"""
if charset is None:
self.del_param('charset')
self._charset = None
return
if not isinstance(charset, Charset):
charset = Charset(charset)
self._charset = charset
if 'MIME-Version' not in self:
self.add_header('MIME-Version', '1.0')
if 'Content-Type' not in self:
self.add_header('Content-Type', 'text/plain',
charset=charset.get_output_charset())
else:
self.set_param('charset', charset.get_output_charset())
if charset != charset.get_output_charset():
self._payload = charset.body_encode(self._payload)
if 'Content-Transfer-Encoding' not in self:
cte = charset.get_body_encoding()
try:
cte(self)
except TypeError:
# This 'if' is for backward compatibility, it allows unicode
# through even though that won't work correctly if the
# message is serialized.
payload = self._payload
if payload:
try:
payload = payload.encode('ascii', 'surrogateescape')
except UnicodeError:
payload = payload.encode(charset.output_charset)
self._payload = charset.body_encode(payload)
self.add_header('Content-Transfer-Encoding', cte)
def get_charset(self):
"""Return the Charset instance associated with the message's payload.
"""
return self._charset
#
# MAPPING INTERFACE (partial)
#
def __len__(self):
"""Return the total number of headers, including duplicates."""
return len(self._headers)
def __getitem__(self, name):
"""Get a header value.
Return None if the header is missing instead of raising an exception.
Note that if the header appeared multiple times, exactly which
occurrence gets returned is undefined. Use get_all() to get all
the values matching a header field name.
"""
return self.get(name)
def __setitem__(self, name, val):
"""Set the value of a header.
Note: this does not overwrite an existing header with the same field
name. Use __delitem__() first to delete any existing headers.
"""
max_count = self.policy.header_max_count(name)
if max_count:
lname = name.lower()
found = 0
for k, v in self._headers:
if k.lower() == lname:
found += 1
if found >= max_count:
raise ValueError("There may be at most {} {} headers "
"in a message".format(max_count, name))
self._headers.append(self.policy.header_store_parse(name, val))
def __delitem__(self, name):
"""Delete all occurrences of a header, if present.
Does not raise an exception if the header is missing.
"""
name = name.lower()
newheaders = []
for k, v in self._headers:
if k.lower() != name:
newheaders.append((k, v))
self._headers = newheaders
def __contains__(self, name):
return name.lower() in [k.lower() for k, v in self._headers]
def __iter__(self):
for field, value in self._headers:
yield field
def keys(self):
"""Return a list of all the message's header field names.
These will be sorted in the order they appeared in the original
message, or were added to the message, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [k for k, v in self._headers]
def values(self):
"""Return a list of all the message's header values.
These will be sorted in the order they appeared in the original
message, or were added to the message, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [self.policy.header_fetch_parse(k, v)
for k, v in self._headers]
def items(self):
"""Get all the message's header fields and values.
These will be sorted in the order they appeared in the original
message, or were added to the message, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [(k, self.policy.header_fetch_parse(k, v))
for k, v in self._headers]
def get(self, name, failobj=None):
"""Get a header value.
Like __getitem__() but return failobj instead of None when the field
is missing.
"""
name = name.lower()
for k, v in self._headers:
if k.lower() == name:
return self.policy.header_fetch_parse(k, v)
return failobj
#
# "Internal" methods (public API, but only intended for use by a parser
# or generator, not normal application code.
#
def set_raw(self, name, value):
"""Store name and value in the model without modification.
This is an "internal" API, intended only for use by a parser.
"""
self._headers.append((name, value))
def raw_items(self):
"""Return the (name, value) header pairs without modification.
This is an "internal" API, intended only for use by a generator.
"""
return iter(self._headers.copy())
#
# Additional useful stuff
#
def get_all(self, name, failobj=None):
"""Return a list of all the values for the named field.
These will be sorted in the order they appeared in the original
message, and may contain duplicates. Any fields deleted and
re-inserted are always appended to the header list.
If no such fields exist, failobj is returned (defaults to None).
"""
values = []
name = name.lower()
for k, v in self._headers:
if k.lower() == name:
values.append(self.policy.header_fetch_parse(k, v))
if not values:
return failobj
return values
    def add_header(self, _name, _value, **_params):
        """Extended header setting.

        name is the header field to add.  keyword arguments can be used to set
        additional parameters for the header field, with underscores converted
        to dashes.  Normally the parameter will be added as key="value" unless
        value is None, in which case only the key will be added.  If a
        parameter value contains non-ASCII characters it can be specified as a
        three-tuple of (charset, language, value), in which case it will be
        encoded according to RFC2231 rules.  Otherwise it will be encoded using
        the utf-8 charset and a language of ''.

        Examples:

        msg.add_header('content-disposition', 'attachment', filename='bud.gif')
        msg.add_header('content-disposition', 'attachment',
                       filename=('utf-8', '', 'Fußballer.ppt'))
        msg.add_header('content-disposition', 'attachment',
                       filename='Fußballer.ppt')
        """
        parts = []
        for k, v in _params.items():
            if v is None:
                # A None value means "bare" parameter: just the key, no '='.
                parts.append(k.replace('_', '-'))
            else:
                parts.append(_formatparam(k.replace('_', '-'), v))
        if _value is not None:
            parts.insert(0, _value)
        self[_name] = SEMISPACE.join(parts)
def replace_header(self, _name, _value):
"""Replace a header.
Replace the first matching header found in the message, retaining
header order and case. If no matching header was found, a KeyError is
raised.
"""
_name = _name.lower()
for i, (k, v) in zip(range(len(self._headers)), self._headers):
if k.lower() == _name:
self._headers[i] = self.policy.header_store_parse(k, _value)
break
else:
raise KeyError(_name)
#
# Use these three methods instead of the three above.
#
    def get_content_type(self):
        """Return the message's content type.

        The returned string is coerced to lower case of the form
        `maintype/subtype'.  If there was no Content-Type header in the
        message, the default type as given by get_default_type() will be
        returned.  Since according to RFC 2045, messages always have a default
        type this will always return a value.

        RFC 2045 defines a message's default type to be text/plain unless it
        appears inside a multipart/digest container, in which case it would be
        message/rfc822.
        """
        # Unique sentinel distinguishes "header absent" from any real value.
        missing = object()
        value = self.get('content-type', missing)
        if value is missing:
            # This should have no parameters
            return self.get_default_type()
        ctype = _splitparam(value)[0].lower()
        # RFC 2045, section 5.2 says if its invalid, use text/plain
        if ctype.count('/') != 1:
            return 'text/plain'
        return ctype
def get_content_maintype(self):
"""Return the message's main content type.
This is the `maintype' part of the string returned by
get_content_type().
"""
ctype = self.get_content_type()
return ctype.split('/')[0]
def get_content_subtype(self):
"""Returns the message's sub-content type.
This is the `subtype' part of the string returned by
get_content_type().
"""
ctype = self.get_content_type()
return ctype.split('/')[1]
    def get_default_type(self):
        """Return the `default' content type.

        Most messages have a default content type of text/plain, except for
        messages that are subparts of multipart/digest containers.  Such
        subparts have a default content type of message/rfc822.
        """
        return self._default_type
    def set_default_type(self, ctype):
        """Set the `default' content type.

        ctype should be either "text/plain" or "message/rfc822", although this
        is not enforced.  The default content type is not stored in the
        Content-Type header.
        """
        self._default_type = ctype
    def _get_params_preserve(self, failobj, header):
        """Return the named header's parameters with their quoting intact.

        Returns *failobj* when the header is absent; otherwise a list of
        (name, value) pairs (RFC 2231 parts decoded by utils.decode_params).
        """
        # Like get_params() but preserves the quoting of values.  BAW:
        # should this be part of the public interface?
        missing = object()
        value = self.get(header, missing)
        if value is missing:
            return failobj
        params = []
        for p in _parseparam(value):
            try:
                name, val = p.split('=', 1)
                name = name.strip()
                val = val.strip()
            except ValueError:
                # Must have been a bare attribute
                name = p.strip()
                val = ''
            params.append((name, val))
        params = utils.decode_params(params)
        return params
def get_params(self, failobj=None, header='content-type', unquote=True):
"""Return the message's Content-Type parameters, as a list.
The elements of the returned list are 2-tuples of key/value pairs, as
split on the `=' sign. The left hand side of the `=' is the key,
while the right hand side is the value. If there is no `=' sign in
the parameter the value is the empty string. The value is as
described in the get_param() method.
Optional failobj is the object to return if there is no Content-Type
header. Optional header is the header to search instead of
Content-Type. If unquote is True, the value is unquoted.
"""
missing = object()
params = self._get_params_preserve(missing, header)
if params is missing:
return failobj
if unquote:
return [(k, _unquotevalue(v)) for k, v in params]
else:
return params
    def get_param(self, param, failobj=None, header='content-type',
                  unquote=True):
        """Return the parameter value if found in the Content-Type header.

        Optional failobj is the object to return if there is no Content-Type
        header, or the Content-Type header has no such parameter.  Optional
        header is the header to search instead of Content-Type.

        Parameter keys are always compared case insensitively.  The return
        value can either be a string, or a 3-tuple if the parameter was RFC
        2231 encoded.  When it's a 3-tuple, the elements of the value are of
        the form (CHARSET, LANGUAGE, VALUE).  Note that both CHARSET and
        LANGUAGE can be None, in which case you should consider VALUE to be
        encoded in the us-ascii charset.  You can usually ignore LANGUAGE.
        The parameter value (either the returned string, or the VALUE item in
        the 3-tuple) is always unquoted, unless unquote is set to False.

        If your application doesn't care whether the parameter was RFC 2231
        encoded, it can turn the return value into a string as follows:

            rawparam = msg.get_param('foo')
            param = email.utils.collapse_rfc2231_value(rawparam)
        """
        if header not in self:
            return failobj
        # First case-insensitive match wins; quoting preserved until here.
        for k, v in self._get_params_preserve(failobj, header):
            if k.lower() == param.lower():
                if unquote:
                    return _unquotevalue(v)
                else:
                    return v
        return failobj
    def set_param(self, param, value, header='Content-Type', requote=True,
                  charset=None, language='', replace=False):
        """Set a parameter in the Content-Type header.

        If the parameter already exists in the header, its value will be
        replaced with the new value.

        If header is Content-Type and has not yet been defined for this
        message, it will be set to "text/plain" and the new parameter and
        value will be appended as per RFC 2045.

        An alternate header can be specified in the header argument, and all
        parameters will be quoted as necessary unless requote is False.

        If charset is specified, the parameter will be encoded according to RFC
        2231.  Optional language specifies the RFC 2231 language, defaulting
        to the empty string.  Both charset and language should be strings.
        """
        if not isinstance(value, tuple) and charset:
            # Promote to the RFC 2231 3-tuple form (charset, language, value).
            value = (charset, language, value)
        if header not in self and header.lower() == 'content-type':
            ctype = 'text/plain'
        else:
            ctype = self.get(header)
        if not self.get_param(param, header=header):
            # Parameter not present yet: append it to the existing value.
            if not ctype:
                ctype = _formatparam(param, value, requote)
            else:
                ctype = SEMISPACE.join(
                    [ctype, _formatparam(param, value, requote)])
        else:
            # Parameter exists: rebuild the header value parameter by
            # parameter, substituting the new value in place to preserve
            # the original parameter order.
            ctype = ''
            for old_param, old_value in self.get_params(header=header,
                                                        unquote=requote):
                append_param = ''
                if old_param.lower() == param.lower():
                    append_param = _formatparam(param, value, requote)
                else:
                    append_param = _formatparam(old_param, old_value, requote)
                if not ctype:
                    ctype = append_param
                else:
                    ctype = SEMISPACE.join([ctype, append_param])
        if ctype != self.get(header):
            if replace:
                # Keep the header's position in the message.
                self.replace_header(header, ctype)
            else:
                # Delete + re-add moves the header to the end.
                del self[header]
                self[header] = ctype
def del_param(self, param, header='content-type', requote=True):
"""Remove the given parameter completely from the Content-Type header.
The header will be re-written in place without the parameter or its
value. All values will be quoted as necessary unless requote is
False. Optional header specifies an alternative to the Content-Type
header.
"""
if header not in self:
return
new_ctype = ''
for p, v in self.get_params(header=header, unquote=requote):
if p.lower() != param.lower():
if not new_ctype:
new_ctype = _formatparam(p, v, requote)
else:
new_ctype = SEMISPACE.join([new_ctype,
_formatparam(p, v, requote)])
if new_ctype != self.get(header):
del self[header]
self[header] = new_ctype
    def set_type(self, type, header='Content-Type', requote=True):
        """Set the main type and subtype for the Content-Type header.

        type must be a string in the form "maintype/subtype", otherwise a
        ValueError is raised.

        This method replaces the Content-Type header, keeping all the
        parameters in place.  If requote is False, this leaves the existing
        header's quoting as is.  Otherwise, the parameters will be quoted (the
        default).

        An alternative header can be specified in the header argument.  When
        the Content-Type header is set, we'll always also add a MIME-Version
        header.
        """
        # BAW: should we be strict?
        if not type.count('/') == 1:
            raise ValueError
        # Set the Content-Type, you get a MIME-Version
        if header.lower() == 'content-type':
            del self['mime-version']
            self['MIME-Version'] = '1.0'
        if header not in self:
            self[header] = type
            return
        # Remember the existing parameters before replacing the header,
        # then re-apply them to the new type.
        params = self.get_params(header=header, unquote=requote)
        del self[header]
        self[header] = type
        # Skip the first param; it's the old type.
        for p, v in params[1:]:
            self.set_param(p, v, header, requote)
def get_filename(self, failobj=None):
"""Return the filename associated with the payload if present.
The filename is extracted from the Content-Disposition header's
`filename' parameter, and it is unquoted. If that header is missing
the `filename' parameter, this method falls back to looking for the
`name' parameter.
"""
missing = object()
filename = self.get_param('filename', missing, 'content-disposition')
if filename is missing:
filename = self.get_param('name', missing, 'content-type')
if filename is missing:
return failobj
return utils.collapse_rfc2231_value(filename).strip()
    def get_boundary(self, failobj=None):
        """Return the boundary associated with the payload if present.

        The boundary is extracted from the Content-Type header's `boundary'
        parameter, and it is unquoted.  Returns *failobj* when there is no
        boundary parameter.
        """
        missing = object()
        boundary = self.get_param('boundary', missing)
        if boundary is missing:
            return failobj
        # RFC 2046 says that boundaries may begin but not end in w/s
        return utils.collapse_rfc2231_value(boundary).rstrip()
    def set_boundary(self, boundary):
        """Set the boundary parameter in Content-Type to 'boundary'.

        This is subtly different than deleting the Content-Type header and
        adding a new one with a new boundary parameter via add_header().  The
        main difference is that using the set_boundary() method preserves the
        order of the Content-Type header in the original message.

        HeaderParseError is raised if the message has no Content-Type header.
        """
        missing = object()
        params = self._get_params_preserve(missing, 'content-type')
        if params is missing:
            # There was no Content-Type header, and we don't know what type
            # to set it to, so raise an exception.
            raise errors.HeaderParseError('No Content-Type header found')
        # Rebuild the parameter list, substituting the new boundary while
        # keeping every other parameter (and its quoting) as-is.
        newparams = []
        foundp = False
        for pk, pv in params:
            if pk.lower() == 'boundary':
                newparams.append(('boundary', '"%s"' % boundary))
                foundp = True
            else:
                newparams.append((pk, pv))
        if not foundp:
            # The original Content-Type header had no boundary attribute.
            # Tack one on the end.  BAW: should we raise an exception
            # instead???
            newparams.append(('boundary', '"%s"' % boundary))
        # Replace the existing Content-Type header with the new value,
        # preserving its position among the other headers.
        newheaders = []
        for h, v in self._headers:
            if h.lower() == 'content-type':
                parts = []
                for k, v in newparams:
                    if v == '':
                        # Bare attribute: emit the key alone, no '='.
                        parts.append(k)
                    else:
                        parts.append('%s=%s' % (k, v))
                val = SEMISPACE.join(parts)
                newheaders.append(self.policy.header_store_parse(h, val))
            else:
                newheaders.append((h, v))
        self._headers = newheaders
    def get_content_charset(self, failobj=None):
        """Return the charset parameter of the Content-Type header.

        The returned string is always coerced to lower case.  If there is no
        Content-Type header, or if that header has no charset parameter,
        failobj is returned.
        """
        missing = object()
        charset = self.get_param('charset', missing)
        if charset is missing:
            return failobj
        if isinstance(charset, tuple):
            # RFC 2231 encoded, so decode it, and it better end up as ascii.
            pcharset = charset[0] or 'us-ascii'
            try:
                # LookupError will be raised if the charset isn't known to
                # Python.  UnicodeError will be raised if the encoded text
                # contains a character not in the charset.
                as_bytes = charset[2].encode('raw-unicode-escape')
                charset = str(as_bytes, pcharset)
            except (LookupError, UnicodeError):
                # Fall back to the raw VALUE part of the 3-tuple.
                charset = charset[2]
        # charset characters must be in us-ascii range
        try:
            charset.encode('us-ascii')
        except UnicodeError:
            return failobj
        # RFC 2046, $4.1.2 says charsets are not case sensitive
        return charset.lower()
def get_charsets(self, failobj=None):
"""Return a list containing the charset(s) used in this message.
The returned list of items describes the Content-Type headers'
charset parameter for this message and all the subparts in its
payload.
Each item will either be a string (the value of the charset parameter
in the Content-Type header of that part) or the value of the
'failobj' parameter (defaults to None), if the part does not have a
main MIME type of "text", or the charset is not defined.
The list will contain one string for each part of the message, plus
one for the container message (i.e. self), so that a non-multipart
message will still return a list of length 1.
"""
return [part.get_content_charset(failobj) for part in self.walk()]
def get_content_disposition(self):
"""Return the message's content-disposition if it exists, or None.
The return values can be either 'inline', 'attachment' or None
according to the rfc2183.
"""
value = self.get('content-disposition')
if value is None:
return None
c_d = _splitparam(value)[0].lower()
return c_d
# I.e. def walk(self): ...
from email.iterators import walk
class MIMEPart(Message):
    """A Message subclass providing the modern (policy-based) API:
    body/attachment discovery, part iteration, and content management."""

    def __init__(self, policy=None):
        # Default to the modern 'default' policy (imported lazily to avoid
        # a circular import with email.policy).
        if policy is None:
            from email.policy import default
            policy = default
        super().__init__(policy)

    def as_string(self, unixfrom=False, maxheaderlen=None, policy=None):
        """Return the entire formatted message as a string.

        Optional 'unixfrom', when true, means include the Unix From_ envelope
        header.  maxheaderlen is retained for backward compatibility with the
        base Message class, but defaults to None, meaning that the policy value
        for max_line_length controls the header maximum length.  'policy' is
        passed to the Generator instance used to serialize the message; if it
        is not specified the policy associated with the message instance is
        used.
        """
        policy = self.policy if policy is None else policy
        if maxheaderlen is None:
            maxheaderlen = policy.max_line_length
        return super().as_string(unixfrom, maxheaderlen, policy)

    def __str__(self):
        # Serialize with utf8=True so non-ASCII content renders natively.
        return self.as_string(policy=self.policy.clone(utf8=True))

    def is_attachment(self):
        """Return True if Content-Disposition is 'attachment'."""
        # Under a modern policy c_d is a header object exposing
        # .content_disposition.
        c_d = self.get('content-disposition')
        return False if c_d is None else c_d.content_disposition == 'attachment'

    def _find_body(self, part, preferencelist):
        # Depth-first generator of (priority, part) candidates, where
        # priority is the index into preferencelist (lower is better).
        if part.is_attachment():
            return
        maintype, subtype = part.get_content_type().split('/')
        if maintype == 'text':
            if subtype in preferencelist:
                yield (preferencelist.index(subtype), part)
            return
        # NOTE(review): this tests self.is_multipart() rather than
        # part.is_multipart() -- matches the stdlib source; confirm intent.
        if maintype != 'multipart' or not self.is_multipart():
            return
        if subtype != 'related':
            # Plain multipart: recurse into every subpart.
            for subpart in part.iter_parts():
                yield from self._find_body(subpart, preferencelist)
            return
        if 'related' in preferencelist:
            yield (preferencelist.index('related'), part)
        # multipart/related: only the root part is a body candidate.  The
        # root is named by the 'start' parameter, else the first subpart.
        candidate = None
        start = part.get_param('start')
        if start:
            for subpart in part.iter_parts():
                if subpart['content-id'] == start:
                    candidate = subpart
                    break
        if candidate is None:
            subparts = part.get_payload()
            candidate = subparts[0] if subparts else None
        if candidate is not None:
            yield from self._find_body(candidate, preferencelist)

    def get_body(self, preferencelist=('related', 'html', 'plain')):
        """Return best candidate mime part for display as 'body' of message.

        Do a depth first search, starting with self, looking for the first part
        matching each of the items in preferencelist, and return the part
        corresponding to the first item that has a match, or None if no items
        have a match.  If 'related' is not included in preferencelist, consider
        the root part of any multipart/related encountered as a candidate
        match.  Ignore parts with 'Content-Disposition: attachment'.
        """
        best_prio = len(preferencelist)
        body = None
        for prio, part in self._find_body(self, preferencelist):
            if prio < best_prio:
                best_prio = prio
                body = part
                if prio == 0:
                    # Best possible preference found; stop searching.
                    break
        return body

    # The (maintype, subtype) pairs whose first occurrence is treated as
    # body material rather than an attachment in iter_attachments().
    _body_types = {('text', 'plain'),
                   ('text', 'html'),
                   ('multipart', 'related'),
                   ('multipart', 'alternative')}

    def iter_attachments(self):
        """Return an iterator over the non-main parts of a multipart.

        Skip the first of each occurrence of text/plain, text/html,
        multipart/related, or multipart/alternative in the multipart (unless
        they have a 'Content-Disposition: attachment' header) and include all
        remaining subparts in the returned iterator.  When applied to a
        multipart/related, return all parts except the root part.  Return an
        empty iterator when applied to a multipart/alternative or a
        non-multipart.
        """
        maintype, subtype = self.get_content_type().split('/')
        if maintype != 'multipart' or subtype == 'alternative':
            return
        payload = self.get_payload()
        # Certain malformed messages can have content type set to `multipart/*`
        # but still have single part body, in which case payload.copy() can
        # fail with AttributeError.
        try:
            parts = payload.copy()
        except AttributeError:
            # payload is not a list, it is most probably a string.
            return
        if maintype == 'multipart' and subtype == 'related':
            # For related, we treat everything but the root as an attachment.
            # The root may be indicated by 'start'; if there's no start or we
            # can't find the named start, treat the first subpart as the root.
            start = self.get_param('start')
            if start:
                found = False
                attachments = []
                for part in parts:
                    if part.get('content-id') == start:
                        found = True
                    else:
                        attachments.append(part)
                if found:
                    yield from attachments
                    return
            parts.pop(0)
            yield from parts
            return
        # Otherwise we more or less invert the remaining logic in get_body.
        # This only really works in edge cases (ex: non-text related or
        # alternatives) if the sending agent sets content-disposition.
        seen = []   # Only skip the first example of each candidate type.
        for part in parts:
            maintype, subtype = part.get_content_type().split('/')
            if ((maintype, subtype) in self._body_types and
                    not part.is_attachment() and subtype not in seen):
                seen.append(subtype)
                continue
            yield part

    def iter_parts(self):
        """Return an iterator over all immediate subparts of a multipart.

        Return an empty iterator for a non-multipart.
        """
        if self.is_multipart():
            yield from self.get_payload()

    def get_content(self, *args, content_manager=None, **kw):
        # Delegate to the policy's content manager unless one is supplied.
        if content_manager is None:
            content_manager = self.policy.content_manager
        return content_manager.get_content(self, *args, **kw)

    def set_content(self, *args, content_manager=None, **kw):
        # Delegate to the policy's content manager unless one is supplied.
        if content_manager is None:
            content_manager = self.policy.content_manager
        content_manager.set_content(self, *args, **kw)

    def _make_multipart(self, subtype, disallowed_subtypes, boundary):
        # Convert this part into a multipart/<subtype> container, moving any
        # existing content into the first subpart.
        if self.get_content_maintype() == 'multipart':
            existing_subtype = self.get_content_subtype()
            disallowed_subtypes = disallowed_subtypes + (subtype,)
            if existing_subtype in disallowed_subtypes:
                raise ValueError("Cannot convert {} to {}".format(
                    existing_subtype, subtype))
        keep_headers = []
        part_headers = []
        # Content-* headers travel with the payload; everything else stays.
        for name, value in self._headers:
            if name.lower().startswith('content-'):
                part_headers.append((name, value))
            else:
                keep_headers.append((name, value))
        if part_headers:
            # There is existing content, move it to the first subpart.
            part = type(self)(policy=self.policy)
            part._headers = part_headers
            part._payload = self._payload
            self._payload = [part]
        else:
            self._payload = []
        self._headers = keep_headers
        self['Content-Type'] = 'multipart/' + subtype
        if boundary is not None:
            self.set_param('boundary', boundary)

    def make_related(self, boundary=None):
        """Convert this part into a multipart/related container."""
        self._make_multipart('related', ('alternative', 'mixed'), boundary)

    def make_alternative(self, boundary=None):
        """Convert this part into a multipart/alternative container."""
        self._make_multipart('alternative', ('mixed',), boundary)

    def make_mixed(self, boundary=None):
        """Convert this part into a multipart/mixed container."""
        self._make_multipart('mixed', (), boundary)

    def _add_multipart(self, _subtype, *args, _disp=None, **kw):
        # Ensure we are the right kind of multipart, then attach a new
        # subpart built from *args/**kw (optionally with a disposition).
        if (self.get_content_maintype() != 'multipart' or
                self.get_content_subtype() != _subtype):
            getattr(self, 'make_' + _subtype)()
        part = type(self)(policy=self.policy)
        part.set_content(*args, **kw)
        if _disp and 'content-disposition' not in part:
            part['Content-Disposition'] = _disp
        self.attach(part)

    def add_related(self, *args, **kw):
        """Attach an inline part to this (made-)related multipart."""
        self._add_multipart('related', *args, _disp='inline', **kw)

    def add_alternative(self, *args, **kw):
        """Attach an alternative rendering of the body."""
        self._add_multipart('alternative', *args, **kw)

    def add_attachment(self, *args, **kw):
        """Attach a part with Content-Disposition: attachment."""
        self._add_multipart('mixed', *args, _disp='attachment', **kw)

    def clear(self):
        """Remove all headers and the payload."""
        self._headers = []
        self._payload = None

    def clear_content(self):
        """Remove the payload and all Content-* headers, keeping the rest."""
        self._headers = [(n, v) for n, v in self._headers
                         if not n.lower().startswith('content-')]
        self._payload = None
class EmailMessage(MIMEPart):
    """MIMEPart specialized for whole messages: setting content also
    guarantees a MIME-Version header is present."""

    def set_content(self, *args, **kw):
        # Delegate to MIMEPart.set_content, then add MIME-Version if the
        # content manager did not already set one.
        super().set_content(*args, **kw)
        if 'MIME-Version' not in self:
            self['MIME-Version'] = '1.0'
| bsd-3-clause | 3fe8634d20c3bae06a9b68eea019d1b6 | 38.9575 | 96 | 0.586477 | 4.454571 | false | false | false | false |
brython-dev/brython | www/tests/compression/test_deflate.py | 2 | 2553 | import zlib
import deflate
import lz77

# Compress with the full 32 KB window (wbits=15) so the output exercises the
# widest deflate distance range.
compresser = zlib.compressobj(wbits=15)

# Sample French text (André Chénier, "La Jeune Tarentine") used as a small
# compression corpus; it is overridden by the file read below.
text = """Pleurez, doux alcyons, ô vous, oiseaux sacrés,
Oiseaux chers à Thétis, doux alcyons, pleurez.

Elle a vécu, Myrto, la jeune Tarentine.
Un vaisseau la portait aux bords de Camarine.
Là l'hymen, les chansons, les flûtes, lentement,
Devaient la reconduire au seuil de son amant.
Une clef vigilante a pour cette journée
Dans le cèdre enfermé sa robe d'hyménée
Et l'or dont au festin ses bras seraient parés
Et pour ses blonds cheveux les parfums préparés.
Mais, seule sur la proue, invoquant les étoiles,
Le vent impétueux qui soufflait dans les voiles
L'enveloppe. Étonnée, et loin des matelots,
Elle crie, elle tombe, elle est au sein des flots.

Elle est au sein des flots, la jeune Tarentine.
Son beau corps a roulé sous la vague marine.
Thétis, les yeux en pleurs, dans le creux d'un rocher
Aux monstres dévorants eut soin de la cacher.
Par ses ordres bientôt les belles Néréides
L'élèvent au-dessus des demeures humides,
Le portent au rivage, et dans ce monument
L'ont, au cap du Zéphir, déposé mollement.
Puis de loin à grands cris appelant leurs compagnes,
Et les Nymphes des bois, des sources, des montagnes,
Toutes frappant leur sein et traînant un long deuil,
Répétèrent : « hélas ! » autour de son cercueil.

Hélas ! chez ton amant tu n'es point ramenée.
Tu n'as point revêtu ta robe d'hyménée.
L'or autour de tes bras n'a point serré de nœuds.
Les doux parfums n'ont point coulé sur tes cheveux."""

#text = "adsqfqgqs"

text = text.encode("utf-8")

# Replace the poem with a larger corpus read from disk (binary mode, so
# `text` stays bytes).  Assumes the file sits next to this script.
with open("du cote de chez swann.txt", "rb") as f:
    text = f.read()

chars = {x for x in text}

# Disabled experiment: inspect the LZ77 literal/length/distance alphabets
# produced by the pure-Python compressor (kept for reference).
"""
lz = lz77.LZ77()
gen = lz.compress(text, 32 * 1024, 3)

literals = set()
lengths = set()
distances = set()
for item in gen:
    if isinstance(item, tuple):
        lengths.add(item[0])
        distances.add(item[1])
    else:
        literals.add(item)

print("lengths", lengths)
print(len(literals | lengths), "literals/lengths")
print(len(distances), "distances")

cd = set()
for d in distances:
    if d < 4:
        cd.add(d)
    else:
        coef = 2
        p = 0
        while coef < d:
            coef *= 2
            p += 1
        code = p * 2
        if d - (coef // 2) > coef // 4:
            code += 1
        cd.add(code)
"""

# Round-trip: compress with the reference zlib, decompress with the local
# pure-Python deflate module, and report the first mismatching bytes.
buf = compresser.compress(text)
buf += compresser.flush()

dec = deflate.decompress(buf)
if dec != text:
    for i, car in enumerate(dec):
        if text[i] != car:
            print("erreur", i , car, text[i])
dimagi/commcare-hq | corehq/apps/userreports/reports/util.py | 1 | 3503 | from memoized import memoized
from couchexport.export import export_from_tables
from corehq.apps.userreports.columns import get_expanded_column_config
from corehq.apps.userreports.models import get_report_config
def get_expanded_columns(column_configs, data_source_config):
    """Map each 'expanded' report column's id to the slugs of the SQL
    columns it expands into (using the English-language expansion)."""
    expanded = {}
    for column_config in column_configs:
        if column_config.type != 'expanded':
            continue
        expanded_config = get_expanded_column_config(
            data_source_config, column_config, 'en'
        )
        expanded[column_config.column_id] = [
            sql_col.slug for sql_col in expanded_config.columns
        ]
    return expanded
def report_has_location_filter(config_id, domain):
    """check that the report has at least one location based filter or
    location choice provider filter
    """
    if not (config_id and domain):
        return False
    report, _ = get_report_config(config_id=config_id, domain=domain)
    for ui_filter in report.ui_filters:
        choice_provider = getattr(ui_filter, 'choice_provider', None)
        if getattr(choice_provider, 'location_safe', False):
            return True
        if getattr(ui_filter, 'location_filter', False):
            return True
    return False
class ReportExport(object):
    """Export all the rows of a UCR report.

    Builds a table (header + data + totals) from a configurable report and
    writes it out via couchexport.
    """

    def __init__(self, domain, title, report_config, lang, filter_values):
        self.domain = domain
        self.title = title
        self.report_config = report_config
        self.lang = lang
        self.filter_values = filter_values

    @property
    @memoized
    def data_source(self):
        """Lazily-built, filtered and sorted report data source."""
        # Imported here to avoid a circular import with the reports package.
        from corehq.apps.userreports.reports.data_source import ConfigurableReportDataSource
        data_source = ConfigurableReportDataSource.from_spec(self.report_config, include_prefilters=True)
        data_source.lang = self.lang
        data_source.set_filter_values(self.filter_values)
        data_source.set_order_by([(o['field'], o['order']) for o in self.report_config.sort_expression])
        return data_source

    def create_export(self, file_path, format_):
        """Save this report to a file
        :param file_path: The path to the file the report should be saved
        :param format_: The format of the resulting export
        """
        return export_from_tables(self.get_table(), file_path, format_)

    @property
    def header_rows(self):
        # One header row containing the visible columns, in column order.
        return [[
            column.header
            for column in self.data_source.inner_columns if column.data_tables_column.visible
        ]]

    @memoized
    def get_data(self):
        # Materialize all rows once; memoized so data_rows/get_table reuse it.
        return list(self.data_source.get_data())

    @property
    @memoized
    def total_rows(self):
        return [self.data_source.get_total_row()] if self.data_source.has_total_row else []

    @property
    @memoized
    def data_rows(self):
        # Expanded columns map to several underlying ids; others map to
        # themselves.  Only visible columns are exported.
        column_id_to_expanded_column_ids = get_expanded_columns(
            self.data_source.top_level_columns,
            self.data_source.config
        )
        column_ids = []
        for column in self.report_config.report_columns:
            if column.visible:
                column_ids.extend(column_id_to_expanded_column_ids.get(column.column_id, [column.column_id]))
        return [[raw_row[column_id] for column_id in column_ids] for raw_row in self.get_data()]

    def get_table_data(self):
        """Return header, data and total rows as one list of rows."""
        return self.header_rows + self.data_rows + self.total_rows

    @memoized
    def get_table(self):
        """Generate a table of all rows of this report
        """
        # couchexport expects a list of (sheet_title, rows) pairs.
        export_table = [
            [
                self.title,
                self.get_table_data()
            ]
        ]
        return export_table
| bsd-3-clause | df98c4605368c317cd5a095ea4113cc2 | 31.738318 | 109 | 0.631173 | 3.922732 | false | true | false | false |
dimagi/commcare-hq | corehq/apps/domain/views/internal.py | 1 | 19674 | import copy
from django.conf import settings
from django.contrib import messages
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import redirect
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.html import format_html
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
from django.views.decorators.http import require_GET
from django.views.generic import View
from memoized import memoized
from corehq.apps.accounting.decorators import always_allow_project_access
from corehq.apps.domain.utils import log_domain_changes
from corehq.apps.ota.rate_limiter import restore_rate_limiter
from dimagi.utils.web import get_ip, json_request, json_response
from corehq import feature_previews, privileges, toggles
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.domain.calculations import (
CALC_FNS,
CALC_ORDER,
CALCS,
dom_calc,
)
from corehq.apps.domain.decorators import (
domain_admin_required,
login_and_domain_required,
login_required,
require_superuser,
)
from corehq.apps.domain.forms import DomainInternalForm, TransferDomainForm
from corehq.apps.domain.models import Domain, TransferDomainRequest, AllowedUCRExpressionSettings
from corehq.apps.domain.views.settings import (
BaseAdminProjectSettingsView,
BaseProjectSettingsView,
)
from corehq.apps.hqwebapp.decorators import use_jquery_ui, use_multiselect
from corehq.apps.hqwebapp.tasks import send_html_email_async, send_mail_async
from corehq.apps.hqwebapp.views import BasePageView
from corehq.apps.receiverwrapper.rate_limiter import submission_rate_limiter
from corehq.apps.toggle_ui.views import ToggleEditView
from corehq.apps.users.models import CouchUser
from corehq.const import USER_CHANGE_VIA_WEB
class BaseInternalDomainSettingsView(BaseProjectSettingsView):
    """Base for superuser-only internal project settings pages."""

    # Raise on missing/ambiguous domain rather than resolving leniently.
    strict_domain_fetching = True

    @method_decorator(always_allow_project_access)
    @method_decorator(login_and_domain_required)
    @method_decorator(require_superuser)
    def dispatch(self, request, *args, **kwargs):
        return super(BaseInternalDomainSettingsView, self).dispatch(request, *args, **kwargs)

    @property
    def main_context(self):
        # Expose the Domain object to templates as 'project'.
        context = super(BaseInternalDomainSettingsView, self).main_context
        context.update({
            'project': self.domain_object,
        })
        return context

    @property
    def page_name(self):
        # format_html escapes page_title while keeping the <small> markup.
        return format_html("{} <small>Internal</small>", self.page_title)
class EditInternalDomainInfoView(BaseInternalDomainSettingsView):
urlname = 'domain_internal_settings'
page_title = gettext_lazy("Project Information")
template_name = 'domain/internal_settings.html'
strict_domain_fetching = True
@method_decorator(always_allow_project_access)
@method_decorator(login_and_domain_required)
@method_decorator(require_superuser)
@use_jquery_ui # datepicker
@use_multiselect
def dispatch(self, request, *args, **kwargs):
return super(BaseInternalDomainSettingsView, self).dispatch(request, *args, **kwargs)
@property
@memoized
def internal_settings_form(self):
can_edit_eula = toggles.CAN_EDIT_EULA.enabled(self.request.couch_user.username)
if self.request.method == 'POST':
return DomainInternalForm(self.request.domain, can_edit_eula, self.request.POST)
initial = {
'countries': self.domain_object.deployment.countries,
'is_test': self.domain_object.is_test,
'use_custom_auto_case_update_hour': 'Y' if self.domain_object.auto_case_update_hour else 'N',
'auto_case_update_hour': self.domain_object.auto_case_update_hour,
'use_custom_auto_case_update_limit': 'Y' if self.domain_object.auto_case_update_limit else 'N',
'auto_case_update_limit': self.domain_object.auto_case_update_limit,
'use_custom_odata_feed_limit': 'Y' if self.domain_object.odata_feed_limit else 'N',
'odata_feed_limit': self.domain_object.odata_feed_limit,
'granted_messaging_access': self.domain_object.granted_messaging_access,
}
internal_attrs = [
'sf_contract_id',
'sf_account_id',
'initiative',
'self_started',
'area',
'sub_area',
'organization_name',
'notes',
'phone_model',
'commtrack_domain',
'performance_threshold',
'experienced_threshold',
'amplifies_workers',
'amplifies_project',
'data_access_threshold',
'business_unit',
'workshop_region',
'partner_technical_competency',
'support_prioritization',
'gs_continued_involvement',
'technical_complexity',
'app_design_comments',
'training_materials',
'partner_comments',
'partner_contact',
'dimagi_contact',
]
if can_edit_eula:
internal_attrs += [
'custom_eula',
'can_use_data',
]
for attr in internal_attrs:
val = getattr(self.domain_object.internal, attr)
if isinstance(val, bool):
val = 'true' if val else 'false'
initial[attr] = val
initial['active_ucr_expressions'] = AllowedUCRExpressionSettings.get_allowed_ucr_expressions(
domain_name=self.domain_object.name
)
return DomainInternalForm(self.request.domain, can_edit_eula, initial=initial)
@property
def page_context(self):
return {
'project': self.domain_object,
'form': self.internal_settings_form,
'areas': dict([(a["name"], a["sub_areas"]) for a in settings.INTERNAL_DATA["area"]]),
}
    def send_handoff_email(self):
        """Email the partner and Dimagi contacts about the support hand-off.

        Reads ``cleaned_data``, so it must only be called after
        ``internal_settings_form`` has been validated.
        """
        partner_contact = self.internal_settings_form.cleaned_data['partner_contact']
        dimagi_contact = self.internal_settings_form.cleaned_data['dimagi_contact']
        recipients = [partner_contact, dimagi_contact]
        params = {'contact_name': CouchUser.get_by_username(dimagi_contact).human_friendly_name}
        send_html_email_async.delay(
            subject="Project Support Transition",
            recipient=recipients,
            html_content=render_to_string(
                "domain/email/support_handoff.html", params),
            text_content=render_to_string(
                "domain/email/support_handoff.txt", params),
            email_from=settings.SUPPORT_EMAIL,
        )
        messages.success(self.request,
                         _("Sent hand-off email to {}.").format(" and ".join(recipients)))
    def post(self, request, *args, **kwargs):
        """Save the internal settings form.

        Side effects on success: logs UCR-permission changes, emails
        settings.EULA_CHANGE_EMAIL when EULA/data-use flags flipped, and
        optionally sends the hand-off email. On invalid input the page is
        re-rendered with errors.
        """
        if self.internal_settings_form.is_valid():
            # snapshot pre-save state to detect changes after save()
            old_attrs = copy.copy(self.domain_object.internal)
            old_ucr_permissions = AllowedUCRExpressionSettings.get_allowed_ucr_expressions(self.domain)
            self.internal_settings_form.save(self.domain_object)
            log_domain_changes(
                self.request.couch_user.username,
                self.domain,
                self.internal_settings_form.cleaned_data['active_ucr_expressions'],
                old_ucr_permissions,
            )
            # compare truthiness only -- we care whether the flags flipped
            eula_props_changed = (bool(old_attrs.custom_eula) != bool(self.domain_object.internal.custom_eula) or
                                  bool(old_attrs.can_use_data) != bool(self.domain_object.internal.can_use_data))
            if eula_props_changed and settings.EULA_CHANGE_EMAIL:
                message = '\n'.join([
                    '{user} changed either the EULA or data sharing properties for domain {domain}.',
                    '',
                    'The properties changed were:',
                    '- Custom eula: {eula_old} --> {eula_new}',
                    '- Can use data: {can_use_data_old} --> {can_use_data_new}'
                ]).format(
                    user=self.request.couch_user.username,
                    domain=self.domain,
                    eula_old=old_attrs.custom_eula,
                    eula_new=self.domain_object.internal.custom_eula,
                    can_use_data_old=old_attrs.can_use_data,
                    can_use_data_new=self.domain_object.internal.can_use_data,
                )
                send_mail_async.delay(
                    'Custom EULA or data use flags changed for {}'.format(self.domain),
                    message, settings.DEFAULT_FROM_EMAIL, [settings.EULA_CHANGE_EMAIL]
                )
            messages.success(request,
                             _("The internal information for project %s was successfully updated!") % self.domain)
            if self.internal_settings_form.cleaned_data['send_handoff_email']:
                self.send_handoff_email()
            return redirect(self.urlname, self.domain)
        else:
            messages.error(request, _(
                "Your settings are not valid, see below for errors. Correct them and try again!"))
            return self.get(request, *args, **kwargs)
class EditInternalCalculationsView(BaseInternalDomainSettingsView):
    """Superuser-only page listing calculated properties for a project."""
    urlname = 'domain_internal_calculations'
    page_title = gettext_lazy("Calculated Properties")
    template_name = 'domain/internal_calculations.html'

    @method_decorator(always_allow_project_access)
    @method_decorator(login_and_domain_required)
    @method_decorator(require_superuser)
    def dispatch(self, request, *args, **kwargs):
        # super() targets BaseInternalDomainSettingsView, i.e. this call skips
        # BaseInternalDomainSettingsView.dispatch and its decorators --
        # presumably so only the decorators applied here govern access.
        return super(BaseInternalDomainSettingsView, self).dispatch(request, *args, **kwargs)

    @property
    def page_context(self):
        return {
            'calcs': CALCS,
            'order': CALC_ORDER,
        }
@method_decorator(always_allow_project_access, name='dispatch')
@method_decorator(require_superuser, name='dispatch')
class FlagsAndPrivilegesView(BaseAdminProjectSettingsView):
    """Superuser-only listing of every feature flag and privilege, showing
    whether each is enabled for this domain and/or the requesting user."""
    urlname = 'feature_flags_and_privileges'
    page_title = gettext_lazy("Feature Flags and Privileges")
    template_name = 'domain/admin/flags_and_privileges.html'

    def _get_toggles(self):

        def _sort_key(toggle):
            # enabled toggles first, then by tag, then alphabetically by label
            return (not (toggle['domain_enabled'] or toggle['user_enabled']),
                    toggle['tag_index'],
                    toggle['label'])

        unsorted_toggles = [{
            'slug': toggle.slug,
            'label': toggle.label,
            'description': toggle.description,
            'help_link': toggle.help_link,
            'tag': toggle.tag.name,
            'tag_index': toggle.tag.index,
            'tag_description': toggle.tag.description,
            'tag_css_class': toggle.tag.css_class,
            'has_domain_namespace': toggles.NAMESPACE_DOMAIN in toggle.namespaces,
            'domain_enabled': toggle.enabled(self.domain, namespace=toggles.NAMESPACE_DOMAIN),
            'user_enabled': toggle.enabled(self.request.couch_user.username,
                                           namespace=toggles.NAMESPACE_USER),
        } for toggle in toggles.all_toggles()]

        return sorted(unsorted_toggles, key=_sort_key)

    def _get_privileges(self):
        # (name, has_privilege) pairs; granted privileges sort first
        return sorted([
            (privileges.Titles.get_name_from_privilege(privilege),
             domain_has_privilege(self.domain, privilege))
            for privilege in privileges.MAX_PRIVILEGES
        ], key=lambda name_has: (not name_has[1], name_has[0]))

    @property
    def page_context(self):
        return {
            'toggles': self._get_toggles(),
            'privileges': self._get_privileges(),
        }
@method_decorator(always_allow_project_access, name='dispatch')
@method_decorator(require_superuser, name='dispatch')
class ProjectLimitsView(BaseAdminProjectSettingsView):
    """Superuser-only summary of rate-limit usage for this domain."""
    urlname = 'internal_project_limits_summary'
    page_title = gettext_lazy("Project Limits")
    template_name = 'domain/admin/project_limits.html'

    @property
    def page_context(self):
        return get_project_limits_context([
            ('Submission Rate Limits', submission_rate_limiter),
            ('Restore Rate Limits', restore_rate_limiter),
        ], self.domain)
def get_project_limits_context(name_limiter_tuple_list, scope=None):
    """Build the template context for the project limits page.

    ``name_limiter_tuple_list`` is a list of (display name, rate limiter)
    pairs; each limiter is expanded into its per-key usage rows.
    """
    project_limits = [
        (name, _get_rate_limits(scope, rate_limiter))
        for name, rate_limiter in name_limiter_tuple_list
    ]
    return {'project_limits': project_limits}
def _get_rate_limits(scope, rate_limiter):
return [
{'key': scope + ' ' + key, 'current_usage': int(current_usage), 'limit': int(limit),
'percent_usage': round(100 * current_usage / limit, 1)}
for scope, limits in rate_limiter.iter_rates(scope)
for key, current_usage, limit in limits
]
class TransferDomainView(BaseAdminProjectSettingsView):
    """Admin page to initiate (or resend) a transfer of project ownership.

    Only available when the TRANSFER_DOMAIN feature flag is enabled for the
    domain (see ``dispatch``).
    """
    urlname = 'transfer_domain_view'
    page_title = gettext_lazy("Transfer Project")
    template_name = 'domain/admin/transfer_domain.html'

    @property
    @memoized
    def active_transfer(self):
        # Pending transfer initiated by the current user, if any.
        return TransferDomainRequest.get_active_transfer(self.domain,
                                                         self.request.user.username)

    @property
    @memoized
    def transfer_domain_form(self):
        # Bound to POST data when present, unbound on GET.
        return TransferDomainForm(self.domain,
                                  self.request.user.username,
                                  self.request.POST or None)

    def get(self, request, *args, **kwargs):
        # With a transfer already pending, show the "pending" template and
        # support a ?resend=... query param to re-send the request email.
        if self.active_transfer:
            self.template_name = 'domain/admin/transfer_domain_pending.html'

            if request.GET.get('resend', None):
                self.active_transfer.send_transfer_request()
                messages.info(request,
                              _("Resent transfer request for project '{domain}'").format(domain=self.domain))

        return super(TransferDomainView, self).get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        form = self.transfer_domain_form
        if form.is_valid():
            # Initiate domain transfer
            transfer = form.save()
            transfer.send_transfer_request()
            return HttpResponseRedirect(self.page_url)

        context = self.get_context_data(**kwargs)
        return self.render_to_response(context)

    @property
    def page_context(self):
        if self.active_transfer:
            return {'transfer': self.active_transfer.as_dict()}
        else:
            return {'form': self.transfer_domain_form}

    @method_decorator(domain_admin_required)
    def dispatch(self, request, *args, **kwargs):
        # Feature-flag gate: 404 unless the domain has TRANSFER_DOMAIN on.
        if not toggles.TRANSFER_DOMAIN.enabled(request.domain):
            raise Http404()
        return super(TransferDomainView, self).dispatch(request, *args, **kwargs)
class ActivateTransferDomainView(BasePageView):
    """Page where the transfer recipient accepts a pending domain transfer.

    The transfer is looked up by the guid in the URL; only the recipient
    (``to_username``) or a superuser may view or complete it.
    """
    urlname = 'activate_transfer_domain'
    page_title = 'Activate Domain Transfer'
    template_name = 'domain/activate_transfer_domain.html'

    @property
    @memoized
    def active_transfer(self):
        # self.guid is set in get()/post() before this is accessed.
        return TransferDomainRequest.get_by_guid(self.guid)

    @property
    def page_context(self):
        if self.active_transfer:
            return {'transfer': self.active_transfer.as_dict()}
        else:
            return {}

    @property
    def page_url(self):
        return self.request.get_full_path()

    def get(self, request, guid, *args, **kwargs):
        self.guid = guid

        if (self.active_transfer and
                self.active_transfer.to_username != request.user.username and
                not request.user.is_superuser):
            return HttpResponseRedirect(reverse("no_permissions"))

        return super(ActivateTransferDomainView, self).get(request, *args, **kwargs)

    def post(self, request, guid, *args, **kwargs):
        self.guid = guid

        if not self.active_transfer:
            raise Http404()

        if self.active_transfer.to_username != request.user.username and not request.user.is_superuser:
            return HttpResponseRedirect(reverse("no_permissions"))

        # Performs the actual ownership change, recording who/how/from where.
        self.active_transfer.transfer_domain(by_user=request.couch_user, transfer_via=USER_CHANGE_VIA_WEB,
                                             ip=get_ip(request))
        messages.success(request, _("Successfully transferred ownership of project '{domain}'")
                         .format(domain=self.active_transfer.domain))

        return HttpResponseRedirect(reverse('dashboard_default', args=[self.active_transfer.domain]))

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ActivateTransferDomainView, self).dispatch(*args, **kwargs)
class DeactivateTransferDomainView(View):
    """Cancels/declines a pending domain transfer identified by guid.

    Allowed for the sender, the recipient, or a superuser.
    """

    def post(self, request, guid, *args, **kwargs):

        transfer = TransferDomainRequest.get_by_guid(guid)
        if not transfer:
            # nothing to deactivate -- bounce back to where the user came from
            return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))

        if (transfer.to_username != request.user.username and
                transfer.from_username != request.user.username and
                not request.user.is_superuser):
            return HttpResponseRedirect(reverse("no_permissions"))

        transfer.active = False
        transfer.save()

        referer = request.META.get('HTTP_REFERER', '/')

        # Do not want to send them back to the activate page
        if referer.endswith(reverse('activate_transfer_domain', args=[guid])):
            messages.info(request,
                          _("Declined ownership of project '{domain}'").format(domain=transfer.domain))
            return HttpResponseRedirect('/')
        else:
            return HttpResponseRedirect(referer)

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(DeactivateTransferDomainView, self).dispatch(*args, **kwargs)
@login_and_domain_required
@require_superuser
@require_GET
def toggle_diff(request, domain):
    """JSON list of previews/toggles enabled on the requesting domain but not
    on the ``domain`` GET param's domain (i.e. candidates for copying)."""
    params = json_request(request.GET)
    other_domain = params.get('domain')
    diff = []
    if Domain.get_by_name(other_domain):
        # feature previews first, given a synthetic tag sorting before real ones
        diff = [{
            'slug': t.slug,
            'label': t.label,
            'url': reverse(ToggleEditView.urlname, args=[t.slug]),
            'tag_name': _('Preview'),
            'tag_css_class': 'default',
            'tag_index': -1,
        } for t in feature_previews.all_previews() if _can_copy_toggle(t, request.domain, other_domain)]
        diff.extend([{
            'slug': t.slug,
            'label': t.label,
            'url': reverse(ToggleEditView.urlname, args=[t.slug]),
            'tag_name': t.tag.name,
            'tag_css_class': t.tag.css_class,
            'tag_index': t.tag.index,
        } for t in toggles.all_toggles() if _can_copy_toggle(t, request.domain, other_domain)])
        diff.sort(key=lambda x: (x['tag_index'], x['label']))
    return json_response(diff)
def _can_copy_toggle(toggle, domain, other_domain):
    """A toggle is copyable when the source domain has it enabled and the
    target domain does not."""
    if not toggle.enabled(domain, toggles.NAMESPACE_DOMAIN):
        return False
    return not toggle.enabled(other_domain, toggles.NAMESPACE_DOMAIN)
@login_and_domain_required
@require_superuser
def calculated_properties(request, domain):
    """Return the value of one calculated property for ``domain`` as JSON.

    The ``calc_tag`` GET param may carry an extra argument after '--'
    (e.g. ``cases--active``); unknown tags yield an error payload.
    """
    parts = request.GET.get("calc_tag", '').split('--')
    calc_tag = parts[0]
    extra_arg = parts[1] if len(parts) > 1 else ''

    # membership test directly on CALC_FNS -- no need to materialize list()
    if not calc_tag or calc_tag not in CALC_FNS:
        data = {"error": 'This tag does not exist'}
    else:
        data = {"value": dom_calc(calc_tag, domain, extra_arg)}
    return json_response(data)
| bsd-3-clause | df17831aeb140fa30c8d4fcaf90778bf | 38.269461 | 114 | 0.629155 | 3.970535 | false | false | false | false |
dimagi/commcare-hq | corehq/ex-submodules/dimagi/utils/management/commands/prime_views.py | 1 | 2761 | from couchdbkit.exceptions import ResourceNotFound
# http://www.gevent.org/gevent.monkey.html#module-gevent.monkey
from gevent import monkey; monkey.patch_all()
import sys
import gevent
from gevent.pool import Pool
from django.core.management.base import BaseCommand
from django.conf import settings
setattr(settings, 'COUCHDB_TIMEOUT', 999999)
from couchdbkit.ext.django.loading import get_db
DESIGN_DOC_VIEW = '_all_docs'
DESIGN_SK = "_design"
DESIGN_EK = "_design0"
POOL_SIZE=12
REPEAT_INTERVAL = getattr(settings, 'PRIME_VIEWS_INTERVAL', 3600)
def get_unique_dbs():
    """
    Walk settings.COUCHDB_DATABASES and collect one app entry per unique
    database (judged by the final path segment of the entry), so views are
    primed once per database without breaking the abstraction barrier.
    """
    seen_db_names = set()
    unique_apps = []
    for entry in settings.COUCHDB_DATABASES:
        app_name = entry[0]
        db_name = app_name.split('/')[-1]
        if db_name not in seen_db_names:
            seen_db_names.add(db_name)
            unique_apps.append(app_name)
    return unique_apps
def do_prime(app_label, design_doc_name, view_name, verbose=False):
    """Prime one couch view by querying it with limit=0.

    In verbose mode writes '.' for success or a '!=>' marker when the view
    does not exist.
    """
    db = get_db(app_label)
    view_id = '%s/%s' % (design_doc_name, view_name)
    try:
        list(db.view(view_id, limit=0))
    except ResourceNotFound:
        if verbose:
            sys.stdout.write('!=>%s/%s/%s' % (app_label, design_doc_name, view_name))
            sys.stdout.flush()
    else:
        if verbose:
            sys.stdout.write('.')
            sys.stdout.flush()
class Command(BaseCommand):
    help = 'Sync live design docs with gevent'

    def handle(self, **options):
        # Runs forever: prime everything, sleep REPEAT_INTERVAL, repeat.
        pool = Pool(POOL_SIZE)
        while True:
            self.prime_everything(pool)
            gevent.sleep(REPEAT_INTERVAL)

    def prime_everything(self, pool, verbose=False):
        """Spawn a greenlet to prime the first view of each design doc in
        every unique database.

        Only the first view per design doc is queried (note the ``break``) --
        presumably querying any one view suffices to build that design doc's
        indexes; confirm against CouchDB behavior.
        """
        unique_dbs = get_unique_dbs()
        for app in unique_dbs:
            try:
                db = get_db(app)
                design_docs = db.view(DESIGN_DOC_VIEW, startkey=DESIGN_SK, endkey=DESIGN_EK, include_docs=True).all()
                for res in design_docs:
                    design_doc = res['doc']
                    design_doc_name = design_doc['_id'].split('/')[-1]  # _design/app_name
                    if design_doc_name.endswith('-tmp'):
                        # it's a dangling -tmp preindex view, skip
                        continue
                    else:
                        views = design_doc.get('views', {})
                        # get the first view
                        for view_name in views:
                            pool.spawn(do_prime, app, design_doc_name, view_name, verbose=verbose)
                            break
            except Exception:
                # NOTE(review): broad best-effort swallow -- a failure on one
                # database is silently ignored so priming continues for the rest.
                pass
| bsd-3-clause | 8abae0e553a3e69bbba70ea08373319e | 32.670732 | 117 | 0.572981 | 3.792582 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/segments/migrations/0007_auto_20220119_0945.py | 1 | 3107 | # Generated by Django 2.2.24 on 2022-01-19 08:45
from django.db import migrations
from django.utils.text import slugify
def migrate_extra_fields_to_segments(apps, schema_editor):
    """Copy legacy custom member field values onto the new segment models.

    Branches on how many custom member fields a tenant has configured:
    exactly 1 -> treat it as 'department'; exactly 4 -> copy 'country' and
    'city'.  NOTE(review): these counts encode assumptions about specific
    tenant configurations -- confirm before reusing this pattern.
    """
    Member = apps.get_model('members', 'Member')
    SegmentType = apps.get_model('segments', 'SegmentType')
    Segment = apps.get_model('segments', 'Segment')
    CustomMemberFieldSettings = apps.get_model('members', 'CustomMemberFieldSettings')
    MemberPlatformSettings = apps.get_model('members', 'MemberPlatformSettings')
    CustomMemberField = apps.get_model('members', 'CustomMemberField')

    if CustomMemberFieldSettings.objects.count() == 1:
        MemberPlatformSettings.objects.update(
            create_segments=True,
            enable_segments=True
        )
        department, _ = SegmentType.objects.get_or_create(
            name='Department',
            slug='department',
        )
        for member in Member.objects.all():
            field = CustomMemberField.objects.filter(member=member, field__name='department').first()
            if field and field.value:
                # segments are deduplicated by slug; the raw value is kept
                # as the name and as an alternate name
                segment, _ = Segment.objects.get_or_create(
                    segment_type=department,
                    slug=slugify(field.value),
                    defaults={
                        'alternate_names': [field.value],
                        'name': field.value
                    }
                )
                member.segments.add(segment)

    if CustomMemberFieldSettings.objects.count() == 4:
        city, _ = SegmentType.objects.get_or_create(
            name='City',
            slug='city',
        )
        country, _ = SegmentType.objects.get_or_create(
            name='Country',
            slug='country',
        )
        for member in Member.objects.all():
            field = CustomMemberField.objects.filter(member=member, field__name='country').first()
            if field and field.value:
                my_country, _ = Segment.objects.get_or_create(
                    segment_type=country,
                    slug=slugify(field.value),
                    defaults={
                        'alternate_names': [field.value],
                        'name': field.value
                    }
                )
                member.segments.add(my_country)
            field = CustomMemberField.objects.filter(member=member, field__name='city').first()
            if field and field.value:
                my_city, _ = Segment.objects.get_or_create(
                    segment_type=city,
                    slug=slugify(field.value),
                    defaults={
                        'alternate_names': [field.value],
                        'name': field.value
                    }
                )
                member.segments.add(my_city)
class Migration(migrations.Migration):
    # NOTE(review): two dependencies from the same app ('segments') are
    # listed; Django runs this migration only after both have been applied.
    dependencies = [
        ('segments', '0006_auto_20210914_1134'),
        ('segments', '0014_auto_20211210_1246')
    ]

    operations = [
        migrations.RunPython(
            migrate_extra_fields_to_segments,
            # reverse is a no-op: copied segment data is left in place
            migrations.RunPython.noop
        )
    ]
| bsd-3-clause | add9abf647744dc9a413b0b50da6d12e | 35.988095 | 101 | 0.54168 | 4.496382 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/formplayer_api/management/commands/prime_formplayer_restores.py | 1 | 6540 | import csv
import inspect
import sys
from argparse import RawTextHelpFormatter
from datetime import datetime
from dateutil.relativedelta import relativedelta
from django.core.management.base import BaseCommand
from corehq.apps.users.models import CouchUser
from corehq.util.argparse_types import validate_range
from custom.covid.tasks import get_users_for_priming, get_prime_restore_user_params, prime_formplayer_db_for_user
class Command(BaseCommand):
    help = inspect.cleandoc(
        """Call the Formplayer sync API for users from CSV or matching criteria.
        Usage:
        Redirect stdout to file to allow viewing progress bar:
        %(prog)s [args] > output.csv
        ### With users in a CSV file ###
        CSV Columns: "domain, username, as_user"
        %(prog)s --from-csv path/to/users.csv
        ### Query DB for users ###
        %(prog)s --domains a b c --last-synced-days 2 --min-cases 500 --limit 1000
        Use "--dry-run" and "--dry-run-count" to gauge impact of command.
        """
    )

    def create_parser(self, *args, **kwargs):
        # required to get nice output from `--help`
        parser = super(Command, self).create_parser(*args, **kwargs)
        parser.formatter_class = RawTextHelpFormatter
        return parser

    def add_arguments(self, parser):
        parser.add_argument('--from-csv',
                            help='Path to CSV file. Columns "domain, username, as_user". When this is supplied '
                                 'only users in this file will be synced.')
        parser.add_argument('--domains', nargs='+', help='Match users in these domains.')
        parser.add_argument('--last-synced-hours', type=int, action=validate_range(gt=0, lt=673), default=48,
                            help='Match users who have synced within the given window. '
                                 'Defaults to %(default)s hours. Max = 673 (4 weeks).')
        parser.add_argument('--not-synced-hours', type=int, action=validate_range(gt=-1, lt=169),
                            help='Exclude users who have synced within the given window. '
                                 'Max = 168 (1 week).')
        parser.add_argument('--min-cases', type=int, action=validate_range(gt=0),
                            help='Match users with this many cases or more.')
        parser.add_argument('--limit', type=int, action=validate_range(gt=0),
                            help='Limit the number of users matched.')
        parser.add_argument('--dry-run', action='store_true', help='Only print the list of users.')
        parser.add_argument('--dry-run-count', action='store_true', help='Only print the count of matched users.')
        parser.add_argument('--clear-user-data', action='store_true',
                            help='Clear user data prior to performing sync.')

    def handle(self,
               from_csv=None,
               domains=None,
               last_synced_hours=None,
               not_synced_hours=None,
               min_cases=None,
               limit=None,
               **options
               ):
        """Queue a formplayer priming task per matched user.

        Writes one CSV row per user to stdout:
        domain,request_user,request_user_id,as_username,as_user_id
        """
        dry_run = options['dry_run']
        dry_run_count = options['dry_run_count']
        clear_user_data = options['clear_user_data']

        if from_csv:
            # CSV mode: users come straight from the file, ignoring filters.
            users = _get_users_from_csv(from_csv)
            if dry_run_count:
                # len(list(users)) consumes the generator
                sys.stderr.write(f"\n{len(list(users))} users in CSV file '{from_csv}'\n")
                return

            for domain, request_user, as_username in users:
                request_user_id = CouchUser.get_by_username(request_user).user_id
                as_user_id = None
                if as_username:
                    as_user_id = CouchUser.get_by_username(as_username).user_id
                sys.stdout.write(f"{domain},{request_user},{request_user_id},{as_username},{as_user_id}\n")
                if not dry_run:
                    prime_formplayer_db_for_user.delay(
                        domain, request_user_id, as_user_id, clear_data=clear_user_data
                    )
        else:
            # Query mode: match users by domain / sync window / case count.
            domains = [domain.strip() for domain in domains if domain.strip()]
            synced_since = datetime.utcnow() - relativedelta(hours=last_synced_hours)
            not_synced_since = (
                datetime.utcnow() - relativedelta(hours=not_synced_hours)
                if not_synced_hours else None
            )

            if dry_run_count:
                users = list(_get_user_rows(domains, synced_since, not_synced_since, min_cases, limit))
                sys.stderr.write(f"\nMatched {len(users)} users for filters:\n")
                sys.stderr.write(f"\tDomains: {domains or '---'}\n")
                sys.stderr.write(f"\tSynced after: {synced_since}\n")
                sys.stderr.write(f"\tNot Synced after: {not_synced_since}\n")
                sys.stderr.write(f"\tMin cases: {min_cases or '---'}\n")
                sys.stderr.write(f"\tLimit: {limit or '---'}\n")
                return

            users = _get_user_rows(domains, synced_since, not_synced_since, min_cases, limit)
            for domain, request_user_id, as_user_id in users:
                request_user, as_username = get_prime_restore_user_params(request_user_id, as_user_id)
                sys.stdout.write(f"{domain},{request_user},{request_user_id},{as_username},{as_user_id}\n")
                if not dry_run:
                    prime_formplayer_db_for_user.delay(
                        domain, request_user_id, as_user_id, clear_data=clear_user_data
                    )
def _get_users_from_csv(path):
with open(path, 'r') as file:
reader = csv.reader(file)
for row in reader:
if not row or row == ["domain", "username", "as_user"]: # skip header
continue
if len(row) != 3:
row_csv = ','.join(['' if f is None else f for f in row])
sys.stdout.write(f'{row_csv},ERROR,"Expected exactly 3 values in each row"\n')
continue
yield row
def _get_user_rows(domains, synced_since, not_synced_since=None, min_cases=None, limit=None):
    """Yield (domain, *user_row) tuples for users matching the filters,
    stopping once ``limit`` users (across all domains) have been produced."""
    budget = limit
    for domain in domains:
        if budget is not None and budget <= 0:
            break
        matched = get_users_for_priming(domain, synced_since, not_synced_since, min_cases)
        if budget:
            matched = matched[:budget]
            budget -= len(matched)
        for row in matched:
            yield (domain, *row)
| bsd-3-clause | 7fbcbe78fbd33f493f3e3b875ae54327 | 43.189189 | 114 | 0.569266 | 3.913824 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/cms/migrations/0004_projectsmapcontent.py | 1 | 1330 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-12-07 11:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.manager
from fluent_contents.models.managers import ContentItemManager
class Migration(migrations.Migration):

    dependencies = [
        ('fluent_contents', '0001_initial'),
        ('cms', '0003_merge_20161207_1037'),
    ]

    operations = [
        # New fluent-contents plugin model for a "Projects Map" CMS block.
        migrations.CreateModel(
            name='ProjectsMapContent',
            fields=[
                ('contentitem_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='fluent_contents.ContentItem')),
                ('title', models.CharField(blank=True, max_length=63, null=True)),
                ('sub_title', models.CharField(blank=True, max_length=100, null=True)),
            ],
            options={
                'db_table': 'contentitem_cms_projectsmapcontent',
                'verbose_name': 'Projects Map',
            },
            bases=('fluent_contents.contentitem',),
            managers=[
                ('objects', ContentItemManager()),
                ('base_objects', django.db.models.manager.Manager()),
            ],
        ),
    ]
| bsd-3-clause | 9479a2388945df888f7f58eca65c3192 | 34.945946 | 209 | 0.6 | 4.079755 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/domain/migrations/0013_accountconfirmationsettings_squashed_0016_alter_smsaccountconfirmationsettings_project_name.py | 1 | 1158 | # Generated by Django 3.2.13 on 2022-07-06 13:34
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    # Squashed replacement for domain migrations 0013-0016.
    replaces = [('domain', '0013_accountconfirmationsettings'), ('domain', '0014_alter_accountconfirmationsettings_project_name'), ('domain', '0015_rename_accountconfirmationsettings_smsaccountconfirmationsettings'), ('domain', '0016_alter_smsaccountconfirmationsettings_project_name')]

    dependencies = [
        ('domain', '0012_operatorcalllimitsettings'),
    ]

    operations = [
        migrations.CreateModel(
            name='SMSAccountConfirmationSettings',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('domain', models.CharField(db_index=True, max_length=256)),
                ('project_name', models.CharField(default='CommCare HQ', max_length=30)),
                # expiry clamped to 1..30 (default 14)
                ('confirmation_link_expiry_time', models.IntegerField(default=14, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(30)])),
            ],
        ),
    ]
| bsd-3-clause | cd18085d9bc8e5749388121a5d967423 | 45.32 | 286 | 0.677029 | 4.226277 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/funding_vitepay/tests/test_api.py | 1 | 2411 | import json
from django.urls import reverse
from mock import patch
from rest_framework import status
from bluebottle.funding.tests.factories import FundingFactory, DonorFactory
from bluebottle.funding_vitepay.models import VitepayPaymentProvider
from bluebottle.funding_vitepay.tests.factories import VitepayPaymentProviderFactory
from bluebottle.initiatives.tests.factories import InitiativeFactory
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from bluebottle.test.utils import BluebottleTestCase, JSONAPITestClient
class VitepayPaymentTestCase(BluebottleTestCase):
    """API tests for creating Vitepay payments via the JSON-API endpoint."""

    def setUp(self):
        """Set up a single Vitepay provider, an approved funding activity,
        a donation to pay for, and the JSON-API request payload."""
        super(VitepayPaymentTestCase, self).setUp()
        VitepayPaymentProvider.objects.all().delete()
        VitepayPaymentProviderFactory.create()

        self.client = JSONAPITestClient()
        self.user = BlueBottleUserFactory()
        self.initiative = InitiativeFactory.create()
        self.initiative.states.submit()
        self.initiative.states.approve(save=True)
        self.funding = FundingFactory.create(initiative=self.initiative)
        self.donation = DonorFactory.create(activity=self.funding, user=self.user)

        self.payment_url = reverse('vitepay-payment-list')

        self.data = {
            'data': {
                'type': 'payments/vitepay-payments',
                'attributes': {
                },
                'relationships': {
                    'donation': {
                        'data': {
                            'type': 'contributors/donations',
                            'id': self.donation.pk,
                        }
                    }
                }
            }
        }

    # The outbound HTTP call to Vitepay is mocked to return a payment url.
    @patch('bluebottle.funding_vitepay.utils.requests.post',
           return_value=type('obj', (object,),
                             {'status_code': 200, 'content': b'https://vitepay.com/some-path-to-pay'}))
    def test_create_payment(self, vitepay_post):
        """Creating a payment returns 201 with Vitepay's payment url and a
        'new' status on both the payment and the included donation."""
        response = self.client.post(self.payment_url, data=json.dumps(self.data), user=self.user)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        data = json.loads(response.content)
        self.assertEqual(data['data']['attributes']['status'], 'new')
        self.assertEqual(data['data']['attributes']['payment-url'], 'https://vitepay.com/some-path-to-pay')
        self.assertEqual(data['included'][0]['attributes']['status'], 'new')
| bsd-3-clause | 8c7088a44acbb3e41098fe9a0364a4b3 | 39.183333 | 107 | 0.637495 | 4.135506 | false | true | false | false |
dimagi/commcare-hq | corehq/util/management/commands/mark_email_as_bounced.py | 1 | 1710 | import datetime
from django.core.management.base import BaseCommand
from corehq.util.models import (
BouncedEmail,
PermanentBounceMeta,
BounceSubType,
)
class Command(BaseCommand):
    help = "Force an email to be marked as Permanently Bounced and blocked " \
           "from receiving any further emails from HQ"

    def add_arguments(self, parser):
        parser.add_argument('bounced_email', help="""
            Emails to mark as bounced
            - To mark multiple emails, separate with a ','
        """)
        # NOTE(review): this flag is accepted but never read in handle();
        # kept for CLI backward compatibility.
        parser.add_argument(
            '--show-details',
            action='store_true',
            default=False,
            help='Show extra details of bounced messages',
        )

    def handle(self, bounced_email, **options):
        """Mark each comma-separated email as permanently bounced."""
        # Strip whitespace so "a@x.com, b@y.com" doesn't try to block
        # " b@y.com", and drop empty entries from stray/trailing commas.
        bounced_emails = [
            email.strip() for email in bounced_email.split(',') if email.strip()
        ]
        for email in bounced_emails:
            self.mark_email_as_bounced(email)

    def mark_email_as_bounced(self, email_string):
        """Create the BouncedEmail and PermanentBounceMeta records that block
        ``email_string``, unless it is already blocked."""
        is_actively_blocked = (
            len(BouncedEmail.get_hard_bounced_emails([email_string])) > 0
        )
        if is_actively_blocked:
            self.stdout.write(
                f"{email_string} is already blocked. "
                f"Use check_bounced_email --show-details for more information."
            )
            return
        bounced_email = BouncedEmail.objects.create(email=email_string)
        PermanentBounceMeta.objects.create(
            bounced_email=bounced_email,
            timestamp=datetime.datetime.utcnow(),
            sub_type=BounceSubType.SUPPRESSED,
            reason="Manual suppression from management command."
        )
        self.stdout.write(f"Successfully marked {email_string} as bounced.")
dimagi/commcare-hq | corehq/apps/userreports/reports/filters/values.py | 1 | 16451 | import datetime
from django.urls import reverse
import sqlalchemy
from memoized import memoized
from sqlagg.filters import (
ANDFilter,
BasicFilter,
BetweenFilter,
EQFilter,
GTEFilter,
GTFilter,
INFilter,
ISNULLFilter,
LTEFilter,
LTFilter,
NOTEQFilter,
NOTNULLFilter,
ORFilter,
)
from dimagi.utils.dates import DateSpan
from corehq.apps.reports.daterange import (
get_all_daterange_choices,
get_daterange_start_end_dates,
)
from corehq.apps.reports.util import (
get_INFilter_bindparams,
get_INFilter_element_bindparam,
get_null_empty_value_bindparam)
# todo: if someone wants to name an actual choice any of these values, it will break
from corehq.apps.userreports.datatypes import (
DATA_TYPE_DATE,
DATA_TYPE_DATETIME
)
SHOW_ALL_CHOICE = '_all'
NONE_CHOICE = "\u2400"
CHOICE_DELIMITER = "\u001f"
class FilterValue:
    """Pairs a report filter spec with the value a user submitted for it.

    Subclasses translate the pair into a sqlagg filter object plus the bind
    parameter values needed to run it.
    """

    def __init__(self, filter, value):
        # filter: the raw filter spec dict from the ReportConfiguration
        # value: the filter value taken from the user request
        self.filter = filter
        self.value = value

    def to_sql_filter(self):
        raise NotImplementedError

    def to_sql_values(self):
        raise NotImplementedError
class DateFilterValue(FilterValue):
    """Translates a DateSpan value into a date-range SQL filter."""

    def __init__(self, filter, value):
        assert filter['type'] == 'date'
        assert isinstance(value, DateSpan) or value is None
        super().__init__(filter, value)

    def to_sql_filter(self):
        if self.value is None:
            return None
        column = self.filter['field']
        start_param = '%s_startdate' % self.filter['slug']
        end_param = '%s_enddate' % self.filter['slug']
        if self.value.startdate is None:
            # open-ended start: bound by the end date only
            return LTFilter(column, end_param)
        if self.value.enddate is None:
            # open-ended end: bound by the start date only
            return GTFilter(column, start_param)
        return BetweenFilter(column, start_param, end_param)

    def to_sql_values(self):
        if self.value is None:
            return {}
        startdate = self.value.startdate
        enddate = self.value.enddate
        if self.value.inclusive:
            enddate = self._offset_enddate(enddate)
        if self.filter.get('compare_as_string'):
            # the underlying column stores dates as strings
            startdate = None if startdate is None else startdate.isoformat()
            enddate = None if enddate is None else enddate.isoformat()
        values = {}
        if startdate is not None:
            values['%s_startdate' % self.filter['slug']] = startdate
        if enddate is not None:
            values['%s_enddate' % self.filter['slug']] = enddate
        return values

    def _offset_enddate(self, enddate):
        # extend the end date to the last instant of that day so the SQL
        # comparison covers the whole (inclusive) end day
        if enddate:
            enddate = datetime.datetime.combine(enddate, datetime.datetime.max.time())
        return enddate
class QuarterFilterValue(FilterValue):
    """Filters a date column to the quarter selected by the user."""

    @property
    def startdate_slug(self):
        return '{}_startdate'.format(self.filter['slug'])

    @property
    def enddate_slug(self):
        return '{}_enddate'.format(self.filter['slug'])

    def to_sql_filter(self):
        # inclusive lower bound, exclusive upper bound
        return ANDFilter([
            GTEFilter(self.filter['field'], self.startdate_slug),
            LTFilter(self.filter['field'], self.enddate_slug),
        ])

    def to_sql_values(self):
        return {
            self.startdate_slug: self.value.computed_startdate,
            self.enddate_slug: self.value.computed_enddate,
        }
class IsDistinctFromFilter(BasicFilter):
    # SQL "IS DISTINCT FROM": like != but yields true/false (not NULL) when
    # either side is NULL.
    def build_expression(self):
        return sqlalchemy.column(self.column_name).is_distinct_from(sqlalchemy.bindparam(self.parameter))
class NumericFilterValue(FilterValue):
    """Applies a numeric comparison (=, !=, <, >=, ...) to a column.

    ``value`` is None or a dict with 'operator' and a numeric 'operand'.
    """
    operators_to_filters = {
        '=': EQFilter,
        '!=': NOTEQFilter,
        'distinct from': IsDistinctFromFilter,
        '>=': GTEFilter,
        '>': GTFilter,
        '<=': LTEFilter,
        '<': LTFilter,
    }

    def __init__(self, filter, value):
        assert filter['type'] == "numeric"
        assert (isinstance(value, dict) and "operator" in value and "operand" in value) or value is None
        if value:
            assert value['operator'] in self.operators_to_filters
            assert isinstance(value['operand'], (int, float))
        super().__init__(filter, value)

    def to_sql_filter(self):
        if self.value is None:
            return None
        make_filter = self.operators_to_filters[self.value['operator']]
        return make_filter(self.filter['field'], self.filter['slug'])

    def to_sql_values(self):
        if self.value is None:
            return {}
        return {self.filter['slug']: self.value["operand"]}
class BasicBetweenFilter(BasicFilter):
    """
    BasicBetweenFilter implements a BetweenFilter that accepts the
    same constructor arguments as INFilter.

    PreFilterValue uses this to select the filter using
    PreFilterValue.value['operator'] and instantiate either filter the
    same way.
    """
    def build_expression(self):
        assert len(self.parameter) == 2
        lower, upper = self.parameter
        return sqlalchemy.column(self.column_name).between(
            sqlalchemy.bindparam(lower), sqlalchemy.bindparam(upper)
        )
class PreFilterValue(FilterValue):
# All dynamic date operators use BasicBetweenFilter
dyn_date_operators = [c.slug for c in get_all_daterange_choices()]
null_operator_filters = {
'=': ISNULLFilter,
'!=': NOTNULLFilter,
'is': ISNULLFilter,
'is not': NOTNULLFilter,
}
array_operator_filters = {
'in': INFilter,
'between': BasicBetweenFilter,
}
scalar_operator_filters = NumericFilterValue.operators_to_filters
def _is_dyn_date(self):
return self.value.get('operator') in self.dyn_date_operators
def _is_list(self):
"""
Returns true if operand should be treated like an array when building
the query.
"""
return isinstance(self.value['operand'], list)
def _has_empty_value(self):
"""
Returns true if operand has no value.
"""
return self.value['operand'] == '' or self.value['operand'] is None
def _is_empty(self):
"""
Returns true if the value should be treated as a filter to show only empty data
"""
operator = self.value.get('operator') or '='
return self._has_empty_value() and operator == '='
def _is_exists(self):
"""
Returns true if the value should be treated as a filter to show non-empty data
"""
return self._has_empty_value() and self.value.get('operator') == '!='
@property
def _array_filter(self):
operator = self.value.get('operator') or 'in'
try:
return self.array_operator_filters[operator]
except KeyError:
raise TypeError('Array value does not support "{}" operator'.format(operator))
@property
def _scalar_filter(self):
operator = self.value.get('operator') or '='
try:
return self.scalar_operator_filters[operator]
except KeyError:
raise TypeError('Scalar value does not support "{}" operator'.format(operator))
def to_sql_filter(self):
if self._is_dyn_date():
return BasicBetweenFilter(
self.filter['field'],
get_INFilter_bindparams(self.filter['slug'], ['start_date', 'end_date'])
)
elif self._is_empty():
if self.filter.get('datatype') in [DATA_TYPE_DATE, DATA_TYPE_DATETIME]:
return ISNULLFilter(self.filter['field'])
else:
return ORFilter([
EQFilter(self.filter['field'], self.filter['slug']),
ISNULLFilter(self.filter['field']),
])
elif self._is_exists():
if self.filter.get('datatype') in [DATA_TYPE_DATE, DATA_TYPE_DATETIME]:
return NOTNULLFilter(self.filter['field'])
else:
# this resolves to != '', which also filters out null data in postgres
return NOTEQFilter(self.filter['field'], self.filter['slug'])
elif self._is_list():
return self._array_filter(
self.filter['field'],
get_INFilter_bindparams(self.filter['slug'], self.value['operand'])
)
else:
return self._scalar_filter(self.filter['field'], self.filter['slug'])
def to_sql_values(self):
if self._is_dyn_date():
start_date, end_date = get_daterange_start_end_dates(self.value['operator'], *self.value['operand'])
return {
get_INFilter_element_bindparam(self.filter['slug'], i): str(v)
for i, v in enumerate([start_date, end_date])
}
elif self._is_empty() or self._is_exists():
if self.filter.get('datatype') in [DATA_TYPE_DATE, DATA_TYPE_DATETIME]:
# Both == '' and != '' do not work for dates in postgres so the expression should only be for NULL
# checks that get added later. Hence don't return any comparison for value here
return {}
else:
return {
self.filter['slug']: '',
}
elif self._is_list():
# Array params work like IN bind params
return {
get_INFilter_element_bindparam(self.filter['slug'], i): v
for i, v in enumerate(self.value['operand'])
}
else:
return {self.filter['slug']: self.value['operand']}
class ChoiceListFilterValue(FilterValue):
ALLOWED_TYPES = ('choice_list', 'dynamic_choice_list', 'multi_field_dynamic_choice_list')
def __init__(self, filter, value):
assert filter['type'] in self.ALLOWED_TYPES
if not isinstance(value, list):
# if in single selection mode just force it to a list
value = [value]
super(ChoiceListFilterValue, self).__init__(filter, value)
@property
def show_all(self):
return SHOW_ALL_CHOICE in [choice.value for choice in self.value]
@property
def is_null(self):
return NONE_CHOICE in [choice.value for choice in self.value]
def _get_value_without_nulls(self):
return [choice for choice in self.value if choice.value != NONE_CHOICE]
@property
def _ancestor_filter(self):
"""
Returns an instance of AncestorSQLParams per the spec, returns None
if it is not applicable
"""
ancestor_expression = self.filter.get('ancestor_expression')
if not (self.show_all and self.show_none) and ancestor_expression:
if len(self.value) > 1:
# if multiple locations are passed, for partition to work
# multiple ancestor locations should be passed, but that
# would require multiple DB calls. So instead don't pass
# any ancestors at all
return None
location = self.value[0].value
params = AncestorSQLParams(self.filter['ancestor_expression'], location)
if params.sql_value():
return params
def to_sql_filter(self):
if self.show_all:
return None
sql_filters = []
non_null_values = self._get_value_without_nulls()
if non_null_values:
in_filter = INFilter(
self.filter['field'],
get_INFilter_bindparams(self.filter['slug'], non_null_values)
)
if self._ancestor_filter:
sql_filters.append(ANDFilter([
self._ancestor_filter.sql_filter(),
in_filter,
]))
else:
sql_filters.append(in_filter)
elif self._ancestor_filter:
sql_filters.append(self._ancestor_filter.sql_filter())
if self.is_null:
# combine null and blank fields into a single filter
sql_filters.append(
ORFilter([
ISNULLFilter(self.filter['field']),
EQFilter(self.filter['field'], get_null_empty_value_bindparam(self.filter['slug'])),
])
)
if len(sql_filters) > 1:
return ORFilter(
sql_filters
)
else:
return sql_filters[0]
def to_sql_values(self):
if self.show_all:
return {}
values = {
get_INFilter_element_bindparam(self.filter['slug'], i): val.value
for i, val in enumerate(self._get_value_without_nulls())
}
if self.is_null:
values[get_null_empty_value_bindparam(self.filter['slug'])] = ''
if self._ancestor_filter:
values.update(self._ancestor_filter.sql_value())
return values
class MultiFieldChoiceListFilterValue(ChoiceListFilterValue):
ALLOWED_TYPES = ('multi_field_dynamic_choice_list', )
def to_sql_filter(self):
if self.show_all:
return None
if self.is_null:
return ORFilter([ISNULLFilter(field) for field in self.filter.get('fields')])
return ORFilter([
INFilter(
field,
get_INFilter_bindparams(self.filter['slug'], self.value)
) for field in self.filter.get('fields')
])
class LocationDrilldownFilterValue(FilterValue):
SHOW_NONE = "show_none"
SHOW_ALL = "show_all"
@property
def show_all(self):
return self.value == self.SHOW_ALL
@property
def show_none(self):
return self.value == self.SHOW_NONE
@property
def _ancestor_filter(self):
ancestor_expression = self.filter.get('ancestor_expression')
if (not (self.show_all and self.show_none) and
ancestor_expression and len(self.value) == 1):
params = AncestorSQLParams(self.filter['ancestor_expression'], self.value[0])
if params.sql_value():
return params
def to_sql_filter(self):
if self.show_all:
return None
in_filter = INFilter(
self.filter['field'],
get_INFilter_bindparams(self.filter['slug'], [None] if self.show_none else self.value)
)
if self._ancestor_filter:
return ANDFilter(
[self._ancestor_filter.sql_filter(), in_filter]
)
else:
return in_filter
def to_sql_values(self):
if self.show_all:
return {}
values = {
get_INFilter_element_bindparam(self.filter['slug'], i): val
for i, val in enumerate([None] if self.show_none else self.value)
}
if self._ancestor_filter:
values.update(self._ancestor_filter.sql_value())
return values
class AncestorSQLParams(object):
def __init__(self, ancestor_expression, location_id):
self.ancestor_expression = ancestor_expression
self.location_id = location_id
def sql_filter(self):
return EQFilter(self.ancestor_expression['field'], self.ancestor_expression['field'])
@memoized
def sql_value(self):
# all locations in self.value will have same ancestor, so just pick first one to query
from corehq.apps.locations.models import SQLLocation
location = SQLLocation.by_location_id(self.location_id)
if location:
ancestor = location.get_ancestor_of_type(
self.ancestor_expression['location_type']
)
else:
return None
if ancestor:
return {
self.ancestor_expression['field']: ancestor.location_id
}
else:
return None
def dynamic_choice_list_url(domain, report, filter):
# filter must be an instance of DynamicChoiceListFilter
return reverse('choice_list_api', args=[domain, report.spec._id, filter.name])
| bsd-3-clause | 1bb16e0728cc29afbb9c015a77043a51 | 31.770916 | 114 | 0.583673 | 4.142785 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/funding_pledge/admin.py | 1 | 1402 | from django.contrib import admin
from django.utils.translation import gettext_lazy as _
from bluebottle.funding.admin import PaymentChildAdmin, PaymentProviderChildAdmin, BankAccountChildAdmin
from bluebottle.funding.models import PaymentProvider, Payment
from bluebottle.funding_pledge.models import PledgePayment, PledgePaymentProvider, PledgeBankAccount
@admin.register(PledgePayment)
class PledgePaymentAdmin(PaymentChildAdmin):
base_model = Payment
fields = PaymentChildAdmin.fields
@admin.register(PledgePaymentProvider)
class PledgePaymentProviderAdmin(PaymentProviderChildAdmin):
base_model = PaymentProvider
readonly_fields = ('settings',)
fields = readonly_fields
def settings(self, obj):
return _('No settings are required for this payment provider')
@admin.register(PledgeBankAccount)
class PledgeBankAccountAdmin(BankAccountChildAdmin):
model = PledgeBankAccount
fields = (
'account_holder_name',
'account_holder_address',
'account_holder_postal_code',
'account_holder_city',
'account_holder_country',
'account_number',
'account_details',
'account_bank_country'
) + BankAccountChildAdmin.fields
list_filter = ['reviewed']
search_fields = ['account_holder_name', 'account_number']
list_display = ['created', 'account_holder_name', 'account_number', 'reviewed']
| bsd-3-clause | 9eed3e7c7f23c43abf117bd0d76d0e8b | 33.195122 | 104 | 0.740371 | 4.087464 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/funding/migrations/0023_add_permissions.py | 1 | 1414 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-08-28 07:04
from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.utils.utils import update_group_permissions
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
def add_group_permissions(apps, schema_editor):
tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
with LocalTenant(tenant):
group_perms = {
'Staff': {
'perms': (
'add_funding', 'change_funding', 'delete_funding',
)
},
'Anonymous': {
'perms': ('api_read_funding', ) if not properties.CLOSED_SITE else ()
},
'Authenticated': {
'perms': (
'api_read_funding',
'api_add_own_funding',
'api_change_own_funding',
'api_delete_own_funding',
)
}
}
update_group_permissions('funding', group_perms, apps)
class Migration(migrations.Migration):
dependencies = [
('funding', '0022_auto_20190804_1022'),
]
operations = [
migrations.RunPython(
add_group_permissions,
migrations.RunPython.noop
)
]
| bsd-3-clause | 08d072d9020883752b2ce92a84b4851a | 27.28 | 85 | 0.557284 | 4.195846 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/smsbillables/tests/test_billable_creation.py | 1 | 2946 | from decimal import Decimal
from django.test import TestCase
from corehq.apps.sms.api import create_billable_for_sms
from corehq.apps.sms.models import OUTGOING
from corehq.apps.smsbillables.models import SmsBillable, SmsGatewayFee
from corehq.apps.smsbillables.tests.utils import create_sms, short_text, long_text
from corehq.messaging.smsbackends.test.models import SQLTestSMSBackend
class TestBillableCreation(TestCase):
@classmethod
def setUpClass(cls):
super(TestBillableCreation, cls).setUpClass()
cls.domain = 'sms_test_domain'
cls.backend = SQLTestSMSBackend(
name="TEST",
is_global=True,
domain=cls.domain,
hq_api_id=SQLTestSMSBackend.get_api_id()
)
cls.backend.save()
@classmethod
def tearDownClass(cls):
cls.backend.delete()
super(TestBillableCreation, cls).tearDownClass()
def setUp(self):
super(TestBillableCreation, self).setUp()
self.billable = self.gateway_fee = self.msg = None
def tearDown(self):
if self.billable is not None:
self.billable.delete()
if self.gateway_fee is not None:
self.gateway_fee.delete()
if self.msg is not None:
self.msg.delete()
super(TestBillableCreation, self).tearDown()
def test_creation(self):
self.msg = create_sms(self.domain, self.backend, '+12223334444', OUTGOING, short_text)
create_billable_for_sms(self.msg, delay=False)
sms_billables = SmsBillable.objects.filter(
domain=self.domain,
log_id=self.msg.couch_id
)
self.assertEqual(sms_billables.count(), 1)
self.billable = sms_billables[0]
self.assertEqual(self.billable.multipart_count, 1)
def test_long_creation(self):
self.msg = create_sms(self.domain, self.backend, '+12223334444', OUTGOING, long_text)
create_billable_for_sms(self.msg, delay=False)
sms_billables = SmsBillable.objects.filter(
domain=self.domain,
log_id=self.msg.couch_id
)
self.assertEqual(sms_billables.count(), 1)
self.billable = sms_billables[0]
self.assertEqual(self.billable.multipart_count, 2)
def test_gateway_fee_after_creation(self):
expected_fee = Decimal('0.005')
self.msg = create_sms(self.domain, self.backend, '+12223334444', OUTGOING, short_text)
self.gateway_fee = SmsGatewayFee.create_new(self.backend.hq_api_id, self.msg.direction, expected_fee)
create_billable_for_sms(self.msg, delay=False)
sms_billables = SmsBillable.objects.filter(
domain=self.domain,
log_id=self.msg.couch_id
)
self.assertEqual(sms_billables.count(), 1)
self.billable = sms_billables[0]
actual_fee = self.billable.gateway_fee.amount
self.assertEqual(expected_fee, actual_fee)
| bsd-3-clause | cf943653e202fb1ce5276fbd0b30ab0d | 33.658824 | 109 | 0.65241 | 3.511323 | false | true | false | false |
dimagi/commcare-hq | corehq/apps/domain/shortcuts.py | 1 | 1655 | """
Shortcuts for working with domains and users.
"""
from django.contrib.auth.models import User
from corehq.apps.domain.models import Domain
def create_domain(name, active=True):
"""Create domain without secure submissions for tests"""
return Domain.get_or_create_with_name(name=name, is_active=active,
secure_submissions=False)
def create_user(username, password, is_staff=False, is_superuser=False,
is_active=True, password_hashed=False, **kwargs):
user = User()
user.username = username.lower()
for key, val in kwargs.items():
if key and val:
setattr(user, key, val)
user.is_staff = is_staff
user.is_active = is_active
user.is_superuser = is_superuser
if not password_hashed:
user.set_password(password)
else:
user.password = password
# at this stage in the process there is no couch user so it's pointless
# trying to update it.
user.DO_NOT_SAVE_COUCH_USER = True
user.save()
return user
def publish_domain_saved(domain_obj):
from corehq.apps.change_feed import topics
from corehq.apps.change_feed.producer import producer
producer.send_change(topics.DOMAIN, _domain_to_change_meta(domain_obj))
def _domain_to_change_meta(domain_obj):
from corehq.apps.change_feed import data_sources
from corehq.apps.change_feed.document_types import change_meta_from_doc
domain_doc = domain_obj.to_json()
return change_meta_from_doc(
document=domain_doc,
data_source_type=data_sources.SOURCE_COUCH,
data_source_name=Domain.get_db().dbname,
)
| bsd-3-clause | 31627a9b4deab1c30c0fa6d8a189d6d1 | 30.226415 | 75 | 0.673112 | 3.653422 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/time_based/migrations/0047_migrate_to_slots.py | 1 | 1879 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2021-01-06 14:09
from __future__ import unicode_literals
from django.db import migrations
from django.utils.timezone import now
def map_status(status):
mapping = {
'draft': 'draft',
'submitted': 'draft',
'needs_work': 'draft',
'rejected': 'draft',
'deleted': 'draft',
'cancelled': 'draft',
'expired': 'finished',
'open': 'open',
'succeeded': 'finished',
'full': 'full',
'running': 'running'
}
return getattr(mapping, status, 'draft')
def migrate_to_slots(apps, schema_editor):
DateActivity = apps.get_model('time_based', 'DateActivity')
DateActivitySlot = apps.get_model('time_based', 'DateActivitySlot')
for activity in DateActivity.objects.all():
status = map_status(activity.status)
slot = DateActivitySlot(
status=status,
activity_id=activity.id,
start=activity.start,
duration=activity.duration,
is_online=activity.is_online,
online_meeting_url=activity.online_meeting_url,
location=activity.location,
location_hint=activity.location_hint
)
if slot.start and slot.duration and slot.start + slot.duration < now():
slot.status = 'finished'
if slot.status == 'draft' \
and slot.start \
and slot.duration \
and (slot.is_online or slot.location):
slot.status = 'open'
slot.execute_triggers(send_messages=False)
slot.save()
class Migration(migrations.Migration):
dependencies = [
('time_based', '0046_auto_20210106_1507'),
]
operations = [
migrations.RunPython(
migrate_to_slots,
migrations.RunPython.noop
)
]
| bsd-3-clause | 9230e315723cba7b36688f45cbff49c7 | 27.044776 | 79 | 0.57637 | 4.014957 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/initiatives/migrations/0001_initial.py | 1 | 3393 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-03-29 10:01
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('bb_projects', '0015_auto_20190329_1101'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('categories', '0008_authenticated-permissions'),
('files', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Initiative',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('review_status', models.CharField(choices=[(b'created', 'created'), (b'submitted', 'submitted'), (b'needs_work', 'needs work'), (b'accepted', 'accepted'), (b'cancelled', 'cancelled'), (b'rejected', 'rejected')], default=b'created', max_length=50)),
('title', models.CharField(max_length=255, verbose_name='title')),
('slug', models.SlugField(max_length=100, verbose_name='slug')),
('pitch', models.TextField(blank=True, help_text='Pitch your smart idea in one sentence', verbose_name='pitch')),
('story', models.TextField(blank=True, verbose_name='story')),
('video_url', models.URLField(blank=True, default=b'', help_text="Do you have a video pitch or a short movie that explains your initiative? Cool! We can't wait to see it! You can paste the link to YouTube or Vimeo video here", max_length=100, null=True, verbose_name='video')),
('place', models.CharField(blank=True, help_text='Geographical impact location', max_length=200, null=True)),
('categories', models.ManyToManyField(blank=True, to='categories.Category')),
('image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='files.Image')),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='own_initiative', to=settings.AUTH_USER_MODEL, verbose_name='owner')),
('reviewer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='review_initiative', to=settings.AUTH_USER_MODEL, verbose_name='reviewer')),
('theme', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='bb_projects.ProjectTheme')),
],
options={
'verbose_name': 'Initiative',
'verbose_name_plural': 'Initiatives',
'permissions': (('api_read_initiative', 'Can view initiative through the API'), ('api_add_initiative', 'Can add initiative through the API'), ('api_change_initiative', 'Can change initiative through the API'), ('api_delete_initiative', 'Can delete initiative through the API'), ('api_read_own_initiative', 'Can view own initiative through the API'), ('api_add_own_initiative', 'Can add own initiative through the API'), ('api_change_own_initiative', 'Can change own initiative through the API'), ('api_change_own_running_initiative', 'Can change own initiative through the API'), ('api_delete_own_initiative', 'Can delete own initiative through the API')),
},
),
]
| bsd-3-clause | 2c139e1f861402a23437d8e72ee08504 | 74.4 | 672 | 0.652815 | 3.908986 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/users/views/__init__.py | 1 | 54332 | import json
from collections import defaultdict
from datetime import datetime
from django.conf import settings
import langcodes
import six.moves.urllib.error
import six.moves.urllib.parse
import six.moves.urllib.request
from couchdbkit.exceptions import ResourceNotFound
from crispy_forms.utils import render_crispy_form
from corehq.apps.registry.utils import get_data_registry_dropdown_options
from corehq.apps.reports.models import TableauVisualization
from corehq.apps.sso.models import IdentityProvider
from corehq.apps.sso.utils.user_helpers import get_email_domain_from_username
from django.contrib import messages
from django.core.exceptions import ValidationError
from django.http import (
Http404,
HttpResponse,
HttpResponseRedirect,
JsonResponse,
HttpResponseBadRequest,
)
from django.http.response import HttpResponseServerError
from django.shortcuts import render
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.safestring import mark_safe
from django.utils.translation import gettext as _, ngettext, gettext_lazy, gettext_noop
from corehq.apps.users.analytics import get_role_user_count
from dimagi.utils.couch import CriticalSection
from soil.exceptions import TaskFailedError
from soil.util import expose_cached_download, get_download_context
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.debug import sensitive_post_parameters
from django.views.decorators.http import require_GET, require_POST
from django_digest.decorators import httpdigest
from django_otp.plugins.otp_static.models import StaticToken
from django_prbac.utils import has_privilege
from memoized import memoized
from corehq import privileges, toggles
from corehq.apps.accounting.decorators import always_allow_project_access, requires_privilege_with_fallback
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.analytics.tasks import (
HUBSPOT_INVITATION_SENT_FORM,
send_hubspot_form,
track_workflow,
)
from corehq.apps.app_manager.dbaccessors import get_app_languages
from corehq.apps.cloudcare.esaccessors import login_as_user_filter
from corehq.apps.custom_data_fields.models import PROFILE_SLUG
from corehq.apps.domain.decorators import (
domain_admin_required,
login_and_domain_required,
require_superuser,
)
from corehq.apps.domain.forms import clean_password
from corehq.apps.domain.models import Domain
from corehq.apps.domain.views.base import BaseDomainView
from corehq.apps.enterprise.models import EnterprisePermissions
from corehq.apps.es import UserES, queries
from corehq.apps.hqwebapp.crispy import make_form_readonly
from corehq.apps.locations.permissions import (
location_safe,
user_can_access_other_user,
)
from corehq.apps.registration.forms import (
AdminInvitesUserForm,
)
from corehq.apps.reports.util import get_possible_reports
from corehq.apps.sms.mixin import BadSMSConfigException
from corehq.apps.sms.verify import (
VERIFICATION__ALREADY_IN_USE,
VERIFICATION__ALREADY_VERIFIED,
VERIFICATION__RESENT_PENDING,
VERIFICATION__WORKFLOW_STARTED,
initiate_sms_verification_workflow,
)
from corehq.apps.translations.models import SMSTranslations
from corehq.apps.userreports.util import has_report_builder_access
from corehq.apps.users.audit.change_messages import UserChangeMessage
from corehq.apps.users.decorators import (
can_use_filtered_user_download,
require_can_edit_or_view_web_users,
require_can_edit_web_users,
require_can_view_roles,
require_permission_to_edit_user,
)
from corehq.apps.users.exceptions import MissingRoleException, InvalidRequestException
from corehq.apps.users.forms import (
BaseUserInfoForm,
CommtrackUserForm,
SetUserPasswordForm,
UpdateUserRoleForm,
)
from corehq.apps.users.landing_pages import get_allowed_landing_pages, validate_landing_page
from corehq.apps.users.models import (
CommCareUser,
CouchUser,
DomainMembershipError,
DomainRemovalRecord,
DomainRequest,
Invitation,
StaticRole,
WebUser,
HqPermissions,
UserRole,
)
from corehq.apps.users.util import log_user_change
from corehq.apps.users.views.utils import get_editable_role_choices, BulkUploadResponseWrapper
from corehq.apps.user_importer.importer import UserUploadError
from corehq.apps.user_importer.models import UserUploadRecord
from corehq.apps.user_importer.tasks import import_users_and_groups, parallel_user_import
from corehq.const import USER_CHANGE_VIA_WEB
from corehq.pillows.utils import WEB_USER_TYPE
from corehq.toggles import PARALLEL_USER_IMPORTS
from corehq.util.couch import get_document_or_404
from corehq.util.view_utils import json_error
from corehq.util.workbook_json.excel import (
WorkbookJSONError,
WorksheetNotFound,
get_workbook,
)
def _users_context(request, domain):
    """Build the common template context for user-management pages.

    Tags the requesting couch user and every web user of *domain* with
    ``current_domain`` so downstream helpers know which domain membership
    to read, then returns them alongside the domain name.
    """
    couch_user = request.couch_user
    web_users = WebUser.by_domain(domain)

    domain_users = [couch_user]
    domain_users.extend(web_users)
    for domain_user in domain_users:
        domain_user.current_domain = domain

    return {
        'web_users': web_users,
        'domain': domain,
        'couch_user': couch_user,
    }
class BaseUserSettingsView(BaseDomainView):
    """Base view for everything under the project's "Users" section.

    Supplies the section name and landing URL, and injects the requesting
    couch user (bound to the current domain) into the template context.
    """
    section_name = gettext_noop("Users")

    @property
    @memoized
    def section_url(self):
        # The section landing page redirects to the first accessible subpage.
        return reverse(DefaultProjectUserSettingsView.urlname, args=[self.domain])

    @property
    @memoized
    def couch_user(self):
        current_user = self.request.couch_user
        if current_user:
            # Bind the user to this domain so membership-dependent
            # attributes resolve correctly in templates.
            current_user.current_domain = self.domain
        return current_user

    @property
    def main_context(self):
        ctx = super().main_context
        ctx['couch_user'] = self.couch_user
        return ctx
@method_decorator(always_allow_project_access, name='dispatch')
@location_safe
class DefaultProjectUserSettingsView(BaseUserSettingsView):
    """Landing view for the "Users" section.

    Renders nothing itself: ``get`` redirects the requesting user to the
    first user-management page their permissions allow, or raises 404 if
    they can access none of them.
    """
    urlname = "users_default"

    @property
    @memoized
    def redirect(self):
        """Return the URL of the first user-management page the user may see.

        Pages are checked in priority order: mobile workers, groups,
        web users, roles, locations. Returns None when no page is
        accessible or no couch user exists for the request.
        """
        redirect = None
        has_project_access = has_privilege(self.request, privileges.PROJECT_ACCESS)
        user = CouchUser.get_by_user_id(self.couch_user._id)
        if user:
            # Mobile worker list: needs edit or view permission on
            # commcare users, plus project access.
            if ((user.has_permission(self.domain, 'edit_commcare_users')
                    or user.has_permission(self.domain, 'view_commcare_users'))
                    and has_project_access):
                # imported here to avoid a circular import
                from corehq.apps.users.views.mobile import MobileWorkerListView
                redirect = reverse(
                    MobileWorkerListView.urlname,
                    args=[self.domain]
                )
            # Groups list: edit or view groups, plus project access.
            elif ((user.has_permission(self.domain, 'edit_groups')
                    or user.has_permission(self.domain, 'view_groups'))
                    and has_project_access):
                from corehq.apps.users.views.mobile import GroupsListView
                redirect = reverse(
                    GroupsListView.urlname,
                    args=[self.domain]
                )
            # Web users list does NOT require project access.
            elif (user.has_permission(self.domain, 'edit_web_users')
                    or user.has_permission(self.domain, 'view_web_users')):
                redirect = reverse(
                    ListWebUsersView.urlname,
                    args=[self.domain]
                )
            # Roles list: view_roles plus project access.
            elif (user.has_permission(self.domain, 'view_roles')
                    and has_project_access):
                from corehq.apps.users.views import ListRolesView
                redirect = reverse(
                    ListRolesView.urlname,
                    args=[self.domain]
                )
            # Locations list: edit or view locations, plus project access.
            elif ((user.has_permission(self.domain, 'edit_locations')
                    or user.has_permission(self.domain, 'view_locations'))
                    and has_project_access):
                from corehq.apps.locations.views import LocationsListView
                redirect = reverse(
                    LocationsListView.urlname,
                    args=[self.domain]
                )
        return redirect

    def get(self, request, *args, **kwargs):
        # 404 rather than render: this view exists only to forward the
        # user to a page they are allowed to see.
        if not self.redirect:
            raise Http404()
        return HttpResponseRedirect(self.redirect)
class BaseEditUserView(BaseUserSettingsView):
    """Abstract base for pages that edit a single user within a domain.

    Subclasses must implement ``form_user_update``. This base supplies
    the user lookup, role helpers, the commtrack (location/program)
    form, and the shared POST dispatch keyed on the ``form_type`` field.
    """

    @property
    @memoized
    def page_url(self):
        # Implicitly returns None when the subclass defines no urlname.
        if self.urlname:
            return reverse(self.urlname, args=[self.domain, self.editable_user_id])

    @property
    def parent_pages(self):
        # Breadcrumb: back to the web users list.
        return [{
            'title': ListWebUsersView.page_title,
            'url': reverse(ListWebUsersView.urlname, args=[self.domain]),
        }]

    @property
    def editable_user_id(self):
        # Id of the user being edited, taken from the URL kwargs.
        return self.kwargs.get('couch_user_id')

    @property
    @memoized
    def editable_user(self):
        """Return the WebUser being edited, or raise 404 if not found."""
        try:
            return get_document_or_404(WebUser, self.domain, self.editable_user_id)
        except (ResourceNotFound, CouchUser.AccountTypeError):
            raise Http404()

    @property
    def existing_role(self):
        """Qualified id of the edited user's current role in this domain.

        Raises Http404 when the user is not a member of the domain, and
        MissingRoleException for a WebUser that has no role assigned.
        Returns None only for non-WebUser users without a role.
        """
        try:
            role = self.editable_user.get_role(self.domain)
        except DomainMembershipError:
            raise Http404()

        if role is None:
            if isinstance(self.editable_user, WebUser):
                raise MissingRoleException()
            return None
        else:
            return role.get_qualified_id()

    @property
    @memoized
    def editable_role_choices(self):
        # Roles the requesting user is allowed to assign (admin excluded).
        return get_editable_role_choices(self.domain, self.request.couch_user, allow_admin_role=False)

    @property
    def can_change_user_roles(self):
        """Whether the requesting user may change the edited user's role.

        Requires assignable roles, that the user is not editing
        themselves, and either domain-admin status or that the current
        role (if any) is one the requesting user could assign.
        """
        return (
            bool(self.editable_role_choices) and
            self.request.couch_user.user_id != self.editable_user_id and
            (
                self.request.couch_user.is_domain_admin(self.domain) or
                not self.existing_role or
                self.existing_role in [choice[0] for choice in self.editable_role_choices]
            )
        )

    def form_user_update(self):
        """Subclass hook: the form used for the "update-user" POST."""
        raise NotImplementedError()

    @property
    def main_context(self):
        context = super(BaseEditUserView, self).main_context
        context.update({
            'couch_user': self.editable_user,
            'form_user_update': self.form_user_update,
            'phonenumbers': self.editable_user.phone_numbers_extended(self.request.couch_user),
        })
        return context

    @property
    def backup_token(self):
        """Static two-factor backup token for the edited user.

        Only applies when the domain enforces two-factor auth; otherwise
        returns None. The CriticalSection guards against concurrently
        creating duplicate backup devices/tokens for the same user.
        """
        if Domain.get_by_name(self.request.domain).two_factor_auth:
            with CriticalSection([f"backup-token-{self.editable_user._id}"]):
                device = (self.editable_user.get_django_user()
                          .staticdevice_set
                          .get_or_create(name='backup')[0])
                token = device.token_set.first()
                if token:
                    return device.token_set.first().token
                else:
                    # No token yet: mint a random one on the backup device.
                    return device.token_set.create(token=StaticToken.random_token()).token
        return None

    @property
    @memoized
    def commtrack_form(self):
        """Location/program form; bound to POST data when it was submitted.

        Otherwise initialized from the edited user's current domain
        membership (primary location, program, assigned locations).
        """
        if self.request.method == "POST" and self.request.POST['form_type'] == "commtrack":
            return CommtrackUserForm(self.request.POST, request=self.request, domain=self.domain)

        user_domain_membership = self.editable_user.get_domain_membership(self.domain)
        return CommtrackUserForm(
            domain=self.domain,
            request=self.request,
            initial={
                'primary_location': user_domain_membership.location_id,
                'program_id': user_domain_membership.program_id,
                'assigned_locations': user_domain_membership.assigned_location_ids,
            },
        )

    def update_user(self):
        # Returns the form's update result when valid, otherwise None.
        if self.form_user_update.is_valid():
            return self.form_user_update.update_user()

    def post(self, request, *args, **kwargs):
        """Dispatch on the ``form_type`` field; redirect back on success."""
        saved = False
        if self.request.POST['form_type'] == "commtrack":
            if self.commtrack_form.is_valid():
                self.commtrack_form.save(self.editable_user)
                saved = True
        elif self.request.POST['form_type'] == "update-user":
            if self.update_user():
                messages.success(self.request, _('Changes saved for user "%s"') % self.editable_user.raw_username)
                saved = True
        if saved:
            return HttpResponseRedirect(self.page_url)
        else:
            # Re-render so form errors are displayed.
            return self.get(request, *args, **kwargs)
class EditWebUserView(BaseEditUserView):
    """Page for editing a single web user's role and related settings."""
    template_name = "users/edit_web_user.html"
    urlname = "user_account"
    page_title = gettext_noop("Edit Web User")

    @property
    def page_name(self):
        # Flag read-only access in the page title.
        if self.request.is_view_only:
            return _("Edit Web User (View Only)")
        return self.page_title

    @property
    @memoized
    def form_user_update(self):
        """Role-update form; bound to POST data when this request submitted it.

        The role field is populated only when the requesting user may
        change roles; otherwise it is removed from the form entirely.
        Shows an error message when the edited web user has no role.
        """
        if self.request.method == "POST" and self.request.POST['form_type'] == "update-user":
            data = self.request.POST
        else:
            data = None
        form = UpdateUserRoleForm(data=data, domain=self.domain, existing_user=self.editable_user,
                                  request=self.request)

        if self.can_change_user_roles:
            try:
                existing_role = self.existing_role
            except MissingRoleException:
                existing_role = None
                messages.error(self.request, _("""
                    This user has no role. Please assign this user a role and save.
                """))
            form.load_roles(current_role=existing_role, role_choices=self.user_role_choices)
        else:
            del form.fields['role']

        return form

    @property
    def user_role_choices(self):
        # Offer an explicit "(none)" choice when the user currently has
        # no role, so the form can render a valid initial selection.
        role_choices = get_editable_role_choices(self.domain, self.request.couch_user, allow_admin_role=True)
        try:
            self.existing_role
        except MissingRoleException:
            role_choices = [('none', _('(none)'))] + role_choices
        return role_choices

    @property
    @memoized
    def can_grant_superuser_access(self):
        # Requires being a superuser AND having the SUPPORT feature flag.
        return self.request.couch_user.is_superuser and toggles.SUPPORT.enabled(self.request.couch_user.username)

    @property
    def page_context(self):
        ctx = {
            'form_uneditable': BaseUserInfoForm(),
            'can_edit_role': self.can_change_user_roles,
        }
        if self.request.is_view_only:
            # Render all fields disabled for view-only access.
            make_form_readonly(self.commtrack_form)
        if (self.request.project.commtrack_enabled or
                self.request.project.uses_locations):
            ctx.update({'update_form': self.commtrack_form})
        if self.can_grant_superuser_access:
            ctx.update({'update_permissions': True})

        ctx.update({'token': self.backup_token})

        # SSO: surface whether this domain trusts the user's active
        # identity provider, so the template can offer a trust action.
        idp = IdentityProvider.get_active_identity_provider_by_username(
            self.editable_user.username
        )
        ctx.update({
            'has_untrusted_identity_provider': (
                not IdentityProvider.does_domain_trust_user(
                    self.domain,
                    self.editable_user.username
                )
            ),
            'idp_name': idp.name if idp else '',
        })
        return ctx

    @method_decorator(always_allow_project_access)
    @method_decorator(require_can_edit_or_view_web_users)
    def dispatch(self, request, *args, **kwargs):
        return super(EditWebUserView, self).dispatch(request, *args, **kwargs)

    def get(self, request, *args, **kwargs):
        return super(EditWebUserView, self).get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        # View-only users cannot submit changes; just re-render the page.
        if self.request.is_view_only:
            return self.get(request, *args, **kwargs)

        # Establish domain trust with the user's active identity provider,
        # then fall through to the base class's normal form handling.
        if self.request.POST['form_type'] == 'trust-identity-provider':
            idp = IdentityProvider.get_active_identity_provider_by_username(
                self.editable_user.username
            )
            if idp:
                idp.create_trust_with_domain(
                    self.domain,
                    self.request.user.username
                )
                messages.success(
                    self.request,
                    _('Your project space "{domain}" now trusts the SSO '
                      'Identity Provider "{idp_name}".').format(
                        domain=self.domain,
                        idp_name=idp.name,
                    )
                )

        return super(EditWebUserView, self).post(request, *args, **kwargs)
def get_domain_languages(domain):
    """Return sorted ``(lang_code, label)`` choices for all languages used
    by the domain's apps or SMS translations.

    Falls back to the full language list when the domain has none of its
    own.  Labels include the human-readable name when known.
    """
    app_languages = get_app_languages(domain)
    translations = SMSTranslations.objects.filter(domain=domain).first()
    sms_languages = translations.langs if translations else []

    choices = []
    for code in app_languages.union(sms_languages):
        name = langcodes.get_name(code)
        choices.append((code, "{} ({})".format(code, name) if name else code))
    return sorted(choices) or langcodes.get_all_langs_for_select()
class BaseRoleAccessView(BaseUserSettingsView):
    """Base for role/permission pages; exposes memoized privilege checks
    that control which role-editing options the UI shows."""

    @property
    @memoized
    def can_restrict_access_by_location(self):
        return self.domain_object.has_privilege(
            privileges.RESTRICT_ACCESS_BY_LOCATION)

    @property
    @memoized
    def web_apps_privilege(self):
        return self.domain_object.has_privilege(
            privileges.CLOUDCARE
        )

    @property
    @memoized
    def release_management_privilege(self):
        return self.domain_object.has_privilege(privileges.RELEASE_MANAGEMENT)

    @property
    @memoized
    def lite_release_management_privilege(self):
        """
        Only true if domain does not have privileges.RELEASE_MANAGEMENT
        """
        return self.domain_object.has_privilege(privileges.LITE_RELEASE_MANAGEMENT) and \
            not self.domain_object.has_privilege(privileges.RELEASE_MANAGEMENT)
@method_decorator(always_allow_project_access, name='dispatch')
@method_decorator(toggles.ENTERPRISE_USER_MANAGEMENT.required_decorator(), name='dispatch')
class EnterpriseUsersView(BaseRoleAccessView):
    """Feature-flagged page listing users across the enterprise; data is
    loaded asynchronously via ``paginate_enterprise_users``."""
    template_name = 'users/enterprise_users.html'
    page_title = gettext_lazy("Enterprise Users")
    urlname = 'enterprise_users'

    @property
    def page_context(self):
        return {
            "show_profile_column": domain_has_privilege(self.domain, privileges.APP_USER_PROFILES),
        }
@method_decorator(always_allow_project_access, name='dispatch')
@method_decorator(require_can_edit_or_view_web_users, name='dispatch')
class ListWebUsersView(BaseRoleAccessView):
    """Web Users list page: active users load asynchronously via
    ``paginate_web_users``; pending invitations and access requests are
    rendered from this context."""
    template_name = 'users/web_users.html'
    page_title = gettext_lazy("Web Users")
    urlname = 'web_users'

    @property
    @memoized
    def role_labels(self):
        # Map qualified role id -> display name, including the static
        # domain-admin role
        return {
            r.get_qualified_id(): r.name
            for r in [StaticRole.domain_admin(self.domain)] + UserRole.objects.get_by_domain(self.domain)
        }

    @property
    @memoized
    def invitations(self):
        return [
            {
                "uuid": str(invitation.uuid),
                "email": invitation.email,
                "email_marked_as_bounced": bool(invitation.email_marked_as_bounced),
                "invited_on": invitation.invited_on,
                "role_label": self.role_labels.get(invitation.role, ""),
                "email_status": invitation.email_status,
            }
            for invitation in Invitation.by_domain(self.domain)
        ]

    @property
    def page_context(self):
        from corehq.apps.users.views.mobile.users import FilteredWebUserDownload
        if can_use_filtered_user_download(self.domain):
            bulk_download_url = reverse(FilteredWebUserDownload.urlname, args=[self.domain])
        else:
            bulk_download_url = reverse("download_web_users", args=[self.domain])
        return {
            'invitations': self.invitations,
            # Access requests are only visible to domain admins
            'requests': DomainRequest.by_domain(self.domain) if self.request.couch_user.is_domain_admin else [],
            'admins': WebUser.get_admins_by_domain(self.domain),
            'domain_object': self.domain_object,
            'bulk_download_url': bulk_download_url,
            'from_address': settings.DEFAULT_FROM_EMAIL
        }
@require_can_edit_or_view_web_users
def download_web_users(request, domain):
    """Kick off a bulk download of the domain's web users (tracked for
    analytics); delegates to the shared mobile-users download view."""
    track_workflow(request.couch_user.get_email(), 'Bulk download web users selected')
    from corehq.apps.users.views.mobile.users import download_users
    return download_users(request, domain, user_type=WEB_USER_TYPE)
class DownloadWebUsersStatusView(BaseUserSettingsView):
    """Progress page for an async web-user download, polling the soil
    task identified by ``download_id``."""
    urlname = 'download_web_users_status'
    page_title = gettext_noop('Download Web Users Status')

    @method_decorator(require_can_edit_or_view_web_users)
    def dispatch(self, request, *args, **kwargs):
        return super().dispatch(request, *args, **kwargs)

    @property
    def parent_pages(self):
        return [{
            'title': ListWebUsersView.page_title,
            'url': reverse(ListWebUsersView.urlname, args=[self.domain]),
        }]

    def get(self, request, *args, **kwargs):
        context = super(DownloadWebUsersStatusView, self).main_context
        context.update({
            'domain': self.domain,
            'download_id': kwargs['download_id'],
            'poll_url': reverse('user_download_job_poll', args=[self.domain, kwargs['download_id']]),
            'title': _("Download Web Users Status"),
            'progress_text': _("Preparing web user download."),
            'error_text': _("There was an unexpected error! Please try again or report an issue."),
            'next_url': reverse(ListWebUsersView.urlname, args=[self.domain]),
            'next_url_text': _("Go back to Web Users"),
        })
        return render(request, 'hqwebapp/soil_status_full.html', context)

    # NOTE(review): defined as a plain method where the base class appears
    # to use a property elsewhere; Django templates call callables
    # automatically, so both work — confirm before changing.
    def page_url(self):
        return reverse(self.urlname, args=self.args, kwargs=self.kwargs)
class ListRolesView(BaseRoleAccessView):
    """Roles & Permissions page: lists every role for the domain along
    with the per-role metadata the UI needs (user counts, deletability,
    location-restriction warnings)."""
    template_name = 'users/roles_and_permissions.html'
    page_title = gettext_lazy("Roles & Permissions")
    urlname = 'roles_and_permissions'

    @method_decorator(require_can_view_roles)
    def dispatch(self, request, *args, **kwargs):
        return super(ListRolesView, self).dispatch(request, *args, **kwargs)

    @property
    def can_edit_roles(self):
        return (has_privilege(self.request, privileges.ROLE_BASED_ACCESS)
                and self.couch_user.is_domain_admin)

    @property
    def landing_page_choices(self):
        return [
            {'id': None, 'name': _('Use Default')}
        ] + [
            {'id': page.id, 'name': _(page.name)}
            for page in get_allowed_landing_pages(self.domain)
        ]

    @property
    @memoized
    def non_admin_roles(self):
        # Named roles sort alphabetically; roles without a name sink to the
        # bottom ('\uFFFF' sorts after all printable characters)
        return list(sorted(
            [role for role in UserRole.objects.get_by_domain(self.domain) if not role.is_commcare_user_default],
            key=lambda role: role.name if role.name else '\uFFFF'
        )) + [UserRole.commcare_user_default(self.domain)]  # mobile worker default listed last

    def get_roles_for_display(self):
        """Serialize all roles (admin first) for the page, flagging which
        may not be deleted and which carry location restrictions the
        current plan no longer permits."""
        show_es_issue = False
        role_view_data = [StaticRole.domain_admin(self.domain).to_json()]
        for role in self.non_admin_roles:
            role_data = role.to_json()
            role_view_data.append(role_data)
            if role.is_commcare_user_default:
                # The mobile-worker default role can never be deleted
                role_data["preventRoleDelete"] = True
            else:
                try:
                    user_count = get_role_user_count(role.domain, role.couch_id)
                    role_data["preventRoleDelete"] = bool(user_count)
                except TypeError:
                    # when query_result['hits'] returns None due to an ES issue
                    show_es_issue = True
            role_data["has_unpermitted_location_restriction"] = (
                not self.can_restrict_access_by_location
                and not role.permissions.access_all_locations
            )
        if show_es_issue:
            messages.error(
                self.request,
                mark_safe(_(  # nosec: no user input
                    "We might be experiencing issues fetching the entire list "
                    "of user roles right now. This issue is likely temporary and "
                    "nothing to worry about, but if you keep seeing this for "
                    "more than a day, please <a href='#modalReportIssue' "
                    "data-toggle='modal'>Report an Issue</a>."
                ))
            )
        return role_view_data

    @property
    def page_context(self):
        # Warn when existing roles restrict by location but the current
        # software plan no longer supports that feature
        if (not self.can_restrict_access_by_location
                and any(not role.permissions.access_all_locations
                        for role in self.non_admin_roles)):
            messages.warning(self.request, _(
                "This project has user roles that restrict data access by "
                "organization, but the software plan no longer supports that. "
                "Any users assigned to roles that are restricted in data access "
                "by organization can no longer access this project. Please "
                "update the existing roles."))
        tableau_list = []
        if toggles.EMBEDDED_TABLEAU.enabled(self.domain):
            tableau_list = [{
                'id': viz.id,
                'name': viz.name,
            } for viz in TableauVisualization.objects.filter(domain=self.domain)]
        return {
            'user_roles': self.get_roles_for_display(),
            'non_admin_roles': self.non_admin_roles,
            'can_edit_roles': self.can_edit_roles,
            'default_role': StaticRole.domain_default(self.domain),
            'tableau_list': tableau_list,
            'report_list': get_possible_reports(self.domain),
            'is_domain_admin': self.couch_user.is_domain_admin,
            'domain_object': self.domain_object,
            'uses_locations': self.domain_object.uses_locations,
            'can_restrict_access_by_location': self.can_restrict_access_by_location,
            'landing_page_choices': self.landing_page_choices,
            'show_integration': (
                toggles.OPENMRS_INTEGRATION.enabled(self.domain) or
                toggles.DHIS2_INTEGRATION.enabled(self.domain)
            ),
            'web_apps_privilege': self.web_apps_privilege,
            'erm_privilege': self.release_management_privilege,
            'mrm_privilege': self.lite_release_management_privilege,
            'has_report_builder_access': has_report_builder_access(self.request),
            'data_file_download_enabled': toggles.DATA_FILE_DOWNLOAD.enabled(self.domain),
            'export_ownership_enabled': toggles.EXPORT_OWNERSHIP.enabled(self.domain),
            'data_registry_choices': get_data_registry_dropdown_options(self.domain),
        }
@always_allow_project_access
@require_can_edit_or_view_web_users
@require_GET
def paginate_enterprise_users(request, domain):
    """JSON endpoint backing the Enterprise Users page.

    Pages through web users across the enterprise's domains and
    interleaves each web user's linked "login as" mobile workers
    immediately beneath them in the result list.
    """
    # Get web users across this domain plus any domains it has enterprise
    # permissions into
    domains = [domain] + EnterprisePermissions.get_domains(domain)
    web_users, pagination = _get_web_users(request, domains)

    # Get mobile workers whose user data marks them as a "login as" user
    # for one of the web users on this page
    web_user_usernames = [u.username for u in web_users]
    mobile_result = (
        UserES().show_inactive().domains(domains).mobile_users().sort('username.exact')
        .filter(
            queries.nested(
                'user_data_es',
                login_as_user_filter(web_user_usernames)
            )
        )
        .run()
    )
    mobile_users = defaultdict(list)
    for hit in mobile_result.hits:
        # user_data_es is a list of {key, value} pairs; flatten to a dict
        # to pull out the linked web user's username
        login_as_user = {data['key']: data['value'] for data in hit['user_data_es']}.get('login_as_user')
        mobile_users[login_as_user].append(CommCareUser.wrap(hit))

    users = []
    allowed_domains = set(domains) - {domain}
    for web_user in web_users:
        loginAsUserCount = len(list(filter(lambda m: m['is_active'], mobile_users[web_user.username])))
        other_domains = [m.domain for m in web_user.domain_memberships if m.domain in allowed_domains]
        users.append({
            **_format_enterprise_user(domain, web_user),
            'otherDomains': other_domains,
            'loginAsUserCount': loginAsUserCount,
            'inactiveMobileCount': len(mobile_users[web_user.username]) - loginAsUserCount,
        })
        for mobile_user in sorted(mobile_users[web_user.username], key=lambda x: x.username):
            profile = mobile_user.get_user_data_profile(mobile_user.metadata.get(PROFILE_SLUG))
            users.append({
                **_format_enterprise_user(mobile_user.domain, mobile_user),
                'profile': profile.name if profile else None,
                'otherDomains': [mobile_user.domain] if domain != mobile_user.domain else [],
                'loginAsUser': web_user.username,
                'is_active': mobile_user.is_active,
            })
    return JsonResponse({
        'users': users,
        **pagination,
    })
# user may be either a WebUser or a CommCareUser
def _format_enterprise_user(domain, user):
membership = user.get_domain_membership(domain)
role = membership.role if membership else None
return {
'username': user.raw_username,
'name': user.full_name,
'id': user.get_id,
'role': role.name if role else None,
}
@always_allow_project_access
@require_can_edit_or_view_web_users
@require_GET
def paginate_web_users(request, domain):
    """JSON endpoint backing the Web Users list: one page of web users for
    this domain, formatted for the client-side table."""
    web_users, pagination = _get_web_users(request, [domain])
    web_users_fmt = [{
        'email': u.get_email(),
        'domain': domain,
        'name': u.full_name,
        'role': u.role_label(domain),
        'phoneNumbers': u.phone_numbers,
        'id': u.get_id,
        'editUrl': reverse('user_account', args=[domain, u.get_id]),
        # A user may not remove themselves from the domain
        'removeUrl': (
            reverse('remove_web_user', args=[domain, u.user_id])
            if request.user.username != u.username else None
        ),
        'isUntrustedIdentityProvider': not IdentityProvider.does_domain_trust_user(
            domain, u.username
        ),
    } for u in web_users]
    return JsonResponse({
        'users': web_users_fmt,
        **pagination,
    })
def _get_web_users(request, domains):
    """Run the paginated web-user search behind the user list endpoints.

    Reads ``limit``, ``page`` and ``query`` from the query string and
    returns ``(users, pagination)`` where ``users`` is a list of WebUser
    and ``pagination`` echoes the page/query plus the total hit count.

    Previously a non-numeric ``limit``/``page`` (e.g. ``?limit=abc``)
    raised ValueError and surfaced as a 500; malformed or non-positive
    values now fall back to sane defaults.
    """
    def _positive_int_param(name, default):
        # Fall back to the default on missing/malformed values and clamp
        # to >= 1 so the ES offset can never go negative.
        try:
            value = int(request.GET.get(name, default))
        except (TypeError, ValueError):
            return default
        return max(value, 1)

    limit = _positive_int_param('limit', 10)
    page = _positive_int_param('page', 1)
    skip = limit * (page - 1)
    query = request.GET.get('query')
    result = (
        UserES().domains(domains).web_users().sort('username.exact')
        .search_string_query(query, ["username", "last_name", "first_name"])
        .start(skip).size(limit).run()
    )
    return (
        [WebUser.wrap(w) for w in result.hits],
        {
            'total': result.total,
            'page': page,
            'query': query,
        },
    )
@always_allow_project_access
@require_can_edit_web_users
@require_POST
def remove_web_user(request, domain, couch_user_id):
    """Remove a web user's membership in this domain (not the account
    itself), keeping an undoable DomainRemovalRecord."""
    user = WebUser.get_by_user_id(couch_user_id, domain)
    # if no user, very likely they just pressed delete twice in rapid succession so
    # don't bother doing anything.
    if user:
        record = user.delete_domain_membership(domain, create_record=True)
        user.save()
        # web user's membership is bound to the domain, so log as a change for that domain
        log_user_change(by_domain=request.domain, for_domain=domain, couch_user=user,
                        changed_by_user=request.couch_user, changed_via=USER_CHANGE_VIA_WEB,
                        change_messages=UserChangeMessage.domain_removal(domain))
        if record:
            message = _('You have successfully removed {username} from your '
                        'project space. <a href="{url}" class="post-link">Undo</a>')
            messages.success(request, message.format(
                username=user.username,
                url=reverse('undo_remove_web_user', args=[domain, record.get_id])
            ), extra_tags="html")
        else:
            message = _('It appears {username} has already been removed from your project space.')
            messages.success(request, message.format(username=user.username))
    return HttpResponseRedirect(
        reverse(ListWebUsersView.urlname, args=[domain]))
@always_allow_project_access
@require_can_edit_web_users
def undo_remove_web_user(request, domain, record_id):
    """Undo a previous web-user removal via its DomainRemovalRecord,
    restoring the user's membership in this domain."""
    record = DomainRemovalRecord.get(record_id)
    record.undo()
    # Wrap the message in _() so it is translatable, consistent with the
    # messages emitted by remove_web_user.
    messages.success(request, _('You have successfully restored {username}.').format(
        username=WebUser.get_by_user_id(record.user_id).username
    ))
    return HttpResponseRedirect(
        reverse(ListWebUsersView.urlname, args=[domain]))
# If any permission less than domain admin were allowed here, having that permission would give you the permission
# to change the permissions of your own role such that you could do anything, and would thus be equivalent to having
# domain admin permissions.
@json_error
@domain_admin_required
@require_POST
def post_user_role(request, domain):
    """Create or update a user role from the JSON body; responds with the
    saved role's serialized form, or a 400 with an error message."""
    if not domain_has_privilege(domain, privileges.ROLE_BASED_ACCESS):
        return JsonResponse({})
    role_data = json.loads(request.body.decode('utf-8'))
    try:
        role = _update_role_from_view(domain, role_data)
    except ValueError as e:
        return JsonResponse({
            "message": str(e)
        }, status=400)

    response_data = role.to_json()
    if role.is_commcare_user_default:
        # The mobile-worker default role can never be deleted
        response_data["preventRoleDelete"] = True
    else:
        user_count = get_role_user_count(domain, role.couch_id)
        response_data['preventRoleDelete'] = user_count > 0
    return JsonResponse(response_data)
def _update_role_from_view(domain, role_data):
    """Create or update a UserRole from the deserialized request payload.

    Raises ValueError for a duplicate/reserved name on a new role and
    Http404 when the referenced role belongs to another domain.
    """
    landing_page = role_data["default_landing_page"]
    if landing_page:
        validate_landing_page(domain, landing_page)
    if (
        not domain_has_privilege(domain, privileges.RESTRICT_ACCESS_BY_LOCATION)
        and not role_data['permissions']['access_all_locations']
    ):
        # This shouldn't be possible through the UI, but as a safeguard...
        role_data['permissions']['access_all_locations'] = True

    if "_id" in role_data:
        try:
            role = UserRole.objects.by_couch_id(role_data["_id"])
        except UserRole.DoesNotExist:
            role = UserRole()
        else:
            # Never allow editing a role that belongs to a different domain
            if role.domain != domain:
                raise Http404()
    else:
        role = UserRole()

    name = role_data["name"]
    if not role.id:
        # Name-uniqueness (and the reserved "admin" name) is only enforced
        # at creation time; renames of existing roles are not re-checked
        if name.lower() == 'admin' or UserRole.objects.filter(domain=domain, name__iexact=name).exists():
            raise ValueError(_("A role with the same name already exists"))

    role.domain = domain
    role.name = name
    role.default_landing_page = landing_page
    role.is_non_admin_editable = role_data["is_non_admin_editable"]
    role.save()
    permissions = HqPermissions.wrap(role_data["permissions"])
    permissions.normalize()
    role.set_permissions(permissions.to_list())
    assignable_by = role_data["assignable_by"]
    role.set_assignable_by_couch(assignable_by)
    return role
@domain_admin_required
@require_POST
def delete_user_role(request, domain):
    """Delete a user role; responds with the deleted role's id, or a 400
    with an explanation when the role cannot be deleted."""
    if not domain_has_privilege(domain, privileges.ROLE_BASED_ACCESS):
        return JsonResponse({})
    role_data = json.loads(request.body.decode('utf-8'))
    try:
        payload = _delete_user_role(domain, role_data)
    except InvalidRequestException as err:
        return JsonResponse({"message": str(err)}, status=400)
    return JsonResponse(payload)
def _delete_user_role(domain, role_data):
    """Delete the role identified by ``role_data['_id']``.

    Raises Http404 if the role does not exist in this domain and
    InvalidRequestException when the role is the mobile-worker default or
    still has users/invitations assigned.  Returns ``{"_id": ...}`` so the
    caller can remove the row from the UI.
    """
    try:
        role = UserRole.objects.by_couch_id(role_data["_id"], domain=domain)
    except UserRole.DoesNotExist:
        raise Http404

    if role.is_commcare_user_default:
        raise InvalidRequestException(_(
            "Unable to delete role '{role}'. "
            "This role is the default role for Mobile Users and can not be deleted.",
        ).format(role=role_data["name"]))

    user_count = get_role_user_count(domain, role_data["_id"])
    if user_count:
        raise InvalidRequestException(ngettext(
            "Unable to delete role '{role}'. "
            "It has one user and/or invitation still assigned to it. "
            "Remove all users assigned to the role before deleting it.",
            "Unable to delete role '{role}'. "
            "It has {user_count} users and/or invitations still assigned to it. "
            "Remove all users assigned to the role before deleting it.",
            user_count,
        ).format(role=role_data["name"], user_count=user_count))

    copy_id = role.couch_id
    role.delete()
    # return removed id in order to remove it from UI
    return {"_id": copy_id}
@always_allow_project_access
@require_POST
@require_can_edit_web_users
def delete_request(request, domain):
    """Delete a pending domain access request by its posted id."""
    request_id = request.POST['id']
    DomainRequest.objects.get(id=request_id).delete()
    return JsonResponse({'status': 'ok'})
@always_allow_project_access
@require_POST
@require_can_edit_web_users
def check_sso_trust(request, domain):
    """Report whether the domain trusts the SSO identity provider of the
    posted username; when untrusted, include the IdP name and the user's
    email domain so the UI can prompt for trust."""
    username = request.POST['username']
    is_trusted = IdentityProvider.does_domain_trust_user(domain, username)
    response = {
        'is_trusted': is_trusted,
    }
    if not is_trusted:
        response.update({
            'email_domain': get_email_domain_from_username(username),
            # NOTE(review): assumes an untrusted user always has an active
            # identity provider; if not, .name on None would raise — confirm
            'idp_name': IdentityProvider.get_active_identity_provider_by_username(
                username
            ).name,
        })
    return JsonResponse(response)
class BaseManageWebUserView(BaseUserSettingsView):
    """Base for web-user management pages: enforces edit permission and
    anchors breadcrumbs at the Web Users list."""

    @method_decorator(always_allow_project_access)
    @method_decorator(require_can_edit_web_users)
    def dispatch(self, request, *args, **kwargs):
        return super(BaseManageWebUserView, self).dispatch(request, *args, **kwargs)

    @property
    def parent_pages(self):
        return [{
            'title': ListWebUsersView.page_title,
            'url': reverse(ListWebUsersView.urlname, args=[self.domain]),
        }]
class InviteWebUserView(BaseManageWebUserView):
    """Invite a web user to the project, or — when reached from a pending
    access request — add an existing user directly."""
    template_name = "users/invite_web_user.html"
    urlname = 'invite_web_user'
    page_title = gettext_lazy("Invite Web User to Project")

    @property
    @memoized
    def invite_web_user_form(self):
        """Build the invitation form, bound to POST data when submitted.

        Pre-fills the email from a pending DomainRequest (``request_id``)
        and the location from a ``location_id`` query parameter.
        """
        role_choices = get_editable_role_choices(self.domain, self.request.couch_user, allow_admin_role=True)
        loc = None
        domain_request = DomainRequest.objects.get(id=self.request_id) if self.request_id else None
        is_add_user = self.request_id is not None
        initial = {
            'email': domain_request.email if domain_request else None,
        }
        if 'location_id' in self.request.GET:
            from corehq.apps.locations.models import SQLLocation
            loc = SQLLocation.objects.get(location_id=self.request.GET.get('location_id'))
        if self.request.method == 'POST':
            # Reject emails that are already members or already invited
            current_users = [user.username for user in WebUser.by_domain(self.domain)]
            pending_invites = [di.email for di in Invitation.by_domain(self.domain)]
            return AdminInvitesUserForm(
                self.request.POST,
                excluded_emails=current_users + pending_invites,
                role_choices=role_choices,
                domain=self.domain,
                is_add_user=is_add_user,
            )
        return AdminInvitesUserForm(
            initial=initial,
            role_choices=role_choices,
            domain=self.domain,
            location=loc,
            is_add_user=is_add_user,
        )

    @property
    @memoized
    def request_id(self):
        # Id of the DomainRequest this invite is answering, if any
        if 'request_id' in self.request.GET:
            return self.request.GET.get('request_id')
        return None

    @property
    def page_context(self):
        return {
            'registration_form': self.invite_web_user_form,
        }

    def post(self, request, *args, **kwargs):
        if self.invite_web_user_form.is_valid():
            # If user exists and has already requested access, just add them to the project
            # Otherwise, send an invitation
            create_invitation = True
            data = self.invite_web_user_form.cleaned_data
            domain_request = DomainRequest.by_email(self.domain, data["email"])
            if domain_request is not None:
                domain_request.is_approved = True
                domain_request.save()
                user = CouchUser.get_by_username(domain_request.email)
                if user is not None:
                    domain_request.send_approval_email()
                    create_invitation = False
                    user.add_as_web_user(self.domain, role=data["role"],
                                         location_id=data.get("supply_point", None),
                                         program_id=data.get("program", None))
                messages.success(request, "%s added." % data["email"])
            else:
                track_workflow(request.couch_user.get_email(),
                               "Sent a project invitation",
                               {"Sent a project invitation": "yes"})
                send_hubspot_form(HUBSPOT_INVITATION_SENT_FORM, request)
                messages.success(request, "Invitation sent to %s" % data["email"])

            if create_invitation:
                data["invited_by"] = request.couch_user.user_id
                data["invited_on"] = datetime.utcnow()
                data["domain"] = self.domain
                invite = Invitation(**data)
                invite.save()
                invite.send_activation_email()

            # Ensure trust is established with Invited User's Identity Provider
            if not IdentityProvider.does_domain_trust_user(self.domain, data["email"]):
                idp = IdentityProvider.get_active_identity_provider_by_username(data["email"])
                idp.create_trust_with_domain(self.domain, self.request.user.username)

            return HttpResponseRedirect(reverse(
                ListWebUsersView.urlname,
                args=[self.domain]
            ))
        return self.get(request, *args, **kwargs)
class BaseUploadUser(BaseUserSettingsView):
    """Shared POST handling for bulk user upload (web and mobile): parses
    the uploaded workbook, validates headers, and kicks off the async
    import task.  Subclasses must set ``is_web_upload`` and ``urlname``."""

    def post(self, request, *args, **kwargs):
        """View's dispatch method automatically calls this"""
        try:
            self.workbook = get_workbook(request.FILES.get('bulk_upload_file'))
        except WorkbookJSONError as e:
            messages.error(request, str(e))
            return self.get(request, *args, **kwargs)

        # Prefer an explicitly named 'users' sheet; fall back to the first
        # worksheet in the workbook
        try:
            self.user_specs = self.workbook.get_worksheet(title='users')
        except WorksheetNotFound:
            try:
                self.user_specs = self.workbook.get_worksheet()
            except WorksheetNotFound:
                return HttpResponseBadRequest("Workbook has no worksheets")

        try:
            self.group_specs = self.workbook.get_worksheet(title='groups')
        except WorksheetNotFound:
            self.group_specs = []

        try:
            from corehq.apps.user_importer.importer import check_headers
            check_headers(self.user_specs, self.domain, is_web_upload=self.is_web_upload)
        except UserUploadError as e:
            messages.error(request, _(str(e)))
            return HttpResponseRedirect(reverse(self.urlname, args=[self.domain]))

        # Placeholder download to track async task progress (1h expiry)
        task_ref = expose_cached_download(payload=None, expiry=1 * 60 * 60, file_extension=None)
        if PARALLEL_USER_IMPORTS.enabled(self.domain) and not self.is_web_upload:
            # Parallel import does not support a groups sheet
            if list(self.group_specs):
                messages.error(
                    request,
                    _("Groups are not allowed with parallel user import. Please upload them separately")
                )
                return HttpResponseRedirect(reverse(self.urlname, args=[self.domain]))
            task = parallel_user_import.delay(
                self.domain,
                list(self.user_specs),
                request.couch_user.user_id
            )
        else:
            upload_record = UserUploadRecord(
                domain=self.domain,
                user_id=request.couch_user.user_id
            )
            upload_record.save()
            task = import_users_and_groups.delay(
                self.domain,
                list(self.user_specs),
                list(self.group_specs),
                request.couch_user.user_id,
                upload_record.pk,
                self.is_web_upload
            )
        task_ref.set_task(task)
        if self.is_web_upload:
            return HttpResponseRedirect(
                reverse(
                    WebUserUploadStatusView.urlname,
                    args=[self.domain, task_ref.download_id]
                )
            )
        else:
            from corehq.apps.users.views.mobile import UserUploadStatusView
            return HttpResponseRedirect(
                reverse(
                    UserUploadStatusView.urlname,
                    args=[self.domain, task_ref.download_id]
                )
            )
class UploadWebUsers(BaseUploadUser):
    """Bulk upload page for web users; POST handling lives in
    BaseUploadUser."""
    template_name = 'hqwebapp/bulk_upload.html'
    urlname = 'upload_web_users'
    page_title = gettext_noop("Bulk Upload Web Users")
    is_web_upload = True

    @method_decorator(always_allow_project_access)
    @method_decorator(require_can_edit_web_users)
    @method_decorator(requires_privilege_with_fallback(privileges.BULK_USER_MANAGEMENT))
    def dispatch(self, request, *args, **kwargs):
        return super(UploadWebUsers, self).dispatch(request, *args, **kwargs)

    @property
    def page_context(self):
        request_params = self.request.GET if self.request.method == 'GET' else self.request.POST
        from corehq.apps.users.views.mobile import get_user_upload_context
        return get_user_upload_context(self.domain, request_params, "download_web_users", "web user", "web users")

    def post(self, request, *args, **kwargs):
        # Track before delegating the actual upload handling
        track_workflow(request.couch_user.get_email(), 'Bulk upload web users selected')
        return super(UploadWebUsers, self).post(request, *args, **kwargs)
class WebUserUploadStatusView(BaseManageWebUserView):
    """Progress page for an async web-user bulk upload, polling the task
    identified by ``download_id``."""
    urlname = 'web_user_upload_status'
    page_title = gettext_noop('Web User Upload Status')

    def get(self, request, *args, **kwargs):
        context = super(WebUserUploadStatusView, self).main_context
        context.update({
            'domain': self.domain,
            'download_id': kwargs['download_id'],
            'poll_url': reverse(WebUserUploadJobPollView.urlname, args=[self.domain, kwargs['download_id']]),
            'title': _("Web User Upload Status"),
            'progress_text': _("Importing your data. This may take some time..."),
            'error_text': _("Problem importing data! Please try again or report an issue."),
            'next_url': reverse(ListWebUsersView.urlname, args=[self.domain]),
            'next_url_text': _("Return to manage web users"),
        })
        return render(request, 'hqwebapp/soil_status_full.html', context)

    # NOTE(review): plain method, not a property — Django templates call
    # callables automatically, so both work; confirm before changing.
    def page_url(self):
        return reverse(self.urlname, args=self.args, kwargs=self.kwargs)
class UserUploadJobPollView(BaseUserSettingsView):
    """AJAX poll endpoint rendering the current status of a bulk upload
    task.  Subclasses supply ``on_complete_long`` and ``user_type``."""

    def get(self, request, domain, download_id):
        try:
            context = get_download_context(download_id)
        except TaskFailedError:
            return HttpResponseServerError()
        context.update({
            'on_complete_short': _('Bulk upload complete.'),
            'on_complete_long': _(self.on_complete_long),
            'user_type': _(self.user_type),
        })
        context['result'] = BulkUploadResponseWrapper(context)
        return render(request, 'users/mobile/partials/user_upload_status.html', context)
class WebUserUploadJobPollView(UserUploadJobPollView, BaseManageWebUserView):
    """Web-user flavor of the upload poll view."""
    urlname = "web_user_upload_job_poll"
    on_complete_long = 'Web Worker upload has finished'
    user_type = 'web users'

    @method_decorator(require_can_edit_web_users)
    def dispatch(self, request, *args, **kwargs):
        return super(WebUserUploadJobPollView, self).dispatch(request, *args, **kwargs)
@require_POST
@always_allow_project_access
@require_permission_to_edit_user
def make_phone_number_default(request, domain, couch_user_id):
    """Set the posted phone number as the user's default, then redirect
    back to the mobile worker edit page.

    Only the current web user's own account or a mobile worker may be
    targeted; anything else 404s.
    """
    user = CouchUser.get_by_user_id(couch_user_id, domain)
    if not user.is_current_web_user(request) and not user.is_commcare_user():
        raise Http404()
    # Use .get() so a missing key raises the intended Http404 below instead
    # of a MultiValueDictKeyError (500).
    phone_number = request.POST.get('phone_number')
    if not phone_number:
        raise Http404('Must include phone number in request.')

    user.set_default_phone_number(phone_number)
    from corehq.apps.users.views.mobile import EditCommCareUserView
    redirect = reverse(EditCommCareUserView.urlname, args=[domain, couch_user_id])
    return HttpResponseRedirect(redirect)
@require_POST
@always_allow_project_access
@require_permission_to_edit_user
def delete_phone_number(request, domain, couch_user_id):
    """Remove the posted phone number from the user, log the change, and
    redirect back to the mobile worker edit page.

    Only the current web user's own account or a mobile worker may be
    targeted; anything else 404s.
    """
    user = CouchUser.get_by_user_id(couch_user_id, domain)
    if not user.is_current_web_user(request) and not user.is_commcare_user():
        raise Http404()
    # Use .get() so a missing key raises the intended Http404 below instead
    # of a MultiValueDictKeyError (500).
    phone_number = request.POST.get('phone_number')
    if not phone_number:
        raise Http404('Must include phone number in request.')

    user.delete_phone_number(phone_number)
    log_user_change(
        by_domain=request.domain,
        for_domain=user.domain,
        couch_user=user,
        changed_by_user=request.couch_user,
        changed_via=USER_CHANGE_VIA_WEB,
        change_messages=UserChangeMessage.phone_numbers_removed([phone_number])
    )
    from corehq.apps.users.views.mobile import EditCommCareUserView
    redirect = reverse(EditCommCareUserView.urlname, args=[domain, couch_user_id])
    return HttpResponseRedirect(redirect)
@always_allow_project_access
@require_permission_to_edit_user
def verify_phone_number(request, domain, couch_user_id):
    """
    Start (or re-send) the SMS verification workflow for a phone number.

    phone_number cannot be passed in the url due to special characters
    but it can be passed as %-encoded GET parameters
    """
    # Use the stdlib directly instead of the legacy `six` compatibility
    # shim; this codebase is Python-3-only.
    from urllib.parse import unquote

    if 'phone_number' not in request.GET:
        raise Http404('Must include phone number in request.')
    phone_number = unquote(request.GET['phone_number'])
    user = CouchUser.get_by_user_id(couch_user_id, domain)

    try:
        result = initiate_sms_verification_workflow(user, phone_number)
    except BadSMSConfigException as error:
        messages.error(request, _('Bad SMS configuration: {error}').format(error=error))
    else:
        if result == VERIFICATION__ALREADY_IN_USE:
            messages.error(request, _('Cannot start verification workflow. Phone number is already in use.'))
        elif result == VERIFICATION__ALREADY_VERIFIED:
            messages.error(request, _('Phone number is already verified.'))
        elif result == VERIFICATION__RESENT_PENDING:
            messages.success(request, _('Verification message resent.'))
        elif result == VERIFICATION__WORKFLOW_STARTED:
            messages.success(request, _('Verification workflow started.'))

    from corehq.apps.users.views.mobile import EditCommCareUserView
    redirect = reverse(EditCommCareUserView.urlname, args=[domain, couch_user_id])
    return HttpResponseRedirect(redirect)
@always_allow_project_access
@require_superuser
@login_and_domain_required
def domain_accounts(request, domain, couch_user_id, template="users/domain_accounts.html"):
    """Superuser-only page to add domain memberships to a web user."""
    context = _users_context(request, domain)
    couch_user = WebUser.get_by_user_id(couch_user_id, domain)
    if request.method == "POST" and 'domain' in request.POST:
        # Rebinds the local `domain` to the domain being added (the URL
        # domain is no longer needed past this point)
        domain = request.POST['domain']
        couch_user.add_domain_membership(domain)
        couch_user.save()
        messages.success(request, 'Domain added')
    context.update({"user": request.user})
    return render(request, template, context)
@always_allow_project_access
@require_POST
@require_superuser
def add_domain_membership(request, domain, couch_user_id, domain_name):
    """Superuser-only: grant the web user membership in ``domain_name``,
    then return to their account page."""
    web_user = WebUser.get_by_user_id(couch_user_id, domain)
    if domain_name:
        web_user.add_domain_membership(domain_name)
        web_user.save()
    return HttpResponseRedirect(reverse("user_account", args=(domain, couch_user_id)))
@always_allow_project_access
@sensitive_post_parameters('new_password1', 'new_password2')
@login_and_domain_required
@location_safe
def change_password(request, domain, login_id):
    """AJAX view that sets a mobile worker's password.

    Returns JSON with a ``status`` of 'OK'/'weak'/'different' plus the
    re-rendered form HTML.  404s when the user doesn't exist or the
    requester may not access them.
    """
    # copied from auth's password_change
    commcare_user = CommCareUser.get_by_user_id(login_id, domain)
    json_dump = {}
    if not commcare_user or not user_can_access_other_user(domain, request.couch_user, commcare_user):
        raise Http404()

    django_user = commcare_user.get_django_user()
    if request.method == "POST":
        form = SetUserPasswordForm(request.project, login_id, user=django_user, data=request.POST)
        # Renamed from `input`, which shadowed the builtin
        new_password = request.POST['new_password1']
        if new_password == request.POST['new_password2']:
            if form.project.strong_mobile_passwords:
                try:
                    clean_password(new_password)
                except ValidationError:
                    json_dump['status'] = 'weak'
            if form.is_valid():
                form.save()
                log_user_change(
                    by_domain=domain,
                    for_domain=commcare_user.domain,
                    couch_user=commcare_user,
                    changed_by_user=request.couch_user,
                    changed_via=USER_CHANGE_VIA_WEB,
                    change_messages=UserChangeMessage.password_reset()
                )
                json_dump['status'] = 'OK'
                # Re-render a fresh, unbound form after a successful save
                form = SetUserPasswordForm(request.project, login_id, user='')
        else:
            json_dump['status'] = 'different'
    else:
        form = SetUserPasswordForm(request.project, login_id, user=django_user)
    json_dump['formHTML'] = render_crispy_form(form)
    return HttpResponse(json.dumps(json_dump))
@httpdigest
@login_and_domain_required
def test_httpdigest(request, domain):
    # Minimal endpoint for verifying HTTP digest auth credentials end-to-end.
    return HttpResponse("ok")
@always_allow_project_access
@csrf_exempt
@require_POST
@require_superuser
def register_fcm_device_token(request, domain, couch_user_id, device_token):
    """Store an FCM push-notification device token on a web user.

    Returns an empty 200 on success, or a 404 when ``couch_user_id`` does
    not resolve to an existing user (previously this crashed with an
    AttributeError / HTTP 500 because ``get_by_user_id`` returns None for
    unknown ids).
    """
    user = WebUser.get_by_user_id(couch_user_id)
    if user is None:
        return HttpResponse(status=404)
    user.fcm_device_token = device_token
    user.save()
    return HttpResponse()
| bsd-3-clause | 2476103604305979f62d86950d7e095f | 36.941341 | 116 | 0.630991 | 3.919492 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/domain/views/base.py | 1 | 6137 | from django.contrib import messages
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import redirect, render
from django.urls import reverse
from django.utils.translation import gettext as _
from memoized import memoized
from corehq.apps.accounting.mixins import BillingModalsMixin
from corehq.apps.domain.decorators import (
login_required,
LoginAndDomainMixin,
)
from corehq.apps.domain.models import Domain
from corehq.apps.domain.utils import normalize_domain_name
from corehq.apps.hqwebapp.views import BaseSectionPageView
from corehq.apps.users.models import Invitation
from corehq.util.quickcache import quickcache
def covid19(request):
    # Reuse the domain-select view, but send the user to the app exchange
    # view after they pick a project.
    return select(request, next_view="app_exchange")
# Domain not required here - we could be selecting it for the first time. See notes domain.decorators
# about why we need this custom login_required decorator
@login_required
def select(request, do_not_redirect=False, next_view=None):
    """Let the user pick one of their project spaces (domains).

    When the session remembers a ``last_visited_domain`` and nothing is
    pending (no open invitations, ``do_not_redirect`` falsy), skips the
    picker and redirects straight into that domain.
    """
    # Users without a couch_user have not completed registration yet
    if not hasattr(request, 'couch_user'):
        return redirect('registration_domain')
    # next_view must be a url that expects exactly one parameter, a domain name
    next_view = next_view or request.GET.get('next_view')
    show_invitations = False
    if not next_view:
        next_view = "domain_homepage"
        show_invitations = True
    domain_links = get_domain_links_for_dropdown(request.couch_user, view_name=next_view)
    if not domain_links:
        # user belongs to no project spaces: send them to create one
        return redirect('registration_domain')
    domain_links += get_enterprise_links_for_dropdown(request.couch_user, view_name=next_view)
    domain_links = sorted(domain_links, key=lambda link: link['display_name'].lower())
    email = request.couch_user.get_email()
    open_invitations = [e for e in Invitation.by_email(email) if not e.is_expired]
    additional_context = {
        'domain_links': domain_links,
        # invitations are only surfaced on the plain domain-select page
        'invitation_links': [{
            'display_name': i.domain,
            'url': reverse("domain_accept_invitation", args=[i.domain, i.uuid]) + '?no_redirect=true',
        } for i in open_invitations] if show_invitations else [],
        'current_page': {'page_name': _('Select A Project')},
    }
    domain_select_template = "domain/select.html"
    last_visited_domain = request.session.get('last_visited_domain')
    if open_invitations \
            or do_not_redirect \
            or not last_visited_domain:
        return render(request, domain_select_template, additional_context)
    else:
        domain_obj = Domain.get_by_name(last_visited_domain)
        if domain_obj and domain_obj.is_active:
            # mirrors logic in login_and_domain_required
            if (
                request.couch_user.is_member_of(domain_obj, allow_enterprise=True)
                or (request.user.is_superuser and not domain_obj.restrict_superusers)
                or domain_obj.is_snapshot
            ):
                try:
                    return HttpResponseRedirect(reverse(next_view or 'dashboard_default',
                                                        args=[last_visited_domain]))
                except Http404:
                    pass
        # remembered domain is gone or inaccessible; forget it, show picker
        del request.session['last_visited_domain']
        return render(request, domain_select_template, additional_context)
@login_required
def accept_all_invitations(request):
    """Accept every non-expired project invitation for the logged-in user,
    then redirect to the domain-select page."""
    user = request.couch_user
    invites = Invitation.by_email(user.username)
    for invitation in invites:
        if not invitation.is_expired:
            invitation.accept_invitation_and_join_domain(user)
            # BUGFIX: an f-string inside gettext is interpolated *before*
            # the translation lookup, so the message could never match a
            # catalog entry; use a placeholder in the msgid instead.
            messages.success(
                request,
                _('You have been added to the "{domain}" project space.').format(
                    domain=invitation.domain,
                )
            )
    return HttpResponseRedirect(reverse('domain_select_redirect'))
@quickcache(['couch_user.username'])
def get_domain_links_for_dropdown(couch_user, view_name="domain_homepage"):
    # Returns dicts with keys 'name', 'display_name', and 'url'
    # NOTE(review): quickcache appears to key only on the username, so this
    # may serve a stale list after membership changes -- confirm invalidation.
    return _domains_to_links(Domain.active_for_user(couch_user), view_name)
# Returns domains where given user has access only by virtue of enterprise permissions
@quickcache(['couch_user.username'])
def get_enterprise_links_for_dropdown(couch_user, view_name="domain_homepage"):
    # Returns dicts with keys 'name', 'display_name', and 'url'
    from corehq.apps.enterprise.models import EnterprisePermissions
    direct_links = {link['name']: link for link in get_domain_links_for_dropdown(couch_user)}
    enterprise_domain_objects = {}
    for source_domain in direct_links:
        for controlled_domain in EnterprisePermissions.get_domains(source_domain):
            # only include domains the user can't already reach directly
            if controlled_domain not in direct_links:
                enterprise_domain_objects[controlled_domain] = Domain.get_by_name(controlled_domain)
    return _domains_to_links(enterprise_domain_objects.values(), view_name)
def _domains_to_links(domain_objects, view_name):
    # Build dropdown link dicts, dropping falsy domain objects, ordered
    # alphabetically by display name (case-insensitive).
    links = []
    for domain_obj in domain_objects:
        if not domain_obj:
            continue
        links.append({
            'name': domain_obj.name,
            'display_name': domain_obj.display_name(),
            'url': reverse(view_name, args=[domain_obj.name]),
        })
    links.sort(key=lambda link: link['display_name'].lower())
    return links
class DomainViewMixin(object):
    """Mixin that resolves the current domain (and its Domain doc) for
    class-based views from the URL args/kwargs.

    Set ``strict_domain_fetching = True`` in subclasses to bypass the
    Domain cache.
    """
    strict_domain_fetching = False

    @property
    @memoized
    def domain(self):
        if self.args:
            raw_domain = self.args[0]
        else:
            raw_domain = self.kwargs.get('domain', "")
        return normalize_domain_name(raw_domain)

    @property
    @memoized
    def domain_object(self):
        domain_obj = Domain.get_by_name(self.domain, strict=self.strict_domain_fetching)
        if not domain_obj:
            # unknown domain name in the URL
            raise Http404()
        return domain_obj
class BaseDomainView(LoginAndDomainMixin, BillingModalsMixin, BaseSectionPageView, DomainViewMixin):
    # Base for domain-scoped section pages: enforces login + domain
    # membership (via LoginAndDomainMixin) and injects the current domain
    # into the template context.

    @property
    def main_context(self):
        main_context = super(BaseDomainView, self).main_context
        main_context.update({
            'domain': self.domain,
        })
        return main_context

    @property
    @memoized
    def page_url(self):
        # NOTE(review): implicitly returns None when ``urlname`` is unset --
        # confirm subclasses always define it.
        if self.urlname:
            return reverse(self.urlname, args=[self.domain])
| bsd-3-clause | 7eb3a73187a492ecbbd94d4440db4f45 | 37.597484 | 108 | 0.676715 | 3.816542 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/segments/migrations/0010_auto_20211123_1533.py | 1 | 1720 | # Generated by Django 2.2.24 on 2021-11-23 14:33
import bluebottle.utils.fields
import bluebottle.utils.validators
import colorfield.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds branding fields (background
    # color, cover image, logo, tag line) to the Segment model.
    # NOTE(review): the help_text ("Category Logo image") and upload_to
    # ("categories/logos/") on the image fields look copy-pasted from the
    # categories app -- confirm intent, but do not edit a committed migration.

    dependencies = [
        ('segments', '0009_auto_20211122_1527'),
    ]

    operations = [
        migrations.AddField(
            model_name='segment',
            name='background_color',
            field=colorfield.fields.ColorField(blank=True, default=None, max_length=18, null=True, verbose_name='Background color'),
        ),
        migrations.AddField(
            model_name='segment',
            name='cover_image',
            field=bluebottle.utils.fields.ImageField(blank=True, help_text='Category Logo image', max_length=255, null=True, upload_to='categories/logos/', validators=[bluebottle.utils.validators.FileMimetypeValidator(['image/png', 'image/jpeg', 'image/gif', 'image/svg+xml'], None, 'invalid_mimetype'), bluebottle.utils.validators.validate_file_infection], verbose_name='cover image'),
        ),
        migrations.AddField(
            model_name='segment',
            name='logo',
            field=bluebottle.utils.fields.ImageField(blank=True, help_text='Category Logo image', max_length=255, null=True, upload_to='categories/logos/', validators=[bluebottle.utils.validators.FileMimetypeValidator(['image/png', 'image/jpeg', 'image/gif', 'image/svg+xml'], None, 'invalid_mimetype'), bluebottle.utils.validators.validate_file_infection], verbose_name='logo'),
        ),
        migrations.AddField(
            model_name='segment',
            name='tag_line',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
| bsd-3-clause | 9b1a3c511a492afcdf80b2fbbe42344d | 46.777778 | 386 | 0.663372 | 3.865169 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/app_manager/management/commands/import_app.py | 1 | 2999 | from getpass import getpass
from django.core.management.base import BaseCommand, CommandError
from django.urls import reverse
import requests
from requests.auth import HTTPDigestAuth
from corehq.apps.app_manager.models import import_app
class Command(BaseCommand):
    help = ("Import an app from another Commcare instance")

    def add_arguments(self, parser):
        parser.add_argument(
            'domain'
        )
        parser.add_argument(
            'app_id',
        )
        parser.add_argument(
            '-u',
            '--username',
            action='store',
            dest='username',
            help='Username',
        )
        parser.add_argument(
            '-p',
            '--password',
            action='store',
            dest='password',
            help='Password',
        )
        parser.add_argument(
            '-d',
            '--to_domain',
            action='store',
            dest='to_domain',
            help='The domain to import the app into.',
        )
        parser.add_argument(
            '-n',
            '--to_name',
            action='store',
            dest='to_name',
            default=None,
            help='The name to give to the imported app',
        )
        parser.add_argument(
            '--url',
            action='store',
            dest='url',
            default='https://www.commcarehq.org',
            help='The URL of the CommCare instance.',
        )
        parser.add_argument(
            '--2fa', action='store_true', help='Prompt for 2FA token'
        )

    def _get_required_option(self, name, options):
        """Return the value of a required option, or abort the command."""
        value = options.get(name)
        if not value:
            raise CommandError("Option: '--{}' must be specified".format(name))
        return value

    def handle(self, domain, app_id, **options):
        """Download the app source via HQ's digest-authenticated API and
        import it into ``--to_domain``, optionally renaming it.

        Prompts for a password (and a 2FA token with ``--2fa``) when not
        supplied on the command line.
        """
        username = self._get_required_option('username', options)
        target_domain = self._get_required_option('to_domain', options)
        name = options['to_name']
        url_base = options['url']
        password = options['password']
        headers = {}
        if not password:
            password = getpass("Please enter the password for '{}': ".format(username))
        if options['2fa']:
            otp = getpass("Please enter your 2FA token: ")
            headers['X-COMMCAREHQ-OTP'] = otp
        url = reverse('app_source', kwargs={'domain': domain, 'app_id': app_id})
        full_url = '{}{}'.format(url_base, url)
        print("Making request to: {}".format(full_url))
        resp = requests.get(full_url, auth=HTTPDigestAuth(username, password), headers=headers)
        if resp.status_code != 200:
            # BUGFIX: raise instead of returning the message so the command
            # exits with a non-zero status on failure
            raise CommandError("Command Failed: {}: {}".format(resp.status_code, resp.text))
        app_source = resp.json()
        if not name:
            name = app_source['name']
        app = import_app(app_source, target_domain, {'name': name})
        return "Created app '{}' at /a/{}/apps/view/{}/".format(app.name, app.domain, app.id)
| bsd-3-clause | 471c64e54b61ff8b0ea3aaf3f42a731a | 30.904255 | 95 | 0.536179 | 4.212079 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/styleguide/views/__init__.py | 1 | 14864 | import os
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.views.generic import *
from corehq.apps.styleguide.example_forms import (
BasicCrispyForm,
CheckboxesForm,
)
def styleguide_default(request):
    # Redirect bare style guide requests to the style guide home page.
    return HttpResponseRedirect(reverse(MainStyleGuideView.urlname))
class MainStyleGuideView(TemplateView):
    # Landing page for the HQ style guide; renders a static template.
    template_name = 'styleguide/home.html'
    urlname = 'styleguide_home'
class BaseStyleGuideArticleView(TemplateView):
    """Shared scaffolding for style guide chapter pages.

    Subclasses declare ``sections`` and ``navigation_name``; this base view
    resolves them to include-template paths and merges them (plus any
    page-specific context) into the render context.
    """
    template_name = 'styleguide/base_section.html'

    @property
    def sections(self):
        """Ordered list of section names; each is rendered from
        'styleguide/_includes/<section>.html', which must exist in the
        styleguide app.
        """
        raise NotImplementedError("please implement 'sections'")

    @property
    def navigation_name(self):
        """Name of the navigation include, rendered from
        'styleguide/_includes/nav/<navigation_name>.html'.
        """
        raise NotImplementedError("please implement 'navigation_name'")

    @property
    def section_context(self):
        section_templates = ['styleguide/_includes/%s.html' % name
                             for name in self.sections]
        nav_template = 'styleguide/_includes/nav/%s.html' % self.navigation_name
        return {
            'sections': section_templates,
            'navigation': nav_template,
        }

    @property
    def page_context(self):
        """Hook for subclasses to contribute extra, view-specific context."""
        return {}

    def example(self, filename):
        """Return the raw text of an example template shipped with the app."""
        examples_dir = os.path.join(
            os.path.dirname(__file__), '..', 'templates', 'styleguide', 'examples')
        with open(os.path.join(examples_dir, filename), 'r', encoding='utf-8') as example_file:
            return example_file.read()

    def render_to_response(self, context, **response_kwargs):
        for extra_context in (self.section_context, self.page_context):
            context.update(extra_context)
        return super(BaseStyleGuideArticleView, self).render_to_response(
            context, **response_kwargs)
class AtomsStyleGuideView(BaseStyleGuideArticleView):
    # "Atoms" chapter: typography, colors, icons, CSS. page_context supplies
    # the icon lists and color swatch data rendered by the section templates.
    urlname = 'styleguide_atoms'
    navigation_name = 'atoms'

    @property
    def sections(self):
        return [
            'atoms/intro',
            'atoms/accessibility',
            'atoms/typography',
            'atoms/colors',
            'atoms/icons',
            'atoms/css',
        ]

    @property
    def page_context(self):
        # Swatch entries: 'main'/'shades' are (hex, less-variable-name) pairs;
        # 'inverse' presumably tells the template to render light text on the
        # swatch -- TODO confirm against atoms/colors.html.
        return {
            'common_icons': [
                {
                    'name': 'Common FontAwesome primary icons',
                    'icons': [
                        'fa-plus', 'fa-trash', 'fa-remove', 'fa-search',
                        'fa-angle-double-right', 'fa-angle-double-down',
                    ],
                },
                {
                    'name': 'Common FontAwesome secondary icons',
                    'icons': [
                        'fa-cloud-download', 'fa-cloud-upload',
                        'fa-warning', 'fa-info-circle', 'fa-question-circle', 'fa-check',
                        'fa-external-link',
                    ],
                }
            ],
            'custom_icons': [
                {
                    'name': 'Custom HQ icons',
                    'icons': [
                        'fcc-flower', 'fcc-applications', 'fcc-commtrack', 'fcc-reports', 'fcc-data', 'fcc-users',
                        'fcc-settings', 'fcc-help', 'fcc-exchange', 'fcc-messaging', 'fcc-chart-report',
                        'fcc-form-report', 'fcc-datatable-report', 'fcc-piegraph-report', 'fcc-survey',
                        'fcc-casemgt', 'fcc-blankapp', 'fcc-globe', 'fcc-app-createform', 'fcc-app-updateform',
                        'fcc-app-completeform',
                    ],
                },
                {
                    'name': 'Custom HQ icons specific to form builder',
                    'icons': [
                        'fcc-fd-text', 'fcc-fd-numeric', 'fcc-fd-data', 'fcc-fd-variable', 'fcc-fd-single-select',
                        'fcc-fd-single-circle', 'fcc-fd-multi-select', 'fcc-fd-multi-box', 'fcc-fd-decimal',
                        'fcc-fd-long', 'fcc-fd-datetime', 'fcc-fd-audio-capture', 'fcc-fd-android-intent',
                        'fcc-fd-signature', 'fcc-fd-multi-box', 'fcc-fd-single-circle', 'fcc-fd-hash',
                        'fcc-fd-external-case', 'fcc-fd-external-case-data', 'fcc-fd-expand', 'fcc-fd-collapse',
                        'fcc-fd-case-property', 'fcc-fd-edit-form',
                    ],
                },
            ],
            'swatches': {
                'RED': {
                    'main': ('e73c27', 'cc-att-neg-mid'),
                    'shades': [
                        ('fbeae6', 'cc-att-neg-extra-hi'),
                        ('fead9a', 'cc-att-neg-hi'),
                        ('bf0712', 'cc-att-neg-low'),
                        ('340101', 'cc-att-neg-extra-low'),
                    ],
                    'inverse': True,
                    'name': 'Error, Negative Attention',
                    'description': '''
                        Use to highlight an error, something negative or a critical risk.
                        Use as text, highlights, banners or destructive buttons. Often called
                        "danger", as in <code>.btn-danger</code>.
                    ''',
                },
                'YELLOW': {
                    'main': ('eec200', 'cc-light-warm-accent-mid'),
                    'shades': [
                        ('fcf2cd', 'cc-light-warm-accent-extra-hi'),
                        ('ffea8a', 'cc-light-warm-accent-hi'),
                        ('9c6f19', 'cc-light-warm-accent-low'),
                        ('573b00', 'cc-light-warm-accent-extra-low'),
                    ],
                    'name': 'Attention',
                    'description': '''
                        Use for warning-level information, less severe than an error but still in need of
                        attention. Often called "warning", as in <code>.alert-warning</code>.
                    ''',
                },
                'GREEN': {
                    'main': ('4aba32', 'cc-att-pos-mid'),
                    'shades': [
                        ('e3f1df', 'cc-att-pos-extra-hi'),
                        ('bbe5b3', 'cc-att-pos-hi'),
                        ('118043', 'cc-att-pos-low'),
                        ('173630', 'cc-att-pos-extra-low'),
                    ],
                    'inverse': True,
                    'name': 'Success',
                    'description': '''
                        Use when an action has been completed successfully, primarily for messaging.
                        Rarely used for interacactive elements like buttons. Used in classes such as
                        <code>.alert-success</code>.
                    ''',
                },
                'BLACK': {
                    'main': ('1c2126', 'cc-text'),
                    'inverse': True,
                    'name': 'Ink Black',
                    'description': "Default text color. Also used for footer.",
                },
                'BACKGROUND': {
                    'main': ('f2f2f1', 'cc-bg'),
                    'name': 'Background',
                    'description': '''
                        Used for backgrounds that are light but distinct from the default white background,
                        such as panel headers.
                    ''',
                },
                'ACTION': {
                    'main': ('5c6ac5', 'call-to-action-mid'),
                    'shades': [
                        ('f4f5fa', 'call-to-action-extra-hi'),
                        ('b4bcf5', 'call-to-action-hi'),
                        ('212f78', 'call-to-action-low'),
                        ('000639', 'call-to-action-extra-low'),
                    ],
                    'inverse': True,
                    'name': 'Call to Action',
                    'description': '''
                        Use for buttons, checkmarks, radio buttons or actionable primary icons.
                        Do not use for text links. Used for <code>.btn-primary</code>.
                    ''',
                },
                'ACCENT_TEAL': {
                    'main': ('00bdc5', 'cc-light-cool-accent-mid'),
                    'shades': [
                        ('ccf3f4', 'cc-light-cool-accent-hi'),
                        ('00799a', 'cc-light-cool-accent-low'),
                    ],
                    'inverse': True,
                    'name': 'Accent Teal',
                    'description': '''
                        Use for primary button on dark backgrounds.
                        Use sparingly for secondary buttons, typically buttons indicating a download or upload.
                        Corresponds with "info" classes like <code>.btn-info</code>.
                    ''',
                },
                'SIGNUP_PURPLE': {
                    'main': ('43467F', 'color-purple-dark'),
                    'inverse': True,
                    'name': 'Signup Purple',
                    'description': "Use for banners or interactive elements in the signup and registration flow.",
                },
                'SIGNUP_PURPLE_INVERSE': {
                    'main': ('E3D0FF', 'color-purple-dark-inverse'),
                    'name': '',
                    'description': "Corresponds to signup purple."
                },
                'NEUTRAL': {
                    'main': ('685c53', 'cc-neutral-mid'),
                    'shades': [
                        ('d6d6d4', 'cc-neutral-hi'),
                        ('373534', 'cc-neutral-low'),
                    ],
                    'inverse': True,
                    'name': 'Neutral',
                    'description': '''
                        Use for neutral visual indicators, typically borders or backgrounds.
                    ''',
                },
                'BLUE': {
                    'main': ('004ebc', 'cc-brand-mid'),
                    'shades': [
                        ('bcdeff', 'cc-brand-hi'),
                        ('002c5f', 'cc-brand-low'),
                    ],
                    'inverse': True,
                    'name': 'Link, Selection',
                    'description': '''
                        Use for text links or to indicate that something is selected. Used in <code>.active</code>.
                    ''',
                },
                'ACCENT_PURPLE': {
                    'main': ('9060c8', 'cc-dark-cool-accent-mid'),
                    'shades': [
                        ('d6c5ea', 'cc-dark-cool-accent-hi'),
                        ('5d3f82', 'cc-dark-cool-accent-low'),
                    ],
                    'inverse': True,
                    'name': 'Accent Purple',
                    'description': '''
                        Avoid. Used occasionally for billing, web apps, and other unusual cases.
                    ''',
                },
                'ACCENT_ORANGE': {
                    'main': ('ff8400', 'cc-dark-warm-accent-mid'),
                    'shades': [
                        ('ffe3c2', 'cc-dark-warm-accent-hi'),
                        ('994f00', 'cc-dark-warm-accent-low'),
                    ],
                    'inverse': True,
                    'name': 'Accent Orange',
                    'description': '''
                        Avoid. Used occasionally for billing, web apps, and other unusual cases.
                    ''',
                },
            },
        }
class MoleculesStyleGuideView(BaseStyleGuideArticleView):
    """"Molecules" chapter: buttons, selections, modals and other widgets."""
    urlname = 'styleguide_molecules'
    navigation_name = 'molecules'

    @property
    def sections(self):
        section_names = (
            'intro', 'buttons', 'selections', 'checkboxes', 'modals',
            'pagination', 'search_box', 'inline_edit', 'feedback',
        )
        return ['molecules/%s' % name for name in section_names]

    @property
    def page_context(self):
        # 'selections' groups the three selection-widget examples; the rest
        # map one example file each. Insertion order matches the original
        # hand-written dict.
        examples = {
            'selections': {
                name: self.example('%s.html' % name)
                for name in ('button_group', 'select2', 'multiselect')
            },
        }
        for name in ('checkbox_in_form', 'lonely_checkbox', 'modals',
                     'pagination', 'search_box', 'inline_edit', 'feedback'):
            examples[name] = self.example('%s.html' % name)
        return {
            'checkboxes_form': CheckboxesForm(),
            'examples': examples,
        }
class OrganismsStyleGuideView(BaseStyleGuideArticleView):
    """"Organisms" chapter: forms and tables."""
    urlname = 'styleguide_organisms'
    navigation_name = 'organisms'

    @property
    def sections(self):
        return ['organisms/%s' % name for name in ('intro', 'forms', 'tables')]

    @property
    def page_context(self):
        example_names = ('html_form', 'error_form', 'basic_table', 'complex_table')
        return {
            'basic_crispy_form': BasicCrispyForm(),
            'examples': {name: self.example('%s.html' % name)
                         for name in example_names},
        }
class PagesStyleGuideView(BaseStyleGuideArticleView):
    """"Pages" chapter: navigation and full-page view patterns."""
    urlname = 'styleguide_pages'
    navigation_name = 'pages'

    @property
    def sections(self):
        section_names = ('intro', 'navigation', 'class_based', 'functional')
        return ['pages/%s' % name for name in section_names]

    @property
    def page_context(self):
        return {
            'examples': {name: self.example('%s.html' % name)
                         for name in ('header', 'panels', 'tabs')},
        }
| bsd-3-clause | 2fa468578564ef5da3cfa16ea44ad862 | 38.849866 | 115 | 0.457818 | 4.395033 | false | false | false | false |
dimagi/commcare-hq | corehq/util/workbook_reading/adapters/generic.py | 1 | 1156 | from contextlib import contextmanager
from corehq.util.workbook_reading import SpreadsheetFileExtError
from .csv import open_csv_workbook
from .xls import open_xls_workbook
from .xlsx import open_xlsx_workbook
# Maps each supported file extension to the context manager that opens it.
extensions_to_functions_dict = {
    'csv': open_csv_workbook,
    'xls': open_xls_workbook,
    'xlsx': open_xlsx_workbook,
}
# Extensions accepted by open_any_workbook (a live dict view, so it tracks
# any additions to the dict above).
valid_extensions = extensions_to_functions_dict.keys()
@contextmanager
def open_any_workbook(filename):
    """Open ``filename`` with the workbook opener matching its extension
    (csv/xls/xlsx), yielding the workbook object.

    Raises ``SpreadsheetFileExtError`` for a missing or unsupported
    extension.
    """
    opener = None
    if '.' in filename:
        extension = filename.rsplit('.', 1)[-1]
        opener = extensions_to_functions_dict.get(extension)
    if opener is None:
        # unsupported (or missing) extension: fail before yielding anything
        raise SpreadsheetFileExtError(
            'File {} does not have a valid extension. Valid extensions are: {}'.format(
                filename,
                ', '.join(valid_extensions),
            )
        )
    with opener(filename) as workbook:
        yield workbook
| bsd-3-clause | 90fa3a7eee8804a567d1cb907025d5e9 | 34.030303 | 94 | 0.679066 | 4.070423 | false | false | false | false |
dimagi/commcare-hq | corehq/form_processor/submission_post.py | 1 | 29766 | import logging
from collections import namedtuple
from ddtrace import tracer
from django.db import IntegrityError
from django.http import (
HttpRequest,
HttpResponse,
HttpResponseForbidden,
)
from django.conf import settings
from django.urls import reverse
from django.utils.translation import gettext as _
import sys
from casexml.apps.case.xform import close_extension_cases
from casexml.apps.phone.restore_caching import AsyncRestoreTaskIdCache, RestorePayloadPathCache
import couchforms
from casexml.apps.case.exceptions import PhoneDateValueError, IllegalCaseId, UsesReferrals, InvalidCaseIndex, \
CaseValueError
from corehq.apps.receiverwrapper.rate_limiter import report_case_usage, report_submission_usage
from corehq.const import OPENROSA_VERSION_3
from corehq.middleware import OPENROSA_VERSION_HEADER
from corehq.toggles import ASYNC_RESTORE, SUMOLOGIC_LOGS, NAMESPACE_OTHER
from corehq.apps.cloudcare.const import DEVICE_ID as FORMPLAYER_DEVICE_ID
from corehq.apps.commtrack.exceptions import MissingProductId
from corehq.apps.domain_migration_flags.api import any_migrations_in_progress
from corehq.apps.es.client import BulkActionItem
from corehq.apps.users.models import CouchUser
from corehq.apps.users.permissions import has_permission_to_view_report
from corehq.form_processor.exceptions import PostSaveError, XFormSaveError
from corehq.form_processor.interfaces.processor import FormProcessorInterface
from corehq.form_processor.models import XFormInstance
from corehq.form_processor.parsers.form import process_xform_xml
from corehq.form_processor.system_action import SYSTEM_ACTION_XMLNS, handle_system_action
from corehq.form_processor.utils.metadata import scrub_meta
from corehq.form_processor.submission_process_tracker import unfinished_submission
from corehq.util.metrics import metrics_counter
from corehq.util.metrics.load_counters import form_load_counter
from corehq.util.global_request import get_request
from corehq.util.timer import TimingContext
from couchforms import openrosa_response
from couchforms.const import DEVICE_LOG_XMLNS
from couchforms.models import DefaultAuthContext, UnfinishedSubmissionStub
from couchforms.signals import successful_form_received
from couchforms.util import legacy_notification_assert
from couchforms.openrosa_response import OpenRosaResponse, ResponseNature
from dimagi.utils.logging import notify_exception, log_signal_errors
from phonelog.utils import process_device_log, SumoLogicLog
# Bundle of the three artifacts produced by case/stock processing of a form.
CaseStockProcessingResult = namedtuple(
    'CaseStockProcessingResult',
    ['case_result', 'case_models', 'stock_result'],
)
class FormProcessingResult(namedtuple('FormProcessingResult', 'response xform cases ledgers submission_type')):

    @property
    def case(self):
        """Return the one-and-only case touched by this submission.

        Only valid when exactly one case was affected.
        """
        case_models = self.cases
        assert len(case_models) == 1
        return case_models[0]
class SubmissionPost(object):
    def __init__(self, instance=None, attachments=None, auth_context=None,
                 domain=None, app_id=None, build_id=None, path=None,
                 location=None, submit_ip=None, openrosa_headers=None,
                 last_sync_token=None, received_on=None, date_header=None,
                 partial_submission=False, case_db=None, force_logs=False,
                 timing_context=None):
        """Collect everything needed to process one form submission.

        :param instance: raw submitted XML payload; required, and must not
            be an HttpRequest (callers pass the body, not the request).
        :param attachments: mapping of attachment name -> content.
        :param auth_context: auth info for the submitter; defaults to
            ``DefaultAuthContext()``.
        :param domain: project space the submission belongs to; required.
        :param case_db: pre-existing case DB cache; only set when a system
            form is processed as part of another submission.
        """
        assert domain, "'domain' is required"
        assert instance, instance
        assert not isinstance(instance, HttpRequest), instance
        self.domain = domain
        self.app_id = app_id
        self.build_id = build_id
        # get_location has good default
        self.location = location or couchforms.get_location()
        self.received_on = received_on
        self.date_header = date_header
        self.submit_ip = submit_ip
        self.last_sync_token = last_sync_token
        self.openrosa_headers = openrosa_headers or {}
        self.instance = instance
        self.attachments = attachments or {}
        self.auth_context = auth_context or DefaultAuthContext()
        self.path = path
        self.interface = FormProcessorInterface(domain)
        self.partial_submission = partial_submission
        # always None except in the case where a system form is being processed as part of another submission
        # e.g. for closing extension cases
        self.case_db = case_db
        if case_db:
            assert case_db.domain == domain
        self.force_logs = force_logs
        self.is_openrosa_version3 = self.openrosa_headers.get(OPENROSA_VERSION_HEADER, '') == OPENROSA_VERSION_3
        # metrics counter for load tracking; called again per extra form
        self.track_load = form_load_counter("form_submission", domain)
        self.timing_context = timing_context or TimingContext()
def _set_submission_properties(self, xform):
# attaches shared properties of the request to the document.
# used on forms and errors
xform.submit_ip = self.submit_ip
xform.path = self.path
xform.openrosa_headers = self.openrosa_headers
xform.last_sync_token = self.last_sync_token
if self.received_on:
xform.received_on = self.received_on
if self.date_header:
xform.date_header = self.date_header
xform.app_id = self.app_id
xform.build_id = self.build_id
xform.export_tag = ["domain", "xmlns"]
xform.partial_submission = self.partial_submission
return xform
    def _handle_known_error(self, error, instance, xforms):
        # errors we know about related to the content of the form
        # log the error and respond with a success code so that the phone doesn't
        # keep trying to send the form
        # NOTE: mutates ``xforms`` in place, replacing the submitted form
        # with the error document built by ``_transform_instance_to_error``
        xforms[0] = _transform_instance_to_error(self.interface, error, instance)
        # this is usually just one document, but if an edit errored we want
        # to save the deprecated form as well
        self.interface.save_processed_models(xforms)
def _handle_basic_failure_modes(self):
if any_migrations_in_progress(self.domain):
# keep submissions on the phone
# until ready to start accepting again
return HttpResponse(status=503)
if not self.auth_context.is_valid():
return HttpResponseForbidden('Bad auth')
    def _post_process_form(self, xform):
        # Stamp request metadata onto the form, then scrub/normalize its
        # meta block; notify (legacy assert) if old-style metadata was found.
        self._set_submission_properties(xform)
        found_old = scrub_meta(xform)
        legacy_notification_assert(not found_old, 'Form with old metadata submitted', xform.form_id)
    def _get_success_message(self, instance, cases=None):
        '''
        Formplayer requests get a detailed success message pointing to the form/case affected.
        All other requests get a generic message.
        Message is formatted with markdown.

        Returns ' √ ' (a bare check mark) for submissions with missing
        metadata or from devices other than formplayer.
        '''
        if instance.metadata and (not instance.metadata.userID or not instance.metadata.instanceID):
            metrics_counter('commcare.xform_submissions.partial_metadata', tags={
                'domain': instance.domain,
            })
            # no return: partial metadata still gets the full message below
        elif not instance.metadata:
            metrics_counter('commcare.xform_submissions.no_metadata', tags={
                'domain': instance.domain,
            })
            return ' √ '
        # detailed messages are only for formplayer (web apps) submissions
        if instance.metadata.deviceID != FORMPLAYER_DEVICE_ID:
            return ' √ '
        messages = []
        user = CouchUser.get_by_user_id(instance.user_id)
        if not user or not user.is_web_user():
            return _('Form successfully saved!')
        # imported here to avoid circular imports at module load time
        from corehq.apps.export.views.list import CaseExportListView, FormExportListView
        from corehq.apps.export.views.utils import can_view_case_exports, can_view_form_exports
        from corehq.apps.reports.standard.cases.case_data import CaseDataView
        from corehq.apps.reports.views import FormDataView
        # each link is only populated if the user may view the target report
        form_link = case_link = form_export_link = case_export_link = None
        form_view = 'corehq.apps.reports.standard.inspect.SubmitHistory'
        if has_permission_to_view_report(user, instance.domain, form_view):
            form_link = reverse(FormDataView.urlname, args=[instance.domain, instance.form_id])
        case_view = 'corehq.apps.reports.standard.cases.basic.CaseListReport'
        if cases and has_permission_to_view_report(user, instance.domain, case_view):
            if len(cases) == 1:
                case_link = reverse(CaseDataView.urlname, args=[instance.domain, cases[0].case_id])
            else:
                # multiple cases: render a comma-separated list of markdown links
                case_link = ", ".join(["[{}]({})".format(
                    c.name, reverse(CaseDataView.urlname, args=[instance.domain, c.case_id])
                ) for c in cases])
        if can_view_form_exports(user, instance.domain):
            form_export_link = reverse(FormExportListView.urlname, args=[instance.domain])
        if cases and can_view_case_exports(user, instance.domain):
            case_export_link = reverse(CaseExportListView.urlname, args=[instance.domain])
        # Start with generic message
        messages.append(_('Form successfully saved!'))
        # Add link to form/case if possible
        if form_link and case_link:
            if len(cases) == 1:
                messages.append(
                    _("You submitted [this form]({}), which affected [this case]({}).")
                    .format(form_link, case_link))
            else:
                messages.append(
                    _("You submitted [this form]({}), which affected these cases: {}.")
                    .format(form_link, case_link))
        elif form_link:
            messages.append(_("You submitted [this form]({}).").format(form_link))
        elif case_link:
            if len(cases) == 1:
                messages.append(_("Your form affected [this case]({}).").format(case_link))
            else:
                messages.append(_("Your form affected these cases: {}.").format(case_link))
        # Add link to all form/case exports
        if form_export_link and case_export_link:
            messages.append(
                _("Click to export your [case]({}) or [form]({}) data.")
                .format(case_export_link, form_export_link))
        elif form_export_link:
            messages.append(_("Click to export your [form data]({}).").format(form_export_link))
        elif case_export_link:
            messages.append(_("Click to export your [case data]({}).").format(case_export_link))
        return "\n\n".join(messages)
def run(self):
self.track_load()
with self.timing_context("process_xml"):
report_submission_usage(self.domain)
failure_response = self._handle_basic_failure_modes()
if failure_response:
return FormProcessingResult(failure_response, None, [], [], 'known_failures')
result = process_xform_xml(self.domain, self.instance, self.attachments, self.auth_context.to_json())
submitted_form = result.submitted_form
self._post_process_form(submitted_form)
self._invalidate_caches(submitted_form)
if submitted_form.is_submission_error_log:
logging.info('Processing form %s as a submission error', submitted_form.form_id)
XFormInstance.objects.save_new_form(submitted_form)
response = None
try:
xml = self.instance.decode()
except UnicodeDecodeError:
pass
else:
if 'log_subreport' in xml:
response = self.get_exception_response_and_log(
'Badly formed device log', submitted_form, self.path
)
if not response:
response = self.get_exception_response_and_log(
'Problem receiving submission', submitted_form, self.path
)
return FormProcessingResult(response, None, [], [], 'submission_error_log')
if submitted_form.xmlns == SYSTEM_ACTION_XMLNS:
logging.info('Processing form %s as a system action', submitted_form.form_id)
with self.timing_context("process_system_action"):
return self.handle_system_action(submitted_form)
if submitted_form.xmlns == DEVICE_LOG_XMLNS:
logging.info('Processing form %s as a device log', submitted_form.form_id)
with self.timing_context("process_device_log"):
return self.process_device_log(submitted_form)
# Begin Normal Form Processing
self._log_form_details(submitted_form)
cases = []
ledgers = []
submission_type = 'unknown'
openrosa_kwargs = {}
with result.get_locked_forms() as xforms:
if len(xforms) > 1:
self.track_load(len(xforms) - 1)
if self.case_db:
case_db_cache = self.case_db
case_db_cache.cached_xforms.extend(xforms)
else:
case_db_cache = self.interface.casedb_cache(
domain=self.domain, lock=True, deleted_ok=True,
xforms=xforms, load_src="form_submission",
)
with case_db_cache as case_db:
instance = xforms[0]
if instance.is_duplicate:
with self.timing_context("process_duplicate"), tracer.trace('submission.process_duplicate'):
submission_type = 'duplicate'
existing_form = xforms[1]
stub = UnfinishedSubmissionStub.objects.filter(
domain=instance.domain,
xform_id=existing_form.form_id
).first()
result = None
if stub:
from corehq.form_processor.reprocess import reprocess_unfinished_stub_with_form
result = reprocess_unfinished_stub_with_form(stub, existing_form, lock=False)
elif existing_form.is_error:
from corehq.form_processor.reprocess import reprocess_form
result = reprocess_form(existing_form, lock_form=False)
if result and result.error:
submission_type = 'error'
openrosa_kwargs['error_message'] = result.error
if existing_form.is_error:
openrosa_kwargs['error_nature'] = ResponseNature.PROCESSING_FAILURE
else:
openrosa_kwargs['error_nature'] = ResponseNature.POST_PROCESSING_FAILURE
else:
self.interface.save_processed_models([instance])
elif not instance.is_error:
submission_type = 'normal'
try:
case_stock_result = self.process_xforms_for_cases(xforms, case_db, self.timing_context)
except (IllegalCaseId, UsesReferrals, MissingProductId,
PhoneDateValueError, InvalidCaseIndex, CaseValueError) as e:
self._handle_known_error(e, instance, xforms)
submission_type = 'error'
openrosa_kwargs['error_nature'] = ResponseNature.PROCESSING_FAILURE
except Exception as e:
# handle / log the error and reraise so the phone knows to resubmit
# note that in the case of edit submissions this won't flag the previous
# submission as having been edited. this is intentional, since we should treat
# this use case as if the edit "failed"
handle_unexpected_error(self.interface, instance, e)
raise
else:
instance.initial_processing_complete = True
openrosa_kwargs['error_message'] = self.save_processed_models(case_db, xforms,
case_stock_result)
if openrosa_kwargs['error_message']:
openrosa_kwargs['error_nature'] = ResponseNature.POST_PROCESSING_FAILURE
cases = case_stock_result.case_models
ledgers = case_stock_result.stock_result.models_to_save
report_case_usage(self.domain, len(cases))
openrosa_kwargs['success_message'] = self._get_success_message(instance, cases=cases)
elif instance.is_error:
submission_type = 'error'
self._log_form_completion(instance, submission_type)
response = self._get_open_rosa_response(instance, **openrosa_kwargs)
return FormProcessingResult(response, instance, cases, ledgers, submission_type)
def _log_form_details(self, form):
attachments = form.attachments if hasattr(form, 'attachments') else {}
logging.info('Received Form %s with %d attachments',
form.form_id, len(attachments))
for index, (name, attachment) in enumerate(attachments.items()):
attachment_msg = 'Form %s, Attachment %s: %s'
attachment_props = [form.form_id, index, name]
if hasattr(attachment, 'has_size') and attachment.has_size():
attachment_msg = attachment_msg + ' (%d bytes)'
attachment_props.append(attachment.raw_content.size)
logging.info(attachment_msg, *attachment_props)
def _log_form_completion(self, form, submission_type):
# Orig_id doesn't exist on all couch forms, only XFormError and XFormDeprecated
if hasattr(form, 'orig_id') and form.orig_id is not None:
logging.info('Finished %s processing for Form %s with original id %s',
submission_type, form.form_id, form.orig_id)
else:
logging.info('Finished %s processing for Form %s', submission_type, form.form_id)
def _conditionally_send_device_logs_to_sumologic(self, instance):
    """Forward the device log to Sumo Logic when a SUMOLOGIC_URL is
    configured and the SUMOLOGIC_LOGS toggle is enabled for this device."""
    url = getattr(settings, 'SUMOLOGIC_URL', None)
    if url and SUMOLOGIC_LOGS.enabled(instance.form_data.get('device_id'), NAMESPACE_OTHER):
        SumoLogicLog(self.domain, instance).send_data(url)
def _invalidate_caches(self, xform):
    """Invalidate restore-related caches affected by this submission.

    Invalidates both the submitting device's entry and the
    device-agnostic (None) entry; when the form has no metadata the set
    collapses to just {None}.
    """
    for device_id in {None, xform.metadata.deviceID if xform.metadata else None}:
        self._invalidate_restore_payload_path_cache(xform, device_id)
        if ASYNC_RESTORE.enabled(self.domain):
            self._invalidate_async_restore_task_id_cache(xform, device_id)
def _invalidate_restore_payload_path_cache(self, xform, device_id):
    """invalidate cached initial restores"""
    # keyed on (domain, user, sync log from the submitted form, device)
    restore_payload_path_cache = RestorePayloadPathCache(
        domain=self.domain,
        user_id=xform.user_id,
        sync_log_id=xform.last_sync_token,
        device_id=device_id,
    )
    restore_payload_path_cache.invalidate()
def _invalidate_async_restore_task_id_cache(self, xform, device_id):
    """Invalidate the async-restore task id cache and revoke any
    in-flight restore task for this user/device.

    Fix: the sync log id must come from the submitted form
    (``xform.last_sync_token``), mirroring
    ``_invalidate_restore_payload_path_cache`` above — the previous
    ``self.last_sync_token`` read the attribute off the submission
    handler instead of the form.
    """
    async_restore_task_id_cache = AsyncRestoreTaskIdCache(
        domain=self.domain,
        user_id=xform.user_id,
        sync_log_id=xform.last_sync_token,
        device_id=device_id,
    )
    task_id = async_restore_task_id_cache.get_value()
    if task_id is not None:
        # revoke the stale async restore task before clearing the cache
        from corehq.apps.celery import app
        app.control.revoke(task_id)
    async_restore_task_id_cache.invalidate()
@tracer.wrap(name='submission.save_models')
def save_processed_models(self, case_db, xforms, case_stock_result):
    """Persist forms/cases/ledgers, then run post-save actions.

    Returns an error message string when a PostSaveError escapes, else
    None. The unfinished-submission stub tracks progress so failed
    submissions can be picked up by the re-processing queue.
    """
    instance = xforms[0]
    try:
        with self.timing_context("save_models"), unfinished_submission(instance) as unfinished_submission_stub:
            try:
                self.interface.save_processed_models(
                    xforms,
                    case_stock_result.case_models,
                    case_stock_result.stock_result
                )
            except PostSaveError:
                # mark the stub as saved if there's a post save error
                # but re-raise the error so that the re-processing queue picks it up
                unfinished_submission_stub.submission_saved()
                raise
            else:
                unfinished_submission_stub.submission_saved()
            # runs only when the save itself succeeded; may also raise
            # PostSaveError, caught by the outer handler below
            with self.timing_context("post_save_actions"):
                self.do_post_save_actions(case_db, xforms, case_stock_result)
    except PostSaveError:
        return "Error performing post save operations"
@staticmethod
@tracer.wrap(name='submission.post_save_actions')
def do_post_save_actions(case_db, xforms, case_stock_result):
    """Finalize stock, sync case search, fire signals and close extension cases.

    Any unexpected failure is reported and normalized to PostSaveError so
    callers can handle all post-save failures uniformly.
    """
    instance = xforms[0]
    case_db.clear_changed()
    try:
        case_stock_result.stock_result.finalize()
        SubmissionPost.index_case_search(instance, case_stock_result.case_models)
        SubmissionPost._fire_post_save_signals(instance, case_stock_result.case_models)
        close_extension_cases(
            case_db,
            case_stock_result.case_models,
            "SubmissionPost-%s-close_extensions" % instance.form_id,
            instance.last_sync_token
        )
    except PostSaveError:
        # already normalized — let it propagate untouched
        raise
    except Exception:
        notify_exception(get_request(), "Error performing post save actions during form processing", {
            'domain': instance.domain,
            'form_id': instance.form_id,
        })
        raise PostSaveError
@staticmethod
def index_case_search(instance, case_models):
    """Synchronously index cases into the case-search ES index for
    formplayer (web apps) submissions, when enabled for the domain."""
    if not instance.metadata or instance.metadata.deviceID != FORMPLAYER_DEVICE_ID:
        return
    from corehq.apps.case_search.models import case_search_synchronous_web_apps_for_domain
    if not case_search_synchronous_web_apps_for_domain(instance.domain):
        return
    from corehq.pillows.case_search import transform_case_for_elasticsearch
    from corehq.apps.es.case_search import case_search_adapter
    actions = [
        BulkActionItem.index(transform_case_for_elasticsearch(case_model.to_json()))
        for case_model in case_models
    ]
    try:
        _, errors = case_search_adapter.bulk(actions, raise_on_error=False, raise_on_exception=False)
    except Exception as e:
        errors = [str(e)]
    if errors:
        # Notify but otherwise ignore all errors - the regular case search pillow is going to reprocess these
        notify_exception(None, "Error updating case_search ES index during form processing", details={
            'xml': instance,
            'domain': instance.domain,
            'errors': errors,
        })
@staticmethod
@tracer.wrap(name='submission.process_cases_and_stock')
def process_xforms_for_cases(xforms, case_db, timing_context=None):
    """Run case and ledger (stock) processing for the submitted forms.

    Returns a CaseStockProcessingResult bundling the case result, the
    case models ready to save, and the stock result.
    """
    from casexml.apps.case.xform import process_cases_with_casedb
    from corehq.apps.commtrack.processing import process_stock
    timing_context = timing_context or TimingContext()
    instance = xforms[0]
    with timing_context("process_cases"):
        case_result = process_cases_with_casedb(xforms, case_db)
    with timing_context("process_ledgers"):
        stock_result = process_stock(xforms, case_db)
        stock_result.populate_models()
    # for edited submissions, prefer the (later) edit time as modified_on
    modified_on_date = instance.received_on
    if getattr(instance, 'edited_on', None) and instance.edited_on > instance.received_on:
        modified_on_date = instance.edited_on
    with timing_context("check_cases_before_save"):
        cases = case_db.get_cases_for_saving(modified_on_date)
    return CaseStockProcessingResult(
        case_result=case_result,
        case_models=cases,
        stock_result=stock_result,
    )
def get_response(self):
    """Process the submission and return only the HTTP response."""
    return self.run().response
@staticmethod
def _fire_post_save_signals(instance, cases):
    """Send the form-received and per-case post-save signals robustly.

    Receiver errors are logged as they occur; if any receiver failed,
    PostSaveError is raised only after all signals have been sent.
    """
    from corehq.form_processor.signals import sql_case_post_save
    error_message = "Error occurred during form submission post save (%s)"
    error_details = {'domain': instance.domain, 'form_id': instance.form_id}
    results = successful_form_received.send_robust(None, xform=instance)
    has_errors = log_signal_errors(results, error_message, error_details)
    for case in cases:
        results = sql_case_post_save.send_robust(case.__class__, case=case)
        has_errors |= log_signal_errors(results, error_message, error_details)
    if has_errors:
        raise PostSaveError
def _get_open_rosa_response(self, instance, success_message=None, error_message=None, error_nature=None):
    """Build the OpenRosa HTTP response for a processed form.

    v3 clients get a success response, or a v3 (422) error response when
    the form is not normal/duplicate or error info was supplied; v2
    clients get success or a v2 submit-error response. Extra headers are
    added for device compatibility.
    """
    if self.is_openrosa_version3:
        instance_ok = instance.is_normal or instance.is_duplicate
        has_error = error_message or error_nature
        if instance_ok and not has_error:
            response = openrosa_response.get_openarosa_success_response(message=success_message)
        else:
            # fall back to the problem recorded on the form itself
            error_message = error_message or instance.problem
            response = self.get_v3_error_response(error_message, error_nature)
    else:
        if instance.is_normal:
            response = openrosa_response.get_openarosa_success_response()
        else:
            response = self.get_v2_submit_error_response(instance)
    # this hack is required for ODK
    response["Location"] = self.location
    # this is a magic thing that we add
    response['X-CommCareHQ-FormID'] = instance.form_id
    return response
@staticmethod
def get_v2_submit_error_response(doc):
    """Return a v2 submit-error OpenRosa response carrying the form's problem.

    NOTE(review): status 201 presumably keeps v2 clients from retrying
    the submission — confirm before changing.
    """
    return OpenRosaResponse(
        message=doc.problem, nature=ResponseNature.SUBMIT_ERROR, status=201,
    ).response()
@staticmethod
def get_v3_error_response(message, nature):
    """Returns a 422(Unprocessable Entity) response
    - if nature == 'processing_failure' the mobile device will quarantine this form and not retry it
    - any other value of `nature` will result in the form being marked as a failure and retrying

    :param message: human-readable error text included in the response body
    :param nature: ResponseNature value controlling client retry behavior
    """
    return OpenRosaResponse(
        message=message, nature=nature, status=422,
    ).response()
@staticmethod
def get_exception_response_and_log(msg, error_instance, path):
    """Log the submission error and return a non-retryable 422 response."""
    logging.warning(
        msg,
        extra={
            'submission_path': path,
            'form_id': error_instance.form_id,
            'error_message': error_instance.problem
        }
    )
    # These are generally badly formed XML resulting from file corruption,
    # encryption errors or other errors on the device which can not be
    # recovered from.
    # To prevent retries of these errors we submit a 422 response with
    # `processing_failure` nature.
    return OpenRosaResponse(
        message="There was an error processing the form: %s" % error_instance.problem,
        nature=ResponseNature.PROCESSING_FAILURE,
        status=422,
    ).response()
@tracer.wrap(name='submission.handle_system_action')
def handle_system_action(self, form):
    """Execute a system-action form, save it, and return a 201 result."""
    # delegates to the module-level handle_system_action function
    handle_system_action(form, self.auth_context)
    self.interface.save_processed_models([form])
    response = HttpResponse(status=201)
    return FormProcessingResult(response, form, [], [], 'system-action')
@tracer.wrap(name='submission.process_device_log')
def process_device_log(self, device_log_form):
    """Handle a device-log form: optionally ship it to Sumo Logic,
    process the log entries (unless the environment skips device logs or
    force_logs overrides that), and return an OpenRosa response."""
    self._conditionally_send_device_logs_to_sumologic(device_log_form)
    ignore_device_logs = settings.SERVER_ENVIRONMENT in settings.NO_DEVICE_LOG_ENVS
    if self.force_logs or not ignore_device_logs:
        try:
            # delegates to the module-level process_device_log function
            process_device_log(self.domain, device_log_form, self.force_logs)
        except Exception as e:
            notify_exception(None, "Error processing device log", details={
                'xml': self.instance,
                'domain': self.domain
            })
            # already reported above; avoid double-capturing in Sentry
            e.sentry_capture = False
            raise
    response = self._get_open_rosa_response(device_log_form)
    return FormProcessingResult(response, device_log_form, [], [], 'device-log')
def _transform_instance_to_error(interface, exception, instance):
error_message = '{}: {}'.format(type(exception).__name__, str(exception))
return interface.xformerror_from_xform_instance(instance, error_message)
def handle_unexpected_error(interface, instance, exception):
    """Convert the form to an error form, report it, and persist it.

    Falls back to a new form id on IntegrityError (duplicate id) and to
    a plain save on XFormSaveError.
    """
    instance = _transform_instance_to_error(interface, exception, instance)
    notify_submission_error(instance, instance.problem, sys.exc_info())
    try:
        XFormInstance.objects.save_new_form(instance)
    except IntegrityError:
        # handle edge case where saving duplicate form fails
        instance = interface.xformerror_from_xform_instance(instance, instance.problem, with_new_id=True)
        XFormInstance.objects.save_new_form(instance)
    except XFormSaveError:
        # try a simple save
        instance.save()
def notify_submission_error(instance, message, exec_info=None):
    """Report a submission error to the exception notifier with the form
    id and domain as context; defaults to the current exception info."""
    from corehq.util.global_request.api import get_request
    exec_info = exec_info or sys.exc_info()
    # instance may not carry a domain (e.g. unparseable submissions)
    domain = getattr(instance, 'domain', '---')
    details = {
        'domain': domain,
        'error form ID': instance.form_id,
    }
    request = get_request()
    notify_exception(request, message, details=details, exec_info=exec_info)
# -*- coding: utf-8 -*-
from datetime import timedelta
from django.contrib.admin.options import get_content_type_for_model
from django.template import defaultfilters
from django.utils.timezone import get_current_timezone, now
from django.utils.translation import pgettext_lazy as pgettext
from pytz import timezone
from bluebottle.clients.utils import tenant_url
from bluebottle.notifications.messages import TransitionMessage
from bluebottle.notifications.models import Message
from bluebottle.time_based.models import (
DateParticipant, SlotParticipant,
PeriodParticipant, DateActivitySlot
)
def get_slot_info(slot):
    """Serialize a slot into the template context used by notification mails."""
    # in-person slots render times in the venue's timezone, everything
    # else in the current (server) timezone
    use_location_tz = slot.location and not slot.is_online
    tz = timezone(slot.location.timezone) if use_location_tz else get_current_timezone()
    local_start = slot.start.astimezone(tz)
    local_end = slot.end.astimezone(tz)
    return {
        'title': slot.title or str(slot),
        'is_online': slot.is_online,
        'online_meeting_url': slot.online_meeting_url,
        'location': slot.location.formatted_address if slot.location else '',
        'location_hint': slot.location_hint,
        'start_date': defaultfilters.date(local_start),
        'start_time': defaultfilters.time(local_start),
        'end_time': defaultfilters.time(local_end),
        'timezone': local_start.strftime('%Z')
    }
class TimeBasedInfoMixin(object):
    """Mixin that adds participation details (registered slots, or the
    activity period) to the notification template context."""

    def get_context(self, recipient):
        context = super().get_context(recipient)
        # self.obj can be a participant, a slot, a slot participant or an
        # activity; normalize to the recipient's participant record
        if isinstance(self.obj, (DateParticipant, PeriodParticipant)):
            participant = self.obj
        elif isinstance(self.obj, DateActivitySlot):
            participant = self.obj.activity.participants.get(user=recipient)
        elif isinstance(self.obj, SlotParticipant):
            participant = self.obj.participant
        else:
            participant = self.obj.participants.get(user=recipient)

        if isinstance(participant, DateParticipant):
            # date activities: list every slot the participant registered for
            slots = []
            for slot_participant in participant.slot_participants.filter(
                status='registered'
            ):
                slots.append(get_slot_info(slot_participant.slot))
            context.update({'slots': slots})
        elif isinstance(participant, PeriodParticipant):
            # period activities: expose the activity's start/deadline
            context.update({
                'start': participant.activity.start,
                'end': participant.activity.deadline,
            })
        return context
class DeadlineChangedNotification(TransitionMessage):
    """
    The deadline of the activity changed
    """
    subject = pgettext('email', 'The deadline for your activity "{title}" changed')
    template = 'messages/deadline_changed'
    context = {
        'title': 'title',
    }

    @property
    def action_link(self):
        return self.obj.get_absolute_url()

    action_title = pgettext('email', 'View activity')

    def get_recipients(self):
        """participants that signed up"""
        return [
            participant.user for participant in self.obj.accepted_participants
        ]

    def get_context(self, recipient):
        # NOTE(review): the pgettext context 'emai' below looks like a typo
        # for 'email'; existing translations may be keyed on it, so it is
        # deliberately left unchanged — confirm before fixing.
        context = super().get_context(recipient)
        if self.obj.start:
            context['start'] = pgettext(
                'emai', 'on {start}'
            ).format(start=defaultfilters.date(self.obj.start))
        else:
            context['start'] = pgettext('emai', 'immediately')
        if self.obj.deadline:
            context['end'] = pgettext(
                'emai', 'ends on {end}'
            ).format(end=defaultfilters.date(self.obj.deadline))
        else:
            context['end'] = pgettext('emai', 'runs indefinitely')
        return context
class ReminderSingleDateNotification(TimeBasedInfoMixin, TransitionMessage):
    """
    Reminder notification for a single date activity
    """
    subject = pgettext('email', 'The activity "{title}" will take place in a few days!')
    template = 'messages/reminder_single_date'
    send_once = True
    context = {
        'title': 'title',
    }

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.get_absolute_url()

    def get_recipients(self):
        """participants that signed up"""
        recipients = []
        for participant in self.obj.accepted_participants:
            recipients.append(participant.user)
        return recipients
class ReminderSlotNotification(TimeBasedInfoMixin, TransitionMessage):
    """
    Reminder notification for a date activity slot
    """
    subject = pgettext('email', 'The activity "{title}" will take place in a few days!')
    template = 'messages/reminder_slot'
    send_once = True
    context = {
        'title': 'activity.title',
    }

    def get_slots(self, recipient):
        """Open/full slots on this slot's day the recipient registered for.

        Participants created within the last 5 days are excluded —
        presumably because they recently received a confirmation mail;
        TODO confirm the rationale.
        """
        days_ago = now() - timedelta(days=5)
        return self.obj.activity.slots.filter(
            start__date=self.obj.start.date(),
            slot_participants__participant__user=recipient,
            slot_participants__status__in=['registered'],
            slot_participants__participant__created__lt=days_ago,
            status__in=['open', 'full']
        ).all()

    def already_send(self, recipient):
        # no matching slots counts as "already sent" so nothing goes out
        slot_ids = self.get_slots(recipient).values_list('id', flat=True)
        if slot_ids.count() == 0:
            return True
        # sent before if a message for any of these slots was recorded
        return Message.objects.filter(
            template=self.get_template(),
            recipient=recipient,
            content_type=get_content_type_for_model(self.obj),
            object_id__in=slot_ids
        ).count() > 0

    def get_context(self, recipient):
        context = super().get_context(recipient)
        slots = self.get_slots(recipient).all()
        context['slots'] = [get_slot_info(slot) for slot in slots]
        return context

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    action_title = pgettext('email', 'View activity')

    def get_recipients(self):
        """participants that signed up"""
        return [
            participant.user for participant in self.obj.accepted_participants
        ]
class ReminderTeamSlotNotification(TransitionMessage):
    """
    Reminder notification for a team activity slot
    """
    subject = pgettext('email', 'The team activity "{title}" will take place in a few days!')
    template = 'messages/reminder_team_slot'
    send_once = True
    context = {
        'title': 'activity.title',
        'team_name': 'team',
        'start': 'start',
        'duration': 'duration',
        'end': 'end',
        'timezone': 'timezone',
        'location': 'location',
    }

    def already_send(self, recipient):
        # at most one reminder per recipient per slot
        return Message.objects.filter(
            template=self.get_template(),
            recipient=recipient,
            content_type=get_content_type_for_model(self.obj),
            object_id=self.obj.id
        ).count() > 0

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    action_title = pgettext('email', 'View activity')

    def get_recipients(self):
        """participants that signed up"""
        return [
            participant.user for participant in self.obj.team.accepted_participants
        ]
class ChangedSingleDateNotification(TimeBasedInfoMixin, TransitionMessage):
    """
    Notification when slot details (date, time or location) changed for a single date activity
    """
    subject = pgettext('email', 'The details of activity "{title}" have changed')
    template = 'messages/changed_single_date'
    context = {
        'title': 'activity.title',
    }

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """participants that signed up"""
        recipients = []
        for participant in self.obj.activity.accepted_participants:
            recipients.append(participant.user)
        return recipients
class ChangedMultipleDateNotification(TimeBasedInfoMixin, TransitionMessage):
    """
    Notification when slot details (date, time or location) changed for a single date activity
    """
    subject = pgettext('email', 'The details of activity "{title}" have changed')
    template = 'messages/changed_multiple_dates'
    context = {
        'title': 'activity.title',
    }

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """participants that signed up"""
        recipients = []
        for slot_participant in self.obj.slot_participants.all():
            # only registered slot participants whose overall
            # participation has been accepted
            if (
                slot_participant.status == 'registered'
                and slot_participant.participant.status == 'accepted'
            ):
                recipients.append(slot_participant.participant.user)
        return recipients
class TeamSlotChangedNotification(TransitionMessage):
    """
    Notification when slot details (date, time or location) changed for a team activity
    """
    subject = pgettext('email', 'The details of the team activity "{title}" have changed')
    template = 'messages/changed_team_date'
    context = {
        'title': 'activity.title',
        'team_name': 'team',
        'start': 'start',
        'duration': 'duration',
        'end': 'end',
        'is_online': 'activity.is_online',
        'location': 'location.formatted_address',
        'timezone': 'timezone',
    }

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    action_title = pgettext('email', 'View activity')

    def get_recipients(self):
        """team members"""
        return [
            participant.user for participant in self.obj.team.accepted_participants
        ]
class ActivitySucceededManuallyNotification(TransitionMessage):
    """
    The activity was set to succeeded manually
    """
    subject = pgettext('email', 'The activity "{title}" has succeeded 🎉')
    template = 'messages/activity_succeeded_manually'
    context = {
        'title': 'title',
    }

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.get_absolute_url()

    def get_recipients(self):
        """participants that signed up"""
        recipients = []
        for participant in self.obj.accepted_participants:
            recipients.append(participant.user)
        return recipients
class ParticipantAddedNotification(TransitionMessage):
    """
    A participant was added manually (through back-office)
    """
    subject = pgettext('email', 'You have been added to the activity "{title}" 🎉')
    template = 'messages/participant_added'
    context = {
        'title': 'activity.title',
    }

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """participant"""
        # no recipient when the participant record has no user attached
        return [self.obj.user] if self.obj.user else []
class TeamParticipantAddedNotification(TransitionMessage):
    """
    A participant was added to a team manually (through back-office)
    """
    subject = pgettext('email', 'You have been added to a team for "{title}" 🎉')
    template = 'messages/team_participant_added'
    context = {
        'title': 'activity.title',
        'team_name': 'team.name',
    }

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """participant"""
        # no recipient when the participant record has no user attached
        return [self.obj.user] if self.obj.user else []
class ParticipantCreatedNotification(TransitionMessage):
    """
    A participant applied for the activity and should be reviewed
    """
    subject = pgettext('email', 'You have a new participant for your activity "{title}" 🎉')
    template = 'messages/participant_created'
    context = {
        'title': 'activity.title',
    }

    action_title = pgettext('email', 'Open your activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """activity owner"""
        owner = self.obj.activity.owner
        return [owner]
class NewParticipantNotification(TransitionMessage):
    """
    A participant joined the activity (no review required)
    """
    subject = pgettext('email', 'A new participant has joined your activity "{title}" 🎉')
    template = 'messages/new_participant'
    context = {
        'title': 'activity.title',
        'applicant_name': 'user.full_name'
    }

    action_title = pgettext('email', 'Open your activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """activity owner"""
        # skip anonymous/userless participants
        return [self.obj.activity.owner] if self.obj.user else []
class ParticipantNotification(TimeBasedInfoMixin, TransitionMessage):
    """
    A participant was added manually (through back-office)
    """
    # NOTE(review): no `subject`/`template` is defined here — presumably
    # this class serves as a base for concrete participant messages;
    # confirm before using it directly.
    context = {
        'title': 'activity.title',
    }

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    action_title = pgettext('email', 'View activity')

    def get_recipients(self):
        """participant"""
        return [self.obj.user]
class ParticipantJoinedNotification(TimeBasedInfoMixin, TransitionMessage):
    """
    The participant joined
    """
    subject = pgettext('email', 'You have joined the activity "{title}"')
    template = 'messages/participant_joined'
    context = {
        'title': 'activity.title',
    }
    # send after a short grace period so rapid changes can collapse
    delay = 60

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """participant"""
        recipient = self.obj.user
        return [recipient]
class TeamParticipantJoinedNotification(TransitionMessage):
    """
    The participant joined
    """
    subject = pgettext('email', 'You have registered your team for "{title}"')
    template = 'messages/team_participant_joined'
    context = {
        'title': 'activity.title',
    }
    # send after a short grace period so rapid changes can collapse
    delay = 60

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """team captain"""
        captain = self.obj.owner
        return [captain]
class ParticipantChangedNotification(TimeBasedInfoMixin, TransitionMessage):
    """
    The participant withdrew or applied to a slot when already applied to other slots
    """
    subject = pgettext('email', 'You have changed your application on the activity "{title}"')
    template = 'messages/participant_changed'
    context = {
        'title': 'activity.title',
    }
    # shorter than the joined/applied delay (60) — presumably so a pending
    # joined/applied mail suppresses this one rather than the reverse;
    # TODO confirm
    delay = 55

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    action_title = pgettext('email', 'View activity')

    @property
    def task_id(self):
        # one delayed task per participant, so repeated slot changes collapse
        return f'{self.__class__.__name__}-{self.obj.participant.id}'

    def get_recipients(self):
        """participant"""
        # suppress when the participant withdrew, or when a joined/applied/
        # changed message is still pending (delayed) — that message will
        # already reflect the final state
        joined_message = ParticipantJoinedNotification(self.obj.participant)
        applied_message = ParticipantAppliedNotification(self.obj.participant)
        changed_message = ParticipantChangedNotification(self.obj)
        # re-fetch to get the participant's current status
        participant = DateParticipant.objects.get(pk=self.obj.participant.pk)
        if (
            participant.status == 'withdrawn' or
            joined_message.is_delayed or
            changed_message.is_delayed or applied_message.is_delayed
        ):
            return []
        return [self.obj.participant.user]
class ParticipantAppliedNotification(TimeBasedInfoMixin, TransitionMessage):
    """
    The participant joined
    """
    subject = pgettext('email', 'You have applied to the activity "{title}"')
    template = 'messages/participant_applied'
    context = {
        'title': 'activity.title',
    }
    # send after a short grace period so rapid changes can collapse
    delay = 60

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """participant"""
        recipient = self.obj.user
        return [recipient]
class TeamParticipantAppliedNotification(TimeBasedInfoMixin, TransitionMessage):
    """
    The participant joined as a team joined
    """
    subject = pgettext('email', 'You have registered your team for "{title}"')
    template = 'messages/team_participant_applied'
    context = {
        'title': 'activity.title',
    }
    # send after a short grace period so rapid changes can collapse
    delay = 60

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """participant"""
        recipient = self.obj.user
        return [recipient]
class TeamMemberJoinedNotification(TimeBasedInfoMixin, TransitionMessage):
    """
    The participant joined as a team joined
    """
    subject = pgettext('email', 'You have joined {team_name} for "{title}"')
    template = 'messages/team_member_joined'
    context = {
        'title': 'activity.title',
        'team_name': 'team.name'
    }
    # delay = 60
    # NOTE(review): the delay above is disabled while the sibling
    # joined/applied messages use 60 — confirm this is intentional.

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    action_title = pgettext('email', 'View activity')

    def get_recipients(self):
        """participant"""
        return [self.obj.user]
class ParticipantAcceptedNotification(TimeBasedInfoMixin, TransitionMessage):
    """
    The participant got accepted after review
    """
    subject = pgettext('email', 'You have been selected for the activity "{title}" 🎉')
    template = 'messages/participant_accepted'
    context = {
        'title': 'activity.title',
    }

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """participant"""
        recipient = self.obj.user
        return [recipient]
class ParticipantRejectedNotification(TransitionMessage):
    """
    The participant got rejected after review
    """
    subject = pgettext('email', 'You have not been selected for the activity "{title}"')
    template = 'messages/participant_rejected'
    context = {
        'title': 'activity.title',
    }

    action_title = pgettext('email', 'View all activities')

    @property
    def action_link(self):
        # point rejected participants at the activity overview instead
        return tenant_url('/initiatives/activities/list')

    def get_recipients(self):
        """participant"""
        recipient = self.obj.user
        return [recipient]
class ParticipantRemovedNotification(TransitionMessage):
    """
    The participant was removed from the activity
    """
    subject = pgettext('email', 'You have been removed as participant for the activity "{title}"')
    template = 'messages/participant_removed'
    context = {
        'title': 'activity.title',
    }

    action_title = pgettext('email', 'View all activities')

    @property
    def action_link(self):
        # point removed participants at the activity overview instead
        return tenant_url('/initiatives/activities/list')

    def get_recipients(self):
        """participant"""
        recipient = self.obj.user
        return [recipient]
class TeamParticipantRemovedNotification(TransitionMessage):
    """
    The participant was removed from the activity
    """
    subject = pgettext('email', 'Your team participation in ‘{title}’ has been cancelled')
    template = 'messages/team_participant_removed'
    context = {
        'title': 'activity.title',
        'team_name': 'team.name',
    }

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """participant"""
        recipient = self.obj.user
        return [recipient]
class ParticipantFinishedNotification(TransitionMessage):
    """
    The participant was finished
    """
    subject = pgettext('email', 'Your contribution to the activity "{title}" is successful 🎉')
    template = 'messages/participant_finished'
    context = {
        'title': 'activity.title',
    }

    action_title = pgettext('email', 'View activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """participant"""
        recipient = self.obj.user
        return [recipient]
class ParticipantWithdrewNotification(TransitionMessage):
    """
    A participant withdrew from your activity
    """
    subject = pgettext('email', 'A participant has withdrawn from your activity "{title}"')
    template = 'messages/participant_withdrew'
    context = {
        'title': 'activity.title',
        'applicant_name': 'user.full_name'
    }

    action_title = pgettext('email', 'Open your activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """activity owner"""
        owner = self.obj.activity.owner
        return [owner]
class ParticipantAddedOwnerNotification(TransitionMessage):
    """
    A participant added notify owner
    """
    subject = pgettext('email', 'A participant has been added to your activity "{title}" 🎉')
    template = 'messages/participant_added_owner'
    context = {
        'title': 'activity.title',
        'participant_name': 'user.full_name'
    }

    action_title = pgettext('email', 'Open your activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """activity owner"""
        # skip anonymous/userless participants
        return [self.obj.activity.owner] if self.obj.user else []
class ParticipantRemovedOwnerNotification(TransitionMessage):
    """
    A participant removed notify owner
    """
    subject = pgettext('email', 'A participant has been removed from your activity "{title}"')
    template = 'messages/participant_removed_owner'
    context = {
        'title': 'activity.title',
        'participant_name': 'user.full_name'
    }

    action_title = pgettext('email', 'Open your activity')

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    def get_recipients(self):
        """activity owner"""
        owner = self.obj.activity.owner
        return [owner]
class SlotCancelledNotification(TransitionMessage):
    """
    The activity slot got cancelled
    """
    subject = pgettext('email', 'A slot for your activity "{title}" has been cancelled')
    template = 'messages/slot_cancelled'
    context = {
        'title': 'activity.title',
    }

    def get_context(self, recipient):
        context = super().get_context(recipient)
        # templates expect a list of slots; reuse the slot-info serializer
        context['slots'] = [get_slot_info(self.obj)]
        return context

    def get_recipients(self):
        """participants that signed up"""
        # NOTE(review): despite the docstring above, only the activity
        # owner is returned — confirm which audience is intended.
        return [
            self.obj.activity.owner
        ]

    @property
    def action_link(self):
        return self.obj.activity.get_absolute_url()

    action_title = pgettext('email', 'Open your activity')
import settings
from casexml.apps.case.xml import V1, V2, V3, check_version, V2_NAMESPACE
from lxml import etree as ElementTree
import logging
from dimagi.utils.parsing import json_format_datetime, json_format_date
from dateutil.parser import parse as parse_datetime
from corehq.toggles import MM_CASE_PROPERTIES
from corehq.util.quickcache import quickcache
def datetime_to_xml_string(datetime_string):
    """Normalize a bytes/str/datetime value to a datetime string for XML."""
    value = datetime_string
    if isinstance(value, bytes):
        value = value.decode('utf-8')
    # Strings pass through untouched; datetimes get JSON-style formatting.
    return value if isinstance(value, str) else json_format_datetime(value)
def safe_element(tag, text=None):
    """Build an XML element; attach stringified ``text`` only when truthy.

    NOTE: falsy values (``0``, ``''``, ``None``) produce an element with no
    text, matching the original phone-side XML helper this was copied from.
    """
    element = ElementTree.Element(tag)
    if text:
        element.text = str(text)
    return element
def date_to_xml_string(date):
    """Normalize a bytes/str/date value to a date string ('' for falsy input)."""
    if not date:
        return ''
    value = date
    if isinstance(value, bytes):
        value = value.decode('utf-8')
    if isinstance(value, str):
        value = parse_datetime(value)
    return json_format_date(value)
def get_dynamic_element(key, val):
    """
    Gets an element from a key/value pair assumed to be pulled from
    a case object (usually in the dynamic properties)
    """
    element = ElementTree.Element(key)
    if not isinstance(val, dict):
        # assume it's a string. Hopefully this is valid
        element.text = str(val)
        return element
    element.text = str(val.get('#text', ''))
    # Keys prefixed with "@" become attributes (prefix stripped).
    for attr_key, attr_val in val.items():
        if attr_key and attr_key.startswith("@"):
            element.set(attr_key[1:], str(attr_val))
    return element
class CaseXMLGeneratorBase(object):
    """Base class for generating CaseXML elements from a case object.

    The breakdown of functionality here is a little sketchy, but basically
    everything that changed from v1 to v2 gets a split. The rest is
    attempted to be as DRY as possible.
    """

    def __init__(self, case):
        self.case = case

    def _ni(self):
        # Force subclasses to override any methods that we don't explicitly
        # want to implement in the base class. However fill in a lot ourselves.
        raise NotImplementedError("That method must be overridden by subclass!")

    def get_root_element(self):
        self._ni()

    def get_create_element(self):
        return safe_element("create")

    def get_update_element(self):
        return safe_element("update")

    def get_close_element(self):
        return safe_element("close")

    def get_index_element(self, index):
        """Build an index element; only "extension" relationships are emitted."""
        elem = safe_element(index.identifier, index.referenced_id)
        elem.set("case_type", index.referenced_type)
        # Use a default so indices lacking a relationship attribute don't
        # raise AttributeError (bare getattr was equivalent to plain access).
        if getattr(index, 'relationship', None) == "extension":
            elem.attrib.update({"relationship": index.relationship})
        return elem

    def get_case_type_element(self):
        self._ni()

    def get_user_id_element(self):
        return safe_element("user_id", self.case.user_id)

    def get_case_name_element(self):
        return safe_element("case_name", self.case.name)

    def get_external_id_element(self):
        return safe_element("external_id", self.case.external_id)

    def add_base_properties(self, element):
        """Append the properties shared by all versions (type, name)."""
        element.append(self.get_case_type_element())
        element.append(self.get_case_name_element())

    def add_custom_properties(self, element):
        """Append one element per dynamic case property."""
        for k, v, in self.case.dynamic_case_properties().items():
            element.append(get_dynamic_element(k, v))

    def add_indices(self, element):
        self._ni()
class V1CaseXMLGenerator(CaseXMLGeneratorBase):
    """Generates version 1 CaseXML (flat, non-namespaced elements)."""
    def get_root_element(self):
        root = safe_element("case")
        # moved to attrs in v2
        root.append(safe_element("case_id", self.case.case_id))
        if self.case.modified_on:
            root.append(safe_element("date_modified",
                                     datetime_to_xml_string(self.case.modified_on)))
        return root
    def get_case_type_element(self):
        # v1 calls this "case_type_id"; renamed to "case_type" in v2
        return safe_element("case_type_id", self.case.type)
    def add_base_properties(self, element):
        # Order matters: the appended children define the XML output order.
        element.append(self.get_case_type_element())
        # moved in v2
        element.append(self.get_user_id_element())
        element.append(self.get_case_name_element())
        # deprecated in v2
        element.append(self.get_external_id_element())
    def add_custom_properties(self, element):
        # owner_id and date_opened are plain child elements in v1.
        if self.case.owner_id:
            element.append(safe_element('owner_id', self.case.owner_id))
        if self.case.opened_on:
            element.append(safe_element('date_opened', date_to_xml_string(self.case.opened_on)))
        super(V1CaseXMLGenerator, self).add_custom_properties(element)
    def add_indices(self, element):
        # intentionally a no-op: v1 CaseXML has no index support
        if self.case.indices:
            logging.info("Tried to add indices to version 1 CaseXML restore. This is not supported. "
                         "The case id is %s, domain %s." % (self.case.case_id, self.case.domain))
    def add_attachments(self, element):
        # v1 CaseXML has no attachment support either.
        pass
class V2CaseXMLGenerator(CaseXMLGeneratorBase):
    """Generates version 2 CaseXML (namespaced, attribute-based root)."""
    def get_root_element(self):
        root = safe_element("case")
        # v2 moves case_id/user_id/date_modified from child elements to attributes.
        root.attrib.update({
            "xmlns": V2_NAMESPACE,
            "case_id": self.case.case_id,
            "user_id": self.case.user_id or '',
        })
        if self.case.modified_on:
            root.set("date_modified", datetime_to_xml_string(self.case.modified_on))
        return root
    def get_case_type_element(self):
        # case_type_id --> case_type
        return safe_element("case_type", self.case.type)
    def add_base_properties(self, element):
        super(V2CaseXMLGenerator, self).add_base_properties(element)
        # Local import — presumably avoids a circular import; confirm before moving.
        from corehq.apps.users.cases import get_owner_id
        element.append(safe_element('owner_id', get_owner_id(self.case)))
    def add_custom_properties(self, element):
        # Optional well-known properties come before the dynamic ones.
        if self.case.external_id:
            element.append(safe_element('external_id', self.case.external_id))
        if self.case.location_id:
            element.append(safe_element('location_id', self.case.location_id))
        if self.case.opened_on:
            element.append(safe_element("date_opened", date_to_xml_string(self.case.opened_on)))
        super(V2CaseXMLGenerator, self).add_custom_properties(element)
    def add_indices(self, element):
        if self.case.indices:
            indices = []
            index_elem = safe_element("index")
            for i in self.case.indices:
                indices.append(self.get_index_element(i))
            # Sort by tag for deterministic output.
            indices.sort(key=lambda elem: elem.tag)
            for index in indices:
                index_elem.append(index)  # .extend() only works in python 2.7
            element.append(index_elem)
    def add_attachments(self, element):
        # Attachments are only synced when the MM_CASE_PROPERTIES toggle is on.
        if _sync_attachments(self.case.domain):
            if self.case.case_attachments:
                attachment_elem = safe_element("attachment")
                for k, a in self.case.case_attachments.items():
                    aroot = safe_element(k)
                    # moved to attrs in v2
                    aroot.attrib.update({
                        "src": self.case.get_attachment_server_url(k),
                        "from": "remote"
                    })
                    attachment_elem.append(aroot)
                element.append(attachment_elem)
@quickcache(['domain'],
            skip_arg=lambda _: settings.UNIT_TESTING,
            memoize_timeout=12 * 60 * 60,
            timeout=12 * 60 * 60)
def _sync_attachments(domain):
    # Whether case attachments should be included for this domain.
    # Cached per domain for 12 hours; caching is skipped under unit tests.
    return MM_CASE_PROPERTIES.enabled(domain)
def get_generator(version, case):
    """Return a CaseXML generator instance appropriate for ``version``.

    Raises via ``check_version`` when the version is unsupported.
    """
    check_version(version)
    return GENERATOR_MAP[version](case)
# V3 CaseXML is generated identically to V2.
GENERATOR_MAP = {
    V1: V1CaseXMLGenerator,
    V2: V2CaseXMLGenerator,
    V3: V2CaseXMLGenerator
}
class CaseDBXMLGenerator(V2CaseXMLGenerator):
    """Generates the <case> element used in the case DB fixture.

    Unlike the V2 restore payload, the root element carries the case type,
    owner and open/closed status as attributes and has no XML namespace.
    """
    # The redundant __init__ override was removed: it only set self.case,
    # which is exactly what the inherited constructor already does.

    def get_root_element(self):
        from corehq.apps.users.cases import get_owner_id
        root = safe_element("case")
        root.attrib.update({
            "case_id": self.case.case_id,
            "case_type": self.case.type,
            "owner_id": get_owner_id(self.case),
            "status": "closed" if self.case.closed else "open",
        })
        return root

    def add_base_properties(self, element):
        """Only the case name and last-modified time appear as child elements."""
        element.append(self.get_case_name_element())
        if self.case.modified_on:
            element.append(safe_element("last_modified", datetime_to_xml_string(self.case.modified_on)))

    def get_element(self):
        """Build and return the complete case element."""
        element = self.get_root_element()
        self.add_base_properties(element)
        self.add_custom_properties(element)
        self.add_indices(element)
        self.add_attachments(element)
        return element
| bsd-3-clause | 0d35b33eff8586e8092a699cfdbbb116 | 32.895753 | 104 | 0.626267 | 3.784052 | false | false | false | false |
dimagi/commcare-hq | custom/abt/reports/filters_2020.py | 1 | 3236 | from django.utils.translation import gettext_lazy
from corehq.apps.reports.filters.base import BaseSingleOptionFilter
from custom.abt.reports.fixture_utils import (
get_data_type_ids_by_tag,
get_fixture_dicts,
)
class VectorLinkLocFilter(BaseSingleOptionFilter):
    """Base report filter for the VectorLink location hierarchy.

    Subclasses expose one dropdown per level; options come from lookup-table
    (fixture) rows, with "other" placeholder rows excluded.
    """
    default_text = 'All'

    def _get_level_fixtures(self, tag, parent_field, parent_ids):
        """Fetch fixture dicts for ``tag`` whose ``parent_field`` is in ``parent_ids``."""
        data_types_by_tag = get_data_type_ids_by_tag(self.domain)
        return get_fixture_dicts(
            self.domain,
            data_types_by_tag[tag],
            filter_in={parent_field: parent_ids},
            filter_out={'other': '1'},
        )

    def get_level_2s(self, level_1_ids):
        return self._get_level_fixtures("level_2_eco", 'level_1_eco', level_1_ids)

    def get_level_3s(self, level_2_ids):
        return self._get_level_fixtures("level_3_eco", 'level_2_eco', level_2_ids)
class LevelOneFilter(VectorLinkLocFilter):
    slug = 'level_1'
    label = gettext_lazy('Level 1')

    @property
    def options(self):
        """All level-1 locations (excluding "other" placeholders)."""
        data_types_by_tag = get_data_type_ids_by_tag(self.domain)
        locations = get_fixture_dicts(
            self.domain,
            data_types_by_tag["level_1_eco"],
            filter_out={'other': '1'},
        )
        return [(location['id'], location['name']) for location in locations]
class LevelTwoFilter(VectorLinkLocFilter):
    slug = 'level_2'
    label = gettext_lazy('Level 2')

    @property
    def options(self):
        """Level-2 locations, narrowed by a selected level 1 when present."""
        level_1 = self.request.GET.get('level_1')
        level_1_ids = [level_1] if level_1 else None
        return [(location['id'], location['name'])
                for location in self.get_level_2s(level_1_ids)]
class LevelThreeFilter(VectorLinkLocFilter):
    slug = 'level_3'
    label = gettext_lazy('Level 3')

    @property
    def options(self):
        """Level-3 locations, narrowed by the narrowest selected ancestor."""
        params = self.request.GET
        level_1 = params.get('level_1')
        level_2 = params.get('level_2')
        if level_2:
            level_2_ids = [level_2]
        elif level_1:
            level_2_ids = [loc['id'] for loc in self.get_level_2s([level_1])]
        else:
            # No ancestor selected: don't narrow at all.
            level_2_ids = None
        return [(loc['id'], loc['name']) for loc in self.get_level_3s(level_2_ids)]
class LevelFourFilter(VectorLinkLocFilter):
    slug = 'level_4'
    label = gettext_lazy('Level 4')

    def _selected_level_3_ids(self):
        """Resolve the narrowest selected ancestor into level-3 ids (or None)."""
        params = self.request.GET
        level_3 = params.get('level_3')
        if level_3:
            return [level_3]
        level_2 = params.get('level_2')
        if level_2:
            return [loc['id'] for loc in self.get_level_3s([level_2])]
        level_1 = params.get('level_1')
        if level_1:
            level_2_ids = [loc['id'] for loc in self.get_level_2s([level_1])]
            return [loc['id'] for loc in self.get_level_3s(level_2_ids)]
        return None

    @property
    def options(self):
        """Level-4 locations, narrowed by any selected ancestor level."""
        data_types_by_tag = get_data_type_ids_by_tag(self.domain)
        level_4s = get_fixture_dicts(
            self.domain,
            data_types_by_tag["level_4_eco"],
            filter_in={'level_3_eco': self._selected_level_3_ids()},
            filter_out={'other': '1'},
        )
        return [(loc['id'], loc['name']) for loc in level_4s]
| bsd-3-clause | 60f92e41f5820bb46d60a35a8e743d8a | 30.417476 | 78 | 0.560569 | 3.1387 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/auditcare/utils/export.py | 1 | 6550 | import csv
from datetime import datetime, timedelta
from itertools import chain
from django.contrib.auth.models import User
from django.db.models import ForeignKey, Min
import attr
from dimagi.utils.parsing import string_to_datetime
from corehq.apps.users.models import Invitation, WebUser
from corehq.util.models import ForeignValue
from ..models import AccessAudit, NavigationEventAudit
def navigation_events_by_user(user, start_date=None, end_date=None):
    """Windowed iterator over a user's navigation events, optionally date-bounded."""
    date_filters = get_date_range_where(start_date, end_date)
    events = NavigationEventAudit.objects.filter(user=user, **date_filters)
    return AuditWindowQuery(events)
def write_log_events(writer, user, domain=None, override_user=None, start_date=None, end_date=None):
    """Write one CSV row per navigation event for ``user`` in the date range."""
    if start_date:
        start_date = string_to_datetime(start_date).replace(tzinfo=None)
    else:
        start_date = None
    if end_date:
        end_date = string_to_datetime(end_date).replace(tzinfo=None)
    else:
        end_date = None
    for event in navigation_events_by_user(user, start_date, end_date):
        # With no domain given, every event is written.
        if not domain or domain == event.domain:
            write_log_event(writer, event, override_user)
def write_log_event(writer, event, override_user=None):
    """Write a single navigation event as a CSV row, optionally renaming the user."""
    if override_user:
        event.user = override_user
    row = [
        event.event_date,
        event.user,
        event.domain,
        event.ip_address,
        event.request_method,
        event.request_path,
    ]
    writer.writerow(row)
def get_users_for_domain(domain):
    """Return ``(users, removed_users, super_users)`` username sets for ``domain``.

    ``removed_users`` once accepted an invitation but are no longer members;
    ``super_users`` are superusers who are not themselves domain members.
    """
    users = {u.username for u in WebUser.by_domain(domain)}
    super_users = {u['username'] for u in User.objects.filter(is_superuser=True).values('username')}
    # NOTE(review): invitation emails are compared against usernames below —
    # assumes usernames are email addresses; confirm.
    users_who_accepted_invitations = set(Invitation.objects.filter(
        is_accepted=True,
        domain=domain).values_list('email', flat=True)
    )
    removed_users = users_who_accepted_invitations - users
    super_users = super_users - users
    return users, removed_users, super_users
def get_all_log_events(start_date=None, end_date=None):
    """Chain windowed iterators over access and navigation audit events."""
    where = get_date_range_where(start_date, end_date)
    access_events = AuditWindowQuery(AccessAudit.objects.filter(**where))
    navigation_events = AuditWindowQuery(NavigationEventAudit.objects.filter(**where))
    return chain(access_events, navigation_events)
def get_domain_first_access_times(domains, start_date=None, end_date=None):
    """Query NavigationEventAudit events for _first event matching any of
    `domains` within each authenticated session_.
    NOTE: This function does _not_ query couch.
    NOTE: This function may return multiple "access events" from the same
    session (if multiple `domains` were accessed in the same session).
    Resulting SQL query:
    ```sql
    SELECT
        "user",
        domain,
        MIN(event_date) AS access_time
    FROM auditcare_navigationeventaudit
    WHERE (
        domain IN ( {domains} )
        AND event_date > {start_date}
        AND event_date <= {end_date}
        AND "user" IS NOT NULL
        AND session_key IS NOT NULL
    )
    GROUP BY ("user", domain, session_key)
    ORDER BY access_time ASC;
    ```
    """
    where = get_date_range_where(start_date, end_date)
    where["domain__in"] = domains
    where["user__isnull"] = False
    where["session_key__isnull"] = False
    # The values/annotate/values ordering below is load-bearing in Django:
    # the first .values() defines GROUP BY, the second defines SELECT.
    return (NavigationEventAudit.objects
            .values("user", "domain", "session_key")  # GROUP BY fields
            .annotate(access_time=Min("event_date"))
            .values("user", "domain", "access_time")  # SELECT fields
            .filter(**where)
            .order_by("access_time")
            .iterator())
def write_generic_log_event(writer, event):
    """Write either audit event type as a CSV row with a common column layout."""
    if event.doc_type == 'NavigationEventAudit':
        action, resource = event.request_method, event.request_path
    else:
        assert event.doc_type == 'AccessAudit'
        action, resource = event.access_type, event.path
    writer.writerow([
        event.event_date,
        event.doc_type,
        event.user,
        event.domain,
        event.ip_address,
        action,
        resource,
        event.description,
    ])
def write_export_from_all_log_events(file_obj, start, end):
    """Stream a CSV export of all audit events in [start, end] to ``file_obj``."""
    header = ['Date', 'Type', 'User', 'Domain', 'IP Address', 'Action', 'Resource', 'Description']
    writer = csv.writer(file_obj)
    writer.writerow(header)
    for event in get_all_log_events(start, end):
        write_generic_log_event(writer, event)
def get_date_range_where(start_date, end_date):
    """Build ORM filter kwargs for an inclusive event_date range.

    The lower bound uses ``__gt`` on the start *date* and the upper bound
    ``__lt`` on the day after the end date, matching the original behavior.
    """
    bounds = {}
    if start_date:
        bounds["event_date__gt"] = start_date.date()
    if end_date:
        bounds["event_date__lt"] = end_date.date() + timedelta(days=1)
    return bounds
@attr.s(cmp=False)
class AuditWindowQuery:
    """Iterate a large audit queryset in fixed-size windows.

    Avoids OFFSET pagination by filtering on the last seen event_date and
    excluding the ids already yielded for that date.
    """
    query = attr.ib()
    window_size = attr.ib(default=10000)
    def __iter__(self):
        """Windowed query generator using WHERE/LIMIT
        Adapted from https://github.com/sqlalchemy/sqlalchemy/wiki/WindowedRangeQuery
        """
        query = self.query
        last_date = None
        last_ids = set()
        while True:
            qry = query
            if last_date is not None:
                # Resume at the last seen date, skipping rows already yielded
                # (multiple events can share the same event_date).
                qry = query.filter(event_date__gte=last_date).exclude(id__in=last_ids)
            rec = None
            for rec in qry.order_by("event_date")[:self.window_size]:
                yield NoForeignQuery(rec)
                if rec.event_date != last_date:
                    last_date = rec.event_date
                    last_ids = {rec.id}
                else:
                    last_ids.add(rec.id)
            if rec is None:
                # An empty window means the queryset is exhausted.
                break
def get_foreign_names(model):
    """Names of all ForeignKey and ForeignValue fields on ``model``."""
    fk_names = {field.name for field in model._meta.fields
                if isinstance(field, ForeignKey)}
    return fk_names | set(ForeignValue.get_names(model))
@attr.s
class NoForeignQuery:
    """Raise an error if a foreign key field is accessed
    This is a hack to prevent downstream code from accessing related
    objects, inadvertently triggering many extra queries.
    See also: https://stackoverflow.com/questions/66496443
    If a need arises for downstream code to access related fields,
    `navigation_events_by_user` should be updated to use
    `query.select_related` and/or `query.prefetch_related`, and this
    class should be refactored accordingly.
    """
    _obj = attr.ib()
    def __attrs_post_init__(self):
        # Cache the wrapped model's foreign field names once per instance.
        self._fks = get_foreign_names(type(self._obj))
    def __getattr__(self, name):
        # Only invoked for names not found on the wrapper itself;
        # everything else is proxied to the wrapped record.
        if name in self._fks:
            raise ForeignKeyAccessError(name)
        return getattr(self._obj, name)
class ForeignKeyAccessError(AttributeError):
    """Raised when a foreign key field is accessed through NoForeignQuery."""
    pass
| bsd-3-clause | d92f3303c4defcad394173ad42ec53f8 | 30.642512 | 106 | 0.64229 | 3.751432 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/funding/migrations/0005_auto_20190604_1501.py | 1 | 1369 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-06-04 13:01
from __future__ import unicode_literals
import bluebottle.utils.fields
from decimal import Decimal
from django.db import migrations
import djmoney.models.fields
class Migration(migrations.Migration):
    # Auto-generated migration: converts donation/funding amount fields to
    # MoneyField/CurrencyField with EUR as the only configured currency.
    dependencies = [
        ('funding', '0004_auto_20190604_1501'),
    ]
    operations = [
        migrations.AlterField(
            model_name='donation',
            name='amount',
            field=bluebottle.utils.fields.MoneyField(currency_choices="[('EUR', u'Euro')]", decimal_places=2, default=Decimal('0.0'), max_digits=12),
        ),
        migrations.AlterField(
            model_name='donation',
            name='amount_currency',
            field=djmoney.models.fields.CurrencyField(choices=[(b'EUR', 'Euro')], default='EUR', editable=False, max_length=50),
        ),
        migrations.AlterField(
            model_name='funding',
            name='target',
            field=bluebottle.utils.fields.MoneyField(currency_choices="[('EUR', u'Euro')]", decimal_places=2, default=Decimal('0.0'), max_digits=12),
        ),
        migrations.AlterField(
            model_name='funding',
            name='target_currency',
            field=djmoney.models.fields.CurrencyField(choices=[(b'EUR', 'Euro')], default='EUR', editable=False, max_length=50),
        ),
    ]
| bsd-3-clause | df3324a0f1244a92a41e8116677ec3c7 | 35.026316 | 149 | 0.611395 | 3.792244 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/bb_projects/migrations/0001_initial.py | 2 | 2322 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-05-23 13:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the ProjectPhase and
    # ProjectTheme tables.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='ProjectPhase',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(max_length=200, unique=True)),
                ('name', models.CharField(max_length=100, unique=True)),
                ('description', models.CharField(blank=True, max_length=400)),
                ('sequence', models.IntegerField(help_text='For ordering phases.', unique=True)),
                ('active', models.BooleanField(default=True, help_text='Whether this phase is in use or has been discarded.')),
                ('editable', models.BooleanField(default=True, help_text='Whether the project owner can change the details of theproject.')),
                ('viewable', models.BooleanField(default=True, help_text='Whether this phase, and projects in it show up at the website')),
                ('owner_editable', models.BooleanField(default=False, help_text='The owner can manually select between these phases')),
            ],
            options={
                'ordering': ['sequence'],
            },
        ),
        migrations.CreateModel(
            name='ProjectTheme',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100, unique=True, verbose_name='name')),
                ('name_nl', models.CharField(max_length=100, unique=True, verbose_name='name NL')),
                ('slug', models.SlugField(max_length=100, unique=True, verbose_name='slug')),
                ('description', models.TextField(blank=True, verbose_name='description')),
                ('disabled', models.BooleanField(default=False, verbose_name='disabled')),
            ],
            options={
                'ordering': ['name'],
                'verbose_name': 'project theme',
                'verbose_name_plural': 'project themes',
            },
        ),
    ]
| bsd-3-clause | 617ce548f3e7483ca869d3df1be7a45e | 46.387755 | 141 | 0.580534 | 4.51751 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/collect/migrations/0002_auto_20210920_0917.py | 1 | 1432 | # Generated by Django 2.2.24 on 2021-09-20 07:17
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds API permissions/verbose names to
    # CollectActivity and a decimal "value" field to CollectContributor.
    dependencies = [
        ('collect', '0001_initial'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='CollectActivity',
            options={'permissions': (
                ('api_read_collectactivity', 'Can view collect activity through the API'),
                ('api_add_collectactivity', 'Can add collect activity through the API'),
                ('api_change_collectactivity', 'Can change collect activity through the API'),
                ('api_delete_collectactivity', 'Can delete collect activity through the API'),
                ('api_read_own_collectactivity', 'Can view own collect activity through the API'),
                ('api_add_own_collectactivity', 'Can add own collect activity through the API'),
                ('api_change_own_collectactivity', 'Can change own collect activity through the API'),
                ('api_delete_own_collectactivity', 'Can delete own collect activity through the API')
            ),
                'verbose_name': 'Collect Activity', 'verbose_name_plural': 'Collect Activities'},
        ),
        migrations.AddField(
            model_name='collectcontributor',
            name='value',
            field=models.DecimalField(blank=True, decimal_places=5, max_digits=12, null=True),
        ),
    ]
| bsd-3-clause | 8102dcefbd3de0967594a08e328ef0c1 | 43.75 | 102 | 0.61243 | 4.649351 | false | false | false | false |
dimagi/commcare-hq | corehq/motech/fhir/admin.py | 1 | 3276 | import json
from django.contrib import admin
from corehq.motech.fhir.models import (
FHIRImportConfig,
FHIRImportResourceProperty,
FHIRImportResourceType,
FHIRResourceProperty,
FHIRResourceType,
ResourceTypeRelationship,
)
class FHIRResourcePropertyInline(admin.TabularInline):
    """Inline editor for FHIR resource properties, showing the value source
    derived from case_property/jsonpath as a read-only JSON preview."""
    model = FHIRResourceProperty
    verbose_name_plural = 'FHIR resource properties'
    fields = ('calculated_value_source', 'value_source_config',)
    readonly_fields = ('calculated_value_source',)

    def calculated_value_source(self, obj):
        """Render the value-source config implied by the property's fields."""
        if not (obj.case_property and obj.jsonpath):
            return ''
        config = {
            'case_property': obj.case_property.name,
            'jsonpath': obj.jsonpath,
        }
        if obj.value_map:
            config['value_map'] = obj.value_map
        return json.dumps(config, indent=2)
class FHIRResourceTypeAdmin(admin.ModelAdmin):
    """Admin for case-type-to-FHIR-resource-type mappings (edit only)."""
    model = FHIRResourceType
    list_display = (
        'domain',
        'name',
        'case_type',
    )
    list_display_links = (
        'domain',
        'name',
        'case_type',
    )
    list_filter = ('domain',)
    # Allows for creating resource properties without having to deal
    # with domains.
    inlines = [FHIRResourcePropertyInline]
    def has_add_permission(self, request):
        # Domains are difficult to manage with this interface. Create
        # using the Data Dictionary, and edit in Admin.
        return False
class FHIRImportConfigAdmin(admin.ModelAdmin):
    """Admin for FHIR import configurations."""
    list_display = (
        'domain',
        'connection_settings',
        'frequency',
    )
    list_display_links = (
        'domain',
        'connection_settings',
        'frequency',
    )
    list_filter = ('domain',)
    # Avoid one query per row when rendering connection_settings.
    list_select_related = ('connection_settings',)
class FHIRImportResourcePropertyInline(admin.TabularInline):
    """Inline editor for import resource property value-source configs."""
    model = FHIRImportResourceProperty
    verbose_name_plural = 'FHIR Importer resource properties'
    fields = ('value_source_config',)
class FHIRImportResourceTypeAdmin(admin.ModelAdmin):
    """Admin for resource types imported from a remote FHIR API."""
    model = FHIRImportResourceType
    list_display = (
        'domain',
        'name',
        'case_type',
    )
    list_display_links = (
        'domain',
        'name',
        'case_type',
    )
    list_filter = ('import_config__domain',)
    list_select_related = ('import_config',)
    inlines = [FHIRImportResourcePropertyInline]
    def domain(self, obj):
        # Computed list_display column: domain lives on the import config.
        return obj.import_config.domain
class ResourceTypeRelationshipAdmin(admin.ModelAdmin):
    """Admin for parent/child relationships between import resource types."""
    model = ResourceTypeRelationship
    list_display = (
        'domain',
        'resource_type',
        'related_resource_type',
    )
    list_display_links = (
        'domain',
        'resource_type',
        'related_resource_type',
    )
    list_filter = ('resource_type__import_config__domain',)
    list_select_related = ('resource_type__import_config',)
    def domain(self, obj):
        # Computed list_display column, delegated to the resource type.
        return obj.resource_type.domain
admin.site.register(FHIRResourceType, FHIRResourceTypeAdmin)
admin.site.register(FHIRImportConfig, FHIRImportConfigAdmin)
admin.site.register(FHIRImportResourceType, FHIRImportResourceTypeAdmin)
admin.site.register(ResourceTypeRelationship, ResourceTypeRelationshipAdmin)
| bsd-3-clause | f2eb5808507439f37113fe236cdcae6a | 25.852459 | 76 | 0.653846 | 3.932773 | false | true | false | false |
onepercentclub/bluebottle | bluebottle/segments/tests/test_admin.py | 1 | 6150 | from django.contrib.admin.sites import AdminSite
from django.urls import reverse
from bluebottle.segments.admin import SegmentAdmin
from bluebottle.segments.models import Segment
from bluebottle.segments.tests.factories import SegmentTypeFactory, SegmentFactory
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from bluebottle.test.utils import BluebottleAdminTestCase
from bluebottle.time_based.admin import DateActivityAdmin
from bluebottle.time_based.models import DateActivity
from bluebottle.time_based.tests.factories import DateActivityFactory
class TestSegmentAdmin(BluebottleAdminTestCase):
    """Admin tests for segments on activities and segment type listings."""
    extra_environ = {}
    csrf_checks = False
    setup_auth = True
    def setUp(self):
        super(TestSegmentAdmin, self).setUp()
        self.app.set_user(self.superuser)
        self.client.force_login(self.superuser)
        self.site = AdminSite()
        self.segment_admin = SegmentAdmin(Segment, self.site)
        self.event_admin = DateActivityAdmin(DateActivity, self.site)
    def test_activity_segment_admin(self):
        """Segment fields only appear on an activity once segments exist."""
        activity = DateActivityFactory.create()
        activity_url = reverse('admin:time_based_dateactivity_change', args=(activity.id,))
        response = self.client.get(activity_url)
        self.assertNotContains(response, 'Segments')
        self.assertNotContains(response, 'Department:')
        segment_type = SegmentTypeFactory.create(name="Department")
        SegmentFactory.create_batch(5, segment_type=segment_type)
        response = self.client.get(activity_url)
        self.assertContains(response, 'Segments')
        self.assertContains(response, 'Department:')
    def test_segment_admin(self):
        """Segment type pages show their segments and segment counts."""
        segment_type = SegmentTypeFactory.create(name='Job title')
        SegmentFactory.create_batch(5, segment_type=segment_type)
        segment_url = reverse('admin:segments_segmenttype_change', args=(segment_type.id,))
        response = self.client.get(segment_url)
        self.assertContains(response, 'Segment')
        list_url = reverse('admin:segments_segmenttype_changelist')
        response = self.client.get(list_url)
        self.assertContains(response, 'Number of segments')
        self.assertContains(response, 'Job title')
    def test_segment_email_domain(self):
        """Email domains entered in the admin form are saved as a list."""
        segment_type = SegmentTypeFactory.create()
        segment = SegmentFactory.create(segment_type=segment_type)
        segment_url = reverse('admin:segments_segment_change', args=(segment.id, ))
        page = self.app.get(segment_url)
        form = page.forms['segment_form']
        form['email_domains'] = 'test.com'
        page = form.submit()
        segment.refresh_from_db()
        self.assertEqual(segment.email_domains, ['test.com'])
class TestSegmentTypeAdmin(BluebottleAdminTestCase):
    """Admin tests for marking segment types as required member fields."""
    extra_environ = {}
    csrf_checks = False
    setup_auth = True
    def setUp(self):
        super(TestSegmentTypeAdmin, self).setUp()
        self.app.set_user(self.superuser)
        self.client.force_login(self.superuser)
        self.site = AdminSite()
    def test_required_segment_types_no_segments(self):
        """Member settings reflect which segment types are marked required."""
        member_settings_url = reverse('admin:members_memberplatformsettings_change')
        page = self.app.get(member_settings_url)
        self.assertFalse('Mark segment types as required' in page.text)
        SegmentTypeFactory.create(name='Department')
        SegmentTypeFactory.create(name='Hobbies')
        page = self.app.get(member_settings_url)
        self.assertTrue('Required fields' in page.text)
        self.assertTrue('no segment types are marked as required' in page.text)
        # Mark both segment types as required via their admin change forms.
        page = page.click('segment type overview')
        page = page.click('Department')
        form = page.forms[0]
        form.fields['required'][0].checked = True
        page = form.submit().follow()
        page = page.click('Hobbies')
        form = page.forms[0]
        form.fields['required'][0].checked = True
        page = form.submit().follow()
        self.assertTrue(page.forms[0]['form-0-required'].checked)
        self.assertTrue(page.forms[0]['form-1-required'].checked)
        page = self.app.get(member_settings_url)
        self.assertFalse('no segment types are marked as required' in page.text)
        self.assertTrue('<b>Department</b>' in page.text)
        self.assertTrue('<b>Hobbies</b>' in page.text)
class TestMemberSegmentAdmin(BluebottleAdminTestCase):
    """Admin tests for assigning segments to members."""
    extra_environ = {}
    csrf_checks = False
    setup_auth = True
    def setUp(self):
        super(TestMemberSegmentAdmin, self).setUp()
        self.app.set_user(self.superuser)
        self.site = AdminSite()
        department = SegmentTypeFactory.create(name='department')
        self.engineering = SegmentFactory.create(name='engineering', segment_type=department)
        SegmentFactory.create(name='product', segment_type=department)
        title = SegmentTypeFactory.create(name='title')
        SegmentFactory.create_batch(3, segment_type=title)
        self.member = BlueBottleUserFactory.create()
    def test_member_segments_admin(self):
        """Setting a member's segment also propagates to their activities."""
        activity = DateActivityFactory.create(owner=self.member)
        member_url = reverse('admin:members_member_change', args=(self.member.id,))
        page = self.app.get(member_url)
        form = page.forms['member_form']
        form['segment__department'] = self.engineering.id
        form.submit()
        self.member.refresh_from_db()
        self.assertEqual(self.member.segments.first(), self.engineering)
        activity.refresh_from_db()
        self.assertEqual(activity.segments.first(), self.engineering)
    def test_segment_email_domain(self):
        """Multiple email domains are saved in order.
        NOTE(review): largely duplicates TestSegmentAdmin.test_segment_email_domain;
        consider consolidating."""
        segment_type = SegmentTypeFactory.create()
        segment = SegmentFactory.create(segment_type=segment_type)
        segment_url = reverse('admin:segments_segment_change', args=(segment.id, ))
        page = self.app.get(segment_url)
        form = page.forms['segment_form']
        form['email_domains'] = ['test.com', 'test2.com']
        form.submit()
        segment.refresh_from_db()
        self.assertEqual(segment.email_domains[0], 'test.com')
        self.assertEqual(segment.email_domains[1], 'test2.com')
| bsd-3-clause | 6d00e1b923b3e6e5dd89e73609a42ca4 | 40.275168 | 93 | 0.684065 | 3.972868 | false | true | false | false |
dimagi/commcare-hq | corehq/sql_db/jsonops.py | 1 | 3554 | """
JSON and JSONB functions and operators for PostgreSQL
Reference: https://www.postgresql.org/docs/10/functions-json.html
In particular, `JsonGet`, `JsonSet`, and `JsonDelete` can be nested to perform
multiple transformation operations on a JSONB value.
For example, the following expression renames the top-level "values" key to
"items" and removes the "score" key/value pair from a table column named
"json_data":
```py
transformed_data = JsonDelete(
JsonSet(
JsonDelete("json_data", "values"),
["items"],
JsonGet("json_data", "values"),
),
"score",
)
SomeModel.objects.filter(...).update(json_data=transformed_data)
# Example "json_data" before/after above transformation
# Before: {"values": [1, 2, 3], "other": "something", "score": 3}
# After: {"items": [1, 2, 3], "other": "something"}
```
"""
import json
from django.db.models.expressions import Expression, Func, Value
from django.db.models import JSONField
class JsonDelete(Func):
    """Delete item(s) from JSONB value
    A Django expression for the `-` operator as it pertains to JSONB values.
    Example: '{"a": "b", "c": "d"}'::jsonb - '{a,c}'::text[]
    :param expression: JSONB field name or value expression.
    :param *keys: Names of items to delete.
    """
    arg_joiner = " - "
    template = "%(expressions)s"
    arity = 2
    def __init__(self, expression, *keys):
        _validate(keys, [str])
        keys = list(keys)
        # Collapse nested JsonDelete expressions into a single `- text[]` op.
        if isinstance(expression, JsonDelete):
            expression, keys_value = expression.source_expressions
            keys = keys_value.value + keys
        super().__init__(expression, Value(keys), output_field=JSONField())
class JsonGet(Func):
    """Get item from JSON or JSONB value
    A Django expression for the `->` operator as it pertains to JSON and
    JSONB values.
    :param expression: JSON/JSONB field name or value expression.
    :param field: Name or index of item to get.
    """
    # `->` returns JSON (not text), so results can be nested further.
    arg_joiner = "->"
    template = "%(expressions)s"
    arity = 2
    def __init__(self, expression, field):
        super().__init__(expression, Value(field), output_field=JSONField())
class JsonSet(Func):
    """Replace value at path in JSONB value
    A Django expression for the `jsonb_set()` function.
    :param expression: JSONB field name or value expression.
    :param path: Sequence of keys or indexes representing the path of
        the value in `expression` to be replaced.
    :param new_value: JSON-serializable value or expression.
    :param create_missing: Add value if it is missing (default: True).
    """
    function = "jsonb_set"
    template = "%(function)s(%(expressions)s, %(create_missing)s)"
    arity = 3
    def __init__(self, expression, path, new_value, create_missing=True):
        _validate(path, (int, str))
        # jsonb_set takes a text[] path, so integer indexes are stringified.
        path_items = [_int_str(p) for p in path]
        if not isinstance(new_value, Expression):
            # Plain values are serialized so Postgres receives valid JSON.
            new_value = Value(json.dumps(new_value))
        super().__init__(
            expression,
            Value(path_items),
            new_value,
            output_field=JSONField(),
            create_missing="true" if create_missing else "false",
        )
def _validate(values, types):
types = tuple(types)
if not all(isinstance(i, types) for i in values):
expect = "|".join(t.__name__ for t in types)
got = ", ".join(type(v).__name__ for v in values)
raise ValueError(f"expected {expect} value(s), got ({got})")
def _int_str(arg):
return str(arg) if isinstance(arg, int) else arg
| bsd-3-clause | f19b45db08778c436a6b9f695dfcf9c0 | 30.451327 | 78 | 0.634778 | 3.741053 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/userreports/reports/builder/__init__.py | 1 | 2248 |
# Datatypes for well-known case properties; make_case_property_indicator()
# falls back to "string" for anything not listed here.
DEFAULT_CASE_PROPERTY_DATATYPES = {
    "name": "string",
    "modified_on": "datetime",
    "opened_on": "datetime",
    "owner_id": "string",
    "user_id": "string",
}
# Question type -> select style; values match the "select_style" options
# used by the question indicator builders (usage not visible here -- confirm).
FORM_QUESTION_DATATYPE_MAP = {
    "Select": "single",
    "MSelect": "multiple"
}
# (metadata property, question type) pairs offered as form metadata columns.
FORM_METADATA_PROPERTIES = [
    ('username', 'Text'),
    ('userID', 'Text'),
    ('timeStart', 'DateTime'),
    ('timeEnd', 'DateTime'),
    ('deviceID', 'Text'),
]
def make_case_property_indicator(property_name, column_id=None, datatype=None):
    """
    Build the data source indicator configuration (a dict) for one case
    property.  Index case references written as ``parent/host/foo`` are
    expanded into nested ``indexed_case`` expressions.
    """
    datatype = datatype or DEFAULT_CASE_PROPERTY_DATATYPES.get(property_name, "string")
    *index_chain, leaf_property = property_name.split('/')
    expression = {
        'type': 'property_name',
        'property_name': leaf_property,
    }
    if index_chain:
        # Walk the index chain outward, wrapping each hop in an indexed_case;
        # the last segment in the chain ends up as the outermost index.
        case_expression = {
            'type': 'identity',
        }
        for index in index_chain:
            case_expression = {
                'type': 'indexed_case',
                'case_expression': case_expression,
                'index': index
            }
        expression = {
            'type': 'nested',
            'argument_expression': case_expression,
            'value_expression': expression
        }
    return {
        "type": "expression",
        "column_id": column_id or property_name,
        "datatype": datatype,
        "display_name": property_name,
        "expression": expression,
    }
def make_multiselect_question_indicator(question, column_id=None):
    """Return a ``choice_list`` indicator config for a multi-select question.

    *question* is expected to carry a ``value`` xpath (e.g. ``/data/q1``)
    and a list of ``options`` dicts, each with a ``value`` key.
    """
    path_parts = question['value'].split('/')
    indicator = {
        "type": "choice_list",
        "column_id": column_id or question['value'],
        "display_name": path_parts[-1],
        # drop the leading '' and root element, anchor the path under 'form'
        "property_path": ['form'] + path_parts[2:],
        "select_style": "multiple",
        "choices": [option['value'] for option in question['options']],
    }
    return indicator
def get_filter_format_from_question_type(question_type):
    """Map a form question type to its report filter format.

    Unrecognized types fall back to ``dynamic_choice_list``.
    """
    formats_by_type = {
        "Date": 'date',
        "DateTime": "date",
        "Text": "dynamic_choice_list",
        "Int": "numeric",
        "Double": "numeric",
    }
    return formats_by_type.get(question_type, "dynamic_choice_list")
| bsd-3-clause | ea62bcfd44fd6dc39c1d874ff8daafd1 | 25.761905 | 87 | 0.557384 | 3.810169 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/funding/migrations/0042_auto_20191104_1154.py | 1 | 1231 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-04 10:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    dependencies = [
        ('funding', '0041_payout_currency'),
    ]
    # Every model below receives the identical change to its ``created``
    # field, so the operation list is generated from one template.
    operations = [
        migrations.AlterField(
            model_name=model_name,
            name='created',
            field=models.DateTimeField(default=django.utils.timezone.now),
        )
        for model_name in ('budgetline', 'fundraiser', 'payment', 'payoutaccount', 'reward')
    ]
| bsd-3-clause | 26d2cfba8c75e584c855e0a1ce61400d | 29.02439 | 74 | 0.590577 | 4.509158 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/projects/migrations/0023_auto_20170323_1227.py | 1 | 1431 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-03-23 11:27
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated follow-up to 0022: adjusts metadata (help_text,
    # verbose_name, on_delete) on several Project fields.
    dependencies = [
        ('projects', '0022_project_reviewer'),
    ]
    operations = [
        migrations.AlterField(
            model_name='project',
            name='created',
            field=models.DateTimeField(auto_now_add=True, help_text='When this project was created.', verbose_name='created'),
        ),
        migrations.AlterField(
            model_name='project',
            name='organization',
            field=models.ForeignKey(blank=True, help_text='Project organization', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='projects', to='organizations.Organization', verbose_name='organization'),
        ),
        migrations.AlterField(
            model_name='project',
            name='reviewer',
            field=models.ForeignKey(blank=True, help_text='Project Reviewer', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='reviewer', to=settings.AUTH_USER_MODEL, verbose_name='reviewer'),
        ),
        migrations.AlterField(
            model_name='project',
            name='updated',
            field=models.DateTimeField(auto_now=True, verbose_name='updated'),
        ),
    ]
| bsd-3-clause | 1cede2dc112ecd4a71f17f8082a6c3f5 | 37.675676 | 225 | 0.638714 | 4.271642 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/hqadmin/views/data.py | 1 | 3591 | import json
from django.http import HttpResponse
from django.shortcuts import render
from django.utils.translation import gettext as _
from corehq.apps.domain.decorators import require_superuser
from corehq.apps.es.es_query import ESQuery
from corehq.apps.es.registry import get_registry
from corehq.apps.hqwebapp.doc_lookup import lookup_id_in_databases, get_databases, get_db_from_db_name
from corehq.form_processor.models import XFormInstance
from corehq.util.json import CommCareJSONEncoder
@require_superuser
def doc_in_es(request):
    """Superuser-only debug view: show which ES indices contain a doc id."""
    doc_id = request.GET.get("id")
    if not doc_id:
        return render(request, "hqadmin/doc_in_es.html", {})

    def to_json(doc):
        if not doc:
            return "NOT FOUND!"
        return json.dumps(doc, indent=4, sort_keys=True)

    found_indices = {}
    es_doc_type = None
    for index in get_registry():
        es_doc = lookup_doc_in_es(doc_id, index)
        if es_doc:
            found_indices[index] = to_json(es_doc)
            # keep the first non-empty doc_type encountered
            es_doc_type = es_doc_type or es_doc.get('doc_type')
    context = {
        "doc_id": doc_id,
        "es_info": {
            "status": "found" if found_indices else "NOT FOUND IN ELASTICSEARCH!",
            "doc_type": es_doc_type,
            "found_indices": found_indices,
        },
        "couch_info": raw_doc_lookup(doc_id),
    }
    return render(request, "hqadmin/doc_in_es.html", context)
def lookup_doc_in_es(doc_id, index):
    """Return the unique ES hit for *doc_id* in *index*, or None."""
    results = ESQuery(index).doc_id([doc_id]).run()
    if results.total != 1:
        return None
    return results.hits[0]
@require_superuser
def raw_doc(request):
    """Superuser-only debug view: fetch a raw document across databases.

    With ``?raw=...`` set, returns the JSON document directly (404 if
    missing); otherwise renders the lookup page.
    """
    doc_id = request.GET.get("id")
    db_name = request.GET.get("db_name", None)
    if db_name and "__" in db_name:
        # db_name may arrive prefixed (e.g. "couch__<name>"); keep the tail
        db_name = db_name.split("__")[-1]
    context = raw_doc_lookup(doc_id, db_name) if doc_id else {}
    if request.GET.get("raw", False):
        if 'doc' in context:
            return HttpResponse(context['doc'], content_type="application/json")
        return HttpResponse(json.dumps({"status": "missing"}),
                            content_type="application/json", status=404)
    context['all_databases'] = list(get_databases())
    return render(request, "hqadmin/raw_doc.html", context)
def raw_doc_lookup(doc_id, db_name=None):
    """Look up *doc_id* in one named database (or all of them) and package
    the per-database results, the serialized doc, and any form XML errors."""
    if db_name:
        candidate_dbs = [get_db_from_db_name(db_name)]
    else:
        candidate_dbs = list(get_databases().values())
    result, db_results = lookup_id_in_databases(doc_id, candidate_dbs, find_first=False)
    response = {"db_results": db_results}
    if result:
        serialized_doc = result.get_serialized_doc()
        if isinstance(result.doc, XFormInstance):
            # Forms get extra validation so broken XML is surfaced in the UI
            errors, raw_data = check_form_for_errors(result.doc, serialized_doc)
            response["errors"] = errors
            response["raw_data"] = raw_data
        payload = result.asdict()
        payload["doc"] = json.dumps(
            serialized_doc, indent=4, sort_keys=True, cls=CommCareJSONEncoder
        )
        response.update(payload)
    if db_name:
        response['selected_db'] = db_name
    return response
def check_form_for_errors(form, form_doc):
    """Inspect a serialized form doc for missing or invalid XML.

    Returns ``(errors, raw_data)``; *raw_data* is only populated when the
    XML payload exists but failed to parse.
    """
    if 'form' not in form_doc:
        return [_('Missing Form XML')], None
    if form_doc['form']:
        return [], None
    # XML present but did not parse: surface the raw payload for debugging
    raw_data = form.get_xml()
    if not isinstance(raw_data, str):
        try:
            raw_data = raw_data.decode()
        except (UnicodeDecodeError, AttributeError):
            raw_data = repr(raw_data)
    return [_('Form XML not valid. See "Raw Data" section below.')], raw_data
| bsd-3-clause | 12c1bd36a367be55a6f49945bedd8990 | 32.560748 | 102 | 0.617098 | 3.442953 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/payments_voucher/migrations/0002_auto_20210302_1417.py | 1 | 1084 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2021-03-02 13:17
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Removes the legacy Voucher/VoucherPayment models entirely.  The FK and
    # parent-link fields are dropped first so the model deletions succeed.
    dependencies = [
        ('payments_logger', '0002_auto_20210302_1417'),
        ('payments', '0007_auto_20210302_1417'),
        ('payments_voucher', '0001_initial'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='voucher',
            name='order',
        ),
        migrations.RemoveField(
            model_name='voucher',
            name='receiver',
        ),
        migrations.RemoveField(
            model_name='voucher',
            name='sender',
        ),
        migrations.RemoveField(
            model_name='voucherpayment',
            name='payment_ptr',
        ),
        migrations.RemoveField(
            model_name='voucherpayment',
            name='voucher',
        ),
        migrations.DeleteModel(
            name='Voucher',
        ),
        migrations.DeleteModel(
            name='VoucherPayment',
        ),
    ]
| bsd-3-clause | 449f27b968b603bdd616a1148bf2c3a6 | 24.209302 | 55 | 0.52952 | 4.42449 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/hqadmin/management/commands/cchq_prbac_revoke_privs.py | 1 | 5173 | import logging
from django.core.management.base import BaseCommand
from django_prbac.models import Role
from corehq.apps.accounting.models import SoftwarePlanVersion
from corehq.apps.accounting.utils import revoke_privs_for_grantees
from corehq.apps.hqadmin.management.commands.cchq_prbac_grandfather_privs import _confirm, _get_role_edition
from corehq.privileges import MAX_PRIVILEGES
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    help = 'Revoke privileges'
    def handle(self, privs, **kwargs):
        """Revoke the given privileges from every software plan role.

        Optionally skips named roles/editions, confirms interactively, and
        can delete the Role rows once a privilege is revoked everywhere.
        """
        dry_run = kwargs.get('dry_run')
        verbose = kwargs.get('verbose')
        noinput = kwargs.get('noinput')
        skip_edition = kwargs.get('skip_edition')
        delete_revoked_privs = kwargs.get('delete_privs')
        check_privs_exist = kwargs.get('check_privs_exist')
        logger.setLevel(logging.INFO if verbose else logging.WARNING)
        dry_run_tag = "[DRY RUN] " if dry_run else ""
        query = SoftwarePlanVersion.objects
        skipped_editions = []
        if skip_edition:
            skipped_editions = skip_edition.split(',')
            query = query.exclude(plan__edition__in=skipped_editions)
        all_role_slugs = set(
            query.distinct('role__slug').values_list('role__slug', flat=True)
        )
        all_plan_slugs = (
            all_role_slugs -
            set(MAX_PRIVILEGES) -  # no privileges should be in software plan roles, this is just a safeguard
            set(plan_slug.strip() for plan_slug in kwargs.get('skip', '').split(','))
        )
        # make sure that these roles are not attached to SoftwarePlanEditions
        # that they aren't meant to be attached to. e.g. the pro_plan_v1 role
        # attached to a SoftwarePlanVersion under the Advanced edition.
        # see https://dimagi-dev.atlassian.net/browse/SAASP-10124
        all_plan_slugs = [
            plan_slug for plan_slug in all_plan_slugs if _get_role_edition(plan_slug) not in skipped_editions
        ]
        if not dry_run and not noinput and not _confirm('Are you sure you want to revoke {} for {}?'.format(
            ', '.join(privs),
            ', '.join(all_plan_slugs),
        )):
            logger.error('Aborting')
            return
        if check_privs_exist and not all(priv in MAX_PRIVILEGES for priv in privs):
            logger.error('Not all specified privileges are valid: {}'.format(', '.join(privs)))
            return
        privs_to_revoke = ((role_slug, privs) for role_slug in all_plan_slugs)
        revoke_privs_for_grantees(privs_to_revoke, dry_run=dry_run, verbose=verbose)
        if delete_revoked_privs:
            if skipped_editions:
                # Deleting a Role that is still granted under a skipped
                # edition would orphan grants, so refuse in that case.
                logger.error(
                    "Cannot safely delete revoked privileges until ensuring they have been revoked for all "
                    "editions. If you are sure you want to delete the privilege, run again without specifying any "
                    "skipped editions."
                )
                return
            for priv in privs:
                try:
                    role_to_delete = Role.objects.get(slug=priv)
                    if not dry_run:
                        role_to_delete.delete()
                except Role.DoesNotExist:
                    logger.warning(f"{dry_run_tag}Role for privilege {priv} does not exist. Nothing to delete.")
                else:
                    logger.info(
                        f"{dry_run_tag}Deleted role for privilege {priv} from database. To ensure the role is not "
                        f"recreated, remove remaining references in the codebase."
                    )
    def add_arguments(self, parser):
        # NOTE(review): the trailing commas after several add_argument()
        # calls below create and discard one-element tuples; harmless, but
        # almost certainly unintended.
        parser.add_argument(
            'privs',
            nargs='+',
        )
        parser.add_argument(
            '--dry-run',
            action='store_true',
            default=False,
            help='Do not actually modify the database, just verbosely log what would happen'
        ),
        parser.add_argument(
            '--noinput',
            action='store_true',
            default=False,
            help='Whether to skip confirmation dialogs'
        ),
        parser.add_argument(
            "-s",
            "--skip",
            dest="skip",
            default="",
            help="A comma separated list of plan roles to skip if any",
        ),
        parser.add_argument(
            "--skip-edition",
            dest="skip_edition",
            help="A comma separated list of plan editions to skip if any",
        ),
        # NOTE(review): action='store_false' with default=True means passing
        # --verbose actually *disables* verbose logging, which contradicts
        # the help text -- confirm intent before changing CLI behavior.
        parser.add_argument(
            "--verbose",
            action='store_false',
            dest="verbose",
            help="Verbose logging",
            default=True,
        ),
        parser.add_argument(
            '--delete-privs',
            action='store_true',
            default=False,
            help='If privilege has been revoked for all plans, delete the Role object associated with it'
        ),
        parser.add_argument(
            '--check-privs-exist',
            action='store_true',
            default=True,
            help='Ensure all privileges are valid before attempting to revoke.'
        )
| bsd-3-clause | ba9a91dcf261ebbce2a4e74e50d756d7 | 37.894737 | 115 | 0.568916 | 4.141713 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/organizations/serializers.py | 1 | 2351 | from builtins import object
from rest_framework import serializers
from bluebottle.organizations.models import Organization, OrganizationContact
from bluebottle.bluebottle_drf2.serializers import (
ImageSerializer
)
from rest_framework_json_api.serializers import ModelSerializer
from bluebottle.utils.fields import ValidationErrorsField, RequiredErrorsField
from bluebottle.utils.serializers import NoCommitMixin, ResourcePermissionField
class OrganizationSerializer(NoCommitMixin, ModelSerializer):
    """JSON:API serializer for partner organizations.

    Only ``name`` is required; ``errors``/``required`` expose validation
    state in the response meta so clients can render inline feedback.
    """
    description = serializers.CharField(required=False, allow_blank=True)
    slug = serializers.SlugField(allow_null=True, required=False)
    name = serializers.CharField(required=True)
    website = serializers.CharField(allow_blank=True, required=False)
    logo = ImageSerializer(required=False, allow_null=True)
    permissions = ResourcePermissionField('organization_detail', view_args=('pk',))
    errors = ValidationErrorsField()
    required = RequiredErrorsField()
    included_serializers = {
        'owner': 'bluebottle.initiatives.serializers.MemberSerializer',
    }
    class Meta(object):
        model = Organization
        fields = (
            'id', 'name', 'slug', 'description', 'website', 'owner', 'logo',
            'required', 'errors',
        )
        meta_fields = ['created', 'updated', 'errors', 'required', 'permissions']
    class JSONAPIMeta(object):
        resource_name = 'organizations'
        included_resources = ['owner', ]
class OrganizationContactSerializer(NoCommitMixin, ModelSerializer):
    """JSON:API serializer for an organization's contact person.

    All contact fields are optional; ``errors``/``required`` mirror the
    validation meta exposed by OrganizationSerializer.
    """
    name = serializers.CharField(required=False, allow_blank=True, allow_null=True)
    email = serializers.CharField(required=False, allow_blank=True, allow_null=True)
    phone = serializers.CharField(required=False, allow_blank=True, allow_null=True)
    errors = ValidationErrorsField()
    required = RequiredErrorsField()
    included_serializers = {
        'owner': 'bluebottle.initiatives.serializers.MemberSerializer',
    }
    class Meta(object):
        model = OrganizationContact
        fields = (
            'id', 'name', 'email', 'phone',
            'required', 'errors',
        )
        meta_fields = ['created', 'updated', 'errors', 'required']
    class JSONAPIMeta(object):
        resource_name = 'organization-contacts'
        included_resources = ['owner', ]
| bsd-3-clause | 56e04abd647eccbda8266b19317a6c32 | 33.573529 | 84 | 0.702254 | 4.386194 | false | false | false | false |
dimagi/commcare-hq | corehq/util/workbook_json/excel_importer.py | 1 | 2744 | import logging
from datetime import datetime, timedelta
from django.conf import settings
from soil import DownloadBase
from corehq.util.workbook_json.excel import WorkbookJSONReader
class UnknownFileRefException(Exception):
    """Raised when a soil download file reference id cannot be resolved."""
    pass
class ExcelImporter(object):
    """
    Base class for `SingleExcelImporter` and `MultiExcelImporter`.
    This is not meant to be used directly.
    """
    def __init__(self, task, file_ref_id):
        """Open the uploaded workbook and set up progress reporting.

        :param task: celery task used for progress updates (may be falsy)
        :param file_ref_id: soil download reference id for the uploaded file
        :raises UnknownFileRefException: if the reference cannot be found
        """
        self.start = self.last_update = datetime.now()
        self.task = task
        self.progress = 0
        self.total_rows = 100
        if getattr(settings, 'CELERY_TASK_ALWAYS_EAGER', False):
            # Log progress since tasks are executed synchronously when
            # CELERY_TASK_ALWAYS_EAGER is true
            self.log = logging.getLogger(__name__).info
        else:
            self.log = lambda *a, **k: None
        if self.task:
            DownloadBase.set_progress(self.task, 0, 100)
        download_ref = DownloadBase.get(file_ref_id)
        if download_ref is None:
            # fixed typo in the user-facing message ("wih" -> "with")
            raise UnknownFileRefException("Could not find file with ref %s. It may have expired" % file_ref_id)
        self.workbook = WorkbookJSONReader(download_ref.get_filename())
    def mark_complete(self):
        """Force progress to 100% and log a completion summary."""
        if self.task:
            DownloadBase.set_progress(self.task, 100, 100)
        self.log("processed %s / %s in %s",
                 self.progress, self.total_rows, datetime.now() - self.start)
    def add_progress(self, count=1):
        """Advance progress by *count* rows; throttle logging to every 5s."""
        self.progress += count
        if self.task:
            DownloadBase.set_progress(self.task, self.progress, self.total_rows)
        if datetime.now() > self.last_update + timedelta(seconds=5):
            self.log("processed %s / %s", self.progress, self.total_rows)
            self.last_update = datetime.now()
class SingleExcelImporter(ExcelImporter):
    """
    Manage importing from an excel file with only one
    worksheet.
    """
    def __init__(self, task, file_ref_id):
        super(SingleExcelImporter, self).__init__(task, file_ref_id)
        # Single-sheet import: progress total is the first sheet's row count
        self.worksheet = self.workbook.worksheets[0]
        self.total_rows = self.worksheet.worksheet.max_row
class MultiExcelImporter(ExcelImporter):
    """
    Manage importing from an excel file with multiple
    relevant worksheets.
    """
    def __init__(self, task, file_ref_id):
        super(MultiExcelImporter, self).__init__(task, file_ref_id)
        self.worksheets = self.workbook.worksheets
        self.add_progress(2)  # Show the user we're on it
        # Summing max_row across all sheets can be slow on large workbooks
        total_rows = sum(ws.worksheet.max_row for ws in self.worksheets)
        # That took a non-negligible amount of time. Give the user some feedback.
        self.add_progress(3)
        self.total_rows = total_rows
| bsd-3-clause | 40fd0362c964e1fe02535555b5a82edd | 33.3 | 110 | 0.64395 | 3.784828 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/looker/migrations/0003_init_looker_embeds.py | 1 | 1212 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-06-06 10:25
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def generate_looker_embeds(apps, schema_editor):
    """Wipe all LookerEmbed rows and reload the embed fixtures, picking
    extra fixtures based on which project create types are enabled."""
    ProjectPlatformSettings = apps.get_model('projects', 'ProjectPlatformSettings')
    project_settings, created = ProjectPlatformSettings.objects.get_or_create()
    LookerEmbed = apps.get_model('looker', 'LookerEmbed')
    LookerEmbed.objects.all().delete()
    call_command('loaddata', 'looker_projects')
    if 'sourcing' in project_settings.create_types:
        call_command('loaddata', 'looker_activities')
    if 'funding' in project_settings.create_types:
        call_command('loaddata', 'looker_giving')
def dummy(apps, schema_editor):
    """No-op reverse migration."""
    pass
class Migration(migrations.Migration):
    dependencies = [
        ('looker', '0002_auto_20180328_1054'),
        ('projects', '0054_auto_20171122_1415'),
        ('social_django', '0002_add_related_name'),  # No idea why, but this fixes a nasty error about pending trigger events during the migrations
    ]
    # NOTE(review): the only operation is commented out, so this migration is
    # currently a no-op -- confirm that disabling the fixture load was
    # intentional before re-enabling it.
    operations = [
        # migrations.RunPython(generate_looker_embeds, dummy)
    ]
| bsd-3-clause | ce451cd38dac0da1069ebbbbb9379485 | 28.560976 | 147 | 0.693894 | 3.61791 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/payouts/migrations/0005_auto_20160721_1114.py | 1 | 1163 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-21 09:14
from __future__ import unicode_literals
from django.db import migrations
def add_financial_group(apps, schema_editor):
    """Create a 'Financial' auth group that can change project payouts."""
    ProjectPayout = apps.get_model('payouts', 'ProjectPayout')
    # Imported here because auth/contenttypes models must not be loaded at
    # module import time inside a migration.
    from django.contrib.auth.models import Group, Permission
    from django.contrib.contenttypes.models import ContentType
    new_group, created = Group.objects.get_or_create(name='Financial')
    ct = ContentType.objects.get_for_model(ProjectPayout)
    # Attach the 'can change project payout' permission to the new group
    permission, created = Permission.objects.get_or_create(
        codename='change_projectpayout',
        name='Can change project payout',
        content_type=ct
    )
    new_group.permissions.add(permission)
def remove_financial_group(a, b):
    """Reverse migration: delete the 'Financial' auth group."""
    from django.contrib.auth.models import Group
    Group.objects.get(name='Financial').delete()
class Migration(migrations.Migration):
    dependencies = [
        ('payouts', '0004_projectpayout_currency'),
    ]
    operations = [
        migrations.RunPython(add_financial_group, remove_financial_group),
    ]
| bsd-3-clause | dcfa1c1d79bc98abec742fbe132c78b0 | 28.075 | 77 | 0.700774 | 3.73955 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/app_manager/tests/test_bulk_ui_translation.py | 1 | 4676 | from distutils.version import StrictVersion
from io import BytesIO
from django.test import SimpleTestCase
from couchexport.export import export_raw
from corehq.apps.app_manager.models import Application
from corehq.apps.app_manager.ui_translations import (
get_default_translations_for_download,
process_ui_translation_upload,
)
class BulkUiTranslation(SimpleTestCase):
    """Tests for bulk UI translation upload processing."""
    def setUp(self):
        super(BulkUiTranslation, self).setUp()
        self.app = Application.new_app("test-domain", "Test App")
        self.app.langs = ["en", "fra"]
    def _build_translation_download_file(self, headers, data=None):
        """Build an in-memory excel file in the bulk-translation format.

        When *data* is omitted, rows are seeded from the app's default
        translations.
        """
        if data is None:
            data = []
            # Fix: seeding from default translations must only happen when no
            # explicit data was supplied -- the append below would otherwise
            # blow up on the tuples callers pass in (see test_translation).
            translations = get_default_translations_for_download(self.app, 'latest')
            for translation_key, translation_value in translations.items():
                data.append((translation_key, translation_value))
        data = (('translations', tuple(data)),)
        temp = BytesIO()
        export_raw(headers, data, temp)
        temp.seek(0)  # .read() is used somewhere so this needs to be at the beginning
        return temp
    def test_no_change(self):
        headers = (('translations', ('property', 'en')),)
        f = self._build_translation_download_file(headers)
        translations, error_properties, warnings = process_ui_translation_upload(self.app, f)
        self.assertEqual(dict(translations), {})
        self.assertEqual(len(error_properties), 0)
    def test_translation(self):
        headers = (('translations', ('property', 'en', 'fra')),)
        # on an update to 2.31.0, the keys date.tomorrow, entity.sort.title,
        # activity.locationcapture.Longitude were no longer present in messages_en-2.txt
        # They were replaced by other randomly selected strings in that file.
        # Leaving this note here in case this issue comes up again. --B
        data = (('key.manage.title', 'wobble', ''),
                ('bulk.send.dialog.progress', 'wabble ${0}', ''),
                ('connection.test.access.settings', '', 'wibble'),
                ('bulk.send.dialog.progress', '', 'wubble ${0}'),
                ('home.start.demo', 'Ding', 'Dong'),
                ('unknown_string', 'Ding', 'Dong'),
                ('updates.found', 'I am missing a parameter', 'I have ${0} an ${1} extra ${2} parameter'),
                ('sync.progress', 'It is fine to ${1} reorder ${0} params', 'But use ${x0} correct syntax $ {1}'))
        f = self._build_translation_download_file(headers, data)
        translations, error_properties, warnings = process_ui_translation_upload(self.app, f)
        self.assertEqual(
            dict(translations),
            {
                'en': {
                    'key.manage.title': 'wobble',
                    'bulk.send.dialog.progress': 'wabble ${0}',
                    'home.start.demo': 'Ding',
                    'unknown_string': 'Ding',
                    'updates.found': 'I am missing a parameter',
                    'sync.progress': 'It is fine to ${1} reorder ${0} params',
                },
                'fra': {
                    'connection.test.access.settings': 'wibble',
                    'bulk.send.dialog.progress': 'wubble ${0}',
                    'home.start.demo': 'Dong',
                    'unknown_string': 'Dong',
                    'updates.found': 'I have ${0} an ${1} extra ${2} parameter',
                    'sync.progress': 'But use ${x0} correct syntax $ {1}',
                }
            }
        )
        self.assertEqual(len(error_properties), 2)
        self.assertEqual([e.strip() for e in error_properties], [
            "Could not understand '${x0}' in fra value of 'sync.progress'.",
            "Could not understand '$ {1}' in fra value of 'sync.progress'.",
        ])
        self.assertEqual(len(warnings), 3)
        self.assertEqual([e.strip() for e in warnings], [
            "Property 'unknown_string' is not a known CommCare UI string, but we added it anyway.",
            "Property 'updates.found' should contain ${0}, ${1} but en value contains no parameters.",
            "Property 'updates.found' should contain ${0}, ${1} but fra value contains ${0}, ${1}, ${2}.",
        ])
        # test existing translations get updated correctly
        # NOTE(review): missing trailing comma makes this a single 3-string
        # tuple rather than a row list -- confirm whether
        # (('home.start.demo', 'change_1', 'change_2'),) was intended.
        data = (('home.start.demo', 'change_1', 'change_2'))
        f = self._build_translation_download_file(headers, data)
        translations, error_properties, warnings = process_ui_translation_upload(self.app, f)
        self.assertEqual(translations["fra"]["home.start.demo"], "change_2")
        self.assertEqual(translations["en"]["home.start.demo"], "change_1")
| bsd-3-clause | eb6cb6efd404ff888f9f23ea61601653 | 45.76 | 114 | 0.576133 | 4.076722 | false | true | false | false |
dimagi/commcare-hq | corehq/apps/userreports/migrations/0009_rename_ucr_tables.py | 1 | 2693 | import sys
from collections import defaultdict
from django.db import migrations
from corehq.apps.userreports.models import (
DataSourceConfiguration,
StaticDataSourceConfiguration,
)
from corehq.apps.userreports.util import (
LEGACY_UCR_TABLE_PREFIX,
get_table_name,
)
from corehq.sql_db.connections import connection_manager
from corehq.util.django_migrations import skip_on_fresh_install
# Last commit that still contains the `rename_ucr_tables` management command
# referenced by the failure message below.
GIT_COMMIT_WITH_MANAGEMENT_COMMAND = "8ec458ce9dd6a690c0b48bba07ffee2455f267d2"
# Printed (and the migration aborted) when legacy-named UCR tables are found.
AUTO_MIGRATE_FAILED_MESSAGE = """
A migration must be performed before this environment can be upgraded to the
latest version of CommCareHQ. To perform the migration you will need to do the following:
* Checkout an older version of CommCareHQ:
git checkout {commit}
* Stop all UCR pillow processes
* Run the following management commands:
python manage.py rename_ucr_tables create-views --verbose --execute --noconfirm
python manage.py rename_ucr_tables rename-tables --verbose --execute --noconfirm
""".format(commit=GIT_COMMIT_WITH_MANAGEMENT_COMMAND)
def table_exists(connection, table_name):
    """Return True if pg_tables has a row for *table_name* on *connection*."""
    rows = connection.execute("select 1 from pg_tables where tablename = %s", table_name)
    for _row in rows:
        return True
    return False
def get_legacy_table_name(data_source):
    """Return the pre-rename (legacy-prefixed) table name for *data_source*."""
    return get_table_name(
        data_source.domain,
        data_source.table_id,
        max_length=63,
        prefix=LEGACY_UCR_TABLE_PREFIX,
    )
def _data_sources_by_engine_id():
    """Group every static and dynamic data source config by its engine id."""
    by_engine_id = defaultdict(list)
    # static configs first, then dynamic, matching the original ordering
    for config_class in (StaticDataSourceConfiguration, DataSourceConfiguration):
        for data_source in config_class.all():
            by_engine_id[data_source['engine_id']].append(data_source)
    return by_engine_id
@skip_on_fresh_install
def _assert_migrated(apps, schema_editor):
    """Abort the migration if any UCR table still uses its legacy name.

    For each engine, if a data source's legacy-named table exists while the
    renamed table does not, the rename management command was never run:
    print the manual instructions and exit non-zero.
    """
    for engine_id, data_sources in _data_sources_by_engine_id().items():
        with connection_manager.get_engine(engine_id).begin() as conn:
            for data_source in data_sources:
                legacy_table_name = get_legacy_table_name(data_source)
                new_table_name = get_table_name(data_source.domain, data_source.table_id)
                if (
                    table_exists(conn, legacy_table_name)
                    and not table_exists(conn, new_table_name)
                ):
                    print("")
                    print(AUTO_MIGRATE_FAILED_MESSAGE)
                    sys.exit(1)
class Migration(migrations.Migration):
    dependencies = [
        ('userreports', '0008_new_table_name_views'),
    ]
    operations = [
        # elidable=True: this check can be dropped if migrations are squashed
        migrations.RunPython(_assert_migrated, migrations.RunPython.noop, elidable=True)
    ]
| bsd-3-clause | 4f1cec3532f4cb7d0ec5eb3e85ac299a | 30.313953 | 95 | 0.683253 | 3.644114 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/domain/forms.py | 1 | 105740 | import datetime
import io
import json
import logging
import uuid
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import SetPasswordForm
from django.contrib.auth.hashers import UNUSABLE_PASSWORD_PREFIX
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import transaction
from django.forms.fields import (
BooleanField,
CharField,
ChoiceField,
Field,
ImageField,
IntegerField,
SelectMultiple,
)
from django.forms.widgets import Select
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.encoding import force_bytes, smart_str
from django.utils.functional import cached_property, lazy
from django.utils.http import urlsafe_base64_encode
from django.utils.safestring import mark_safe
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy, gettext_noop
from captcha.fields import ReCaptchaField
from crispy_forms import bootstrap as twbscrispy
from crispy_forms import layout as crispy
from crispy_forms.bootstrap import StrictButton
from crispy_forms.layout import Layout, Submit
from dateutil.relativedelta import relativedelta
from django_countries.data import COUNTRIES
from memoized import memoized
from PIL import Image
from corehq import privileges
from corehq.apps.accounting.exceptions import SubscriptionRenewalError
from corehq.apps.accounting.models import (
BillingAccount,
BillingAccountType,
BillingContactInfo,
CreditAdjustmentReason,
CreditLine,
Currency,
DefaultProductPlan,
EntryPoint,
FeatureType,
FundingSource,
PreOrPostPay,
ProBonoStatus,
SoftwarePlanEdition,
Subscription,
SubscriptionAdjustmentMethod,
SubscriptionType,
)
from corehq.apps.accounting.utils import (
cancel_future_subscriptions,
domain_has_privilege,
get_account_name_from_default_name,
is_downgrade,
log_accounting_error,
)
from corehq.apps.app_manager.const import (
AMPLIFIES_NO,
AMPLIFIES_NOT_SET,
AMPLIFIES_YES,
)
from corehq.apps.app_manager.dbaccessors import (
get_app,
get_apps_in_domain,
get_brief_apps_in_domain,
get_version_build_id,
)
from corehq.apps.app_manager.exceptions import BuildNotFoundException
from corehq.apps.app_manager.models import (
Application,
AppReleaseByLocation,
LatestEnabledBuildProfiles,
RemoteApp,
)
from corehq.apps.callcenter.views import (
CallCenterOptionsController,
CallCenterOwnerOptionsView,
)
from corehq.apps.domain.auth import get_active_users_by_email
from corehq.apps.domain.extension_points import validate_password_rules
from corehq.apps.domain.models import (
AREA_CHOICES,
BUSINESS_UNITS,
DATA_DICT,
LOGO_ATTACHMENT,
RESTRICTED_UCR_EXPRESSIONS,
SUB_AREA_CHOICES,
AllowedUCRExpressionSettings,
OperatorCallLimitSettings,
SMSAccountConfirmationSettings,
TransferDomainRequest,
all_restricted_ucr_expressions,
)
from corehq.apps.hqwebapp import crispy as hqcrispy
from corehq.apps.hqwebapp.crispy import HQFormHelper
from corehq.apps.hqwebapp.fields import MultiCharField
from corehq.apps.hqwebapp.tasks import send_html_email_async
from corehq.apps.hqwebapp.widgets import (
BootstrapCheckboxInput,
GeoCoderInput,
Select2Ajax,
)
from corehq.apps.sms.phonenumbers_helper import parse_phone_number
from corehq.apps.users.models import CouchUser, WebUser
from corehq.toggles import (
HIPAA_COMPLIANCE_CHECKBOX,
MOBILE_UCR,
SECURE_SESSION_TIMEOUT,
TWO_STAGE_USER_PROVISIONING_BY_SMS,
)
from corehq.util.timezones.fields import TimeZoneField
from corehq.util.timezones.forms import TimeZoneChoiceField
mark_safe_lazy = lazy(mark_safe, str) # TODO: Use library method
# used to resize uploaded custom logos, aspect ratio is preserved
LOGO_SIZE = (211, 32)
def tf_choices(true_txt, false_txt):
    """Return ((value, label), ...) pairs for a false/true select widget."""
    return (
        ('false', false_txt),
        ('true', true_txt),
    )
class ProjectSettingsForm(forms.Form):
    """
    Form for updating a user's project settings
    """
    global_timezone = forms.CharField(
        initial="UTC",
        label="Project Timezone",
        widget=forms.HiddenInput
    )
    override_global_tz = forms.BooleanField(
        initial=False,
        required=False,
        label="",
        widget=BootstrapCheckboxInput(
            inline_label=gettext_lazy("Override project's timezone setting just for me.")
        )
    )
    user_timezone = TimeZoneChoiceField(
        label="My Timezone",
        initial=global_timezone.initial
    )
    def __init__(self, *args, **kwargs):
        """Build the crispy-forms layout; the user_timezone field is only
        shown client-side when the override checkbox is ticked (the
        'data_bind' attributes are Knockout-style bindings -- presumably
        consumed by the settings page JS; confirm against the template)."""
        super(ProjectSettingsForm, self).__init__(*args, **kwargs)
        self.helper = hqcrispy.HQFormHelper(self)
        self.helper.form_id = 'my-project-settings-form'
        self.helper.all().wrap_together(crispy.Fieldset, _('My Timezone'))
        self.helper.layout = crispy.Layout(
            crispy.Fieldset(
                _('My Timezone'),
                crispy.Field('global_timezone', css_class='input-xlarge'),
                twbscrispy.PrependedText(
                    'override_global_tz',
                    '',
                    id='override_global_tz',
                    data_bind='checked: override_tz, event: {change: updateForm}'
                ),
                crispy.Div(
                    crispy.Field(
                        'user_timezone',
                        css_class='input-xlarge',
                        data_bind='event: {change: updateForm}'
                    ),
                    data_bind='visible: override_tz'
                )
            ),
            hqcrispy.FormActions(
                StrictButton(
                    _("Update My Settings"),
                    type="submit",
                    css_id="update-proj-settings",
                    css_class='btn-primary',
                    data_bind="disable: disableUpdateSettings"
                )
            )
        )
    def clean_user_timezone(self):
        """Validate the chosen timezone via TimeZoneField's validators."""
        data = self.cleaned_data['user_timezone']
        timezone_field = TimeZoneField()
        timezone_field.run_validators(data)
        return smart_str(data)
    def save(self, user, domain):
        """Persist the timezone preference on the user's domain membership.

        Uses the personal timezone when the override box is checked,
        otherwise the project-wide one.  Always returns True.
        """
        timezone = self.cleaned_data['global_timezone']
        override = self.cleaned_data['override_global_tz']
        if override:
            timezone = self.cleaned_data['user_timezone']
        dm = user.get_domain_membership(domain)
        dm.timezone = timezone
        dm.override_global_tz = override
        user.save()
        return True
class TransferDomainFormErrors(object):
    """Shared validation messages for the domain-transfer forms below."""
    USER_DNE = gettext_lazy('The user being transferred to does not exist')
    DOMAIN_MISMATCH = gettext_lazy('Mismatch in domains when confirming')
class TransferDomainForm(forms.ModelForm):
    """ModelForm that initiates transferring a project to another web user.

    The submitter must retype the project name as a confirmation step, and
    the recipient must be an existing web user.
    """

    class Meta(object):
        model = TransferDomainRequest
        fields = ['domain', 'to_username']

    def __init__(self, domain, from_username, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.current_domain = domain
        self.from_username = from_username

        self.fields['domain'].label = _('Type the name of the project to confirm')
        self.fields['to_username'].label = _('New owner\'s CommCare username')

        self.helper = hqcrispy.HQFormHelper()
        self.helper.layout = crispy.Layout(
            'domain',
            'to_username',
            StrictButton(
                _("Transfer Project"),
                type="submit",
                css_class='btn-danger',
            )
        )

    def clean_domain(self):
        """Require the typed project name to match the current project."""
        entered_domain = self.cleaned_data['domain']
        if entered_domain != self.current_domain:
            raise forms.ValidationError(TransferDomainFormErrors.DOMAIN_MISMATCH)
        return entered_domain

    def clean_to_username(self):
        """Require the recipient username to belong to an existing web user."""
        recipient = self.cleaned_data['to_username']
        if not WebUser.get_by_username(recipient):
            raise forms.ValidationError(TransferDomainFormErrors.USER_DNE)
        return recipient

    def save(self, commit=True):
        """Attach the initiating username to the request before saving."""
        instance = super().save(commit=False)
        instance.from_username = self.from_username
        if commit:
            instance.save()
        return instance
class SubAreaMixin(object):
    """Mixin providing cross-field validation for the area / sub_area pair."""

    def clean_sub_area(self):
        """Validate that the sub-area belongs to the selected area.

        Returns None when no sub-area was chosen; raises when a sub-area is
        given without an area, or does not belong to the chosen area.
        """
        area = self.cleaned_data['area']
        sub_area = self.cleaned_data['sub_area']

        if not sub_area:
            return None
        if not area:
            raise forms.ValidationError(_('You may not specify a sub area when the project has no specified '
                                          'area'))

        # Last matching area entry wins, mirroring the original lookup loop.
        matches = [entry["sub_areas"] for entry in DATA_DICT["area"] if entry["name"] == area]
        valid_sub_areas = matches[-1] if matches else []
        if sub_area not in valid_sub_areas:
            raise forms.ValidationError(_('This is not a valid sub-area for the area %s') % area)
        return sub_area
# Sentinel owner choices: instead of a fixed owner id, call center cases are
# owned by the mobile worker's own location, or by its parent location.
USE_LOCATION_CHOICE = "user_location"
USE_PARENT_LOCATION_CHOICE = 'user_parent_location'
class CallCenterOwnerWidget(Select2Ajax):
    """Select2 widget that renders a stored owner id as a display choice."""

    def set_domain(self, domain):
        self.domain = domain

    def render(self, name, value, attrs=None, renderer=None):
        # Translate the raw owner id into the (id, label) pair select2 expects.
        display_choice = CallCenterOptionsController.convert_owner_id_to_select_choice(value, self.domain)
        return super().render(name, display_choice, attrs=attrs, renderer=renderer)
class DomainGlobalSettingsForm(forms.Form):
    """Basic per-project settings: naming, timezone, logo, call center,
    mobile-UCR sync, SMS account confirmation, and operator call limits.

    Many declared fields are conditionally deleted in ``__init__`` depending
    on feature flags and project configuration, so not every field is always
    rendered or cleaned.
    """
    LOCATION_CHOICES = [USE_LOCATION_CHOICE, USE_PARENT_LOCATION_CHOICE]
    CASES_AND_FIXTURES_CHOICE = "cases_and_fixtures"
    CASES_ONLY_CHOICE = "cases_only"
    hr_name = forms.CharField(
        label=gettext_lazy("Project Name"),
        help_text=gettext_lazy("This name will appear in the upper right corner "
                               "when you are in this project. Changing this name "
                               "will not change the URL of the project.")
    )
    project_description = forms.CharField(
        label=gettext_lazy("Project Description"),
        widget=forms.Textarea(attrs={"class": "vertical-resize"}),
        required=False,
        max_length=1000,
        help_text=gettext_lazy(
            "Please provide a short description of your project (Max 1000 characters)."
        )
    )
    default_timezone = TimeZoneChoiceField(label=gettext_noop("Default Timezone"), initial="UTC")
    default_geocoder_location = Field(
        widget=GeoCoderInput(attrs={'placeholder': gettext_lazy('Select a location')}),
        label=gettext_noop("Default project location"),
        required=False,
        help_text=gettext_lazy("Please select your project's default location.")
    )
    logo = ImageField(
        label=gettext_lazy("Custom Logo"),
        required=False,
        help_text=gettext_lazy("Upload a custom image to display instead of the "
                               "CommCare HQ logo. It will be automatically resized to "
                               "a height of 32 pixels.")
    )
    delete_logo = BooleanField(
        label=gettext_lazy("Delete Logo"),
        required=False,
        help_text=gettext_lazy("Delete your custom logo and use the standard one.")
    )
    call_center_enabled = BooleanField(
        label=gettext_lazy("Call Center Application"),
        required=False,
        help_text=gettext_lazy("Call Center mode is a CommCareHQ module for managing "
                               "call center workflows. It is still under "
                               "active development. Do not enable for your domain unless "
                               "you're actively piloting it.")
    )
    call_center_type = ChoiceField(
        label=gettext_lazy("Call Center Type"),
        initial=CASES_AND_FIXTURES_CHOICE,
        choices=[
            (CASES_AND_FIXTURES_CHOICE, "Create cases and indicators"),
            (CASES_ONLY_CHOICE, "Create just cases"),
        ],
        help_text=gettext_lazy(
            """
            If "Create cases and indicators" is selected, each user will have a case associated with it,
            and fixtures will be synced containing indicators about each user. If "Create just cases"
            is selected, the fixtures will not be created.
            """
        ),
        required=False,
    )
    call_center_case_owner = Field(
        widget=CallCenterOwnerWidget(attrs={'placeholder': gettext_lazy('Select an Owner...')}),
        label=gettext_lazy("Call Center Case Owner"),
        required=False,
        help_text=gettext_lazy("Select the person who will be listed as the owner "
                               "of all cases created for call center users.")
    )
    call_center_case_type = CharField(
        label=gettext_lazy("Call Center Case Type"),
        required=False,
        help_text=gettext_lazy("Enter the case type to be used for FLWs in call center apps")
    )
    mobile_ucr_sync_interval = IntegerField(
        label=gettext_lazy("Default mobile report sync delay (hours)"),
        required=False,
        help_text=gettext_lazy(
            """
            Default time to wait between sending updated mobile report data to users.
            Can be overridden on a per user basis.
            """
        )
    )
    confirmation_link_expiry = IntegerField(
        label=gettext_lazy("Account confirmation link expiry"),
        required=True,
        help_text=gettext_lazy(
            """
            Default time (in days) for which account confirmation link will be valid.
            """
        )
    )
    operator_call_limit = IntegerField(
        label=gettext_lazy("Call limit"),
        required=True,
        help_text=gettext_lazy(
            """
            Limit on number of calls allowed to an operator for each call type.
            """
        )
    )
    confirmation_sms_project_name = CharField(
        label=gettext_lazy("Confirmation SMS project name"),
        required=True,
        help_text=gettext_lazy("Name of the project to be used in SMS sent for account confirmation to users.")
    )

    def __init__(self, *args, **kwargs):
        """Expects ``domain`` (a project object) and ``can_use_custom_logo``
        in kwargs; removes fields the current project cannot use."""
        self.project = kwargs.pop('domain', None)
        self.domain = self.project.name
        self.can_use_custom_logo = kwargs.pop('can_use_custom_logo', False)
        super(DomainGlobalSettingsForm, self).__init__(*args, **kwargs)
        self.helper = hqcrispy.HQFormHelper(self)
        # NOTE(review): these positional layout indexes (5 = delete_logo,
        # 6 = call_center_enabled) assume the field declaration order above --
        # verify if fields are ever added/reordered.
        self.helper[5] = twbscrispy.PrependedText('delete_logo', '')
        self.helper[6] = twbscrispy.PrependedText('call_center_enabled', '')
        self.helper.all().wrap_together(crispy.Fieldset, _('Edit Basic Information'))
        self.helper.layout.append(
            hqcrispy.FormActions(
                StrictButton(
                    _("Update Basic Info"),
                    type="submit",
                    css_class='btn-primary',
                )
            )
        )
        self.fields['default_timezone'].label = gettext_lazy('Default timezone')
        if not self.can_use_custom_logo:
            del self.fields['logo']
            del self.fields['delete_logo']
        if self.project:
            if not self.project.call_center_config.enabled:
                # No call center: drop every call-center-related field.
                del self.fields['call_center_enabled']
                del self.fields['call_center_type']
                del self.fields['call_center_case_owner']
                del self.fields['call_center_case_type']
            else:
                owner_field = self.fields['call_center_case_owner']
                owner_field.widget.set_url(
                    reverse(CallCenterOwnerOptionsView.url_name, args=(self.domain,))
                )
                owner_field.widget.set_domain(self.domain)
        if not MOBILE_UCR.enabled(self.domain):
            del self.fields['mobile_ucr_sync_interval']
        self._handle_call_limit_visibility()
        self._handle_account_confirmation_by_sms_settings()

    def _handle_account_confirmation_by_sms_settings(self):
        """Remove SMS-confirmation fields when the feature flag is off;
        otherwise seed initial values and attach range/length validation."""
        if not TWO_STAGE_USER_PROVISIONING_BY_SMS.enabled(self.domain):
            del self.fields['confirmation_link_expiry']
            del self.fields['confirmation_sms_project_name']
        else:
            settings_obj = SMSAccountConfirmationSettings.get_settings(self.domain)
            min_value_expiry = SMSAccountConfirmationSettings.CONFIRMATION_LINK_EXPIRY_DAYS_MINIMUM
            max_value_expiry = SMSAccountConfirmationSettings.CONFIRMATION_LINK_EXPIRY_DAYS_MAXIMUM
            self.fields['confirmation_link_expiry'].initial = settings_obj.confirmation_link_expiry_time
            self._add_range_validation_to_integer_input(
                "confirmation_link_expiry", min_value_expiry, max_value_expiry
            )
            project_max_length = SMSAccountConfirmationSettings.PROJECT_NAME_MAX_LENGTH
            self.fields['confirmation_sms_project_name'].initial = settings_obj.project_name
            self.fields['confirmation_sms_project_name'].max_length = project_max_length

    def _handle_call_limit_visibility(self):
        """Show the operator call limit only for domains that already have an
        OperatorCallLimitSettings row; seed its current value and bounds."""
        if self.domain not in OperatorCallLimitSettings.objects.values_list('domain', flat=True):
            del self.fields['operator_call_limit']
            return
        existing_limit_setting = OperatorCallLimitSettings.objects.get(domain=self.domain)
        self.fields['operator_call_limit'].initial = existing_limit_setting.call_limit
        self._add_range_validation_to_integer_input(
            "operator_call_limit", OperatorCallLimitSettings.CALL_LIMIT_MINIMUM,
            OperatorCallLimitSettings.CALL_LIMIT_MAXIMUM
        )

    def _add_range_validation_to_integer_input(self, settings_name, min_value, max_value):
        """Attach inclusive min/max validators to the named integer field."""
        setting = self.fields.get(settings_name)
        min_validator = MinValueValidator(min_value)
        max_validator = MaxValueValidator(max_value)
        setting.validators.extend([min_validator, max_validator])

    def clean_default_timezone(self):
        """Validate the timezone name and normalize it to ``str``."""
        data = self.cleaned_data['default_timezone']
        timezone_field = TimeZoneField()
        timezone_field.run_validators(data)
        return smart_str(data)

    def clean_default_geocoder_location(self):
        """Return the geocoder value as a dict (the widget may deliver a
        JSON string or an already-parsed dict)."""
        data = self.cleaned_data.get('default_geocoder_location')
        if isinstance(data, dict):
            return data
        return json.loads(data or '{}')

    def clean_confirmation_link_expiry(self):
        data = self.cleaned_data['confirmation_link_expiry']
        return DomainGlobalSettingsForm.validate_integer_value(data, "Confirmation link expiry")

    def clean_operator_call_limit(self):
        data = self.cleaned_data['operator_call_limit']
        return DomainGlobalSettingsForm.validate_integer_value(data, "Operator call limit")

    @staticmethod
    def validate_integer_value(value, value_name):
        """Coerce ``value`` to int or raise a ValidationError naming the field."""
        try:
            return int(value)
        except ValueError:
            raise forms.ValidationError(_("{} should be an integer.").format(value_name))

    def clean(self):
        """Cross-field check: enabling the call center requires type, owner
        and case type to all be set."""
        cleaned_data = super(DomainGlobalSettingsForm, self).clean()
        if (cleaned_data.get('call_center_enabled')
                and (not cleaned_data.get('call_center_case_type')
                     or not cleaned_data.get('call_center_case_owner')
                     or not cleaned_data.get('call_center_type'))):
            raise forms.ValidationError(_(
                'You must choose a Call Center Type, Owner, and Case Type to use the call center application. '
                'Please uncheck the "Call Center Application" setting or enter values for the other fields.'
            ))
        return cleaned_data

    def _save_logo_configuration(self, domain):
        """
        :raises IOError: if unable to save (e.g. PIL is unable to save PNG in CMYK mode)
        """
        if self.can_use_custom_logo:
            logo = self.cleaned_data['logo']
            if logo:
                input_image = Image.open(io.BytesIO(logo.read()))
                input_image.load()
                input_image.thumbnail(LOGO_SIZE)
                # had issues trying to use a BytesIO instead
                tmpfilename = "/tmp/%s_%s" % (uuid.uuid4(), logo.name)
                input_image.save(tmpfilename, 'PNG')
                with open(tmpfilename, 'rb') as tmpfile:
                    domain.put_attachment(tmpfile, name=LOGO_ATTACHMENT)
            elif self.cleaned_data['delete_logo']:
                domain.delete_attachment(LOGO_ATTACHMENT)

    def _save_call_center_configuration(self, domain):
        """Copy call-center form values onto ``domain.call_center_config``.

        Owner may be a real owner id or one of the LOCATION_CHOICES sentinels,
        in which case ownership follows the user's (parent) location.
        """
        cc_config = domain.call_center_config
        cc_config.enabled = self.cleaned_data.get('call_center_enabled', False)
        if cc_config.enabled:
            domain.internal.using_call_center = True
            cc_config.use_fixtures = self.cleaned_data['call_center_type'] == self.CASES_AND_FIXTURES_CHOICE
            owner = self.cleaned_data.get('call_center_case_owner', None)
            if owner in self.LOCATION_CHOICES:
                # NOTE(review): this branch writes `call_center_case_owner`
                # while the else branch writes `case_owner_id` -- looks
                # inconsistent; confirm against the call_center_config model.
                cc_config.call_center_case_owner = None
                cc_config.use_user_location_as_owner = True
                cc_config.user_location_ancestor_level = 1 if owner == USE_PARENT_LOCATION_CHOICE else 0
            else:
                cc_config.case_owner_id = owner
                cc_config.use_user_location_as_owner = False
            cc_config.case_type = self.cleaned_data.get('call_center_case_type', None)

    def _save_timezone_configuration(self, domain):
        """When the project default timezone changes, propagate it to every
        web user who has not opted to override it."""
        global_tz = self.cleaned_data['default_timezone']
        if domain.default_timezone != global_tz:
            domain.default_timezone = global_tz
            users = WebUser.by_domain(domain.name)
            users_to_save = []
            for user in users:
                dm = user.get_domain_membership(domain.name)
                if not dm.override_global_tz and dm.timezone != global_tz:
                    dm.timezone = global_tz
                    users_to_save.append(user)
            if users_to_save:
                WebUser.bulk_save(users_to_save)

    def _save_account_confirmation_settings(self, domain):
        """Persist SMS account-confirmation settings (flag-gated)."""
        if TWO_STAGE_USER_PROVISIONING_BY_SMS.enabled(domain.name):
            settings = SMSAccountConfirmationSettings.get_settings(domain.name)
            settings.project_name = self.cleaned_data.get('confirmation_sms_project_name')
            settings.confirmation_link_expiry_time = self.cleaned_data.get('confirmation_link_expiry')
            settings.save()

    def save(self, request, domain):
        """Apply all cleaned settings to ``domain`` and save it.

        A logo-save failure is reported via messages but does not abort the
        rest of the save. Always returns True.
        """
        domain.hr_name = self.cleaned_data['hr_name']
        domain.project_description = self.cleaned_data['project_description']
        domain.default_mobile_ucr_sync_interval = self.cleaned_data.get('mobile_ucr_sync_interval', None)
        domain.default_geocoder_location = self.cleaned_data.get('default_geocoder_location')
        if self.cleaned_data.get("operator_call_limit"):
            setting_obj = OperatorCallLimitSettings.objects.get(domain=self.domain)
            setting_obj.call_limit = self.cleaned_data.get("operator_call_limit")
            setting_obj.save()
        try:
            self._save_logo_configuration(domain)
        except IOError as err:
            messages.error(request, _('Unable to save custom logo: {}').format(err))
        self._save_call_center_configuration(domain)
        self._save_timezone_configuration(domain)
        self._save_account_confirmation_settings(domain)
        domain.save()
        return True
class DomainMetadataForm(DomainGlobalSettingsForm):
    """Extends the basic project settings with the Web Apps release mode."""

    cloudcare_releases = ChoiceField(
        label=gettext_lazy("Web Apps should use"),
        initial=None,
        required=False,
        choices=(
            ('stars', gettext_lazy('Latest starred version')),
            ('nostars', gettext_lazy('Highest numbered version (not recommended)')),
        ),
        help_text=gettext_lazy("Choose whether Web Apps should use the latest starred build or highest numbered "
                               "build in your application.")
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if (self.project.cloudcare_releases == 'default'
                or not domain_has_privilege(self.domain, privileges.CLOUDCARE)):
            # if the cloudcare_releases flag was just defaulted, don't bother showing
            # this setting at all
            del self.fields['cloudcare_releases']
        if not domain_has_privilege(self.domain, privileges.GEOCODER):
            del self.fields['default_geocoder_location']

    def save(self, request, domain):
        """Save the base settings, then the cloudcare_releases flag (which can
        never move away from 'default')."""
        if not super().save(request, domain):
            return False
        try:
            chosen_release_mode = self.cleaned_data.get('cloudcare_releases')
            if chosen_release_mode and domain.cloudcare_releases != 'default':
                # you're never allowed to change from default
                domain.cloudcare_releases = chosen_release_mode
            domain.save()
            return True
        except Exception as e:
            logging.exception("couldn't save project settings - error is %s" % e)
            return False
def tuple_of_copies(a_list, blank=True):
    """Turn a list of values into ``((value, value), ...)`` choice pairs.

    When ``blank`` is true, a ``('', '---')`` placeholder is prepended.
    """
    choices = [(item, item) for item in a_list]
    if blank:
        choices = [('', '---')] + choices
    return tuple(choices)
class PrivacySecurityForm(forms.Form):
    """Privacy & security settings for a project.

    Several fields are privilege- or flag-gated and are removed from the
    rendered form in ``__init__``.
    """
    restrict_superusers = BooleanField(
        label=gettext_lazy("Restrict Dimagi Staff Access"),
        required=False,
        help_text=gettext_lazy(
            "CommCare support staff sometimes require access "
            "to your project space to provide rapid, in-depth support. "
            "Checking this box will restrict the degree of support they "
            "will be able to provide in the event that you report an issue. "
            "You may also miss out on important communications and updates. "
            "Regardless of whether this option is checked, "
            "Commcare support staff will have access "
            "to your billing information and project metadata; "
            "and CommCare system administrators will also have direct access "
            "to data infrastructure strictly for the purposes of system administration "
            "as outlined in our "
            '<a href="https://www.dimagi.com/terms/latest/privacy/">Privacy Policy</a>.'
        )
    )
    secure_submissions = BooleanField(
        label=gettext_lazy("Secure submissions"),
        required=False,
        help_text=mark_safe_lazy(gettext_lazy(  # nosec: no user input
            "Secure Submissions prevents others from impersonating your mobile workers. "
            "This setting requires all deployed applications to be using secure "
            "submissions as well. "
            "<a href='https://help.commcarehq.org/display/commcarepublic/Project+Space+Settings'>"
            "Read more about secure submissions here</a>"))
    )
    secure_sessions = BooleanField(
        label=gettext_lazy("Shorten Inactivity Timeout"),
        required=False,
        help_text=gettext_lazy("All web users on this project will be logged out after {} minutes "
                               "of inactivity").format(settings.SECURE_TIMEOUT)
    )
    secure_sessions_timeout = IntegerField(
        label=gettext_lazy("Inactivity Timeout Length"),
        required=False,
        help_text=gettext_lazy("Override the default {}-minute length of the inactivity timeout. Has no effect "
                               "unless inactivity timeout is on. Note that when this is updated, users may need "
                               "to log out and back in for it to take effect.").format(settings.SECURE_TIMEOUT)
    )
    allow_domain_requests = BooleanField(
        label=gettext_lazy("Web user requests"),
        required=False,
        help_text=gettext_lazy("Allow unknown users to request web access to the domain."),
    )
    hipaa_compliant = BooleanField(
        label=gettext_lazy("HIPAA compliant"),
        required=False,
    )
    two_factor_auth = BooleanField(
        label=gettext_lazy("Two Factor Authentication"),
        required=False,
        help_text=gettext_lazy("All users on this project will be required to enable two factor authentication")
    )
    strong_mobile_passwords = BooleanField(
        label=gettext_lazy("Require Strong Passwords for Mobile Workers"),
        required=False,
        help_text=gettext_lazy("All mobile workers in this project will be required to have a strong password")
    )
    ga_opt_out = BooleanField(
        label=gettext_lazy("Disable Google Analytics"),
        required=False,
    )
    disable_mobile_login_lockout = BooleanField(
        label=gettext_lazy("Disable Mobile Worker Lockout"),
        required=False,
        help_text=gettext_lazy("Mobile Workers will never be locked out of their account, regardless"
                               "of the number of failed attempts")
    )

    def __init__(self, *args, **kwargs):
        """Expects ``user_name`` and ``domain`` kwargs; hides fields the
        domain's plan or flags do not allow."""
        user_name = kwargs.pop('user_name')
        domain = kwargs.pop('domain')
        super(PrivacySecurityForm, self).__init__(*args, **kwargs)
        excluded_fields = []
        if not domain_has_privilege(domain, privileges.ADVANCED_DOMAIN_SECURITY):
            excluded_fields.append('ga_opt_out')
            excluded_fields.append('strong_mobile_passwords')
            excluded_fields.append('two_factor_auth')
            excluded_fields.append('secure_sessions')
        if not HIPAA_COMPLIANCE_CHECKBOX.enabled(user_name):
            excluded_fields.append('hipaa_compliant')
        if not SECURE_SESSION_TIMEOUT.enabled(domain):
            excluded_fields.append('secure_sessions_timeout')
        # PrependedText ensures the label is to the left of the checkbox, and the help text beneath.
        # Feels like there should be a better way to apply these styles, as we aren't pre-pending anything
        fields = [twbscrispy.PrependedText(field_name, '')
                  for field_name in self.fields.keys() if field_name not in excluded_fields]
        self.helper = hqcrispy.HQFormHelper(self)
        self.helper.layout = Layout(
            crispy.Fieldset(
                _('Edit Privacy Settings'),
                *fields
            ),
            hqcrispy.FormActions(
                StrictButton(
                    _('Update Privacy Settings'),
                    type='submit',
                    css_class='btn-primary'
                )
            )
        )

    def save(self, domain_obj):
        """Copy cleaned settings onto ``domain_obj`` and save it.

        When secure_submissions flips, the flag is also propagated to every
        app in the domain (bulk-saved by concrete class). Always returns True.
        """
        domain_obj.restrict_superusers = self.cleaned_data.get('restrict_superusers', False)
        domain_obj.allow_domain_requests = self.cleaned_data.get('allow_domain_requests', False)
        domain_obj.secure_sessions = self.cleaned_data.get('secure_sessions', False)
        domain_obj.secure_sessions_timeout = self.cleaned_data.get('secure_sessions_timeout', None)
        domain_obj.two_factor_auth = self.cleaned_data.get('two_factor_auth', False)
        domain_obj.strong_mobile_passwords = self.cleaned_data.get('strong_mobile_passwords', False)
        secure_submissions = self.cleaned_data.get(
            'secure_submissions', False)
        apps_to_save = []
        if secure_submissions != domain_obj.secure_submissions:
            for app in get_apps_in_domain(domain_obj.name):
                if app.secure_submissions != secure_submissions:
                    app.secure_submissions = secure_submissions
                    apps_to_save.append(app)
        domain_obj.secure_submissions = secure_submissions
        domain_obj.hipaa_compliant = self.cleaned_data.get('hipaa_compliant', False)
        domain_obj.ga_opt_out = self.cleaned_data.get('ga_opt_out', False)
        domain_obj.disable_mobile_login_lockout = self.cleaned_data.get('disable_mobile_login_lockout', False)
        domain_obj.save()
        if apps_to_save:
            # Application and RemoteApp must be bulk-saved separately.
            apps = [app for app in apps_to_save if isinstance(app, Application)]
            remote_apps = [app for app in apps_to_save if isinstance(app, RemoteApp)]
            if apps:
                Application.bulk_save(apps)
            if remote_apps:
                RemoteApp.bulk_save(remote_apps)
        return True
class DomainInternalForm(forms.Form, SubAreaMixin):
    """Internal (Dimagi-facing) project metadata: Salesforce details,
    sector/area tags, support hand-off info, and per-project limits.

    Field declarations only; layout and conditional fields are built in
    ``__init__`` below.
    """
    sf_contract_id = CharField(label="Salesforce Contract ID", required=False)
    sf_account_id = CharField(label="Salesforce Account ID", required=False)
    initiative = forms.MultipleChoiceField(label="Initiative",
                                           widget=forms.CheckboxSelectMultiple(),
                                           choices=tuple_of_copies(DATA_DICT["initiatives"], blank=False),
                                           required=False)
    workshop_region = CharField(
        label="Workshop Region",
        required=False,
        help_text="e.g. US, LAC, SA, Sub-Saharan Africa, Southeast Asia, etc.")
    self_started = ChoiceField(
        label="Self Started?",
        choices=tf_choices('Yes', 'No'),
        required=False,
        help_text=(
            "The organization built and deployed their app themselves. Dimagi may have provided indirect support"
        ))
    is_test = ChoiceField(
        label="Real Project",
        choices=(('none', 'Unknown'),
                 ('true', 'Test'),
                 ('false', 'Real'),)
    )
    area = ChoiceField(
        label="Sector*",
        required=False,
        choices=tuple_of_copies(AREA_CHOICES))
    sub_area = ChoiceField(
        label="Sub-Sector*",
        required=False,
        choices=tuple_of_copies(SUB_AREA_CHOICES))
    organization_name = CharField(
        label="Organization Name*",
        required=False,
        help_text="Quick 1-2 sentence summary of the project.",
    )
    notes = CharField(label="Notes*", required=False, widget=forms.Textarea(attrs={"class": "vertical-resize"}))
    phone_model = CharField(
        label="Device Model",
        help_text="Add Web Apps, if this project is using Web Apps as well",
        required=False,
    )
    business_unit = forms.ChoiceField(
        label='Business Unit',
        choices=tuple_of_copies(BUSINESS_UNITS),
        required=False,
    )
    countries = forms.MultipleChoiceField(
        label="Countries",
        choices=sorted(list(COUNTRIES.items()), key=lambda x: x[0]),
        required=False,
    )
    commtrack_domain = ChoiceField(
        label="CommCare Supply Project",
        choices=tf_choices('Yes', 'No'),
        required=False,
        help_text="This app aims to improve the supply of goods and materials"
    )
    performance_threshold = IntegerField(
        label="Performance Threshold",
        required=False,
        help_text=(
            'The number of forms submitted per month for a user to count as "performing well". '
            'The default value is 15.'
        )
    )
    experienced_threshold = IntegerField(
        label="Experienced Threshold",
        required=False,
        help_text=(
            "The number of different months in which a worker must submit forms to count as experienced. "
            "The default value is 3."
        )
    )
    amplifies_workers = ChoiceField(
        label="Service Delivery App",
        choices=[(AMPLIFIES_NOT_SET, '* Not Set'), (AMPLIFIES_YES, 'Yes'), (AMPLIFIES_NO, 'No')],
        required=False,
        help_text=("This application is used for service delivery. Examples: An "
                   "FLW who uses CommCare to improve counseling and screening of pregnant women. "
                   "An FLW that uses CommCare Supply to improve their supply of medicines. A teacher "
                   "who uses CommCare to assess and improve students' performance."
                   )
    )
    amplifies_project = ChoiceField(
        label="Amplifies Project",
        choices=[(AMPLIFIES_NOT_SET, '* Not Set'), (AMPLIFIES_YES, 'Yes'), (AMPLIFIES_NO, 'No')],
        required=False,
        help_text=("Amplifies the impact of a Frontline Program (FLP). "
                   "Examples: Programs that use M&E data collected by CommCare. "
                   "Programs that use CommCare data to make programmatic decisions."
                   )
    )
    data_access_threshold = IntegerField(
        label="Minimum Monthly Data Accesses",
        required=False,
        help_text=(
            "Minimum number of times project staff are expected to access CommCare data each month. "
            "The default value is 20."
        )
    )
    partner_technical_competency = IntegerField(
        label="Partner Technical Competency",
        required=False,
        min_value=1,
        max_value=5,
        help_text=(
            "Please rate the technical competency of the partner on a scale from "
            "1 to 5. 1 means low-competency, and we should expect LOTS of basic "
            "hand-holding. 5 means high-competency, so if they report a bug it's "
            "probably a real issue with CommCareHQ or a really good idea."
        ),
    )
    support_prioritization = IntegerField(
        label="Support Prioritization",
        required=False,
        min_value=1,
        max_value=3,
        help_text=(
            "Based on the impact of this project and how good this partner was "
            "to work with, how much would you prioritize support for this "
            'partner? 1 means "Low. Take your time." You might rate a partner '
            '"1" because they\'ve been absolutely terrible to you and low impact. '
            '3 means "High priority. Be nice". You might rate a partner "3" '
            "because even though they can't afford a PRO plan, you know they "
            "are changing the world. Or they are an unusually high priority "
            "strategic partner."
        ),
    )
    gs_continued_involvement = ChoiceField(
        label="GS Continued Involvement",
        choices=[(AMPLIFIES_NOT_SET, '* Not Set'), (AMPLIFIES_YES, 'Yes'), (AMPLIFIES_NO, 'No')],
        required=False,
        help_text=(
            "Do you want to continue to be involved in this project? No, please "
            "only reach out if absolutely necessary. Yes. I want to see what "
            "happens and be kept in the loop."
        ),
    )
    technical_complexity = ChoiceField(
        label="Technical Complexity",
        choices=[(AMPLIFIES_NOT_SET, '* Not Set'), (AMPLIFIES_YES, 'Yes'), (AMPLIFIES_NO, 'No')],
        required=False,
        help_text=(
            "Is this an innovation project involving unusual technology which"
            "we expect will require different support than a typical deployment?"
        ),
    )
    app_design_comments = CharField(
        label="App Design Comments",
        widget=forms.Textarea(attrs={"class": "vertical-resize"}),
        required=False,
        help_text=(
            "Unusual workflows or design decisions for others to watch out for."
        ),
    )
    training_materials = CharField(
        label="Training materials",
        required=False,
        help_text=(
            "Where to find training materials or other relevant resources."
        ),
    )
    partner_comments = CharField(
        label="Partner Comments",
        widget=forms.Textarea,
        required=False,
        help_text=(
            "past or anticipated problems with this partner."
        ),
    )
    partner_contact = CharField(
        label="Partner contact",
        required=False,
        help_text=(
            "Primary partner point of contact going forward (type username of existing web user)."
        ),
    )
    dimagi_contact = CharField(
        label="Dimagi contact",
        required=False,
        help_text=(
            "Primary Dimagi point of contact going forward (type username of existing web user)."
        ),
    )
    send_handoff_email = forms.BooleanField(
        label="Send Hand-off Email",
        required=False,
        help_text=(
            "Check this box to trigger a hand-off email to the partner when this form is submitted."
        ),
    )
    use_custom_auto_case_update_hour = forms.ChoiceField(
        label=gettext_lazy("Choose specific time for custom auto case update rules to run"),
        required=True,
        choices=(
            ('N', gettext_lazy("No")),
            ('Y', gettext_lazy("Yes")),
        ),
    )
    auto_case_update_hour = forms.IntegerField(
        label=gettext_lazy("Hour of the day, in UTC, for rules to run (0-23)"),
        required=False,
        min_value=0,
        max_value=23,
    )
    use_custom_auto_case_update_limit = forms.ChoiceField(
        label=gettext_lazy("Set custom auto case update rule limits"),
        required=True,
        choices=(
            ('N', gettext_lazy("No")),
            ('Y', gettext_lazy("Yes")),
        ),
    )
    auto_case_update_limit = forms.IntegerField(
        label=gettext_lazy("Max allowed updates in a daily run"),
        required=False,
        min_value=1000,
    )
    use_custom_odata_feed_limit = forms.ChoiceField(
        label=gettext_lazy("Set custom OData Feed Limit? Default is {}.").format(
            settings.DEFAULT_ODATA_FEED_LIMIT),
        required=True,
        choices=(
            ('N', gettext_lazy("No")),
            ('Y', gettext_lazy("Yes")),
        ),
    )
    odata_feed_limit = forms.IntegerField(
        label=gettext_lazy("Max allowed OData Feeds"),
        required=False,
        min_value=1,
    )
    granted_messaging_access = forms.BooleanField(
        label="Enable Messaging",
        required=False,
        help_text="Check this box to enable messaging.",  # TODO through non-test gateways
    )
    active_ucr_expressions = forms.MultipleChoiceField(
        label="Expressions for SaaS to Manage",
        choices=RESTRICTED_UCR_EXPRESSIONS,
        required=False,
    )
    def __init__(self, domain, can_edit_eula, *args, **kwargs):
        """Build the crispy layout.

        :param domain: the project name this form edits.
        :param can_edit_eula: when true, adds the custom_eula / can_use_data
            fields (EULA and data-usage flags) to the Basic Information set.
        """
        super(DomainInternalForm, self).__init__(*args, **kwargs)
        self.domain = domain
        self.can_edit_eula = can_edit_eula
        additional_fields = []
        if self.can_edit_eula:
            additional_fields = ['custom_eula', 'can_use_data']
            self.fields['custom_eula'] = ChoiceField(
                label="Custom Eula?",
                choices=tf_choices(_('Yes'), _('No')),
                required=False,
                help_text='Set to "yes" if this project has a customized EULA as per their contract.'
            )
            self.fields['can_use_data'] = ChoiceField(
                label="Can use project data?",
                choices=tf_choices('Yes', 'No'),
                required=False,
                help_text='Set to "no" if this project opts out of data usage. Defaults to "yes".'
            )
        self.helper = hqcrispy.HQFormHelper()
        # The data_bind attributes below pair with a knockout model that shows
        # each numeric limit field only when its 'use custom ...' toggle is 'Y'.
        self.helper.layout = crispy.Layout(
            crispy.Fieldset(
                _("Basic Information"),
                'initiative',
                'workshop_region',
                'self_started',
                'is_test',
                'area',
                'sub_area',
                'organization_name',
                'notes',
                'phone_model',
                'business_unit',
                'countries',
                'commtrack_domain',
                'performance_threshold',
                'experienced_threshold',
                'amplifies_workers',
                'amplifies_project',
                'data_access_threshold',
                crispy.Div(*additional_fields),
            ),
            crispy.Fieldset(
                _("Support Hand-off information"),
                'partner_technical_competency',
                'support_prioritization',
                'gs_continued_involvement',
                'technical_complexity',
                'app_design_comments',
                'training_materials',
                'partner_comments',
                'partner_contact',
                'send_handoff_email',
                'dimagi_contact',
            ),
            crispy.Fieldset(
                _("Project Limits"),
                crispy.Field(
                    'use_custom_auto_case_update_limit',
                    data_bind='value: use_custom_auto_case_update_limit',
                ),
                crispy.Div(
                    crispy.Field('auto_case_update_limit'),
                    data_bind="visible: use_custom_auto_case_update_limit() === 'Y'",
                ),
                crispy.Field(
                    'use_custom_auto_case_update_hour',
                    data_bind='value: use_custom_auto_case_update_hour',
                ),
                crispy.Div(
                    crispy.Field('auto_case_update_hour'),
                    data_bind="visible: use_custom_auto_case_update_hour() === 'Y'",
                ),
                crispy.Field(
                    'use_custom_odata_feed_limit',
                    data_bind="value: use_custom_odata_feed_limit",
                ),
                crispy.Div(
                    crispy.Field('odata_feed_limit'),
                    data_bind="visible: use_custom_odata_feed_limit() === 'Y'",
                ),
                'granted_messaging_access',
                'active_ucr_expressions',
            ),
            crispy.Fieldset(
                _("Salesforce Details"),
                'sf_contract_id',
                'sf_account_id',
            ),
            hqcrispy.FormActions(
                StrictButton(
                    _("Update Project Information"),
                    type="submit",
                    css_class='btn-primary',
                ),
            ),
        )
@property
def current_values(self):
return {
'use_custom_auto_case_update_hour': self['use_custom_auto_case_update_hour'].value(),
'use_custom_auto_case_update_limit': self['use_custom_auto_case_update_limit'].value(),
'use_custom_odata_feed_limit': self['use_custom_odata_feed_limit'].value()
}
def _get_user_or_fail(self, field):
username = self.cleaned_data[field]
if not username:
return None
user = WebUser.get_by_username(username)
if not user:
msg = "Web user with username '{username}' does not exist"
self.add_error(field, msg.format(username=username))
elif not user.is_member_of(self.domain):
msg = "'{username}' is not the username of a web user in '{domain}'"
self.add_error(field, msg.format(username=username, domain=self.domain))
return user
def clean_active_ucr_expressions(self):
value = self.cleaned_data.get('active_ucr_expressions')
all_expressions = all_restricted_ucr_expressions()
for expr in value:
if expr not in all_expressions:
raise forms.ValidationError(_(f"Unknown expression {expr}"))
return value
def clean_auto_case_update_hour(self):
if self.cleaned_data.get('use_custom_auto_case_update_hour') != 'Y':
return None
value = self.cleaned_data.get('auto_case_update_hour')
if not value:
raise forms.ValidationError(_("This field is required"))
return value
def clean_auto_case_update_limit(self):
if self.cleaned_data.get('use_custom_auto_case_update_limit') != 'Y':
return None
value = self.cleaned_data.get('auto_case_update_limit')
if not value:
raise forms.ValidationError(_("This field is required"))
return value
def clean_odata_feed_limit(self):
if self.cleaned_data.get('use_custom_odata_feed_limit') != 'Y':
return None
value = self.cleaned_data.get('odata_feed_limit')
if not value:
raise forms.ValidationError(_("Please specify a limit for OData feeds."))
return value
def clean(self):
send_handoff_email = self.cleaned_data['send_handoff_email']
partner_user = self._get_user_or_fail('partner_contact')
if not partner_user and send_handoff_email:
msg = "You can't send a hand-off email without specifying a partner contact."
self.add_error('partner_contact', msg)
dimagi_user = self._get_user_or_fail('dimagi_contact')
if send_handoff_email and not dimagi_user:
msg = "You can't send a hand-off email without specifying a contact at dimagi."
self.add_error('dimagi_contact', msg)
elif send_handoff_email and not dimagi_user.full_name:
msg = ("The dimagi user '{}' does not have a name configured, please"
"go to your account settings and add a name before attempting "
"to send an email to the partner.").format(dimagi_user.username)
self.add_error('dimagi_contact', msg)
    def save(self, domain):
        """Persist the internal/CRM project settings onto ``domain``.

        ``domain`` is the Domain document being edited, not a domain-name
        string. Writes deployment info, UCR expression permissions, a few
        direct attributes, and the bulk of the fields via update_internal().
        """
        # Only include workshop_region when a value was submitted, so an empty
        # submission does not clobber anything.
        kwargs = {
            "workshop_region": self.cleaned_data["workshop_region"]
        } if self.cleaned_data["workshop_region"] else {}
        # can_edit_eula is set outside this method (presumably in __init__,
        # not visible here) — TODO confirm which flows enable it.
        if self.can_edit_eula:
            # These select widgets post string values, hence the 'true' compare.
            kwargs['custom_eula'] = self.cleaned_data['custom_eula'] == 'true'
            kwargs['can_use_data'] = self.cleaned_data['can_use_data'] == 'true'
        domain.update_deployment(
            countries=self.cleaned_data['countries'],
        )
        # Allowed UCR expressions are stored separately, keyed by domain name.
        ucr_expressions = self.cleaned_data['active_ucr_expressions']
        AllowedUCRExpressionSettings.save_allowed_ucr_expressions(domain.name, ucr_expressions)
        # Direct attributes on the Domain document; update_internal below is
        # expected to save the document (not visible here — confirm).
        domain.is_test = self.cleaned_data['is_test']
        domain.auto_case_update_hour = self.cleaned_data['auto_case_update_hour']
        domain.auto_case_update_limit = self.cleaned_data['auto_case_update_limit']
        domain.odata_feed_limit = self.cleaned_data['odata_feed_limit']
        domain.granted_messaging_access = self.cleaned_data['granted_messaging_access']
        domain.update_internal(
            sf_contract_id=self.cleaned_data['sf_contract_id'],
            sf_account_id=self.cleaned_data['sf_account_id'],
            initiative=self.cleaned_data['initiative'],
            self_started=self.cleaned_data['self_started'] == 'true',
            area=self.cleaned_data['area'],
            sub_area=self.cleaned_data['sub_area'],
            organization_name=self.cleaned_data['organization_name'],
            notes=self.cleaned_data['notes'],
            phone_model=self.cleaned_data['phone_model'],
            commtrack_domain=self.cleaned_data['commtrack_domain'] == 'true',
            performance_threshold=self.cleaned_data['performance_threshold'],
            experienced_threshold=self.cleaned_data['experienced_threshold'],
            amplifies_workers=self.cleaned_data['amplifies_workers'],
            amplifies_project=self.cleaned_data['amplifies_project'],
            business_unit=self.cleaned_data['business_unit'],
            data_access_threshold=self.cleaned_data['data_access_threshold'],
            partner_technical_competency=self.cleaned_data['partner_technical_competency'],
            support_prioritization=self.cleaned_data['support_prioritization'],
            gs_continued_involvement=self.cleaned_data['gs_continued_involvement'],
            technical_complexity=self.cleaned_data['technical_complexity'],
            app_design_comments=self.cleaned_data['app_design_comments'],
            training_materials=self.cleaned_data['training_materials'],
            partner_comments=self.cleaned_data['partner_comments'],
            partner_contact=self.cleaned_data['partner_contact'],
            dimagi_contact=self.cleaned_data['dimagi_contact'],
            **kwargs
        )
def clean_password(txt):
    """Validate ``txt`` against the project's password rules.

    Returns the password unchanged when it passes; raises a form
    ValidationError with the rule message otherwise.
    """
    error_message = validate_password_rules(txt)
    if error_message:
        raise forms.ValidationError(error_message)
    return txt
class NoAutocompleteMixin(object):
    """Form mixin that disables browser autocomplete on every field when the
    DISABLE_AUTOCOMPLETE_ON_SENSITIVE_FORMS setting is on."""

    def __init__(self, *args, **kwargs):
        super(NoAutocompleteMixin, self).__init__(*args, **kwargs)
        if not settings.DISABLE_AUTOCOMPLETE_ON_SENSITIVE_FORMS:
            return
        for field in self.fields.values():
            field.widget.attrs['autocomplete'] = 'off'
class HQPasswordResetForm(NoAutocompleteMixin, forms.Form):
    """
    Only finds users and emails forms where the USERNAME is equal to the
    email specified (preventing Mobile Workers from using this form to submit).
    This small change is why we can't use the default PasswordReset form.
    """
    email = forms.EmailField(label=gettext_lazy("Email"), max_length=254,
                             widget=forms.TextInput(attrs={'class': 'form-control'}))
    # Conditionally declared class attribute: the captcha field only exists on
    # deployments that configure reCAPTCHA.
    if settings.RECAPTCHA_PRIVATE_KEY:
        captcha = ReCaptchaField(label="")
    error_messages = {
        'unknown': gettext_lazy("That email address doesn't have an associated user account. Are you sure you've "
                                "registered?"),
        'unusable': gettext_lazy("The user account associated with this email address cannot reset the "
                                 "password."),
    }

    def clean_email(self):
        """Validate that the email matches at least one active, resettable user.

        Matches on *username* (case-insensitively), not the email column, so
        mobile workers cannot trigger resets via a shared email address.
        """
        UserModel = get_user_model()
        email = self.cleaned_data["email"]
        matching_users = UserModel._default_manager.filter(username__iexact=email)
        # below here is not modified from the superclass
        if not len(matching_users):
            raise forms.ValidationError(self.error_messages['unknown'])
        if not any(user.is_active for user in matching_users):
            # none of the filtered users are active
            raise forms.ValidationError(self.error_messages['unknown'])
        if any((user.password == UNUSABLE_PASSWORD_PREFIX)
               for user in matching_users):
            raise forms.ValidationError(self.error_messages['unusable'])
        return email

    def save(self, domain_override=None,
             subject_template_name='registration/password_reset_subject.txt',
             email_template_name='registration/password_reset_email.html',
             # WARNING: Django 1.7 passes this in automatically. do not remove
             html_email_template_name=None,
             use_https=False, token_generator=default_token_generator,
             from_email=None, request=None, **kwargs):
        """
        Generates a one-use only link for resetting password and sends to the
        user.
        """
        # SaaS deployments use HQ-branded subject/body templates.
        if settings.IS_SAAS_ENVIRONMENT:
            subject_template_name = 'registration/email/password_reset_subject_hq.txt'
            email_template_name = 'registration/email/password_reset_email_hq.html'
        email = self.cleaned_data["email"]
        # this is the line that we couldn't easily override in PasswordForm where
        # we specifically filter for the username, not the email, so that
        # mobile workers who have the same email set as a web worker don't
        # get a password reset email.
        active_users = get_active_users_by_email(email)
        # the code below is copied from default PasswordForm
        for user in active_users:
            # Make sure that no email is sent to a user that actually has
            # a password marked as unusable
            if not user.has_usable_password():
                continue
            if not domain_override:
                current_site = get_current_site(request)
                site_name = current_site.name
                domain = current_site.domain
            else:
                site_name = domain = domain_override
            # Skip Django users with no corresponding CouchUser or no email.
            couch_user = CouchUser.from_django_user(user)
            if not couch_user:
                continue
            user_email = couch_user.get_email()
            if not user_email:
                continue
            # Template context shared by the subject and body templates.
            c = {
                'email': user_email,
                'domain': domain,
                'site_name': site_name,
                'uid': urlsafe_base64_encode(force_bytes(user.pk)),
                'user': user,
                'token': token_generator.make_token(user),
                'protocol': 'https' if use_https else 'http',
            }
            subject = render_to_string(subject_template_name, c)
            # Email subject *must not* contain newlines
            subject = ''.join(subject.splitlines())
            # NOTE(review): the plaintext body always renders the *default*
            # template, even when email_template_name was overridden above
            # (e.g. the SaaS variant) — confirm this is intentional.
            message_plaintext = render_to_string('registration/password_reset_email.html', c)
            message_html = render_to_string(email_template_name, c)
            send_html_email_async.delay(
                subject, user_email, message_html,
                text_content=message_plaintext,
                email_from=settings.DEFAULT_FROM_EMAIL
            )
class ConfidentialPasswordResetForm(HQPasswordResetForm):
    """Password-reset form that never discloses whether an email address
    belongs to a real account."""

    def clean_email(self):
        """Accept any email; the parent's validation errors would leak
        information about which accounts exist."""
        try:
            return super(ConfidentialPasswordResetForm, self).clean_email()
        except forms.ValidationError:
            # Pretend all is well: save() is a no-op for missing/unusable
            # users, so swallowing the error is safe.
            return self.cleaned_data['email']
class HQSetPasswordForm(SetPasswordForm):
    """SetPasswordForm variant that records when the password was changed on
    the corresponding CouchUser."""

    new_password1 = forms.CharField(
        label=gettext_lazy("New password"),
        widget=forms.PasswordInput(attrs={'data-bind': "value: password, valueUpdate: 'input'"}),
        help_text=mark_safe('<span data-bind="text: passwordHelp, css: color">')  # nosec: no user input
    )

    def save(self, commit=True):
        """Set the password, then stamp last_password_set on the CouchUser."""
        django_user = super(HQSetPasswordForm, self).save(commit)
        couch_user = CouchUser.from_django_user(django_user)
        couch_user.last_password_set = datetime.datetime.utcnow()
        if commit:
            couch_user.save()
        return django_user
class EditBillingAccountInfoForm(forms.ModelForm):
    """ModelForm over BillingContactInfo for editing an account's billing
    contact and mailing-address details.

    The email_list field is a plain CharField rendered as a select2
    multi-select; its values are pulled from the raw POST data in
    clean_email_list().
    """

    email_list = forms.CharField(
        label=BillingContactInfo._meta.get_field('email_list').verbose_name,
        help_text=BillingContactInfo._meta.get_field('email_list').help_text,
        widget=forms.SelectMultiple(choices=[]),
    )

    class Meta(object):
        model = BillingContactInfo
        fields = ['first_name', 'last_name', 'phone_number', 'company_name', 'first_line',
                  'second_line', 'city', 'state_province_region', 'postal_code', 'country']
        widgets = {'country': forms.Select(choices=[])}

    def __init__(self, account, domain, creating_user, data=None, *args, **kwargs):
        """Build the form for ``account``/``domain``.

        ``is_ops_user`` (popped from kwargs) adds a read-only, collapsible
        text rendering of the contact emails for easy copying.
        """
        self.account = account
        self.domain = domain
        self.creating_user = creating_user
        is_ops_user = kwargs.pop('is_ops_user', False)
        # NOTE(review): the broad Exception here covers both a missing
        # BillingContactInfo row and other lookup failures; the fallback reads
        # the country from the provided initial data — consider narrowing.
        try:
            self.current_country = self.account.billingcontactinfo.country
        except Exception:
            initial = kwargs.get('initial')
            self.current_country = initial.get('country') if initial is not None else None
        # Bind the existing contact info as the model instance when present.
        try:
            kwargs['instance'] = self.account.billingcontactinfo
            kwargs['initial'] = {
                'email_list': self.account.billingcontactinfo.email_list,
            }
        except BillingContactInfo.DoesNotExist:
            pass
        super(EditBillingAccountInfoForm, self).__init__(data, *args, **kwargs)
        self.helper = hqcrispy.HQFormHelper()
        fields = [
            'company_name',
            'first_name',
            'last_name',
            crispy.Field('email_list', css_class='input-xxlarge accounting-email-select2',
                         data_initial=json.dumps(self.initial.get('email_list'))),
            'phone_number'
        ]
        # Ops users get a hidden-by-default plain-text copy of the email list
        # plus a button to reveal it (positions 4 and 5 in the fieldset).
        if is_ops_user and self.initial.get('email_list'):
            fields.insert(4, crispy.Div(
                crispy.Div(
                    css_class='col-sm-3 col-md-2'
                ),
                crispy.Div(
                    crispy.HTML(", ".join(self.initial.get('email_list'))),
                    css_class='col-sm-9 col-md-8 col-lg-6'
                ),
                css_id='emails-text',
                css_class='collapse form-group'
            ))
            fields.insert(5, crispy.Div(
                crispy.Div(
                    css_class='col-sm-3 col-md-2'
                ),
                crispy.Div(
                    StrictButton(
                        _("Show contact emails as text"),
                        type="button",
                        css_class='btn btn-default',
                        css_id='show_emails'
                    ),
                    crispy.HTML('<p class="help-block">%s</p>' %
                                _('Useful when you want to copy contact emails')),
                    css_class='col-sm-9 col-md-8 col-lg-6'
                ),
                css_class='form-group'
            ))
        self.helper.layout = crispy.Layout(
            crispy.Fieldset(
                _("Basic Information"),
                *fields
            ),
            crispy.Fieldset(
                _("Mailing Address"),
                'first_line',
                'second_line',
                'city',
                'state_province_region',
                'postal_code',
                crispy.Field('country', css_class="input-large accounting-country-select2",
                             data_country_code=self.current_country or '',
                             data_country_name=COUNTRIES.get(self.current_country, '')),
            ),
            hqcrispy.FormActions(
                StrictButton(
                    _("Update Billing Information"),
                    type="submit",
                    css_class='btn btn-primary',
                ),
            ),
        )

    def clean_phone_number(self):
        """Parse the phone number, trying US then GB then region-less parsing;
        normalize to +<country><national> form."""
        data = self.cleaned_data['phone_number']
        parsed_number = None
        if data:
            for country in ["US", "GB", None]:
                parsed_number = parse_phone_number(data, country, failhard=False)
                if parsed_number is not None:
                    break
            if parsed_number is None:
                raise forms.ValidationError(_("It looks like this phone number is invalid. "
                                              "Did you forget the country code?"))
            return "+%s%s" % (parsed_number.country_code, parsed_number.national_number)

    def clean_email_list(self):
        # The multi-select posts repeated values; read them from the raw data
        # since the declared field is a single CharField.
        return self.data.getlist('email_list')

    # Does not use the commit kwarg.
    # TODO - Should support it or otherwise change the function name
    @transaction.atomic
    def save(self, commit=True):
        """Save the contact info (always committing) and touch the account."""
        billing_contact_info = super(EditBillingAccountInfoForm, self).save(commit=False)
        billing_contact_info.email_list = self.cleaned_data['email_list']
        billing_contact_info.account = self.account
        billing_contact_info.save()
        self.account.save()
        return True
class ConfirmNewSubscriptionForm(EditBillingAccountInfoForm):
    """Billing-info form that, on save, also switches the domain onto the
    chosen software plan (new subscription, plan change, or renewal of the
    same edition)."""

    plan_edition = forms.CharField(
        widget=forms.HiddenInput,
    )

    def __init__(self, account, domain, creating_user, plan_version, current_subscription, data=None,
                 *args, **kwargs):
        """``plan_version`` is the target plan; ``current_subscription`` may
        be None when the domain has no active subscription."""
        self.plan_version = plan_version
        self.current_subscription = current_subscription
        super(ConfirmNewSubscriptionForm, self).__init__(account, domain, creating_user, data=data,
                                                         *args, **kwargs)
        self.fields['plan_edition'].initial = self.plan_version.plan.edition
        # Imported here to avoid a circular import at module load time —
        # presumably; confirm against the module's import graph.
        from corehq.apps.domain.views.accounting import DomainSubscriptionView
        self.helper.label_class = 'col-sm-3 col-md-2'
        self.helper.field_class = 'col-sm-9 col-md-8 col-lg-6'
        self.helper.layout = crispy.Layout(
            'plan_edition',
            crispy.Fieldset(
                _("Basic Information"),
                'company_name',
                'first_name',
                'last_name',
                crispy.Field('email_list', css_class='input-xxlarge accounting-email-select2',
                             data_initial=json.dumps(self.initial.get('email_list'))),
                'phone_number',
            ),
            crispy.Fieldset(
                _("Mailing Address"),
                'first_line',
                'second_line',
                'city',
                'state_province_region',
                'postal_code',
                crispy.Field('country', css_class="input-large accounting-country-select2",
                             data_country_code=self.current_country or '',
                             data_country_name=COUNTRIES.get(self.current_country, ''))
            ),
            hqcrispy.FormActions(
                hqcrispy.LinkButton(_("Cancel"),
                                    reverse(DomainSubscriptionView.urlname,
                                            args=[self.domain]),
                                    css_class="btn btn-default"),
                StrictButton(_("Subscribe to Plan"),
                             type="submit",
                             id='btn-subscribe-to-plan',
                             css_class='btn btn-primary disable-on-submit-no-spinner '
                                       'add-spinner-on-click'),
            ),
            # Hidden fields consumed by the downgrade-confirmation JS flow.
            crispy.Hidden(name="downgrade_email_note", value="", id="downgrade-email-note"),
            crispy.Hidden(name="old_plan", value=current_subscription.plan_version.plan.edition),
            crispy.Hidden(name="new_plan", value=plan_version.plan.edition)
        )

    def save(self, commit=True):
        """Save billing info, then transition the subscription.

        Branches:
        - same edition: clear the end date of the current subscription;
        - downgrade from a paid plan still inside its minimum period: end the
          current subscription 30 days after its start and schedule the new
          plan to begin then;
        - otherwise: change the current subscription's plan in place, or
          create a brand-new subscription when none exists.

        Returns True on success, False on any failure (logged).
        """
        try:
            with transaction.atomic():
                account_save_success = super(ConfirmNewSubscriptionForm, self).save()
                if not account_save_success:
                    return False
                # Any already-scheduled future subscriptions are superseded.
                cancel_future_subscriptions(self.domain, datetime.date.today(), self.creating_user)
                if self.current_subscription is not None:
                    if self.is_same_edition():
                        self.current_subscription.update_subscription(
                            date_start=self.current_subscription.date_start,
                            date_end=None
                        )
                    elif self.is_downgrade_from_paid_plan() and \
                            self.current_subscription.is_below_minimum_subscription:
                        self.current_subscription.update_subscription(
                            date_start=self.current_subscription.date_start,
                            date_end=self.current_subscription.date_start + datetime.timedelta(days=30)
                        )
                        Subscription.new_domain_subscription(
                            account=self.account,
                            domain=self.domain,
                            plan_version=self.plan_version,
                            date_start=self.current_subscription.date_start + datetime.timedelta(days=30),
                            web_user=self.creating_user,
                            adjustment_method=SubscriptionAdjustmentMethod.USER,
                            service_type=SubscriptionType.PRODUCT,
                            pro_bono_status=ProBonoStatus.NO,
                            funding_source=FundingSource.CLIENT,
                        )
                    else:
                        self.current_subscription.change_plan(
                            self.plan_version,
                            web_user=self.creating_user,
                            adjustment_method=SubscriptionAdjustmentMethod.USER,
                            service_type=SubscriptionType.PRODUCT,
                            pro_bono_status=ProBonoStatus.NO,
                            do_not_invoice=False,
                            no_invoice_reason='',
                        )
                else:
                    Subscription.new_domain_subscription(
                        self.account, self.domain, self.plan_version,
                        web_user=self.creating_user,
                        adjustment_method=SubscriptionAdjustmentMethod.USER,
                        service_type=SubscriptionType.PRODUCT,
                        pro_bono_status=ProBonoStatus.NO,
                        funding_source=FundingSource.CLIENT,
                    )
                return True
        except Exception as e:
            log_accounting_error(
                "There was an error subscribing the domain '%s' to plan '%s'. Message: %s "
                % (self.domain, self.plan_version.plan.name, str(e)),
                show_stack_trace=True,
            )
            return False

    def is_same_edition(self):
        """True when the target plan has the same edition as the current one."""
        return self.current_subscription.plan_version.plan.edition == self.plan_version.plan.edition

    def is_downgrade_from_paid_plan(self):
        """True when moving from a paid (non-trial) plan to a lower edition."""
        if self.current_subscription is None:
            return False
        elif self.current_subscription.is_trial:
            return False
        else:
            return is_downgrade(
                current_edition=self.current_subscription.plan_version.plan.edition,
                next_edition=self.plan_version.plan.edition
            )
class ConfirmSubscriptionRenewalForm(EditBillingAccountInfoForm):
    """Billing-info form that, on save, renews the domain's current
    subscription onto ``renewed_version``."""

    plan_edition = forms.CharField(
        widget=forms.HiddenInput,
    )

    def __init__(self, account, domain, creating_user, current_subscription,
                 renewed_version, data=None, *args, **kwargs):
        self.current_subscription = current_subscription
        super(ConfirmSubscriptionRenewalForm, self).__init__(
            account, domain, creating_user, data=data, *args, **kwargs
        )
        self.renewed_version = renewed_version
        self.helper.label_class = 'col-sm-3 col-md-2'
        self.helper.field_class = 'col-sm-9 col-md-8 col-lg-6'
        self.fields['plan_edition'].initial = renewed_version.plan.edition
        # Imported here to avoid a circular import at module load time —
        # presumably; confirm against the module's import graph.
        from corehq.apps.domain.views.accounting import DomainSubscriptionView
        self.helper.layout = crispy.Layout(
            'plan_edition',
            crispy.Fieldset(
                _("Basic Information"),
                'company_name',
                'first_name',
                'last_name',
                crispy.Field('email_list', css_class='input-xxlarge accounting-email-select2',
                             data_initial=json.dumps(self.initial.get('email_list'))),
                'phone_number',
            ),
            crispy.Fieldset(
                _("Mailing Address"),
                'first_line',
                'second_line',
                'city',
                'state_province_region',
                'postal_code',
                crispy.Field('country', css_class="input-large accounting-country-select2",
                             data_country_code=self.current_country or '',
                             data_country_name=COUNTRIES.get(self.current_country, ''))
            ),
            hqcrispy.FormActions(
                hqcrispy.LinkButton(
                    _("Cancel"),
                    reverse(DomainSubscriptionView.urlname, args=[self.domain]),
                    css_class="btn btn-default"
                ),
                StrictButton(
                    _("Renew Plan"),
                    type="submit",
                    css_class='btn btn-primary',
                ),
            ),
        )

    def save(self, commit=True):
        """Save billing info and renew the subscription.

        Cancels any future-dated subscriptions first, then renews the current
        subscription onto self.renewed_version. Returns True on success,
        False on failure (logged).
        """
        try:
            with transaction.atomic():
                account_save_success = super(ConfirmSubscriptionRenewalForm, self).save()
                if not account_save_success:
                    return False
                cancel_future_subscriptions(self.domain, self.current_subscription.date_start, self.creating_user)
                self.current_subscription.renew_subscription(
                    web_user=self.creating_user,
                    adjustment_method=SubscriptionAdjustmentMethod.USER,
                    service_type=SubscriptionType.PRODUCT,
                    pro_bono_status=ProBonoStatus.NO,
                    funding_source=FundingSource.CLIENT,
                    new_version=self.renewed_version,
                )
                return True
        except SubscriptionRenewalError as e:
            log_accounting_error(
                "Subscription for %(domain)s failed to renew due to: %(error)s." % {
                    'domain': self.domain,
                    'error': e,
                }
            )
            return False
class ProBonoForm(forms.Form):
    """Application form for a pro-bono subscription; submissions are emailed
    to the pro-bono support address rather than stored."""

    contact_email = MultiCharField(label=gettext_lazy("Email To"), widget=forms.Select(choices=[]))
    organization = forms.CharField(label=gettext_lazy("Organization"))
    project_overview = forms.CharField(
        widget=forms.Textarea(attrs={"class": "vertical-resize"}), label="Project overview"
    )
    airtime_expense = forms.CharField(label=gettext_lazy("Estimated annual expenditures on airtime:"))
    device_expense = forms.CharField(label=gettext_lazy("Estimated annual expenditures on devices:"))
    pay_only_features_needed = forms.CharField(
        widget=forms.Textarea(attrs={"class": "vertical-resize"}), label="Pay only features needed"
    )
    duration_of_project = forms.CharField(help_text=gettext_lazy(
        "We grant pro-bono subscriptions to match the duration of your "
        "project, up to a maximum of 12 months at a time (at which point "
        "you need to reapply)."
    ))
    domain = forms.CharField(label=gettext_lazy("Project Space"))
    dimagi_contact = forms.CharField(
        help_text=gettext_lazy("If you have already been in touch with someone from "
                               "Dimagi, please list their name."),
        required=False)
    num_expected_users = forms.CharField(label=gettext_lazy("Number of expected users"))

    def __init__(self, use_domain_field, *args, **kwargs):
        """``use_domain_field`` controls whether the project-space field is
        required and visible (it is hidden, not removed, when False)."""
        super(ProBonoForm, self).__init__(*args, **kwargs)
        if not use_domain_field:
            self.fields['domain'].required = False
        self.helper = hqcrispy.HQFormHelper()
        self.helper.layout = crispy.Layout(
            crispy.Fieldset(
                _('Pro-Bono Application'),
                'contact_email',
                'organization',
                crispy.Div(
                    'domain',
                    style=('' if use_domain_field else 'display:none'),
                ),
                'project_overview',
                'airtime_expense',
                'device_expense',
                'pay_only_features_needed',
                'duration_of_project',
                'num_expected_users',
                'dimagi_contact',
            ),
            hqcrispy.FormActions(
                crispy.ButtonHolder(
                    crispy.Submit('submit_pro_bono', _('Submit Pro-Bono Application'))
                )
            ),
        )

    def clean_contact_email(self):
        # The multi-select posts repeated 'contact_email' values; join them
        # into one comma-separated string. self.data is copied first because
        # the bound QueryDict is immutable.
        if 'contact_email' in self.cleaned_data:
            copy = self.data.copy()
            self.data = copy
            copy.update({'contact_email': ", ".join(self.data.getlist('contact_email'))})
            return self.data.get('contact_email')

    def process_submission(self, domain=None):
        """Email the rendered application to pro-bono support.

        Best-effort: failures are logged, never raised to the caller.
        """
        try:
            params = {
                'pro_bono_form': self,
                'domain': domain,
            }
            html_content = render_to_string("domain/email/pro_bono_application.html", params)
            text_content = render_to_string("domain/email/pro_bono_application.txt", params)
            recipient = settings.PROBONO_SUPPORT_EMAIL
            subject = "[Pro-Bono Application]"
            if domain is not None:
                subject = "%s %s" % (subject, domain)
            send_html_email_async.delay(subject, recipient, html_content, text_content=text_content,
                                        email_from=settings.DEFAULT_FROM_EMAIL)
        except Exception:
            logging.error("Couldn't send pro-bono application email. "
                          "Contact: %s" % self.cleaned_data['contact_email'])
class InternalSubscriptionManagementForm(forms.Form):
    """Base class for the internal (ops-facing) subscription-management forms.

    Subclasses define a ``slug``, a display ``subscription_type``, an
    ``account_name``, and ``process_subscription_management()`` which performs
    the actual subscription transition.
    """

    # Account types whose name/emails are offered as autocomplete defaults.
    autocomplete_account_types = [
        BillingAccountType.CONTRACT,
        BillingAccountType.GLOBAL_SERVICES,
        BillingAccountType.USER_CREATED,
    ]

    @property
    def slug(self):
        """Unique identifier for this form; also the submit button name."""
        raise NotImplementedError

    @property
    def subscription_type(self):
        """Human-readable label for this kind of subscription."""
        raise NotImplementedError

    @property
    def account_name(self):
        """Name of the billing account to use or create."""
        raise NotImplementedError

    @property
    def account_emails(self):
        """Extra contact emails to attach to the billing account."""
        return []

    def process_subscription_management(self):
        """Apply the subscription change; must be implemented by subclasses."""
        raise NotImplementedError

    @property
    @memoized
    def next_account(self):
        """Return the GLOBAL_SERVICES billing account named account_name,
        creating it (and its contact info) if necessary; merge in any
        account_emails not already on the contact list."""
        matching_accounts = BillingAccount.objects.filter(
            name=self.account_name,
            account_type=BillingAccountType.GLOBAL_SERVICES,
        ).order_by('date_created')
        if matching_accounts:
            # Oldest matching account wins.
            account = matching_accounts[0]
        else:
            account = BillingAccount(
                name=get_account_name_from_default_name(self.account_name),
                created_by=self.web_user,
                created_by_domain=self.domain,
                currency=Currency.get_default(),
                dimagi_contact=self.web_user,
                account_type=BillingAccountType.GLOBAL_SERVICES,
                entry_point=EntryPoint.CONTRACTED,
                pre_or_post_pay=PreOrPostPay.POSTPAY
            )
            account.save()
        # Named _created (not _) to avoid shadowing the gettext alias `_`
        # used throughout this module.
        contact_info, _created = BillingContactInfo.objects.get_or_create(account=account)
        for email in self.account_emails:
            if email not in contact_info.email_list:
                contact_info.email_list.append(email)
        contact_info.save()
        return account

    @property
    @memoized
    def current_subscription(self):
        """The domain's active subscription, or None."""
        return Subscription.get_active_subscription_by_domain(self.domain)

    @property
    @memoized
    def should_autocomplete_account(self):
        """True when the current account's type qualifies for autocompletion."""
        return (
            self.current_subscription
            and self.current_subscription.account.account_type in self.autocomplete_account_types
        )

    @property
    @memoized
    def autocomplete_account_name(self):
        if self.should_autocomplete_account:
            return self.current_subscription.account.name
        return None

    @property
    @memoized
    def current_contact_emails(self):
        """Comma-joined contact emails of the current account, or None when
        unavailable."""
        if self.should_autocomplete_account:
            try:
                return ','.join(self.current_subscription.account.billingcontactinfo.email_list)
            except BillingContactInfo.DoesNotExist:
                pass
        return None

    @property
    def subscription_default_fields(self):
        """Base kwargs passed to subscription create/change calls; subclasses
        extend this dict."""
        return {
            'internal_change': True,
            'web_user': self.web_user,
        }

    def __init__(self, domain, web_user, *args, **kwargs):
        super(InternalSubscriptionManagementForm, self).__init__(*args, **kwargs)
        self.domain = domain
        self.web_user = web_user

    @property
    def form_actions(self):
        """Common trailing layout elements: the slug marker plus the submit
        button."""
        return (
            crispy.Hidden('slug', self.slug),
            hqcrispy.FormActions(
                crispy.Submit(
                    self.slug,
                    gettext_noop('Update'),
                    css_class='disable-on-submit',
                ),
            ),
        )
class DimagiOnlyEnterpriseForm(InternalSubscriptionManagementForm):
    """Puts an internal Dimagi test/demo project on a free Enterprise plan."""

    slug = 'dimagi_only_enterprise'
    subscription_type = gettext_noop('Test or Demo Project')

    def __init__(self, domain, web_user, *args, **kwargs):
        super(DimagiOnlyEnterpriseForm, self).__init__(domain, web_user, *args, **kwargs)
        self.helper = hqcrispy.HQFormHelper()
        self.helper.layout = crispy.Layout(
            crispy.HTML('<div class="alert alert-info">' + gettext_noop(
                '<i class="fa fa-info-circle"></i> You will have access to all '
                'features for free as soon as you hit "Update". Please make '
                'sure this is an internal Dimagi test space, not in use by a '
                'partner.<br>Test projects belong to Dimagi and are not subject to '
                'Dimagi\'s external terms of service.'
            ) + '</div>'),
            *self.form_actions
        )

    @transaction.atomic
    def process_subscription_management(self):
        """Switch the domain to the default Enterprise plan, changing the
        existing subscription in place when one exists."""
        enterprise_plan_version = DefaultProductPlan.get_default_plan_version(SoftwarePlanEdition.ENTERPRISE)
        if self.current_subscription:
            self.current_subscription.change_plan(
                enterprise_plan_version,
                account=self.next_account,
                # Credits only transfer when the account is unchanged.
                transfer_credits=self.current_subscription.account == self.next_account,
                **self.subscription_default_fields
            )
        else:
            Subscription.new_domain_subscription(
                self.next_account,
                self.domain,
                enterprise_plan_version,
                **self.subscription_default_fields
            )

    @property
    def subscription_default_fields(self):
        """Internal, never-invoiced subscription."""
        fields = super(DimagiOnlyEnterpriseForm, self).subscription_default_fields
        fields.update({
            'do_not_invoice': True,
            'no_invoice_reason': '',
            'service_type': SubscriptionType.INTERNAL,
        })
        return fields

    @property
    def account_name(self):
        return "Dimagi Internal Test Account for Project %s" % self.domain
class AdvancedExtendedTrialForm(InternalSubscriptionManagementForm):
    """Grants a partner an extended (15/30/60/90-day) Advanced-plan trial."""

    slug = 'advanced_extended_trial'
    subscription_type = gettext_noop('Extended Trial')

    organization_name = forms.CharField(
        label=gettext_noop('Organization Name'),
        max_length=BillingAccount._meta.get_field('name').max_length,
    )
    emails = forms.CharField(
        label=gettext_noop('Partner Contact Emails'),
    )
    trial_length = forms.ChoiceField(
        choices=[(days, "%d days" % days) for days in [15, 30, 60, 90]],
        label="Trial Length",
    )

    def __init__(self, domain, web_user, *args, **kwargs):
        super(AdvancedExtendedTrialForm, self).__init__(domain, web_user, *args, **kwargs)
        # Pre-fill from the current account when its type allows it.
        self.fields['organization_name'].initial = self.autocomplete_account_name
        self.fields['emails'].initial = self.current_contact_emails
        self.helper = hqcrispy.HQFormHelper()
        self.helper.layout = crispy.Layout(
            crispy.Field('organization_name'),
            crispy.Field('emails', css_class='input-xxlarge'),
            crispy.Field('trial_length', data_bind='value: trialLength'),
            crispy.Div(
                crispy.Div(
                    crispy.HTML(_(
                        '<p><i class="fa fa-info-circle"></i> The trial will begin as soon '
                        'as you hit "Update" and end on <span data-bind="text: end_date"></span>. '
                        'On <span data-bind="text: end_date"></span> '
                        'the project space will be automatically paused.</p>'
                    )),
                    css_class='col-sm-offset-3 col-md-offset-2'
                ),
                css_class='form-group'
            ),
            *self.form_actions
        )

    @transaction.atomic
    def process_subscription_management(self):
        """Switch the domain onto an Advanced trial plan, changing the
        existing subscription in place when one exists."""
        advanced_trial_plan_version = DefaultProductPlan.get_default_plan_version(
            edition=SoftwarePlanEdition.ADVANCED, is_trial=True,
        )
        if self.current_subscription:
            self.current_subscription.change_plan(
                advanced_trial_plan_version,
                account=self.next_account,
                # Credits only transfer when the account is unchanged.
                transfer_credits=self.current_subscription.account == self.next_account,
                **self.subscription_default_fields
            )
        else:
            Subscription.new_domain_subscription(
                self.next_account,
                self.domain,
                advanced_trial_plan_version,
                **self.subscription_default_fields
            )

    @property
    def subscription_default_fields(self):
        """Invoiced trial ending trial_length days from today."""
        fields = super(AdvancedExtendedTrialForm, self).subscription_default_fields
        fields.update({
            'auto_generate_credits': False,
            'date_end': datetime.date.today() + relativedelta(days=int(self.cleaned_data['trial_length'])),
            'do_not_invoice': False,
            'is_trial': True,
            'no_invoice_reason': '',
            'service_type': SubscriptionType.EXTENDED_TRIAL
        })
        return fields

    @property
    def account_name(self):
        return self.cleaned_data['organization_name']

    @property
    def account_emails(self):
        # Comma-separated input; whitespace is not stripped here.
        return self.cleaned_data['emails'].split(',')
class ContractedPartnerForm(InternalSubscriptionManagementForm):
slug = 'contracted_partner'
subscription_type = gettext_noop('Contracted Partner')
software_plan_edition = forms.ChoiceField(
choices=(
(SoftwarePlanEdition.STANDARD, SoftwarePlanEdition.STANDARD),
(SoftwarePlanEdition.PRO, SoftwarePlanEdition.PRO),
(SoftwarePlanEdition.ADVANCED, SoftwarePlanEdition.ADVANCED),
),
label=gettext_noop('Software Plan'),
)
fogbugz_client_name = forms.CharField(
label=gettext_noop('Fogbugz Client Name'),
max_length=BillingAccount._meta.get_field('name').max_length,
)
emails = forms.CharField(
help_text=gettext_noop(
'This is who will receive invoices if the Client exceeds the user '
'or SMS limits in their plan.'
),
label=gettext_noop('Partner Contact Emails'),
)
start_date = forms.DateField(
help_text=gettext_noop('Date the project needs access to features.'),
label=gettext_noop('Start Date'),
)
end_date = forms.DateField(
help_text=gettext_noop(
'Specify the End Date based on the Start Date plus number of '
'months of software plan in the contract with the Client.'
),
label=gettext_noop('End Date'),
)
sms_credits = forms.DecimalField(
initial=0,
label=gettext_noop('SMS Credits'),
)
user_credits = forms.IntegerField(
initial=0,
label=gettext_noop('User Credits'),
)
def __init__(self, domain, web_user, *args, **kwargs):
super(ContractedPartnerForm, self).__init__(domain, web_user, *args, **kwargs)
self.helper = hqcrispy.HQFormHelper()
self.fields['fogbugz_client_name'].initial = self.autocomplete_account_name
self.fields['emails'].initial = self.current_contact_emails
plan_edition = self.current_subscription.plan_version.plan.edition if self.current_subscription else None
if self.is_uneditable:
self.helper.layout = crispy.Layout(
hqcrispy.B3TextField('software_plan_edition', plan_edition),
hqcrispy.B3TextField('fogbugz_client_name', self.current_subscription.account.name),
hqcrispy.B3TextField('emails', self.current_contact_emails),
hqcrispy.B3TextField('start_date', self.current_subscription.date_start),
hqcrispy.B3TextField('end_date', self.current_subscription.date_end),
crispy.HTML(_(
'<p><i class="fa fa-info-circle"></i> This project is on a contracted Enterprise '
'subscription. You cannot change contracted Enterprise subscriptions here. '
'Please contact the Ops team at %(accounts_email)s to request changes.</p>' % {
'accounts_email': settings.ACCOUNTS_EMAIL,
}
))
)
elif plan_edition not in [
first for first, second in self.fields['software_plan_edition'].choices
]:
self.fields['start_date'].initial = datetime.date.today()
self.fields['end_date'].initial = datetime.date.today() + relativedelta(years=1)
self.helper.layout = crispy.Layout(
hqcrispy.B3TextField('software_plan_edition', plan_edition),
crispy.Field('software_plan_edition'),
crispy.Field('fogbugz_client_name'),
crispy.Field('emails'),
crispy.Field('start_date', css_class='date-picker'),
crispy.Field('end_date', css_class='date-picker'),
crispy.Field('sms_credits'),
crispy.Field('user_credits'),
crispy.Div(
crispy.Div(
crispy.HTML(
_('<p><i class="fa fa-info-circle"></i> '
'Clicking "Update" will set up the '
'subscription in CommCareHQ to one of our '
'standard contracted plans.<br/> If you '
'need to set up a non-standard plan, '
'please email {}.</p>').format(settings.ACCOUNTS_EMAIL)
),
css_class='col-sm-offset-3 col-md-offset-2'
),
css_class='form-group'
),
*self.form_actions
)
else:
self.fields['end_date'].initial = self.current_subscription.date_end
self.fields['software_plan_edition'].initial = plan_edition
self.helper.layout = crispy.Layout(
crispy.Field('software_plan_edition'),
crispy.Field('fogbugz_client_name'),
crispy.Field('emails'),
hqcrispy.B3TextField('start_date', self.current_subscription.date_start),
crispy.Hidden('start_date', self.current_subscription.date_start),
crispy.Field('end_date', css_class='date-picker'),
crispy.Hidden('sms_credits', 0),
crispy.Hidden('user_credits', 0),
crispy.HTML(_(
'<div class="alert alert-warning">'
'<p><strong>Are you sure you want to extend the subscription?</strong></p>'
'<p>If this project is becoming a self-service project and only paying for '
'hosting fees, please have them self-subscribe through the subscription page. '
'Please use this page only to extend the existing services contract.</p>'
'</div>'
)),
*self.form_actions
)
    @transaction.atomic
    def process_subscription_management(self):
        """Create or update the contracted (implementation) subscription for this
        domain from the cleaned form data, then grant the entered credits.

        Runs in one transaction so a failed credit grant rolls back the
        subscription change as well.
        """
        new_plan_version = DefaultProductPlan.get_default_plan_version(
            edition=self.cleaned_data['software_plan_edition'],
            is_report_builder_enabled=True,
        )
        # Case 1: the current subscription already matches the requested plan,
        # service type and start date -- update it in place, moving it to the
        # new billing account.
        if (
            self.current_subscription
            and self.current_subscription.service_type == SubscriptionType.IMPLEMENTATION
            and self.current_subscription.plan_version == new_plan_version
            and self.current_subscription.date_start == self.cleaned_data['start_date']
        ):
            contracted_subscription = self.current_subscription
            contracted_subscription.account = self.next_account
            # 'internal_change' is filtered out -- presumably update_subscription
            # does not accept it as a keyword; confirm against the model API.
            contracted_subscription.update_subscription(
                contracted_subscription.date_start,
                **{k: v for k, v in self.subscription_default_fields.items() if k != 'internal_change'}
            )
        # Case 2: no current subscription, or the contract starts in the
        # future -- create a brand-new subscription for the domain.
        elif not self.current_subscription or self.cleaned_data['start_date'] > datetime.date.today():
            contracted_subscription = Subscription.new_domain_subscription(
                self.next_account,
                self.domain,
                new_plan_version,
                date_start=self.cleaned_data['start_date'],
                **self.subscription_default_fields
            )
        # Case 3: switch the existing, already-started subscription to the new
        # plan; credits transfer only when the account stays the same.
        else:
            contracted_subscription = self.current_subscription.change_plan(
                new_plan_version,
                transfer_credits=self.current_subscription.account == self.next_account,
                account=self.next_account,
                **self.subscription_default_fields
            )
        # Grant the manually-entered SMS and user credit lines against the
        # resulting subscription.
        CreditLine.add_credit(
            self.cleaned_data['sms_credits'],
            feature_type=FeatureType.SMS,
            subscription=contracted_subscription,
            web_user=self.web_user,
            reason=CreditAdjustmentReason.MANUAL,
        )
        CreditLine.add_credit(
            self.cleaned_data['user_credits'],
            feature_type=FeatureType.USER,
            subscription=contracted_subscription,
            web_user=self.web_user,
            reason=CreditAdjustmentReason.MANUAL,
        )
@property
def is_uneditable(self):
return (
self.current_subscription
and self.current_subscription.plan_version.plan.edition == SoftwarePlanEdition.ENTERPRISE
and self.current_subscription.service_type == SubscriptionType.IMPLEMENTATION
)
@property
def subscription_default_fields(self):
fields = super(ContractedPartnerForm, self).subscription_default_fields
fields.update({
'auto_generate_credits': True,
'date_end': self.cleaned_data['end_date'],
'do_not_invoice': False,
'no_invoice_reason': '',
'service_type': SubscriptionType.IMPLEMENTATION,
})
return fields
    @property
    def account_name(self):
        """Name for the billing account, taken from the FogBugz client name field."""
        return self.cleaned_data['fogbugz_client_name']
@property
def account_emails(self):
return self.cleaned_data['emails'].split(',')
def clean_end_date(self):
end_date = self.cleaned_data['end_date']
if end_date < datetime.date.today():
raise forms.ValidationError(_(
'End Date cannot be a past date.'
))
if end_date > datetime.date.today() + relativedelta(years=5):
raise forms.ValidationError(_(
'This contract is too long to be managed in this interface. '
'Please contact %(email)s to manage a contract greater than '
'5 years.'
) % {
'email': settings.ACCOUNTS_EMAIL,
})
return end_date
def clean_sms_credits(self):
return self._clean_credits(self.cleaned_data['sms_credits'], 10000, _('SMS'))
def clean_user_credits(self):
return self._clean_credits(self.cleaned_data['user_credits'], 2000, _('user'))
def _clean_credits(self, credits, max_credits, credits_name):
if credits > max_credits:
raise forms.ValidationError(_(
'You tried to add too much %(credits_name)s credit! Only '
'someone on the operations team can add that much credit. '
'Please reach out to %(email)s.'
) % {
'credits_name': credits_name,
'email': settings.ACCOUNTS_EMAIL,
})
return credits
# Subscription-management forms exposed on the internal subscription page,
# in the order they appear in SelectSubscriptionTypeForm's type selector.
INTERNAL_SUBSCRIPTION_MANAGEMENT_FORMS = [
    ContractedPartnerForm,
    DimagiOnlyEnterpriseForm,
    AdvancedExtendedTrialForm,
]
class SelectSubscriptionTypeForm(forms.Form):
    """Dropdown used on the internal subscription management page to pick which
    of the INTERNAL_SUBSCRIPTION_MANAGEMENT_FORMS to display."""
    subscription_type = forms.ChoiceField(
        choices=[
            ('', gettext_noop('Select a subscription type...'))
        ] + [
            (form.slug, form.subscription_type)
            for form in INTERNAL_SUBSCRIPTION_MANAGEMENT_FORMS
        ],
        label=gettext_noop('Subscription Type'),
        required=False,
    )
    def __init__(self, defaults=None, disable_input=False, **kwargs):
        """:param defaults: bound form data, e.g. ``{'subscription_type': slug}``
        :param disable_input: when True (and a default is given), render the
            selection as read-only text instead of a select widget.
        """
        defaults = defaults or {}
        super(SelectSubscriptionTypeForm, self).__init__(defaults, **kwargs)
        self.helper = hqcrispy.HQFormHelper()
        if defaults and disable_input:
            # Read-only display: map the selected slug back to its human label.
            self.helper.layout = crispy.Layout(
                hqcrispy.B3TextField(
                    'subscription_type', {
                        form.slug: form.subscription_type
                        for form in INTERNAL_SUBSCRIPTION_MANAGEMENT_FORMS
                    }[defaults.get('subscription_type')]
                ),
            )
        else:
            # Editable select bound to the knockout observable subscriptionType.
            # NOTE(review): css_class="disabled" on the *editable* branch looks
            # suspicious -- confirm it is intentional.
            self.helper.layout = crispy.Layout(
                crispy.Field(
                    'subscription_type',
                    data_bind='value: subscriptionType',
                    css_class="disabled"
                )
            )
class ManageReleasesByLocationForm(forms.Form):
    """Search for, and add, app-release restrictions scoped to a location.

    The same form backs both the search filters (status is search-only) and
    the "Add New Restriction" action.
    """
    app_id = forms.ChoiceField(label=gettext_lazy("Application"), choices=(), required=False)
    location_id = forms.CharField(label=gettext_lazy("Location"), widget=Select(choices=[]), required=False)
    version = forms.IntegerField(label=gettext_lazy('Version'), required=False, widget=Select(choices=[]))
    status = forms.ChoiceField(label=gettext_lazy("Status"),
                               choices=(
                                   ('', gettext_lazy('Select Status')),
                                   ('active', gettext_lazy('Active')),
                                   ('inactive', gettext_lazy('Inactive'))),
                               required=False,
                               help_text=gettext_lazy("Applicable for search only"))
    def __init__(self, request, domain, *args, **kwargs):
        self.domain = domain
        super(ManageReleasesByLocationForm, self).__init__(*args, **kwargs)
        self.fields['app_id'].choices = self.app_id_choices()
        # Pre-populate search filters from querystring parameters, if present.
        if request.GET.get('app_id'):
            self.fields['app_id'].initial = request.GET.get('app_id')
        if request.GET.get('status'):
            self.fields['status'].initial = request.GET.get('status')
        self.helper = HQFormHelper()
        self.helper.form_tag = False
        self.helper.layout = crispy.Layout(
            crispy.Field('app_id', id='app-id-search-select', css_class="hqwebapp-select2"),
            crispy.Field('location_id', id='location_search_select'),
            crispy.Field('version', id='version-input'),
            crispy.Field('status', id='status-input'),
            hqcrispy.FormActions(
                crispy.ButtonHolder(
                    crispy.Button('search', gettext_lazy("Search"), data_bind="click: search"),
                    crispy.Button('clear', gettext_lazy("Clear"), data_bind="click: clear"),
                    Submit('submit', gettext_lazy("Add New Restriction"))
                )
            )
        )
    def app_id_choices(self):
        # (None, label) acts as the placeholder option.
        choices = [(None, _('Select Application'))]
        for app in get_brief_apps_in_domain(self.domain):
            choices.append((app.id, app.name))
        return choices
    @cached_property
    def version_build_id(self):
        # Build id of the saved build matching the selected version; raises
        # BuildNotFoundException when no such build exists.
        app_id = self.cleaned_data['app_id']
        version = self.cleaned_data['version']
        return get_version_build_id(self.domain, app_id, version)
    def clean_app_id(self):
        if not self.cleaned_data.get('app_id'):
            self.add_error('app_id', _("Please select application"))
        return self.cleaned_data.get('app_id')
    def clean_location_id(self):
        if not self.cleaned_data.get('location_id'):
            self.add_error('location_id', _("Please select location"))
        return self.cleaned_data.get('location_id')
    def clean_version(self):
        if not self.cleaned_data.get('version'):
            self.add_error('version', _("Please select version"))
        return self.cleaned_data.get('version')
    def clean(self):
        # Surface a missing build as a field error on 'version'.
        app_id = self.cleaned_data.get('app_id')
        version = self.cleaned_data.get('version')
        if app_id and version:
            try:
                self.version_build_id
            except BuildNotFoundException as e:
                self.add_error('version', e)
    def save(self):
        """Activate the restriction; return ``(success, error_message)``."""
        location_id = self.cleaned_data['location_id']
        version = self.cleaned_data['version']
        app_id = self.cleaned_data['app_id']
        try:
            AppReleaseByLocation.update_status(self.domain, app_id, self.version_build_id, location_id,
                                               version, active=True)
        except ValidationError as e:
            return False, ','.join(e.messages)
        return True, None
class BaseManageReleasesByAppProfileForm(forms.Form):
    """Shared scaffolding for the manage-releases-by-app-profile forms:
    app/version fields, crispy layout, and hooks for subclass fields/buttons."""
    app_id = forms.ChoiceField(label=gettext_lazy("Application"), choices=(), required=True)
    version = forms.IntegerField(label=gettext_lazy('Version'), required=False, widget=Select(choices=[]))
    def __init__(self, request, domain, *args, **kwargs):
        self.request = request
        self.domain = domain
        super(BaseManageReleasesByAppProfileForm, self).__init__(*args, **kwargs)
        self.fields['app_id'].choices = self.app_id_choices()
        self.helper = HQFormHelper()
        self.helper.form_tag = False
        self.helper.layout = crispy.Layout(
            crispy.Fieldset(
                "",
                *self.form_fields()
            ),
            hqcrispy.FormActions(
                crispy.ButtonHolder(
                    *self._buttons()
                )
            )
        )
    def app_id_choices(self):
        # (None, label) acts as the placeholder option.
        choices = [(None, _('Select Application'))]
        for app in get_brief_apps_in_domain(self.domain):
            choices.append((app.id, app.name))
        return choices
    def form_fields(self):
        # Subclasses extend this list with their extra crispy fields.
        return [
            crispy.Field('app_id', css_class="hqwebapp-select2 app-id-search-select"),
            crispy.Field('version', css_class='version-input'),
        ]
    @staticmethod
    def _buttons():
        # Subclasses must supply the action buttons for the layout.
        raise NotImplementedError
class SearchManageReleasesByAppProfileForm(BaseManageReleasesByAppProfileForm):
    """Search/filter variant: adds build-profile and status filters plus
    Search/Clear buttons."""
    app_build_profile_id = forms.ChoiceField(label=gettext_lazy("Build Profile"), choices=(),
                                             required=False)
    status = forms.ChoiceField(label=gettext_lazy("Status"),
                               choices=(
                                   ('', gettext_lazy('Select Status')),
                                   ('active', gettext_lazy('Active')),
                                   ('inactive', gettext_lazy('Inactive'))),
                               required=False)
    def __init__(self, request, domain, *args, **kwargs):
        super(SearchManageReleasesByAppProfileForm, self).__init__(request, domain, *args, **kwargs)
        # Pre-populate filters from querystring parameters, if present.
        if request.GET.get('app_id'):
            self.fields['app_id'].initial = request.GET.get('app_id')
        if request.GET.get('status'):
            self.fields['status'].initial = request.GET.get('status')
    def form_fields(self):
        form_fields = super(SearchManageReleasesByAppProfileForm, self).form_fields()
        form_fields.extend([
            crispy.Field('app_build_profile_id', css_class="hqwebapp-select2 app-build-profile-id-select"),
            crispy.Field('status', id='status-input')
        ])
        return form_fields
    @staticmethod
    def _buttons():
        return [
            crispy.Button('search', gettext_lazy("Search"), data_bind="click: search",
                          css_class='btn-primary'),
            crispy.Button('clear', gettext_lazy("Clear"), data_bind="click: clear"),
        ]
class CreateManageReleasesByAppProfileForm(BaseManageReleasesByAppProfileForm):
    """Create variant: restrict a build's release to the selected profiles."""
    build_profile_id = forms.CharField(label=gettext_lazy('Build Profile'),
                                       required=True, widget=SelectMultiple(choices=[]),)
    def save(self):
        """Enable the restriction for each selected profile.

        :returns: ``(error_messages, success_messages)`` lists
        """
        success_messages = []
        error_messages = []
        for build_profile_id in self.cleaned_data['build_profile_id']:
            try:
                LatestEnabledBuildProfiles.update_status(self.build, build_profile_id,
                                                         active=True)
                success_messages.append(_('Restriction for profile {profile} set successfully.').format(
                    profile=self.build.build_profiles[build_profile_id]['name'],
                ))
            except ValidationError as e:
                error_messages.append(_('Restriction for profile {profile} failed: {message}').format(
                    profile=self.build.build_profiles[build_profile_id]['name'],
                    message=', '.join(e.messages)
                ))
        return error_messages, success_messages
    @cached_property
    def build(self):
        # The app build document for the selected version.
        return get_app(self.domain, self.version_build_id)
    @cached_property
    def version_build_id(self):
        # Build id matching the selected app version; raises
        # BuildNotFoundException when no such build exists.
        app_id = self.cleaned_data['app_id']
        version = self.cleaned_data['version']
        return get_version_build_id(self.domain, app_id, version)
    def form_fields(self):
        form_fields = super(CreateManageReleasesByAppProfileForm, self).form_fields()
        form_fields.extend([
            crispy.Field('build_profile_id', id='build-profile-id-input')
        ])
        return form_fields
    @staticmethod
    def _buttons():
        return [Submit('submit', gettext_lazy("Add New Restriction"), css_class='btn-primary')]
    def clean(self):
        # Surface a missing build as a field error on 'version'.
        if self.cleaned_data.get('version'):
            try:
                self.version_build_id
            except BuildNotFoundException as e:
                self.add_error('version', e)
    def clean_build_profile_id(self):
        # Multi-select: pull all submitted values from the raw form data.
        return self.data.getlist('build_profile_id')
    def clean_version(self):
        # ensure value is present for a post request
        if not self.cleaned_data.get('version'):
            self.add_error('version', _("Please select version"))
        return self.cleaned_data.get('version')
| bsd-3-clause | eb66487f796622f5429f4aba77c547e3 | 39.842024 | 114 | 0.587148 | 4.255645 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/time_based/migrations/0051_auto_20210114_1055.py | 1 | 2436 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2021-01-14 09:55
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: refreshes the API permission strings and
    # verbose names on the activity-slot models, and repoints
    # SlotParticipant.slot at DateActivitySlot.
    dependencies = [
        ('time_based', '0050_auto_20210112_1515'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='dateactivityslot',
            options={'permissions': (('api_read_dateactivityslot', 'Can view on date activity slots through the API'), ('api_add_dateactivityslot', 'Can add on a date activity slots through the API'), ('api_change_dateactivityslot', 'Can change on a date activity slots through the API'), ('api_delete_dateactivityslot', 'Can delete on a date activity slots through the API'), ('api_read_own_dateactivityslot', 'Can view own on a date activity slots through the API'), ('api_add_own_dateactivityslot', 'Can add own on a date activity slots through the API'), ('api_change_own_dateactivityslot', 'Can change own on a date activity slots through the API'), ('api_delete_own_dateactivityslot', 'Can delete own on a date activity slots through the API')), 'verbose_name': 'slot', 'verbose_name_plural': 'slots'},
        ),
        migrations.AlterModelOptions(
            name='periodactivityslot',
            options={'permissions': (('api_read_periodactivityslot', 'Can view over a period activity slots through the API'), ('api_add_periodactivityslot', 'Can add over a period activity slots through the API'), ('api_change_periodactivityslot', 'Can change over a period activity slots through the API'), ('api_delete_periodactivityslot', 'Can delete over a period activity slots through the API'), ('api_read_own_periodactivityslot', 'Can view own over a period activity slots through the API'), ('api_add_own_periodactivityslot', 'Can add own over a period activity slots through the API'), ('api_change_own_periodactivityslot', 'Can change own over a period activity slots through the API'), ('api_delete_own_periodactivityslot', 'Can delete own over a period activity slots through the API')), 'verbose_name': 'slot', 'verbose_name_plural': 'slots'},
        ),
        migrations.AlterField(
            model_name='slotparticipant',
            name='slot',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='slot_participants', to='time_based.DateActivitySlot'),
        ),
    ]
| bsd-3-clause | 6f40eb68c0bd6ed58ec06df11f5df3cd | 83 | 858 | 0.706076 | 4.114865 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/app_manager/app_schemas/form_metadata.py | 1 | 19285 | from collections import OrderedDict, defaultdict
from jsonobject import (
BooleanProperty,
DictProperty,
JsonObject,
ListProperty,
ObjectProperty,
StringProperty,
)
from corehq.apps.app_manager.app_schemas.app_case_metadata import (
AppCaseMetadata,
FormQuestionResponse,
LoadSaveProperty,
)
from corehq.apps.app_manager.exceptions import XFormException
# Diff states used to annotate app-summary metadata with changes.
REMOVED = 'removed'
ADDED = 'added'
CHANGED = 'changed'
DIFF_STATES = (REMOVED, ADDED, CHANGED)
# Question attributes compared when diffing two versions of a form question.
QUESTION_ATTRIBUTES = (
    'label', 'translations', 'type', 'value', 'options', 'calculate',
    'relevant', 'required', 'comment', 'setvalue', 'constraint',
    'load_properties', 'save_properties'
)
# Form-level attributes compared when diffing two forms.
FORM_ATTRIBUTES = (
    'name', 'short_comment', 'form_filter'
)
# Module-level attributes compared when diffing two modules.
MODULE_ATTRIBUTES = (
    'name', 'short_comment', 'module_filter'
)
class _Change(JsonObject):
    # One diff entry: the kind of change plus old/new values.
    # NOTE(review): the diff code below instantiates these with ``type=...``,
    # which jsonobject stores as a dynamic property; the declared ``action``
    # field appears never to be set -- confirm which key consumers read.
    action = StringProperty(choices=DIFF_STATES)
    old_value = StringProperty()
    new_value = StringProperty()
class _TranslationChange(_Change):
    # Change whose values are translation dicts ({lang: text}) rather than strings.
    old_value = DictProperty()
    new_value = DictProperty()
class _QuestionDiff(JsonObject):
    # Per-question change record attached to a question's ``changes`` field;
    # one _Change per compared attribute (see QUESTION_ATTRIBUTES).
    question = ObjectProperty(_Change)
    label = ObjectProperty(_Change)
    translations = ObjectProperty(_Change)
    type = ObjectProperty(_Change)
    value = ObjectProperty(_Change)
    calculate = ObjectProperty(_Change)
    relevant = ObjectProperty(_Change)
    required = ObjectProperty(_Change)
    comment = ObjectProperty(_Change)
    setvalue = ObjectProperty(_Change)
    constraint = ObjectProperty(_Change)
    options = DictProperty()  # {option: _Change}
    load_properties = DictProperty()  # {case_type: {property: _Change}}
    save_properties = DictProperty()  # {case_type: {property: _Change}}
class _FormDiff(JsonObject):
    # Per-form change record; contains_changes drives the "View Changed Items"
    # filter in the UI.
    form = ObjectProperty(_Change)
    name = ObjectProperty(_Change)
    short_comment = ObjectProperty(_Change)
    form_filter = ObjectProperty(_Change)
    contains_changes = BooleanProperty(default=False)
class _ModuleDiff(JsonObject):
    # Per-module change record; contains_changes drives the "View Changed
    # Items" filter in the UI.
    module = ObjectProperty(_Change)
    name = ObjectProperty(_Change)
    short_comment = ObjectProperty(_Change)
    module_filter = ObjectProperty(_Change)
    contains_changes = BooleanProperty(default=False)
class _FormMetadataQuestion(FormQuestionResponse):
    # Question metadata enriched with its form id, case load/save properties,
    # and a diff record.
    form_id = StringProperty()
    load_properties = ListProperty(LoadSaveProperty)
    save_properties = ListProperty(LoadSaveProperty)
    changes = ObjectProperty(_QuestionDiff)
class _FormMetadata(JsonObject):
    # Summary metadata for one form; ``error`` is populated when the form's
    # XML could not be parsed (see _AppSummaryFormDataGenerator._compile_form).
    unique_id = StringProperty()
    module_uid = StringProperty()
    name = DictProperty()
    short_comment = StringProperty()
    action_type = StringProperty()
    form_filter = StringProperty()
    questions = ListProperty(_FormMetadataQuestion)
    error = DictProperty()
    changes = ObjectProperty(_FormDiff)
class _ModuleMetadata(JsonObject):
    # Summary metadata for one module, including all of its forms.
    unique_id = StringProperty()
    name = DictProperty()
    short_comment = StringProperty()
    module_type = StringProperty()
    is_surveys = BooleanProperty()
    module_filter = StringProperty()
    forms = ListProperty(_FormMetadata)
    changes = ObjectProperty(_ModuleDiff)
class _AppSummaryFormDataGenerator(object):
    """Compiles per-module / per-form / per-question metadata for the app
    summary page, including case load/save properties for each question."""
    def __init__(self, domain, app, include_shadow_forms=True):
        self.domain = domain
        self.app = app
        self.include_shadow_forms = include_shadow_forms
        # Forms whose XML failed to parse; populated by _compile_form.
        self.errors = []
        # Save-to-case root paths already emitted, keyed by form unique_id.
        self._seen_save_to_case = defaultdict(list)
        try:
            self._case_meta = self.app.get_case_metadata()
        except XFormException:
            # Fall back to empty case metadata when the app's XForms fail to parse.
            self._case_meta = AppCaseMetadata()
    def generate(self):
        """Return ``([_ModuleMetadata, ...], [form metadata with errors])``."""
        return [self._compile_module(module) for module in self.app.get_modules()], self.errors
    def _compile_module(self, module):
        return _ModuleMetadata(**{
            'unique_id': module.unique_id,
            'name': module.name,
            'short_comment': module.short_comment,
            'module_type': module.module_type,
            'is_surveys': module.is_surveys,
            'module_filter': module.module_filter,
            'forms': [self._compile_form(form) for form in self._get_pertinent_forms(module)],
        })
    def _get_pertinent_forms(self, module):
        # Local import -- presumably to avoid a circular import; confirm.
        from corehq.apps.app_manager.models import ShadowForm
        if not self.include_shadow_forms:
            return [form for form in module.get_forms() if not isinstance(form, ShadowForm)]
        return module.get_forms()
    def _compile_form(self, form):
        form_meta = _FormMetadata(**{
            'unique_id': form.unique_id,
            'module_uid': form.get_module().unique_id,
            'name': form.name,
            'short_comment': form.short_comment,
            'action_type': form.get_action_type(),
            'form_filter': form.form_filter,
        })
        try:
            form_meta.questions = self._sort_questions_by_group(form)
        except XFormException as exception:
            # Record the parse failure on the form and surface it via self.errors.
            form_meta.error = {
                'details': str(exception),
            }
            self.errors.append(form_meta)
        return form_meta
    def _sort_questions_by_group(self, form):
        # Nest questions under their group/repeat parents and return only the
        # root-level questions; relies on parents appearing before children.
        questions_by_path = OrderedDict(
            (question.value, question)
            for raw_question in form.get_questions(self.app.langs, include_triggers=True,
                                                   include_groups=True, include_translations=True,
                                                   include_fixtures=True)
            for question in self._get_question(form.unique_id, raw_question)
        )
        for path, question in questions_by_path.items():
            parent = question.group or question.repeat
            if parent:
                questions_by_path[parent].children.append(question)
        return [question for question in questions_by_path.values()
                if not question.group and not question.repeat]
    def _get_question(self, form_unique_id, question):
        # May yield a synthetic SaveToCase root node ahead of the question itself.
        if self._needs_save_to_case_root_node(question, form_unique_id):
            yield self._save_to_case_root_node(form_unique_id, question)
        yield self._serialized_question(form_unique_id, question)
    def _needs_save_to_case_root_node(self, question, form_unique_id):
        # Only emit one root node per save-to-case path per form.
        return (
            self._is_save_to_case(question)
            and self._save_to_case_root_path(question) not in self._seen_save_to_case[form_unique_id]
        )
    @staticmethod
    def _is_save_to_case(question):
        return '/case/' in question['value']
    @staticmethod
    def _save_to_case_root_path(question):
        # Everything before the first '/case/' segment of the question path.
        return question['value'].split('/case/')[0]
    def _save_to_case_root_node(self, form_unique_id, question):
        """Add an extra node with the root path of the save to case to attach case properties to
        """
        question_path = self._save_to_case_root_path(question)
        response = _FormMetadataQuestion(**{
            "form_id": form_unique_id,
            "label": question_path,
            "tag": question_path,
            "value": question_path,
            "repeat": question['repeat'],
            "group": question['group'],
            "type": 'SaveToCase',
            "hashtagValue": question['hashtagValue'],
            "relevant": None,
            "required": False,
            "comment": None,
            "constraint": None,
            "load_properties": self._case_meta.get_load_properties(form_unique_id, question_path),
            "save_properties": self._case_meta.get_save_properties(form_unique_id, question_path)
        })
        self._seen_save_to_case[form_unique_id].append(question_path)
        return response
    def _serialized_question(self, form_unique_id, question):
        response = _FormMetadataQuestion(question)
        response.form_id = form_unique_id
        response.load_properties = self._case_meta.get_load_properties(form_unique_id, question['value'])
        response.save_properties = self._case_meta.get_save_properties(form_unique_id, question['value'])
        if self._is_save_to_case(question):
            response.type = 'SaveToCase'
        return response
def get_app_summary_formdata(domain, app, include_shadow_forms=True):
    """Return ``(modules_metadata, errors)`` formatted for the app summary page."""
    generator = _AppSummaryFormDataGenerator(domain, app, include_shadow_forms)
    return generator.generate()
class _AppDiffGenerator(object):
    """Annotates two apps' summary metadata with added/removed/changed records.

    ``self.first``/``self.second`` hold the two apps' module metadata lists;
    the ``changes`` objects on modules, forms, and questions are mutated in
    place as the diff is computed.
    """
    def __init__(self, app1, app2):
        self.first = get_app_summary_formdata(app1.domain, app1)[0]
        self.second = get_app_summary_formdata(app2.domain, app2)[0]
        # Lookup caches: unique_id -> metadata object, built once up front.
        self._first_modules_by_id = {}
        self._first_forms_by_id = {}
        self._first_questions_by_form_id = defaultdict(dict)
        self._second_modules_by_id = {}
        self._second_forms_by_id = {}
        self._second_questions_by_form_id = defaultdict(dict)
        self._populate_id_caches()
        self._mark_removed_items()
        self._mark_retained_items()
    def _populate_id_caches(self):
        # Questions are cached recursively so nested group/repeat children are
        # addressable by path as well.
        def add_question_to_id_cache(id_cache, form_id, question_path, question):
            for child in question.children:
                add_question_to_id_cache(id_cache, form_id, child['value'], child)
            id_cache[form_id][question_path] = question
        for module in self.first:
            self._first_modules_by_id[module['unique_id']] = module
            for form in module['forms']:
                self._first_forms_by_id[form['unique_id']] = form
                for question in form['questions']:
                    add_question_to_id_cache(self._first_questions_by_form_id,
                                             form['unique_id'], question['value'], question)
        for module in self.second:
            self._second_modules_by_id[module['unique_id']] = module
            for form in module['forms']:
                self._second_forms_by_id[form['unique_id']] = form
                for question in form['questions']:
                    add_question_to_id_cache(self._second_questions_by_form_id,
                                             form['unique_id'], question['value'], question)
    def _mark_removed_items(self):
        """Finds all removed modules, forms, and questions from the second app
        """
        for module in self.first:
            if module['unique_id'] not in self._second_modules_by_id:
                self._mark_item_removed(module, 'module')
                continue
            for form in module['forms']:
                second_form = self._second_forms_by_id.get(form['unique_id'])
                if not second_form or form.module_uid != second_form.module_uid:
                    # Also show moved form as deleted and re-added.
                    self._mark_item_removed(form, 'form')
                    continue
                self._mark_removed_questions(form['unique_id'], form['questions'])
    def _mark_removed_questions(self, unique_id, questions):
        # Recurses into group/repeat children before checking the question itself.
        for question in questions:
            self._mark_removed_questions(unique_id, question.children)
            if question.value not in self._second_questions_by_form_id[unique_id]:
                self._mark_item_removed(question, 'question')
    def _mark_retained_items(self):
        """Looks through each module and form that was not removed in the second app
        and marks changes and additions
        """
        for second_module in self.second:
            try:
                first_module = self._first_modules_by_id[second_module['unique_id']]
                for attribute in MODULE_ATTRIBUTES:
                    self._mark_attribute(first_module, second_module, attribute)
                self._mark_forms(second_module['forms'])
            except KeyError:
                # Module id not present in the first app: it was added.
                self._mark_item_added(second_module, 'module')
    def _mark_attribute(self, first_item, second_item, attribute):
        # NOTE(review): translation_changed is only true when attribute_changed
        # is also true (a non-empty item diff implies dict inequality), so the
        # 'or' below looks redundant -- kept as-is.
        translation_changed = (self._is_translatable_property(first_item[attribute],
                                                              second_item[attribute])
                               and (set(second_item[attribute].items())
                                    - set(first_item[attribute].items())))
        attribute_changed = first_item[attribute] != second_item[attribute]
        attribute_added = second_item[attribute] and not first_item[attribute]
        attribute_removed = first_item[attribute] and not second_item[attribute]
        if attribute_changed or translation_changed:
            self._mark_item_changed(first_item, second_item, attribute)
        if attribute_added:
            self._mark_item_added(second_item, attribute)
        if attribute_removed:
            self._mark_item_removed(first_item, attribute)
    @staticmethod
    def _is_translatable_property(first_property, second_property):
        # Translated properties (e.g. names) are {lang: text} dicts.
        return (isinstance(first_property, dict) and isinstance(second_property, dict))
    def _mark_forms(self, second_forms):
        for second_form in second_forms:
            first_form = self._first_forms_by_id.get(second_form['unique_id'])
            if not first_form or first_form.module_uid != second_form.module_uid:
                # Also show moved form as deleted and re-added.
                self._mark_item_added(second_form, 'form')
            else:
                first_form = self._first_forms_by_id[second_form['unique_id']]
                for attribute in FORM_ATTRIBUTES:
                    self._mark_attribute(first_form, second_form, attribute)
                self._mark_questions(second_form['unique_id'], second_form['questions'])
    def _mark_questions(self, form_id, second_questions):
        # Recurses into group/repeat children before diffing each question.
        for second_question in second_questions:
            self._mark_questions(form_id, second_question.children)
            try:
                question_path = second_question['value']
                first_question = self._first_questions_by_form_id[form_id][question_path]
                self._mark_question_attributes(first_question, second_question)
            except KeyError:
                # Path not present in the first app: the question was added.
                self._mark_item_added(second_question, 'question')
    def _mark_question_attributes(self, first_question, second_question):
        for attribute in QUESTION_ATTRIBUTES:
            if attribute == 'options':
                self._mark_options(first_question, second_question)
            elif attribute in ('save_properties', 'load_properties'):
                self._mark_case_properties(first_question, second_question, attribute)
            else:
                self._mark_attribute(first_question, second_question, attribute)
    def _mark_options(self, first_question, second_question):
        # Diff select-question options by value, then compare labels for the
        # options present in both.
        first_option_values = {option.value for option in first_question.options}
        second_option_values = {option.value for option in second_question.options}
        removed_options = first_option_values - second_option_values
        added_options = second_option_values - first_option_values
        potentially_changed_options = first_option_values & second_option_values
        first_options_by_value = {option.value: option.label for option in first_question.options}
        second_options_by_value = {option.value: option.label for option in second_question.options}
        changed_options = [
            option for option in potentially_changed_options
            if first_options_by_value[option] != second_options_by_value[option]
        ]
        for removed_option in removed_options:
            first_question.changes['options'][removed_option] = _Change(type=REMOVED).to_json()
        for added_option in added_options:
            second_question.changes['options'][added_option] = _Change(type=ADDED).to_json()
        for changed_option in changed_options:
            first_question.changes['options'][changed_option] = _Change(
                type=CHANGED,
                old_value=first_options_by_value[changed_option]
            ).to_json()
            second_question.changes['options'][changed_option] = _Change(
                type=CHANGED,
                new_value=second_options_by_value[changed_option]
            ).to_json()
        if removed_options or added_options or changed_options:
            self._set_contains_changes(first_question)
            self._set_contains_changes(second_question)
    def _mark_case_properties(self, first_question, second_question, attribute):
        # Compare (case_type, property) pairs; only additions/removals are
        # tracked here, not label changes.
        first_props = {(prop.case_type, prop.property) for prop in first_question[attribute]}
        second_props = {(prop.case_type, prop.property) for prop in second_question[attribute]}
        removed_properties = first_props - second_props
        added_properties = second_props - first_props
        for removed_property in removed_properties:
            first_question.changes[attribute][removed_property[0]] = {
                removed_property[1]: _Change(type=REMOVED).to_json()
            }
        for added_property in added_properties:
            second_question.changes[attribute][added_property[0]] = {
                added_property[1]: _Change(type=ADDED).to_json()
            }
        if removed_properties or added_properties:
            self._set_contains_changes(first_question)
            self._set_contains_changes(second_question)
    def _mark_item_removed(self, item, key):
        self._set_contains_changes(item)
        try:
            old_value = item[key]
        except KeyError:
            # e.g. key is 'module'/'form'/'question', not a real attribute.
            old_value = None
        if isinstance(old_value, dict):
            change_class = _TranslationChange
        else:
            change_class = _Change
        item.changes[key] = change_class(type=REMOVED, old_value=old_value)
    def _mark_item_added(self, item, key):
        self._set_contains_changes(item)
        try:
            new_value = item[key]
        except KeyError:
            # e.g. key is 'module'/'form'/'question', not a real attribute.
            new_value = None
        if isinstance(new_value, dict):
            change_class = _TranslationChange
        else:
            change_class = _Change
        item.changes[key] = change_class(type=ADDED, new_value=new_value)
    def _mark_item_changed(self, first_item, second_item, key):
        self._set_contains_changes(first_item)
        self._set_contains_changes(second_item)
        if self._is_translatable_property(first_item[key], second_item[key]):
            change_class = _TranslationChange
        else:
            change_class = _Change
        change = change_class(type=CHANGED, old_value=first_item[key], new_value=second_item[key])
        first_item.changes[key] = change
        second_item.changes[key] = change
    def _set_contains_changes(self, item):
        """For forms and modules, set contains_changes to True
        For questions, set the form's contains_changes attribute to True
        This is used for the "View Changed Items" filter in the UI
        """
        try:
            item.changes.contains_changes = True
            if isinstance(item, _FormMetadataQuestion):
                # A changed question also flags its form(s) in both apps.
                for form in [self._first_forms_by_id.get(item['form_id']),
                             self._second_forms_by_id.get(item['form_id'])]:
                    if form:
                        form.changes.contains_changes = True
        except AttributeError:
            pass
def get_app_diff(app1, app2):
    """Return both apps' summary metadata, annotated in place with
    added/removed/changed records."""
    generator = _AppDiffGenerator(app1, app2)
    return (generator.first, generator.second)
| bsd-3-clause | bb18ffe96cb574c00391a2a6573912ef | 39.77167 | 105 | 0.61789 | 4.071142 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/app_manager/migrations/0003_auto_20190326_0853.py | 1 | 1407 | # Generated by Django 1.11.20 on 2019-03-26 08:53
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: introduces the AppReleaseByLocation model,
    # which restricts an app build's release per SQLLocation, with a unique
    # constraint on (domain, build_id, location, version).
    dependencies = [
        ('locations', '0017_locationrelation_last_modified'),
        ('app_manager', '0002_latestenabledbuildprofiles'),
    ]
    operations = [
        migrations.CreateModel(
            name='AppReleaseByLocation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('domain', models.CharField(max_length=255)),
                ('app_id', models.CharField(max_length=255)),
                ('build_id', models.CharField(max_length=255)),
                ('version', models.IntegerField()),
                ('active', models.BooleanField(default=True)),
                ('activated_on', models.DateTimeField(blank=True, null=True)),
                ('deactivated_on', models.DateTimeField(blank=True, null=True)),
                ('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                               to='locations.SQLLocation', to_field='location_id')),
            ],
        ),
        migrations.AlterUniqueTogether(
            name='appreleasebylocation',
            unique_together=set([('domain', 'build_id', 'location', 'version')]),
        ),
    ]
| bsd-3-clause | 1c82a294b2df4ef9ad1c92de359dd57a | 40.382353 | 114 | 0.576404 | 4.438486 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/time_based/migrations/0023_auto_20201103_1510.py | 1 | 1861 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-05-24 09:59
from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.utils.utils import update_group_permissions
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
def add_group_permissions(apps, schema_editor):
    """Install the time_based application permissions for the current tenant's
    Staff/Anonymous/Authenticated groups.

    Anonymous read permissions are skipped on closed sites
    (``properties.CLOSED_SITE``).
    """
    tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
    with LocalTenant(tenant):
        group_perms = {
            'Staff': {
                'perms': (
                    'add_onadateapplication', 'change_onadateapplication',
                    'delete_onadateapplication',
                    'add_periodapplication', 'change_periodapplication',
                    'delete_periodapplication',
                )
            },
            'Anonymous': {
                'perms': (
                    'api_read_onadateapplication',
                    'api_read_periodapplication',
                ) if not properties.CLOSED_SITE else ()
            },
            'Authenticated': {
                'perms': (
                    'api_read_onadateapplication', 'api_add_onadateapplication',
                    'api_change_own_onadateapplication', 'api_delete_own_onadateapplication',
                    'api_read_periodapplication', 'api_add_periodapplication',
                    'api_change_own_periodapplication', 'api_delete_own_periodapplication',
                )
            }
        }
        update_group_permissions('time_based', group_perms, apps)
class Migration(migrations.Migration):
    # Adds group permissions for the time_based application models.

    dependencies = [
        ('time_based', '0022_auto_20201102_1559'),
    ]

    operations = [
        # Reverse migration is a no-op: granted permissions are left in place.
        migrations.RunPython(
            add_group_permissions,
            migrations.RunPython.noop
        )
    ]
| bsd-3-clause | 2b19a9f06f92279b52572820375b4a27 | 31.086207 | 93 | 0.576034 | 4.409953 | false | false | false | false |
dimagi/commcare-hq | corehq/blobs/management/commands/blob_storage_report.py | 1 | 16087 | import csv
import logging
import re
import sys
from collections import defaultdict, OrderedDict
from contextlib import contextmanager
from functools import partial
from itertools import chain
from six.moves.urllib.parse import unquote
from couchdbkit.exceptions import ResourceNotFound
from django.core.management import BaseCommand
from corehq.apps.hqwebapp.doc_lookup import get_db_from_db_name
from corehq.blobs import get_blob_db
from corehq.blobs.exceptions import NotFound
from corehq.util.decorators import change_log_level
from corehq.util.log import with_progress_bar
# CLI usage string; also reused as the management command's help text.
USAGE = "Usage: ./manage.py blob_storage_report [options] FILE [FILE ...]"
class Command(BaseCommand):
    """Report blob storage change over time using Riak CS access logs

    Usage: ./manage.py blob_storage_report [options] FILE [FILE ...]
    """
    help = USAGE

    def add_arguments(self, parser):
        """Register CLI options: input logs, output target, format, sampling."""
        parser.add_argument(
            "files",
            nargs="+",
            help="Riak CS access logs. Use - for stdin.",
        )
        parser.add_argument(
            "-o", "--output-file",
            default=sys.stdout,
            help="Write output to file.",
        )
        parser.add_argument(
            "--csv",
            action="store_true",
            default=False,
            dest="write_csv",
            help="Output report in CSV format.",
        )
        parser.add_argument(
            "-s", "--sample-size",
            type=int,
            default=500,
            help="Sample size.",
        )
        parser.add_argument(
            "-d", "--default-only",
            action="store_true",
            help="Ignore all except the default bucket.",
        )
        parser.add_argument(
            "--list-blob-ids",
            action="store_true",
            help="List blob ids in output (--output-file recommended).",
        )

    # silence noisy AWS client logging while sampling blob sizes
    @change_log_level('boto3', logging.WARNING)
    @change_log_level('botocore', logging.WARNING)
    def handle(self, files, output_file, write_csv, sample_size, default_only,
            list_blob_ids, **options):
        """Parse the logs, sample blob sizes, then write the report tables."""
        print("WARNING this report has not been adapted to the new blob db "
            "metadata API, and therefore is probably broken.")
        print("Loading PUT requests from access logs...", file=sys.stderr)
        data = accumulate_put_requests(files)
        sizes, samples_by_type = get_blob_sizes(data, sample_size, default_only)
        with make_row_writer(output_file, write_csv) as write:
            report_blobs_by_type(data, sizes, samples_by_type, write)
            report_blob_sizes(data, sizes, samples_by_type, write)
            # second pass collapses all domains into one summary column
            report_blob_sizes(data, sizes, samples_by_type, write, summarize=True)
            if list_blob_ids:
                report_blob_ids(sizes, write)
def report_blobs_by_type(data, sizes, samples_by_type, write):
    """Write a table of new-blob counts per bucket, broken down by doc type.

    For each sampled bucket, one row with the number of PUT keys seen and
    the number of sampled blobs that were missing from the blob db, then
    an indented row per doc type with its sample counts.
    """
    assert len(data) < 100, len(data)  # sanity check: bucket names, not blob ids
    missing_by_bucket = defaultdict(int)
    missing_by_doc_type = defaultdict(int)
    for size in chain.from_iterable(sizes.values()):
        if size.length is UNKNOWN:
            missing_by_bucket[size.bucket] += 1
            missing_by_doc_type[size.doc_type] += 1
    write(["BUCKET", "BLOB COUNT", "NOT FOUND"])
    for bucket in sorted(data):
        if bucket not in samples_by_type:
            continue
        write([bucket, len(data[bucket]), missing_by_bucket.get(bucket, "")])
        for doc_type in sorted(samples_by_type[bucket]):
            write([
                " " + doc_type,
                samples_by_type[bucket][doc_type],
                missing_by_doc_type.get(doc_type, ""),
            ])
    write([])
def report_blob_sizes(data, sizes, samples_by_type, write, summarize=False):
    """report blob type, number of blobs, total size grouped by domain

    Writes a table with one row per doc type and one column group per
    domain (top five by sampled size, the rest combined into "OTHER").
    Sizes are estimates: mean sampled size extrapolated to the full
    bucket, weighted by this group's share of the bucket's samples.
    With summarize=True all domains collapse into a single "EST SIZE"
    column.
    """
    def iter_headers(by_domain):
        # each domain contributes three columns: est size, mean, found/total
        for domain in by_domain:
            yield domain
            yield "MEAN"
            yield "COUNT"
    def iter_sizes(doc_type, domain_sizes, totals):
        for domain in by_domain:
            blob_sizes = domain_sizes[domain]
            numerics = [s.length for s in blob_sizes if s.length is not UNKNOWN]
            if numerics:
                bucket = blob_sizes[0].bucket
                bucket_samples = sum(samples_by_type[bucket].values())
                mean_size = int(mean(numerics))
                # extrapolate sample mean to the whole bucket, scaled by
                # this group's share of the bucket's samples
                est_size = (
                    mean_size *
                    len(data[bucket]) *  # number blobs in bucket
                    (len(numerics) / bucket_samples)  # proportion of samples
                )
            else:
                mean_size = 0
                est_size = 0
            found_of_total = "{}/{}".format(len(numerics), len(blob_sizes))
            totals[domain]["size"] += est_size
            totals[domain]["found"] += len(numerics)
            totals[domain]["count"] += len(blob_sizes)
            yield sizeof_fmt(est_size)
            yield sizeof_fmt(mean_size)
            yield found_of_total if blob_sizes else "-"  # FOUND/TOTAL
    def iter_totals(totals):
        yield sizeof_fmt(sum(t["size"] for t in totals.values()))
        for domain in by_domain:
            yield sizeof_fmt(totals[domain]["size"])
            yield ""
            yield "{found}/{count}".format(**totals[domain])
    def sumlens(item):
        # negative total so that ascending sort puts largest first
        return -sum(s.length for s in item[1] if s.length is not UNKNOWN)
    if summarize:
        sizes = {"EST SIZE": list(chain.from_iterable(sizes.values()))}
    # get top five domains + all others combined
    OTHER = "OTHER"
    by_domain = OrderedDict()
    by_type = defaultdict(lambda: defaultdict(list))
    for i, (domain, domain_sizes) in enumerate(sorted(sizes.items(), key=sumlens)):
        if i < 5:
            by_domain[domain] = domain_sizes
        else:
            if i == 5:
                by_domain[OTHER] = []
            by_domain[OTHER].extend(domain_sizes)
            domain = OTHER
        for size in domain_sizes:
            by_type[size.doc_type][domain].append(size)
    def key(item):
        # order doc types by total sampled size (largest first)
        return sum(sumlens(["ignored", sizes]) for sizes in item[1].values())
    totals = {domain: {
        "size": 0,
        "found": 0,
        "count": 0,
    } for domain in by_domain}
    if summarize:
        write(["SUMMARY"])
    else:
        write(["Storage use based on sampled estimates (may be inaccurate)"])
    write(["DOC_TYPE"] + list(iter_headers(by_domain)))
    for doc_type, domain_sizes in sorted(by_type.items(), key=key):
        write([doc_type] + list(iter_sizes(doc_type, domain_sizes, totals)))
    write(["---"] + ["---" for x in iter_headers(by_domain)])
    write(list(iter_totals(totals)))
    write([])
def report_blob_ids(sizes, write):
    """List every sampled blob, grouped by bucket/domain/doc type with the
    largest blobs first within each group."""
    def sort_key(size):
        numeric_length = size.length if size.length is not UNKNOWN else 0
        # negative length => descending size within each group
        return size.bucket, size.domain, size.doc_type, -numeric_length
    write(["BUCKET", "DOMAIN", "DOC_TYPE", "SIZE", "BLOB_ID"])
    all_sizes = chain.from_iterable(sizes.values())
    for size in sorted(all_sizes, key=sort_key):
        length = "" if size.length is UNKNOWN else sizeof_fmt(size.length)
        write([size.bucket, size.domain, size.doc_type, length, size.blob_id])
    write([])
def accumulate_put_requests(files):
    """Aggregate PUT-request blob keys from all access logs.

    Returns {bucket: {blob_id_parts_tuple, ...}}.  A filename of "-"
    means read from stdin.
    """
    data = defaultdict(set)
    for path in files:
        if path == "-":
            load_puts(sys.stdin, data)
            continue
        with open(path, "r", encoding='utf-8') as log:
            load_puts(log, data)
    return data
def get_blob_sizes(data, sample_size, default_only):
    # get domain, blob type, and blob size for each put request (or a sample of them)
    """Sample up to `sample_size` blobs per bucket and look up their sizes.

    Returns (sizes, samples_by_type) where sizes maps domain to a list of
    BlobSize records and samples_by_type maps bucket to per-doc-type
    sample counts.  With default_only, only the "_default" bucket is
    sampled and restore responses are excluded.
    """
    def iter_samples(bucket, keys_list):
        # stop after sample_size keys; dispatch to the bucket's size getter
        for i, keys in enumerate(keys_list):
            if i >= sample_size:
                break
            get_blob_size = SIZE_GETTERS.get(bucket)
            if get_blob_size is not None:
                size = get_blob_size(bucket, *keys)
            else:
                size = get_default_blob_size(bucket, "/".join(keys))
            yield size
    sizes = defaultdict(list)  # {domain: [<BlobSize>, ...], ...}
    samples_by_type = {}  # {bucket: {<doc_type>: <n_samples>, ...}, ...}
    with_progress = partial(
        with_progress_bar,
        oneline="concise",
        stream=sys.stderr,
        step=1,
    )
    for bucket, keys_list in sorted(data.items()):
        counts = defaultdict(int)  # {<doc_type>: <n_samples>, ...}
        length = min(sample_size, len(keys_list))
        if default_only:
            if bucket != "_default":
                continue
            keys_list = (k for k in keys_list if not k[0].startswith("restore-response-"))
        samples = iter_samples(bucket, keys_list)
        for size in with_progress(samples, length, prefix=bucket):
            sizes[size.domain].append(size)
            counts[size.doc_type] += 1
        samples_by_type[bucket] = dict(counts)
    print("", file=sys.stderr)  # newline after the progress bars
    return sizes, samples_by_type
def get_couch_blob_size(db_name, bucket, doc_id, blob_id):
    """Build a BlobSize for a couch-attached blob.

    The blob length is read from the owning document's external_blobs
    metadata; if the document is missing, or no matching blob entry with
    a "length" is found, fall back to asking the blob db directly.
    """
    doc = lookup_doc(doc_id, db_name)
    key = "/".join([doc_id, blob_id])
    if doc is None:
        # document gone: size (and domain/doc_type) must come from the blob db
        return get_default_blob_size(bucket, key)
    domain = doc.get("domain", UNKNOWN)
    doc_type = doc.get("doc_type", UNKNOWN)
    for blob in doc["external_blobs"].values():
        if blob_id == blob["id"]:
            try:
                length = blob["length"]
                break
            except KeyError:
                # matching entry without a length: keep scanning
                pass
    else:
        # no break: no usable metadata entry; fall back to the blob db
        size = get_default_blob_size(bucket, key)
        length = size.length
    return BlobSize(domain, doc_type, length, bucket, key)
def get_form_blob_size(bucket, attachment_id, subkey):
    """Build a BlobSize for a form attachment blob.

    The domain is unknowable here: attachment metadata is sharded by
    form_id, which the access log line does not give us.
    """
    blob_id = "{}/{}".format(attachment_id, subkey)
    length = get_default_blob_size(bucket, blob_id).length
    return BlobSize(UNKNOWN, "form", length, bucket, blob_id)
def get_default_blob_size(bucket, blob_id):
    """Look up a blob's size directly in the blob db.

    Restore responses are reported under the pseudo doc type "restore";
    everything else is typed by its bucket name.  The domain is always
    unknown at this level.
    """
    try:
        length = get_blob_db().size(blob_id, bucket)
    except NotFound:
        length = UNKNOWN
    doc_type = "restore" if blob_id.startswith("restore-response-") else bucket
    return BlobSize(UNKNOWN, doc_type, length, bucket, blob_id)
# Sentinel strings used in report output when a value cannot be determined.
UNKNOWN = "(unknown)"
NOTFOUND = "(notfound)"
# Maps bucket name -> callable(bucket, *key_parts) -> BlobSize.
# Buckets not listed here fall back to get_default_blob_size.
SIZE_GETTERS = {
    "_default": get_default_blob_size,
    "commcarehq": lambda *args: get_couch_blob_size("commcarehq", *args),
    "commcarehq__apps": lambda *args: get_couch_blob_size("apps", *args),
    "commcarehq__meta": lambda *args: get_couch_blob_size("meta", *args),
    "form": get_form_blob_size,
}
class BlobSize(object):
    """Size record for a single sampled blob.

    domain/doc_type may be the UNKNOWN sentinel; length may be UNKNOWN
    when the blob could not be found in the blob db.
    """

    _FIELDS = ("domain", "doc_type", "length", "bucket", "blob_id")

    def __init__(self, domain, doc_type, length, bucket, blob_id):
        for name, value in zip(
            self._FIELDS, (domain, doc_type, length, bucket, blob_id)
        ):
            setattr(self, name, value)
def lookup_doc(doc_id, db_name):
    """Fetch a couch document by id; return None when it does not exist."""
    database = get_db_from_db_name(db_name)
    try:
        doc = database.get(doc_id)
    except ResourceNotFound:
        doc = None
    return doc
def load_puts(fileobj, data):
    """Accumulate blob keys from the PUT lines of one access log.

    Each matching line adds a key tuple to data[bucket].  Form and couch
    buckets use two-part (doc_id, blob_name) keys; all other buckets use
    a single-element (blob_id,) key.  A trailing "uploads" path component
    (multipart-upload bookkeeping) is discarded.
    """
    put_expr = re.compile(r"PUT /buckets/blobdb/objects/(.*) HTTP/1\.")
    matches = (put_expr.search(line) for line in fileobj)
    for match in (m for m in matches if m is not None):
        parts = unquote(match.group(1)).split("/")
        # couch/form buckets shard keys one level deeper than the rest
        key_width = 3 if parts[0].startswith(("form", "commcarehq")) else 2
        if len(parts) > key_width and parts[key_width] == "uploads":
            parts = parts[:key_width]
        assert len(parts) == key_width, parts
        data[parts[0]].add(tuple(parts[1:]))
@contextmanager
def make_row_writer(output_file, write_csv):
    """Yield a ``write(row)`` callable for report output.

    CSV mode writes rows immediately.  Text mode buffers rows ("pending")
    until an empty row is written, then prints the batch as an aligned
    table sized to the widest cell in each column.  ``output_file`` may be
    sys.stdout or a path to open (closed again on exit).
    """
    def make_row_widths_writer(rows, output_file):
        # Build a row writer whose template fits the widest cell per column.
        widths = [len(str(item)) for item in rows[0]]
        for row in rows[1:]:
            for i, item in enumerate(row):
                length = len(str(item))
                if length > widths[i]:
                    widths[i] = length
        # first column left-aligned, the rest right-aligned
        template = " ".join(
            "{%s:%s%s}" % (i, (">" if i else "<"), w)
            for i, w in enumerate(widths)
        )
        def write(row):
            print(template.format(*row), file=output_file)
        return write
    if output_file != sys.stdout:
        output_file = open(output_file, "w", encoding='utf-8')
    if write_csv:
        writer = csv.writer(output_file, dialect="excel")
        write = writer.writerow
    else:
        def write(row):
            if row:
                # lone single-cell rows (titles) print immediately
                if len(row) == 1 and not pending:
                    print(row[0], file=output_file)
                else:
                    pending.append(row)
            else:
                # empty row: flush the pending batch as an aligned table
                if pending:
                    # local rebinding: use the width-aware writer for the batch
                    write = make_row_widths_writer(pending, output_file)
                    for row in pending:
                        write(row)
                    del pending[:]
                print("", file=output_file)
        pending = []
    try:
        yield write
    finally:
        if pending:
            write([])  # flush any rows left unflushed by the caller
        assert not pending, pending
        if output_file != sys.stdout:
            output_file.close()
def sizeof_fmt(num):
    """Format a byte count for humans, e.g. 2048 -> '2.0 KB'.

    Falsy input (0, None, '') renders as the empty string.
    Copied/slightly modified from corehq.couchapps.dbaccessors.
    """
    if not num:
        return ''
    units = ['B', 'KB', 'MB', 'GB', 'TB']
    index = 0
    while index < len(units) and num >= 1024.0:
        num /= 1024.0
        index += 1
    unit = units[index] if index < len(units) else 'PB'
    return "%3.1f %s" % (num, unit)
## https://stackoverflow.com/a/27758326/10840
def mean(data):
    """Return the sample arithmetic mean of data (a non-empty sequence)."""
    if len(data) < 1:
        raise ValueError('mean requires at least one data point')
    return sum(data) / len(data)
#def _ss(data):
# """Return sum of square deviations of sequence data."""
# c = mean(data)
# ss = sum((x-c) ** 2 for x in data)
# return ss
#
#def stddev(data, ddof=0):
# """Calculates the population standard deviation
# by default; specify ddof=1 to compute the sample
# standard deviation.
# """
# n = len(data)
# if n < 2:
# raise ValueError('variance requires at least two data points')
# ss = _ss(data)
# pvar = ss / (n - ddof)
# return pvar ** 0.5
#
#
#def get_sample_size(population_size, samples, z_score=1.96, error_margin=0.05):
# """Get sample size needed to calculate a meaningful mean
#
# This function must be called multiple times to determine a suitable
# meaningful sample size. For example, the first time it is called it
# will return 100 or the population size, whichever is less. If the
# population size is very large this will probably have little value.
# Subsequent calls with the obtained samples should refine the result,
# assuming the samples have a reasonably random distribution.
#
# Sources:
# https://www.surveymonkey.com/mp/sample-size-calculator/
# https://en.wikipedia.org/wiki/Sample_size_determination
# http://courses.wcupa.edu/rbove/Berenson/10th%20ed%20CD-ROM%20topics/section8_7.pdf
#
# :param population_size: Total number of items being sampled.
# :param samples: List of samples already obtained (may be empty).
# :param z_score: Z-score for confidence level.
# :param error_margin: Acceptable margin of error percentage expressed
# as a decimal.
# :returns: The number of samples needed for a meaninful mean.
# """
# return 100
# # TODO implement this
# if len(samples) < 100:
# return min(100, population_size)
# p = .1 + sqrt((ps * (1 - ps)) / len(samples)) # UNFINISHED: this doesn't work
# z_num = z_score ** 2 * p * (1 - p)
# e_sqr = error_margin ** 2
# sample_size = (z_num / e_sqr) / (1 + (z_num / (e_sqr * population_size)))
# return int(sample_size + 1)
| bsd-3-clause | 4c8085059cc5ca152e19e9bed580bd2d | 33.820346 | 90 | 0.577982 | 3.702417 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/app_manager/views/app_summary.py | 1 | 22568 | import io
from collections import namedtuple
from django.conf import settings
from django.http import Http404
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.translation import gettext_lazy as _
from django.views.generic import View
from django.contrib import messages
from corehq.toggles import VIEW_APP_CHANGES
from couchexport.export import export_raw
from couchexport.models import Format
from couchexport.shortcuts import export_response
from dimagi.utils.web import json_response
from corehq.apps.app_manager.app_schemas.app_case_metadata import (
FormQuestionResponse,
)
from corehq.apps.app_manager.app_schemas.form_metadata import (
get_app_diff,
get_app_summary_formdata,
)
from corehq.apps.app_manager.const import WORKFLOW_FORM
from corehq.apps.app_manager.exceptions import XFormException
from corehq.apps.app_manager.models import AdvancedForm, AdvancedModule
from corehq.apps.app_manager.util import is_linked_app, is_remote_app
from corehq.apps.app_manager.view_helpers import ApplicationViewMixin
from corehq.apps.app_manager.views.utils import get_langs
from corehq.apps.app_manager.xform import VELLUM_TYPES
from corehq.apps.domain.decorators import login_or_api_key
from corehq.apps.domain.views.base import LoginAndDomainMixin
from corehq.apps.hqwebapp.views import BasePageView
class AppSummaryView(LoginAndDomainMixin, BasePageView, ApplicationViewMixin):
    """Base view for the app summary pages (case summary, form summary, diff)."""

    @property
    def main_context(self):
        context = super(AppSummaryView, self).main_context
        context.update({
            'domain': self.domain,
        })
        return context

    def _app_dict(self, app):
        """Template context describing one app version for the summary pages."""
        lang, langs = get_langs(self.request, app)
        return {
            'VELLUM_TYPES': VELLUM_TYPES,
            'form_name_map': _get_name_map(app),
            'lang': lang,
            'langs': langs,
            'app_langs': app.langs,
            'app_id': app.id,
            'app_name': app.name,
            # linked apps and non-current copies are not editable
            'read_only': is_linked_app(app) or app.id != app.origin_id,
            'app_version': app.version,
            'latest_app_id': app.origin_id,
        }

    @property
    def page_context(self):
        # summary pages only apply to full (non-remote) apps
        if not self.app or is_remote_app(self.app):
            raise Http404()
        return self._app_dict(self.app)

    @property
    def page_url(self):
        return reverse(self.urlname, args=[self.domain, self.app_id])
class AppCaseSummaryView(AppSummaryView):
    """Case summary page: all case types/properties referenced by the app."""
    urlname = 'app_case_summary'
    template_name = 'app_manager/case_summary.html'

    @property
    def page_context(self):
        context = super(AppCaseSummaryView, self).page_context
        has_form_errors = False
        try:
            metadata = self.app.get_case_metadata().to_json()
        except XFormException:
            # broken form XML: still render the page, flagging the error
            metadata = {}
            has_form_errors = True
        context.update({
            'page_type': 'case_summary',
            'case_metadata': metadata,
            'has_form_errors': has_form_errors,
        })
        return context
class AppFormSummaryView(AppSummaryView):
    """Form summary page: all modules/forms/questions in the app."""
    urlname = 'app_form_summary'
    template_name = 'app_manager/form_summary.html'

    @property
    def page_context(self):
        if self._show_app_changes_notification():
            messages.warning(
                self.request,
                'Hey Dimagi User! Have you tried out '
                '<a href="https://confluence.dimagi.com/display/saas/Viewing+App+Changes+between+versions" '
                'target="_blank">Viewing App Changes between Versions</a> yet? It might be just what you are '
                'looking for!',
                extra_tags='html'
            )
        context = super(AppFormSummaryView, self).page_context
        modules, errors = get_app_summary_formdata(self.domain, self.app, include_shadow_forms=False)
        context.update({
            'page_type': 'form_summary',
            'modules': modules,
            'errors': errors,
        })
        return context

    def _show_app_changes_notification(self):
        # Only nudge internal (Dimagi) users who haven't enabled the
        # VIEW_APP_CHANGES toggle, and never on enterprise deployments.
        if settings.ENTERPRISE_MODE:
            return False
        if self.request.couch_user.is_dimagi and not VIEW_APP_CHANGES.enabled(self.domain):
            return True
        return False
class FormSummaryDiffView(AppSummaryView):
    """Side-by-side form summary diff between two versions of the same app."""
    urlname = "app_form_summary_diff"
    template_name = 'app_manager/form_summary_diff.html'

    @property
    def app(self):
        # the "current" app for base-class context is the apps' shared origin
        return self.get_app(self.first_app.origin_id)

    @property
    def first_app(self):
        return self.get_app(self.kwargs.get('first_app_id'))

    @property
    def second_app(self):
        return self.get_app(self.kwargs.get('second_app_id'))

    @property
    def page_context(self):
        context = super(FormSummaryDiffView, self).page_context
        if self.first_app.origin_id != self.second_app.origin_id:
            # This restriction is somewhat arbitrary, as you might want to
            # compare versions between two different apps on the same domain.
            # However, it breaks a bunch of assumptions in the UI
            raise Http404()
        first = self._app_dict(self.first_app)
        second = self._app_dict(self.second_app)
        first['modules'], second['modules'] = get_app_diff(self.first_app, self.second_app)
        context.update({
            'page_type': 'form_diff',
            'app_id': self.app.origin_id,
            'first': first,
            'second': second,
        })
        return context

    @property
    def parent_pages(self):
        pass

    @property
    def page_url(self):
        pass
class AppDataView(View, LoginAndDomainMixin, ApplicationViewMixin):
    """JSON endpoint serving the form and case summary data for one app."""
    urlname = 'app_data_json'

    def get(self, request, *args, **kwargs):
        modules, errors = get_app_summary_formdata(self.domain, self.app, include_shadow_forms=False)
        return json_response({
            'response': {
                'form_data': {
                    'modules': modules,
                    'errors': errors,
                },
                'case_data': self.app.get_case_metadata().to_json(),
                'form_name_map': _get_name_map(self.app),
            },
            'success': True,
        })
def _get_name_map(app):
    """Map each module/form unique_id to its name(s) and app-builder URLs.

    Returns ``{unique_id: {'module_name', 'module_url'}}`` for modules,
    plus ``'form_name'``/``'form_url'`` entries for forms.  The module URL
    is computed once per module (the original recomputed it, and mutated a
    shared kwargs dict, for every form in the loop).
    """
    name_map = {}
    for module in app.get_modules():
        module_url = reverse('view_module', kwargs={
            'domain': app.domain,
            'app_id': app.id,
            'module_unique_id': module.unique_id,
        })
        name_map[module.unique_id] = {
            'module_name': module.name,
            'module_url': module_url,
        }
        for form in module.get_forms():
            form_url = reverse('form_source', kwargs={
                'domain': app.domain,
                'app_id': app.id,
                'form_unique_id': form.unique_id,
            })
            name_map[form.unique_id] = {
                'form_name': form.name,
                'module_name': module.name,
                'module_url': module_url,
                'form_url': form_url,
            }
    return name_map
def _translate_name(names, language):
    """Pick the name for ``language`` from a {lang: name} mapping.

    Fallbacks are bracket-annotated: "[Unknown]" when there are no names
    at all, and "<name> [<lang>]" when the requested language is missing.
    """
    if not names:
        return "[{}]".format(_("Unknown"))
    try:
        translated = names[language]
    except KeyError:
        fallback_lang, fallback_name = next(iter(names.items()))
        return "{} [{}]".format(fallback_name, fallback_lang)
    return str(translated)
def _get_translated_form_name(app, form_id, language):
    # Form display name in the requested language (with fallback annotation).
    return _translate_name(_get_name_map(app)[form_id]['form_name'], language)
def _get_translated_module_name(app, module_id, language):
    # Module display name in the requested language (with fallback annotation).
    return _translate_name(_get_name_map(app)[module_id]['module_name'], language)
def _get_translated_form_link_name(app, form_link, language):
    # A form link may target a module or a form; resolve whichever applies.
    if form_link.module_unique_id:
        return _get_translated_module_name(app, form_link.module_unique_id, language)
    return _get_translated_form_name(app, form_link.form_id, language)
# Column order for the "App Summary" export sheet.
APP_SUMMARY_EXPORT_HEADER_NAMES = [
    'app',
    'module',
    'form',
    'display_filter',
    'case_list_filter',
    'case_type',
    'case_actions',
    'filter',
    'module_type',
    'comments',
    'end_of_form_navigation',
    'parent_module',
]
# Row type for the export.  Every column defaults to None so rows can be
# built from keyword args with only the relevant fields (the `defaults`
# parameter replaces the legacy `__new__.__defaults__` mutation).
AppSummaryRow = namedtuple(
    'AppSummaryRow',
    APP_SUMMARY_EXPORT_HEADER_NAMES,
    defaults=(None,) * len(APP_SUMMARY_EXPORT_HEADER_NAMES),
)
class DownloadAppSummaryView(LoginAndDomainMixin, ApplicationViewMixin, View):
    """Export an Excel "App Summary" sheet: one row for the app, then one
    row per module and per form."""
    urlname = 'download_app_summary'
    http_method_names = ['get']

    def get(self, request, domain, app_id):
        language = request.GET.get('lang', 'en')
        headers = [(self.app.name, tuple(APP_SUMMARY_EXPORT_HEADER_NAMES))]
        data = [(self.app.name, [
            AppSummaryRow(
                app=self.app.name,
                comments=self.app.comment,
            )
        ])]
        for module in self.app.get_modules():
            data.append((self.app.name, [self._module_row(module, language)]))
            for form in module.get_forms():
                data.append((self.app.name, [self._form_row(module, form, language)]))
        export_string = io.BytesIO()
        # export_raw writes into export_string; its return value is unused
        # (a stray trailing comma previously made this line a discarded 1-tuple)
        export_raw(tuple(headers), data, export_string, Format.XLS_2007)
        return export_response(
            export_string,
            Format.XLS_2007,
            '{app_name} v.{app_version} - App Summary ({lang})'.format(
                app_name=self.app.name,
                app_version=self.app.version,
                lang=language
            ),
        )

    def _module_row(self, module, language):
        """One AppSummaryRow describing a module."""
        try:
            case_list_filter = module.case_details.short.filter
        except AttributeError:
            case_list_filter = None
        return AppSummaryRow(
            app=self.app.name,
            module=_get_translated_module_name(self.app, module.unique_id, language),
            display_filter=module.module_filter,
            case_type=module.case_type,
            case_list_filter=case_list_filter,
            # NOTE(review): this duplicates the case list filter; presumably it
            # should describe the module's case actions -- confirm intent
            case_actions=module.case_details.short.filter if hasattr(module, 'case_details') else None,
            filter=module.module_filter,
            module_type='advanced' if isinstance(module, AdvancedModule) else 'standard',
            comments=module.comment,
            parent_module=(_get_translated_module_name(self.app, module.root_module_id, language)
                           if module.root_module_id else '')
        )

    def _form_row(self, module, form, language):
        """One AppSummaryRow describing a form within ``module``."""
        post_form_workflow = form.post_form_workflow
        if form.post_form_workflow == WORKFLOW_FORM:
            # Render explicit form links as "link:" followed by one
            # "<target>: <xpath> [<datums>]" line per link.
            post_form_workflow = "link:\n{}".format(
                "\n".join(
                    ["{form}: {xpath} [{datums}]".format(
                        form=_get_translated_form_link_name(self.app, link, language),
                        xpath=link.xpath,
                        datums=", ".join(
                            "{}: {}".format(
                                datum.name, datum.xpath
                            ) for datum in link.datums)
                    ) for link in form.form_links]
                )
            )
        return AppSummaryRow(
            app=self.app.name,
            module=_get_translated_module_name(self.app, module.unique_id, language),
            form=_get_translated_form_name(self.app, form.get_unique_id(), language),
            display_filter=form.form_filter,
            case_type=form.get_case_type(),
            case_actions=self._get_form_actions(form),
            filter=form.form_filter,
            module_type='advanced' if isinstance(module, AdvancedModule) else 'standard',
            comments=form.comment,
            end_of_form_navigation=post_form_workflow,
        )

    def _get_form_actions(self, form):
        """Describe a form's case actions; advanced forms list each action."""
        update_types = {
            'AdvancedOpenCaseAction': 'open',
            'LoadUpdateAction': 'update',
        }
        if isinstance(form, AdvancedForm):
            return "\n".join([
                "{action_type}: {case_type} [{case_tag}]".format(
                    action_type=update_types[type(action).__name__],
                    case_type=action.case_type,
                    case_tag=action.case_tag,
                )
                for action in form.actions.get_all_actions()
            ])
        else:
            return form.get_action_type()
# Column order for each per-form sheet in the "Form Summary" export.
FORM_SUMMARY_EXPORT_HEADER_NAMES = [
    "question_id",
    "label",
    "translations",
    "type",
    "repeat",
    "group",
    "option_labels",
    "option_values",
    "calculate",
    "relevant",
    "constraint",
    "required",
    "comment",
    "default_value",
    "load_properties",
    "save_properties",
]
# Row type for the per-form sheets.  Every column defaults to None (the
# `defaults` parameter replaces the legacy `__new__.__defaults__` mutation).
FormSummaryRow = namedtuple(
    'FormSummaryRow',
    FORM_SUMMARY_EXPORT_HEADER_NAMES,
    defaults=(None,) * len(FORM_SUMMARY_EXPORT_HEADER_NAMES),
)
class DownloadFormSummaryView(LoginAndDomainMixin, ApplicationViewMixin, View):
    """Export an Excel "Form Summary": an "All Forms" index sheet plus one
    sheet of question metadata per form."""
    urlname = 'download_form_summary'
    http_method_names = ['get']

    def get(self, request, domain, app_id):
        language = request.GET.get('lang', 'en')
        modules = list(self.app.get_modules())
        case_meta = self.app.get_case_metadata()
        headers = [(_('All Forms'),
                    ('module_name', 'form_name', 'comment', 'module_display_condition', 'form_display_condition'))]
        headers += [
            (self._get_form_sheet_name(form, language), tuple(FORM_SUMMARY_EXPORT_HEADER_NAMES))
            for module in modules for form in module.get_forms()
        ]
        data = list((
            _('All Forms'),
            self.get_all_forms_row(module, form, language)
        ) for module in modules for form in module.get_forms())
        data += list(
            (self._get_form_sheet_name(form, language), self._get_form_row(form, language, case_meta))
            for module in modules for form in module.get_forms()
        )
        export_string = io.BytesIO()
        # NOTE(review): the trailing comma makes this statement a discarded
        # 1-tuple; harmless (export_raw writes into export_string) but the
        # comma should be removed
        export_raw(tuple(headers), data, export_string, Format.XLS_2007),
        return export_response(
            export_string,
            Format.XLS_2007,
            '{app_name} v.{app_version} - Form Summary ({lang})'.format(
                app_name=self.app.name,
                app_version=self.app.version,
                lang=language
            ),
        )

    def _get_form_row(self, form, language, case_meta):
        """All FormSummaryRows (one per question) for a single form's sheet."""
        form_summary_rows = []
        for question in form.get_questions(
            self.app.langs,
            include_triggers=True,
            include_groups=True,
            include_translations=True
        ):
            question_response = FormQuestionResponse(question)
            form_summary_rows.append(
                FormSummaryRow(
                    question_id=question_response.value,
                    label=_translate_name(question_response.translations, language),
                    translations=question_response.translations,
                    type=question_response.type,
                    repeat=question_response.repeat,
                    group=question_response.group,
                    option_labels="\n".join(
                        [_translate_name(option.translations, language) for option in question_response.options]
                    ),
                    option_values=", ".join([option.value for option in question_response.options]),
                    calculate=question_response.calculate,
                    relevant=question_response.relevant,
                    constraint=question_response.constraint,
                    required="true" if question_response.required else "false",
                    comment=question_response.comment,
                    default_value=question_response.setvalue,
                    # "case_type - property" per case property this question loads
                    load_properties="\n".join(
                        ["{} - {}".format(prop.case_type, prop.property)
                         for prop in case_meta.get_load_properties(form.unique_id, question['value'])]
                    ),
                    # "case_type - property" per case property this question saves
                    save_properties="\n".join(
                        ["{} - {}".format(prop.case_type, prop.property)
                         for prop in case_meta.get_save_properties(form.unique_id, question['value'])]
                    ),
                )
            )
        return tuple(form_summary_rows)

    def _get_form_sheet_name(self, form, language):
        # sheet names are the translated form names
        return _get_translated_form_name(self.app, form.get_unique_id(), language)

    def get_all_forms_row(self, module, form, language):
        """One index-sheet row for a form (wrapped in a 1-tuple of rows)."""
        return ((
            _get_translated_module_name(self.app, module.unique_id, language),
            _get_translated_form_name(self.app, form.get_unique_id(), language),
            form.short_comment,
            module.module_filter,
            form.form_filter,
        ),)
# Column order for each per-case-type sheet in the "Case Summary" export.
CASE_SUMMARY_EXPORT_HEADER_NAMES = [
    'case_property_name',
    'form_id',
    'form_name',
    'load_question_question',
    'load_question_condition',
    'save_question_question',
    'save_question_condition',
    'save_question_calculate',
]
# Row type for the per-case-type sheets; all fields are positional
# (no defaults, unlike AppSummaryRow/FormSummaryRow).
PropertyRow = namedtuple('PropertyRow', CASE_SUMMARY_EXPORT_HEADER_NAMES)
class DownloadCaseSummaryView(ApplicationViewMixin, View):
    """Export an Excel "Case Summary": property and case-type index sheets
    plus one load/save-question sheet per case type."""
    urlname = 'download_case_summary'
    http_method_names = ['get']

    @method_decorator(login_or_api_key)
    def get(self, request, domain, app_id):
        case_metadata = self.app.get_case_metadata()
        language = request.GET.get('lang', 'en')
        headers = [(_('All Case Properties'), ('case_type', 'case_property', 'description')),
                   (_('Case Types'), ('type', 'relationships', 'opened_by', 'closed_by'))]
        headers += list((
            case_type.name,
            tuple(CASE_SUMMARY_EXPORT_HEADER_NAMES)
        )for case_type in case_metadata.case_types)
        data = [(
            _('All Case Properties'),
            self.get_case_property_rows(case_type)
        ) for case_type in case_metadata.case_types]
        data += [self.get_case_type_rows(case_metadata.case_types, language)]
        data += [(
            case_type.name,
            self.get_case_questions_rows(case_type, language)
        ) for case_type in case_metadata.case_types]
        export_string = io.BytesIO()
        # NOTE(review): the trailing comma makes this statement a discarded
        # 1-tuple; harmless (export_raw writes into export_string) but the
        # comma should be removed
        export_raw(tuple(headers), data, export_string, Format.XLS_2007),
        return export_response(
            export_string,
            Format.XLS_2007,
            '{app_name} v.{app_version} - Case Summary ({lang})'.format(
                app_name=self.app.name,
                app_version=self.app.version,
                lang=language
            ),
        )

    def get_case_property_rows(self, case_type):
        """(case type, property name, description) rows for the index sheet."""
        return tuple((case_type.name, prop.name, prop.description) for prop in case_type.properties)

    def get_case_type_rows(self, case_types, language):
        """Build the "Case Types" sheet: relationships plus, per related
        type, aligned columns of the forms that open/close it."""
        rows = []
        form_names = {}
        form_case_types = {}
        for m in self.app.modules:
            for f in m.forms:
                form_names[f.unique_id] = _get_translated_form_name(self.app, f.unique_id, language)
                form_case_types[f.unique_id] = m.case_type
        for case_type in case_types:
            related_case_types = [case_type.name] + case_type.child_types
            opened_by = {}
            closed_by = {}
            for t in related_case_types:
                opened_by[t] = [fid for fid in case_type.opened_by.keys() if t == form_case_types[fid]]
                closed_by[t] = [fid for fid in case_type.closed_by.keys() if t == form_case_types[fid]]
            relationships = case_type.relationships
            # NOTE(review): this mutates case_type.relationships in place
            # (adds a '' self-relationship) -- presumably safe for a one-shot
            # export, but verify nothing else reuses the metadata afterwards
            relationships.update({'': [case_type.name]})
            for relationship, types in relationships.items():
                for type_ in types:
                    # related type with no opener/closer forms: name-only row
                    if relationship and not opened_by[type_] and not closed_by[type_]:
                        rows.append((case_type.name, "[{}] {}".format(relationship, type_)))
                    # pair up opener/closer forms row by row, padding with ''
                    for i in range(max(len(opened_by[type_]), len(closed_by[type_]))):
                        rows.append((
                            case_type.name,
                            "[{}] {}".format(relationship, type_) if relationship else '',
                            form_names[opened_by[type_][i]] if i < len(opened_by[type_]) else '',
                            form_names[closed_by[type_][i]] if i < len(closed_by[type_]) else '',
                        ))
        return (_('Case Types'), rows)

    def get_case_questions_rows(self, case_type, language):
        """All load/save question rows for one case type's sheet."""
        rows = []
        for prop in case_type.properties:
            for form in prop.forms:
                for load_question in form.load_questions:
                    rows.append(self._get_load_question_row(prop, form, language, load_question))
                for save_question in form.save_questions:
                    rows.append(self._get_save_question_row(prop, form, language, save_question))
        return tuple(rows)

    def _get_load_question_row(self, prop, form, language, load_question):
        # load rows fill the load_* columns; save_* columns stay None
        return PropertyRow(
            prop.name,
            form.form_id,
            _get_translated_form_name(self.app, form.form_id, language),
            load_question.question.value,
            "{} {} {}".format(
                load_question.condition.question,
                load_question.condition.operator,
                load_question.condition.answer
            ) if load_question.condition else "",
            None,
            None,
            None,
        )

    def _get_save_question_row(self, prop, form, language, save_question):
        # save rows fill the save_* columns; load_* columns stay None
        return PropertyRow(
            prop.name,
            form.form_id,
            _get_translated_form_name(self.app, form.form_id, language),
            None,
            None,
            save_question.question.value,
            "{} {} {}".format(
                save_question.condition.question,
                save_question.condition.operator,
                save_question.condition.answer
            ) if save_question.condition else "",
            save_question.question.calculate,
        )
| bsd-3-clause | 8600a402cd263685cab42fbf732125dd | 36.364238 | 115 | 0.559864 | 4.063378 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/data_interfaces/interfaces.py | 1 | 9696 | from django.contrib.humanize.templatetags.humanize import naturaltime
from django.urls import reverse
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy, gettext_noop
from memoized import memoized
from corehq.apps.app_manager.const import USERCASE_TYPE
from corehq.apps.es import cases as case_es
from corehq.apps.groups.models import Group
from corehq.apps.locations.permissions import location_safe
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.display import FormDisplay
from corehq.apps.reports.filters.base import BaseSingleOptionFilter
from corehq.apps.reports.generic import GenericReportView
from corehq.apps.reports.standard import ProjectReport
from corehq.apps.reports.standard.cases.basic import CaseListMixin
from corehq.apps.reports.standard.cases.data_sources import CaseDisplayES
from corehq.apps.reports.standard.inspect import SubmitHistoryMixin
from corehq.util.timezones.utils import parse_date
from .dispatcher import EditDataInterfaceDispatcher
class DataInterface(GenericReportView):
    """Base report view for the "Data" section's edit-data interfaces."""
    # overriding properties from GenericReportView
    section_name = gettext_noop("Data")
    base_template = "reports/standard/base_template.html"
    asynchronous = True
    dispatcher = EditDataInterfaceDispatcher
    exportable = False

    @property
    def default_report_url(self):
        """URL of the default data interface for the current project."""
        project_name = self.request.project.name
        return reverse('data_interfaces_default', args=[project_name])
@location_safe
class CaseReassignmentInterface(CaseListMixin, DataInterface):
    """Data interface listing cases so they can be bulk-reassigned to a new owner."""
    name = gettext_noop("Reassign Cases")
    slug = "reassign_cases"
    report_template_path = 'data_interfaces/interfaces/case_management.html'
    @property
    @memoized
    def es_results(self):
        # Raw Elasticsearch results for the current filter selection.
        query = self._build_query()
        # FB 183468: Don't allow user cases to be reassigned
        query = query.NOT(case_es.case_type(USERCASE_TYPE))
        return query.run().raw
    @property
    @memoized
    def all_case_sharing_groups(self):
        # All case-sharing groups in the domain (candidate new owners).
        return Group.get_case_sharing_groups(self.domain)
    def accessible_case_sharing_locations(self, user):
        # Case-sharing locations that ``user`` is allowed to access.
        return Group.get_case_sharing_accessible_locations(self.domain, user)
    @property
    def headers(self):
        # First column holds the select-all/none checkbox controls; the HTML
        # is static markup, hence the mark_safe / nosec annotation.
        headers = DataTablesHeader(
            DataTablesColumn(mark_safe(  # nosec: no user input
                'Select <a href="#" class="select-all btn btn-xs btn-default">all'
                '</a> <a href="#" class="select-none btn btn-xs btn-default">'
                'none</a>'), sortable=False, span=2),
            DataTablesColumn(_("Case Name"), span=3, prop_name="name.exact"),
            DataTablesColumn(_("Case Type"), span=2, prop_name="type.exact"),
            DataTablesColumn(_("Owner"), span=2, prop_name="owner_display", sortable=False),
            DataTablesColumn(_("Last Modified"), span=3, prop_name="modified_on"),
        )
        return headers
    @property
    def rows(self):
        # Yields one row per ES case hit; the leading cell is the selection
        # checkbox carrying the case id and current owner for the JS layer.
        checkbox_format = ('<input type="checkbox" class="selected-commcare-case"'
                           ' data-caseid="{case_id}" data-owner="{owner}" data-ownertype="{owner_type}" />')
        for row in self.es_results['hits'].get('hits', []):
            es_case = self.get_case(row)
            display = CaseDisplayES(es_case, self.timezone, self.individual)
            yield [
                format_html(
                    checkbox_format,
                    case_id=es_case['_id'],
                    owner=display.owner_id,
                    owner_type=display.owner_type),
                display.case_link,
                display.case_type,
                display.owner_display,
                naturaltime(parse_date(es_case['modified_on'])),
            ]
class FormManagementMode(object):
    """
    Simple container for bulk form archive/restore mode and messages
    """
    ARCHIVE_MODE = "archive"
    RESTORE_MODE = "restore"

    filter_options = [(ARCHIVE_MODE, gettext_lazy('Normal Forms')),
                      (RESTORE_MODE, gettext_lazy('Archived Forms'))]

    def __init__(self, mode, validate=False):
        # Any value other than "restore" (including None) selects archive mode.
        if mode == self.RESTORE_MODE:
            self._configure_restore_mode()
        else:
            self._configure_archive_mode()
        if validate:
            self.validate_mode(mode)

    def _configure_restore_mode(self):
        # UI strings for un-archiving previously archived forms.
        self.mode_name = self.RESTORE_MODE
        self.button_text = _("Restore selected Forms")
        self.button_class = _("btn-primary")
        self.status_page_title = _("Restore Forms Status")
        self.progress_text = _("Restoring your forms, this may take some time...")
        self.complete_short = _("Restore complete!")
        self.success_text = _("Successfully restored ")
        self.fail_text = _("Restore Failed. Details:")
        self.error_text = _("Problem restoring your forms! Please try again or report an issue")
        self.help_message = _("To archive back any forms, use the Manage Forms report and "
                              "filter to Normal forms")

    def _configure_archive_mode(self):
        # UI strings for archiving normal forms (the default mode).
        self.mode_name = self.ARCHIVE_MODE
        self.button_text = _("Archive selected forms")
        self.button_class = _("btn-danger")
        self.status_page_title = _("Archive Forms Status")
        self.progress_text = _("Archiving your forms, this may take some time...")
        self.complete_short = _("Archive complete!")
        self.success_text = _("Successfully archived ")
        self.fail_text = _("Archive Failed. Details:")
        self.error_text = _("Problem archiving your forms! Please try again or report an issue")
        self.help_message = _("To restore any archived forms, use the Manage Forms report and "
                              "filter to Archived forms")

    @classmethod
    def validate_mode(cls, mode):
        """Return ``mode`` unchanged, or raise if it is not a known mode."""
        if mode in (cls.ARCHIVE_MODE, cls.RESTORE_MODE):
            return mode
        raise Exception("mode should be archive or restore")

    def is_archive_mode(self):
        """True when this instance represents the archive (default) mode."""
        return self.ARCHIVE_MODE == self.mode_name
class ArchiveOrNormalFormFilter(BaseSingleOptionFilter):
    """Report filter toggling the list between normal and archived forms."""
    slug = 'archive_or_restore'
    placeholder = ''
    default_text = None
    label = gettext_lazy('Archived/Restored')
    help_text = mark_safe(  # nosec: no user input
        "Archived forms are removed from reports and exports and "
        "any case changes they make are reversed. Archiving forms "
        "can remove accidental form submissions. Use this report "
        "to bulk archive forms or restore a set of archived forms. "
        "<a href='https://confluence.dimagi.com/display/commcarepublic/Archive+Forms'>"
        "Learn more</a>")
    help_style_bubble = True
    @property
    def options(self):
        # (value, label) pairs defined on FormManagementMode.
        return FormManagementMode.filter_options
    @property
    def selected(self):
        # Normalize the GET param via FormManagementMode (defaults to archive).
        return FormManagementMode(self.request.GET.get(self.slug)).mode_name
@location_safe
class BulkFormManagementInterface(SubmitHistoryMixin, DataInterface, ProjectReport):
    """Data interface for bulk archiving or restoring form submissions."""
    name = gettext_noop("Manage Forms")
    slug = "bulk_archive_forms"
    report_template_path = 'data_interfaces/interfaces/archive_forms.html'
    def __init__(self, request, **kwargs):
        super(BulkFormManagementInterface, self).__init__(request, **kwargs)
        # Expose the archive/restore toggle as an extra report filter and
        # resolve the requested mode from the query string.
        self.fields = self.fields + ['corehq.apps.data_interfaces.interfaces.ArchiveOrNormalFormFilter']
        self.mode = FormManagementMode(request.GET.get('archive_or_restore'))
    @property
    def template_context(self):
        context = super(BulkFormManagementInterface, self).template_context
        context.update({
            "form_query_string": self.request.GET.urlencode(),
            "mode": self.mode,
            "total_xForms": int(self.es_query_result.total),
        })
        return context
    @property
    def es_query(self):
        # Restore mode lists only archived forms; otherwise the mixin's
        # default (normal forms) query is used unchanged.
        query = super(BulkFormManagementInterface, self).es_query
        if self.mode.mode_name == self.mode.RESTORE_MODE:
            return query.only_archived()
        else:
            return query
    @property
    def headers(self):
        # First column holds select-all/none controls (static markup).
        h = [
            DataTablesColumn(
                mark_safe(  # nosec: no user input
                    """
                    Select <a class="select-visible btn btn-xs btn-default">all</a>
                    <a class="select-none btn btn-xs btn-default">none</a>
                    """
                ),
                sortable=False, span=3
            ),
            DataTablesColumn(_("View Form"), span=2),
            DataTablesColumn(_("Username"), prop_name='form.meta.username', span=3),
            DataTablesColumn(
                _("Submission Time") if self.by_submission_time
                else _("Completion Time"),
                prop_name=self.time_field,
                span=3,
            ),
            DataTablesColumn(_("Form"), prop_name='form.@name'),
        ]
        return DataTablesHeader(*h)
    @property
    def rows(self):
        # One row per ES form hit; the leading cell is the selection checkbox
        # carrying the form id for the bulk-action JS.
        checkbox_format = '<input type="checkbox" class="xform-checkbox" value="{}" name="xform_ids"/>'
        for form in self.es_query_result.hits:
            display = FormDisplay(form, self)
            yield [
                format_html(checkbox_format, form["_id"]),
                display.form_data_link,
                display.username,
                display.submission_or_completion_time,
                display.readable_form_name,
            ]
    @property
    def form_ids_response(self):
        # returns a list of form_ids
        # this is called using ReportDispatcher.dispatch(render_as='form_ids', ***) in
        # the bulk_form_management_async task
        return self.es_query.get_ids()
| bsd-3-clause | 23d92cbd24313bbef680afe4a5704cf1 | 39.739496 | 104 | 0.627991 | 4.172117 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/token_auth/auth/booking.py | 1 | 6958 |
from future import standard_library
standard_library.install_aliases()
from urllib.parse import urlencode
from builtins import chr
import base64
import hashlib
import hmac
import logging
import re
from datetime import timedelta
import string
from datetime import datetime
from django.utils.dateparse import parse_datetime
from django.utils import timezone
from Crypto import Random
from Crypto.Cipher import AES
from bluebottle.token_auth.models import CheckedToken
from bluebottle.token_auth.auth.base import BaseTokenAuthentication
from bluebottle.token_auth.exceptions import TokenAuthenticationError
from bluebottle.token_auth.utils import get_settings
logger = logging.getLogger(__name__)
def _encode_message(message):
    """
    Helper method which returns an encoded version of the
    message passed as an argument.

    It returns a tuple of two elements:

    1. A byte string formed by the initialization vector and the AES-128
       CBC-encrypted message (padded PKCS#7-style to the block size).
    2. The HMAC-SHA1 hash object of that byte string.
    """
    settings = get_settings()
    aes_key = settings['aes_key'].encode('utf-8')
    hmac_key = settings['hmac_key'].encode('utf-8')

    # AES.encrypt requires bytes on Python 3; the previous str-based padding
    # (built with chr()) raised a TypeError there. Encode once up front.
    if isinstance(message, str):
        message = message.encode('utf-8')

    # PKCS#7-style padding: append N bytes each of value N, where N brings
    # the length up to a multiple of the AES block size (always 1..16).
    pad_length = AES.block_size - len(message) % AES.block_size
    padded_message = message + bytes([pad_length]) * pad_length

    init_vector = Random.new().read(AES.block_size)
    cipher = AES.new(aes_key, AES.MODE_CBC, init_vector)
    aes_message = init_vector + cipher.encrypt(padded_message)
    hmac_digest = hmac.new(hmac_key, aes_message, hashlib.sha1)
    return aes_message, hmac_digest
def generate_token(email, username, first_name, last_name):
    """Build a URL-safe Base64 login token embedding the user's details."""
    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # Pipe-delimited key=value payload, as expected by TokenAuthentication.
    message = '|'.join([
        'time={}'.format(timestamp),
        'username={}'.format(username),
        'name={} {}'.format(first_name, last_name),
        'email={}'.format(email),
    ])
    aes_message, hmac_digest = _encode_message(message)
    return base64.urlsafe_b64encode(aes_message + hmac_digest.digest())
class TokenAuthentication(BaseTokenAuthentication):
    """
    This authentication backend expects a token, encoded in URL-safe Base64, to
    be received from the user to be authenticated. The token must be built like
    this:

    - The first 16 bytes are the AES initialization vector used to decrypt
      the message.
    - The last 20 bytes are the HMAC-SHA1 signature of the message AND the AES
      key, to provide an extra safety layer on the process.
    - The rest of the token, between the first 16 bytes and the latest 20, is
      the encrypted message to be read.

    The backend performs the next operations over a received token in order to
    authenticate the user who is sending it:

    1. Checks that the token was not used previously, to prevent replay.
    2. Decodes it through Base64.
    3. Checks the HMAC-SHA1 signature of the message.
    4. Decrypts the AES-encoded message to read the data.
    5. Read the timestamp included in the message to check if the token already
       expired or if its finally valid.
    """
    def check_hmac_signature(self, message):
        """
        Checks the HMAC-SHA1 signature of the message.

        The last 20 bytes of ``message`` are the signature; everything before
        them is the signed data.
        """
        data = message[:-20]
        checksum = message[-20:]
        # NOTE(review): a constant-time comparison (hmac.compare_digest) would
        # be preferable here to avoid timing side channels — confirm and fix.
        hmac_data = hmac.new(bytes(self.settings['hmac_key'].encode('utf-8')), bytes(data), hashlib.sha1)
        return True if hmac_data.digest() == checksum else False
    def get_login_data(self, data):
        """
        Obtains the data from the decoded message. Returns a Python tuple
        of 4 elements containing the login data. The elements, from zero
        to three, are:

        0. Timestamp.
        1. Username.
        2. Complete name.
        3. Email.

        Raises AttributeError (via ``.groups()`` on None) if the message does
        not match the expected ``time=...|username=...|name=...|email=...``
        layout; the caller translates that into an auth error.
        """
        expression = r'(.*?)\|'
        pattern = r'time={0}username={0}name={0}email=(.*)'.format(expression)
        login_data = re.search(pattern, data)
        return login_data.groups()
    def check_timestamp(self, data):
        # Reject tokens older than the configured expiration window.
        timestamp = datetime.strptime(data['timestamp'], '%Y-%m-%d %H:%M:%S')
        time_limit = datetime.now() - \
            timedelta(seconds=self.settings['token_expiration'])
        if timestamp < time_limit:
            raise TokenAuthenticationError('Authentication token expired')
    def check_token_used(self):
        # Replay protection: every accepted token is recorded as a
        # CheckedToken, so seeing it again means it was already consumed.
        if not self.args.get('token'):
            raise TokenAuthenticationError(value='No token provided')
        try:
            CheckedToken.objects.get(token=self.args['token'])
            raise TokenAuthenticationError(
                value='Token was already used and is not valid')
        except CheckedToken.DoesNotExist:
            # Token was not used previously. Continue with auth process.
            pass
    def decrypt_message(self):
        """
        Decrypts the AES encoded message and returns the parsed login data
        as a dict (timestamp, remote_id, email, first/last name, username).
        """
        token = bytes(self.args['token'].encode('utf-8'))
        message = base64.urlsafe_b64decode(token)
        # Check that the message is valid (HMAC-SHA1 checking).
        if not self.check_hmac_signature(message):
            raise TokenAuthenticationError('HMAC authentication failed')
        # Layout: 16-byte IV | AES ciphertext | 20-byte HMAC signature.
        init_vector = message[:16]
        enc_message = message[16:-20]
        aes = AES.new(bytes(self.settings['aes_key'].encode('utf-8')), AES.MODE_CBC, init_vector)
        message = aes.decrypt(enc_message).decode('utf-8')
        # Get the login data in an easy-to-use tuple.
        try:
            login_data = self.get_login_data(message)
        except AttributeError:
            # Regex failed, so data was not valid.
            raise TokenAuthenticationError('Message does not contain valid login data')
        # First whitespace-separated word is the first name; the rest is the
        # last name.
        name = login_data[2].strip()
        first_name = name.split(' ').pop(0)
        parts = name.split(' ')
        parts.pop(0)
        last_name = " ".join(parts)
        email = login_data[3].strip()
        # Strip any non-printable characters left over from padding/decoding.
        email = ''.join(x for x in email if x in string.printable)
        data = {
            'timestamp': login_data[0],
            'remote_id': email,
            'email': email,
            'first_name': first_name,
            'last_name': last_name,
            'username': email
        }
        return data
    def get_metadata(self):
        # Minimal SSO metadata snippet exposing the login URL.
        metadata = "<sso-url>{0}</sso-url>".format(self.sso_url())
        return metadata
    def sso_url(self, target_url=None):
        # Configured SSO endpoint, optionally carrying a post-login redirect.
        url = self.settings['sso_url']
        if target_url:
            url += '?{}'.format(urlencode({'url': target_url.encode('utf-8')}))
        return url
    @property
    def target_url(self):
        # Post-login redirect supplied by the caller.
        return self.args['link']
    def authenticate_request(self):
        # Full pipeline: replay check -> decrypt/parse -> expiry check.
        self.check_token_used()
        data = self.decrypt_message()
        self.check_timestamp(data)
        return data
    def finalize(self, user, data):
        # Record the token as consumed so it cannot be replayed.
        timestamp = timezone.make_aware(parse_datetime(data['timestamp']))
        CheckedToken.objects.create(token=self.args['token'], user=user,
                                    timestamp=timestamp).save()
| bsd-3-clause | 7e2b2a982c1d9be10773b47dcd31f5cc | 34.141414 | 105 | 0.64142 | 4.04065 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/fixtures/tasks.py | 1 | 1969 | import datetime
from django.conf import settings
from django.template.loader import render_to_string
from soil import DownloadBase
from corehq.apps.celery import task
from corehq.apps.fixtures.download import prepare_fixture_download
from corehq.apps.fixtures.upload import upload_fixture_file
from corehq.apps.hqwebapp.tasks import send_html_email_async
@task
def fixture_upload_async(domain, download_id, replace, skip_orm, user_email=None):
    """Celery task: process a fixture upload and optionally email a summary.

    Progress is reported through the soil DownloadBase associated with
    ``download_id``; the result dict mirrors what the UI polls for.
    """
    this_task = fixture_upload_async
    DownloadBase.set_progress(this_task, 0, 100)
    download_ref = DownloadBase.get(download_id)

    started_at = datetime.datetime.now()
    result = upload_fixture_file(
        domain, download_ref.get_filename(), replace, this_task, skip_orm)
    finished_at = datetime.datetime.now()
    DownloadBase.set_progress(this_task, 100, 100)

    summary = {
        'success': result.success,
        'messages': result.messages,
        'errors': result.errors,
        'number_of_fixtures': result.number_of_fixtures
    }
    if user_email:
        send_upload_fixture_complete_email(
            user_email, domain, started_at, finished_at, summary)
    return {'messages': summary}
def send_upload_fixture_complete_email(email, domain, time_start, time_end, messages):
    """Queue the "fixture upload complete" notification email to ``email``."""
    template_context = {
        "username": email,
        "domain": domain,
        "time_start": time_start,
        "time_end": time_end,
        "messages": messages
    }
    html_body = render_to_string('fixtures/upload_complete.html', template_context)
    text_body = render_to_string('fixtures/upload_complete.txt', template_context)
    send_html_email_async.delay(
        "Your fixture upload is complete!",
        email,
        html_body,
        text_body,
        email_from=settings.DEFAULT_FROM_EMAIL
    )
@task
def async_fixture_download(table_ids, domain, download_id, owner_id):
    """Celery task: build a fixture download file for the given tables."""
    this_task = async_fixture_download
    DownloadBase.set_progress(this_task, 0, 100)
    prepare_fixture_download(table_ids, domain, this_task, download_id, owner_id)
    DownloadBase.set_progress(this_task, 100, 100)
| bsd-3-clause | f5ed9e9f9babf24824cd9ae677427284 | 32.372881 | 94 | 0.691214 | 3.653061 | false | false | false | false |
dimagi/commcare-hq | corehq/ex-submodules/pillowtop/tests/test_form_change_providers.py | 1 | 1582 | import itertools
import uuid
from django.test import TestCase
from corehq.form_processor.backends.sql.dbaccessors import doc_type_to_state
from corehq.form_processor.tests.utils import create_form_for_test, FormProcessorTestUtils
from pillowtop.reindexer.change_providers.form import SqlDomainXFormChangeProvider
class TestSqlDomainFormChangeProvider(TestCase):
    """Tests for SqlDomainXFormChangeProvider over SQL-backed form data."""
    @staticmethod
    def _create_form(domain, doc_type):
        # Create a form in the state matching ``doc_type`` and return its id.
        form = create_form_for_test(domain, state=doc_type_to_state[doc_type])
        return form.form_id
    @classmethod
    def setUpClass(cls):
        super(TestSqlDomainFormChangeProvider, cls).setUpClass()
        # Three domains, each seeded with three normal and three archived forms.
        cls.domains = [uuid.uuid4().hex for i in range(3)]
        cls.form_ids = {
            (domain, doc_type): [cls._create_form(domain, doc_type) for i in range(3)]
            for domain in cls.domains
            for doc_type in ['XFormInstance', 'XFormArchived']
        }
    @classmethod
    def tearDownClass(cls):
        for domain in cls.domains:
            FormProcessorTestUtils.delete_all_sql_forms(domain)
        super(TestSqlDomainFormChangeProvider, cls).tearDownClass()
    def test_change_provider(self):
        # chunk_size=2 forces multiple batches; all created forms (both
        # normal and archived) should be yielded exactly once.
        provider = SqlDomainXFormChangeProvider(self.domains, chunk_size=2)
        doc_ids = {change.id for change in provider.iter_all_changes()}
        self.assertEqual(doc_ids, set(itertools.chain(*list(self.form_ids.values()))))
    def test_change_provider_empty(self):
        # No domains -> provider yields nothing.
        provider = SqlDomainXFormChangeProvider([])
        self.assertEqual([], [change for change in provider.iter_all_changes()])
| bsd-3-clause | 34885a2a7f35651003fe39476835bca0 | 38.55 | 90 | 0.700379 | 3.775656 | false | true | false | false |
dimagi/commcare-hq | corehq/apps/user_importer/importer.py | 1 | 36489 | import logging
import string
import random
from collections import defaultdict, namedtuple
from datetime import datetime
from django.db import DEFAULT_DB_ALIAS
from corehq.apps.enterprise.models import EnterpriseMobileWorkerSettings
from corehq.apps.users.util import generate_mobile_username
from dimagi.utils.logging import notify_exception
from django.utils.translation import gettext as _
from couchdbkit.exceptions import (
BulkSaveError,
MultipleResultsFound,
ResourceNotFound,
ResourceConflict
)
from django.core.exceptions import ValidationError
from corehq import privileges
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.commtrack.util import get_supply_point_and_location
from corehq.apps.custom_data_fields.models import (
CustomDataFieldsDefinition,
)
from corehq.apps.domain.models import Domain
from corehq.apps.groups.models import Group
from corehq.apps.locations.models import SQLLocation
from corehq.apps.user_importer.exceptions import UserUploadError
from corehq.apps.user_importer.helpers import (
spec_value_to_boolean_or_none,
)
from corehq.apps.user_importer.validation import (
get_user_import_validators,
is_password,
)
from corehq.apps.users.audit.change_messages import UserChangeMessage
from corehq.apps.users.account_confirmation import (
send_account_confirmation_if_necessary,
send_account_confirmation_sms_if_necessary,
)
from corehq.apps.users.models import (
CommCareUser,
CouchUser,
Invitation,
UserRole,
InvitationStatus
)
from corehq.const import USER_CHANGE_VIA_BULK_IMPORTER
from corehq.toggles import DOMAIN_PERMISSIONS_MIRROR
from corehq.apps.sms.util import validate_phone_number
required_headers = set(['username'])
web_required_headers = set(['username', 'role'])
allowed_headers = set([
'data', 'email', 'group', 'language', 'name', 'password', 'phone-number',
'uncategorized_data', 'user_id', 'is_active', 'is_account_confirmed', 'send_confirmation_email',
'location_code', 'role', 'user_profile',
'User IMEIs (read only)', 'registered_on (read only)', 'last_submission (read only)',
'last_sync (read only)', 'web_user', 'remove_web_user', 'remove', 'last_access_date (read only)',
'last_login (read only)', 'last_name', 'status', 'first_name',
'send_confirmation_sms',
]) | required_headers
old_headers = {
# 'old_header_name': 'new_header_name'
'location-sms-code': 'location_code'
}
def check_headers(user_specs, domain, is_web_upload=False):
    """Validate the column headers of a user-upload worksheet.

    Collects warnings for deprecated headers, then raises a single
    UserUploadError listing all missing required and illegal columns.
    ``domain`` and ``is_web_upload`` control which optional columns
    (``domain``, ``deactivate_after``) are permitted.
    """
    messages = []
    headers = set(user_specs.fieldnames)

    # Backwards warnings
    for (old_name, new_name) in old_headers.items():
        if old_name in headers:
            messages.append(
                _("The column header '{old_name}' is deprecated, please use '{new_name}' instead.").format(
                    old_name=old_name, new_name=new_name
                ))
            headers.discard(old_name)

    # Work on a copy: mutating the module-level `allowed_headers` set here
    # would leak domain-specific columns into later calls for other domains.
    allowed = set(allowed_headers)
    if DOMAIN_PERMISSIONS_MIRROR.enabled(domain):
        allowed.add('domain')
    if not is_web_upload and EnterpriseMobileWorkerSettings.is_domain_using_custom_deactivation(domain):
        allowed.add('deactivate_after')

    illegal_headers = headers - allowed
    if is_web_upload:
        missing_headers = web_required_headers - headers
    else:
        missing_headers = required_headers - headers

    for header_set, label in (missing_headers, 'required'), (illegal_headers, 'illegal'):
        if header_set:
            messages.append(_('The following are {label} column headers: {headers}.').format(
                label=label, headers=', '.join(header_set)))
    if messages:
        raise UserUploadError('\n'.join(messages))
class GroupMemoizer(object):
    """
    In-memory cache of a domain's Groups, indexed by id, name, and member
    user id, with dirty-tracking for bulk saves.

    If you use this to get a group, do not set group.name directly;
    use group_memoizer.rename_group(group, name) instead.
    """
    def __init__(self, domain):
        self.groups_by_name = {}
        self.groups_by_id = {}
        self.groups = set()
        self.updated_groups = set()
        self.domain = domain
        self.groups_by_user_id = defaultdict(set)
        self.loaded = False
    def load_all(self):
        # Populate the caches from the database once; subsequent calls no-op.
        if not self.loaded:
            for group in Group.by_domain(self.domain):
                self.add_group(group)
            self.loaded = True
    def add_group(self, new_group):
        # todo
        # this has the possibility of missing two rows one with id one with name
        # that actually refer to the same group
        # and overwriting one with the other
        assert new_group.name
        if new_group.get_id:
            self.groups_by_id[new_group.get_id] = new_group
            for user_id in new_group.users:
                self.groups_by_user_id[user_id].add(new_group.get_id)
        self.groups_by_name[new_group.name] = new_group
        self.groups.add(new_group)
    def by_name(self, group_name):
        # Lazily fetch by name; a miss is cached as None to avoid re-querying.
        if group_name not in self.groups_by_name:
            group = Group.by_name(self.domain, group_name)
            if not group:
                self.groups_by_name[group_name] = None
                return None
            self.add_group(group)
        return self.groups_by_name[group_name]
    def by_user_id(self, user_id):
        # All cached groups that the given user belongs to.
        group_ids = self.groups_by_user_id.get(user_id)
        if not group_ids:
            return []
        return [
            self.get(group_id) for group_id in group_ids
        ]
    def get(self, group_id):
        # Lazily fetch by id; groups from other domains are treated as missing.
        if group_id not in self.groups_by_id:
            group = Group.get(group_id)
            if group.domain != self.domain:
                raise ResourceNotFound()
            self.add_group(group)
        return self.groups_by_id[group_id]
    def create(self, domain, name):
        # Create a new (unsaved) group and register it in the caches.
        group = Group(domain=domain, name=name)
        self.add_group(group)
        return group
    def rename_group(self, group, name):
        # This isn't always true, you can rename A => B and then B => C,
        # and what was A will now be called B when you try to change
        # what was B to be called C. That's fine, but you don't want to
        # delete someone else's entry
        if self.groups_by_name.get(group.name) is group:
            del self.groups_by_name[group.name]
        group.name = name
        self.add_group(group)
    def group_updated(self, group_id):
        # Mark a group dirty so save_updated() will persist it.
        self.updated_groups.add(group_id)
    def save_updated(self):
        # Persist only the groups explicitly marked via group_updated().
        updated = [self.groups_by_id[_id] for _id in self.updated_groups]
        Group.bulk_save(updated)
        self.updated_groups.clear()
    def save_all(self):
        # Persist every cached group, dirty or not.
        Group.bulk_save(self.groups)
class BulkCacheBase(object):
    """Per-domain memoization of key lookups; subclasses supply lookup()."""

    def __init__(self, domain):
        self.domain = domain
        self.cache = {}

    def get(self, key):
        """Return the value for ``key``, computing it via lookup() at most once.

        Falsy keys short-circuit to None without touching the cache.
        """
        if not key:
            return None
        try:
            return self.cache[key]
        except KeyError:
            value = self.lookup(key)
            self.cache[key] = value
            return value

    def lookup(self, key):
        # base classes must implement this themselves
        raise NotImplementedError
class SiteCodeToSupplyPointCache(BulkCacheBase):
    """
    Cache the lookup of a supply point object from
    the site code used in upload.
    """
    def lookup(self, site_code):
        # Resolve the supply-point case for this site code in the domain.
        supply_point = get_supply_point_and_location(self.domain, site_code)
        return supply_point.case
class SiteCodeToLocationCache(BulkCacheBase):
    """Cache site-code -> SQLLocation lookups for a domain."""

    def __init__(self, domain):
        # Names of the domain's non-administrative location types.
        domain_obj = Domain.get_by_name(domain)
        self.non_admin_types = [
            loc_type.name
            for loc_type in domain_obj.location_types
            if not loc_type.administrative
        ]
        super(SiteCodeToLocationCache, self).__init__(domain)

    def lookup(self, site_code):
        """
        Note that this can raise SQLLocation.DoesNotExist if the location with the
        given site code is not found.
        """
        return SQLLocation.objects.using(DEFAULT_DB_ALIAS).get(
            domain=self.domain,
            site_code__iexact=site_code,
        )
def create_or_update_groups(domain, group_specs):
    """Create or update Groups for ``domain`` from uploaded spreadsheet rows.

    Each row may carry an ``id`` (update) and/or ``name`` (lookup or create),
    plus ``case-sharing``, ``reporting``, and ``data`` columns. Returns a
    tuple of (GroupMemoizer holding the unsaved groups, log dict with an
    'errors' list of human-readable problems).
    """
    log = {"errors": []}
    group_memoizer = GroupMemoizer(domain)
    group_memoizer.load_all()
    group_names = set()
    for row in group_specs:
        group_id = row.get('id')
        group_name = str(row.get('name') or '')
        case_sharing = row.get('case-sharing')
        reporting = row.get('reporting')
        data = row.get('data')
        # check that group_names are unique
        if group_name in group_names:
            log['errors'].append(
                'Your spreadsheet has multiple groups called "%s" and only the first was processed' % group_name
            )
            continue
        else:
            group_names.add(group_name)
        # check that there's a group_id or a group_name
        if not group_id and not group_name:
            log['errors'].append('Your spreadsheet has a group with no name or id and it has been ignored')
            continue
        try:
            # id takes precedence; otherwise look up by name, creating the
            # group if it doesn't exist yet.
            if group_id:
                group = group_memoizer.get(group_id)
            else:
                group = group_memoizer.by_name(group_name)
                if not group:
                    group = group_memoizer.create(domain=domain, name=group_name)
        except ResourceNotFound:
            log["errors"].append('There are no groups on CommCare HQ with id "%s"' % group_id)
        except MultipleResultsFound:
            log["errors"].append("There are multiple groups on CommCare HQ named: %s" % group_name)
        else:
            # Apply the row's attributes; groups are saved by the caller.
            if group_name:
                group_memoizer.rename_group(group, group_name)
            group.case_sharing = case_sharing
            group.reporting = reporting
            group.metadata = data
    return group_memoizer, log
def get_location_from_site_code(site_code, location_cache):
    """Resolve a spreadsheet site code to a location via ``location_cache``.

    Raises UserUploadError for unusable code types or unknown locations.
    """
    if isinstance(site_code, str):
        normalized_code = site_code.lower()
    elif isinstance(site_code, int):
        normalized_code = str(site_code)
    else:
        raise UserUploadError(
            _("Unexpected format received for site code '%(site_code)s'") %
            {'site_code': site_code}
        )
    try:
        return location_cache.get(normalized_code)
    except SQLLocation.DoesNotExist:
        raise UserUploadError(
            _("Could not find organization with site code '%(site_code)s'") %
            {'site_code': normalized_code}
        )
# Per-domain bundle of precomputed import helpers (validators, caches, and
# name->object maps) shared by all upload rows targeting the same domain.
DomainInfo = namedtuple('DomainInfo', [
    'validators', 'can_assign_locations', 'location_cache',
    'roles_by_name', 'profiles_by_name', 'profile_name_by_id', 'group_memoizer'
])
def create_or_update_web_user_invite(email, domain, role_qualified_id, upload_user, location_id,
                                     user_change_logger=None, send_email=True):
    """Upsert a pending web-user Invitation; on creation, optionally email and log."""
    invite_defaults = {
        'invited_by': upload_user.user_id,
        'invited_on': datetime.utcnow(),
        'supply_point': location_id,
        'role': role_qualified_id
    }
    invite, created = Invitation.objects.update_or_create(
        email=email,
        domain=domain,
        is_accepted=False,
        defaults=invite_defaults,
    )
    if created:
        if send_email:
            invite.send_activation_email()
        if user_change_logger:
            user_change_logger.add_info(UserChangeMessage.invited_to_domain(domain))
def find_location_id(location_codes, location_cache):
    """Map each site code to its location_id, raising for unknown codes."""
    return [
        get_location_from_site_code(code, location_cache).location_id
        for code in location_codes
    ]
def check_modified_user_loc(location_ids, loc_id, assigned_loc_ids):
    """Compare a user's new location assignment with the current one.

    Returns ``(locations_updated, primary_location_removed)``: the first flag
    is True when the assigned set changed at all; the second when the current
    primary location ``loc_id`` is no longer among the new locations.
    """
    locations_updated = set(location_ids) != set(assigned_loc_ids)
    primary_location_removed = bool(loc_id) and (
        not location_ids or loc_id not in location_ids
    )
    return locations_updated, primary_location_removed
def get_domain_info(
    domain,
    upload_domain,
    user_specs,
    domain_info_by_domain,
    upload_user=None,
    group_memoizer=None,
    is_web_upload=False
):
    """Build (and memoize in ``domain_info_by_domain``) the DomainInfo bundle
    of validators, caches, and name->object maps used to import users into
    ``domain``. ``upload_domain`` is the domain the upload was initiated from.
    """
    from corehq.apps.users.views.mobile.custom_data_fields import UserFieldsView
    from corehq.apps.users.views.utils import get_editable_role_choices
    # Return the cached bundle if this domain was already processed.
    domain_info = domain_info_by_domain.get(domain)
    if domain_info:
        return domain_info
    # Reuse the caller's group memoizer only for the upload domain itself.
    if domain == upload_domain:
        domain_group_memoizer = group_memoizer or GroupMemoizer(domain)
    else:
        domain_group_memoizer = GroupMemoizer(domain)
    domain_group_memoizer.load_all()
    can_assign_locations = domain_has_privilege(domain, privileges.LOCATIONS)
    location_cache = None
    if can_assign_locations:
        location_cache = SiteCodeToLocationCache(domain)
    domain_obj = Domain.get_by_name(domain)
    if domain_obj is None:
        raise UserUploadError(_("Domain cannot be set to '{domain}'".format(domain=domain)))
    allowed_group_names = [group.name for group in domain_group_memoizer.groups]
    profiles_by_name = {}
    profile_name_by_id = {}
    # Only validate the rows that actually target this domain.
    domain_user_specs = [spec for spec in user_specs if spec.get('domain', upload_domain) == domain]
    if is_web_upload:
        # Web users: roles limited to what the uploading user may assign.
        roles_by_name = {role[1]: role[0] for role in get_editable_role_choices(domain, upload_user,
                                                                                allow_admin_role=True)}
        validators = get_user_import_validators(
            domain_obj,
            domain_user_specs,
            True,
            allowed_roles=list(roles_by_name),
            upload_domain=upload_domain,
        )
    else:
        # Mobile workers: all domain roles plus custom-data profiles.
        roles_by_name = {role.name: role.get_qualified_id() for role in UserRole.objects.get_by_domain(domain)}
        definition = CustomDataFieldsDefinition.get(domain, UserFieldsView.field_type)
        if definition:
            profiles = definition.get_profiles()
            profiles_by_name = {
                profile.name: profile
                for profile in profiles
            }
            profile_name_by_id = {
                profile.pk: profile.name
                for profile in profiles
            }
        validators = get_user_import_validators(
            domain_obj,
            domain_user_specs,
            False,
            allowed_group_names,
            list(roles_by_name),
            list(profiles_by_name),
            upload_domain
        )
    domain_info = DomainInfo(
        validators,
        can_assign_locations,
        location_cache,
        roles_by_name,
        profiles_by_name,
        profile_name_by_id,
        domain_group_memoizer
    )
    domain_info_by_domain[domain] = domain_info
    return domain_info
def format_location_codes(location_codes):
    """Normalize a location-code cell to a list, dropping empty entries.

    A truthy scalar is wrapped in a one-element list; None passes through.
    """
    if location_codes is None:
        return None
    if location_codes and not isinstance(location_codes, list):
        location_codes = [location_codes]
    # ignore empty entries
    return [code for code in location_codes if code]
def clean_phone_numbers(phone_numbers):
    """Validate each non-empty number and return the non-empty ones.

    Raises (via validate_phone_number) when a number is present but invalid.
    """
    cleaned = []
    for raw_number in phone_numbers:
        if not raw_number:
            continue
        validate_phone_number(raw_number, f'Invalid phone number detected: {raw_number}')
        cleaned.append(raw_number)
    return cleaned
def create_or_update_commcare_users_and_groups(upload_domain, user_specs, upload_user, upload_record_id,
                                               group_memoizer=None,
                                               update_progress=None):
    """
    Creates and Updates CommCare Users
    For the associated web user username passed, for each CommCareUser
        if corresponding web user is present
            if web user has confirmed account but not a member of domain
                adds them to the domain with same role and primary location as the CommCareUser
            if already a member of domain
                update their role and primary location to be same as that of the CommCareUser
        else creates or updates user invitation
            sets Invitation with the CommCare user's role and primary location
    All changes to users only, are tracked using UserChangeLogger, as an audit trail.

    Returns a dict of the shape ``{"errors": [...], "rows": [...]}`` where each
    entry in "rows" mirrors an input row plus a 'flag' describing the outcome
    ('created', 'updated', or an error message).
    """
    # HELPME
    #
    # This method has been flagged for refactoring due to its complexity and
    # frequency of touches in changesets
    #
    # If you are writing code that touches this method, your changeset
    # should leave the method better than you found it.
    #
    # Please remove this flag when this method no longer triggers an 'E' or 'F'
    # classification from the radon code static analysis
    from corehq.apps.user_importer.helpers import CommCareUserImporter, WebUserImporter
    # per-domain DomainInfo cache shared across rows (see get_domain_info)
    domain_info_by_domain = {}
    ret = {"errors": [], "rows": []}
    current = 0
    update_deactivate_after_date = EnterpriseMobileWorkerSettings.is_domain_using_custom_deactivation(
        upload_domain
    )
    for row in user_specs:
        if update_progress:
            update_progress(current)
        current += 1
        status_row = {}
        username = row.get('username')
        domain = row.get('domain') or upload_domain
        # validate/normalize the mobile username; reject rows with bad usernames early
        try:
            username = generate_mobile_username(str(username), domain, False) if username else None
        except ValidationError:
            status_row = {
                'username': username,
                'row': row,
                'flag': _("Username must not contain blank spaces or special characters."),
            }
            ret["rows"].append(status_row)
            continue
        status_row = {
            'username': username,
            'row': row,
        }
        # Set a dummy password to pass the validation, similar to GUI user creation
        send_account_confirmation_sms = spec_value_to_boolean_or_none(row, 'send_confirmation_sms')
        if send_account_confirmation_sms and not row.get('password'):
            string_set = string.ascii_uppercase + string.digits + string.ascii_lowercase
            password = ''.join(random.choices(string_set, k=10))
            row['password'] = password
        if(row.get('password')):
            row['password'] = str(row.get('password'))
        # run the per-domain validators against the raw row before any writes
        try:
            domain_info = get_domain_info(domain, upload_domain, user_specs, domain_info_by_domain,
                                          group_memoizer)
            for validator in domain_info.validators:
                validator(row)
        except UserUploadError as e:
            status_row['flag'] = str(e)
            ret['rows'].append(status_row)
            continue
        # unpack the remaining spreadsheet columns
        data = row.get('data', {})
        email = row.get('email')
        group_names = list(map(str, row.get('group') or []))
        language = row.get('language')
        name = row.get('name')
        password = row.get('password')
        uncategorized_data = row.get('uncategorized_data', {})
        user_id = row.get('user_id')
        # None means "column absent" and is distinct from an empty list
        location_codes = row.get('location_code', []) if 'location_code' in row else None
        location_codes = format_location_codes(location_codes)
        role = row.get('role', None)
        profile = row.get('user_profile', None)
        web_user_username = row.get('web_user')
        phone_numbers = row.get('phone-number', []) if 'phone-number' in row else None
        deactivate_after = row.get('deactivate_after', None) if update_deactivate_after_date else None
        if isinstance(deactivate_after, datetime):
            deactivate_after = deactivate_after.strftime("%m-%Y")
        row['deactivate_after'] = deactivate_after
        try:
            password = str(password) if password else None
            is_active = spec_value_to_boolean_or_none(row, 'is_active')
            is_account_confirmed = spec_value_to_boolean_or_none(row, 'is_account_confirmed')
            send_account_confirmation_email = spec_value_to_boolean_or_none(row, 'send_confirmation_email')
            remove_web_user = spec_value_to_boolean_or_none(row, 'remove_web_user')
            # SMS confirmation implies an inactive, (for new users) unconfirmed account
            if send_account_confirmation_sms:
                is_active = False
                if not user_id:
                    is_account_confirmed = False
            user = _get_or_create_commcare_user(domain, user_id, username, is_account_confirmed,
                                                web_user_username, password, upload_user)
            commcare_user_importer = CommCareUserImporter(upload_domain, domain, user, upload_user,
                                                          is_new_user=not bool(user_id),
                                                          via=USER_CHANGE_VIA_BULK_IMPORTER,
                                                          upload_record_id=upload_record_id)
            if user_id:
                if is_password(password):
                    commcare_user_importer.update_password(password)
                    # overwrite password in results so we do not save it to the db
                    status_row['row']['password'] = 'REDACTED'
                status_row['flag'] = 'updated'
            else:
                status_row['flag'] = 'created'
            if phone_numbers is not None:
                phone_numbers = clean_phone_numbers(phone_numbers)
                commcare_user_importer.update_phone_numbers(phone_numbers)
            if name:
                commcare_user_importer.update_name(name)
            commcare_user_importer.update_user_data(data, uncategorized_data, profile, domain_info)
            if update_deactivate_after_date:
                commcare_user_importer.update_deactivate_after(deactivate_after)
            if language:
                commcare_user_importer.update_language(language)
            if email:
                commcare_user_importer.update_email(email)
            if is_active is not None:
                commcare_user_importer.update_status(is_active)
            # Do this here so that we validate the location code before we
            # save any other information to the user, this way either all of
            # the user's information is updated, or none of it
            # Do not update location info if the column is not included at all
            if domain_info.can_assign_locations and location_codes is not None:
                commcare_user_importer.update_locations(location_codes, domain_info)
            if role:
                role_qualified_id = domain_info.roles_by_name[role]
                commcare_user_importer.update_role(role_qualified_id)
            elif not commcare_user_importer.logger.is_new_user and 'role' in row:
                # blank role cell on an existing user explicitly clears the role
                commcare_user_importer.update_role('none')
            if web_user_username:
                user.update_metadata({'login_as_user': web_user_username})
            user.save()
            log = commcare_user_importer.save_log()
            # mirror role/location onto the associated web user (or their invitation)
            if web_user_username:
                check_can_upload_web_users(domain, upload_user)
                web_user = CouchUser.get_by_username(web_user_username)
                if web_user:
                    web_user_importer = WebUserImporter(upload_domain, domain, web_user, upload_user,
                                                        is_new_user=False,
                                                        via=USER_CHANGE_VIA_BULK_IMPORTER,
                                                        upload_record_id=upload_record_id)
                    user_change_logger = web_user_importer.logger
                else:
                    web_user_importer = None
                    user_change_logger = None
                if remove_web_user:
                    remove_web_user_from_domain(domain, web_user, username, upload_user,
                                                user_change_logger)
                else:
                    check_user_role(username, role)
                    if not web_user and is_account_confirmed:
                        raise UserUploadError(_(
                            "You can only set 'Is Account Confirmed' to 'True' on an existing Web User. "
                            f"{web_user_username} is a new username."
                        ).format(web_user_username=web_user_username))
                    if web_user and not web_user.is_member_of(domain) and is_account_confirmed:
                        # add confirmed account to domain
                        # role_qualified_id would be present here as confirmed in check_user_role
                        web_user_importer.add_to_domain(role_qualified_id, user.location_id)
                    elif not web_user or not web_user.is_member_of(domain):
                        create_or_update_web_user_invite(web_user_username, domain, role_qualified_id,
                                                         upload_user, user.location_id, user_change_logger,
                                                         send_email=send_account_confirmation_email)
                    elif web_user.is_member_of(domain):
                        # edit existing user in the domain
                        web_user_importer.update_role(role_qualified_id)
                        if location_codes is not None:
                            web_user_importer.update_primary_location(user.location_id)
                        web_user.save()
                if web_user_importer:
                    web_user_importer.save_log()
            if not web_user_username:
                if send_account_confirmation_email:
                    send_account_confirmation_if_necessary(user)
                if send_account_confirmation_sms:
                    send_account_confirmation_sms_if_necessary(user)
            if is_password(password):
                # Without this line, digest auth doesn't work.
                # With this line, digest auth works.
                # Other than that, I'm not sure what's going on
                # Passing use_primary_db=True because of https://dimagi-dev.atlassian.net/browse/ICDS-465
                user.get_django_user(use_primary_db=True).check_password(password)
            group_change_message = commcare_user_importer.update_user_groups(domain_info, group_names)
            try:
                domain_info.group_memoizer.save_updated()
            except BulkSaveError as e:
                _error_message = (
                    "Oops! We were not able to save some of your group changes. "
                    "Please make sure no one else is editing your groups "
                    "and try again."
                )
                logging.exception((
                    'BulkSaveError saving groups. '
                    'User saw error message "%s". Errors: %s'
                ) % (_error_message, e.errors))
                ret['errors'].append(_error_message)
            # attach group changes to the existing change log, or write a
            # group-only log entry when nothing else changed
            if log and group_change_message:
                log.change_messages.update(group_change_message)
                log.save()
            elif group_change_message:
                log = commcare_user_importer.logger.save_only_group_changes(group_change_message)
        except ValidationError as e:
            status_row['flag'] = e.message
        except (UserUploadError, CouchUser.Inconsistent) as e:
            status_row['flag'] = str(e)
        ret["rows"].append(status_row)
    return ret
def _get_or_create_commcare_user(domain, user_id, username, is_account_confirmed, web_user_username, password,
                                 upload_user):
    """Fetch an existing CommCareUser by ``user_id``, or build a new uncommitted one.

    For existing users: raises UserUploadError when the id is unknown, when the
    row attempts to change the username, or when it tries to un-confirm the
    account. For new users the account-confirmation flag is honored only when
    no web user is associated with the row. The newly built user is NOT saved
    (``commit=False``); the caller persists it.
    """
    if not user_id:
        create_kwargs = {}
        if is_account_confirmed is not None and not web_user_username:
            create_kwargs['is_account_confirmed'] = is_account_confirmed
        return CommCareUser.create(domain, username, password, created_by=upload_user,
                                   created_via=USER_CHANGE_VIA_BULK_IMPORTER, commit=False, **create_kwargs)
    existing_user = CommCareUser.get_by_user_id(user_id, domain)
    if not existing_user:
        raise UserUploadError(_(
            "User with ID '{user_id}' not found"
        ).format(user_id=user_id, domain=domain))
    check_changing_username(existing_user, username)
    # note: explicitly not including "None" here because that's the default value if not set.
    # False means it was set explicitly to that value
    if is_account_confirmed is False and not web_user_username:
        raise UserUploadError(_(
            "You can only set 'Is Account Confirmed' to 'False' on a new User."
        ))
    return existing_user
def create_or_update_web_users(upload_domain, user_specs, upload_user, upload_record_id, update_progress=None):
    """Process bulk-upload rows for web users.

    For each row: validates it, then either removes the user/invitation,
    updates an existing domain member's role and locations, or creates or
    refreshes an invitation. Returns ``{"errors": [...], "rows": [...]}``
    where each "rows" entry carries a 'flag' of 'updated', 'invited', or an
    error message.
    """
    from corehq.apps.user_importer.helpers import WebUserImporter
    # per-domain DomainInfo cache shared across rows (see get_domain_info)
    domain_info_by_domain = {}
    ret = {"errors": [], "rows": []}
    current = 0
    for row in user_specs:
        if update_progress:
            update_progress(current)
        current += 1
        username = row.get('username')
        domain = row.get('domain') or upload_domain
        status_row = {
            'username': username,
            'row': row,
        }
        # validate the raw row before any writes
        try:
            domain_info = get_domain_info(domain, upload_domain, user_specs, domain_info_by_domain,
                                          upload_user=upload_user, is_web_upload=True)
            for validator in domain_info.validators:
                validator(row)
        except UserUploadError as e:
            status_row['flag'] = str(e)
            ret['rows'].append(status_row)
            continue
        role = row.get('role', None)
        status = row.get('status')
        # None means "column absent" and is distinct from an empty list
        location_codes = row.get('location_code', []) if 'location_code' in row else None
        location_codes = format_location_codes(location_codes)
        try:
            remove = spec_value_to_boolean_or_none(row, 'remove')
            check_user_role(username, role)
            role_qualified_id = domain_info.roles_by_name[role]
            check_can_upload_web_users(domain, upload_user)
            user = CouchUser.get_by_username(username, strict=True)
            if user:
                check_changing_username(user, username)
                web_user_importer = WebUserImporter(upload_domain, domain, user, upload_user,
                                                    is_new_user=False,
                                                    via=USER_CHANGE_VIA_BULK_IMPORTER,
                                                    upload_record_id=upload_record_id)
                user_change_logger = web_user_importer.logger
                if remove:
                    remove_web_user_from_domain(domain, user, username, upload_user, user_change_logger,
                                                is_web_upload=True)
                else:
                    membership = user.get_domain_membership(domain)
                    if membership:
                        # existing domain member: update role/locations in place
                        modify_existing_user_in_domain(upload_domain, domain, domain_info, location_codes,
                                                       membership, role_qualified_id, upload_user, user,
                                                       web_user_importer)
                    else:
                        # existing account, not yet a member: (re)invite them
                        create_or_update_web_user_invite(username, domain, role_qualified_id, upload_user,
                                                         user.location_id, user_change_logger)
                web_user_importer.save_log()
                status_row['flag'] = 'updated'
            else:
                if remove:
                    remove_invited_web_user(domain, username)
                    status_row['flag'] = 'updated'
                else:
                    # 'Invited' status is only valid for an existing, pending,
                    # non-bounced invitation
                    if status == "Invited":
                        try:
                            invitation = Invitation.objects.get(domain=domain, email=username, is_accepted=False)
                        except Invitation.DoesNotExist:
                            raise UserUploadError(_("You can only set 'Status' to 'Invited' on a pending Web "
                                                    "User. {web_user} has no invitations for this project "
                                                    "space.").format(web_user=username))
                        if invitation.email_status == InvitationStatus.BOUNCED and invitation.email == username:
                            raise UserUploadError(_("The email has bounced for this user's invite. Please try "
                                                    "again with a different username").format(web_user=username))
                    user_invite_loc_id = None
                    if domain_info.can_assign_locations and location_codes is not None:
                        # set invite location to first item in location_codes
                        if len(location_codes) > 0:
                            user_invite_loc = get_location_from_site_code(
                                location_codes[0], domain_info.location_cache
                            )
                            user_invite_loc_id = user_invite_loc.location_id
                    create_or_update_web_user_invite(username, domain, role_qualified_id, upload_user,
                                                     user_invite_loc_id)
                    status_row['flag'] = 'invited'
        except (UserUploadError, CouchUser.Inconsistent) as e:
            status_row['flag'] = str(e)
        ret["rows"].append(status_row)
    return ret
def modify_existing_user_in_domain(upload_domain, domain, domain_info, location_codes, membership,
                                   role_qualified_id, upload_user, current_user, web_user_importer,
                                   max_tries=3):
    """Update an existing web user's locations and role in ``domain`` and save.

    On a ResourceConflict (stale document revision) the user is refetched and
    the update retried up to ``max_tries`` more times before re-raising.

    :param upload_domain: domain the upload was initiated from (may differ
        from ``domain`` on multi-domain uploads)
    :param location_codes: list of location site codes, or None when the
        location column was absent from the upload
    :param membership: the user's membership object for ``domain``
        NOTE(review): on retry this membership may be stale relative to the
        refetched user -- TODO confirm whether it should be refetched too
    """
    if domain_info.can_assign_locations and location_codes is not None:
        web_user_importer.update_locations(location_codes, membership, domain_info)
    web_user_importer.update_role(role_qualified_id)
    try:
        current_user.save()
    except ResourceConflict:
        notify_exception(None, message="ResourceConflict during web user import",
                         details={'domain': domain, 'username': current_user.username})
        if max_tries > 0:
            current_user.clear_quickcache_for_user()
            updated_user = CouchUser.get_by_username(current_user.username, strict=True)
            # bug fix: the previous version omitted ``upload_domain`` from this
            # recursive call, shifting every subsequent argument one position left
            modify_existing_user_in_domain(upload_domain, domain, domain_info, location_codes, membership,
                                           role_qualified_id, upload_user, updated_user, web_user_importer,
                                           max_tries=max_tries - 1)
        else:
            raise
def check_user_role(username, role):
    """Raise UserUploadError when the row supplies no role for ``username``."""
    if role:
        return
    raise UserUploadError(_(
        "You cannot upload a web user without a role. {username} does not have "
        "a role").format(username=username))
def check_can_upload_web_users(domain, upload_user):
    """Raise UserUploadError unless ``upload_user`` may edit web users in ``domain``."""
    has_permission = upload_user.can_edit_web_users(domain)
    if has_permission:
        return
    raise UserUploadError(_(
        "Only users with the edit web users permission can upload web users"
    ))
def check_changing_username(user, username):
    """Reject an upload row that tries to rename an existing user.

    A falsy ``username`` (column left blank) is accepted unchanged.
    """
    is_unchanged = not username or user.username == username
    if is_unchanged:
        return
    raise UserUploadError(_(
        'Changing usernames is not supported: %(username)r to %(new_username)r'
    ) % {'username': user.username, 'new_username': username})
def remove_invited_web_user(domain, username):
    """Delete the pending Invitation for ``username`` in ``domain``.

    Raises UserUploadError when no such invitation exists.
    """
    try:
        pending_invitation = Invitation.objects.get(domain=domain, email=username)
    except Invitation.DoesNotExist:
        raise UserUploadError(_("You cannot remove a web user that is not a member or invited to this project. "
                                "{username} is not a member or invited.").format(username=username))
    pending_invitation.delete()
def remove_web_user_from_domain(domain, user, username, upload_user, user_change_logger=None,
                                is_web_upload=False):
    """Remove a web user's membership in ``domain``, or revoke their invitation.

    Non-members are handled by deleting their pending invitation (web-user
    uploads only; mobile-worker uploads raise instead). Users may not remove
    themselves. Outcomes are recorded on ``user_change_logger`` when given.
    """
    is_member = bool(user) and user.is_member_of(domain)
    if not is_member:
        if not is_web_upload:
            raise UserUploadError(_("You cannot remove a web user that is not a member of this project."
                                    " {web_user} is not a member.").format(web_user=user))
        remove_invited_web_user(domain, username)
        if user_change_logger:
            user_change_logger.add_info(UserChangeMessage.invitation_revoked_for_domain(domain))
        return
    if username == upload_user.username:
        raise UserUploadError(_("You cannot remove yourself from a domain via bulk upload"))
    user.delete_domain_membership(domain)
    user.save()
    if user_change_logger:
        user_change_logger.add_info(UserChangeMessage.domain_removal(domain))
dimagi/commcare-hq | corehq/apps/auditcare/migrations/0002_uniques.py | 1 | 2537 | # Generated by Django 2.2.16 on 2021-03-24 11:38
from itertools import groupby
from django.db import migrations, models
from ..models import AccessAudit, HttpAccept, NavigationEventAudit, UserAgent, ViewName
# Which audit models hold a foreign key to each lookup model, so the
# duplicate-row cleanup below knows which referencing rows to repoint.
model_map = {
    HttpAccept: [AccessAudit],
    UserAgent: [AccessAudit, NavigationEventAudit],
    ViewName: [NavigationEventAudit],
}
# Name of the FK id column on the referencing models, per lookup model.
field_map = {
    HttpAccept: "http_accept_fk_id",
    UserAgent: "user_agent_fk_id",
    ViewName: "view_fk_id",
}
def delete_duplicates(model):
    """Build a RunPython callable that collapses duplicate ``model`` rows.

    For every ``value`` appearing on more than one row, the row with the
    lowest id is kept, foreign keys on the referencing models (``model_map``)
    are repointed to it, and the other duplicate rows are deleted. Required
    before adding a unique constraint on ``value``.
    """
    def update_dups(rel_model, first_id, other_ids):
        # repoint FKs on rel_model from the duplicate rows to the surviving row
        field_name = field_map[model]
        rel_model.objects.filter(
            **{field_name + "__in": other_ids}
        ).update(
            **{field_name: first_id},
        )
    def do_delete(apps, schema_editor):
        def sort_key(item):
            id, value = item
            return value
        # values that occur on more than one row
        dup_values = list(
            model.objects
            .values("value")
            .annotate(value_count=models.Count("value"))
            .filter(value_count__gt=1)
            .values_list("value", flat=True)
        )
        dups = (
            model.objects
            .filter(value__in=dup_values)
            .values_list("id", "value")
        )
        # group (id, value) pairs by value; groupby requires its input sorted
        # on the same key
        for value, pairs in groupby(sorted(dups, key=sort_key), key=sort_key):
            ids = sorted(id for id, value in pairs)
            first_id, *other_ids = ids
            for rel_model in model_map[model]:
                update_dups(rel_model, first_id, other_ids)
            model.objects.filter(value=value, id__in=other_ids).delete()
    return do_delete
class Migration(migrations.Migration):
    # run each operation outside a single wrapping transaction
    atomic = False
    dependencies = [
        ('auditcare', '0001_sqlmodels'),
    ]
    # For each lookup table: first collapse duplicate rows (repointing any
    # referencing FKs), then add the unique constraint on `value`. The reverse
    # callables are no-ops since de-duplication cannot be undone.
    operations = [
        migrations.RunPython(delete_duplicates(HttpAccept), lambda *a: None),
        migrations.AlterField(
            model_name='httpaccept',
            name='value',
            field=models.CharField(db_index=True, max_length=255, unique=True),
        ),
        migrations.RunPython(delete_duplicates(UserAgent), lambda *a: None),
        migrations.AlterField(
            model_name='useragent',
            name='value',
            field=models.CharField(db_index=True, max_length=255, unique=True),
        ),
        migrations.RunPython(delete_duplicates(ViewName), lambda *a: None),
        migrations.AlterField(
            model_name='viewname',
            name='value',
            field=models.CharField(db_index=True, max_length=255, unique=True),
        ),
    ]
| bsd-3-clause | 273512851665fb73afff3d7e5fd2ba1b | 29.566265 | 87 | 0.583366 | 3.797904 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/data_analytics/management/commands/add_to_malt_table.py | 1 | 1122 | from django.core.management.base import BaseCommand
import csv
from corehq.apps.data_analytics.models import MALTRow
class Command(BaseCommand):
    """
    Adds csv data to malt table for given files
    e.g. ./manage.py add_to_malt_table example.csv
    """
    help = 'Adds data to MALT table from given files'

    def add_arguments(self, parser):
        # one or more csv file paths
        parser.add_argument('file_paths', metavar='file_path', nargs='+')

    def handle(self, file_paths, **options):
        for csv_path in file_paths:
            with open(csv_path, 'r', encoding='utf-8') as csv_file:
                headers = None
                row_dicts = []
                for csv_row in csv.reader(csv_file):
                    if headers is None:
                        # first row holds the column names
                        headers = csv_row
                    else:
                        row_dicts.append({headers[i]: cell for i, cell in enumerate(csv_row)})
            # one bulk insert per input file
            MALTRow.objects.bulk_create(
                [MALTRow(**row_dict) for row_dict in row_dicts]
            )
| bsd-3-clause | a3ac9386f49c340f884e74067a412cd1 | 30.166667 | 93 | 0.510695 | 4.298851 | false | false | false | false |
dimagi/commcare-hq | corehq/form_processor/tests/test_sql_update_strategy.py | 1 | 8947 | from django.test import TestCase
from freezegun import freeze_time
from unittest.mock import patch
from testil import eq
from corehq.util.soft_assert.core import SoftAssert
from casexml.apps.case.exceptions import ReconciliationError
from casexml.apps.case.xml.parser import CaseUpdateAction, KNOWN_PROPERTIES
from corehq.form_processor.backends.sql.processor import FormProcessorSQL
from corehq.form_processor.backends.sql.update_strategy import SqlCaseUpdateStrategy
from corehq.form_processor.interfaces.processor import ProcessedForms
from corehq.form_processor.models import (
CommCareCase,
CaseTransaction,
RebuildWithReason,
)
from corehq.form_processor.utils import TestFormMetadata
from corehq.form_processor.tests.utils import sharded, FormProcessorTestUtils
from corehq.util.test_utils import get_form_ready_to_save
import uuid
from datetime import datetime
@sharded
class SqlUpdateStrategyTest(TestCase):
    """Tests for SqlCaseUpdateStrategy's reconciliation of out-of-order
    case transactions (client date vs. server date mismatches)."""
    DOMAIN = 'update-strategy-test-' + uuid.uuid4().hex
    USER_ID = 'mr_wednesday_'
    @classmethod
    def setUpClass(cls):
        super(SqlUpdateStrategyTest, cls).setUpClass()
        FormProcessorTestUtils.delete_all_sql_forms()
        FormProcessorTestUtils.delete_all_sql_cases()
    @classmethod
    def tearDownClass(cls):
        FormProcessorTestUtils.delete_all_sql_forms()
        FormProcessorTestUtils.delete_all_sql_cases()
        super(SqlUpdateStrategyTest, cls).tearDownClass()
    @patch.object(SoftAssert, '_call')
    def test_reconcile_transactions(self, soft_assert_mock):
        """tests a transaction with an early client date and late server date"""
        with freeze_time("2018-10-10"):
            case = self._create_case()
        with freeze_time("2018-10-11"):
            new_old_xform = self._create_form()
        with freeze_time("2018-10-08"):
            new_old_trans = self._create_case_transaction(case, new_old_xform)
        with freeze_time("2018-10-11"):
            self._save(new_old_xform, case, new_old_trans)
        # first pass should reconcile; after saving, a second pass should not
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertTrue(update_strategy.reconcile_transactions_if_necessary())
        self._check_for_reconciliation_error_soft_assert(soft_assert_mock)
        case.save(with_tracked_models=True)
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertFalse(update_strategy.reconcile_transactions_if_necessary())
        self._check_for_reconciliation_error_soft_assert(soft_assert_mock)
    def test_reconcile_not_necessary(self):
        # client and server dates agree, so no reconciliation is needed
        with freeze_time("2018-10-10"):
            case = self._create_case()
        with freeze_time("2018-10-11"):
            new_old_xform = self._create_form()
            new_old_trans = self._create_case_transaction(case, new_old_xform)
            self._save(new_old_xform, case, new_old_trans)
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertFalse(update_strategy.reconcile_transactions_if_necessary())
    def test_ignores_before_rebuild_transaction(self):
        # out-of-order transactions that precede a rebuild are not reconciled
        with freeze_time("2018-10-10"):
            case = self._create_case()
        with freeze_time("2018-10-11"):
            new_old_xform = self._create_form()
        with freeze_time("2018-10-08"):
            new_old_trans = self._create_case_transaction(case, new_old_xform)
        with freeze_time("2018-10-11"):
            self._save(new_old_xform, case, new_old_trans)
        self.assertFalse(case.check_transaction_order())
        with freeze_time("2018-10-13"):
            new_rebuild_xform = self._create_form()
            rebuild_detail = RebuildWithReason(reason="shadow's golden coin")
            rebuild_transaction = CaseTransaction.rebuild_transaction(case, rebuild_detail)
            self._save(new_rebuild_xform, case, rebuild_transaction)
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertFalse(update_strategy.reconcile_transactions_if_necessary())
    def test_first_transaction_not_create(self):
        # a transaction dated before the case-create transaction cannot be
        # reconciled and should raise
        with freeze_time("2018-10-10"):
            case = self._create_case()
        with freeze_time("2018-10-08"):
            new_old_xform = self._create_form()
            new_old_trans = self._create_case_transaction(case, new_old_xform)
            self._save(new_old_xform, case, new_old_trans)
        self.assertTrue(case.check_transaction_order())
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertRaises(ReconciliationError, update_strategy.reconcile_transactions)
    @patch.object(SoftAssert, '_call')
    def test_reconcile_transactions_within_fudge_factor(self, soft_assert_mock):
        """tests a transaction with an early client date and late server date"""
        with freeze_time("2018-10-10"):
            case = self._create_case()
        with freeze_time("2018-10-11 06:00"):
            new_old_xform = self._create_form()
        with freeze_time("2018-10-10 18:00"):
            new_old_trans = self._create_case_transaction(case, new_old_xform)
        with freeze_time("2018-10-11 06:00"):
            self._save(new_old_xform, case, new_old_trans)
        with freeze_time("2018-10-11"):
            new_old_xform = self._create_form()
            new_old_trans = self._create_case_transaction(case, new_old_xform)
            self._save(new_old_xform, case, new_old_trans)
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertTrue(update_strategy.reconcile_transactions_if_necessary())
        self._check_for_reconciliation_error_soft_assert(soft_assert_mock)
        case.save(with_tracked_models=True)
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertFalse(update_strategy.reconcile_transactions_if_necessary())
        self._check_for_reconciliation_error_soft_assert(soft_assert_mock)
    def _create_form(self, user_id=None, received_on=None):
        """
        Create the models directly so that these tests aren't dependent on any
        other apps.
        :return: XFormInstance
        """
        user_id = user_id or 'mr_wednesday'
        received_on = received_on or datetime.utcnow()
        metadata = TestFormMetadata(
            domain=self.DOMAIN,
            received_on=received_on,
            user_id=user_id
        )
        form = get_form_ready_to_save(metadata)
        return form
    def _create_case_transaction(self, case, form=None, submitted_on=None, action_types=None):
        # build (but do not save) a form transaction for the given case
        form = form or self._create_form()
        submitted_on = submitted_on or datetime.utcnow()
        return CaseTransaction.form_transaction(case, form, submitted_on, action_types)
    def _create_case(self, case_type=None, user_id=None, case_id=None):
        # create and persist a case with a single create transaction
        case_id = case_id or uuid.uuid4().hex
        user_id = user_id or 'mr_wednesday'
        utcnow = datetime.utcnow()
        # NOTE(review): modified_by is set to a datetime here; it looks like it
        # should be a user id -- confirm against the CommCareCase model
        case = CommCareCase(
            case_id=case_id,
            domain=self.DOMAIN,
            type=case_type or '',
            owner_id=user_id,
            opened_on=utcnow,
            modified_on=utcnow,
            modified_by=utcnow,
            server_modified_on=utcnow
        )
        form = self._create_form(user_id, utcnow)
        trans = self._create_case_transaction(case, form, utcnow, action_types=[128])
        self._save(form, case, trans)
        return CommCareCase.objects.get_case(case_id)
    def _save(self, form, case, transaction):
        # disable publish to Kafka to avoid intermittent errors caused by
        # the nexus of kafka's consumer thread and freeze_time
        with patch.object(FormProcessorSQL, "publish_changes_to_kafka"):
            case.track_create(transaction)
            FormProcessorSQL.save_processed_models(ProcessedForms(form, []), [case])
    def _check_for_reconciliation_error_soft_assert(self, soft_assert_mock):
        # assert no ReconciliationError was soft-reported, then reset the mock
        for call in soft_assert_mock.call_args_list:
            self.assertNotIn('ReconciliationError', call[0][1])
        soft_assert_mock.reset_mock()
def test_update_known_properties_with_empty_values():
    """Yield one sub-test per known case property with a non-None default,
    checking that an explicit empty-string update overwrites an existing value."""
    def check(prop):
        case = SqlCaseUpdateStrategy.case_implementation_class()
        setattr(case, prop, "value")
        update_action = CaseUpdateAction(block=None, **{prop: ""})
        SqlCaseUpdateStrategy(case)._update_known_properties(update_action)
        eq(getattr(case, prop), "")
    # verify that at least one property will be tested
    assert any(v is not None for v in KNOWN_PROPERTIES.values()), KNOWN_PROPERTIES
    for case_prop, default_value in KNOWN_PROPERTIES.items():
        if default_value is not None:
            yield check, case_prop
| bsd-3-clause | a007ca9e9dabaf8ad01ace8656be3f3e | 39.121076 | 94 | 0.66268 | 3.741949 | false | true | false | false |
dimagi/commcare-hq | corehq/apps/hqcase/management/commands/initialize_es_indices.py | 1 | 2371 | from django.core.management import BaseCommand
from corehq.apps.es.registry import get_registry, registry_entry
from corehq.elastic import get_es_new
from pillowtop.reindexer.reindexer import (
prepare_index_for_reindex,
prepare_index_for_usage,
clean_index
)
class Command(BaseCommand):
    help = """
        Initialize all or any one specific elasticsearch indices
        and prepare them for reindex
        """

    def add_arguments(self, parser):
        parser.add_argument(
            '--index',
            help='Specify any one index instead of the default all'
        )
        parser.add_argument(
            '--reset',
            action='store_true',
            dest='reset',
            default=False,
            help='Delete existing indices before initializing.'
        )
        parser.add_argument(
            '--set-for-usage',
            action='store_true',
            dest='set_for_usage',
            default=False,
            help="""Set usage settings on indices that are already initialized.
            By default, reindex settings are applied when index is initialized"""
        )

    def handle(self, index=None, reset=False, set_for_usage=False, **kwargs):
        es = get_es_new()
        # destructive resets require interactive confirmation
        needs_confirmation = reset and not set_for_usage
        if needs_confirmation:
            answer = input(
                """
            Are you sure you want to want to delete existing indices
            Note that this will delete any data in them. y/N?
            """
            )
            if answer != "y":
                print("Cancelled by user.")
                return
        # a single named index, or every registered index
        target_indices = [registry_entry(index)] if index else list(get_registry().values())
        for index_info in target_indices:
            if set_for_usage:
                prepare_index_for_usage(es, index_info)
                continue
            if reset:
                clean_index(es, index_info)
            prepare_index_for_reindex(es, index_info)
        if set_for_usage:
            print("index ready for usage")
        else:
            print(
                """Initialized all indices and applied reindex settings
                After reindex is finished, you can run this command again
                with --set-for-usage to remove reindex settings and make it
                ready for usage.
                """
            )
| bsd-3-clause | aa44597340eac817d2f7f7ddb89ca02e | 31.930556 | 86 | 0.539013 | 4.751503 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/hqmedia/tasks.py | 1 | 13739 | import itertools
import json
import os
import re
import tempfile
import zipfile
from wsgiref.util import FileWrapper
from django.conf import settings
from django.utils.translation import gettext as _
from celery.utils.log import get_task_logger
from dimagi.utils.logging import notify_exception
from soil import DownloadBase
from soil.util import expose_cached_download, expose_file_download
from corehq import toggles
from corehq.apps.app_manager.dbaccessors import get_app
from corehq.apps.celery import task
from corehq.apps.hqmedia.cache import BulkMultimediaStatusCache
from corehq.apps.hqmedia.models import CommCareMultimedia
from corehq.util.files import file_extention_from_filename
# NOTE: binds the celery task logger to the name ``logging``, shadowing the
# stdlib ``logging`` module within this module
logging = get_task_logger(__name__)
# file extensions written into CCZ archives without recompression (these
# formats are already compressed; see _zip_files_for_ccz)
MULTIMEDIA_EXTENSIONS = ('.mp3', '.wav', '.jpg', '.png', '.gif', '.3gp', '.mp4', '.zip', )
@task(serializer='pickle')
def process_bulk_upload_zip(processing_id, domain, app_id, username=None, share_media=False,
                            license_name=None, author=None, attribution_notes=None):
    """
    Responsible for processing the uploaded zip from Bulk Upload.

    Walks each entry in the uploaded zip, matches it against the app's
    multimedia paths, attaches the data to new or existing multimedia docs,
    and records per-file outcomes and progress on the
    BulkMultimediaStatusCache identified by ``processing_id``.
    """
    status = BulkMultimediaStatusCache.get(processing_id)
    if not status:
        # no download data available, abort
        return
    app = get_app(domain, app_id)
    status.in_celery = True
    status.save()
    uploaded_zip = status.get_upload_zip()
    if not uploaded_zip:
        return
    zipped_files = uploaded_zip.namelist()
    status.total_files = len(zipped_files)
    checked_paths = []
    try:
        save_app = False
        for index, path in enumerate(zipped_files):
            status.update_progress(len(checked_paths))
            checked_paths.append(path)
            file_name = os.path.basename(path)
            try:
                data = uploaded_zip.read(path)
            except Exception as e:
                status.add_unmatched_path(path, _("Error reading file: %s" % e))
                continue
            # classify the payload (image/audio/video/...) from its bytes
            media_class = CommCareMultimedia.get_class_by_data(data, filename=path)
            if not media_class:
                status.add_skipped_path(path, CommCareMultimedia.get_mime_type(data))
                continue
            # match the zip entry to an app path, case-insensitively
            app_paths = list(app.get_all_paths_of_type(media_class.__name__))
            app_paths_lower = [p.lower() for p in app_paths]
            form_path = media_class.get_form_path(path, lowercase=True)
            if not form_path in app_paths_lower:
                status.add_unmatched_path(path,
                    _("Did not match any %s paths in application." % media_class.get_nice_name()))
                continue
            index_of_path = app_paths_lower.index(form_path)
            form_path = app_paths[index_of_path]  # this is the correct capitalization as specified in the form
            multimedia = media_class.get_by_data(data)
            if not multimedia:
                status.add_unmatched_path(path,
                    _("Matching path found, but could not save the data to couch."))
                continue
            is_new = form_path not in app.multimedia_map
            is_updated = multimedia.attach_data(data,
                                                original_filename=file_name,
                                                username=username)
            if not is_updated and not getattr(multimedia, '_id'):
                status.add_unmatched_path(form_path,
                    _("Matching path found, but didn't save new multimedia correctly."))
                continue
            if is_updated or is_new:
                multimedia.add_domain(domain, owner=True)
                if share_media:
                    multimedia.update_or_add_license(domain, type=license_name, author=author,
                                                     attribution_notes=attribution_notes)
                # defer the app save until all files are mapped
                save_app = True
                app.create_mapping(multimedia, form_path, save=False)
            media_info = multimedia.get_media_info(form_path, is_updated=is_updated, original_path=path)
            status.add_matched_path(media_class, media_info)
        if save_app:
            app.save()
        status.update_progress(len(checked_paths))
    except Exception as e:
        status.mark_with_error(_("Error while processing zip: %s" % e))
    uploaded_zip.close()
    status.complete = True
    status.save()
@task(serializer='pickle')
def build_application_zip(include_multimedia_files, include_index_files, domain, app_id,
                          download_id, build_profile_id=None, compress_zip=False, filename="commcare.zip",
                          download_targeted_version=False):
    """Celery task: build a CCZ for the given app build and expose it as a
    download, reporting 0%% progress at start and 100%% at completion."""
    DownloadBase.set_progress(build_application_zip, 0, 100)
    create_files_for_ccz(
        get_app(domain, app_id),
        build_profile_id,
        include_multimedia_files=include_multimedia_files,
        include_index_files=include_index_files,
        download_id=download_id,
        compress_zip=compress_zip,
        filename=filename,
        download_targeted_version=download_targeted_version,
        task=build_application_zip,
    )
    DownloadBase.set_progress(build_application_zip, 100, 100)
def _get_file_path(app, include_multimedia_files, include_index_files, build_profile_id,
                   download_targeted_version):
    """Pick the on-disk path for the CCZ being assembled.

    When the shared transfer drive is enabled, the name is deterministic over
    the build parameters so identical builds reuse the same file; otherwise a
    fresh temp file is used.
    """
    if not settings.SHARED_DRIVE_CONF.transfer_enabled:
        _fd, tmp_path = tempfile.mkstemp()
        return tmp_path
    cache_name = (
        f"{app._id}"
        f"{'mm' if include_multimedia_files else ''}"
        f"{'ccz' if include_index_files else ''}"
        f"{app.version}"
        f"{build_profile_id}"
    )
    if download_targeted_version:
        cache_name += '-targeted'
    return os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, cache_name)
def _build_ccz_files(build, build_profile_id, include_multimedia_files, include_index_files,
                     download_id, compress_zip, filename, download_targeted_version):
    """Collect the (path, content) pairs that make up the CCZ.

    Returns (files, errors, file_count) from iter_app_files, optionally
    appending a JSON manifest of the build parameters to aid debugging.
    """
    from corehq.apps.hqmedia.views import iter_app_files
    files, errors, file_count = iter_app_files(
        build, include_multimedia_files, include_index_files, build_profile_id,
        download_targeted_version=download_targeted_version,
    )
    if toggles.CAUTIOUS_MULTIMEDIA.enabled(build.domain):
        # Embed the exact build parameters inside the zip so a bad CCZ can
        # be diagnosed after the fact.
        manifest_contents = json.dumps(
            {
                'include_multimedia_files': include_multimedia_files,
                'include_index_files': include_index_files,
                'download_id': download_id,
                'build_profile_id': build_profile_id,
                'compress_zip': compress_zip,
                'filename': filename,
                'download_targeted_version': download_targeted_version,
            },
            indent=4,
        )
        manifest_name = '{} - {} - v{} manifest.json'.format(
            build.domain,
            build.name,
            build.version,
        )
        files = itertools.chain(files, [(manifest_name, manifest_contents)])
    return files, errors, file_count
def _zip_files_for_ccz(fpath, files, current_progress, file_progress, file_count, compression, task):
    """Write `files` into a zip at `fpath`, reporting progress as it goes.

    Returns a dict of the non-multimedia entries (path -> data) so callers
    can run integrity checks without re-reading the archive.
    """
    file_cache = {}
    progress = current_progress
    with open(fpath, 'wb') as tmp, zipfile.ZipFile(tmp, "w", allowZip64=True) as z:
        for entry_path, entry_data in files:
            extension = os.path.splitext(entry_path)[1]
            is_multimedia = extension in MULTIMEDIA_EXTENSIONS
            # multimedia formats are already compressed; storing them avoids
            # wasting CPU for no size benefit
            z.writestr(entry_path, entry_data,
                       zipfile.ZIP_STORED if is_multimedia else compression)
            progress += file_progress / file_count
            DownloadBase.set_progress(task, progress, 100)
            if not is_multimedia:
                file_cache[entry_path] = entry_data
    return file_cache
def create_files_for_ccz(build, build_profile_id, include_multimedia_files=True, include_index_files=True,
                         download_id=None, compress_zip=False, filename="commcare.zip",
                         download_targeted_version=False, task=None):
    """
    Build a CCZ for `build`, write it to disk, and expose it for download.

    Skips the expensive build step when the shared transfer drive already
    holds a file for this exact build/options combination.

    :param task: celery task whose progress needs to be set when being run asynchronously by celery
    :return: path to the ccz file
    :raises Exception: if integrity errors are found; the partial file is
        removed first and the message is a tab-joined list of the errors.
    """
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED
    current_progress = 10  # early on indicate something is happening
    file_progress = 50.0  # arbitrarily say building files takes half the total time
    DownloadBase.set_progress(task, current_progress, 100)
    fpath = _get_file_path(build, include_multimedia_files, include_index_files, build_profile_id,
                           download_targeted_version)
    # Don't rebuild the file if it is already there
    if not (os.path.isfile(fpath) and settings.SHARED_DRIVE_CONF.transfer_enabled):
        with build.timing_context("_build_ccz_files"):
            files, errors, file_count = _build_ccz_files(
                build, build_profile_id, include_multimedia_files, include_index_files,
                download_id, compress_zip, filename, download_targeted_version
            )
        with build.timing_context("_zip_files_for_ccz"):
            file_cache = _zip_files_for_ccz(fpath, files, current_progress, file_progress,
                                            file_count, compression, task)
        # Optional integrity check: every locale id referenced in suite.xml
        # must have an entry in the default app strings.
        if include_index_files and toggles.LOCALE_ID_INTEGRITY.enabled(build.domain):
            with build.timing_context("find_missing_locale_ids_in_ccz"):
                locale_errors = find_missing_locale_ids_in_ccz(file_cache)
                if locale_errors:
                    errors.extend(locale_errors)
                    notify_exception(
                        None,
                        message="CCZ missing locale ids from default/app_strings.txt",
                        details={'domain': build.domain, 'app_id': build.id, 'errors': locale_errors}
                    )
        # Verify every media file referenced by media_suite.xml made it into
        # the archive.
        if include_index_files and include_multimedia_files:
            with build.timing_context("check_ccz_multimedia_integrity"):
                multimedia_errors = check_ccz_multimedia_integrity(build.domain, fpath)
                if multimedia_errors:
                    multimedia_errors.insert(0, _(
                        "Please try syncing multimedia files in multimedia tab under app settings to resolve "
                        "issues with missing media files. Report an issue if this persists."
                    ))
                errors.extend(multimedia_errors)
                if multimedia_errors:
                    notify_exception(
                        None,
                        message="CCZ missing multimedia files",
                        details={'domain': build.domain, 'app_id': build.id, 'errors': multimedia_errors}
                    )
        if errors:
            os.remove(fpath)
            raise Exception('\t' + '\t'.join(errors))
    else:
        # Reusing an existing file: jump progress past the build phase.
        DownloadBase.set_progress(task, current_progress + file_progress, 100)
    with build.timing_context("_expose_download_link"):
        _expose_download_link(fpath, filename, compress_zip, download_id)
    DownloadBase.set_progress(task, 100, 100)
    return fpath
def _expose_download_link(fpath, filename, compress_zip, download_id):
    """Register the generated file under `download_id` so it can be fetched.

    Uses the shared transfer drive when enabled; otherwise caches the open
    file (the download machinery owns the handle's lifetime).
    """
    shared_kwargs = dict(
        mimetype='application/zip' if compress_zip else 'application/x-zip-compressed',
        content_disposition='attachment; filename="{fname}"'.format(fname=filename),
        download_id=download_id,
        expiry=60 * 60,  # one hour, in seconds
    )
    if settings.SHARED_DRIVE_CONF.transfer_enabled:
        expose_file_download(fpath, use_transfer=True, **shared_kwargs)
    else:
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            file_extension=file_extention_from_filename(filename),
            **shared_kwargs
        )
def find_missing_locale_ids_in_ccz(file_cache):
    """Return error strings for locale ids used in suite.xml but absent from
    the default app strings; empty list when everything checks out."""
    required_files = ('default/app_strings.txt', 'suite.xml')
    errors = [
        _("Could not find {file_path} in CCZ").format(file_path=name)
        for name in required_files if name not in file_cache
    ]
    if errors:
        return errors
    # Each line of an app_strings.txt file looks like "name.of.key=value";
    # content is utf-8 bytes because Application._make_language_files calls
    # .encode('utf-8').
    app_strings_ids = set()
    for raw_line in file_cache['default/app_strings.txt'].splitlines():
        app_strings_ids.add(raw_line.decode("utf-8").split('=')[0])
    from corehq.apps.app_manager.xform import parse_xml
    suite = parse_xml(file_cache['suite.xml'])
    suite_ids = {node.get("id") for node in suite.iter("locale")}
    return [
        _("Locale ID {id} present in suite.xml but not in default app strings.").format(id=locale_id)
        for locale_id in (suite_ids - app_strings_ids) if locale_id
    ]
# Check that all media files present in media_suite.xml were added to the zip
def check_ccz_multimedia_integrity(domain, fpath):
    """Return error messages for media referenced by media_suite.xml that is
    missing from the CCZ at `fpath`; empty list when all media is present."""
    errors = []
    with open(fpath, 'rb') as tmp, zipfile.ZipFile(tmp, "r") as z:
        media_suites = [name for name in z.namelist()
                        if re.search(r'\bmedia_suite.xml\b', name)]
        if len(media_suites) != 1:
            errors.append(_('Could not find media_suite.xml in CCZ'))
        else:
            from corehq.apps.app_manager.xform import parse_xml
            with z.open(media_suites[0]) as media_suite:
                parsed = parse_xml(media_suite.read())
            local_resources = {node.text for node in
                               parsed.findall("media/resource/location[@authority='local']")}
            zip_names = z.namelist()
            for resource in local_resources:
                # resource paths may carry a leading './' not present in the zip
                if re.sub(r'^\.\/', '', resource) not in zip_names:
                    errors.append(_('Media file missing from CCZ: {}').format(resource))
    return errors
| bsd-3-clause | 8002c05451815776704668411ff9fe81 | 41.015291 | 120 | 0.60412 | 3.954807 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/utils/tests/test_functional.py | 1 | 1968 | from django.urls import reverse
from django.contrib.auth.models import Group
from django.template.response import TemplateResponse
from django.http.response import HttpResponseForbidden
from django.test.client import RequestFactory
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
class AdminPermissionsTest(BluebottleTestCase):
    """Verify admin-site access rules for staff vs. superuser accounts.

    Staff members (in the 'Staff' group, not superusers) should be able to
    reach pages their group grants (e.g. adding initiatives) but be
    forbidden from unrelated model admin pages (e.g. auth groups).
    """
    def setUp(self):
        self.init_projects()
        # Create staff user without superuser permission
        self.user = BlueBottleUserFactory.create(password='testing')
        self.user.is_staff = True
        self.user.is_superuser = False
        self.user.save()
        # Add 'Staff' group for user
        self.user.groups.add(Group.objects.get(name='Staff'))
        self.user.save()
        # Login user
        self.client.login(
            request=RequestFactory().post('/'), email=self.user.email, password='testing'
        )
    def tearDown(self):
        self.client.logout()
        self.user.delete()
    def test_staff_forbidden_access(self):
        # Staff may not manage auth groups.
        response = self.client.get(reverse('admin:auth_group_changelist'))
        self.assertIsInstance(response, HttpResponseForbidden)
    def test_staff_create_initiative(self):
        # Staff may add initiatives.
        response = self.client.get(reverse('admin:initiatives_initiative_add'))
        self.assertIsInstance(response, TemplateResponse)
    def test_superuser_access(self):
        self.client.logout()
        # Create a staff user WITH superuser permission; superusers may
        # access everything, including the auth group admin.
        self.user = BlueBottleUserFactory.create(password='testing')
        self.user.is_staff = True
        self.user.is_superuser = True
        self.user.save()
        self.assertTrue(
            self.client.login(request=RequestFactory().post('/'), email=self.user.email, password='testing'))
        response = self.client.get(reverse('admin:auth_group_changelist'))
        self.assertIsInstance(response, TemplateResponse)
| bsd-3-clause | 00b8a08c8a1493203c2bae21236c3c6f | 33.526316 | 109 | 0.694614 | 4.353982 | false | true | false | false |
onepercentclub/bluebottle | bluebottle/funding_pledge/models.py | 1 | 2278 | from __future__ import absolute_import
from builtins import object
from django.db import models
from django.utils.translation import gettext_lazy as _
from bluebottle.funding.models import Payment, PaymentProvider, BankAccount
class PledgePayment(Payment):
    """Payment type for pledges, which move no real money."""
    # Identifier used to associate this payment with the pledge provider.
    provider = 'pledge'
    def refund(self):
        # Pledges never capture real funds, so there is nothing to refund.
        pass
class PledgePaymentProvider(PaymentProvider):
    """Provider that accepts only pledges; it exposes no payment methods."""
    title = 'Pledges only'
    @property
    def payment_methods(self):
        # No real payment methods exist for pledges.
        return []
    class Meta(object):
        verbose_name = 'Pledge payment provider'
class PledgeBankAccount(BankAccount):
    """Bank account details recorded for pledge payouts.

    All fields are optional free-form data; no validation against a real
    banking system is performed here.
    """
    provider_class = PledgePaymentProvider
    # Account holder identity and address.
    account_holder_name = models.CharField(
        _("Account holder name"), max_length=100, null=True, blank=True)
    account_holder_address = models.CharField(
        _("Account holder address"), max_length=255, null=True, blank=True)
    account_holder_postal_code = models.CharField(
        _("Account holder postal code"), max_length=20, null=True, blank=True)
    account_holder_city = models.CharField(
        _("Account holder city"), max_length=255, null=True, blank=True)
    account_holder_country = models.ForeignKey(
        'geo.Country',
        verbose_name=_('Account holder country'),
        blank=True, null=True,
        related_name="pledge_account_holder_country",
        on_delete=models.CASCADE
    )
    # Account identification at the bank.
    account_number = models.CharField(
        _("Account number"),
        max_length=255, null=True, blank=True)
    account_details = models.CharField(
        _("Account details"),
        max_length=500, null=True, blank=True)
    account_bank_country = models.ForeignKey(
        'geo.Country',
        verbose_name=_('Account bank country'),
        blank=True, null=True,
        related_name="pledge_account_bank_country",
        on_delete=models.CASCADE
    )
    def save(self, *args, **kwargs):
        super(PledgeBankAccount, self).save(*args, **kwargs)
    class Meta(object):
        verbose_name = _('Pledge bank account')
        verbose_name_plural = _('Pledge bank accounts')
    class JSONAPIMeta(object):
        resource_name = 'payout-accounts/pledge-external-accounts'
    def __str__(self):
        return u"Pledge bank account {}".format(self.account_holder_name)
from .states import * # noqa
| bsd-3-clause | b00ede0eb00bf9cfd2390845a0f6da65 | 29.783784 | 78 | 0.662862 | 3.975567 | false | false | false | false |
dimagi/commcare-hq | corehq/messaging/smsbackends/apposit/views.py | 1 | 1148 | import json
from corehq.apps.sms.api import incoming
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.apposit.models import SQLAppositBackend
from django.http import HttpResponse, HttpResponseBadRequest
class AppositIncomingView(IncomingBackendView):
    """Receives inbound SMS callbacks from the Apposit gateway as JSON."""
    urlname = 'apposit_incoming'

    @property
    def backend_class(self):
        return SQLAppositBackend

    def post(self, request, api_key, *args, **kwargs):
        """Parse the JSON payload and hand the message to the SMS framework.

        Returns 400 when the body is not valid UTF-8 JSON or lacks the
        required 'from'/'message' fields.
        """
        try:
            data = json.loads(request.body.decode('utf-8'))
        except ValueError:
            # ValueError covers both json.JSONDecodeError and
            # UnicodeDecodeError; never use a bare except here.
            return HttpResponseBadRequest("Expected valid JSON as HTTP request body")
        from_number = data.get('from')
        message = data.get('message')
        message_id = data.get('messageId')
        if not from_number or not message:
            return HttpResponseBadRequest("Missing 'from' or 'message'")
        incoming(
            from_number,
            message,
            SQLAppositBackend.get_api_id(),
            backend_message_id=message_id,
            domain_scope=self.domain,
            backend_id=self.backend_couch_id
        )
        return HttpResponse("")
| bsd-3-clause | bd250eabc96877e6a9c8ede4aa6b1788 | 30.888889 | 85 | 0.651568 | 4.236162 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/api/resources/messaging_event/pagination.py | 1 | 2685 | import logging
from base64 import b64encode
from urllib.parse import urlencode
from corehq.apps.api.resources.messaging_event.serializers import serialize_event
from corehq.apps.api.resources.messaging_event.utils import get_limit_offset
from corehq.util import reverse
LAST_OBJECT_ID = "last_object_id"
logger = logging.getLogger(__name__)
def get_paged_data(query, request_params):
    """Serialize one page of messaging events plus pagination metadata."""
    page_size = get_limit_offset("limit", request_params, 20, max_value=1000)
    events = _get_objects(query, request_params, page_size)
    serialized = [serialize_event(event) for event in events]
    return {
        "objects": serialized,
        "meta": {
            "limit": page_size,
            "next": _get_cursor(events, request_params),
        },
    }
def _get_objects(query, request_params, limit):
    """Get the list of objects to return for the page.

    For cursor-based pages this fetches one extra object: if the first
    object is the one the previous page ended on (its ID is encoded in the
    cursor) it is skipped; otherwise the extra trailing object is dropped
    so the page size stays consistent.
    """
    if not request_params.is_cursor:
        logger.debug("no cursor, returning normal page")
        return list(query[:limit])

    objects = list(query[:limit + 1])
    last_id = request_params.get(LAST_OBJECT_ID)
    try:
        last_id = int(last_id)
    except (TypeError, ValueError):
        # TypeError: parameter missing (None); ValueError: not an integer
        logger.debug(f"invalid last_object_id: '{last_id}'")
    else:
        # guard against an empty page before peeking at objects[0]
        if objects and objects[0].id == last_id:
            logger.debug(f"Skipping first object in API response: {last_id}")
            return objects[1:]  # remove the first object since it was in the last page
    logger.debug("Dropping last object in API response to keep page size consistent")
    return objects[:-1]
def _get_cursor(objects, request_params):
    """Generate the 'cursor' parameter which includes all query params from the current
    request excluding 'limit' as well as:
    - a date filter for the next page (gte/lte depending on sort direction)
    - the ID of the last object on this page so the next page can skip it

    Returns None when there are no objects (end of results).
    """
    if not objects:
        return None
    ascending = True
    if "order_by" in request_params and request_params["order_by"].startswith("-"):
        ascending = False
    final_object = objects[-1]
    next_page_params = request_params.params.copy()
    next_page_params.update({
        ("date.gte" if ascending else "date.lte"): final_object.date.isoformat(),
        LAST_OBJECT_ID: str(final_object.id),
    })
    encoded = urlencode(next_page_params).encode('utf-8')
    return reverse(
        'api_messaging_event_list',
        args=[request_params.domain],
        params={'cursor': b64encode(encoded)},
        absolute=True,
    )
| bsd-3-clause | e1ee8908b0f797268471f5062c78ed9d | 35.780822 | 111 | 0.65959 | 3.880058 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/userreports/transforms/custom/date.py | 1 | 2177 | import calendar
from datetime import date, datetime
from ethiopian_date import EthiopianDateConverter
from dimagi.utils.dates import force_to_datetime
def get_month_display(month_index):
    """Return the month name for a 1-12 index, or '' for invalid input.

    Accepts ints or numeric strings; any unparseable or out-of-range value
    yields the empty string.
    """
    try:
        return calendar.month_name[int(month_index)]
    except (IndexError, KeyError, ValueError, TypeError):
        # IndexError: out-of-range index (e.g. 13) — month_name is indexed,
        # so KeyError can't actually occur but is kept for safety.
        # ValueError/TypeError: input not convertible to int.
        return ""
def days_elapsed_from_date(date):
    """Whole days between `date` and the current UTC time.

    `date` is normalized via force_to_datetime first — presumably it accepts
    strings and date objects; confirm against dimagi.utils.dates.
    """
    as_datetime = force_to_datetime(date)
    elapsed = datetime.utcnow() - as_datetime
    return elapsed.days
def split_date_string(date_string):
    '''
    Split a date string into its component year, month, and day.
    :param date_string: A date string that is in the format YYYY-MM-DD
    :return: a tuple containing (year, month, day) as ints
    Raises ValueError when the string does not have exactly three
    dash-separated integer parts.
    '''
    year_part, month_part, day_part = date_string.split('-')
    return int(year_part), int(month_part), int(day_part)
def get_ethiopian_to_gregorian(date_string):
    '''
    Convert an Ethiopian date string to the equivalent Gregorian date.
    :param date_string: A date string that is in the format YYYY-MM-DD
    :returns: A gregorian datetime or ''
    '''
    if not date_string or not isinstance(date_string, str):
        return ''
    try:
        year, month, day = split_date_string(date_string)
    except ValueError:
        # malformed date string
        return ''
    try:
        return EthiopianDateConverter.to_gregorian(year, month, day)
    except Exception:
        # best-effort conversion: any converter failure yields ''
        return ''
def get_gregorian_to_ethiopian(date_input):
    '''
    Convert a Gregorian date string or datetime to the equivalent Ethiopian
    date string.
    :param date_input: A datetime or date string
    :returns: An ethiopian date string or ''
    '''
    if not date_input:
        return ''
    try:
        if isinstance(date_input, date):
            date_input = date_input.strftime('%Y-%m-%d')
        year, month, day = split_date_string(date_input)
    except ValueError:
        # malformed date string
        return ''
    try:
        e_year, e_month, e_day = EthiopianDateConverter.to_ethiopian(year, month, day)
        return '{}-{:02d}-{:02d}'.format(e_year, e_month, e_day)
    except Exception:
        # best-effort conversion: any converter failure yields ''
        return ''
| bsd-3-clause | 90f2948d77596af7b33128f4cba3fdb5 | 26.2125 | 110 | 0.651355 | 3.534091 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/app_manager/suite_xml/sections/menus.py | 1 | 11596 | """
MenuContributor
---------------
Menus *approximately* correspond to HQ modules.
Menus *almost* correspond to command lists, the screens in CommCare that ask the user to select a form or sub-menu.
However, if the suite contains multiple ``<menu>`` elements with the same ``id``, they will be concatenated and
displayed as a single screen.
Menu ids will typically map to the module's position in the application: the first menu is ``m0``, second is
``m1``, etc.
Highlights of menu configuration:
* Display conditions, which become ``relevant`` attributes
* Display-only forms, which becomes the ``put_in_root`` attribute
* Grid style, to determine whether the command list should be displayed as a flat list or as a grid that
emphasizes the menu icons
"""
from memoized import memoized
from corehq.apps.app_manager import id_strings
from corehq.apps.app_manager.exceptions import (
CaseXPathValidationError,
ScheduleError,
UsercaseXPathValidationError,
)
from corehq.apps.app_manager.suite_xml.contributors import (
SuiteContributorByModule,
)
from corehq.apps.app_manager.suite_xml.utils import get_module_locale_id
from corehq.apps.app_manager.suite_xml.xml_models import (
Command,
LocalizedMenu,
Menu,
)
from corehq.apps.app_manager.util import (
is_usercase_in_use,
xpath_references_case,
xpath_references_usercase,
)
from corehq.apps.app_manager.xpath import (
CaseIDXPath,
QualifiedScheduleFormXPath,
XPath,
interpolate_xpath,
session_var,
)
from corehq.util.timer import time_method
class MenuContributor(SuiteContributorByModule):
    """Generates the <menu> elements of the suite for a single module.

    See the module docstring for how menus relate to HQ modules; the main
    complications handled here are v1 shadow modules (which fan out into
    multiple menus), display conditions, and grid display styles.
    """
    @time_method()
    def get_module_contributions(self, module, training_menu):
        """Return the list of Menu elements this module contributes."""
        menus = []
        if hasattr(module, 'get_menus'):
            # Some module types know how to build their own menus.
            for menu in module.get_menus(build_profile_id=self.build_profile_id):
                menus.append(menu)
        else:
            module_is_source_for_v1_shadow = any(
                m for m in self._v1_shadow_modules()
                if (m.source_module_id == module.unique_id)
                or (getattr(module, 'root_module', False)
                    and m.source_module_id == module.root_module.unique_id)
            )
            module_is_v1_shadow = getattr(module, 'shadow_module_version', 0) == 1
            if module_is_v1_shadow or module_is_source_for_v1_shadow:
                # Legacy path: v1 shadows fan out into multiple menus.
                for v1_shadow_menu in self._generate_v1_shadow_menus(module, training_menu):
                    menus.append(v1_shadow_menu)
            else:
                root_module = None
                if not module.put_in_root:
                    if module.root_module:
                        root_module = module.root_module
                    elif module.module_type == 'shadow' and module.source_module.root_module:
                        root_module = module.source_module.root_module
                menu = self._generate_menu(module, root_module, training_menu, module)
                if len(menu.commands):
                    menus.append(menu)
        # Apply grid display styles depending on app-level settings.
        if self.app.grid_display_for_all_modules():
            self._give_non_root_menus_grid_style(menus)
        elif self.app.grid_display_for_some_modules():
            if hasattr(module, 'grid_display_style') and module.grid_display_style():
                self._give_non_root_menus_grid_style(menus)
        if self.app.use_grid_menus:
            self._give_root_menu_grid_style(menus)
        return menus
    @memoized
    def _v1_shadow_modules(self):
        """All version-1 shadow modules in the app that have a source module."""
        return [
            m for m in self.app.get_modules()
            if m.module_type == 'shadow'
            and m.shadow_module_version == 1
            and m.source_module_id
        ]
    def _generate_v1_shadow_menus(self, module, training_menu):
        # V1 shadow modules create a 'fake' module for any child shadow menus
        # These child shadow modules don't have a representation in the DB, but
        # are needed in the suite to add in all the child forms.
        # This behaviour has been superseded by v2 shadow modules.
        id_modules = [module]  # the current module and all of its shadows
        root_modules = []  # the current module's parent and all of that parent's shadows
        if not module.put_in_root and module.root_module:
            root_modules.append(module.root_module)
            for shadow in self._v1_shadow_modules():
                if module.root_module.unique_id == shadow.source_module_id:
                    root_modules.append(shadow)
        else:
            root_modules.append(None)
        if module.put_in_root and module.root_module:
            for shadow in self._v1_shadow_modules():
                if module.root_module.unique_id == shadow.source_module_id:
                    id_modules.append(shadow)
        # Emit one menu per (id_module, root_module) combination.
        for id_module in id_modules:
            for root_module in root_modules:
                menu = self._generate_menu(module, root_module, training_menu, id_module)
                if len(menu.commands):
                    yield menu
    def _generate_menu(self, module, root_module, training_menu, id_module):
        # In general, `id_module` and `module` will be the same thing.
        # In the case of v1 shadow menus, `id_module` is either the current module or one of that module's shadows
        # For more information, see the note in `_generate_v1_shadow_menus`.
        from corehq.apps.app_manager.models import ShadowModule
        menu_kwargs = {}
        suffix = ""
        if id_module.is_training_module:
            menu_kwargs.update({'root': 'training-root'})
        elif root_module:
            menu_kwargs.update({'root': id_strings.menu_id(root_module)})
            suffix = id_strings.menu_id(root_module) if isinstance(root_module, ShadowModule) else ""
        menu_kwargs.update({'id': id_strings.menu_id(id_module, suffix)})
        # Determine relevancy
        if self.app.enable_module_filtering:
            relevancy = id_module.module_filter
            # If module has a parent, incorporate the parent's relevancy.
            # This is only necessary when the child uses display only forms.
            if id_module.put_in_root and id_module.root_module and id_module.root_module.module_filter:
                if relevancy:
                    relevancy = str(XPath.and_(XPath(relevancy).paren(force=True),
                                               XPath(id_module.root_module.module_filter).paren(force=True)))
                else:
                    relevancy = id_module.root_module.module_filter
            if relevancy:
                menu_kwargs['relevant'] = interpolate_xpath(relevancy)
        # Localized media vs. plain menu depends on the app setting.
        if self.app.enable_localized_menu_media:
            module_custom_icon = module.custom_icon
            menu_kwargs.update({
                'menu_locale_id': get_module_locale_id(module),
                'media_image': module.uses_image(build_profile_id=self.build_profile_id),
                'media_audio': module.uses_audio(build_profile_id=self.build_profile_id),
                'image_locale_id': id_strings.module_icon_locale(module),
                'audio_locale_id': id_strings.module_audio_locale(module),
                'custom_icon_locale_id': (
                    id_strings.module_custom_icon_locale(module, module_custom_icon.form)
                    if module_custom_icon and not module_custom_icon.xpath else None),
                'custom_icon_form': (module_custom_icon.form if module_custom_icon else None),
                'custom_icon_xpath': (module_custom_icon.xpath
                                      if module_custom_icon and module_custom_icon.xpath else None),
            })
            menu = LocalizedMenu(**menu_kwargs)
        else:
            menu_kwargs.update({
                'media_image': module.default_media_image,
                'media_audio': module.default_media_audio,
                'locale_id': get_module_locale_id(module),
            })
            menu = Menu(**menu_kwargs)
        # Shadow modules may exclude specific forms from the menu.
        excluded_form_ids = []
        if root_module and isinstance(root_module, ShadowModule):
            excluded_form_ids = root_module.excluded_form_ids
        if id_module and isinstance(id_module, ShadowModule):
            excluded_form_ids = id_module.excluded_form_ids
        commands = self._get_commands(excluded_form_ids, module)
        if module.is_training_module and module.put_in_root and training_menu:
            # Training modules in root attach their commands to the shared
            # training menu instead of their own.
            training_menu.commands.extend(commands)
        else:
            menu.commands.extend(commands)
        return menu
    @staticmethod
    def _schedule_filter_conditions(form, module, case):
        """Relevancy condition from the module's visit schedule, or None."""
        phase = form.get_phase()
        try:
            form_xpath = QualifiedScheduleFormXPath(form, phase, module, case_xpath=case)
            relevant = form_xpath.filter_condition(phase.id)
        except ScheduleError:
            relevant = None
        return relevant
    @staticmethod
    def _give_non_root_menus_grid_style(menus):
        for menu in menus:
            if not menu.id == id_strings.ROOT:
                menu.style = "grid"
    @staticmethod
    def _give_root_menu_grid_style(menus):
        for menu in menus:
            if menu.id == id_strings.ROOT:
                menu.style = "grid"
    @staticmethod
    def _give_all_menus_grid_style(menus):
        for menu in menus:
            menu.style = "grid"
    def _get_commands(self, excluded_form_ids, module):
        """Yield a Command for each visible form plus the case list, applying
        form display conditions and schedule filters."""
        @memoized
        def module_uses_case():
            return module.all_forms_require_a_case()
        @memoized
        def domain_uses_usercase():
            return is_usercase_in_use(self.app.domain)
        for form in module.get_suite_forms():
            if form.unique_id in excluded_form_ids:
                continue
            command = Command(id=id_strings.form_command(form, module))
            if form.requires_case():
                var_name = self.entries_helper.get_case_session_var_for_form(form)
                case = CaseIDXPath(session_var(var_name)).case()
            else:
                case = None
            if getattr(form, 'form_filter', None):
                fixture_xpath = (
                    session_var(id_strings.fixture_session_var(module)) if module.fixture_select.active
                    else None
                )
                interpolated_xpath = interpolate_xpath(form.form_filter, case, fixture_xpath,
                                                       module=module, form=form)
                # A filter that references the case/usercase is invalid when
                # no case (or usercase) is guaranteed to be in the session.
                if xpath_references_case(interpolated_xpath) and \
                        (not module_uses_case() or
                         module.put_in_root and not module.root_requires_same_case()):
                    raise CaseXPathValidationError(module=module, form=form)
                if xpath_references_usercase(interpolated_xpath) and not domain_uses_usercase():
                    raise UsercaseXPathValidationError(module=module, form=form)
                command.relevant = interpolated_xpath
            if getattr(module, 'has_schedule', False) and module.all_forms_require_a_case():
                # If there is a schedule and another filter condition, disregard it...
                # Other forms of filtering are disabled in the UI
                schedule_filter_condition = MenuContributor._schedule_filter_conditions(form, module, case)
                if schedule_filter_condition is not None:
                    command.relevant = schedule_filter_condition
            yield command
        if hasattr(module, 'case_list') and module.case_list.show:
            yield Command(id=id_strings.case_list_command(module))
| bsd-3-clause | b58567d381dc918ad671098fbe119d40 | 41.47619 | 115 | 0.605985 | 4.034795 | false | false | false | false |
dimagi/commcare-hq | corehq/ex-submodules/pillowtop/management/commands/update_es_settings.py | 1 | 2244 | from django.core.management.base import BaseCommand, CommandError
from corehq.elastic import get_es_new
from corehq.pillows.utils import get_all_expected_es_indices
class Command(BaseCommand):
    """Management command that syncs the replica count of existing ES
    indices with the value declared in code, after confirmation."""
    help = "Update dynamic settings for existing elasticsearch indices."
    def add_arguments(self, parser):
        parser.add_argument(
            '--noinput',
            action='store_true',
            dest='noinput',
            default=False,
            help='Skip important confirmation warnings.'
        )
    def handle(self, **options):
        noinput = options.pop('noinput')
        es_indices = list(get_all_expected_es_indices())
        to_update = []
        es = get_es_new()
        # Collect indices whose live replica count differs from the one
        # declared in the index metadata.
        for index_info in es_indices:
            old_settings = es.indices.get_settings(index=index_info.index)
            old_number_of_replicas = int(
                old_settings[index_info.index]['settings']['index']['number_of_replicas']
            )
            new_number_of_replicas = index_info.meta['settings']['number_of_replicas']
            if old_number_of_replicas != new_number_of_replicas:
                print("{} [{}]:\n Number of replicas changing from {!r} to {!r}".format(
                    index_info.alias, index_info.index, old_number_of_replicas, new_number_of_replicas))
                to_update.append((index_info, {
                    'number_of_replicas': new_number_of_replicas,
                }))
        if not to_update:
            print("There is nothing to update.")
            return
        # Apply the changes only after explicit confirmation (or --noinput).
        if (noinput or _confirm(
                "Confirm that you want to update all the settings above?")):
            for index_info, settings in to_update:
                mapping_res = es.indices.put_settings(index=index_info.index, body=settings)
                if mapping_res.get('acknowledged', False):
                    print("{} [{}]:\n Index settings successfully updated".format(
                        index_info.alias, index_info.index))
                else:
                    # Surface the raw response so failures are visible.
                    print(mapping_res)
def _confirm(message):
    """Prompt the user with `message`; return True on 'y', otherwise abort
    the command by raising CommandError."""
    answer = input('{} [y/n]'.format(message)).lower()
    if answer == 'y':
        return True
    raise CommandError('abort')
| bsd-3-clause | 353099dd486c7aa7d9c6d744af1bce42 | 36.4 | 104 | 0.56016 | 4.218045 | false | false | false | false |
dimagi/commcare-hq | corehq/form_processor/migrations/0060_convert_case_ids_to_foreign_keys.py | 1 | 2783 | from django.db import models, migrations
class Migration(migrations.Migration):
    """Convert ledger case_id char columns into real foreign keys.

    Uses raw SQL (with matching Django state_operations) so the existing
    columns are kept and only constraints are added, avoiding the
    add-column/copy/drop dance Django would otherwise perform.
    """
    dependencies = [
        ('form_processor', '0059_remove_ledgervalue_location_id'),
    ]
    operations = [
        # The other way to convert a field to a foreign key involves creating a new column
        # doing a data migration and then dropping the old column which seemed ridiculous to me
        migrations.RunSQL(
            'ALTER TABLE "form_processor_ledgervalue" '
            'ADD CONSTRAINT "cd40c15ceaad5d793e09d0b69eb4ed88" FOREIGN KEY ("case_id") '
            'REFERENCES "form_processor_commcarecasesql" ("case_id") DEFERRABLE INITIALLY DEFERRED',
            "ALTER TABLE form_processor_ledgervalue DROP CONSTRAINT cd40c15ceaad5d793e09d0b69eb4ed88;",
            state_operations=[
                migrations.RemoveField(
                    model_name='ledgervalue',
                    name='case_id',
                ),
                migrations.AddField(
                    model_name='ledgervalue',
                    name='case',
                    field=models.ForeignKey(to='form_processor.CommCareCaseSQL',
                                            to_field='case_id', db_index=False,
                                            on_delete=models.CASCADE),
                    preserve_default=False,
                ),
                migrations.AlterUniqueTogether(
                    name='ledgervalue',
                    unique_together=set([('case', 'section_id', 'entry_id')]),
                ),
            ]
        ),
        migrations.RunSQL(
            'ALTER TABLE "form_processor_ledgertransaction" '
            'ADD CONSTRAINT "D35e6052ba235dcd116c9c37ba096e19" FOREIGN KEY ("case_id") '
            'REFERENCES "form_processor_commcarecasesql" ("case_id") DEFERRABLE INITIALLY DEFERRED;',
            "ALTER TABLE form_processor_ledgertransaction DROP CONSTRAINT D35e6052ba235dcd116c9c37ba096e19;",
            state_operations=[
                migrations.AddField(
                    model_name='ledgertransaction',
                    name='case',
                    field=models.ForeignKey(default='__none__', to='form_processor.CommCareCaseSQL',
                                            to_field='case_id', db_index=False,
                                            on_delete=models.CASCADE),
                    preserve_default=False,
                ),
                migrations.AlterIndexTogether(
                    name='ledgertransaction',
                    index_together=set([('case', 'section_id', 'entry_id')]),
                ),
                migrations.RemoveField(
                    model_name='ledgertransaction',
                    name='case_id',
                ),
            ]
        ),
    ]
| bsd-3-clause | fd2e3dd295aaae6104fdc15bb1ba6dd6 | 43.174603 | 109 | 0.521739 | 4.669463 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/export/models/incremental.py | 1 | 5873 | from uuid import uuid4
from django.db import models
from couchexport.models import Format
from corehq.apps.export.dbaccessors import get_properly_wrapped_export_instance
from corehq.apps.export.export import (
ExportFile,
get_export_query,
get_export_writer,
write_export_instance,
)
from corehq.apps.export.filters import ServerModifiedOnRangeFilter
from corehq.blobs import CODES, get_blob_db
from corehq.motech.models import RequestLog
from corehq.util.files import TransientTempfile
from corehq.util.metrics import metrics_track_errors
class IncrementalExport(models.Model):
    """Configuration for repeatedly exporting only newly-modified documents
    to a remote endpoint, tracked through checkpoints."""
    domain = models.CharField(max_length=100)
    name = models.CharField(max_length=255)
    # Couch ID of the export configuration this incremental export is based on.
    export_instance_id = models.CharField(max_length=126, db_index=True)
    connection_settings = models.ForeignKey('motech.ConnectionSettings', on_delete=models.CASCADE)
    date_created = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)
    active = models.BooleanField(default=True)
    def checkpoint(self, doc_count, last_doc_date):
        """Record a new checkpoint after an export run."""
        return IncrementalExportCheckpoint.objects.create(
            incremental_export=self,
            doc_count=doc_count,
            last_doc_date=last_doc_date
        )
    @property
    def export_instance(self):
        # Fetched from Couch; not cached here.
        return get_properly_wrapped_export_instance(self.export_instance_id)
    @property
    def last_valid_checkpoint(self):
        """Most recent checkpoint whose delivery succeeded, or None."""
        return self.checkpoints.filter(status=IncrementalExportStatus.SUCCESS).order_by('-date_created').first()
class IncrementalExportStatus(object):
    """Enumerates the possible outcomes of delivering an incremental export."""
    SUCCESS = 1
    FAILURE = 2
    CHOICES = (
        (SUCCESS, "success"),
        (FAILURE, "failure"),
    )
    @staticmethod
    def from_log_entry(entry):
        """Map an HTTP request log entry to a status via its response code."""
        delivered = entry.response_status in (200, 201)
        return IncrementalExportStatus.SUCCESS if delivered else IncrementalExportStatus.FAILURE
class IncrementalExportCheckpoint(models.Model):
    """One export run's outcome: the generated file (stored as a blob),
    how many docs it covered, and whether delivery succeeded."""
    incremental_export = models.ForeignKey(IncrementalExport, related_name='checkpoints', on_delete=models.CASCADE)
    date_created = models.DateTimeField(auto_now_add=True)
    doc_count = models.IntegerField(null=True)
    # server_modified_on of the last exported doc; next run filters from here.
    last_doc_date = models.DateTimeField()
    blob_key = models.UUIDField(default=uuid4)
    status = models.PositiveSmallIntegerField(choices=IncrementalExportStatus.CHOICES, null=True)
    request_log = models.ForeignKey(RequestLog, on_delete=models.CASCADE, null=True)
    @property
    def blob_parent_id(self):
        return str(self.id)
    def get_blob(self):
        """Return the stored export file from the blob DB."""
        db = get_blob_db()
        return db.get(key=str(self.blob_key), type_code=CODES.data_export)
    def blob_exists(self):
        db = get_blob_db()
        return db.exists(key=str(self.blob_key))
    def log_request(self, log_level, log_entry):
        """Persist the delivery attempt's log and derive this checkpoint's status."""
        log = RequestLog.log(log_level, log_entry)
        self.status = IncrementalExportStatus.from_log_entry(log_entry)
        self.request_log = log
        self.save()
    @property
    def filename(self):
        # e.g. "my-export-2020-01-31-23-59-59.csv"
        date_suffix = self.date_created.replace(microsecond=0).strftime('%Y-%m-%d-%H-%M-%S')
        return f'{self.incremental_export.name}-{date_suffix}.csv'
def generate_and_send_incremental_export(incremental_export, from_date):
    """Generate an incremental export of documents modified after
    ``from_date`` and, if any documents were exported, deliver the file.
    Returns the new checkpoint, or None when there was nothing to export."""
    checkpoint = _generate_incremental_export(incremental_export, from_date)
    if checkpoint:
        _send_incremental_export(incremental_export, checkpoint)
    return checkpoint
def _generate_incremental_export(incremental_export, last_doc_date=None):
    """Write an unzipped CSV of documents modified after ``last_doc_date``
    to the blob DB, and return the new checkpoint (None if no documents
    matched)."""
    export_instance = incremental_export.export_instance
    export_instance.export_format = Format.UNZIPPED_CSV # force to unzipped CSV
    # Remove the date period from the ExportInstance, since this is added automatically by Daily Saved exports
    export_instance.filters.date_period = None
    filters = export_instance.get_filters()
    if last_doc_date:
        filters.append(ServerModifiedOnRangeFilter(gt=last_doc_date))
    # Wraps the doc iterator so we can record, after writing the export,
    # how many docs were seen and which one came last (the docs are sorted
    # by server_modified_on below, so the last doc is the newest).
    class LastDocTracker:
        def __init__(self, doc_iterator):
            self.doc_iterator = doc_iterator
            self.last_doc = None
            self.doc_count = 0
        def __iter__(self):
            for doc in self.doc_iterator:
                self.last_doc = doc
                self.doc_count += 1
                yield doc
    with TransientTempfile() as temp_path, metrics_track_errors('generate_incremental_exports'):
        writer = get_export_writer([export_instance], temp_path, allow_pagination=False)
        with writer.open([export_instance]):
            query = get_export_query(export_instance, filters)
            query = query.sort('server_modified_on')  # reset sort to this instead of opened_on
            docs = LastDocTracker(query.run().hits)
            write_export_instance(writer, export_instance, docs)
        export_file = ExportFile(writer.path, writer.format)
        # Nothing new since the last run: no checkpoint, no blob.
        if docs.doc_count <= 0:
            return
        new_checkpoint = incremental_export.checkpoint(
            docs.doc_count, docs.last_doc.get('server_modified_on')
        )
        with export_file as file_:
            db = get_blob_db()
            db.put(
                file_,
                domain=incremental_export.domain,
                parent_id=new_checkpoint.blob_parent_id,
                type_code=CODES.data_export,
                key=str(new_checkpoint.blob_key),
                timeout=24 * 60  # blob expires after 24 hours (value is in minutes)
            )
    return new_checkpoint
def _send_incremental_export(export, checkpoint):
    """POST the checkpoint's CSV blob to the export's remote endpoint.
    The request outcome is recorded on the checkpoint via its
    ``log_request`` callback (wired up in ``_get_requests``)."""
    requests = _get_requests(checkpoint, export)
    headers = {
        'Accept': 'application/json'
    }
    files = {'file': (checkpoint.filename, checkpoint.get_blob(), 'text/csv')}
    requests.post(endpoint='', files=files, headers=headers)
def _get_requests(checkpoint, export):
    """Build a requests helper bound to the export's connection settings,
    logging each request against the given checkpoint."""
    return export.connection_settings.get_requests(checkpoint.id, checkpoint.log_request)
| bsd-3-clause | 67fc8b3fc47bd4a138bafb63ec6dd877 | 34.593939 | 115 | 0.674442 | 3.92318 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/initiatives/migrations/0018_auto_20191106_0928.py | 1 | 2363 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-06 08:28
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import multiselectfield.db.fields
class Migration(migrations.Migration):
    """Auto-generated (Django 1.11): adds ``contact_method`` to the
    initiative platform settings and updates verbose names / choices on
    several initiative fields. Do not edit the operations by hand."""
    dependencies = [
        ('initiatives', '0017_auto_20191031_1439'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='initiativeplatformsettings',
            options={'verbose_name': 'initiative settings', 'verbose_name_plural': 'initiative settings'},
        ),
        migrations.AddField(
            model_name='initiativeplatformsettings',
            name='contact_method',
            field=models.CharField(choices=[(b'mail', 'E-mail'), (b'phone', 'Phone')], default=b'mail', max_length=100),
        ),
        migrations.AlterField(
            model_name='initiative',
            name='location',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='geo.Location', verbose_name='Office location'),
        ),
        migrations.AlterField(
            model_name='initiative',
            name='place',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='geo.Geolocation', verbose_name='Impact location'),
        ),
        migrations.AlterField(
            model_name='initiative',
            name='promoter',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='promoter_initiatives', to=settings.AUTH_USER_MODEL, verbose_name='promoter'),
        ),
        migrations.AlterField(
            model_name='initiativeplatformsettings',
            name='activity_types',
            field=multiselectfield.db.fields.MultiSelectField(choices=[(b'funding', 'Funding'), (b'event', 'Events'), (b'assignment', 'Assignment')], max_length=100),
        ),
        migrations.AlterField(
            model_name='initiativeplatformsettings',
            name='search_filters',
            field=multiselectfield.db.fields.MultiSelectField(choices=[(b'location', 'Location'), (b'date', 'Date'), (b'skill', 'Skill'), (b'type', 'Type'), (b'theme', 'Theme'), (b'category', 'Category'), (b'status', 'Status')], max_length=1000),
        ),
    ]
| bsd-3-clause | 4f7c842ffab273a02550a8d86ea1fb17 | 44.442308 | 246 | 0.629285 | 3.938333 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/sms/migrations/0003_add_backend_models.py | 1 | 7399 | from django.db import migrations, models
import jsonfield.fields
import dimagi.utils.couch.migration
class Migration(migrations.Migration):
    """Adds the SQL mobile-backend models (synced to Couch via
    ``SyncSQLToCouchMixin``), one proxy model per SMS gateway sharing the
    ``messaging_mobilebackend`` table, and widens the recipient_type
    choices on MessagingEvent. Do not edit the operations by hand."""
    dependencies = [
        ('sms', '0002_add_selfregistrationinvitation'),
    ]
    operations = [
        migrations.CreateModel(
            name='SQLMobileBackend',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('couch_id', models.CharField(max_length=126, null=True, db_index=True)),
                ('backend_type', models.CharField(default='SMS', max_length=3, choices=[('SMS', 'SMS'), ('IVR', 'IVR')])),
                ('inbound_api_key', models.UUIDField(unique=True, max_length=32, editable=False, blank=True)),
                ('hq_api_id', models.CharField(max_length=126, null=True)),
                ('is_global', models.BooleanField(default=False)),
                ('domain', models.CharField(max_length=126, null=True, db_index=True)),
                ('name', models.CharField(max_length=126)),
                ('display_name', models.CharField(max_length=126, null=True)),
                ('description', models.TextField(null=True)),
                ('supported_countries', jsonfield.fields.JSONField(default=list)),
                ('extra_fields', jsonfield.fields.JSONField(default=dict)),
                ('deleted', models.BooleanField(default=False)),
                ('load_balancing_numbers', jsonfield.fields.JSONField(default=list)),
                ('reply_to_phone_number', models.CharField(max_length=126, null=True)),
            ],
            options={
                'db_table': 'messaging_mobilebackend',
            },
            bases=(dimagi.utils.couch.migration.SyncSQLToCouchMixin, models.Model),
        ),
        migrations.CreateModel(
            name='SQLMobileBackendMapping',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('couch_id', models.CharField(max_length=126, null=True, db_index=True)),
                ('is_global', models.BooleanField(default=False)),
                ('domain', models.CharField(max_length=126, null=True)),
                ('backend_type', models.CharField(max_length=3, choices=[('SMS', 'SMS'), ('IVR', 'IVR')])),
                ('prefix', models.CharField(max_length=25)),
                ('backend', models.ForeignKey(to='sms.SQLMobileBackend', on_delete=models.CASCADE)),
            ],
            options={
                'db_table': 'messaging_mobilebackendmapping',
            },
            bases=(dimagi.utils.couch.migration.SyncSQLToCouchMixin, models.Model),
        ),
        migrations.CreateModel(
            name='MobileBackendInvitation',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('domain', models.CharField(max_length=126, null=True, db_index=True)),
                ('backend', models.ForeignKey(to='sms.SQLMobileBackend', on_delete=models.CASCADE)),
                ('accepted', models.BooleanField(default=False)),
            ],
            options={
                'db_table': 'messaging_mobilebackendinvitation',
            },
            bases=(models.Model,),
        ),
        migrations.AlterUniqueTogether(
            name='sqlmobilebackend',
            unique_together=set([('domain', 'name')]),
        ),
        migrations.AlterUniqueTogether(
            name='mobilebackendinvitation',
            unique_together=set([('backend', 'domain')]),
        ),
        # Proxy models below add no columns; each represents one gateway
        # implementation on top of the shared table.
        migrations.CreateModel(
            name='SQLSMSBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlmobilebackend',),
        ),
        migrations.CreateModel(
            name='SQLMegamobileBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlsmsbackend',),
        ),
        migrations.CreateModel(
            name='SQLMachBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlsmsbackend',),
        ),
        migrations.CreateModel(
            name='SQLHttpBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlsmsbackend',),
        ),
        migrations.CreateModel(
            name='SQLSislogBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlhttpbackend',),
        ),
        migrations.CreateModel(
            name='SQLGrapevineBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlsmsbackend',),
        ),
        migrations.CreateModel(
            name='SQLAppositBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlsmsbackend',),
        ),
        migrations.CreateModel(
            name='SQLSMSGHBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlsmsbackend',),
        ),
        migrations.CreateModel(
            name='SQLTelerivetBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlsmsbackend',),
        ),
        migrations.CreateModel(
            name='SQLTestSMSBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlsmsbackend',),
        ),
        migrations.CreateModel(
            name='SQLTropoBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlsmsbackend',),
        ),
        migrations.CreateModel(
            name='SQLTwilioBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlsmsbackend',),
        ),
        migrations.CreateModel(
            name='SQLUnicelBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlsmsbackend',),
        ),
        migrations.CreateModel(
            name='SQLYoBackend',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('sms.sqlhttpbackend',),
        ),
        migrations.AlterField(
            model_name='messagingevent',
            name='recipient_type',
            field=models.CharField(db_index=True, max_length=3, null=True, choices=[('CAS', 'Case'), ('MOB', 'Mobile Worker'), ('WEB', 'Web User'), ('UGP', 'User Group'), ('CGP', 'Case Group'), ('MUL', 'Multiple Recipients'), ('LOC', 'Location'), ('LC+', 'Location (including child locations)'), ('VLC', 'Multiple Locations'), ('VL+', 'Multiple Locations (including child locations)'), ('UNK', 'Unknown Contact')]),
            preserve_default=True,
        ),
    ]
| bsd-3-clause | 01d07c72502026b18ec236f77f5f2b48 | 34.572115 | 415 | 0.487093 | 4.650534 | false | false | false | false |
dimagi/commcare-hq | corehq/form_processor/models/util.py | 1 | 1551 | from collections import namedtuple
from functools import lru_cache
from itertools import groupby
from corehq.util.metrics import metrics_counter
def fetchall_as_namedtuple(cursor):
    """Yield each remaining row of ``cursor`` as a namedtuple whose fields
    are the cursor's column names."""
    column_names = tuple(description[0] for description in cursor.description)
    row_type = _get_result_tuple(column_names)
    return (row_type(*values) for values in cursor)
@lru_cache
def _get_result_tuple(names):
    # Cached per distinct column-name tuple; the counter tracks cache misses
    # so runaway cardinality would show up in metrics.
    metrics_counter("commcare.lru.result_tuple.cachemiss")
    return namedtuple('Result', names)
def sort_with_id_list(object_list, id_list, id_property):
    """Sort ``object_list`` in place to match the order of ``id_list``.

    SQL does not necessarily return rows in any particular order, so the
    caller supplies the desired id ordering. Each object's ``id_property``
    attribute must appear in ``id_list`` (a missing id raises KeyError).
    Nothing is returned; the list is mutated via ``list.sort``.
    """
    position_by_id = {item_id: position for position, item_id in enumerate(id_list)}
    object_list.sort(key=lambda obj: position_by_id[getattr(obj, id_property)])
def attach_prefetch_models(objects_by_id, prefetched_models, link_field_name, cached_attrib_name):
    """Attach prefetched related models to their parent objects.

    Groups ``prefetched_models`` by their ``link_field_name`` attribute and
    sets each group as a list on the matching parent's
    ``cached_attrib_name``; parents with no related models get an empty
    list. Note ``itertools.groupby`` only groups consecutive items, so
    ``prefetched_models`` must already be ordered by the link field.
    """
    matched_ids = set()
    grouped = groupby(prefetched_models, key=lambda model: getattr(model, link_field_name))
    for parent_id, related_models in grouped:
        matched_ids.add(parent_id)
        setattr(objects_by_id[parent_id], cached_attrib_name, list(related_models))
    for parent_id in set(objects_by_id) - matched_ids:
        setattr(objects_by_id[parent_id], cached_attrib_name, [])
| bsd-3-clause | 63284e25d163a557a03ce86574b00bf1 | 32 | 98 | 0.69568 | 3.541096 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/linked_domain/management/commands/unlink_apps.py | 1 | 2368 | from django.core.management.base import BaseCommand
from couchdbkit import ResourceNotFound
from corehq.apps.app_manager.models import Application
from corehq.apps.linked_domain.models import DomainLink, DomainLinkHistory
class Command(BaseCommand):
    help = "Unlinks linked project spaces and converts the downstream app into a standalone app."
    def add_arguments(self, parser):
        parser.add_argument(
            'linked_app_id',
            help='The ID of the downstream app'
        )
        parser.add_argument(
            'linked_domain',
            help='The name of the downstream project space'
        )
        parser.add_argument(
            'master_domain',
            help='The name of the master project space'
        )
    def handle(self, linked_app_id, linked_domain, master_domain, **options):
        # Validate the app exists and actually belongs to the given
        # downstream domain before doing anything destructive.
        try:
            linked_app = Application.get(linked_app_id)
        except ResourceNotFound:
            print('No downstream app found for ID {} '.format(linked_app_id))
            return
        if linked_app.domain != linked_domain:
            print("Project space in the app found from ID {} does not match the linked project space "
                  "that was given.".format(linked_app_id))
            return
        # Interactive confirmation — this command is irreversible.
        confirm = input(
            """
            Found {} in project space {} linked to project space {}.
            Are you sure you want to un-link these apps? [y/n]
            """.format(linked_app.name, linked_domain, master_domain)
        )
        if confirm.lower() != 'y':
            return
        print('Unlinking apps')
        # Converting drops the LinkedApplication doc type, making the app standalone.
        linked_app = linked_app.convert_to_application()
        linked_app.save()
        self.hide_domain_link_history(linked_domain, linked_app_id, master_domain)
        print('Operation completed')
    @staticmethod
    def hide_domain_link_history(linked_domain, linked_app_id, master_domain):
        """Hide history entries for the unlinked app; if the link has no
        visible history left at all, soft-delete the link itself."""
        domain_link = DomainLink.all_objects.get(linked_domain=linked_domain, master_domain=master_domain)
        for history in DomainLinkHistory.objects.filter(link=domain_link):
            if history.model_detail['app_id'] == linked_app_id:
                history.hidden = True
                history.save()
        if not DomainLinkHistory.objects.filter(link=domain_link).exists():
            domain_link.deleted = True
            domain_link.save()
| bsd-3-clause | 7e0a05b7cdb122afaf3b94982d4ce119 | 37.193548 | 106 | 0.619088 | 4.251346 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/cms/migrations/0051_auto_20171024_1631.py | 1 | 3129 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2017-10-24 14:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 1.10): makes ``sub_title`` optional
    (blank/null) with max_length=400 on every CMS content block model.
    Do not edit the operations by hand."""
    dependencies = [
        ('cms', '0050_auto_20171024_1623'),
    ]
    operations = [
        migrations.AlterField(
            model_name='categoriescontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='linkscontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='locationscontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='logoscontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='projectimagescontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='projectscontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='projectsmapcontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='quotescontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='shareresultscontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='slidescontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='statscontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='stepscontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='supportertotalcontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='surveycontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='taskscontent',
            name='sub_title',
            field=models.CharField(blank=True, max_length=400, null=True),
        ),
    ]
| bsd-3-clause | 321f73aa7fabfb05bed77612734f975a | 33.766667 | 74 | 0.563439 | 4.239837 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/impact/migrations/0011_auto_20200812_1038.py | 1 | 1042 | from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.utils.utils import update_group_permissions
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
def add_group_permissions(apps, schema_editor):
    """Data migration: grant Staff full CRUD permissions on the impact
    models (ImpactType, ImpactGoal) within the current tenant schema."""
    tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
    # Permissions live per tenant, so run inside the tenant context.
    with LocalTenant(tenant):
        group_perms = {
            'Staff': {
                'perms': (
                    'add_impacttype', 'change_impacttype', 'delete_impacttype',
                    'add_impactgoal', 'change_impactgoal', 'delete_impactgoal',
                )
            },
        }
        update_group_permissions('impact', group_perms, apps)
class Migration(migrations.Migration):
    """Runs the ``add_group_permissions`` data migration; reversing it is a
    no-op (the granted permissions are left in place)."""
    dependencies = [
        ('impact', '0010_impacttypetranslation_unit'),
    ]
    operations = [
        migrations.RunPython(
            add_group_permissions,
            migrations.RunPython.noop
        )
    ]
| bsd-3-clause | 90b8534cbcfd1b795861d47a0bb90c83 | 26.421053 | 79 | 0.633397 | 4.378151 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/organizations/tests/test_api.py | 1 | 11107 |
from future import standard_library
standard_library.install_aliases()
from urllib.parse import urlencode
import json
from django.urls import reverse
from rest_framework import status
from bluebottle.organizations.models import Organization
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from bluebottle.test.factory_models.organizations import (
OrganizationContactFactory, OrganizationFactory
)
from bluebottle.test.utils import BluebottleTestCase, JSONAPITestClient
class OrganizationsEndpointTestCase(BluebottleTestCase):
    """
    Base class for test cases for the ``organizations`` module API.

    Creates two users and five organizations: user_1 owns four
    (two names containing "Knight"/"Knievel", two containing "Kids")
    and user_2 owns one, so subclasses can exercise list counts and
    search filtering.
    """
    def setUp(self):
        super(OrganizationsEndpointTestCase, self).setUp()
        self.client = JSONAPITestClient()
        self.user_1 = BlueBottleUserFactory.create()
        self.user_1_token = "JWT {0}".format(self.user_1.get_jwt_token())
        self.user_2 = BlueBottleUserFactory.create()
        self.organization_1 = OrganizationFactory.create(
            owner=self.user_1,
            name='Evil Knight'
        )
        self.organization_2 = OrganizationFactory.create(
            owner=self.user_1,
            name='Evel Knievel'
        )
        self.organization_3 = OrganizationFactory.create(
            owner=self.user_1,
            name='Hanson Kids'
        )
        self.organization_4 = OrganizationFactory.create(
            owner=self.user_1,
            name='Knight Rider'
        )
        self.organization_5 = OrganizationFactory.create(
            owner=self.user_2,
            name='Kids Club'
        )
class OrganizationListTestCase(OrganizationsEndpointTestCase):
    """
    Test case for the ``OrganizationList`` API view.
    """
    def test_unauth_api_organizations_list_endpoint(self):
        """
        Tests that the list of organizations can not be accessed if
        not authenticated.
        """
        response = self.client.get(reverse('organization_list'))
        self.assertEqual(response.status_code, 401)
    def test_auth_api_organizations_list_endpoint(self):
        """
        Tests that an authenticated user gets the full list of
        organizations when no search term is supplied.
        """
        response = self.client.get(reverse('organization_list'),
                                   user=self.user_1)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['meta']['pagination']['count'], 5)
    def test_api_organizations_search(self):
        """
        Tests that the organizations search matches substrings exactly
        (no fuzzy matching).
        """
        # Search for organizations with "evil" in their name.
        url = "{}?{}".format(reverse('organization_list'), urlencode({'filter[search]': 'Evil'}))
        response = self.client.get(url, user=self.user_1)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Only 'Evil Knight' matches; the near-miss 'Evel Knievel' does not.
        self.assertEqual(response.data['meta']['pagination']['count'], 1)
    def test_api_organizations_search_extended(self):
        """
        Tests that search matches a name substring anywhere in the name
        ('Evil Knight' and 'Knight Rider' both contain 'Knight').
        """
        url = "{}?{}".format(reverse('organization_list'), urlencode({'filter[search]': 'Knight'}))
        response = self.client.get(url, user=self.user_1)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['meta']['pagination']['count'], 2)
    def test_api_organizations_search_case_insensitve(self):
        """
        Tests that the organizations search is case insensitive.
        """
        url = "{}?{}".format(reverse('organization_list'), urlencode({'filter[search]': 'kids'}))
        response = self.client.get(url, user=self.user_1)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['meta']['pagination']['count'], 2)
class OrganizationDetailTestCase(OrganizationsEndpointTestCase):
    """
    Test case for the ``OrganizationDetail`` API view.
    Endpoint: /api/organizations/{pk}
    """
    def test_unauth_api_organizations_detail_endpoint(self):
        # Anonymous access to a single organization must be rejected.
        response = self.client.get(
            reverse('organization_detail', kwargs={'pk': self.organization_1.pk})
        )
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class ManageOrganizationListTestCase(OrganizationsEndpointTestCase):
    """
    Test case for the ``ManageOrganizationsList`` API view.
    Endpoint: /api/organizations
    """
    def setUp(self):
        super(ManageOrganizationListTestCase, self).setUp()
        # JSON:API payload reused by the create/update tests below.
        self.post_data = {
            'data': {
                'type': 'organizations',
                'attributes': {
                    'name': '1%Club',
                    'slug': 'hm',
                    'description': 'some description',
                    'website': 'http://onepercentclub.com',
                }
            }
        }
    def test_api_manage_organizations_list_user_filter(self):
        """
        Tests that all organizations are returned if there is not a search term supplied.
        """
        response = self.client.get(reverse('organization_list'), user=self.user_1)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['meta']['pagination']['count'], 5)
    def test_api_manage_organizations_list_post(self):
        """
        Tests POSTing new data to the endpoint.
        """
        post_data = self.post_data
        response = self.client.post(
            reverse('organization_list'),
            json.dumps(post_data),
            user=self.user_1)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        org_id = response.data['id']
        # Check the data. Note the slug is expected to be regenerated from
        # the name ('1club'), not taken from the submitted value ('hm').
        organization = Organization.objects.get(pk=org_id)
        self.assertEqual(organization.name, '1%Club')
        self.assertEqual(organization.slug, '1club')
        self.assertEqual(organization.description, 'some description')
    def test_api_manage_organizations_update_description(self):
        """
        Tests that the owner can update an organization via PUT.
        """
        response = self.client.post(
            reverse('organization_list'),
            json.dumps(self.post_data),
            user=self.user_1)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        # Update description
        org_id = response.data['id']
        self.post_data['data']['id'] = org_id
        self.post_data['data']['attributes']['description'] = 'Bla bla'
        url = reverse('organization_detail', kwargs={'pk': org_id})
        response = self.client.put(
            url,
            json.dumps(self.post_data),
            user=self.user_1)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Check the data.
        organization = Organization.objects.get(pk=org_id)
        self.assertEqual(organization.description, 'Bla bla')
    def test_api_manage_organizations_update_other_user(self):
        """
        Tests that a non-owner cannot update an organization
        (both PUT and PATCH are forbidden).
        """
        response = self.client.post(
            reverse('organization_list'),
            json.dumps(self.post_data),
            user=self.user_1)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        # Update description
        org_id = response.data['id']
        self.post_data['data']['id'] = org_id
        self.post_data['data']['attributes']['description'] = 'Bla bla'
        url = reverse('organization_detail', kwargs={'pk': org_id})
        response = self.client.put(
            url,
            json.dumps(self.post_data),
            user=BlueBottleUserFactory.create())
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        response = self.client.patch(
            url,
            json.dumps(self.post_data),
            user=BlueBottleUserFactory.create())
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
class ManageOrganizationContactTestCase(OrganizationsEndpointTestCase):
    """
    Test case for the ``OrganizationContact`` API.
    Endpoint: /api/organizations/contacts
    """
    def test_create_contact(self):
        data = {
            'data': {
                'type': 'organization-contacts',
                'attributes': {
                    'name': 'Brian Brown',
                    'email': 'brian@brown.com',
                    'phone': '555-1243'
                }
            }
        }
        response = self.client.post(
            reverse('organization_contact_list'),
            json.dumps(data),
            user=self.user_1
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(response.data['name'], 'Brian Brown')
    def test_create_contact_without_phone(self):
        # The phone attribute is optional.
        data = {
            'data': {
                'type': 'organization-contacts',
                'attributes': {
                    'name': 'Brian Brown',
                    'email': 'brian@brown.com'
                }
            }
        }
        response = self.client.post(
            reverse('organization_contact_list'),
            json.dumps(data),
            user=self.user_1
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(response.data['name'], 'Brian Brown')
    def test_organization_contact(self):
        # The owner can retrieve their own contact.
        contact = OrganizationContactFactory.create(owner=self.user_1)
        response = self.client.get(
            reverse('organization_contact_detail', kwargs={'pk': contact.pk}),
            user=self.user_1
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['name'], contact.name)
class ManageOrganizationDetailTestCase(OrganizationsEndpointTestCase):
    """
    Test case for the ``OrganizationDetail`` API view (authenticated access).
    Endpoint: /api/organizations/{pk}
    """
    def test_manage_organizations_detail_login_required(self):
        """
        Tests that the endpoint first restricts results to logged-in users.
        """
        # Making the request without logging in...
        response = self.client.get(
            reverse('organization_detail',
                    kwargs={'pk': self.organization_1.pk}))
        self.assertEqual(
            response.status_code, status.HTTP_401_UNAUTHORIZED, response.data)
    def test_manage_organizations_detail_get_success(self):
        """
        Tests a successful GET request over the endpoint.
        """
        response = self.client.get(reverse('organization_detail',
                                           kwargs={
                                               'pk': self.organization_1.pk}),
                                   user=self.user_1)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
| bsd-3-clause | 93954950b63bc3ece6e26d6d35dd704d | 33.493789 | 99 | 0.603403 | 4.218382 | false | true | false | false |
dimagi/commcare-hq | corehq/toggles/shortcuts.py | 1 | 2930 | from couchdbkit import ResourceNotFound
from django.conf import settings
from .models import Toggle
def toggle_enabled(slug, item, namespace=None):
    """
    Given a toggle and a username, whether the toggle is enabled for that user
    """
    from corehq.toggles import NAMESPACE_EMAIL_DOMAIN
    # For the email-domain namespace only the domain part of an address matters.
    if namespace == NAMESPACE_EMAIL_DOMAIN and '@' in item:
        item = item.split('@')[-1]
    item = namespaced_item(item, namespace)
    if not settings.UNIT_TESTING or getattr(settings, 'DB_ENABLED', True):
        toggle = Toggle.cached_get(slug)
        return item in toggle.enabled_users if toggle else False
    # NOTE(review): when unit testing with DB_ENABLED=False this falls
    # through and implicitly returns None (falsy) — confirm intentional.
def set_toggle(slug, item, enabled, namespace=None):
    """
    Sets a toggle value explicitly. Should only save anything if the value needed to be changed.
    """
    if toggle_enabled(slug, item, namespace=namespace) != enabled:
        ns_item = namespaced_item(item, namespace)
        try:
            toggle_doc = Toggle.get(slug)
        except ResourceNotFound:
            # First time this toggle is set: create the Couch document.
            toggle_doc = Toggle(slug=slug, enabled_users=[])
        if enabled:
            toggle_doc.add(ns_item)
        else:
            toggle_doc.remove(ns_item)
        # Imported here to avoid a circular import with corehq.toggles.
        from corehq.feature_previews import all_previews
        from corehq.toggles import all_toggles, NAMESPACE_DOMAIN
        static_toggles_by_slug = {t.slug: t for t in all_toggles() + all_previews()}
        # Domain-namespaced toggles may have a side-effect hook (save_fn)
        # that must run whenever the toggle flips for a domain.
        if namespace == NAMESPACE_DOMAIN and slug in static_toggles_by_slug:
            static_toggle = static_toggles_by_slug[slug]
            if static_toggle.save_fn:
                static_toggle.save_fn(item, enabled)
            # NOTE(review): True is only returned on this path; all other
            # paths return None — callers should not rely on the return value.
            return True
def namespaced_item(item, namespace):
    """Return ``item`` prefixed with ``namespace:``; when no namespace is
    given the item is returned unchanged."""
    if namespace is None:
        return item
    return '{namespace}:{item}'.format(namespace=namespace, item=item)
def parse_toggle(entry):
    """
    Split a toggle entry into the namespace and the item.
    :return: tuple(namespace, item)
    """
    from corehq.toggles import NAMESPACE_DOMAIN, NAMESPACE_EMAIL_DOMAIN
    namespace = None
    for candidate in (NAMESPACE_DOMAIN, NAMESPACE_EMAIL_DOMAIN):
        if entry.startswith(candidate + ':'):
            namespace, entry = entry.split(":")
            break
    return namespace, entry
def find_users_with_toggle_enabled(toggle):
    """Return the usernames for which ``toggle`` is enabled (entries in the
    toggle doc that carry no namespace prefix)."""
    from corehq.toggles import ALL_NAMESPACES, NAMESPACE_USER
    try:
        doc = Toggle.get(toggle.slug)
    except ResourceNotFound:
        return []
    prefixes = tuple(ns + ':' for ns in ALL_NAMESPACES if ns != NAMESPACE_USER)
    # Users are not prefixed with NAMESPACE_USER, but exclude NAMESPACE_USER to keep `prefixes` short
    return [u for u in doc.enabled_users if not u.startswith(prefixes)]
def find_domains_with_toggle_enabled(toggle):
    """Return the domain names for which ``toggle`` is enabled (entries in
    the toggle doc prefixed with the domain namespace, prefix stripped)."""
    from corehq.toggles import NAMESPACE_DOMAIN
    try:
        doc = Toggle.get(toggle.slug)
    except ResourceNotFound:
        return []
    prefix = NAMESPACE_DOMAIN + ':'
    return [user[len(prefix):] for user in doc.enabled_users if user.startswith(prefix)]
| bsd-3-clause | a4c91be28e499b1f0dd1b1285b61279b | 35.17284 | 101 | 0.66587 | 3.845144 | false | false | false | false |
onepercentclub/bluebottle | bluebottle/pages/content_plugins.py | 1 | 1124 | from django.utils.translation import gettext_lazy as _
from fluent_contents.extensions import plugin_pool, ContentPlugin
from bluebottle.pages.models import ImageTextItem, DocumentItem, ActionItem, ColumnsItem, ImageTextRoundItem
@plugin_pool.register
class ImageTextPlugin(ContentPlugin):
    """Renders an ImageTextItem content block on a page."""
    model = ImageTextItem
    render_template = "pages/plugins/imagetext/default.html"
    category = _("Multimedia")
@plugin_pool.register
class ImageTextRoundPlugin(ContentPlugin):
    """Renders an ImageTextRoundItem content block.

    NOTE(review): uses the same template as ImageTextPlugin — confirm this
    is intentional and not a missing "imagetextround" template.
    """
    model = ImageTextRoundItem
    render_template = "pages/plugins/imagetext/default.html"
    category = _("Multimedia")
@plugin_pool.register
class DocumentPlugin(ContentPlugin):
    """Renders a DocumentItem (downloadable document) content block."""
    model = DocumentItem
    render_template = "pages/plugins/document/default.html"
    category = _("Multimedia")
@plugin_pool.register
class ActionPlugin(ContentPlugin):
    """Renders an ActionItem (call-to-action) content block."""
    model = ActionItem
    render_template = "pages/plugins/action/default.html"
    category = _("Multimedia")
@plugin_pool.register
class ColumnsPlugin(ContentPlugin):
    """Renders a ColumnsItem (multi-column layout) content block."""
    model = ColumnsItem
    render_template = "pages/plugins/columns/default.html"
    category = _("Multimedia")
| bsd-3-clause | f42308acf9eaab65659032ac3c6c4edc | 27.1 | 108 | 0.759786 | 3.836177 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/hqadmin/management/commands/republish_doc_changes.py | 1 | 2669 | import csv
from django.core.management import BaseCommand, CommandError
from memoized import memoized
from corehq.apps.change_feed import data_sources, topics
from corehq.apps.change_feed.producer import producer
from corehq.apps.hqadmin.management.commands.stale_data_in_es import DataRow, HEADER_ROW, get_csv_args
from corehq.toggles import DO_NOT_REPUBLISH_DOCS
from pillowtop.feed.interface import ChangeMeta
# Maps a doc_type from the stale-data report to the (kafka topic,
# data source name) pair used when republishing its change. Both live and
# archived forms are republished on the form topic.
DOC_TYPE_MAP = {
    'CommCareCase': (topics.CASE_SQL, data_sources.CASE_SQL),
    'XFormInstance': (topics.FORM_SQL, data_sources.FORM_SQL),
    'XFormArchived': (topics.FORM_SQL, data_sources.FORM_SQL),
}
class Command(BaseCommand):
    """
    Republish doc changes. Meant to be used in conjunction with stale_data_in_es command
    $ ./manage.py republish_doc_changes changes.tsv
    """
    def add_arguments(self, parser):
        parser.add_argument('stale_data_in_es_file')
        # Must match the delimiter the input file was generated with.
        parser.add_argument('--delimiter', default='\t', choices=('\t', ','))
        # Skip rows whose domain has the DO_NOT_REPUBLISH_DOCS toggle enabled.
        parser.add_argument('--skip_domains', action='store_true')
    def handle(self, stale_data_in_es_file, delimiter, skip_domains, *args, **options):
        # Stream the file and publish one change message per data row.
        changes = _iter_changes(stale_data_in_es_file, skip_domains, delimiter=delimiter)
        for topic, meta in changes:
            producer.send_change(topic, meta)
def _iter_changes(stale_data_in_es_file, skip_domains, delimiter):
    """Yield (kafka topic, ChangeMeta) pairs for each data row in the file.

    :param stale_data_in_es_file: path to a file written by the
        ``stale_data_in_es`` management command.
    :param skip_domains: if True, drop rows whose domain has the
        DO_NOT_REPUBLISH_DOCS toggle enabled.
    :param delimiter: column delimiter the file was written with.
    :raises CommandError: on a row whose doc_type is not in DOC_TYPE_MAP.
    """
    with open(stale_data_in_es_file, 'r') as f:
        csv_reader = csv.reader(f, **get_csv_args(delimiter))
        for csv_row in csv_reader:
            data_row = DataRow(*csv_row)
            # Skip the header row anywhere in the file.
            # The "anywhere in the file" part is useful
            # if you cat multiple stale_data_in_es_file files together.
            # Fix: check the header *before* the domain filter so the toggle
            # is never queried with the header row's literal column text.
            if data_row == HEADER_ROW:
                continue
            if skip_domains and should_not_republish_docs(data_row.domain):
                continue
            try:
                topic, source = DOC_TYPE_MAP[data_row.doc_type]
            except KeyError:
                raise CommandError(f"Found bad doc type {data_row.doc_type}. "
                                   "Did you use the right command to create the data?")
            yield topic, _change_meta(data_row, source)
@memoized
def should_not_republish_docs(domain):
    # Memoized so the toggle is looked up at most once per domain per run.
    return DO_NOT_REPUBLISH_DOCS.enabled(domain)
def _change_meta(data_row, source):
    """Build the ChangeMeta to publish for one stale-data row."""
    meta_kwargs = {
        'document_id': data_row.doc_id,
        'data_source_type': data_sources.SOURCE_SQL,
        'data_source_name': source,
        'document_type': data_row.doc_type,
        'document_subtype': data_row.doc_subtype,
        'domain': data_row.domain,
        'is_deletion': False,
    }
    return ChangeMeta(**meta_kwargs)
| bsd-3-clause | ef82dd1ffc450a922cffd1d8ecf4fb06 | 36.069444 | 102 | 0.65118 | 3.553928 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/users/models.py | 1 | 116453 | import hmac
import json
import logging
import re
from collections import namedtuple
from datetime import datetime, date
from hashlib import sha1
from typing import List
from uuid import uuid4
from xml.etree import cElementTree as ElementTree
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.postgres.fields import ArrayField
from django.core.exceptions import ValidationError
from django.core.serializers.json import DjangoJSONEncoder
from django.db import connection, models, router
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.html import format_html
from django.utils.translation import override as override_language
from django.utils.translation import gettext as _
from couchdbkit import MultipleResultsFound, ResourceNotFound
from couchdbkit.exceptions import BadValueError, ResourceConflict
from dateutil.relativedelta import relativedelta
from memoized import memoized
from casexml.apps.case.mock import CaseBlock
from casexml.apps.phone.models import OTARestoreCommCareUser, OTARestoreWebUser
from casexml.apps.phone.restore_caching import get_loadtest_factor_for_restore_cache_key
from corehq.form_processor.models import XFormInstance
from dimagi.ext.couchdbkit import (
BooleanProperty,
DateProperty,
DateTimeProperty,
DictProperty,
Document,
DocumentSchema,
IntegerProperty,
ListProperty,
SchemaListProperty,
SchemaProperty,
StringListProperty,
StringProperty,
)
from dimagi.utils.chunked import chunked
from dimagi.utils.couch import CriticalSection
from dimagi.utils.couch.database import get_safe_write_kwargs, iter_docs
from dimagi.utils.couch.undo import DELETED_SUFFIX, DeleteRecord
from dimagi.utils.dates import (
force_to_datetime,
get_date_from_month_and_year_string,
)
from dimagi.utils.logging import log_signal_errors, notify_exception
from dimagi.utils.modules import to_function
from dimagi.utils.web import get_static_url_prefix
from corehq.apps.app_manager.const import USERCASE_TYPE
from corehq.apps.commtrack.const import USER_LOCATION_OWNER_MAP_TYPE
from corehq.apps.domain.models import Domain, LicenseAgreement
from corehq.apps.domain.shortcuts import create_user
from corehq.apps.domain.utils import (
domain_restricts_superusers,
guess_domain_language,
)
from corehq.apps.hqwebapp.tasks import send_html_email_async
from corehq.apps.sms.mixin import CommCareMobileContactMixin, apply_leniency
from corehq.apps.user_importer.models import UserUploadRecord
from corehq.apps.users.exceptions import IllegalAccountConfirmation
from corehq.apps.users.permissions import EXPORT_PERMISSIONS
from corehq.apps.users.tasks import (
tag_cases_as_deleted_and_remove_indices,
tag_forms_as_deleted_rebuild_associated_cases,
tag_system_forms_as_deleted,
undelete_system_forms,
)
from corehq.apps.users.util import (
filter_by_app,
log_user_change,
user_display_string,
user_location_data,
username_to_user_id,
bulk_auto_deactivate_commcare_users,
is_dimagi_email,
)
from corehq.form_processor.exceptions import CaseNotFound
from corehq.form_processor.interfaces.supply import SupplyInterface
from corehq.form_processor.models import CommCareCase
from corehq.util.dates import get_timestamp
from corehq.util.models import BouncedEmail
from corehq.util.quickcache import quickcache
from corehq.util.view_utils import absolute_reverse
from .models_role import ( # noqa
RoleAssignableBy,
RolePermission,
Permission,
StaticRole,
UserRole,
)
# User-type tags returned by CouchUser._get_user_type()
WEB_USER = 'web'
COMMCARE_USER = 'commcare'

# Failed-login thresholds used by CouchUser.should_be_locked_out()
MAX_WEB_USER_LOGIN_ATTEMPTS = 5
MAX_COMMCARE_USER_LOGIN_ATTEMPTS = 500
def _add_to_list(list, obj, default):
if obj in list:
list.remove(obj)
if default:
ret = [obj]
ret.extend(list)
return ret
else:
list.append(obj)
return list
def _get_default(list):
return list[0] if list else None
class PermissionInfo(namedtuple("Permission", "name, allow")):
    """Data class that represents a single permission.

    Some permissions can be parameterized to restrict access to only specific items
    instead of ALL items.
    """
    ALLOW_ALL = "*"

    def __new__(cls, name, allow=ALLOW_ALL):
        # Anything other than the ALLOW_ALL sentinel is normalized to a tuple
        # of specific items, which only parameterizable permissions support.
        if allow != cls.ALLOW_ALL:
            allow = tuple(allow)
            if name not in PARAMETERIZED_PERMISSIONS:
                raise TypeError(f"Permission '{name}' does not support parameterization")
        return super(PermissionInfo, cls).__new__(cls, name, allow)

    @property
    def allow_all(self):
        """True when this permission applies to all items."""
        return self.allow == self.ALLOW_ALL

    @property
    def allowed_items(self):
        """The specific items this permission is restricted to (empty if allow-all)."""
        if self.allow_all:
            return []
        assert isinstance(self.allow, tuple), self.allow
        return self.allow
# Maps each parameterizable permission name to the HqPermissions list
# property that holds the specific items the permission is restricted to.
PARAMETERIZED_PERMISSIONS = {
    'manage_data_registry': 'manage_data_registry_list',
    'view_data_registry_contents': 'view_data_registry_contents_list',
    'view_reports': 'view_report_list',
    'view_tableau': 'view_tableau_list',
}
class HqPermissions(DocumentSchema):
    """Set of capability flags attached to a user role.

    Plain ``BooleanProperty`` flags grant or deny a capability wholesale;
    the companion ``*_list`` StringListProperty fields restrict a
    parameterized permission to specific items (see
    ``PARAMETERIZED_PERMISSIONS``).
    """
    edit_web_users = BooleanProperty(default=False)
    view_web_users = BooleanProperty(default=False)

    # only domain admins can edit roles, due to security issues.
    view_roles = BooleanProperty(default=False)

    edit_commcare_users = BooleanProperty(default=False)
    view_commcare_users = BooleanProperty(default=False)
    edit_groups = BooleanProperty(default=False)
    view_groups = BooleanProperty(default=False)
    edit_users_in_groups = BooleanProperty(default=False)
    edit_locations = BooleanProperty(default=False)
    view_locations = BooleanProperty(default=False)
    edit_users_in_locations = BooleanProperty(default=False)

    edit_motech = BooleanProperty(default=False)
    edit_data = BooleanProperty(default=False)
    edit_apps = BooleanProperty(default=False)
    view_apps = BooleanProperty(default=False)
    edit_shared_exports = BooleanProperty(default=False)
    access_all_locations = BooleanProperty(default=True)
    access_api = BooleanProperty(default=False)
    access_web_apps = BooleanProperty(default=False)
    edit_messaging = BooleanProperty(default=False)
    access_release_management = BooleanProperty(default=False)

    edit_reports = BooleanProperty(default=False)
    download_reports = BooleanProperty(default=False)
    view_reports = BooleanProperty(default=False)
    view_report_list = StringListProperty(default=[])
    edit_ucrs = BooleanProperty(default=False)
    view_tableau = BooleanProperty(default=False)
    view_tableau_list = StringListProperty(default=[])

    edit_billing = BooleanProperty(default=False)
    report_an_issue = BooleanProperty(default=True)
    access_mobile_endpoints = BooleanProperty(default=False)

    view_file_dropzone = BooleanProperty(default=False)
    edit_file_dropzone = BooleanProperty(default=False)

    login_as_all_users = BooleanProperty(default=False)
    limited_login_as = BooleanProperty(default=False)
    access_default_login_as_user = BooleanProperty(default=False)

    manage_data_registry = BooleanProperty(default=False)
    manage_data_registry_list = StringListProperty(default=[])
    view_data_registry_contents = BooleanProperty(default=False)
    view_data_registry_contents_list = StringListProperty(default=[])

    @classmethod
    def from_permission_list(cls, permission_list):
        """Converts a list of Permission objects into a Permissions object"""
        permissions = HqPermissions.min()
        for perm in permission_list:
            setattr(permissions, perm.name, perm.allow_all)
            if perm.name in PARAMETERIZED_PERMISSIONS:
                setattr(permissions, PARAMETERIZED_PERMISSIONS[perm.name], list(perm.allowed_items))
        return permissions

    def normalize(self):
        """Enforce invariants between related flags in place.

        Location-restricted roles lose several domain-wide permissions, and
        every ``edit_*`` flag implies its matching ``view_*`` flag.
        """
        if not self.access_all_locations:
            # The following permissions cannot be granted to location-restricted
            # roles.
            self.edit_web_users = False
            self.view_web_users = False
            self.edit_groups = False
            self.view_groups = False
            self.edit_apps = False
            self.view_roles = False
            self.edit_reports = False
            self.edit_billing = False
        if self.edit_web_users:
            self.view_web_users = True
        if self.edit_commcare_users:
            self.view_commcare_users = True
        if self.edit_groups:
            self.view_groups = True
        else:
            self.edit_users_in_groups = False
        if self.edit_locations:
            self.view_locations = True
        else:
            self.edit_users_in_locations = False
        if self.edit_apps:
            self.view_apps = True

    @classmethod
    @memoized
    def permission_names(cls):
        """Returns a list of permission names"""
        return {
            name for name, value in HqPermissions.properties().items()
            if isinstance(value, BooleanProperty)
        }

    def to_list(self) -> List[PermissionInfo]:
        """Returns a list of Permission objects for those permissions that are enabled."""
        return list(self._yield_enabled())

    def _yield_enabled(self):
        # Yield a PermissionInfo for each enabled flag; a parameterized
        # permission whose boolean is off but whose list is non-empty yields
        # the restricted (list) form.
        for name in HqPermissions.permission_names():
            value = getattr(self, name)
            list_value = None
            if name in PARAMETERIZED_PERMISSIONS:
                list_name = PARAMETERIZED_PERMISSIONS[name]
                list_value = getattr(self, list_name)
            if value or list_value:
                yield PermissionInfo(name, allow=PermissionInfo.ALLOW_ALL if value else list_value)

    def view_report(self, report):
        """True if the role may view ``report`` (all reports, or listed explicitly)."""
        return self.view_reports or report in self.view_report_list

    def view_tableau_viz(self, viz_id):
        # Tableau visualizations additionally require unrestricted location access.
        if not self.access_all_locations:
            return False
        return self.view_tableau or viz_id in self.view_tableau_list

    def has(self, permission, data=None):
        """Check a permission by attribute name.

        With ``data`` the attribute is treated as a predicate method
        (e.g. ``view_report``) and called with it.
        """
        if data:
            return getattr(self, permission)(data)
        else:
            return getattr(self, permission)

    def _getattr(self, name):
        # Normalize list properties to sets so __eq__ ignores ordering.
        a = getattr(self, name)
        if isinstance(a, list):
            a = set(a)
        return a

    def __eq__(self, other):
        # Property-wise comparison; list-valued properties compare as sets.
        for name in self.properties():
            if self._getattr(name) != other._getattr(name):
                return False
        return True

    @classmethod
    def max(cls):
        """Permissions object with every flag granted."""
        return HqPermissions._all(True)

    @classmethod
    def min(cls):
        """Permissions object with every flag denied."""
        return HqPermissions._all(False)

    @classmethod
    def _all(cls, value: bool):
        # Build a Permissions object with every boolean flag set to ``value``.
        perms = HqPermissions()
        for name in HqPermissions.permission_names():
            setattr(perms, name, value)
        return perms
class DomainMembershipError(Exception):
    """Raised when a role is requested for a domain the user is not a member of."""
    pass
class Membership(DocumentSchema):
    """Base schema for a user's membership in some scope (currently domains)."""
    # If we find a need for making UserRoles more general and decoupling it from a domain
    # then most of the role stuff from Domain membership can be put in here
    is_admin = BooleanProperty(default=False)
class DomainMembership(Membership):
    """
    Each user can have multiple accounts on individual domains
    """
    # Set by _AuthorizableMixin.get_domain_membership from the owning user's
    # doc type; used to pick a default role for mobile workers.
    _user_type = None

    domain = StringProperty()
    timezone = StringProperty(default=getattr(settings, "TIME_ZONE", "UTC"))
    override_global_tz = BooleanProperty(default=False)
    role_id = StringProperty()
    # This should not be set directly but using set_location method only
    location_id = StringProperty()
    assigned_location_ids = StringListProperty()
    program_id = StringProperty()
    last_accessed = DateProperty()

    @property
    def permissions(self):
        # No role means no permissions (all-False HqPermissions).
        if self.role:
            return self.role.permissions
        else:
            return HqPermissions()

    @classmethod
    def wrap(cls, data):
        # Migrate the legacy 'subject' field name to 'domain'.
        if data.get('subject'):
            data['domain'] = data['subject']
            del data['subject']
        return super(DomainMembership, cls).wrap(data)

    @property
    @memoized
    def role(self):
        """Resolve this membership's role: admin static role, explicit
        role_id lookup (None if the role row is missing), or the user-type
        default."""
        if self.is_admin:
            return StaticRole.domain_admin(self.domain)
        elif self.role_id:
            try:
                return UserRole.objects.by_couch_id(self.role_id)
            except UserRole.DoesNotExist:
                logging.exception('no role found in domain', extra={
                    'role_id': self.role_id,
                    'domain': self.domain
                })
                return None
        else:
            return self.get_default_role()

    def get_default_role(self):
        # Only mobile workers get an implicit default role.
        if self._user_type == COMMCARE_USER:
            return UserRole.commcare_user_default(self.domain)
        return None

    def has_permission(self, permission, data=None):
        # Admins implicitly hold every permission.
        return self.is_admin or self.permissions.has(permission, data)

    def viewable_reports(self):
        """The explicit list of reports this membership's role may view."""
        return self.permissions.view_report_list

    class Meta(object):
        app_label = 'users'
class IsMemberOfMixin(DocumentSchema):
    """Mixin providing domain-membership checks for couch user documents."""

    def _is_member_of(self, domain, allow_enterprise):
        """Return True if this user belongs to ``domain``.

        Global admins are members of any domain that does not restrict
        superusers.  With ``allow_enterprise``, membership in an enterprise
        source domain also grants access to its controlled domains.
        """
        if not domain:
            return False

        if self.is_global_admin() and not domain_restricts_superusers(domain):
            return True

        if domain in self.get_domains():
            return True

        if allow_enterprise:
            from corehq.apps.enterprise.models import EnterprisePermissions
            config = EnterprisePermissions.get_by_domain(domain)
            if config.is_enabled and domain in config.domains:
                return self.is_member_of(config.source_domain, allow_enterprise=False)
        return False

    def is_member_of(self, domain_qs, allow_enterprise=False):
        """
        Takes either a domain name or a domain object and returns whether the user is part of that domain
        """
        # Accept either a Domain object (has .name) or a plain name string.
        # (Replaces a broad try/except Exception duck-typing block.)
        domain = getattr(domain_qs, 'name', domain_qs)
        return self._is_member_of(domain, allow_enterprise)

    def is_global_admin(self):
        # subclasses to override if they want this functionality
        return False
class _AuthorizableMixin(IsMemberOfMixin):
    """
    Use either SingleMembershipMixin or MultiMembershipMixin instead of this
    """

    def get_domain_membership(self, domain, allow_enterprise=True):
        """Return the DomainMembership for ``domain`` (or via an enterprise
        source domain), repairing ``self.domains`` if it has drifted out of
        sync with ``self.domain_memberships``."""
        domain_membership = None
        try:
            for d in self.domain_memberships:
                if d.domain == domain:
                    domain_membership = d
                    if domain not in self.domains:
                        raise self.Inconsistent("Domain '%s' is in domain_memberships but not domains" % domain)
            if not domain_membership:
                if domain in self.domains:
                    raise self.Inconsistent("Domain '%s' is in domain but not in domain_memberships" % domain)
                from corehq.apps.enterprise.models import EnterprisePermissions
                config = EnterprisePermissions.get_by_domain(domain)
                if allow_enterprise and config.is_enabled and domain in config.domains:
                    return self.get_domain_membership(config.source_domain, allow_enterprise=False)
        except self.Inconsistent as e:
            # Self-heal: rebuild the domains list from the memberships.
            logging.warning(e)
            self.domains = [d.domain for d in self.domain_memberships]
        if domain_membership:
            # set user type on membership to support default roles for 'commcare' users
            domain_membership._user_type = self._get_user_type()
        return domain_membership

    def add_domain_membership(self, domain, timezone=None, **kwargs):
        """Add a membership for ``domain`` (no-op if one already exists).
        Does not save."""
        for d in self.domain_memberships:
            if d.domain == domain:
                if domain not in self.domains:
                    raise self.Inconsistent("Domain '%s' is in domain_memberships but not domains" % domain)
                return
        domain_obj = Domain.get_by_name(domain, strict=True)
        if timezone:
            domain_membership = DomainMembership(domain=domain, timezone=timezone, **kwargs)
        else:
            # Fall back to the domain's own default timezone.
            domain_membership = DomainMembership(domain=domain,
                                                 timezone=domain_obj.default_timezone,
                                                 **kwargs)
        self.domain_memberships.append(domain_membership)
        self.domains.append(domain)

    def add_as_web_user(self, domain, role, location_id=None, program_id=None):
        """Add a membership with the given role (and optional commtrack
        program / location when the domain supports them), then save."""
        domain_obj = Domain.get_by_name(domain)
        self.add_domain_membership(domain=domain)
        self.set_role(domain, role)
        if domain_obj.commtrack_enabled:
            self.get_domain_membership(domain).program_id = program_id
        if domain_obj.uses_locations and location_id:
            self.set_location(domain, location_id)
        self.save()

    def delete_domain_membership(self, domain, create_record=False):
        """
        If create_record is True, a DomainRemovalRecord is created so that the
        action can be undone, and the DomainRemovalRecord is returned.

        If create_record is True but the domain membership is not found,
        then None is returned.
        """
        self.get_by_user_id.clear(self.__class__, self.user_id, domain)
        record = None
        for i, dm in enumerate(self.domain_memberships):
            if dm.domain == domain:
                if create_record:
                    record = DomainRemovalRecord(
                        domain=domain,
                        user_id=self.user_id,
                        domain_membership=dm,
                    )
                del self.domain_memberships[i]
                break
        for i, domain_name in enumerate(self.domains):
            if domain_name == domain:
                del self.domains[i]
                break
        if record:
            record.save()
            return record

    def transfer_domain_membership(self, domain, to_user, create_record=False, is_admin=True):
        """Give ``to_user`` a membership in ``domain`` and remove this user's."""
        to_user.add_domain_membership(domain, is_admin=is_admin)
        self.delete_domain_membership(domain, create_record=create_record)

    @memoized
    def is_domain_admin(self, domain=None):
        """True if this user administers ``domain`` (or is an unrestricted
        global admin)."""
        # this is a hack needed because we can't pass parameters from views
        domain = domain or getattr(self, 'current_domain', None)
        if not domain:
            return False  # no domain, no admin
        if self.is_global_admin() and not domain_restricts_superusers(domain):
            return True
        dm = self.get_domain_membership(domain, allow_enterprise=True)
        if dm:
            return dm.is_admin
        else:
            return False

    def get_domains(self):
        """All domain names this user is a member of (sanity-checked against
        the memberships list)."""
        domains = [dm.domain for dm in self.domain_memberships]
        if set(domains) == set(self.domains):
            return domains
        else:
            raise self.Inconsistent("domains and domain_memberships out of sync")

    @memoized
    def has_permission(self, domain, permission, data=None):
        """Check ``permission`` in ``domain``; admins short-circuit to True."""
        # is_admin is the same as having all the permissions set
        if self.is_global_admin() and (domain is None or not domain_restricts_superusers(domain)):
            return True
        elif self.is_domain_admin(domain):
            return True
        dm = self.get_domain_membership(domain, allow_enterprise=True)
        if dm:
            return dm.has_permission(permission, data)
        return False

    @memoized
    def get_role(self, domain=None, checking_global_admin=True, allow_enterprise=False):
        """
        Get the role object for this user
        """
        # default to current_domain for django templates
        domain = domain or getattr(self, 'current_domain', None)
        if checking_global_admin and self.is_global_admin():
            return StaticRole.domain_admin(domain)
        if self.is_member_of(domain, allow_enterprise):
            dm = self.get_domain_membership(domain, allow_enterprise)
            if dm:
                return dm.role
        raise DomainMembershipError()

    def set_role(self, domain, role_qualified_id):
        """
        role_qualified_id is either 'admin' 'user-role:[id]'
        """
        dm = self.get_domain_membership(domain)
        dm.is_admin = False
        if role_qualified_id == "admin":
            dm.is_admin = True
            dm.role_id = None
        elif role_qualified_id.startswith('user-role:'):
            dm.role_id = role_qualified_id[len('user-role:'):]
        elif role_qualified_id == 'none':
            dm.role_id = None
        else:
            raise Exception("unexpected role_qualified_id is %r" % role_qualified_id)
        # Invalidate every memoized cache that depends on the role.
        self.has_permission.reset_cache(self)
        self.get_role.reset_cache(self)
        try:
            self.is_domain_admin.reset_cache(self)
        except AttributeError:
            pass
        DomainMembership.role.fget.reset_cache(dm)

    def role_label(self, domain=None):
        """Human-readable role name for display, with fallbacks for
        global-admin / enterprise / unauthorized users."""
        domain = domain or getattr(self, 'current_domain', None)
        if not domain:
            return None
        try:
            return self.get_role(domain, checking_global_admin=False).name
        except TypeError:
            return _("Unknown User")
        except DomainMembershipError:
            if self.is_global_admin():
                return _("Dimagi User")
            if self.is_member_of(domain, allow_enterprise=True):
                return _("Enterprise User")
            return _("Unauthorized User")
        except Exception:
            return None
class SingleMembershipMixin(_AuthorizableMixin):
    """Membership mixin for users tied to exactly one domain (CommCareUser)."""
    domain_membership = SchemaProperty(DomainMembership)

    @property
    def domains(self):
        return [self.domain]

    @property
    def domain_memberships(self):
        return [self.domain_membership]

    def add_domain_membership(self, domain, timezone=None, **kwargs):
        # Single-domain users cannot gain additional memberships.
        raise NotImplementedError

    def delete_domain_membership(self, domain, create_record=False):
        raise NotImplementedError

    def transfer_domain_membership(self, domain, user, create_record=False):
        raise NotImplementedError
class MultiMembershipMixin(_AuthorizableMixin):
    """Membership mixin for users that can belong to many domains (WebUser)."""
    domains = StringListProperty()
    domain_memberships = SchemaListProperty(DomainMembership)
class LowercaseStringProperty(StringProperty):
    """
    Make sure that the string is always lowercase'd
    """

    def __init__(self, validators=None, *args, **kwargs):
        if validators is None:
            validators = ()

        def check_lowercase(value):
            # Reject (rather than coerce) values containing uppercase characters.
            if value and any(char.isupper() for char in value):
                raise BadValueError('uppercase characters not allowed')

        validators += (check_lowercase,)
        super(LowercaseStringProperty, self).__init__(validators=validators, *args, **kwargs)
class DjangoUserMixin(DocumentSchema):
    """Mirror of the django auth User fields, stored on the couch user doc."""
    username = LowercaseStringProperty()
    first_name = StringProperty()
    last_name = StringProperty()
    email = LowercaseStringProperty()
    password = StringProperty()
    is_staff = BooleanProperty()
    is_active = BooleanProperty()
    is_superuser = BooleanProperty()
    last_login = DateTimeProperty()
    date_joined = DateTimeProperty()

    # Field names kept in sync with the django User model.
    ATTRS = (
        'username',
        'first_name',
        'last_name',
        'email',
        'password',
        'is_staff',
        'is_active',
        'is_superuser',
        'last_login',
        'date_joined',
    )

    def set_password(self, raw_password):
        # Hash via a throwaway django User so we reuse django's password hashers.
        dummy = User()
        dummy.set_password(raw_password)
        self.password = dummy.password

    def check_password(self, password):
        """ Currently just for debugging"""
        dummy = User()
        dummy.password = self.password
        return dummy.check_password(password)
class EulaMixin(DocumentSchema):
    """Tracks which versions of the End User License Agreement a user has signed."""
    CURRENT_VERSION = '3.0'  # Set this to the most up to date version of the eula
    eulas = SchemaListProperty(LicenseAgreement)

    @classmethod
    def migrate_eula(cls, data):
        """Migrate a legacy single-``eula`` doc into the ``eulas`` list form."""
        if 'eula' in data:
            data['eulas'] = [data['eula']]
            data['eulas'][0]['version'] = '1.0'
            del data['eula']
        return data

    def is_eula_signed(self, version=CURRENT_VERSION):
        """True if the given EULA version has been signed (superusers always pass)."""
        if self.is_superuser:
            return True
        eula = self.get_eula(version)
        return eula.signed if eula else False

    def get_eula(self, version):
        """Return the LicenseAgreement matching ``version``, or None."""
        return next((eula for eula in self.eulas if eula.version == version), None)

    @property
    def eula(self):
        """The LicenseAgreement for CURRENT_VERSION, created (and appended) on demand.

        Fix: this was previously declared as a property taking an extra
        ``version`` parameter, which property access can never supply; the
        dead parameter is removed.
        """
        current_eula = self.get_eula(self.CURRENT_VERSION)
        if not current_eula:
            current_eula = LicenseAgreement(type="End User License Agreement",
                                            version=self.CURRENT_VERSION)
            self.eulas.append(current_eula)
        assert current_eula.type == "End User License Agreement"
        return current_eula
class DeviceAppMeta(DocumentSchema):
    """Metadata for an app on a device"""
    app_id = StringProperty()
    build_id = StringProperty()
    build_version = IntegerProperty()
    last_request = DateTimeProperty()
    last_submission = DateTimeProperty()
    last_sync = DateTimeProperty()
    last_heartbeat = DateTimeProperty()
    num_unsent_forms = IntegerProperty()
    num_quarantined_forms = IntegerProperty()

    def _update_latest_request(self):
        """Recompute ``last_request`` as the max of the activity timestamps."""
        dates = [date for date in (self.last_submission, self.last_heartbeat, self.last_sync) if date]
        self.last_request = max(dates) if dates else None

    def merge(self, other):
        """Fold ``other``'s newer metadata into this record.

        No-op when ``other`` is not strictly newer than ``self``.
        """
        # ensure that last_request is updated
        # (fix: previously done with side-effecting ``and`` expressions)
        if self.last_request is None:
            self._update_latest_request()
        if other.last_request is None:
            other._update_latest_request()

        # Fix: guard None before comparing -- ``None <= datetime`` raises
        # TypeError.  No activity on ``other`` means nothing to merge.
        if other.last_request is None:
            return
        if self.last_request is not None and other.last_request <= self.last_request:
            return

        for key, prop in self.properties().items():
            new_val = getattr(other, key)
            if new_val:
                old_val = getattr(self, key)
                if not old_val:
                    setattr(self, key, new_val)
                    continue
                prop_is_date = isinstance(prop, DateTimeProperty)
                # Dates only advance forward; other values overwrite on change.
                if prop_is_date and new_val > old_val:
                    setattr(self, key, new_val)
                elif not prop_is_date and old_val != new_val:
                    setattr(self, key, new_val)
        self._update_latest_request()
class DeviceIdLastUsed(DocumentSchema):
    """Last-used tracking for one device, with per-app metadata."""
    device_id = StringProperty()
    last_used = DateTimeProperty()
    commcare_version = StringProperty()
    app_meta = SchemaListProperty(DeviceAppMeta)

    def update_meta(self, commcare_version=None, app_meta=None):
        """Record a new CommCare version and/or merge in fresh app metadata."""
        if commcare_version:
            self.commcare_version = commcare_version
        if app_meta:
            self._merge_app_meta(app_meta)

    def _merge_app_meta(self, app_meta):
        # Merge into the existing entry for this app, or append a new one.
        current_meta = self.get_meta_for_app(app_meta.app_id)
        if not current_meta:
            app_meta._update_latest_request()
            self.app_meta.append(app_meta)
        else:
            current_meta.merge(app_meta)

    def get_meta_for_app(self, app_id):
        """Return the DeviceAppMeta for ``app_id``, if any."""
        return filter_by_app(self.app_meta, app_id)

    def get_last_used_app_meta(self):
        """Return the most recently active app's metadata, or None if there is none."""
        # max(..., default=None) replaces the old try/except ValueError on an
        # empty sequence.
        return max(self.app_meta, key=lambda a: a.last_request, default=None)

    def __eq__(self, other):
        # NOTE: defines __eq__ without __hash__ (matches existing behavior).
        return all(getattr(self, p) == getattr(other, p) for p in self.properties())
class LastSubmission(DocumentSchema):
    """Metadata for form submissions. This data is keyed by app_id"""
    app_id = StringProperty()
    submission_date = DateTimeProperty()
    build_id = StringProperty()
    device_id = StringProperty()
    build_version = IntegerProperty()
    commcare_version = StringProperty()
class LastSync(DocumentSchema):
    """Metadata for syncs and restores. This data is keyed by app_id"""
    app_id = StringProperty()
    sync_date = DateTimeProperty()
    build_version = IntegerProperty()
class LastBuild(DocumentSchema):
    """
    Build info for the app on the user's phone
    when they last synced or submitted or sent heartbeat request
    """
    app_id = StringProperty()
    build_profile_id = StringProperty()
    build_version = IntegerProperty()
    build_version_date = DateTimeProperty()
class ReportingMetadata(DocumentSchema):
    """Aggregated per-app activity records plus a latest-overall entry each
    for submissions, syncs, and builds."""
    last_submissions = SchemaListProperty(LastSubmission)
    last_submission_for_user = SchemaProperty(LastSubmission)
    last_syncs = SchemaListProperty(LastSync)
    last_sync_for_user = SchemaProperty(LastSync)
    last_builds = SchemaListProperty(LastBuild)
    last_build_for_user = SchemaProperty(LastBuild)
class CouchUser(Document, DjangoUserMixin, IsMemberOfMixin, EulaMixin):
    """
    A user (for web and commcare)
    """
    base_doc = 'CouchUser'

    # todo: it looks like this is only ever set to a useless string and we should probably just remove it
    # https://github.com/dimagi/commcare-hq/pull/14087#discussion_r90423396
    device_ids = ListProperty()

    # this is the real list of devices
    devices = SchemaListProperty(DeviceIdLastUsed)
    # most recent device with most recent app for easy reporting
    last_device = SchemaProperty(DeviceIdLastUsed)

    phone_numbers = ListProperty()
    created_on = DateTimeProperty(default=datetime(year=1900, month=1, day=1))
    last_modified = DateTimeProperty()
    # For now, 'status' is things like:
    # ('auto_created', 'Automatically created from form submission.'),
    # ('phone_registered', 'Registered from phone'),
    # ('site_edited', 'Manually added or edited from the HQ website.'),
    status = StringProperty()
    language = StringProperty()
    subscribed_to_commcare_users = BooleanProperty(default=False)
    announcements_seen = ListProperty()
    user_data = DictProperty()  # use metadata property instead of accessing this directly
    # This should not be set directly but using set_location method only
    location_id = StringProperty()
    assigned_location_ids = StringListProperty()
    has_built_app = BooleanProperty(default=False)
    analytics_enabled = BooleanProperty(default=True)

    two_factor_auth_disabled_until = DateTimeProperty()
    # failed-login counters backing the lockout logic (should_be_locked_out)
    login_attempts = IntegerProperty(default=0)
    attempt_date = DateProperty()

    reporting_metadata = SchemaProperty(ReportingMetadata)

    _user = None

    can_assign_superuser = BooleanProperty(default=False)
    @classmethod
    def wrap(cls, data, should_save=False):
        """Wrap a raw couch doc, dropping the legacy 'organizations' field and
        migrating any single legacy EULA into the ``eulas`` list."""
        if "organizations" in data:
            del data["organizations"]
            should_save = True

        data = cls.migrate_eula(data)

        couch_user = super(CouchUser, cls).wrap(data)
        if should_save:
            couch_user.save()

        return couch_user

    class AccountTypeError(Exception):
        """Operation not valid for this account type (web vs. mobile)."""
        pass

    class Inconsistent(Exception):
        """domains and domain_memberships disagree; see _AuthorizableMixin."""
        pass

    class InvalidID(Exception):
        pass

    class UnsuportedOperation(Exception):
        # NOTE: misspelling ("Unsuported") retained -- callers reference this name.
        pass
    def __repr__(self):
        """Debug repr listing all properties, with the hashed password redacted."""
        # copied from jsonobject/base.py
        name = self.__class__.__name__
        predefined_properties = set(self._properties_by_attr)
        predefined_property_keys = set(self._properties_by_attr[p].name
                                       for p in predefined_properties)
        dynamic_properties = set(self._wrapped) - predefined_property_keys
        # redact hashed password
        properties = sorted(predefined_properties - {'password'}) + sorted(dynamic_properties - {'password'})
        return '{name}({keyword_args})'.format(
            name=name,
            keyword_args=', '.join('{key}={value!r}'.format(
                key=key,
                value=getattr(self, key)
            ) for key in properties),
        )
    @property
    def metadata(self):
        # Preferred read access to user_data.
        return self.user_data

    def update_metadata(self, data):
        """Merge ``data`` into user_data (does not save the doc)."""
        self.user_data.update(data)
        return True

    def pop_metadata(self, key, default=None):
        """Remove and return ``key`` from user_data (does not save the doc)."""
        return self.user_data.pop(key, default)

    @property
    def two_factor_disabled(self):
        # True while an explicit "disable 2FA until <datetime>" window is active.
        return (
            self.two_factor_auth_disabled_until
            and datetime.utcnow() < self.two_factor_auth_disabled_until
        )

    @property
    def is_dimagi(self):
        return is_dimagi_email(self.username)

    def is_locked_out(self):
        return self.supports_lockout() and self.should_be_locked_out()

    def should_be_locked_out(self):
        # Web users get a much lower failed-attempt threshold than mobile workers.
        max_attempts = MAX_WEB_USER_LOGIN_ATTEMPTS if self.is_web_user() else MAX_COMMCARE_USER_LOGIN_ATTEMPTS
        return self.login_attempts >= max_attempts

    def supports_lockout(self):
        # Subclasses may override to opt out of login lockout entirely.
        return True
    @property
    def raw_username(self):
        # Mobile usernames look like "user@domain.commcarehq.org"; strip the suffix.
        if self.doc_type == "CommCareUser":
            return self.username.split("@")[0]
        else:
            return self.username

    @property
    def username_in_report(self):
        return user_display_string(self.username, self.first_name, self.last_name)

    def html_username(self):
        """HTML fragment showing the username with any @domain part styled separately."""
        username, *remaining = self.raw_username.split('@')
        if remaining:
            domain_name = remaining[0]
            html = format_html(
                '<span class="user_username">{}</span><span class="user_domainname">@{}</span>',
                username,
                domain_name)
        else:
            html = format_html("<span class='user_username'>{}</span>", username)
        return html

    @property
    def userID(self):
        return self._id

    user_id = userID

    def __str__(self):
        return "<%s '%s'>" % (self.__class__.__name__, self.get_id)
    def get_email(self):
        # Do not change the name of this method because this ends up implementing
        # get_email() from the CommCareMobileContactMixin for the CommCareUser
        return self.email

    def is_commcare_user(self):
        return self._get_user_type() == COMMCARE_USER

    def is_web_user(self):
        return self._get_user_type() == WEB_USER

    def _get_user_type(self):
        # Derived from the couch doc_type rather than stored separately.
        if self.doc_type == 'WebUser':
            return WEB_USER
        elif self.doc_type == 'CommCareUser':
            return COMMCARE_USER
        else:
            raise NotImplementedError(f'Unrecognized user type {self.doc_type!r}')

    @property
    def full_name(self):
        return ("%s %s" % (self.first_name or '', self.last_name or '')).strip()

    @property
    def human_friendly_name(self):
        # Prefer the real name; fall back to the bare username.
        return self.full_name if self.full_name else self.raw_username

    @property
    def name_in_filters(self):
        username = self.username.split("@")[0]
        return "%s <%s>" % (self.full_name, username) if self.full_name else username

    @property
    def days_since_created(self):
        # Note this does not round, but returns the floor of days since creation
        return (datetime.utcnow() - self.created_on).days

    @property
    def timestamp_created(self):
        return get_timestamp(self.created_on)

    # legacy aliases for full_name
    formatted_name = full_name
    name = full_name
def set_full_name(self, full_name):
data = full_name.split()
self.first_name = data.pop(0)
self.last_name = ' '.join(data)
def get_user_session_data(self, domain):
from corehq.apps.custom_data_fields.models import (
SYSTEM_PREFIX,
COMMCARE_USER_TYPE_KEY,
COMMCARE_USER_TYPE_DEMO,
COMMCARE_PROJECT
)
session_data = self.metadata
if self.is_commcare_user() and self.is_demo_user:
session_data.update({
COMMCARE_USER_TYPE_KEY: COMMCARE_USER_TYPE_DEMO
})
if COMMCARE_PROJECT not in session_data:
session_data[COMMCARE_PROJECT] = domain
session_data.update({
'{}_first_name'.format(SYSTEM_PREFIX): self.first_name,
'{}_last_name'.format(SYSTEM_PREFIX): self.last_name,
'{}_phone_number'.format(SYSTEM_PREFIX): self.phone_number,
})
return session_data
    def delete(self, deleted_by_domain, deleted_by, deleted_via=None):
        """Delete this couch user and its django auth user, logging the change.

        :param deleted_by_domain: domain on whose behalf the deletion happens
        :param deleted_by: user performing the deletion (required outside tests)
        :param deleted_via: optional string identifying the code path
        """
        from corehq.apps.users.model_log import UserModelAction

        if not deleted_by and not settings.UNIT_TESTING:
            raise ValueError("Missing deleted_by")

        self.clear_quickcache_for_user()
        try:
            user = self.get_django_user()
            user.delete()
        except User.DoesNotExist:
            pass

        if deleted_by:
            # Commcare user is owned by the domain it belongs to so use self.domain for for_domain
            # Web user is never deleted except in tests so keep for_domain as None
            if self.is_commcare_user():
                for_domain = self.domain
                for_domain_required_for_log = True
            else:
                for_domain = None
                for_domain_required_for_log = False
            log_user_change(by_domain=deleted_by_domain, for_domain=for_domain,
                            couch_user=self, changed_by_user=deleted_by,
                            changed_via=deleted_via, action=UserModelAction.DELETE,
                            for_domain_required_for_log=for_domain_required_for_log)
        super(CouchUser, self).delete()  # Call the "real" delete() method.
def delete_phone_number(self, phone_number):
for i in range(0, len(self.phone_numbers)):
if self.phone_numbers[i] == phone_number:
del self.phone_numbers[i]
break
self.save()
self.delete_phone_entry(phone_number)
    def get_django_user(self, use_primary_db=False):
        """Return the paired Django ``User`` (case-insensitive username match).

        With ``use_primary_db=True`` the query is pinned to the write database
        via the router, avoiding replica lag.
        """
        queryset = User.objects
        if use_primary_db:
            queryset = queryset.using(router.db_for_write(User))
        return queryset.get(username__iexact=self.username)
def add_phone_number(self, phone_number, default=False, **kwargs):
""" Don't add phone numbers if they already exist """
if not isinstance(phone_number, str):
phone_number = str(phone_number)
self.phone_numbers = _add_to_list(self.phone_numbers, phone_number, default)
    def set_default_phone_number(self, phone_number):
        """Promote *phone_number* to the default slot and save immediately."""
        self.add_phone_number(phone_number, True)
        self.save()
    def set_phone_numbers(self, new_phone_numbers, default_number=''):
        """Replace the phone number list (deduplicated; note that the
        set round-trip does not preserve input order) and optionally mark
        *default_number* as the default. Does not save.
        """
        self.phone_numbers = list(set(new_phone_numbers))  # ensure uniqueness
        if default_number:
            self.add_phone_number(default_number, True)
    @property
    def default_phone_number(self):
        """The default phone number, per the module-level ``_get_default`` helper."""
        return _get_default(self.phone_numbers)
    # Legacy alias kept for callers that read `phone_number`.
    phone_number = default_phone_number
    def phone_numbers_extended(self, requesting_user):
        """
        Returns information about the status of each of this user's phone numbers.
        requesting_user - The user that is requesting this information (from a view)

        Each entry is a dict with 'number', 'status' in
        {'verified', 'pending', 'duplicate', 'unverified'}, and — for
        duplicates visible to the requesting user — a 'dup_url' link.
        """
        from corehq.apps.sms.models import PhoneNumber
        from corehq.apps.hqwebapp.doc_info import get_object_url
        phone_entries = self.get_phone_entries()
        def get_phone_info(phone):
            info = {}
            # Look up this user's own entry first (leniency normalizes format).
            phone_entry = phone_entries.get(apply_leniency(phone))
            if phone_entry and phone_entry.verified:
                status = 'verified'
            elif phone_entry and phone_entry.pending_verification:
                status = 'pending'
            else:
                # No entry of our own — check whether someone else holds it.
                duplicate = PhoneNumber.get_reserved_number(phone)
                if duplicate:
                    status = 'duplicate'
                    # Only expose the owner's URL if the requester can see that domain.
                    if requesting_user.is_member_of(duplicate.domain):
                        info['dup_url'] = get_object_url(duplicate.domain,
                            duplicate.owner_doc_type, duplicate.owner_id)
                else:
                    status = 'unverified'
            info.update({'number': phone, 'status': status})
            return info
        return [get_phone_info(phone) for phone in self.phone_numbers]
    @property
    def couch_id(self):
        """Alias for the couch document ``_id``."""
        return self._id
# Couch view wrappers
    @classmethod
    def all(cls):
        """Iterate every user doc via the users/by_username couch view."""
        return CouchUser.view("users/by_username", include_docs=True, reduce=False)
    @classmethod
    def username_exists(cls, username):
        """True if any user doc exists with exactly this username (reduced count > 0)."""
        reduced = cls.view('users/by_username', key=username, reduce=True).all()
        if reduced:
            return reduced[0]['value'] > 0
        return False
    @classmethod
    def by_domain(cls, domain, is_active=True, reduce=False, limit=None, skip=0, strict=False, doc_type=None):
        """Query users/by_domain for this domain.

        The view is keyed [active|inactive, domain(, doc_type)]; querying via
        the CouchUser base class omits the doc_type component and so returns
        all user types. With ``reduce=True`` returns counts instead of docs.
        """
        flag = "active" if is_active else "inactive"
        doc_type = doc_type or cls.__name__
        if cls.__name__ == "CouchUser":
            key = [flag, domain]
        else:
            key = [flag, domain, doc_type]
        extra_args = dict()
        if not reduce:
            extra_args.update(include_docs=True)
            if limit is not None:
                extra_args.update(
                    limit=limit,
                    skip=skip
                )
        return cls.view(
            "users/by_domain",
            reduce=reduce,
            startkey=key,
            endkey=key + [{}],
            #stale=None if strict else settings.COUCH_STALE_QUERY,
            **extra_args
        ).all()
    @classmethod
    def ids_by_domain(cls, domain, is_active=True):
        """Return just the doc ids of this domain's users (no docs fetched)."""
        flag = "active" if is_active else "inactive"
        if cls.__name__ == "CouchUser":
            key = [flag, domain]
        else:
            key = [flag, domain, cls.__name__]
        return [r['id'] for r in cls.get_db().view("users/by_domain",
            startkey=key,
            endkey=key + [{}],
            reduce=False,
            include_docs=False,
        )]
    @classmethod
    def total_by_domain(cls, domain, is_active=True):
        """Count users in *domain* using the reduced users/by_domain view."""
        data = cls.by_domain(domain, is_active, reduce=True)
        return data[0].get('value', 0) if data else 0
    @classmethod
    def phone_users_by_domain(cls, domain):
        """Users of *domain* that have phone numbers, via a dedicated couch view."""
        return CouchUser.view("users/phone_users_by_domain",
            startkey=[domain],
            endkey=[domain, {}],
            include_docs=True,
        )
    def is_previewer(self):
        """True for superusers or usernames matching settings.PREVIEWER_RE."""
        from django.conf import settings
        return (self.is_superuser
                or bool(re.compile(settings.PREVIEWER_RE).match(self.username)))
    def sync_from_django_user(self, django_user):
        """Copy the DjangoUserMixin attributes from the Django user onto this doc.

        first/last name only sync one way (Django -> couch) when couch has no
        value, because the Django side may hold truncated values.
        """
        if not django_user:
            django_user = self.get_django_user()
        for attr in DjangoUserMixin.ATTRS:
            # name might be truncated so don't backwards sync
            one_way_attrs = ['first_name', 'last_name']
            if attr not in one_way_attrs or not getattr(self, attr):
                # don't sync one-way attrs back to couch unless we didn't have
                # something there in the first place. this is hack to allow
                # unit test workflows that create the django user first to work
                setattr(self, attr, getattr(django_user, attr))
    def sync_to_django_user(self):
        """Build (or update) the paired Django ``User`` from this doc's attrs.

        Returns the unsaved Django user with DO_NOT_SAVE_COUCH_USER set so the
        post-save signal won't sync back and loop.
        """
        try:
            django_user = self.get_django_user()
        except User.DoesNotExist:
            django_user = User(username=self.username)
        for attr in DjangoUserMixin.ATTRS:
            attr_val = getattr(self, attr)
            # Booleans are normalized to strict True/False; other empty values
            # become '' (except last_login, which may legitimately be None).
            if attr in [
                'is_active',
                'is_staff',
                'is_superuser',
            ]:
                attr_val = attr_val if attr_val is True else False
            elif not attr_val and attr != 'last_login':
                attr_val = ''
            # truncate names when saving to django
            if attr == 'first_name' or attr == 'last_name':
                attr_val = attr_val[:30]
            setattr(django_user, attr, attr_val)
        django_user.DO_NOT_SAVE_COUCH_USER = True
        return django_user
    @classmethod
    def wrap_correctly(cls, source, allow_deleted_doc_types=False):
        """Wrap a raw couch doc with the concrete user class named by its doc_type.

        With ``allow_deleted_doc_types`` the deleted suffix is stripped first so
        soft-deleted docs still wrap. Raises KeyError if doc_type is missing or
        not one of the known user classes.
        """
        try:
            doc_type = source['doc_type']
        except KeyError as err:
            raise KeyError(f"'doc_type' not found in {source!r}") from err
        if allow_deleted_doc_types:
            doc_type = doc_type.replace(DELETED_SUFFIX, '')
        return {
            'WebUser': WebUser,
            'CommCareUser': CommCareUser,
            'FakeUser': FakeUser,
        }[doc_type].wrap(source)
    @classmethod
    @quickcache(['username'], skip_arg="strict")
    def get_by_username(cls, username, strict=False):
        """Fetch a user by exact username, caching the result.

        ``strict`` is consumed by quickcache's skip_arg — presumably it bypasses
        the cache for a fresh read; confirm against quickcache docs.
        Raises MultipleResultsFound if the view returns more than one row.
        """
        if not username:
            return None
        view_result = cls.get_db().view(
            'users/by_username',
            key=username,
            include_docs=True,
            reduce=False,
        )
        result = view_result.all()
        if len(result) > 1:
            raise MultipleResultsFound('"{}": {}'.format(
                username, ', '.join([row['id'] for row in result])
            ))
        result = result[0] if result else None
        # Guard against stale view rows: the doc itself must still match.
        if result and result['doc'] and result['doc']['username'] == username:
            couch_user = cls.wrap_correctly(result['doc'])
            # Warm the by-user-id cache while we have the doc in hand.
            cls.get_by_user_id.set_cached_value(couch_user.__class__, couch_user.get_id).to(couch_user)
            return couch_user
        else:
            return None
    def clear_quickcache_for_user(self):
        """Invalidate every quickcache entry keyed on this user (username, id,
        per-domain lookups, dropdown link caches)."""
        from corehq.apps.domain.views.base import (
            get_domain_links_for_dropdown,
            get_enterprise_links_for_dropdown,
        )
        from corehq.apps.sms.util import is_user_contact_active
        self.get_by_username.clear(self.__class__, self.username)
        self.get_by_user_id.clear(self.__class__, self.user_id)
        username_to_user_id.clear(self.username)
        # CommCareUser has a single `domain`; WebUser has `domains` — handle both.
        domains = getattr(self, 'domains', None)
        if domains is None:
            domain = getattr(self, 'domain', None)
            domains = [domain] if domain else []
        for domain in domains:
            self.get_by_user_id.clear(self.__class__, self.user_id, domain)
            is_user_contact_active.clear(domain, self.user_id)
        Domain.active_for_couch_user.clear(self)
        get_domain_links_for_dropdown.clear(self)
        get_enterprise_links_for_dropdown.clear(self)
    @classmethod
    @quickcache(['userID', 'domain'])
    def get_by_user_id(cls, userID, domain=None):
        """
        if domain is given, checks to make sure the user is a member of that domain
        returns None if there's no user found or if the domain check fails
        """
        try:
            couch_user = cls.wrap_correctly(cls.get_db().get(userID))
        except ResourceNotFound:
            return None
        # Calling e.g. WebUser.get_by_user_id on a CommCareUser doc is an error;
        # the CouchUser base class accepts any user type.
        if couch_user.doc_type != cls.__name__ and cls.__name__ != "CouchUser":
            raise CouchUser.AccountTypeError()
        if domain:
            if not couch_user.is_member_of(domain):
                return None
        # Warm the by-username cache while we have the doc in hand.
        cls.get_by_username.set_cached_value(couch_user.__class__, couch_user.username).to(couch_user)
        return couch_user
    @classmethod
    def from_django_user(cls, django_user, strict=False):
        """Look up the couch user paired with a Django auth user."""
        return cls.get_by_username(django_user.username, strict=strict)
@classmethod
def create(cls, domain, username, password, created_by, created_via, email=None, uuid='', date='',
first_name='', last_name='', metadata=None, **kwargs):
try:
django_user = User.objects.using(router.db_for_write(User)).get(username=username)
except User.DoesNotExist:
django_user = create_user(
username, password=password, email=email,
first_name=first_name, last_name=last_name, **kwargs
)
if uuid:
if not re.match(r'[\w-]+', uuid):
raise cls.InvalidID('invalid id %r' % uuid)
couch_user = cls(_id=uuid)
else:
couch_user = cls()
if date:
couch_user.created_on = force_to_datetime(date)
else:
couch_user.created_on = datetime.utcnow()
if 'user_data' in kwargs:
raise ValueError("Do not access user_data directly, pass metadata argument to create.")
metadata = metadata or {}
metadata.update({'commcare_project': domain})
couch_user.update_metadata(metadata)
couch_user.sync_from_django_user(django_user)
return couch_user
    def to_be_deleted(self):
        """True when base_doc carries the deleted suffix (same test as is_deleted)."""
        return self.base_doc.endswith(DELETED_SUFFIX)
    @classmethod
    def save_docs(cls, docs, **kwargs):
        """Bulk-save user docs: stamp last_modified, persist, then invalidate
        each user's quickcaches."""
        utcnow = datetime.utcnow()
        for doc in docs:
            doc['last_modified'] = utcnow
        super(CouchUser, cls).save_docs(docs, **kwargs)
        for user in docs:
            user.clear_quickcache_for_user()
    # Alias kept for callers using the older bulk_save name.
    bulk_save = save_docs
    def save(self, fire_signals=True, **params):
        # HEADS UP!
        # When updating this method, please also ensure that your updates also
        # carry over to bulk_auto_deactivate_commcare_users.
        """Save the doc: stamp last_modified, clear caches, enforce username
        uniqueness under a critical section, sync to the Django user, then
        optionally fire post-save signals."""
        self.last_modified = datetime.utcnow()
        self.clear_quickcache_for_user()
        with CriticalSection(['username-check-%s' % self.username], timeout=120):
            # test no username conflict
            by_username = self.get_db().view('users/by_username', key=self.username, reduce=False).first()
            if by_username and by_username['id'] != self._id:
                raise self.Inconsistent("CouchUser with username %s already exists" % self.username)
            # Only sync an already-persisted, non-deleted doc to Django.
            if self._rev and not self.to_be_deleted():
                django_user = self.sync_to_django_user()
                django_user.save()
            super(CouchUser, self).save(**params)
        if fire_signals:
            self.fire_signals()
    def fire_signals(self):
        """Send the couch_user_post_save signal robustly and log any receiver errors."""
        from .signals import couch_user_post_save
        results = couch_user_post_save.send_robust(sender='couch_user', couch_user=self)
        log_signal_errors(results, "Error occurred while syncing user (%s)", {'username': self.username})
    @classmethod
    def django_user_post_save_signal(cls, sender, django_user, created, max_tries=3):
        """Django post-save hook: mirror Django user changes into couch.

        DO_NOT_SAVE_COUCH_USER (set by sync_to_django_user) breaks the
        Django -> couch -> Django sync cycle. On a couch write conflict the
        sync is retried up to max_tries times after clearing caches.
        """
        if hasattr(django_user, 'DO_NOT_SAVE_COUCH_USER'):
            del django_user.DO_NOT_SAVE_COUCH_USER
        else:
            couch_user = cls.from_django_user(django_user)
            if couch_user:
                couch_user.sync_from_django_user(django_user)
                try:
                    # avoid triggering cyclical sync
                    super(CouchUser, couch_user).save(**get_safe_write_kwargs())
                except ResourceConflict:
                    if max_tries > 0:
                        couch_user.clear_quickcache_for_user()
                        cls.django_user_post_save_signal(sender, django_user, created, max_tries - 1)
                    else:
                        raise
                couch_user.clear_quickcache_for_user()
    def is_deleted(self):
        # NOTE: identical to to_be_deleted(); both names are kept for callers.
        return self.base_doc.endswith(DELETED_SUFFIX)
    def get_viewable_reports(self, domain=None, name=False, slug=False):
        """Return the reports this user may view, as model paths by default,
        or as slugs/names when the corresponding flag is set (unresolvable
        models are logged and dropped)."""
        def slug_name(model):
            try:
                if slug:
                    return to_function(model).slug
                if name:
                    return to_function(model).name
            except AttributeError:
                logging.warning("Unable to load report model: %s", model)
                return None
        models = self._get_viewable_report_slugs(domain)
        if slug or name:
            # Filter out models that failed to resolve (slug_name returned None).
            return [_f for _f in [slug_name(m) for m in models] if _f]
        return models
    def _get_viewable_report_slugs(self, domain):
        """Raw report list from the user's role (mobile worker) or domain
        membership (web user); empty when neither exists."""
        domain = domain or getattr(self, 'current_domain', None)
        if self.is_commcare_user():
            role = self.get_role(domain)
            if role is None:
                return []
            else:
                return role.permissions.view_report_list
        else:
            dm = self.get_domain_membership(domain, allow_enterprise=True)
            return dm.viewable_reports() if dm else []
    def can_view_some_reports(self, domain):
        """True if the user can view all reports or has at least one viewable report."""
        return self.can_view_reports(domain) or bool(self.get_viewable_reports(domain))
def can_access_any_exports(self, domain=None):
return self.can_view_reports(domain) or any([
permission_slug for permission_slug in self._get_viewable_report_slugs(domain)
if permission_slug in EXPORT_PERMISSIONS
])
    def can_view_some_tableau_viz(self, domain):
        """True if the user (with full location access) may view tableau
        reports or has at least one visualization assigned."""
        if not self.can_access_all_locations(domain):
            return False
        from corehq.apps.reports.models import TableauVisualization
        return self.can_view_tableau(domain) or bool(TableauVisualization.for_user(domain, self))
    def can_login_as(self, domain):
        """True if the user holds either login-as permission for *domain*."""
        return (
            self.has_permission(domain, 'login_as_all_users')
            or self.has_permission(domain, 'limited_login_as')
        )
    def is_current_web_user(self, request):
        """True if this user is the one making *request*."""
        return self.user_id == request.couch_user.user_id
    # gets hit for can_view_reports, etc.
    def __getattr__(self, item):
        """Dynamically synthesize `can_<perm>` permission-check methods.

        Any attribute starting with 'can_' (with a non-empty suffix) returns a
        closure that checks that permission; everything else raises
        AttributeError as usual.
        """
        if item.startswith('can_'):
            perm = item[len('can_'):]
            if perm:
                fn = self._get_perm_check_fn(perm)
                fn.__name__ = item
                return fn
        raise AttributeError("'{}' object has no attribute '{}'".format(
            self.__class__.__name__, item))
    def _get_perm_check_fn(self, perm):
        """Build the closure backing a synthesized `can_<perm>` method;
        domain defaults to current_domain when not supplied."""
        def fn(domain=None, data=None):
            domain = domain or getattr(self, 'current_domain', None)
            return self.has_permission(domain, perm, data)
        return fn
    def get_location_id(self, domain):
        """Primary location id from the domain membership, or None."""
        return getattr(self.get_domain_membership(domain), 'location_id', None)
    def set_has_built_app(self):
        """Flip the has_built_app flag (once), saving only on the transition."""
        if not self.has_built_app:
            self.has_built_app = True
            self.save()
    def log_user_create(self, domain, created_by, created_via, by_domain_required_for_log=True):
        """Write a CREATE audit-log entry for this user.

        Skipped entirely in unit tests when no creator/via is supplied.
        """
        from corehq.apps.users.model_log import UserModelAction
        if settings.UNIT_TESTING and created_by is None and created_via is None:
            return
        # fallback to self if not created by any user
        created_by = created_by or self
        # Commcare user is owned by the domain it belongs to so use self.domain for for_domain
        # Web user is not "created" by a domain but invited so keep for_domain as None
        if self.is_commcare_user():
            for_domain = self.domain
            for_domain_required_for_log = True
        else:
            for_domain = None
            for_domain_required_for_log = False
        log_user_change(
            by_domain=domain,
            for_domain=for_domain,
            couch_user=self,
            changed_by_user=created_by,
            changed_via=created_via,
            action=UserModelAction.CREATE,
            by_domain_required_for_log=by_domain_required_for_log,
            for_domain_required_for_log=for_domain_required_for_log
        )
    def belongs_to_messaging_domain(self):
        """True if any of the user's domains has messaging access granted."""
        domains = (Domain.get_by_name(domain) for domain in self.domains)
        # The reason we iterate through domains, rather than fetch them all at once (there is a view to do so)
        # is due to concerns about scale. Most users belong to one or a few domains, so iteration isn't expensive.
        # For users that DO belong to many domains, I'm working off the assumption that most of them are for
        # enterprise domains, which have turned on messaging for most of their domains -- so we likely will
        # short-circuit after only a few domains
        return any(domain.granted_messaging_access for domain in domains)
class CommCareUser(CouchUser, SingleMembershipMixin, CommCareMobileContactMixin):
    """A mobile worker: a CouchUser that belongs to exactly one domain."""
    # The single project space this mobile worker belongs to.
    domain = StringProperty()
    # Device id captured at registration time.
    registering_device_id = StringProperty()
    # used by loadtesting framework - should typically be empty
    loadtest_factor = IntegerProperty()
    is_loadtest_user = BooleanProperty(default=False)
    # Practice-mode ("demo") worker flags.
    is_demo_user = BooleanProperty(default=False)
    demo_restore_id = IntegerProperty()
    # used by user provisioning workflow. defaults to true unless explicitly overridden during
    # user creation
    is_account_confirmed = BooleanProperty(default=True)
    # This means that this user represents a location, and has a 1-1 relationship
    # with a location where location.location_type.has_user == True
    user_location_id = StringProperty()
    @classmethod
    def wrap(cls, data):
        # migrations from using role_id to using the domain_memberships
        """Wrap a raw doc, migrating legacy role_id into domain_membership and
        backfilling user_data['commcare_project'] from the domain."""
        role_id = None
        if 'role_id' in data:
            role_id = data["role_id"]
            del data['role_id']
        if not data.get('domain_membership', {}).get('domain', None):
            data['domain_membership'] = DomainMembership(
                domain=data.get('domain', ""), role_id=role_id
            ).to_json()
        if not data.get('user_data', {}).get('commcare_project'):
            data['user_data'] = dict(data['user_data'], **{'commcare_project': data['domain']})
        return super(CommCareUser, cls).wrap(data)
    @property
    def metadata(self):
        """Effective user data: the raw user_data dict overlaid with the
        fields of the assigned custom-data profile, if any. Returns a fresh
        dict (built from to_json), so mutating it does not update the doc."""
        from corehq.apps.custom_data_fields.models import PROFILE_SLUG
        data = self.to_json().get('user_data', {})
        profile_id = data.get(PROFILE_SLUG)
        profile = self.get_user_data_profile(profile_id)
        if profile:
            data.update(profile.fields)
        return data
    def update_metadata(self, data):
        """Merge *data* into user_data, validating against the (possibly new)
        profile. Profile-owned keys are stripped from the stored data; a value
        conflicting with the profile raises ValueError.

        Returns (metadata_updated, profile_updated) booleans. Does not save.
        """
        from corehq.apps.custom_data_fields.models import PROFILE_SLUG
        new_data = {**self.user_data, **data}
        old_profile_id = self.user_data.get(PROFILE_SLUG)
        profile = self.get_user_data_profile(new_data.get(PROFILE_SLUG))
        if profile:
            # A key may not be set to a value different from the profile's.
            overlap = {k for k, v in profile.fields.items() if new_data.get(k) and v != new_data[k]}
            if overlap:
                raise ValueError("metadata properties conflict with profile: {}".format(", ".join(overlap)))
            for key in profile.fields.keys():
                new_data.pop(key, None)
        profile_updated = old_profile_id != new_data.get(PROFILE_SLUG)
        metadata_updated = new_data != self.user_data
        self.user_data = new_data
        return metadata_updated, profile_updated
    def pop_metadata(self, key, default=None):
        """Remove *key* from the raw user_data dict, returning its value or *default*."""
        return self.user_data.pop(key, default)
def get_user_data_profile(self, profile_id):
from corehq.apps.users.views.mobile.custom_data_fields import UserFieldsView
from corehq.apps.custom_data_fields.models import CustomDataFieldsProfile
if not profile_id:
return None
try:
profile = CustomDataFieldsProfile.objects.get(id=profile_id)
except CustomDataFieldsProfile.DoesNotExist:
raise ValueError("Could not find profile with id {}".format(profile_id))
if profile.definition.domain != self.domain:
raise ValueError("Could not find profile with id {}".format(profile_id))
if profile.definition.field_type != UserFieldsView.field_type:
raise ValueError("Could not find profile with id {}".format(profile_id))
return profile
    def _is_demo_user_cached_value_is_stale(self):
        """True when the cached practice-mode worker list for this domain
        disagrees with this doc's is_demo_user flag (Ellipsis = cache miss)."""
        from corehq.apps.users.dbaccessors import get_practice_mode_mobile_workers
        cached_demo_users = get_practice_mode_mobile_workers.get_cached_value(self.domain)
        if cached_demo_users is not Ellipsis:
            cached_is_demo_user = any(user['_id'] == self._id for user in cached_demo_users)
            if cached_is_demo_user != self.is_demo_user:
                return True
        return False
    def clear_quickcache_for_user(self):
        """Extend the base invalidation with mobile-worker specific caches
        (usercase id, restore loadtest factor, demo-worker list)."""
        from corehq.apps.users.dbaccessors import get_practice_mode_mobile_workers
        self.get_usercase_id.clear(self)
        get_loadtest_factor_for_restore_cache_key.clear(self.domain, self.user_id)
        # Only bust the (expensive) demo-worker list cache when it's actually stale.
        if self._is_demo_user_cached_value_is_stale():
            get_practice_mode_mobile_workers.clear(self.domain)
        super(CommCareUser, self).clear_quickcache_for_user()
    def save(self, fire_signals=True, spawn_task=False, **params):
        """Save the mobile worker, then fire commcare_user_post_save and sync
        usercases (optionally as an async task via spawn_task)."""
        is_new_user = self.new_document  # before saving, check if this is a new document
        super(CommCareUser, self).save(fire_signals=fire_signals, **params)
        if fire_signals:
            from corehq.apps.callcenter.tasks import sync_usercases_if_applicable
            from .signals import commcare_user_post_save
            results = commcare_user_post_save.send_robust(sender='couch_user', couch_user=self,
                                                          is_new_user=is_new_user)
            log_signal_errors(results, "Error occurred while syncing user (%s)", {'username': self.username})
            sync_usercases_if_applicable(self, spawn_task)
    def delete(self, deleted_by_domain, deleted_by, deleted_via=None):
        """Delete the mobile worker, first discarding any demo-restore payload."""
        from corehq.apps.ota.utils import delete_demo_restore_for_user
        # clear demo restore objects if any
        delete_demo_restore_for_user(self)
        super(CommCareUser, self).delete(deleted_by_domain, deleted_by=deleted_by, deleted_via=deleted_via)
    @property
    def project(self):
        """The Domain object this mobile worker belongs to."""
        return Domain.get_by_name(self.domain)
    def is_domain_admin(self, domain=None):
        # cloudcare workaround
        # Mobile workers are never domain admins.
        return False
    @classmethod
    def create(cls,
               domain,
               username,
               password,
               created_by,
               created_via,
               email=None,
               uuid='',
               date='',
               phone_number=None,
               location=None,
               commit=True,
               is_account_confirmed=True,
               metadata=None,
               **kwargs):
        """
        Main entry point into creating a CommCareUser (mobile worker).

        Builds on CouchUser.create, then fills in mobile-worker fields
        (domain, device, membership, location, metadata). Saves and writes the
        creation audit log unless ``commit=False``.
        """
        uuid = uuid or uuid4().hex
        # if the account is not confirmed, also set is_active false so they can't login
        if 'is_active' not in kwargs:
            kwargs['is_active'] = is_account_confirmed
        elif not is_account_confirmed:
            assert not kwargs['is_active'], \
                "it's illegal to create a user with is_active=True and is_account_confirmed=False"
        # metadata is deliberately NOT passed to the base create here; it is
        # applied below once `domain` is set (see comment further down).
        commcare_user = super(CommCareUser, cls).create(domain, username, password, created_by, created_via,
                                                        email, uuid, date, metadata=None, **kwargs)
        if phone_number is not None:
            commcare_user.add_phone_number(phone_number)
        device_id = kwargs.get('device_id', '')
        # populate the couch user
        commcare_user.domain = domain
        commcare_user.device_ids = [device_id]
        commcare_user.registering_device_id = device_id
        commcare_user.is_account_confirmed = is_account_confirmed
        commcare_user.domain_membership = DomainMembership(domain=domain, **kwargs)
        # metadata can't be set until domain is present
        if 'user_data' in kwargs:
            raise ValueError("Do not access user_data directly, pass metadata argument to create.")
        commcare_user.update_metadata(metadata or {})
        if location:
            commcare_user.set_location(location, commit=False)
        if commit:
            commcare_user.save(**get_safe_write_kwargs())
            commcare_user.log_user_create(domain, created_by, created_via)
        return commcare_user
    @property
    def filter_flag(self):
        """Report filter category for mobile workers (HQUserType.ACTIVE)."""
        from corehq.apps.reports.models import HQUserType
        return HQUserType.ACTIVE
    def is_commcare_user(self):
        # Mobile worker: always True for this class.
        return True
    def is_web_user(self):
        # Mobile worker: never a web user.
        return False
    def supports_lockout(self):
        """Login lockout applies unless the project disabled it."""
        return not self.project.disable_mobile_login_lockout
    def to_ota_restore_user(self, domain, request_user=None):
        """Wrap this worker for OTA restore; *domain* must be the worker's own."""
        assert domain == self.domain
        return OTARestoreCommCareUser(
            self.domain,
            self,
            # Default to a factor of 1 when loadtest_factor is unset/falsy.
            loadtest_factor=self.loadtest_factor or 1,
            request_user=request_user,
        )
    def _get_form_ids(self):
        """Ids of this worker's (non-deleted) form submissions."""
        return XFormInstance.objects.get_form_ids_for_user(self.domain, self.user_id)
    def _get_case_ids(self):
        """Ids of cases owned by this worker in their domain."""
        return CommCareCase.objects.get_case_ids_in_domain_by_owners(self.domain, [self.user_id])
    def _get_deleted_form_ids(self):
        """Ids of this worker's soft-deleted form submissions."""
        return XFormInstance.objects.get_deleted_form_ids_for_user(self.domain, self.user_id)
    def _get_deleted_case_ids(self):
        """Ids of soft-deleted cases owned by this worker."""
        return CommCareCase.objects.get_deleted_case_ids_by_owner(self.domain, self.user_id)
    def get_owner_ids(self, domain=None):
        """Case-owner ids for this worker: own id plus case-sharing group ids.

        ``domain`` is accepted for interface parity but unused here.
        """
        owner_ids = [self.user_id]
        owner_ids.extend([g._id for g in self.get_case_sharing_groups()])
        return owner_ids
    def unretire(self, unretired_by_domain, unretired_by, unretired_via=None):
        """
        This un-deletes a user, but does not fully restore the state to
        how it previously was. Using this has these caveats:
        - It will not restore Case Indexes that were removed
        - It will not restore the user's phone numbers
        - It will not restore reminders for cases

        Returns (success, error_message_or_None).
        """
        from corehq.apps.users.model_log import UserModelAction
        if not unretired_by and not settings.UNIT_TESTING:
            raise ValueError("Missing unretired_by")
        # A live user may have reclaimed the username while this one was retired.
        by_username = self.get_db().view('users/by_username', key=self.username, reduce=False).first()
        if by_username and by_username['id'] != self._id:
            return False, "A user with the same username already exists in the system"
        if self.base_doc.endswith(DELETED_SUFFIX):
            self.base_doc = self.base_doc[:-len(DELETED_SUFFIX)]
        deleted_form_ids = self._get_deleted_form_ids()
        XFormInstance.objects.soft_undelete_forms(self.domain, deleted_form_ids)
        deleted_case_ids = self._get_deleted_case_ids()
        CommCareCase.objects.soft_undelete_cases(self.domain, deleted_case_ids)
        undelete_system_forms.delay(self.domain, set(deleted_form_ids), set(deleted_case_ids))
        self.save()
        if unretired_by:
            # Logged as CREATE: un-retiring effectively re-creates the account.
            log_user_change(
                by_domain=unretired_by_domain,
                for_domain=self.domain,
                couch_user=self,
                changed_by_user=unretired_by,
                changed_via=unretired_via,
                action=UserModelAction.CREATE,
            )
        return True, None
    def retire(self, retired_by_domain, deleted_by, deleted_via=None):
        """Soft-delete this worker: tag the doc and all their forms/cases as
        deleted (async tasks), drop the Django user, and audit-log the delete."""
        from corehq.apps.users.model_log import UserModelAction
        if not deleted_by and not settings.UNIT_TESTING:
            raise ValueError("Missing deleted_by")
        suffix = DELETED_SUFFIX
        deletion_id = uuid4().hex
        deletion_date = datetime.utcnow()
        # doc_type remains the same, since the views use base_doc instead
        if not self.base_doc.endswith(suffix):
            self.base_doc += suffix
            self['-deletion_id'] = deletion_id
            self['-deletion_date'] = deletion_date
        deleted_cases = set()
        for case_id_list in chunked(self._get_case_ids(), 50):
            tag_cases_as_deleted_and_remove_indices.delay(self.domain, case_id_list, deletion_id, deletion_date)
            deleted_cases.update(case_id_list)
        deleted_forms = set()
        for form_id_list in chunked(self._get_form_ids(), 50):
            tag_forms_as_deleted_rebuild_associated_cases.delay(
                self.user_id, self.domain, form_id_list, deletion_id, deletion_date, deleted_cases=deleted_cases
            )
            deleted_forms.update(form_id_list)
        tag_system_forms_as_deleted.delay(self.domain, deleted_forms, deleted_cases, deletion_id, deletion_date)
        from corehq.apps.app_manager.views.utils import unset_practice_mode_configured_apps
        unset_practice_mode_configured_apps(self.domain, self.get_id)
        try:
            django_user = self.get_django_user()
        except User.DoesNotExist:
            pass
        else:
            django_user.delete()
        if deleted_by:
            log_user_change(by_domain=retired_by_domain, for_domain=self.domain,
                            couch_user=self, changed_by_user=deleted_by,
                            changed_via=deleted_via, action=UserModelAction.DELETE)
        self.save()
    def confirm_account(self, password):
        """Activate a provisioned-but-unconfirmed account and set its password.

        Raises IllegalAccountConfirmation if already confirmed.
        """
        if self.is_account_confirmed:
            raise IllegalAccountConfirmation('Account is already confirmed')
        assert not self.is_active, 'Active account should not be unconfirmed!'
        self.is_active = True
        self.is_account_confirmed = True
        self.set_password(password)
        self.save()
    def get_case_sharing_groups(self):
        """All case-sharing groups for this worker: faked location-backed
        groups for their assigned locations plus real case-sharing Groups."""
        from corehq.apps.groups.models import Group
        from corehq.apps.locations.models import get_case_sharing_groups_for_locations
        # get faked location group objects
        groups = list(get_case_sharing_groups_for_locations(
            self.get_sql_locations(self.domain),
            self._id
        ))
        groups += [group for group in Group.by_user_id(self._id) if group.case_sharing]
        return groups
    def get_reporting_groups(self):
        """This worker's groups flagged for reporting."""
        from corehq.apps.groups.models import Group
        return [group for group in Group.by_user_id(self._id) if group.reporting]
    @classmethod
    def cannot_share(cls, domain, limit=None, skip=0):
        """Workers in *domain* that are not in exactly one case-sharing group,
        recursively fetching further pages until *limit* results are gathered."""
        users_checked = list(cls.by_domain(domain, limit=limit, skip=skip))
        if not users_checked:
            # stop fetching when you come back with none
            return []
        users = [user for user in users_checked if len(user.get_case_sharing_groups()) != 1]
        if limit is not None:
            total = cls.total_by_domain(domain)
            max_limit = min(total - skip, limit)
            if len(users) < max_limit:
                # NOTE(review): new_limit subtracts len(users_checked), not
                # len(users) — verify this paging arithmetic is intended.
                new_limit = max_limit - len(users_checked)
                new_skip = skip + len(users_checked)
                users.extend(cls.cannot_share(domain, new_limit, new_skip))
                return users
        return users
    def get_group_ids(self):
        """Ids of all groups this worker belongs to (unwrapped view rows)."""
        from corehq.apps.groups.models import Group
        return Group.by_user_id(self._id, wrap=False)
    def set_groups(self, group_ids):
        """
        Reconcile this worker's group membership to exactly *group_ids*.

        Raises ValidationError (before saving anything) if any group to add
        belongs to a different domain.

        :returns: True if groups were updated
        """
        from corehq.apps.groups.models import Group
        desired = set(group_ids)
        current = set(self.get_group_ids())
        touched = []
        faulty_groups = []
        for to_add in desired - current:
            group = Group.get(to_add)
            if group.domain != self.domain:
                faulty_groups.append(to_add)
                continue
            group.add_user(self._id, save=False)
            touched.append(group)
        # Abort before bulk_save so no membership change is persisted.
        if faulty_groups:
            raise ValidationError("Unable to save groups. The following group_ids are not in the current domain: "
                                  + ', '.join(faulty_groups))
        for to_remove in current - desired:
            group = Group.get(to_remove)
            group.remove_user(self._id)
            touched.append(group)
        Group.bulk_save(touched)
        return bool(touched)
    def get_time_zone(self):
        """Time zone from the worker's usercase, or None if there is no usercase."""
        if self.memoized_usercase:
            return self.memoized_usercase.get_time_zone()
        return None
    def get_language_code(self):
        """Language from the user doc, falling back to the usercase, else None."""
        if self.language:
            return self.language
        if self.memoized_usercase:
            return self.memoized_usercase.get_language_code()
        return None
    @property
    @memoized
    def location(self):
        """Memoized alias for the primary SQLLocation (see ``sql_location``)."""
        return self.sql_location
    @property
    def sql_location(self):
        """The primary SQLLocation, or None if unset or not found."""
        from corehq.apps.locations.models import SQLLocation
        if self.location_id:
            return SQLLocation.objects.get_or_None(location_id=self.location_id)
        return None
    def get_location_ids(self, domain):
        # domain arg included here for compatibility with WebUser
        return self.assigned_location_ids
    def get_sql_locations(self, domain):
        # domain arg included here for compatibility with WebUser
        """Queryset of this worker's assigned SQLLocations (possibly empty)."""
        from corehq.apps.locations.models import SQLLocation
        if self.assigned_location_ids:
            return SQLLocation.objects.filter(location_id__in=self.assigned_location_ids)
        else:
            return SQLLocation.objects.none()
    def add_to_assigned_locations(self, location, commit=True):
        """Append *location* to the assigned list (doc and membership) and
        refresh metadata; if no primary location exists yet, delegate to
        set_location instead."""
        if self.location_id:
            if location.location_id in self.assigned_location_ids:
                return
            self.assigned_location_ids.append(location.location_id)
            self.get_domain_membership(self.domain).assigned_location_ids.append(location.location_id)
            self.update_metadata({'commcare_location_ids': user_location_data(self.assigned_location_ids)})
            if commit:
                self.save()
        else:
            self.set_location(location, commit=commit)
    @memoized
    def get_sql_location(self, domain):
        """Memoized primary SQLLocation; *domain* kept for WebUser interface parity."""
        return self.sql_location
    def set_location(self, location, commit=True):
        """
        Set the primary location, and all important user data, for
        the user.

        :param location: may be a sql or couch location
        """
        from corehq.apps.fixtures.models import UserLookupTableType
        if not location.location_id:
            raise AssertionError("You can't set an unsaved location")
        self.update_metadata({'commcare_location_id': location.location_id})
        if not location.location_type_object.administrative:
            # just need to trigger a get or create to make sure
            # this exists, otherwise things blow up
            sp = SupplyInterface(self.domain).get_or_create_by_location(location)
            self.update_metadata({
                'commtrack-supply-point': sp.case_id
            })
        self.create_location_delegates([location])
        self.update_metadata({
            'commcare_primary_case_sharing_id':
            location.location_id
        })
        # Bump the locations fixture so the phone re-syncs.
        self.update_fixture_status(UserLookupTableType.LOCATION)
        self.location_id = location.location_id
        self.get_domain_membership(self.domain).location_id = location.location_id
        if self.location_id not in self.assigned_location_ids:
            self.assigned_location_ids.append(self.location_id)
            self.get_domain_membership(self.domain).assigned_location_ids.append(self.location_id)
            self.update_metadata({'commcare_location_ids': user_location_data(self.assigned_location_ids)})
        self.get_sql_location.reset_cache(self)
        if commit:
            self.save()
    def unset_location(self, fall_back_to_next=False, commit=True):
        """
        Resets primary location to next available location from assigned_location_ids.
        If there are no more locations in assigned_location_ids,
        then the primary location and user data are cleared

        If fall_back_to_next is True, primary location is not set to next but cleared.
        This option exists only to be backwards compatible when user can only have one location
        """
        from corehq.apps.fixtures.models import UserLookupTableType
        from corehq.apps.locations.models import SQLLocation
        old_primary_location_id = self.location_id
        if old_primary_location_id:
            self._remove_location_from_user(old_primary_location_id)
        if self.assigned_location_ids:
            self.update_metadata({'commcare_location_ids': user_location_data(self.assigned_location_ids)})
        elif self.metadata.get('commcare_location_ids'):
            self.pop_metadata('commcare_location_ids')
        if self.assigned_location_ids and fall_back_to_next:
            new_primary_location_id = self.assigned_location_ids[0]
            # set_location saves the doc itself when commit is left True.
            self.set_location(SQLLocation.objects.get(location_id=new_primary_location_id))
        else:
            # Clear all location-derived user data and delegate cases.
            self.pop_metadata('commcare_location_id', None)
            self.pop_metadata('commtrack-supply-point', None)
            self.pop_metadata('commcare_primary_case_sharing_id', None)
            self.location_id = None
            self.clear_location_delegates()
            self.update_fixture_status(UserLookupTableType.LOCATION)
            self.get_domain_membership(self.domain).location_id = None
            self.get_sql_location.reset_cache(self)
            if commit:
                self.save()
    def unset_location_by_id(self, location_id, fall_back_to_next=False):
        """
        Unset a location that the user is assigned to that may or may not be primary location.
            If the location_id is primary-location, then next available location from
            assigned_location_ids is set as the primary-location.
            If fall_back_to_next is True, primary location is not set to next
        """
        if location_id == self.location_id:
            # check if primary location
            self.unset_location(fall_back_to_next)
        else:
            self._remove_location_from_user(location_id)
            if self.assigned_location_ids:
                self.update_metadata({'commcare_location_ids': user_location_data(self.assigned_location_ids)})
            else:
                self.pop_metadata('commcare_location_ids')
            self.save()
    def _remove_location_from_user(self, location_id):
        """Drop *location_id* from both the doc's and the membership's assigned
        lists, reporting (not raising) when it's already absent."""
        from corehq.apps.fixtures.models import UserLookupTableType
        try:
            self.assigned_location_ids.remove(location_id)
            self.update_fixture_status(UserLookupTableType.LOCATION)
        except ValueError:
            notify_exception(None, "Location missing from user", {
                'user_id': self._id,
                'location_id': location_id
            })
        try:
            self.get_domain_membership(self.domain).assigned_location_ids.remove(location_id)
        except ValueError:
            notify_exception(None, "Location missing from domain membership", {
                'user_id': self._id,
                'location_id': location_id
            })
    def reset_locations(self, location_ids, commit=True):
        """
        Reset user's assigned_locations to given location_ids and update user data.
        This should be called after updating primary location via set_location/unset_location
        If primary-location is not set, then next available location from
        assigned_location_ids is set as the primary-location

        :param location_ids: complete list of location ids to assign
        :param commit: when True, save the user doc at the end
        """
        from corehq.apps.locations.models import SQLLocation

        self.assigned_location_ids = location_ids
        self.get_domain_membership(self.domain).assigned_location_ids = location_ids
        if location_ids:
            self.update_metadata({
                'commcare_location_ids': user_location_data(location_ids)
            })
        else:
            self.pop_metadata('commcare_location_ids', None)

        # try to set primary-location if not set already
        if not self.location_id and location_ids:
            # commit=False: a single save below covers this change too
            self.set_location(SQLLocation.objects.get(location_id=location_ids[0]), commit=False)

        if commit:
            self.save()
def supply_point_index_mapping(self, supply_point, clear=False):
from corehq.apps.commtrack.exceptions import (
LinkedSupplyPointNotFoundError
)
if supply_point:
return {
'supply_point-' + supply_point.case_id:
(
supply_point.type,
supply_point.case_id if not clear else ''
)
}
else:
raise LinkedSupplyPointNotFoundError(
"There was no linked supply point for the location."
)
    def submit_location_block(self, caseblock, source):
        """Serialize ``caseblock`` to XML and submit it against this user's domain.

        :param caseblock: a CaseBlock instance
        :param source: short label recorded in the submission's device_id
            for audit purposes
        """
        from corehq.apps.hqcase.utils import submit_case_blocks

        submit_case_blocks(
            ElementTree.tostring(
                caseblock.as_xml(), encoding='utf-8'
            ).decode('utf-8'),
            self.domain,
            device_id=__name__ + ".CommCareUser." + source,
        )
    def clear_location_delegates(self):
        """
        Wipe all case delegate access.
        """
        from casexml.apps.case.cleanup import safe_hard_delete
        mapping = self.get_location_map_case()
        if mapping:
            # deleting the owner-map case revokes the delegated access
            safe_hard_delete(mapping)
    def create_location_delegates(self, locations):
        """
        Submit the case blocks creating the delegate case access
        for the location(s).

        Any previously existing delegate access is wiped first.
        """
        self.clear_location_delegates()

        if not locations:
            return

        index = {}
        for location in locations:
            if not location.location_type_object.administrative:
                # only non-administrative location types carry supply points
                sp = SupplyInterface(self.domain).get_by_location(location)
                index.update(self.supply_point_index_mapping(sp))

        from corehq.apps.commtrack.util import location_map_case_id
        caseblock = CaseBlock(
            create=True,
            case_type=USER_LOCATION_OWNER_MAP_TYPE,
            case_id=location_map_case_id(self),
            owner_id=self._id,
            index=index
        )

        self.submit_location_block(caseblock, "create_location_delegates")
    def get_location_map_case(self):
        """
        Returns the location mapping case for this supply point.

        That lets us give access to the supply point via
        delegate access. Returns None when no mapping case exists.
        """
        try:
            from corehq.apps.commtrack.util import location_map_case_id
            return CommCareCase.objects.get_case(location_map_case_id(self), self.domain)
        except CaseNotFound:
            return None
    @property
    def fixture_statuses(self):
        """Returns all of the last modified times for each fixture type"""
        # dict of fixture_type -> last-modified time (see get_fixture_statuses)
        return get_fixture_statuses(self._id)
def fixture_status(self, fixture_type):
try:
return self.fixture_statuses[fixture_type]
except KeyError:
from corehq.apps.fixtures.models import UserLookupTableStatus
return UserLookupTableStatus.DEFAULT_LAST_MODIFIED
    def update_fixture_status(self, fixture_type):
        """Record 'now' as the last-modified time of ``fixture_type`` for this user."""
        from corehq.apps.fixtures.models import UserLookupTableStatus
        now = datetime.utcnow()
        user_fixture_sync, new = UserLookupTableStatus.objects.get_or_create(
            user_id=self._id,
            fixture_type=fixture_type,
            defaults={'last_modified': now},
        )
        if not new:
            user_fixture_sync.last_modified = now
            user_fixture_sync.save()
        # invalidate the cached per-user statuses
        get_fixture_statuses.clear(self._id)
def __repr__(self):
return ("{class_name}(username={self.username!r})".format(
class_name=self.__class__.__name__,
self=self
))
    @property
    @memoized
    def memoized_usercase(self):
        # per-instance cached copy of get_usercase()
        return self.get_usercase()
    def get_usercase(self):
        # this user's usercase in their domain; may be empty/None when no
        # usercase exists (see get_usercase_id's falsy check)
        return CommCareCase.objects.get_case_by_external_id(self.domain, self._id, USERCASE_TYPE)
@quickcache(['self._id'], lambda _: settings.UNIT_TESTING)
def get_usercase_id(self):
case = self.get_usercase()
return case.case_id if case else None
    def update_device_id_last_used(self, device_id, when=None, commcare_version=None, device_app_meta=None):
        """
        Sets the last_used date for the device to be the current time
        Does NOT save the user object.

        :returns: True if user was updated and needs to be saved
        """
        when = when or datetime.utcnow()

        device = self.get_device(device_id)
        if device:
            do_update = False
            if when.date() > device.last_used.date():
                # throttle full updates to once per calendar day per device
                do_update = True
            elif device_app_meta:
                existing_app_meta = device.get_meta_for_app(device_app_meta.app_id)
                if not existing_app_meta:
                    do_update = True
                else:
                    last_request = device_app_meta.last_request
                    # NOTE(review): this compares ``last_request`` against
                    # ``existing_app_meta.last_request.date()``. If both are
                    # datetimes, a datetime-vs-date comparison raises
                    # TypeError on Python 3 — confirm the intended types here.
                    do_update = (
                        last_request
                        and existing_app_meta.last_request
                        and last_request > existing_app_meta.last_request.date()
                    )

            if do_update:
                device.last_used = when
                device.update_meta(commcare_version, device_app_meta)

                # refresh the denormalized "last device" snapshot, keeping
                # only the most recently used app's metadata
                self.last_device = DeviceIdLastUsed.wrap(self.get_last_used_device().to_json())
                meta = self.last_device.get_last_used_app_meta()
                self.last_device.app_meta = [meta] if meta else []
                return True
        else:
            # first time we've seen this device for the user
            device = DeviceIdLastUsed(device_id=device_id, last_used=when)
            device.update_meta(commcare_version, device_app_meta)
            self.devices.append(device)
            self.last_device = device
            return True
        return False
def get_last_used_device(self):
if not self.devices:
return None
return max(self.devices, key=lambda dev: dev.last_used)
def get_device(self, device_id):
for device in self.devices:
if device.device_id == device_id:
return device
def update_fixture_status_for_users(user_ids, fixture_type):
    """Mark ``fixture_type`` as modified now for many users in bulk.

    NOTE: unlike ``update_fixture_status`` this does not create rows for
    users that have none yet — it only updates existing ones.
    """
    from corehq.apps.fixtures.models import UserLookupTableStatus
    from dimagi.utils.chunked import chunked

    now = datetime.utcnow()
    for ids in chunked(user_ids, 50):
        (UserLookupTableStatus.objects
         .filter(user_id__in=ids,
                 fixture_type=fixture_type)
         .update(last_modified=now))
    for user_id in user_ids:
        # invalidate each user's cached statuses
        get_fixture_statuses.clear(user_id)
@quickcache(['user_id'], skip_arg=lambda user_id: settings.UNIT_TESTING)
def get_fixture_statuses(user_id):
    """Map every fixture type to its last-modified time for ``user_id``.

    Types with no stored row fall back to the default last-modified value.
    """
    from corehq.apps.fixtures.models import UserLookupTableType, UserLookupTableStatus
    statuses = {}
    for fixture_type, _name in UserLookupTableType.CHOICES:
        statuses[fixture_type] = UserLookupTableStatus.DEFAULT_LAST_MODIFIED
    for row in UserLookupTableStatus.objects.filter(user_id=user_id):
        statuses[row.fixture_type] = row.last_modified
    return statuses
class WebUser(CouchUser, MultiMembershipMixin, CommCareMobileContactMixin):
    """A web (browser) user who may belong to many domains.

    Unlike mobile workers, location and role data live on the per-domain
    membership (``get_domain_membership``) rather than on the user doc.
    """

    program_id = StringProperty()
    last_password_set = DateTimeProperty(default=datetime(year=1900, month=1, day=1))
    fcm_device_token = StringProperty()

    # this property is used to mark users who signed up from internal invitations
    # such as those going through the recruiting pipeline
    # to better mark them in our analytics
    atypical_user = BooleanProperty(default=False)

    def is_global_admin(self):
        # override this function to pass global admin rights off to django
        return self.is_superuser

    @classmethod
    def create(cls, domain, username, password, created_by, created_via, email=None, uuid='', date='',
               metadata=None, by_domain_required_for_log=True, **kwargs):
        """Create a WebUser and, when ``domain`` is given, add a membership to it."""
        web_user = super(WebUser, cls).create(domain, username, password, created_by, created_via, email, uuid,
                                              date, metadata=metadata, **kwargs)
        if domain:
            web_user.add_domain_membership(domain, **kwargs)
        web_user.save()
        web_user.log_user_create(domain, created_by, created_via,
                                 by_domain_required_for_log=by_domain_required_for_log)
        return web_user

    def is_commcare_user(self):
        return False

    def is_web_user(self):
        return True

    def to_ota_restore_user(self, domain, request_user=None):
        # wrap this user for an OTA restore in the given domain
        return OTARestoreWebUser(
            domain,
            self,
            request_user=request_user
        )

    @quickcache(['self._id', 'domain'], lambda _: settings.UNIT_TESTING)
    def get_usercase_id(self, domain):
        # case_id of this user's usercase in ``domain``, or None
        case = self.get_usercase_by_domain(domain)
        return case.case_id if case else None

    def get_email(self):
        # Do not change the name of this method because this is implementing
        # get_email() from the CommCareMobileContactMixin
        return self.email or self.username

    def get_time_zone(self):
        """Time zone name for the current (or first) domain, or None."""
        from corehq.util.timezones.utils import get_timezone_for_user

        if hasattr(self, 'current_domain'):
            domain = self.current_domain
        elif len(self.domains) > 0:
            domain = self.domains[0]
        else:
            return None

        timezone = get_timezone_for_user(self.user_id, domain)
        return timezone.zone

    def get_language_code(self):
        return self.language

    def get_domains(self):
        return [dm.domain for dm in self.domain_memberships]

    @classmethod
    def get_admins_by_domain(cls, domain):
        """Yield WebUsers in ``domain`` who are admins of that domain."""
        user_ids = cls.ids_by_domain(domain)
        for user_doc in iter_docs(cls.get_db(), user_ids):
            web_user = cls.wrap(user_doc)
            if web_user.is_domain_admin(domain):
                yield web_user

    @classmethod
    def get_dimagi_emails_by_domain(cls, domain):
        """Yield email addresses of Dimagi users belonging to ``domain``."""
        user_ids = cls.ids_by_domain(domain)
        for user_doc in iter_docs(cls.get_db(), user_ids):
            if is_dimagi_email(user_doc['email']):
                yield user_doc['email']

    def save(self, fire_signals=True, **params):
        super().save(fire_signals=fire_signals, **params)
        if fire_signals:
            # keep call-center usercases in sync for every domain of the user
            from corehq.apps.callcenter.tasks import sync_web_user_usercases_if_applicable
            for domain in self.get_domains():
                sync_web_user_usercases_if_applicable(self, domain)

    def add_to_assigned_locations(self, domain, location):
        """Append ``location`` to the assigned locations in ``domain``.

        When the user has no primary location there yet, the location becomes
        primary via set_location (which also saves).
        """
        membership = self.get_domain_membership(domain)

        if membership.location_id:
            if location.location_id in membership.assigned_location_ids:
                return
            membership.assigned_location_ids.append(location.location_id)
            self.get_sql_locations.reset_cache(self)
            self.save()
        else:
            self.set_location(domain, location)

    def set_location(self, domain, location_object_or_id):
        # set the primary location for user's domain_membership
        if isinstance(location_object_or_id, str):
            location_id = location_object_or_id
        else:
            location_id = location_object_or_id.location_id

        if not location_id:
            raise AssertionError("You can't set an unsaved location")

        membership = self.get_domain_membership(domain)
        membership.location_id = location_id
        # NOTE(review): this membership check reads ``self.location_id``
        # rather than the local ``location_id`` being assigned — confirm that
        # is intentional and not a latent bug.
        if self.location_id not in membership.assigned_location_ids:
            membership.assigned_location_ids.append(location_id)
            self.get_sql_locations.reset_cache(self)
        self.get_sql_location.reset_cache(self)
        self.save()

    def unset_location(self, domain, fall_back_to_next=False, commit=True):
        """
        Change primary location to next location from assigned_location_ids,
        if there are no more locations in assigned_location_ids, primary location is cleared
        """
        membership = self.get_domain_membership(domain)
        old_location_id = membership.location_id
        if old_location_id:
            membership.assigned_location_ids.remove(old_location_id)
            self.get_sql_locations.reset_cache(self)
        if membership.assigned_location_ids and fall_back_to_next:
            membership.location_id = membership.assigned_location_ids[0]
        else:
            membership.location_id = None
        self.get_sql_location.reset_cache(self)
        if commit:
            self.save()

    def unset_location_by_id(self, domain, location_id, fall_back_to_next=False):
        """
        Unset a location that the user is assigned to that may or may not be primary location
        """
        membership = self.get_domain_membership(domain)
        primary_id = membership.location_id
        if location_id == primary_id:
            # check if primary location
            self.unset_location(domain, fall_back_to_next)
        else:
            membership.assigned_location_ids.remove(location_id)
            self.get_sql_locations.reset_cache(self)
            self.save()

    def reset_locations(self, domain, location_ids, commit=True):
        """
        reset locations to given list of location_ids. Before calling this, primary location
        should be explicitly set/unset via set_location/unset_location
        """
        membership = self.get_domain_membership(domain)
        membership.assigned_location_ids = location_ids
        if not membership.location_id and location_ids:
            # fall back to the first assigned location as primary
            membership.location_id = location_ids[0]
        self.get_sql_locations.reset_cache(self)
        if commit:
            self.save()

    @memoized
    def get_sql_location(self, domain):
        # primary SQLLocation in ``domain``, or None
        from corehq.apps.locations.models import SQLLocation
        loc_id = self.get_location_id(domain)
        if loc_id:
            return SQLLocation.objects.get_or_None(domain=domain, location_id=loc_id)

    def get_location_ids(self, domain):
        return getattr(self.get_domain_membership(domain), 'assigned_location_ids', [])

    @memoized
    def get_sql_locations(self, domain=None):
        # queryset of all assigned SQLLocations for ``domain``
        from corehq.apps.locations.models import SQLLocation
        loc_ids = self.get_location_ids(domain)
        if loc_ids:
            return SQLLocation.objects.get_locations(loc_ids)
        else:
            return SQLLocation.objects.none()

    def get_location(self, domain):
        return self.get_sql_location(domain)

    def get_usercase_by_domain(self, domain):
        # this user's usercase in ``domain``
        return CommCareCase.objects.get_case_by_external_id(domain, self._id, USERCASE_TYPE)
class FakeUser(WebUser):
    """
    Prevent actually saving user types that don't exist in the database
    """

    def save(self, **kwargs):
        # deliberately unsaveable
        raise NotImplementedError("You aren't allowed to do that!")

    @property
    def _id(self):
        # fixed sentinel id; never collides with a stored doc
        return "fake-user"
class InvalidUser(FakeUser):
    """
    Public users have read-only access to certain domains
    """

    def is_member_of(self, domain_qs):
        # never considered a member of any domain
        return False
class DomainRequest(models.Model):
    '''
    Request to join domain. Requester might or might not already have an account.
    '''
    email = models.CharField(max_length=100, db_index=True)
    full_name = models.CharField(max_length=100, db_index=True)
    is_approved = models.BooleanField(default=False)
    domain = models.CharField(max_length=255, db_index=True)

    class Meta(object):
        app_label = "users"

    @classmethod
    def by_domain(cls, domain, is_approved=False):
        # queryset of requests for a domain; pending ones by default
        return DomainRequest.objects.filter(domain=domain, is_approved=is_approved)

    @classmethod
    def by_email(cls, domain, email, is_approved=False):
        # first matching request for this email in the domain, or None
        return DomainRequest.by_domain(domain, is_approved).filter(email=email).first()

    def send_approval_email(self):
        """Email the requester that their request to join the domain was approved."""
        domain_name = Domain.get_by_name(self.domain).display_name()
        params = {
            'domain_name': domain_name,
            'url': absolute_reverse("domain_homepage", args=[self.domain]),
        }
        text_content = render_to_string("users/email/new_domain_request.txt", params)
        html_content = render_to_string("users/email/new_domain_request.html", params)
        subject = _('Request to join %s approved') % domain_name
        send_html_email_async.delay(subject, self.email, html_content, text_content=text_content,
                                    email_from=settings.DEFAULT_FROM_EMAIL)

    def send_request_email(self):
        """Notify every admin of the domain that someone requested access."""
        domain_name = Domain.get_by_name(self.domain).display_name()
        params = {
            'full_name': self.full_name,
            'email': self.email,
            'domain_name': domain_name,
            'url': absolute_reverse("web_users", args=[self.domain]),
        }
        recipients = {u.get_email() for u in
                      WebUser.get_admins_by_domain(self.domain)}
        text_content = render_to_string("users/email/request_domain_access.txt", params)
        html_content = render_to_string("users/email/request_domain_access.html", params)
        subject = _('Request from %(name)s to join %(domain)s') % {
            'name': self.full_name,
            'domain': domain_name,
        }
        send_html_email_async.delay(subject, recipients, html_content, text_content=text_content,
                                    email_from=settings.DEFAULT_FROM_EMAIL)
class InvitationStatus(object):
    # delivery states for invitation emails — presumably stored in
    # Invitation.email_status (confirm against the email-tracking code)
    BOUNCED = "Bounced"
    SENT = "Sent"
    DELIVERED = "Delivered"
class Invitation(models.Model):
    """An invitation for an email address to join a domain as a web user.

    Accepting it (``accept_invitation_and_join_domain``) adds the user to
    the domain with the invited role/location/program and notifies the
    inviting user by email.
    """
    EMAIL_ID_PREFIX = "Invitation:"

    uuid = models.UUIDField(primary_key=True, db_index=True, default=uuid4)
    email = models.CharField(max_length=255, db_index=True)
    email_status = models.CharField(max_length=126, null=True)
    invited_by = models.CharField(max_length=126)           # couch id of a WebUser
    invited_on = models.DateTimeField()
    is_accepted = models.BooleanField(default=False)
    domain = models.CharField(max_length=255)
    role = models.CharField(max_length=100, null=True)      # role qualified ID
    program = models.CharField(max_length=126, null=True)   # couch id of a Program
    supply_point = models.CharField(max_length=126, null=True)  # couch id of a Location

    def __repr__(self):
        # Fix: the closing quote after the email value was missing
        # (previously rendered as Invitation(domain='d', email='e)
        return f"Invitation(domain='{self.domain}', email='{self.email}')"

    @classmethod
    def by_domain(cls, domain, is_accepted=False, **filters):
        # queryset of (by default pending) invitations for a domain
        return Invitation.objects.filter(domain=domain, is_accepted=is_accepted, **filters)

    @classmethod
    def by_email(cls, email):
        # all pending invitations for this email, across domains
        return Invitation.objects.filter(email=email, is_accepted=False)

    @property
    def is_expired(self):
        # invitations are valid for one month after being sent
        return self.invited_on.date() + relativedelta(months=1) < datetime.utcnow().date()

    @property
    def email_marked_as_bounced(self):
        return BouncedEmail.get_hard_bounced_emails([self.email])

    def send_activation_email(self, remaining_days=30):
        """Send (or re-send) the invitation email, localized to the domain's language.

        If the invitee previously filed an approved DomainRequest, the email
        is phrased as an approval instead of an invitation.
        """
        inviter = CouchUser.get_by_user_id(self.invited_by)
        url = absolute_reverse("domain_accept_invitation", args=[self.domain, self.uuid])
        params = {
            "domain": self.domain,
            "url": url,
            "days": remaining_days,
            "inviter": inviter.formatted_name,
            "url_prefix": get_static_url_prefix(),
        }

        domain_request = DomainRequest.by_email(self.domain, self.email, is_approved=True)
        lang = guess_domain_language(self.domain)
        with override_language(lang):
            if domain_request is None:
                text_content = render_to_string("domain/email/domain_invite.txt", params)
                html_content = render_to_string("domain/email/domain_invite.html", params)
                subject = _('Invitation from %s to join CommCareHQ') % inviter.formatted_name
            else:
                text_content = render_to_string("domain/email/domain_request_approval.txt", params)
                html_content = render_to_string("domain/email/domain_request_approval.html", params)
                subject = _('Request to join CommCareHQ approved')
            send_html_email_async.delay(subject, self.email, html_content,
                                        text_content=text_content,
                                        cc=[inviter.get_email()],
                                        email_from=settings.DEFAULT_FROM_EMAIL,
                                        messaging_event_id=f"{self.EMAIL_ID_PREFIX}{self.uuid}")

    def get_role_name(self):
        """Human-readable name of the invited role, or None when no role was set."""
        if self.role:
            if self.role == 'admin':
                return _('Admin')
            else:
                role_id = self.role[len('user-role:'):]
                try:
                    return UserRole.objects.by_couch_id(role_id).name
                except UserRole.DoesNotExist:
                    return _('Unknown Role')
        else:
            return None

    def _send_confirmation_email(self):
        """
        This sends the confirmation email to the invited_by user that their
        invitation was accepted.
        :return:
        """
        invited_user = self.email
        subject = _('{} accepted your invitation to CommCare HQ').format(invited_user)
        recipient = WebUser.get_by_user_id(self.invited_by).get_email()
        context = {
            'invited_user': invited_user,
        }
        html_content = render_to_string('domain/email/invite_confirmation.html',
                                        context)
        text_content = render_to_string('domain/email/invite_confirmation.txt',
                                        context)
        send_html_email_async.delay(
            subject,
            recipient,
            html_content,
            text_content=text_content
        )

    def accept_invitation_and_join_domain(self, web_user):
        """
        Call this method to confirm that a user has accepted the invite to
        a domain and add them as a member to the domain in the invitation.
        :param web_user: WebUser
        """
        web_user.add_as_web_user(
            self.domain,
            role=self.role,
            location_id=self.supply_point,
            program_id=self.program,
        )
        self.is_accepted = True
        self.save()
        self._send_confirmation_email()
class DomainRemovalRecord(DeleteRecord):
    """Undo record written when a web user is removed from a domain."""
    user_id = StringProperty()
    domain_membership = SchemaProperty(DomainMembership)

    def undo(self):
        # re-attach the saved membership and domain to the user
        user = WebUser.get_by_user_id(self.user_id)
        user.domain_memberships.append(self.domain_membership)
        user.domains.append(self.domain)
        user.save()
class UserReportingMetadataStaging(models.Model):
    """Staging row for mobile-user reporting metadata.

    Form submissions, syncs, and heartbeats each upsert one row per
    (domain, user_id, app_id) via raw SQL; ``process_record`` later applies
    the row to the CouchUser with at most one save. This batches frequent
    updates so user docs aren't written on every request.
    """
    id = models.BigAutoField(primary_key=True)
    domain = models.TextField()
    user_id = models.TextField()
    app_id = models.TextField(null=True)  # not all form submissions include an app_id
    modified_on = models.DateTimeField(auto_now=True)
    created_on = models.DateTimeField(auto_now=True)

    # should build_id actually be nullable?
    build_id = models.TextField(null=True)

    # The following properties are null if a user has not submitted a form since their last sync
    xform_version = models.IntegerField(null=True)
    form_meta = models.JSONField(null=True)  # This could be filtered to only the parts we need
    received_on = models.DateTimeField(null=True)

    # The following properties are null if a user has not synced since their last form submission
    device_id = models.TextField(null=True)
    sync_date = models.DateTimeField(null=True)

    # The following properties are set when a mobile heartbeat occurs
    app_version = models.IntegerField(null=True)
    num_unsent_forms = models.IntegerField(null=True)
    num_quarantined_forms = models.IntegerField(null=True)
    commcare_version = models.TextField(null=True)
    build_profile_id = models.TextField(null=True)
    last_heartbeat = models.DateTimeField(null=True)

    @classmethod
    def add_submission(cls, domain, user_id, app_id, build_id, version, metadata, received_on):
        """Upsert submission metadata; only a newer received_on overwrites."""
        params = {
            'domain': domain,
            'user_id': user_id,
            'app_id': app_id,
            'build_id': build_id,
            'xform_version': version,
            'form_meta': json.dumps(metadata),
            'received_on': received_on,
        }
        with connection.cursor() as cursor:
            cursor.execute(f"""
                INSERT INTO {cls._meta.db_table} AS staging (
                    domain, user_id, app_id, modified_on, created_on,
                    build_id,
                    xform_version, form_meta, received_on
                ) VALUES (
                    %(domain)s, %(user_id)s, %(app_id)s, CLOCK_TIMESTAMP(), CLOCK_TIMESTAMP(),
                    %(build_id)s,
                    %(xform_version)s, %(form_meta)s, %(received_on)s
                )
                ON CONFLICT (domain, user_id, app_id)
                DO UPDATE SET
                    modified_on = CLOCK_TIMESTAMP(),
                    build_id = EXCLUDED.build_id,
                    xform_version = EXCLUDED.xform_version,
                    form_meta = EXCLUDED.form_meta,
                    received_on = EXCLUDED.received_on
                WHERE staging.received_on IS NULL OR EXCLUDED.received_on > staging.received_on
                """, params)

    @classmethod
    def add_sync(cls, domain, user_id, app_id, build_id, sync_date, device_id):
        """Upsert sync metadata; only a newer sync_date overwrites."""
        params = {
            'domain': domain,
            'user_id': user_id,
            'app_id': app_id,
            'build_id': build_id,
            'sync_date': sync_date,
            'device_id': device_id,
        }
        with connection.cursor() as cursor:
            cursor.execute(f"""
                INSERT INTO {cls._meta.db_table} AS staging (
                    domain, user_id, app_id, modified_on, created_on,
                    build_id,
                    sync_date, device_id
                ) VALUES (
                    %(domain)s, %(user_id)s, %(app_id)s, CLOCK_TIMESTAMP(), CLOCK_TIMESTAMP(),
                    %(build_id)s,
                    %(sync_date)s, %(device_id)s
                )
                ON CONFLICT (domain, user_id, app_id)
                DO UPDATE SET
                    modified_on = CLOCK_TIMESTAMP(),
                    build_id = EXCLUDED.build_id,
                    sync_date = EXCLUDED.sync_date,
                    device_id = EXCLUDED.device_id
                WHERE staging.sync_date IS NULL OR EXCLUDED.sync_date > staging.sync_date
                """, params)

    @classmethod
    def add_heartbeat(cls, domain, user_id, app_id, build_id, sync_date, device_id,
                      app_version, num_unsent_forms, num_quarantined_forms,
                      commcare_version, build_profile_id):
        """Upsert heartbeat metadata; only a newer last_heartbeat overwrites.

        build_id/sync_date/device_id are COALESCEd so a heartbeat that omits
        them doesn't clobber values a previous sync wrote.
        """
        params = {
            'domain': domain,
            'user_id': user_id,
            'app_id': app_id,
            'build_id': build_id,
            'sync_date': sync_date,
            'device_id': device_id,
            'app_version': app_version,
            'num_unsent_forms': num_unsent_forms,
            'num_quarantined_forms': num_quarantined_forms,
            'commcare_version': commcare_version,
            'build_profile_id': build_profile_id,
        }
        with connection.cursor() as cursor:
            cursor.execute(f"""
                INSERT INTO {cls._meta.db_table} AS staging (
                    domain, user_id, app_id, modified_on, created_on,
                    build_id,
                    sync_date, device_id,
                    app_version, num_unsent_forms, num_quarantined_forms,
                    commcare_version, build_profile_id, last_heartbeat
                ) VALUES (
                    %(domain)s, %(user_id)s, %(app_id)s, CLOCK_TIMESTAMP(), CLOCK_TIMESTAMP(),
                    %(build_id)s,
                    %(sync_date)s, %(device_id)s,
                    %(app_version)s, %(num_unsent_forms)s, %(num_quarantined_forms)s,
                    %(commcare_version)s, %(build_profile_id)s, CLOCK_TIMESTAMP()
                )
                ON CONFLICT (domain, user_id, app_id)
                DO UPDATE SET
                    modified_on = CLOCK_TIMESTAMP(),
                    build_id = COALESCE(EXCLUDED.build_id, staging.build_id),
                    sync_date = COALESCE(EXCLUDED.sync_date, staging.sync_date),
                    device_id = COALESCE(EXCLUDED.device_id, staging.device_id),
                    app_version = EXCLUDED.app_version,
                    num_unsent_forms = EXCLUDED.num_unsent_forms,
                    num_quarantined_forms = EXCLUDED.num_quarantined_forms,
                    commcare_version = EXCLUDED.commcare_version,
                    build_profile_id = EXCLUDED.build_profile_id,
                    last_heartbeat = CLOCK_TIMESTAMP()
                WHERE staging.last_heartbeat is NULL or EXCLUDED.last_heartbeat > staging.last_heartbeat
                """, params)

    def process_record(self, user):
        """Apply this staged row to ``user`` (a CouchUser), saving at most once.

        Deleted/missing users are skipped. Submission data and sync/heartbeat
        data are applied independently; the doc is only saved if either
        helper reports a change.
        """
        from corehq.pillows.synclog import mark_last_synclog
        from pillowtop.processors.form import mark_latest_submission

        save = False
        if not user or user.is_deleted():
            return

        if self.received_on:
            save = mark_latest_submission(
                self.domain, user, self.app_id, self.build_id,
                self.xform_version, self.form_meta, self.received_on, save_user=False
            )
        if self.device_id or self.sync_date or self.last_heartbeat:
            device_app_meta = DeviceAppMeta(
                app_id=self.app_id,
                build_id=self.build_id,
                build_version=self.app_version,
                last_heartbeat=self.last_heartbeat,
                last_sync=self.sync_date,
                num_unsent_forms=self.num_unsent_forms,
                num_quarantined_forms=self.num_quarantined_forms
            )
            if not self.last_heartbeat:
                # coming from sync
                latest_build_date = self.sync_date
            else:
                # coming from heartbeat
                latest_build_date = self.modified_on
            save |= mark_last_synclog(
                self.domain, user, self.app_id, self.build_id,
                self.sync_date, latest_build_date, self.device_id, device_app_meta,
                commcare_version=self.commcare_version, build_profile_id=self.build_profile_id,
                save_user=False
            )
        if save:
            user.save(fire_signals=False)

    class Meta(object):
        unique_together = ('domain', 'user_id', 'app_id')
class ApiKeyManager(models.Manager):
    """Default manager for HQApiKey that hides deactivated keys."""

    def get_queryset(self):
        # Deactivated keys stay in the table but are invisible here;
        # use HQApiKey.all_objects to include them.
        base_qs = super().get_queryset()
        return base_qs.filter(is_active=True)
class HQApiKey(models.Model):
    """API key belonging to a Django user, optionally scoped to a domain/role."""
    user = models.ForeignKey(User, related_name='api_keys', on_delete=models.CASCADE)
    key = models.CharField(max_length=128, blank=True, default='', db_index=True)
    name = models.CharField(max_length=255, blank=True, default='')
    created = models.DateTimeField(default=timezone.now)
    ip_allowlist = ArrayField(models.GenericIPAddressField(), default=list)
    domain = models.CharField(max_length=255, blank=True, default='')
    role_id = models.CharField(max_length=40, blank=True, default='')
    is_active = models.BooleanField(default=True)
    deactivated_on = models.DateTimeField(blank=True, null=True)
    expiration_date = models.DateTimeField(blank=True, null=True)  # Not yet used

    # default manager hides deactivated keys; all_objects sees everything
    objects = ApiKeyManager()
    all_objects = models.Manager()

    class Meta(object):
        unique_together = ('user', 'name')

    def save(self, *args, **kwargs):
        # generate a key value on first save
        if not self.key:
            self.key = self.generate_key()

        return super().save(*args, **kwargs)

    def generate_key(self):
        # From tastypie
        # HMAC-SHA1 over a fresh uuid4; the entropy comes from uuid4
        new_uuid = uuid4()
        return hmac.new(new_uuid.bytes, digestmod=sha1).hexdigest()

    @property
    @memoized
    def role(self):
        """UserRole for this key: explicit role_id first, else the user's role
        in ``domain``, else None."""
        if self.role_id:
            try:
                return UserRole.objects.by_couch_id(self.role_id)
            except UserRole.DoesNotExist:
                logging.exception('no role with id %s found in domain %s' % (self.role_id, self.domain))
        elif self.domain:
            return CouchUser.from_django_user(self.user).get_domain_membership(self.domain).role
        return None
class UserHistory(models.Model):
    """
    HQ Adaptation of Django's LogEntry model

    One row per create/update/delete of a user, recording who changed whom,
    from which domain, and what changed.
    """
    CREATE = 1
    UPDATE = 2
    DELETE = 3
    ACTION_CHOICES = (
        (CREATE, _('Create')),
        (UPDATE, _('Update')),
        (DELETE, _('Delete')),
    )
    by_domain = models.CharField(max_length=255, null=True)
    for_domain = models.CharField(max_length=255, null=True)
    user_type = models.CharField(max_length=255)  # CommCareUser / WebUser
    user_repr = models.CharField(max_length=255, null=True)
    user_id = models.CharField(max_length=128)
    changed_by_repr = models.CharField(max_length=255, null=True)
    changed_by = models.CharField(max_length=128)
    # ToDo: remove post migration/reset of existing records
    message = models.TextField(blank=True, null=True)
    # JSON structured replacement for the deprecated text message field
    change_messages = models.JSONField(default=dict)
    changed_at = models.DateTimeField(auto_now_add=True, editable=False)
    action = models.PositiveSmallIntegerField(choices=ACTION_CHOICES)
    user_upload_record = models.ForeignKey(UserUploadRecord, null=True, on_delete=models.SET_NULL)
    # ToDo: remove post migration/reset of existing records
    """
    dict with keys:
       changed_via: one of the USER_CHANGE_VIA_* constants
       changes: a dict of CouchUser attributes that changed and their new values
    """
    details = models.JSONField(default=dict)

    # ToDo: remove blank=true post migration/reset of existing records since it will always be present
    # same as the deprecated details.changed_via
    # one of the USER_CHANGE_VIA_* constants
    changed_via = models.CharField(max_length=255, blank=True)
    # same as the deprecated details.changes
    # a dict of CouchUser attributes that changed and their new values
    changes = models.JSONField(default=dict, encoder=DjangoJSONEncoder)

    class Meta:
        indexes = [
            models.Index(fields=['by_domain']),
            models.Index(fields=['for_domain']),
        ]
class DeactivateMobileWorkerTriggerUpdateMessage:
    # outcome labels returned by DeactivateMobileWorkerTrigger.update_trigger
    UPDATED = 'updated'
    CREATED = 'created'
    DELETED = 'deleted'
class DeactivateMobileWorkerTrigger(models.Model):
    """
    This determines if a Mobile Worker / CommCareUser is to be deactivated
    after a certain date.
    """
    domain = models.CharField(max_length=255)
    user_id = models.CharField(max_length=255)
    deactivate_after = models.DateField()

    @classmethod
    def deactivate_mobile_workers(cls, domain, date_deactivation):
        """
        This deactivates all CommCareUsers who have a matching
        DeactivateMobileWorkerTrigger with deactivate_after

        Processed triggers are deleted once their users are deactivated.

        :param domain: String - domain name
        :param date_deactivation: Date
        """
        trigger_query = cls.objects.filter(
            domain=domain,
            deactivate_after__lte=date_deactivation
        )
        user_ids = trigger_query.values_list('user_id', flat=True)
        for chunked_ids in chunked(user_ids, 100):
            bulk_auto_deactivate_commcare_users(chunked_ids, domain)
            cls.objects.filter(domain=domain, user_id__in=chunked_ids).delete()

    @classmethod
    def update_trigger(cls, domain, user_id, deactivate_after):
        """Create, update, or delete the trigger for one user.

        :param deactivate_after: a ``date``, an ``MM-YYYY`` string, or falsy
            (falsy deletes any existing trigger)
        :returns: a DeactivateMobileWorkerTriggerUpdateMessage constant, or
            None when nothing changed
        :raises ValueError: if a string value is not in MM-YYYY format
        """
        existing_trigger = cls.objects.filter(domain=domain, user_id=user_id)
        if not deactivate_after:
            if existing_trigger.exists():
                existing_trigger.delete()
                return DeactivateMobileWorkerTriggerUpdateMessage.DELETED
            # noop
            return

        if isinstance(deactivate_after, str):
            try:
                deactivate_after = get_date_from_month_and_year_string(deactivate_after)
            except ValueError:
                raise ValueError("Deactivate After Date is not in MM-YYYY format")

        if isinstance(deactivate_after, date):
            if existing_trigger.exists():
                trigger = existing_trigger.first()
                if trigger.deactivate_after == deactivate_after:
                    # don't update or record a message
                    return
                trigger.deactivate_after = deactivate_after
                trigger.save()
                return DeactivateMobileWorkerTriggerUpdateMessage.UPDATED
            else:
                cls.objects.create(
                    domain=domain,
                    user_id=user_id,
                    deactivate_after=deactivate_after,
                )
                return DeactivateMobileWorkerTriggerUpdateMessage.CREATED

    @classmethod
    def get_deactivate_after_date(cls, domain, user_id):
        # the scheduled deactivation date for this user, or None
        existing_trigger = cls.objects.filter(domain=domain, user_id=user_id)
        if not existing_trigger.exists():
            return None
        return existing_trigger.first().deactivate_after
| bsd-3-clause | fa1222eda9f10503a3b0a31d0fa4e408 | 36.969677 | 114 | 0.616618 | 4.027704 | false | false | false | false |
dimagi/commcare-hq | corehq/sql_db/operations.py | 1 | 5753 | import os
import re
from django.conf import settings
from django.db import router
from django.db.migrations import RunPython, RunSQL
from django.template import engines
import attr
from corehq.util.django_migrations import noop_migration
NOOP = object()
class IndexRenameOperationException(Exception):
    """Raised when a table or index name fails validation in an index-rename migration."""
    pass
class RunSqlLazy(RunSQL):
    """
    Extension of RunSQL that reads the SQL contents from a file "just in time".

    Also supports reading the SQL as a Django template and rendering
    it with the provided template context.
    """
    def __init__(self, sql_template_path, reverse_sql_template_path, template_context=None):
        self.template_context = template_context or {}
        # render lazily and only once per direction
        self.rendered_forwards = False
        self.rendered_backwards = False
        super(RunSqlLazy, self).__init__(sql_template_path, reverse_sql_template_path)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        # only render when the migration is actually allowed on this database
        if router.allow_migrate(schema_editor.connection.alias, app_label, **self.hints):
            if not self.rendered_forwards:
                self.sql = self._render_template(self.sql)
                self.rendered_forwards = True
            super(RunSqlLazy, self).database_forwards(app_label, schema_editor, from_state, to_state)

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        if router.allow_migrate(schema_editor.connection.alias, app_label, **self.hints):
            if self.reverse_sql:
                if not self.rendered_backwards:
                    self.reverse_sql = self._render_template(self.reverse_sql)
                    self.rendered_backwards = True
            super(RunSqlLazy, self).database_backwards(app_label, schema_editor, from_state, to_state)

    def _render_template(self, path):
        """Render the template: ``path`` may be the NOOP sentinel, an inline
        SQL() template, or a file path."""
        if path is NOOP:
            return "SELECT 1"
        if isinstance(path, SQL):
            template_string = path.value
        else:
            with open(path, encoding='utf-8') as f:
                template_string = f.read()

        template = engines['django'].from_string(template_string)
        return template.render(self.template_context)
@attr.s
class SQL(object):
    """Marker class for inline SQL template strings, used instead of a
    path to a template file (see RunSqlLazy._render_template)."""
    value = attr.ib()  # the raw SQL template text
class RawSQLMigration(object):
    """
    Helper for building raw SQL migration operations from template files.

    Usage:

        migrator = RawSQLMigration(('base', 'path'), {'variable': 'value'})
        migrator.get_migration('sql_template.sql')

    The reverse migration is a no-op by default. Pass ``None`` as the
    reverse template to make the migration non-reversible:

        migrator.get_migration('sql_template.sql', None)

    Inline SQL can be given instead of a template name by wrapping it
    in ``SQL(...)``:

        migrator.get_migration(
            'get_something.sql',
            SQL("DROP FUNCTION get_something(TEXT);"),
        )
    """

    def __init__(self, base_path_tuple, template_context=None):
        self.template_context = template_context
        self.base_path = os.path.join(*base_path_tuple)

    def get_migration(self, forward_template, reverse_template=NOOP, testing_only=False):
        """Build a RunSqlLazy operation for the given templates."""
        if testing_only and not settings.UNIT_TESTING:
            return noop_migration()

        if isinstance(forward_template, SQL):
            forward_path = forward_template
        else:
            forward_path = os.path.join(self.base_path, forward_template)

        # The NOOP sentinel, None (non-reversible) and inline SQL pass
        # through untouched; only plain filenames are resolved against
        # the base path.
        if reverse_template is NOOP or reverse_template is None or isinstance(reverse_template, SQL):
            reverse_path = reverse_template
        else:
            reverse_path = os.path.join(self.base_path, reverse_template)

        return RunSqlLazy(
            forward_path,
            reverse_path,
            self.template_context
        )
def _validate_old_index_name(index_name, table_name):
if not index_name.startswith(table_name):
raise IndexRenameOperationException(
"Expected all indexes on table %s to start with the table name" % table_name
)
def _validate_identifier(name):
allowed_chars = re.compile(r'^[\w\$]+$')
if not allowed_chars.match(name):
raise IndexRenameOperationException("Invalid identifier given: %s" % name)
def _rename_table_indexes(from_table, to_table):
    """Return a RunPython-compatible callable that renames every index on
    ``from_table`` so its name starts with ``to_table`` instead."""
    def fcn(apps, schema_editor):
        with schema_editor.connection.cursor() as cursor:
            cursor.execute('SELECT indexname FROM pg_indexes WHERE tablename = %s', [from_table])
            index_names = [name for (name,) in cursor.fetchall()]
            for old_name in index_names:
                _validate_old_index_name(old_name, from_table)
                new_name = old_name.replace(from_table, to_table, 1)
                # Identifiers can't be parameterized, so validate both
                # names before interpolating them into the statement.
                _validate_identifier(old_name)
                _validate_identifier(new_name)
                cursor.execute('ALTER INDEX %s RENAME TO %s' % (old_name, new_name))
    return fcn
def rename_table_indexes(from_table, to_table):
    """
    Return a migration operation that renames all of ``from_table``'s indexes
    to avoid index-name collisions when renaming models. Place it immediately
    before the corresponding migrations.RenameModel operation.

    Not reversible: Django unapplies operations in LIFO order, but undoing
    this would require reversing the index rename and the table rename in
    FIFO order instead.
    """
    forwards = _rename_table_indexes(from_table, to_table)
    return RunPython(forwards)
| bsd-3-clause | 5bc244fcda6aa4aace552831675d3775 | 34.294479 | 102 | 0.652703 | 4.059986 | false | false | false | false |
dimagi/commcare-hq | corehq/messaging/scheduling/models/content.py | 1 | 18514 | import jsonfield as old_jsonfield
from contextlib import contextmanager
from copy import deepcopy
from datetime import datetime
from corehq import toggles
from corehq.apps.accounting.utils import domain_is_on_trial
from corehq.apps.app_manager.dbaccessors import get_app, get_latest_released_app
from corehq.apps.app_manager.exceptions import FormNotFoundException
from corehq.apps.domain.models import Domain
from corehq.apps.hqwebapp.tasks import send_mail_async
from corehq.apps.smsforms.app import start_session
from corehq.apps.smsforms.tasks import send_first_message
from corehq.apps.smsforms.util import form_requires_input, critical_section_for_smsforms_sessions
from corehq.form_processor.utils import is_commcarecase
from corehq.messaging.scheduling.models.abstract import Content
from corehq.apps.reminders.models import EmailUsage
from corehq.apps.sms.models import MessagingEvent, PhoneNumber, PhoneBlacklist, Email
from corehq.apps.sms.util import touchforms_error_is_config_error, get_formplayer_exception
from corehq.apps.smsforms.models import SQLXFormsSession
from memoized import memoized
from corehq.util.metrics import metrics_counter
from dimagi.utils.modules import to_function
from django.conf import settings
from django.db import models
from django.http import Http404
from corehq.apps.formplayer_api.smsforms.api import TouchformsError
def no_op_context_manager():
    """Return a context manager that does nothing on enter or exit.

    Used where an API expects a context manager but no critical section is
    needed (see SMSSurveyContent.get_critical_section).
    """
    # stdlib equivalent of the previous hand-rolled @contextmanager generator
    from contextlib import nullcontext
    return nullcontext()
class SMSContent(Content):
    """Scheduled-message content that sends a plain SMS."""

    # Translations of the message, keyed by language code (see
    # get_translation_from_message_dict usage in send()).
    message = old_jsonfield.JSONField(default=dict)

    def create_copy(self):
        """
        See Content.create_copy() for docstring
        """
        return SMSContent(
            message=deepcopy(self.message),
        )

    def render_message(self, message, recipient, logged_subevent):
        """Render ``message`` as a template for ``recipient``.

        Returns the rendered text, or None after recording an error on
        ``logged_subevent`` when the message is empty or rendering fails.
        """
        if not message:
            logged_subevent.error(MessagingEvent.ERROR_NO_MESSAGE)
            return None

        renderer = self.get_template_renderer(recipient)
        try:
            return renderer.render(message)
        except Exception:
            # was a bare ``except:``, which would also have swallowed
            # KeyboardInterrupt / SystemExit
            logged_subevent.error(MessagingEvent.ERROR_CANNOT_RENDER_MESSAGE)
            return None

    def send(self, recipient, logged_event, phone_entry=None):
        """Send this SMS to ``recipient``, logging the attempt as a
        subevent of ``logged_event``."""
        logged_subevent = logged_event.create_subevent_from_contact_and_content(
            recipient,
            self,
            case_id=self.case.case_id if self.case else None,
        )

        phone_entry_or_number = phone_entry or self.get_two_way_entry_or_phone_number(
            recipient, domain_for_toggles=logged_event.domain)
        if not phone_entry_or_number:
            logged_subevent.error(MessagingEvent.ERROR_NO_PHONE_NUMBER)
            return

        message = self.get_translation_from_message_dict(
            Domain.get_by_name(logged_event.domain),
            self.message,
            recipient.get_language_code()
        )
        message = self.render_message(message, recipient, logged_subevent)
        # NOTE(review): if rendering failed, message is None here and is still
        # passed through -- presumably send_sms_message tolerates a falsy
        # message; confirm before changing.
        self.send_sms_message(logged_event.domain, recipient, phone_entry_or_number, message, logged_subevent)
        logged_subevent.completed()
class EmailContent(Content):
    """Scheduled-message content that sends an email."""

    # Translations keyed by language code (see send()).
    subject = old_jsonfield.JSONField(default=dict)
    message = old_jsonfield.JSONField(default=dict)

    # Cap on total emails a trial-plan domain may send.
    TRIAL_MAX_EMAILS = 50

    def create_copy(self):
        """
        See Content.create_copy() for docstring
        """
        return EmailContent(
            subject=deepcopy(self.subject),
            message=deepcopy(self.message),
        )

    def render_subject_and_message(self, subject, message, recipient):
        """Render both templates for ``recipient``; returns (subject, message)."""
        renderer = self.get_template_renderer(recipient)
        return renderer.render(subject), renderer.render(message)

    def send(self, recipient, logged_event, phone_entry=None):
        """Send this email to ``recipient``, logging the attempt as a
        subevent of ``logged_event`` and recording usage."""
        email_usage = EmailUsage.get_or_create_usage_record(logged_event.domain)
        is_trial = domain_is_on_trial(logged_event.domain)
        domain_obj = Domain.get_by_name(logged_event.domain)

        logged_subevent = logged_event.create_subevent_from_contact_and_content(
            recipient,
            self,
            case_id=self.case.case_id if self.case else None,
        )

        subject = self.get_translation_from_message_dict(
            domain_obj,
            self.subject,
            recipient.get_language_code()
        )
        message = self.get_translation_from_message_dict(
            domain_obj,
            self.message,
            recipient.get_language_code()
        )
        try:
            subject, message = self.render_subject_and_message(subject, message, recipient)
        except Exception:
            # was a bare ``except:``, which would also have swallowed
            # KeyboardInterrupt / SystemExit
            logged_subevent.error(MessagingEvent.ERROR_CANNOT_RENDER_MESSAGE)
            return

        subject = subject or '(No Subject)'
        if not message:
            logged_subevent.error(MessagingEvent.ERROR_NO_MESSAGE)
            return

        email_address = recipient.get_email()
        if not email_address:
            logged_subevent.error(MessagingEvent.ERROR_NO_EMAIL_ADDRESS)
            return

        if is_trial and EmailUsage.get_total_count(logged_event.domain) >= self.TRIAL_MAX_EMAILS:
            logged_subevent.error(MessagingEvent.ERROR_TRIAL_EMAIL_LIMIT_REACHED)
            return

        metrics_counter('commcare.messaging.email.sent', tags={'domain': logged_event.domain})
        send_mail_async.delay(subject, message, settings.DEFAULT_FROM_EMAIL,
                              [email_address], logged_subevent.id,
                              domain=logged_event.domain)

        email = Email(
            domain=logged_event.domain,
            date=logged_subevent.date_last_activity,  # use date from subevent for consistency
            couch_recipient_doc_type=logged_subevent.recipient_type,
            couch_recipient=logged_subevent.recipient_id,
            messaging_subevent_id=logged_subevent.pk,
            recipient_address=email_address,
            subject=subject,
            body=message,
        )
        email.save()

        email_usage.update_count()
class SMSSurveyContent(Content):
    """Scheduled-message content that starts an interactive SMS survey
    (form) session with the recipient."""

    app_id = models.CharField(max_length=126, null=True)
    form_unique_id = models.CharField(max_length=126)

    # See corehq.apps.smsforms.models.SQLXFormsSession for an
    # explanation of these properties
    expire_after = models.IntegerField()
    reminder_intervals = models.JSONField(default=list)
    submit_partially_completed_forms = models.BooleanField(default=False)
    include_case_updates_in_partial_submissions = models.BooleanField(default=False)

    def create_copy(self):
        """
        See Content.create_copy() for docstring
        """
        return SMSSurveyContent(
            app_id=None,
            form_unique_id=None,
            expire_after=self.expire_after,
            reminder_intervals=deepcopy(self.reminder_intervals),
            submit_partially_completed_forms=self.submit_partially_completed_forms,
            include_case_updates_in_partial_submissions=self.include_case_updates_in_partial_submissions,
        )

    @memoized
    def get_memoized_app_module_form(self, domain):
        """Return (app, module, form, requires_input), or all Nones when
        the app or form cannot be found."""
        try:
            if toggles.SMS_USE_LATEST_DEV_APP.enabled(domain, toggles.NAMESPACE_DOMAIN):
                app = get_app(domain, self.app_id)
            else:
                app = get_latest_released_app(domain, self.app_id)
            form = app.get_form(self.form_unique_id)
            module = form.get_module()
        except (Http404, FormNotFoundException):
            return None, None, None, None

        return app, module, form, form_requires_input(form)

    def phone_has_opted_out(self, phone_entry_or_number):
        """True if the phone number has opted out of receiving SMS."""
        if isinstance(phone_entry_or_number, PhoneNumber):
            pb = PhoneBlacklist.get_by_phone_number_or_none(phone_entry_or_number.phone_number)
        else:
            pb = PhoneBlacklist.get_by_phone_number_or_none(phone_entry_or_number)

        return pb is not None and not pb.send_sms

    def get_critical_section(self, recipient):
        # NOTE(review): critical_section_already_acquired is presumably set by
        # the caller / base Content when the lock is already held -- confirm.
        if self.critical_section_already_acquired:
            return no_op_context_manager()

        return critical_section_for_smsforms_sessions(recipient.get_id)

    def send(self, recipient, logged_event, phone_entry=None):
        """Start a survey session for ``recipient`` and send the first
        message, logging the attempt as a subevent of ``logged_event``."""
        app, module, form, requires_input = self.get_memoized_app_module_form(logged_event.domain)
        if any([o is None for o in (app, module, form)]):
            logged_event.error(MessagingEvent.ERROR_CANNOT_FIND_FORM)
            return

        logged_subevent = logged_event.create_subevent_from_contact_and_content(
            recipient,
            self,
            case_id=self.case.case_id if self.case else None,
        )

        # We don't try to look up the phone number from the user case in this scenario
        # because this use case involves starting a survey session, which can be
        # very different if the contact is a user or is a case. So here if recipient
        # is a user we only allow them to fill out the survey as the user contact, and
        # not the user case contact.
        phone_entry_or_number = (
            phone_entry or
            self.get_two_way_entry_or_phone_number(
                recipient, try_usercase=False, domain_for_toggles=logged_event.domain)
        )

        if phone_entry_or_number is None:
            logged_subevent.error(MessagingEvent.ERROR_NO_PHONE_NUMBER)
            return

        if requires_input and not isinstance(phone_entry_or_number, PhoneNumber):
            logged_subevent.error(MessagingEvent.ERROR_NO_TWO_WAY_PHONE_NUMBER)
            return

        with self.get_critical_section(recipient):
            # Get the case to submit the form against, if any
            case_id = None
            if is_commcarecase(recipient):
                case_id = recipient.case_id
            elif self.case:
                case_id = self.case.case_id

            if form.requires_case() and not case_id:
                logged_subevent.error(MessagingEvent.ERROR_NO_CASE_GIVEN)
                return

            session, responses = self.start_smsforms_session(
                logged_event.domain,
                recipient,
                case_id,
                phone_entry_or_number,
                logged_subevent,
                self.get_workflow(logged_event),
                app,
                form
            )

            if session:
                logged_subevent.xforms_session = session
                logged_subevent.save()
                # send_first_message is a celery task
                # but we first call it synchronously to save resources in the 99% case
                # send_first_message will retry itself as a delayed celery task
                # if there are conflicting sessions preventing it from sending immediately
                send_first_message(
                    logged_event.domain,
                    recipient,
                    phone_entry_or_number,
                    session,
                    responses,
                    logged_subevent,
                    self.get_workflow(logged_event)
                )

    def start_smsforms_session(self, domain, recipient, case_id, phone_entry_or_number, logged_subevent, workflow,
            app, form):
        """Close any open sessions for ``recipient`` and start a new one.

        Returns (session, responses); (None, None) when the form has a
        configuration problem (logged but not retried)."""
        # Close all currently open sessions
        SQLXFormsSession.close_all_open_sms_sessions(domain, recipient.get_id)

        # Start the new session
        try:
            session, responses = start_session(
                SQLXFormsSession.create_session_object(
                    domain,
                    recipient,
                    (phone_entry_or_number.phone_number
                     if isinstance(phone_entry_or_number, PhoneNumber)
                     else phone_entry_or_number),
                    app,
                    form,
                    expire_after=self.expire_after,
                    reminder_intervals=self.reminder_intervals,
                    submit_partially_completed_forms=self.submit_partially_completed_forms,
                    include_case_updates_in_partial_submissions=self.include_case_updates_in_partial_submissions
                ),
                domain,
                recipient,
                app,
                form,
                case_id,
                yield_responses=True
            )
        except TouchformsError as e:
            logged_subevent.error(
                MessagingEvent.ERROR_TOUCHFORMS_ERROR,
                additional_error_text=get_formplayer_exception(domain, e)
            )

            if touchforms_error_is_config_error(domain, e):
                # Don't reraise the exception because this means there are configuration
                # issues with the form that need to be fixed. The error is logged in the
                # above lines.
                return None, None

            # Reraise the exception so that the framework retries it again later
            raise
        except Exception:
            # was a bare ``except:``; narrowed so KeyboardInterrupt/SystemExit
            # propagate without being logged as touchforms errors
            logged_subevent.error(MessagingEvent.ERROR_TOUCHFORMS_ERROR)
            # Reraise the exception so that the framework retries it again later
            raise

        session.workflow = workflow
        session.save()

        return session, responses
class IVRSurveyContent(Content):
    """
    IVR is no longer supported, but in order to display old configurations we
    need to keep this model around.
    """
    # The unique id of the form that will be used as the IVR Survey
    app_id = models.CharField(max_length=126, null=True)
    form_unique_id = models.CharField(max_length=126)
    # If empty list, this is ignored. Otherwise, this is a list of intervals representing
    # minutes to wait.
    # After waiting the amount of minutes specified by each interval, the framework will
    # check if an outbound IVR call was answered for this event. If not, it will retry
    # the outbound call again.
    reminder_intervals = models.JSONField(default=list)
    # At the end of the IVR call, if this is True, the form will be submitted in its current
    # state regardless if it was completed or not.
    submit_partially_completed_forms = models.BooleanField(default=False)
    # Only matters when submit_partially_completed_forms is True.
    # If True, then case updates will be included in partial form submissions, otherwise
    # they will be excluded.
    include_case_updates_in_partial_submissions = models.BooleanField(default=False)
    # The maximum number of times to attempt asking a question on a phone call
    # before giving up and hanging up. This is meant to prevent long running calls
    # where the user is giving invalid answers or not answering at all.
    max_question_attempts = models.IntegerField(default=5)
    def send(self, recipient, logged_event, phone_entry=None):
        """Intentional no-op: IVR sending is no longer supported."""
        pass
class SMSCallbackContent(Content):
    """
    This use case is no longer supported, but in order to display old configurations we
    need to keep this model around.
    The way that this use case worked was as follows. When the event fires for the
    first time, the SMS message is sent as it is for SMSContent. The recipient is then
    expected to perform a "call back" or "flash back" to the system, where they call
    a phone number, let it ring, and hang up. CommCareHQ records the inbound call when
    this happens.
    Then, for every interval specified by reminder_intervals, the system will wait
    that number of minutes and then check for the expected inbound call from the
    recipient. If the inbound call was received, then no further action is needed.
    If not, the SMS message is sent again. On the last interval, the SMS is not
    sent again and the expected callback event is just closed out.
    The results of the expected call back are stored in an entry in
    corehq.apps.sms.models.ExpectedCallback.
    """
    message = models.JSONField(default=dict)
    # This is a list of intervals representing minutes to wait. It should never be empty.
    # See the explanation above to understand how this is used.
    reminder_intervals = models.JSONField(default=list)
    def send(self, recipient, logged_event, phone_entry=None):
        """Intentional no-op: the SMS-callback use case is no longer supported."""
        pass
class CustomContent(Content):
    # Key into settings.AVAILABLE_CUSTOM_SCHEDULING_CONTENT; the mapped
    # function is resolved at runtime to produce the list of messages to
    # send to the recipient.
    custom_content_id = models.CharField(max_length=126)

    def create_copy(self):
        """
        See Content.create_copy() for docstring
        """
        return CustomContent(custom_content_id=self.custom_content_id)

    def get_list_of_messages(self, recipient):
        """Resolve the configured custom content function and invoke it,
        returning its list of messages."""
        if not self.schedule_instance:
            raise ValueError(
                "Expected CustomContent to be invoked in the context of a "
                "ScheduleInstance. Please pass ScheduleInstance to .set_context()"
            )

        if self.custom_content_id not in settings.AVAILABLE_CUSTOM_SCHEDULING_CONTENT:
            raise ValueError("Encountered unexpected custom content id %s" % self.custom_content_id)

        function_path = settings.AVAILABLE_CUSTOM_SCHEDULING_CONTENT[self.custom_content_id][0]
        content_function = to_function(function_path)
        result = content_function(recipient, self.schedule_instance)

        if not isinstance(result, list):
            raise TypeError("Expected content to be a list of messages")

        return result

    def send(self, recipient, logged_event, phone_entry=None):
        """Send each custom-generated message to ``recipient`` as SMS."""
        logged_subevent = logged_event.create_subevent_from_contact_and_content(
            recipient,
            self,
            case_id=self.case.case_id if self.case else None,
        )

        phone_entry_or_number = self.get_two_way_entry_or_phone_number(
            recipient, domain_for_toggles=logged_event.domain)
        if not phone_entry_or_number:
            logged_subevent.error(MessagingEvent.ERROR_NO_PHONE_NUMBER)
            return

        # An empty list from the custom handler means "send nothing";
        # that is not an error condition, so no error is logged for it.
        try:
            for sms_text in self.get_list_of_messages(recipient):
                self.send_sms_message(logged_event.domain, recipient, phone_entry_or_number,
                                      sms_text, logged_subevent)
        except Exception as error:
            logged_subevent.error(MessagingEvent.ERROR_CANNOT_RENDER_MESSAGE, additional_error_text=str(error))
            raise

        logged_subevent.completed()
| bsd-3-clause | 394d9d757637633f203c87ca4c6d6712 | 38.987041 | 114 | 0.643999 | 4.178289 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/userreports/reports/factory.py | 1 | 9217 | import json
from django.conf import settings
from django.utils.translation import gettext as _
from jsonobject.exceptions import BadValueError
from corehq.apps.userreports.const import (
AGGGREGATION_TYPE_ARRAY_AGG_LAST_VALUE,
)
from corehq.apps.userreports.exceptions import BadSpecError
from corehq.apps.userreports.reports.specs import (
AggregateDateColumn,
ArrayAggLastValueReportColumn,
ExpandedColumn,
ExpressionColumn,
FieldColumn,
IntegerBucketsColumn,
LocationColumn,
MultibarAggregateChartSpec,
MultibarChartSpec,
OrderBySpec,
PercentageColumn,
PieChartSpec,
SumWhenColumn,
SumWhenTemplateColumn,
)
from corehq.apps.userreports.reports.sum_when_templates import (
AdultFemaleMigrantDeathSpec,
AdultFemaleResidentDeathSpec,
AgeAtDeathRangeMigrantSpec,
AgeAtDeathRangeResidentSpec,
CCSPhaseNullTemplateSpec,
CCSPhaseTemplateSpec,
ClosedOnNullTemplateSpec,
ComplementaryFeedingTemplateSpec,
FemaleAgeAtDeathSpec,
FemaleDeathTypeMigrantSpec,
FemaleDeathTypeResidentSpec,
OpenDisabilityTypeSpec,
OpenFemaleDisabledSpec,
OpenFemaleHHCasteNotSpec,
OpenFemaleHHCasteSpec,
OpenFemaleHHMinoritySpec,
OpenFemaleMigrantDistinctFromSpec,
OpenFemaleMigrantSpec,
OpenFemaleResidentSpec,
OpenFemaleSpec,
OpenMaleDisabledSpec,
OpenMaleHHCasteNotSpec,
OpenMaleHHCasteSpec,
OpenMaleHHMinoritySpec,
OpenMaleMigrantDistinctFromSpec,
OpenMaleMigrantSpec,
OpenMaleResidentSpec,
OpenPregnantMigrantSpec,
OpenPregnantResidentSpec,
ReachedReferralHealthProblem2ProblemsSpec,
ReachedReferralHealthProblem3ProblemsSpec,
ReachedReferralHealthProblem5ProblemsSpec,
ReachedReferralHealthProblemSpec,
ReferralHealthProblem2ProblemsSpec,
ReferralHealthProblem3ProblemsSpec,
ReferralHealthProblem5ProblemsSpec,
ReferralHealthProblemSpec,
UnderXMonthsTemplateSpec,
YearRangeTemplateSpec,
)
from custom.nutrition_project.ucr.sum_when_templates import (
BreastfeedingSpec,
ChildDeathSpec,
ChildDeliverySpec,
ChildLowBirthWeightSpec,
ChildWeighedSpec,
ComplementaryFeedingStartedCheckSpec,
GenderAndResidentTypeSpec,
ImmediateBreastfeedingInitiatedSpec,
LatestBMICategorySpec,
NutritionCenterOpenTodaySpec,
OnTimeVisitCheckSpec,
WomanDeathSpec,
WomanDeathTypeSpec,
)
class ReportColumnFactory(object):
    """Builds report column objects of the appropriate class from JSON specs."""

    class_map = {
        'aggregate_date': AggregateDateColumn,
        'expanded': ExpandedColumn,
        'expression': ExpressionColumn,
        'field': FieldColumn,
        'integer_buckets': IntegerBucketsColumn,
        'location': LocationColumn,
        'percent': PercentageColumn,
        'sum_when': SumWhenColumn,
        'sum_when_template': SumWhenTemplateColumn,
        AGGGREGATION_TYPE_ARRAY_AGG_LAST_VALUE: ArrayAggLastValueReportColumn,
    }

    @classmethod
    def from_spec(cls, spec, is_static, domain=None):
        """Wrap ``spec`` into the matching column class, raising BadSpecError
        on unknown types, restricted types, or invalid spec contents."""
        column_type = spec.get('type') or 'field'
        column_class = cls.class_map.get(column_type)
        if column_class is None:
            raise BadSpecError(
                'Unknown or missing column type: {} must be in [{}]'.format(
                    column_type,
                    ', '.join(cls.class_map)
                )
            )
        allowed = is_static or settings.UNIT_TESTING
        if column_class.restricted_to_static(domain) and not allowed:
            raise BadSpecError(
                "{} columns are only available to static report configs".format(column_type))
        try:
            return column_class.wrap(spec)
        except BadValueError as e:
            raise BadSpecError(_(
                'Problem creating column from spec: {}, message is: {}'
            ).format(
                json.dumps(spec, indent=2),
                str(e),
            ))
class ChartFactory(object):
    """Builds chart spec objects from JSON chart specs."""

    spec_map = {
        'pie': PieChartSpec,
        'multibar': MultibarChartSpec,
        'multibar-aggregate': MultibarAggregateChartSpec,
    }

    @classmethod
    def from_spec(cls, spec):
        """Wrap ``spec`` into the matching chart spec class, raising
        BadSpecError on unknown types or invalid spec contents."""
        chart_type = spec.get('type')
        if chart_type not in cls.spec_map:
            raise BadSpecError(_('Illegal chart type: {0}, must be one of the following choices: ({1})').format(
                spec.get('type', _('(missing from spec)')),
                ', '.join(cls.spec_map)
            ))
        try:
            return cls.spec_map[chart_type].wrap(spec)
        except BadValueError as e:
            raise BadSpecError(_('Problem creating chart from spec: {}, message is: {}').format(
                json.dumps(spec, indent=2),
                str(e),
            ))
class ReportOrderByFactory(object):
    """Builds OrderBySpec objects from raw report order-by specs."""
    @classmethod
    def from_spec(cls, spec):
        return OrderBySpec.wrap(spec)
class SumWhenTemplateFactory(object):
    """Builds sum_when_template column expressions from JSON specs.

    Maps each template type string to its spec class and validates that
    the number of bind parameters matches what the template expects.
    """
    spec_map = {
        'adult_female_migrant_death': AdultFemaleMigrantDeathSpec,
        'adult_female_resident_death': AdultFemaleResidentDeathSpec,
        'age_at_death_range_migrant': AgeAtDeathRangeMigrantSpec,
        'age_at_death_range_resident': AgeAtDeathRangeResidentSpec,
        'ccs_phase': CCSPhaseTemplateSpec,
        'ccs_phase_null': CCSPhaseNullTemplateSpec,
        'complementary_feeding': ComplementaryFeedingTemplateSpec,
        'closed_on_null': ClosedOnNullTemplateSpec,
        'female_age_at_death': FemaleAgeAtDeathSpec,
        'female_death_type_migrant': FemaleDeathTypeMigrantSpec,
        'female_death_type_resident': FemaleDeathTypeResidentSpec,
        'open_disability_type': OpenDisabilityTypeSpec,
        'open_female': OpenFemaleSpec,
        'open_female_disabled': OpenFemaleDisabledSpec,
        'open_female_hh_caste': OpenFemaleHHCasteSpec,
        'open_female_hh_caste_not': OpenFemaleHHCasteNotSpec,
        'open_female_hh_minority': OpenFemaleHHMinoritySpec,
        'open_female_migrant': OpenFemaleMigrantSpec,
        'open_female_migrant_distinct_from': OpenFemaleMigrantDistinctFromSpec,
        'open_female_resident': OpenFemaleResidentSpec,
        'open_male_disabled': OpenMaleDisabledSpec,
        'open_male_hh_caste': OpenMaleHHCasteSpec,
        'open_male_hh_caste_not': OpenMaleHHCasteNotSpec,
        'open_male_hh_minority': OpenMaleHHMinoritySpec,
        'open_male_migrant': OpenMaleMigrantSpec,
        'open_male_migrant_distinct_from': OpenMaleMigrantDistinctFromSpec,
        'open_male_resident': OpenMaleResidentSpec,
        'open_pregnant_migrant': OpenPregnantMigrantSpec,
        'open_pregnant_resident': OpenPregnantResidentSpec,
        'reached_referral_health_problem': ReachedReferralHealthProblemSpec,
        'reached_referral_health_problem_2_problems': ReachedReferralHealthProblem2ProblemsSpec,
        'reached_referral_health_problem_3_problems': ReachedReferralHealthProblem3ProblemsSpec,
        'reached_referral_health_problem_5_problems': ReachedReferralHealthProblem5ProblemsSpec,
        'referral_health_problem': ReferralHealthProblemSpec,
        'referral_health_problem_2_problems': ReferralHealthProblem2ProblemsSpec,
        'referral_health_problem_3_problems': ReferralHealthProblem3ProblemsSpec,
        'referral_health_problem_5_problems': ReferralHealthProblem5ProblemsSpec,
        'under_x_months': UnderXMonthsTemplateSpec,
        'year_range': YearRangeTemplateSpec,

        # India Nutrition Project templates
        ChildDeliverySpec.type.choices[0]: ChildDeliverySpec,
        ChildWeighedSpec.type.choices[0]: ChildWeighedSpec,
        ChildLowBirthWeightSpec.type.choices[0]: ChildLowBirthWeightSpec,
        ChildDeathSpec.type.choices[0]: ChildDeathSpec,
        WomanDeathSpec.type.choices[0]: WomanDeathSpec,
        WomanDeathTypeSpec.type.choices[0]: WomanDeathTypeSpec,
        GenderAndResidentTypeSpec.type.choices[0]: GenderAndResidentTypeSpec,
        NutritionCenterOpenTodaySpec.type.choices[0]: NutritionCenterOpenTodaySpec,
        OnTimeVisitCheckSpec.type.choices[0]: OnTimeVisitCheckSpec,
        BreastfeedingSpec.type.choices[0]: BreastfeedingSpec,
        ImmediateBreastfeedingInitiatedSpec.type.choices[0]: ImmediateBreastfeedingInitiatedSpec,
        ComplementaryFeedingStartedCheckSpec.type.choices[0]: ComplementaryFeedingStartedCheckSpec,
        LatestBMICategorySpec.type.choices[0]: LatestBMICategorySpec,
    }
    @classmethod
    def make_template(cls, spec):
        """Wrap ``spec`` into the matching template class, raising
        BadSpecError on unknown types, invalid contents, or a bind-count
        mismatch between the template and the supplied binds."""
        if spec.get('type') not in cls.spec_map:
            raise BadSpecError(_('Illegal sum_when_template type: "{0}", must be in: ({1})').format(
                spec.get('type'),
                ', '.join(cls.spec_map)
            ))
        try:
            template = cls.spec_map[spec['type']].wrap(spec)
        except BadValueError as e:
            raise BadSpecError(_('Problem creating template: {}, message is: {}').format(
                json.dumps(spec, indent=2),
                str(e),
            ))
        # Each template declares how many bind parameters it expects.
        expected = template.bind_count()
        actual = len(template.binds)
        if expected != actual:
            raise BadSpecError(_('Expected {} binds in sum_when_template {}, found {}').format(
                expected,
                spec['type'],
                actual
            ))
        return template
| bsd-3-clause | af520d09eea3d587f83d23fd76f7a7c4 | 37.890295 | 112 | 0.685364 | 3.65754 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/linked_domain/view_helpers.py | 1 | 14273 | from django.utils.translation import gettext as _
from corehq.apps.app_manager.dbaccessors import get_brief_apps_in_domain
from corehq.apps.app_manager.util import is_linked_app
from corehq.apps.fixtures.models import LookupTable
from corehq.apps.linked_domain.const import (
DOMAIN_LEVEL_DATA_MODELS,
FEATURE_FLAG_DATA_MODEL_TOGGLES,
FEATURE_FLAG_DATA_MODELS,
LINKED_MODELS_MAP,
MODEL_APP,
MODEL_FIXTURE,
MODEL_KEYWORD,
MODEL_REPORT,
MODEL_UCR_EXPRESSION,
SUPERUSER_DATA_MODELS,
)
from corehq.apps.linked_domain.dbaccessors import (
get_actions_in_domain_link_history,
)
from corehq.apps.linked_domain.models import (
AppLinkDetail,
FixtureLinkDetail,
KeywordLinkDetail,
ReportLinkDetail,
UCRExpressionLinkDetail,
)
from corehq.apps.linked_domain.util import server_to_user_time, is_keyword_linkable
from corehq.apps.sms.models import Keyword
from corehq.apps.userreports.models import ReportConfiguration, UCRExpression
from corehq.apps.userreports.util import get_existing_reports
def build_domain_link_view_model(link, timezone):
    """Serialize a domain link into the dict consumed by the linked-domain UI."""
    if link.last_pull:
        last_update = server_to_user_time(link.last_pull, timezone)
    else:
        last_update = _('Never')
    return {
        'downstream_domain': link.linked_domain,
        'upstream_domain': link.master_domain,
        'upstream_url': link.upstream_url,
        'downstream_url': link.downstream_url,
        'is_remote': link.is_remote,
        'last_update': last_update,
        'has_full_access': link.has_full_access(),
    }
def get_upstream_and_downstream_apps(domain):
    """
    Return 2 dicts of app briefs, keyed by app id.
    The upstream dict contains apps that originated in the specified domain.
    The downstream dict contains apps that were pulled from an upstream domain.
    """
    briefs = get_brief_apps_in_domain(domain, include_remote=False)
    upstream_list = {brief._id: brief for brief in briefs if not is_linked_app(brief)}
    downstream_list = {brief._id: brief for brief in briefs if is_linked_app(brief)}
    return upstream_list, downstream_list
def get_upstream_and_downstream_fixtures(domain, upstream_link):
    """
    Return 2 dicts of global fixtures keyed by tag: the specified domain's
    fixtures, and (when an upstream link exists) the upstream domain's.
    """
    upstream_list = get_fixtures_for_domain(domain)
    if upstream_link:
        downstream_list = get_fixtures_for_domain(upstream_link.master_domain)
    else:
        downstream_list = {}
    return upstream_list, downstream_list
def get_fixtures_for_domain(domain):
    """Return the domain's global lookup tables as a dict keyed by tag."""
    tables = LookupTable.objects.filter(domain=domain, is_global=True)
    return {table.tag: table for table in tables}
def get_upstream_and_downstream_reports(domain):
    """
    Return 2 dicts of reports, keyed by report id.
    The upstream dict contains reports that originated in the specified domain.
    The downstream dict contains reports that were pulled from an upstream domain.
    """
    upstream_list = {}
    downstream_list = {}
    for report in get_existing_reports(domain):
        bucket = downstream_list if report.report_meta.master_id else upstream_list
        bucket[report.get_id] = report
    return upstream_list, downstream_list
def get_upstream_and_downstream_keywords(domain):
    """
    Return 2 dicts of keywords, keyed by stringified id.
    The upstream dict contains keywords that originated in the specified domain.
    The downstream dict contains keywords that were pulled from an upstream domain.
    """
    upstream_list = {}
    downstream_list = {}
    for keyword in Keyword.objects.filter(domain=domain):
        bucket = downstream_list if keyword.upstream_id else upstream_list
        bucket[str(keyword.id)] = keyword
    return upstream_list, downstream_list
def get_upstream_and_downstream_ucr_expressions(domain):
    """
    Return 2 dicts of UCR expressions, keyed by stringified id.
    The upstream dict contains expressions that originated in the specified domain.
    The downstream dict contains expressions pulled from an upstream domain.
    """
    upstream_list = {}
    downstream_list = {}
    for expression in UCRExpression.objects.filter(domain=domain):
        bucket = downstream_list if expression.upstream_id else upstream_list
        bucket[str(expression.id)] = expression
    return upstream_list, downstream_list
def build_app_view_model(app, last_update=None):
    """Build a linked-data view model for an app, or None if no app given."""
    if not app:
        return None
    display_name = f"{LINKED_MODELS_MAP[MODEL_APP]} ({app.name})"
    detail = AppLinkDetail(app_id=app._id).to_json()
    return build_linked_data_view_model(
        model_type=MODEL_APP,
        name=display_name,
        detail=detail,
        last_update=last_update,
    )
def build_fixture_view_model(fixture, last_update=None):
    """Build a linked-data view model for a fixture, or None if no fixture
    given. Only global fixtures are updatable."""
    if not fixture:
        return None
    display_name = f"{LINKED_MODELS_MAP[MODEL_FIXTURE]} ({fixture.tag})"
    detail = FixtureLinkDetail(tag=fixture.tag).to_json()
    return build_linked_data_view_model(
        model_type=MODEL_FIXTURE,
        name=display_name,
        detail=detail,
        last_update=last_update,
        can_update=fixture.is_global,
    )
def build_report_view_model(report, last_update=None):
    """Build a linked-data view model for a report, or None if no report given."""
    if not report:
        return None
    display_name = f"{LINKED_MODELS_MAP[MODEL_REPORT]} ({report.title})"
    detail = ReportLinkDetail(report_id=report.get_id).to_json()
    return build_linked_data_view_model(
        model_type=MODEL_REPORT,
        name=display_name,
        detail=detail,
        last_update=last_update,
    )
def build_keyword_view_model(keyword, last_update=None):
    """Build a linked-data view model for a keyword, or None if no keyword given."""
    if not keyword:
        return None
    display_name = f"{LINKED_MODELS_MAP[MODEL_KEYWORD]} ({keyword.keyword})"
    detail = KeywordLinkDetail(keyword_id=str(keyword.id)).to_json()
    return build_linked_data_view_model(
        model_type=MODEL_KEYWORD,
        name=display_name,
        detail=detail,
        last_update=last_update,
        is_linkable=is_keyword_linkable(keyword),
    )
def build_ucr_expression_view_model(ucr_expression, last_update=None):
    """Build a linked-data view model for a UCR expression, or None if absent."""
    if not ucr_expression:
        return None
    display_name = f"{LINKED_MODELS_MAP[MODEL_UCR_EXPRESSION]} ({ucr_expression.name})"
    detail = UCRExpressionLinkDetail(ucr_expression_id=str(ucr_expression.id)).to_json()
    return build_linked_data_view_model(
        model_type=MODEL_UCR_EXPRESSION,
        name=display_name,
        detail=detail,
        last_update=last_update,
    )
def build_feature_flag_view_models(domain, ignore_models=None):
    """Return view models for feature-flag-gated data models enabled on *domain*.

    A model appears only if it is not in *ignore_models* and its toggle in
    ``FEATURE_FLAG_DATA_MODEL_TOGGLES`` is enabled for the domain.

    :param domain: domain name used to check each model's feature flag
    :param ignore_models: optional iterable of model types to exclude
    :return: list of view-model dicts (see ``build_linked_data_view_model``)
    """
    ignored = set(ignore_models or ())  # set for O(1) membership tests
    return [
        build_linked_data_view_model(
            model_type=model,
            name=name,
            detail=None,
            last_update=_('Never'),
        )
        for model, name in FEATURE_FLAG_DATA_MODELS
        if model not in ignored and FEATURE_FLAG_DATA_MODEL_TOGGLES[model].enabled(domain)
    ]
def build_domain_level_view_models(ignore_models=None):
    """Return view models for the domain-level data models.

    :param ignore_models: optional iterable of model types to exclude
    :return: list of view-model dicts (see ``build_linked_data_view_model``)
    """
    ignored = set(ignore_models or ())  # set for O(1) membership tests
    return [
        build_linked_data_view_model(
            model_type=model,
            name=name,
            detail=None,
            last_update=_('Never'),
        )
        for model, name in DOMAIN_LEVEL_DATA_MODELS
        if model not in ignored
    ]
def build_superuser_view_models(ignore_models=None):
    """Return view models for the superuser-only data models.

    :param ignore_models: optional iterable of model types to exclude
    :return: list of view-model dicts (see ``build_linked_data_view_model``)
    """
    ignored = set(ignore_models or ())  # set for O(1) membership tests
    return [
        build_linked_data_view_model(
            model_type=model,
            name=name,
            detail=None,
            last_update=_('Never'),
        )
        for model, name in SUPERUSER_DATA_MODELS
        if model not in ignored
    ]
def build_linked_data_view_model(model_type, name, detail, last_update=None, can_update=True, is_linkable=True):
    """Assemble the dict consumed by the release-content page for one data model."""
    return dict(
        type=model_type,
        name=name,
        detail=detail,
        last_update=last_update,
        can_update=can_update,
        is_linkable=is_linkable,
    )
def build_view_models_from_data_models(
    domain, apps, fixtures, reports, keywords, ucr_expressions, ignore_models=None, is_superuser=False
):
    """
    Based on the provided data models, convert to view models, ignoring any models specified in ignore_models
    :return: list of view models (dicts) used to render elements on the release content page
    """
    view_models = []
    if is_superuser:
        view_models.extend(build_superuser_view_models(ignore_models=ignore_models))
    view_models.extend(build_domain_level_view_models(ignore_models=ignore_models))
    view_models.extend(build_feature_flag_view_models(domain, ignore_models=ignore_models))
    # Pair each collection of data models with the builder that converts one
    # entry into a view model; order matches the display order on the page.
    builders = (
        (apps, build_app_view_model),
        (fixtures, build_fixture_view_model),
        (reports, build_report_view_model),
        (keywords, build_keyword_view_model),
        (ucr_expressions, build_ucr_expression_view_model),
    )
    for collection, build in builders:
        for data_model in collection.values():
            view_model = build(data_model)
            if view_model:
                view_models.append(view_model)
    return view_models
def pop_app_for_action(action, apps):
    """Remove and return the app referenced by *action* from *apps*, or None.

    Returns None when the action carries no model detail or when the app id
    is not present in *apps*.
    """
    if not action.model_detail:
        return None
    return apps.pop(action.wrapped_detail.app_id, None)
def pop_fixture_for_action(action, fixtures, domain):
    """Remove and return the fixture referenced by *action* from *fixtures*.

    Falls back to a ``LookupTable`` lookup by domain and tag when the cached
    collection yields nothing; returns None when the action has no detail.
    """
    if not action.model_detail:
        return None
    tag = action.wrapped_detail.tag
    fixture = fixtures.pop(tag, None)
    if not fixture:
        fixture = LookupTable.objects.by_domain_tag(domain, tag)
    return fixture
def pop_report(report_id, reports):
    """Remove and return the report with *report_id* from *reports*.

    When not cached, fetch it from Couch; a freshly fetched report whose
    doc_type marks it as deleted yields None.
    """
    report = reports.pop(report_id, None)
    if report is not None:
        return report
    fetched = ReportConfiguration.get(report_id)
    if fetched.doc_type == "ReportConfiguration-Deleted":
        return None
    return fetched
def pop_keyword(keyword_id, keywords):
    """Remove and return the keyword with *keyword_id* from *keywords*.

    When not cached, fetch it from the database; returns None if it does
    not exist there either.
    """
    keyword = keywords.pop(keyword_id, None)
    if keyword is not None:
        return keyword
    try:
        return Keyword.objects.get(id=keyword_id)
    except Keyword.DoesNotExist:
        return None
def pop_ucr_expression(ucr_expression_id, ucr_expressions):
    """Remove and return the UCR expression with *ucr_expression_id*.

    When not cached in *ucr_expressions*, fetch it from the database;
    returns None if it does not exist there either.
    """
    ucr_expression = ucr_expressions.pop(ucr_expression_id, None)
    if ucr_expression is not None:
        return ucr_expression
    try:
        return UCRExpression.objects.get(id=ucr_expression_id)
    except UCRExpression.DoesNotExist:
        return None
def build_pullable_view_models_from_data_models(
    domain, upstream_link, apps, fixtures, reports, keywords, ucr_expressions, timezone, is_superuser=False
):
    """
    Data models that originated in this domain's upstream domain that are available to pull

    :param domain: downstream domain the models would be pulled into
    :param upstream_link: link to the upstream domain; falsy means nothing to pull
    :param apps: dict of candidate apps keyed by id; matched entries are popped
    :param fixtures: dict of candidate fixtures keyed by tag; matched entries are popped
    :param reports: dict of candidate reports keyed by id; matched entries are popped
    :param keywords: dict of candidate keywords keyed by id; matched entries are popped
    :param ucr_expressions: dict of candidate UCR expressions keyed by id; matched entries are popped
    :param timezone: timezone used to render each action's timestamp
    :param is_superuser: when True, include superuser-only models
    :return: list of view models (dicts) used to render linked data models that can be pulled
    """
    linked_data_view_models = []
    if not upstream_link:
        return linked_data_view_models
    # Hoisted out of the loop: previously dict(SUPERUSER_DATA_MODELS) was rebuilt
    # on every appended view model, and only its keys were ever consulted.
    superuser_model_types = {model for model, _name in SUPERUSER_DATA_MODELS}
    models_seen = set()
    history = get_actions_in_domain_link_history(upstream_link)
    for action in history:
        if action.row_number != 1:
            # first row is the most recent
            continue
        models_seen.add(action.model)
        last_update = server_to_user_time(action.date, timezone)
        if action.model == MODEL_APP:
            app = pop_app_for_action(action, apps)
            view_model = build_app_view_model(app, last_update=last_update)
        elif action.model == MODEL_FIXTURE:
            fixture = pop_fixture_for_action(action, fixtures, domain)
            view_model = build_fixture_view_model(fixture, last_update=last_update)
        elif action.model == MODEL_REPORT:
            report = pop_report(action.wrapped_detail.report_id, reports)
            view_model = build_report_view_model(report, last_update=last_update)
        elif action.model == MODEL_KEYWORD:
            keyword = pop_keyword(action.wrapped_detail.keyword_id, keywords)
            view_model = build_keyword_view_model(keyword, last_update=last_update)
        elif action.model == MODEL_UCR_EXPRESSION:
            ucr_expression = pop_ucr_expression(action.wrapped_detail.ucr_expression_id, ucr_expressions)
            view_model = build_ucr_expression_view_model(ucr_expression, last_update=last_update)
        else:
            view_model = build_linked_data_view_model(
                model_type=action.model,
                name=LINKED_MODELS_MAP[action.model],
                detail=action.model_detail,
                last_update=last_update,
            )
        # superuser-only models are hidden from non-superusers
        if view_model and (view_model['type'] not in superuser_model_types or is_superuser):
            linked_data_view_models.append(view_model)
    # Add data models that have never been pulled into the downstream domain before
    # ignoring any models we have already added via domain history
    linked_data_view_models.extend(
        build_view_models_from_data_models(
            domain,
            apps,
            fixtures,
            reports,
            keywords,
            ucr_expressions,
            ignore_models=models_seen,
            is_superuser=is_superuser
        )
    )
    return linked_data_view_models
| bsd-3-clause | 2d0d1c34c5929f0f7b3b7a569d8bf933 | 33.227818 | 112 | 0.654032 | 3.762984 | false | false | false | false |
dimagi/commcare-hq | corehq/apps/registry/signals.py | 1 | 1763 | from django.dispatch import Signal, receiver
from corehq.apps.registry.notifications import (
send_invitation_email,
send_invitation_response_email,
send_grant_email,
)
# Django signals fired at each stage of a data registry's lifecycle.
# The trailing comment on each declaration lists the keyword arguments
# ("providing args") the signal is sent with.
data_registry_activated = Signal() # providing args: registry
data_registry_deactivated = Signal() # providing args: registry
data_registry_schema_changed = Signal() # providing args: registry, new_schema, old_schema
data_registry_invitation_created = Signal() # providing args: registry, invitation
data_registry_invitation_removed = Signal() # providing args: registry, invitation
data_registry_invitation_accepted = Signal() # providing args: registry, invitation, previous_status
data_registry_invitation_rejected = Signal() # providing args: registry, invitation, previous_status
data_registry_grant_created = Signal() # providing args: registry, from_domain, to_domains
data_registry_grant_removed = Signal() # providing args: registry, from_domain, to_domains
data_registry_deleted = Signal() # providing args: registry
@receiver(data_registry_invitation_created)
def send_invitation_email_receiver(sender, **kwargs):
    """Send the invitation notification email when a registry invitation is created."""
    registry = kwargs["registry"]
    invitation = kwargs["invitation"]
    send_invitation_email(registry, invitation)
@receiver([data_registry_invitation_accepted, data_registry_invitation_rejected])
def send_invitation_response_email_receiver(sender, **kwargs):
    """Send the response notification email when an invitation is accepted or rejected."""
    registry = kwargs["registry"]
    invitation = kwargs["invitation"]
    # don't send emails for the owning domain's invitation
    if invitation.domain != registry.domain:
        send_invitation_response_email(registry, invitation)
@receiver(data_registry_grant_created)
def send_grant_email_receiver(sender, **kwargs):
    """Send the grant notification email when a new registry grant is created."""
    registry = kwargs["registry"]
    from_domain = kwargs["from_domain"]
    to_domains = kwargs["to_domains"]
    send_grant_email(registry, from_domain, to_domains)
| bsd-3-clause | d5a2f18ba57c8d17dce92cbe6c75dbd5 | 45.394737 | 101 | 0.757232 | 3.832609 | false | false | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.